/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"

#include "bytecode.h"
#include "bc-opcode.h"
#include "bc-typecd.h"
#include "bc-optab.h"
#include "bc-emit.h"


#define CEIL(x,y) (((x) + (y) - 1) / (y))
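
/* Illustrative example (not part of the original source): CEIL rounds a
   division up, so CEIL (10, 4) evaluates to 3, the number of 4-byte
   units needed to cover 10 bytes.  */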

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
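
/* Illustrative example (not part of the original source): on a target
   where STACK_BOUNDARY is 64 and BITS_PER_UNIT is 8, STACK_BYTES is 8,
   i.e. the stack is kept aligned to 8-byte units.  */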

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack.  (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.)  */

extern int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;

static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
                                    struct move_by_pieces *));
static void clear_by_pieces PROTO((rtx, int, int));
static void clear_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
                                     struct clear_by_pieces *));
static int is_zeros_p PROTO((tree));
static int mostly_zeros_p PROTO((tree));
static void store_constructor PROTO((tree, rtx, int));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
                              enum machine_mode, int, int, int));
static int get_inner_unaligned_p PROTO((tree));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx,
                                 enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int, int));
void bc_expand_increment PROTO((struct increment_operator *, tree));
rtx bc_allocate_local PROTO((int, int));
void bc_store_memory PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address PROTO((tree));
void bc_expand_constructor PROTO((tree));
void bc_adjust_stack PROTO((int));
tree bc_canonicalize_array_ref PROTO((tree));
void bc_load_memory PROTO((tree, tree));
void bc_load_externaddr PROTO((rtx));
void bc_load_externaddr_id PROTO((tree, int));
void bc_load_localaddr PROTO((rtx));
void bc_load_parmaddr PROTO((rtx));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx,
                                         rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
static tree defer_cleanups_to PROTO((tree));
extern void (*interim_eh_hook) PROTO((tree));
extern tree truthvalue_conversion PROTO((tree));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
\f
/* Maps used to convert modes to const, load, and store bytecodes.  */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];

/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */
void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode] =
      mode_to_load_map[mode] =
        mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
#undef DEF_MODEMAP
}
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            reg = gen_rtx (REG, mode, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  end_sequence ();
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
                           var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
         use of autoincrement.  Make a copy of the contents of the memory
         location rather than a copy of the address, but not if the value is
         of mode BLKmode.  Don't modify X in place since it might be
         shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          register rtx y = XEXP (x, 0);
          register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));

          MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
          RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
          MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

          if (QUEUED_INSN (y))
            {
              register rtx temp = gen_reg_rtx (GET_MODE (new));
              emit_insn_before (gen_move_insn (temp, new),
                                QUEUED_INSN (y));
              return temp;
            }
          return new;
        }
      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        {
          rtx tem = protect_from_queue (XEXP (x, 0), 0);
          if (tem != XEXP (x, 0))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = tem;
            }
        }
      else if (code == PLUS || code == MULT)
        {
          rtx new0 = protect_from_queue (XEXP (x, 0), 0);
          rtx new1 = protect_from_queue (XEXP (x, 1), 0);
          if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = new0;
              XEXP (x, 1) = new1;
            }
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
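
/* Illustrative sketch (not part of the original source): expanding
   `a = b++' might queue the increment of B and then do

	rtx val = protect_from_queue (b_rtx, 0);
	emit_move_insn (a_rtx, val);
	emit_queue ();

   so that A receives the pre-increment value of B even though the
   increment insn itself is emitted later.  A_RTX and B_RTX here are
   hypothetical rtx's standing for the two variables.  */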

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
        || queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

#ifdef HAVE_extendqfhf2
      if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendqfsf2
      if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendqfdf2
      if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendqfxf2
      if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendqftf2
      if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_extendhftqf2
      if (HAVE_extendhftqf2 && from_mode == HFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_extendhftqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_extendhfsf2
      if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendhfdf2
      if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendhfxf2
      if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendhftf2
      if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif

      libcall = (rtx) 0;
      switch (from_mode)
        {
        case SFmode:
          switch (to_mode)
            {
            case DFmode:
              libcall = extendsfdf2_libfunc;
              break;

            case XFmode:
              libcall = extendsfxf2_libfunc;
              break;

            case TFmode:
              libcall = extendsftf2_libfunc;
              break;
            }
          break;

        case DFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncdfsf2_libfunc;
              break;

            case XFmode:
              libcall = extenddfxf2_libfunc;
              break;

            case TFmode:
              libcall = extenddftf2_libfunc;
              break;
            }
          break;

        case XFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncxfsf2_libfunc;
              break;

            case DFmode:
              libcall = truncxfdf2_libfunc;
              break;
            }
          break;

        case TFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = trunctfsf2_libfunc;
              break;

            case DFmode:
              libcall = trunctfdf2_libfunc;
              break;
            }
          break;
        }

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */
        abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
                                       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (GET_CODE (to) == REG)
            emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */		/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
        {
          emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi2
          if (HAVE_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi2 */
          abort ();
        }
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
        from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
        {
          emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
        {
          from = convert_to_mode (DImode, from, unsignedp);
          from_mode = DImode;
        }
      else
        {
#ifdef HAVE_extendpdidi2
          if (HAVE_extendpdidi2)
            {
              emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpdidi2 */
          abort ();
        }
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.  */
          abort ();
        }
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
        {
          emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
        {
          emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
        {
          emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
        {
          emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
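
/* Illustrative sketch (not part of the original source): to widen a
   signed HImode register into an SImode register, a caller might do

	rtx hi_reg = gen_reg_rtx (HImode);
	rtx si_reg = gen_reg_rtx (SImode);
	convert_move (si_reg, hi_reg, 0);

   The final argument 0 requests sign extension; a nonzero value would
   request zero extension instead.  */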

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (GET_CODE (x) == REG
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return GEN_INT (val);
        }

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
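
/* Illustrative sketch (not part of the original source): converting a
   QImode constant to SImode as unsigned,

	rtx wide = convert_modes (SImode, QImode, GEN_INT (-1), 1);

   yields GEN_INT (255): the 8-bit pattern is zero-extended rather than
   interpreted as the negative value -1.  */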
\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
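
/* Worked example (illustrative, assuming a hypothetical target with
   MOVE_MAX == 8, fully aligned operands, and move insns for QImode,
   HImode, SImode and DImode): for L == 15 the loop counts one DImode
   move (8 bytes), one SImode move (4 bytes), one HImode move (2 bytes)
   and one QImode move (1 byte), so move_by_pieces_ninsns returns 4.  */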

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
             ? gen_rtx (MEM, mode, data->to_addr)
             : change_address (data->to, mode,
                               plus_constant (data->to_addr, data->offset)));
      MEM_IN_STRUCT_P (to1) = data->to_struct;
      from1 =
        (data->autinc_from
         ? gen_rtx (MEM, mode, data->from_addr)
         : change_address (data->from, mode,
                           plus_constant (data->from_addr, data->offset)));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        {
          enum insn_code code = movstr_optab[(int) mode];

          if (code != CODE_FOR_nothing
              /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
                 here because if SIZE is less than the mode mask, as it is
                 returned by the macro, it will definitely be less than the
                 actual mode mask.  */
              && ((GET_CODE (size) == CONST_INT
                   && ((unsigned HOST_WIDE_INT) INTVAL (size)
                       <= GET_MODE_MASK (mode)))
                  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
              && (insn_operand_predicate[(int) code][0] == 0
                  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
              && (insn_operand_predicate[(int) code][1] == 0
                  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
              && (insn_operand_predicate[(int) code][3] == 0
                  || (*insn_operand_predicate[(int) code][3]) (opalign,
                                                               VOIDmode)))
            {
              rtx op2;
              rtx last = get_last_insn ();
              rtx pat;

              op2 = convert_to_mode (mode, size, 1);
              if (insn_operand_predicate[(int) code][2] != 0
                  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
                op2 = copy_to_mode_reg (mode, op2);

              pat = GEN_FCN ((int) code) (x, y, op2, opalign);
              if (pat)
                {
                  emit_insn (pat);
                  return;
                }
              else
                delete_insns_since (last);
            }
        }

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
                         VOIDmode, 3, XEXP (x, 0), Pmode,
                         XEXP (y, 0), Pmode,
                         convert_to_mode (TYPE_MODE (sizetype), size,
                                          TREE_UNSIGNED (sizetype)),
                         TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
                         VOIDmode, 3, XEXP (y, 0), Pmode,
                         XEXP (x, 0), Pmode,
                         convert_to_mode (TYPE_MODE (integer_type_node), size,
                                          TREE_UNSIGNED (integer_type_node)),
                         TYPE_MODE (integer_type_node));
#endif
    }
}
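
/* Illustrative sketch (not part of the original source): copying a
   16-byte BLKmode object SRC to DST when both are known to be aligned
   to 4 bytes could be done with

	emit_block_move (dst, src, GEN_INT (16), 4);

   where DST and SRC are hypothetical BLKmode MEM rtx's.  Since the size
   is a small CONST_INT, this would normally expand into a few scalar
   moves via move_by_pieces rather than a library call.  */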
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
  rtx pat, last;

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
        abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
                            gen_rtx (REG, word_mode, regno),
                            build_int_2 ((UNITS_PER_WORD - size)
                                         * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
                                GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}
1819
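/* Worked example of the BYTES_BIG_ENDIAN case above (added commentary):
   with UNITS_PER_WORD == 4 and a 2-byte object, the value sits in the
   low-order end of the register, so it is shifted left by
   (4 - 2) * BITS_PER_UNIT == 16 bits to move it to the memory-significant
   (left) end of the word before the word is stored.  */
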
/* Emit code to move a block Y to a block X, where X is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_load (x, y)
     rtx x, y;
{
  rtx target_reg, source;
  int i;

  if (GET_CODE (x) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (x, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (x, 0); i++)
    {
      rtx element = XVECEXP (x, 0, i);

      target_reg = XEXP (element, 0);

      if (GET_CODE (y) == MEM)
	source = change_address (y, GET_MODE (target_reg),
				 plus_constant (XEXP (y, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  if (GET_MODE (target_reg) == GET_MODE (y))
	    source = y;
	  else if (GET_MODE_SIZE (GET_MODE (target_reg))
		   == GET_MODE_SIZE (GET_MODE (y)))
	    source = gen_rtx (SUBREG, GET_MODE (target_reg), y, 0);
	  else
	    abort ();
	}
      else
	abort ();

      emit_move_insn (target_reg, source);
    }
}

/* Emit code to move a block Y to a block X, where Y is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_store (x, y)
     rtx x, y;
{
  rtx source_reg, target;
  int i;

  if (GET_CODE (y) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (y, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (y, 0); i++)
    {
      rtx element = XVECEXP (y, 0, i);

      source_reg = XEXP (element, 0);

      if (GET_CODE (x) == MEM)
	target = change_address (x, GET_MODE (source_reg),
				 plus_constant (XEXP (x, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	target = x;
      else
	abort ();

      emit_move_insn (target, source_reg);
    }
}

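/* Illustrative sketch (added commentary, not from the original source):
   the PARALLEL operand that emit_group_load and emit_group_store expect
   looks roughly like

	(parallel [(expr_list (reg:DF 32) (const_int 0))
		   (expr_list (reg:DI 4) (const_int 8))])

   where each element pairs a register with the byte offset of that piece
   within the memory block, and a null first entry means part of the value
   is also passed on the stack.  The modes and register numbers shown here
   are made up.  */
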
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx (EXPR_LIST, VOIDmode,
	       gen_rtx (USE, VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (regs, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (regs, 0); i++)
    use_reg (call_fusage, XEXP (XVECEXP (regs, 0, i), 0));
}

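/* A usage sketch, added for illustration (not in the original file).
   The register numbers and modes are made up; the list built here is
   the kind of thing the call emitter attaches to a CALL_INSN to record
   which hard registers the call uses.  */
#if 0
static void
example_record_call_usage ()
{
  rtx call_fusage = 0;

  use_reg (&call_fusage, gen_rtx (REG, SImode, 4));	/* one register */
  use_regs (&call_fusage, 5, 2);			/* regs 5 and 6 */
}
#endif
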
/* Generate several move instructions to clear LEN bytes of block TO.
   (A MEM rtx with BLKmode).  The caller must pass TO through
   protect_from_queue before calling.  ALIGN (in bytes) is maximum alignment
   we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     int len, align;
{
  struct clear_by_pieces data;
  rtx to_addr = XEXP (to, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.to = to;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data.autinc_to
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	clear_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}

/* Subroutine of clear_by_pieces.  Clear as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
clear_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct clear_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, const0_rtx));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}

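/* Worked example of the mode loop above (added commentary): clearing
   LEN == 7 bytes with MOVE_MAX == 4 and sufficient alignment first emits
   one SImode store of zero (leaving 3 bytes), then one HImode store
   (leaving 1), then one QImode store, as max_size walks down through
   4, 2 and 1.  */
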
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
   the maximum alignment we can assume, measured in bytes.  */

void
clear_storage (object, size, align)
     rtx object;
     rtx size;
     int align;
{
  if (GET_MODE (object) == BLKmode)
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
	  && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
	clear_by_pieces (object, INTVAL (size), align);

      else
	{
	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */

	  rtx opalign = GEN_INT (align);
	  enum machine_mode mode;

	  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	       mode = GET_MODE_WIDER_MODE (mode))
	    {
	      enum insn_code code = clrstr_optab[(int) mode];

	      if (code != CODE_FOR_nothing
		  /* We don't need MODE to be narrower than
		     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
		     the mode mask, as it is returned by the macro, it will
		     definitely be less than the actual mode mask.  */
		  && ((GET_CODE (size) == CONST_INT
		       && ((unsigned HOST_WIDE_INT) INTVAL (size)
			   <= GET_MODE_MASK (mode)))
		      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
		  && (insn_operand_predicate[(int) code][0] == 0
		      || (*insn_operand_predicate[(int) code][0]) (object,
								   BLKmode))
		  && (insn_operand_predicate[(int) code][2] == 0
		      || (*insn_operand_predicate[(int) code][2]) (opalign,
								   VOIDmode)))
		{
		  rtx op1;
		  rtx last = get_last_insn ();
		  rtx pat;

		  op1 = convert_to_mode (mode, size, 1);
		  if (insn_operand_predicate[(int) code][1] != 0
		      && ! (*insn_operand_predicate[(int) code][1]) (op1,
								     mode))
		    op1 = copy_to_mode_reg (mode, op1);

		  pat = GEN_FCN ((int) code) (object, op1, opalign);
		  if (pat)
		    {
		      emit_insn (pat);
		      return;
		    }
		  else
		    delete_insns_since (last);
		}
	    }

#ifdef TARGET_MEM_FUNCTIONS
	  emit_library_call (memset_libfunc, 0,
			     VOIDmode, 3,
			     XEXP (object, 0), Pmode,
			     const0_rtx, TYPE_MODE (integer_type_node),
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));
#else
	  emit_library_call (bzero_libfunc, 0,
			     VOIDmode, 2,
			     XEXP (object, 0), Pmode,
			     convert_to_mode (TYPE_MODE (integer_type_node),
					      size,
					      TREE_UNSIGNED (integer_type_node)),
			     TYPE_MODE (integer_type_node));
#endif
	}
    }
  else
    emit_move_insn (object, const0_rtx);
}

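/* A minimal usage sketch, added for illustration (not in the original
   file).  The MEM and its address are hypothetical.  */
#if 0
static void
example_clear_object ()
{
  rtx obj = gen_rtx (MEM, BLKmode, virtual_stack_vars_rtx);

  /* With a small constant size, clear_storage normally expands to a few
     zero stores via clear_by_pieces instead of a memset/bzero call.  */
  clear_storage (obj, GEN_INT (16), UNITS_PER_WORD);
}
#endif
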
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}

/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
						    * BITS_PER_UNIT),
						   (class == MODE_COMPLEX_INT
						    ? MODE_INT : MODE_FLOAT),
						   0))
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));
      rtx insns;

      /* If this is a stack, push the highpart first, so it
	 will be in the argument order.

	 In that case, change_address is used only to convert
	 the mode, not to change the address.  */
      if (stack)
	{
	  /* Note that the real part always precedes the imag part in memory
	     regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
#else
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
#endif
	}
      else
	{
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_realpart (submode, x), gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_imagpart (submode, x), gen_imagpart (submode, y)));
	}

      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx insns;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
	 X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
	{
	  anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
	  x = change_address (x, VOIDmode, stack_pointer_rtx);
	}
#endif

      /* Show the output dies here.  */
      if (x != y)
	emit_insn (gen_rtx (CLOBBER, VOIDmode, x));

      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  last_insn = emit_move_insn (xpart, ypart);
	}

      return last_insn;
    }
  else
    abort ();
}

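/* Worked example of the complex-move case above (added commentary): for
   SCmode (a complex of two SFmode parts) on a target with no SCmode move
   pattern, mode_for_size yields submode == SFmode and the move is split
   into two SFmode moves through gen_realpart/gen_imagpart; for a stack
   push on a downward-growing stack the imaginary part is pushed first,
   leaving the real part at the lower address, as the comment in the code
   requires.  */
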
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
			  - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}

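/* Added commentary: on a STACK_GROWS_DOWNWARD target, STACK_PUSH_CODE
   defaults to PRE_DEC, so once a caller wraps the result in a MEM of the
   pushed mode the operand looks like

	(mem:SI (pre_dec:SI (reg:SI sp)))

   which is exactly the form push_operand recognizes.  (SImode here is
   only an example.)  */
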
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */

void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
		args_addr, args_so_far)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* If we're placing part of X into a register and part of X onto
     the stack, indicate that the entire register is clobbered to
     keep flow from thinking the unused part of the register is live.  */
  if (partial > 0 && reg != 0)
    emit_insn (gen_rtx (CLOBBER, VOIDmode, reg));

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
	abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = change_address (xinner, BLKmode,
				 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = used;
#endif

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && GET_CODE (size) == CONST_INT
	  && skip == 0
	  && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
	      < MOVE_RATIO)
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! SLOW_UNALIGNED_ACCESS)
	      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
	      || PUSH_ROUNDING (align) == align)
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
			  INTVAL (size) - used, align);
	}
      else
#endif /* PUSH_ROUNDING */
	{
	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (GET_CODE (size) == CONST_INT)
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     GEN_INT (used), NULL_RTX, 0,
				     OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (GET_CODE (args_so_far) == CONST_INT)
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx (PLUS, Pmode,
							   args_addr,
							   args_so_far),
						  skip));

	  /* TEMP is the address of the block.  Copy the data there.  */
	  if (GET_CODE (size) == CONST_INT
	      && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
		  < MOVE_RATIO))
	    {
	      move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
			      INTVAL (size), align);
	      goto ret;
	    }
	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
	  if (HAVE_movstrqi
	      && GET_CODE (size) == CONST_INT
	      && ((unsigned) INTVAL (size)
		  < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
	    {
	      rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
#ifdef HAVE_movstrhi
	  if (HAVE_movstrhi
	      && GET_CODE (size) == CONST_INT
	      && ((unsigned) INTVAL (size)
		  < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
	    {
	      rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
#ifdef HAVE_movstrsi
	  if (HAVE_movstrsi)
	    {
	      rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
#ifdef HAVE_movstrdi
	  if (HAVE_movstrdi)
	    {
	      rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif

#ifndef ACCUMULATE_OUTGOING_ARGS
	  /* If the source is referenced relative to the stack pointer,
	     copy it to another register to stabilize it.  We do not need
	     to do this if we know that we won't be changing sp.  */

	  if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
	      || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
	    temp = copy_to_reg (temp);
#endif

	  /* Make inhibit_defer_pop nonzero around the library call
	     to force it to pop the bcopy-arguments right away.  */
	  NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
	  emit_library_call (memcpy_libfunc, 0,
			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));
#else
	  emit_library_call (bcopy_libfunc, 0,
			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
			     convert_to_mode (TYPE_MODE (integer_type_node),
					      size,
					      TREE_UNSIGNED (integer_type_node)),
			     TYPE_MODE (integer_type_node));
#endif
	  OK_DEFER_POP;
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = not_stack;
#endif

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)));
    }
  else
    {
      rtx addr;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
	addr = gen_push_operand ();
      else
#endif
	if (GET_CODE (args_so_far) == CONST_INT)
	  addr
	    = memory_address (mode,
			      plus_constant (args_addr, INTVAL (args_so_far)));
	else
	  addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
						args_so_far));

      emit_move_insn (gen_rtx (MEM, mode, addr), x);
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, x);
      else
	move_block_to_reg (REGNO (reg), x, partial, mode);
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));
}

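/* A usage sketch, added for illustration (not in the original file):
   pushing a SImode scalar entirely on the stack, with no partial
   register part and no preallocated argument block.  It assumes a
   machine with real push insns (PUSH_ROUNDING defined); every argument
   value here is made up.  */
#if 0
static void
example_push_scalar_arg ()
{
  rtx val = gen_reg_rtx (SImode);

  emit_push_insn (val, SImode, integer_type_node, NULL_RTX,
		  GET_MODE_SIZE (SImode), 0, NULL_RTX, 0,
		  NULL_RTX, const0_rtx);
}
#endif
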
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx;
   if the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   We now use WANT_VALUE to decide whether to do this.  */

rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg;
{
  register rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
      return want_value ? result : NULL_RTX;
    }

  if (output_bytecode)
    {
      tree dest_innermost;

      bc_expand_expr (from);
      bc_emit_instruction (duplicate);

      dest_innermost = bc_expand_address (to);

      /* Can't deduce from TYPE that we're dealing with a bitfield, so
	 take care of it here.  */

      bc_store_memory (TREE_TYPE (to), dest_innermost);
      return NULL;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */

  if (TREE_CODE (to) == COMPONENT_REF
      || TREE_CODE (to) == BIT_FIELD_REF
      || (TREE_CODE (to) == ARRAY_REF
	  && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
	       && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
	      || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
    {
      enum machine_mode mode1;
      int bitsize;
      int bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;
      int alignment;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
				 &mode1, &unsignedp, &volatilep);

      /* If we are going to use store_bit_field and extract_bit_field,
	 make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
	tem = stabilize_reference (tem);

      alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
      if (offset != 0)
	{
	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	  if (GET_CODE (to_rtx) != MEM)
	    abort ();
	  to_rtx = change_address (to_rtx, VOIDmode,
				   gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
					    force_reg (ptr_mode, offset_rtx)));
	  /* If we have a variable offset, the known alignment
	     is only that of the innermost structure containing the field.
	     (Actually, we could sometimes do better by using the
	     align of an element of the innermost array, but no need.)  */
	  if (TREE_CODE (to) == COMPONENT_REF
	      || TREE_CODE (to) == BIT_FIELD_REF)
	    alignment
	      = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
	}
      if (volatilep)
	{
	  if (GET_CODE (to_rtx) == MEM)
	    {
	      /* When the offset is zero, to_rtx is the address of the
		 structure we are storing into, and hence may be shared.
		 We must make a new MEM before setting the volatile bit.  */
	      if (offset == 0)
		to_rtx = change_address (to_rtx, VOIDmode, XEXP (to_rtx, 0));
	      MEM_VOLATILE_P (to_rtx) = 1;
	    }
#if 0  /* This was turned off because, when a field is volatile
	  in an object which is not volatile, the object may be in a register,
	  and then we would abort over here.  */
	  else
	    abort ();
#endif
	}

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
			    (want_value
			     /* Spurious cast makes HPUX compiler happy.  */
			     ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
			     : VOIDmode),
			    unsignedp,
			    /* Required alignment of containing datum.  */
			    alignment,
			    int_size_in_bytes (TREE_TYPE (tem)));
      preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();

      /* If the value is meaningful, convert RESULT to the proper mode.
	 Otherwise, return nothing.  */
      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
					  TYPE_MODE (TREE_TYPE (from)),
					  result,
					  TREE_UNSIGNED (TREE_TYPE (to)))
	      : NULL_RTX);
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
     a promoted variable where the zero- or sign- extension needs to be done.
     Handling this in the normal way is safe because no computation is done
     before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
    {
      rtx value;

      push_temp_slots ();
      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, value);
      else if (GET_MODE (to_rtx) == BLKmode)
	emit_block_move (to_rtx, value, expr_size (from),
			 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
      else
	emit_move_insn (to_rtx, value);
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
      emit_move_insn (to_rtx, temp);
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			 XEXP (from_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
			 XEXP (to_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node),
					  size, TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return want_value ? result : NULL_RTX;
}

/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   TARGET may contain a QUEUED rtx.

   If WANT_VALUE is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If WANT_VALUE is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.  */

rtx
store_expr (exp, target, want_value)
     register tree exp;
     register rtx target;
     int want_value;
{
  register rtx temp;
  int dont_return_target = 0;

  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      emit_queue ();
      target = protect_from_queue (target, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      store_expr (TREE_OPERAND (exp, 1), target, 0);
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      store_expr (TREE_OPERAND (exp, 2), target, 0);
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;
      return want_value ? target : NULL_RTX;
    }
  else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
	   && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, WANT_VALUE will be nonzero for it.
       We know expand_expr will not use the target in that case.
       Don't do this if TARGET is volatile because we are supposed
       to write it and then read it.  */
    {
      temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
			  GET_MODE (target), 0);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
	temp = copy_to_reg (temp);
      dont_return_target = 1;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, let's not risk
       using it as the place to generate the rhs.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
	{
	  /* Expand EXP into a new pseudo.  */
	  temp = gen_reg_rtx (GET_MODE (target));
	  temp = expand_expr (exp, temp, GET_MODE (target), 0);
	}
      else
	temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);

      /* If target is volatile, ANSI requires accessing the value
	 *from* the target, if it is accessed.  So make that happen.
	 In no case return the target itself.  */
      if (! MEM_VOLATILE_P (target) && want_value)
	dont_return_target = 1;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      /* If we don't want a value, we can do the conversion inside EXP,
	 which will often result in some optimizations.  Do the conversion
	 in two steps: first change the signedness, if needed, then
	 the extend.  */
      if (! want_value)
	{
	  if (TREE_UNSIGNED (TREE_TYPE (exp))
	      != SUBREG_PROMOTED_UNSIGNED_P (target))
	    exp
	      = convert
		(signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
					  TREE_TYPE (exp)),
		 exp);

	  exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
					SUBREG_PROMOTED_UNSIGNED_P (target)),
			 exp);
	}

      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If TEMP is a volatile MEM and we want a result value, make
	 the access now so it gets done only once.  Likewise if
	 it contains TARGET.  */
      if (GET_CODE (temp) == MEM && want_value
	  && (MEM_VOLATILE_P (temp)
	      || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
	temp = copy_to_reg (temp);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
	 sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
	temp = convert_modes (GET_MODE (SUBREG_REG (target)),
			      TYPE_MODE (TREE_TYPE (exp)), temp,
			      SUBREG_PROMOTED_UNSIGNED_P (target));

      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_UNSIGNED_P (target));
      return want_value ? temp : NULL_RTX;
    }
  else
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      /* Return TARGET if it's a specified hardware register.
	 If TARGET is a volatile mem ref, either return TARGET
	 or return a reg copied *from* TARGET; ANSI requires this.

	 Otherwise, if TEMP is not TARGET, return TEMP
	 if it is constant (for efficiency),
	 or if we really want the correct value.  */
      if (!(target && GET_CODE (target) == REG
	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
	  && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  && temp != target
	  && (CONSTANT_P (temp) || want_value))
	dont_return_target = 1;
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			  temp, TREE_UNSIGNED (TREE_TYPE (exp)));

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.  */

  if (temp != target && TREE_CODE (exp) != ERROR_MARK)
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
	  if (dont_return_target)
	    {
	      /* In this case, we will return TEMP,
		 so make sure it has the proper mode.
		 But don't forget to store the value into TARGET.  */
	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
	      emit_move_insn (target, temp);
	    }
	  else
	    convert_move (target, temp, unsignedp);
	}

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.
	     The string constant may be shorter than the array.
	     So copy just the string's actual length, and clear the rest.  */
	  rtx size;
	  rtx addr;

	  /* Get the size of the data type of the string,
	     which is actually the size of the target.  */
	  size = expr_size (exp);
	  if (GET_CODE (size) == CONST_INT
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size,
			     TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
	  else
	    {
	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop (MIN_EXPR,
			      make_tree (sizetype, size),
			      convert (sizetype,
				       build_int_2 (TREE_STRING_LENGTH (exp), 0)));
	      rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
					       VOIDmode, 0);
	      rtx label = 0;

	      /* Copy that much.  */
	      emit_block_move (target, temp, copy_size_rtx,
			       TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);

	      /* Figure out how much is left in TARGET that we have to clear.
		 Do all calculations in ptr_mode.  */

	      addr = XEXP (target, 0);
	      addr = convert_modes (ptr_mode, Pmode, addr, 1);

	      if (GET_CODE (copy_size_rtx) == CONST_INT)
		{
		  addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
		  size = plus_constant (size, - TREE_STRING_LENGTH (exp));
		}
	      else
		{
		  addr = force_reg (ptr_mode, addr);
		  addr = expand_binop (ptr_mode, add_optab, addr,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  size = expand_binop (ptr_mode, sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
				 GET_MODE (size), 0, 0);
		  label = gen_label_rtx ();
		  emit_jump_insn (gen_blt (label));
		}

	      if (size != const0_rtx)
		{
#ifdef TARGET_MEM_FUNCTIONS
		  emit_library_call (memset_libfunc, 0, VOIDmode, 3,
				     addr, Pmode,
				     const0_rtx, TYPE_MODE (integer_type_node),
				     convert_to_mode (TYPE_MODE (sizetype),
						      size,
						      TREE_UNSIGNED (sizetype)),
				     TYPE_MODE (sizetype));
#else
		  emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
				     addr, Pmode,
				     convert_to_mode (TYPE_MODE (integer_type_node),
						      size,
						      TREE_UNSIGNED (integer_type_node)),
				     TYPE_MODE (integer_type_node));
#endif
		}

	      if (label)
		emit_label (label);
	    }
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
	emit_group_load (target, temp);
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
      else
	emit_move_insn (target, temp);
    }

  /* If we don't want a value, return NULL_RTX.  */
  if (! want_value)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;

  /* Return TARGET itself if it is a hard register.  */
  else if (want_value && GET_MODE (target) != BLKmode
	   && ! (GET_CODE (target) == REG
		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}

/* Return 1 if EXP just contains zeros.  */

static int
is_zeros_p (exp)
     tree exp;
{
  tree elt;

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
      return is_zeros_p (TREE_OPERAND (exp, 0));

    case INTEGER_CST:
      return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;

    case COMPLEX_CST:
      return
	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));

    case REAL_CST:
      return REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst0);

    case CONSTRUCTOR:
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	if (! is_zeros_p (TREE_VALUE (elt)))
	  return 0;

      return 1;
    }

  return 0;
}

/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      int elts = 0, zeros = 0;
      tree elt = CONSTRUCTOR_ELTS (exp);
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	{
	  /* If there are no ranges of true bits, it is all zero.  */
	  return elt == NULL_TREE;
	}
      for (; elt; elt = TREE_CHAIN (elt))
	{
	  /* We do not handle the case where the index is a RANGE_EXPR,
	     so the statistic will be somewhat inaccurate.
	     We do make a more accurate count in store_constructor itself,
	     so since this function is only used for nested array elements,
	     this should be close enough.  */
	  if (mostly_zeros_p (TREE_VALUE (elt)))
	    zeros++;
	  elts++;
	}

      return 4 * zeros >= 3 * elts;
    }

  return is_zeros_p (exp);
}

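/* Worked example (added commentary): for a constructor such as
   {0, 0, 0, 5}, elts == 4 and zeros == 3, so 4 * 3 >= 3 * 4 holds and
   mostly_zeros_p returns 1; the caller then clears the whole object once
   and stores only the nonzero element.  */
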
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (target, bitsize, bitpos,
			 mode, exp, type, cleared)
     rtx target;
     int bitsize, bitpos;
     enum machine_mode mode;
     tree exp, type;
     int cleared;
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      && bitpos % BITS_PER_UNIT == 0
      /* If we have a non-zero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || GET_CODE (target) == MEM))
    {
      if (bitpos != 0)
	target = change_address (target, VOIDmode,
				 plus_constant (XEXP (target, 0),
						bitpos / BITS_PER_UNIT));
      store_constructor (exp, target, cleared);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp,
		 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
		 int_size_in_bytes (type));
}

bbf6f052 3428/* Store the value of constructor EXP into the rtx TARGET.
e1a43f73
PB
3429 TARGET is either a REG or a MEM.
3430 CLEARED is true if TARGET is known to have been zero'd. */
bbf6f052
RK
3431
3432static void
e1a43f73 3433store_constructor (exp, target, cleared)
bbf6f052
RK
3434 tree exp;
3435 rtx target;
e1a43f73 3436 int cleared;
bbf6f052 3437{
4af3895e
JVA
3438 tree type = TREE_TYPE (exp);
3439
bbf6f052
RK
3440 /* We know our target cannot conflict, since safe_from_p has been called. */
3441#if 0
3442 /* Don't try copying piece by piece into a hard register
3443 since that is vulnerable to being clobbered by EXP.
3444 Instead, construct in a pseudo register and then copy it all. */
3445 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3446 {
3447 rtx temp = gen_reg_rtx (GET_MODE (target));
e1a43f73 3448 store_constructor (exp, temp, 0);
bbf6f052
RK
3449 emit_move_insn (target, temp);
3450 return;
3451 }
3452#endif
3453
e44842fe
RK
3454 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3455 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052
RK
3456 {
3457 register tree elt;
3458
4af3895e 3459 /* Inform later passes that the whole union value is dead. */
e44842fe
RK
3460 if (TREE_CODE (type) == UNION_TYPE
3461 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052 3462 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
4af3895e
JVA
3463
3464 /* If we are building a static constructor into a register,
3465 set the initial value as zero so we can fold the value into
67225c15
RK
3466 a constant. But if more than one register is involved,
3467 this probably loses. */
3468 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3469 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
9de08200
RK
3470 {
3471 if (! cleared)
3472 emit_move_insn (target, const0_rtx);
4af3895e 3473
9de08200
RK
3474 cleared = 1;
3475 }
3476
3477 /* If the constructor has fewer fields than the structure
3478 or if we are initializing the structure to mostly zeros,
bbf6f052 3479 clear the whole structure first. */
9de08200
RK
3480 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3481 != list_length (TYPE_FIELDS (type)))
3482 || mostly_zeros_p (exp))
3483 {
3484 if (! cleared)
3485 clear_storage (target, expr_size (exp),
3486 TYPE_ALIGN (type) / BITS_PER_UNIT);
3487
3488 cleared = 1;
3489 }
bbf6f052
RK
3490 else
3491 /* Inform later passes that the old value is dead. */
3492 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3493
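      /* Illustrative example (not part of the original source): for

             struct { int a, b, c; } s = { 1 };

         the constructor lists fewer elements than the type has fields,
         so the whole structure is cleared first and only the field `a'
         is stored explicitly; zero stores for `b' and `c' are skipped
         below because CLEARED is now set.  */
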
      /* Store each element of the constructor into
         the corresponding field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
        {
          register tree field = TREE_PURPOSE (elt);
          register enum machine_mode mode;
          int bitsize;
          int bitpos = 0;
          int unsignedp;
          tree pos, constant = 0, offset = 0;
          rtx to_rtx = target;

          /* Just ignore missing fields.
             We cleared the whole structure, above,
             if any fields are missing.  */
          if (field == 0)
            continue;

          if (cleared && is_zeros_p (TREE_VALUE (elt)))
            continue;

          bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
          unsignedp = TREE_UNSIGNED (field);
          mode = DECL_MODE (field);
          if (DECL_BIT_FIELD (field))
            mode = VOIDmode;

          pos = DECL_FIELD_BITPOS (field);
          if (TREE_CODE (pos) == INTEGER_CST)
            constant = pos;
          else if (TREE_CODE (pos) == PLUS_EXPR
                   && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
            constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
          else
            offset = pos;

          if (constant)
            bitpos = TREE_INT_CST_LOW (constant);

          if (offset)
            {
              rtx offset_rtx;

              if (contains_placeholder_p (offset))
                offset = build (WITH_RECORD_EXPR, sizetype,
                                offset, exp);

              offset = size_binop (FLOOR_DIV_EXPR, offset,
                                   size_int (BITS_PER_UNIT));

              offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
              if (GET_CODE (to_rtx) != MEM)
                abort ();

              to_rtx
                = change_address (to_rtx, VOIDmode,
                                  gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
                                           force_reg (ptr_mode, offset_rtx)));
            }
          if (TREE_READONLY (field))
            {
              if (GET_CODE (to_rtx) == MEM)
                to_rtx = change_address (to_rtx, GET_MODE (to_rtx),
                                         XEXP (to_rtx, 0));
              RTX_UNCHANGING_P (to_rtx) = 1;
            }

          store_constructor_field (to_rtx, bitsize, bitpos,
                                   mode, TREE_VALUE (elt), type, cleared);
        }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      register tree elt;
      register int i;
      int need_to_clear;
      tree domain = TYPE_DOMAIN (type);
      HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
      HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
      tree elttype = TREE_TYPE (type);

      /* If the constructor has fewer elements than the array,
         clear the whole array first.  Similarly if this is a
         static constructor of a non-BLKmode object.  */
      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
        need_to_clear = 1;
      else
        {
          HOST_WIDE_INT count = 0, zero_count = 0;
          need_to_clear = 0;
          /* This loop is a more accurate version of the loop in
             mostly_zeros_p (it handles RANGE_EXPR in an index).
             It is also needed to check for missing elements.  */
          for (elt = CONSTRUCTOR_ELTS (exp);
               elt != NULL_TREE;
               elt = TREE_CHAIN (elt))
            {
              tree index = TREE_PURPOSE (elt);
              HOST_WIDE_INT this_node_count;
              if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
                {
                  tree lo_index = TREE_OPERAND (index, 0);
                  tree hi_index = TREE_OPERAND (index, 1);
                  if (TREE_CODE (lo_index) != INTEGER_CST
                      || TREE_CODE (hi_index) != INTEGER_CST)
                    {
                      need_to_clear = 1;
                      break;
                    }
                  this_node_count = TREE_INT_CST_LOW (hi_index)
                    - TREE_INT_CST_LOW (lo_index) + 1;
                }
              else
                this_node_count = 1;
              count += this_node_count;
              if (mostly_zeros_p (TREE_VALUE (elt)))
                zero_count += this_node_count;
            }
          /* Clear the entire array first if there are any missing elements,
             or if the incidence of zero elements is >= 75%.  */
          if (count < maxelt - minelt + 1
              || 4 * zero_count >= 3 * count)
            need_to_clear = 1;
        }
      if (need_to_clear)
        {
          if (! cleared)
            clear_storage (target, expr_size (exp),
                           TYPE_ALIGN (type) / BITS_PER_UNIT);
          cleared = 1;
        }
      else
        /* Inform later passes that the old value is dead.  */
        emit_insn (gen_rtx (CLOBBER, VOIDmode, target));

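      /* Illustrative example (not from the original source): for the
         GNU C range initializer

             int v[100] = { [90 ... 99] = 7 };

         the counting loop above sees one RANGE_EXPR covering ten of the
         100 elements, so count < maxelt - minelt + 1 and the whole
         array is cleared first; only the last ten elements are then
         stored explicitly.  */
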
      /* Store each element of the constructor into
         the corresponding element of TARGET, determined
         by counting the elements.  */
      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
           elt;
           elt = TREE_CHAIN (elt), i++)
        {
          register enum machine_mode mode;
          int bitsize;
          int bitpos;
          int unsignedp;
          tree value = TREE_VALUE (elt);
          tree index = TREE_PURPOSE (elt);
          rtx xtarget = target;

          if (cleared && is_zeros_p (value))
            continue;

          mode = TYPE_MODE (elttype);
          bitsize = GET_MODE_BITSIZE (mode);
          unsignedp = TREE_UNSIGNED (elttype);

          if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
            {
              tree lo_index = TREE_OPERAND (index, 0);
              tree hi_index = TREE_OPERAND (index, 1);
              rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
              struct nesting *loop;
              HOST_WIDE_INT lo, hi, count;
              tree position;

              /* If the range is constant and "small", unroll the loop.  */
              if (TREE_CODE (lo_index) == INTEGER_CST
                  && TREE_CODE (hi_index) == INTEGER_CST
                  && (lo = TREE_INT_CST_LOW (lo_index),
                      hi = TREE_INT_CST_LOW (hi_index),
                      count = hi - lo + 1,
                      (GET_CODE (target) != MEM
                       || count <= 2
                       || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
                           && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
                           <= 40 * 8))))
                {
                  lo -= minelt;  hi -= minelt;
                  for (; lo <= hi; lo++)
                    {
                      bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
                      store_constructor_field (target, bitsize, bitpos,
                                               mode, value, type, cleared);
                    }
                }
              else
                {
                  hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
                  loop_top = gen_label_rtx ();
                  loop_end = gen_label_rtx ();

                  unsignedp = TREE_UNSIGNED (domain);

                  index = build_decl (VAR_DECL, NULL_TREE, domain);

                  DECL_RTL (index) = index_r
                    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
                                                 &unsignedp, 0));

                  if (TREE_CODE (value) == SAVE_EXPR
                      && SAVE_EXPR_RTL (value) == 0)
                    {
                      /* Make sure value gets expanded once before the loop.  */
                      expand_expr (value, const0_rtx, VOIDmode, 0);
                      emit_queue ();
                    }
                  store_expr (lo_index, index_r, 0);
                  loop = expand_start_loop (0);

                  /* Assign value to element index.  */
                  position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
                                         size_int (BITS_PER_UNIT));
                  position = size_binop (MULT_EXPR,
                                         size_binop (MINUS_EXPR, index,
                                                     TYPE_MIN_VALUE (domain)),
                                         position);
                  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
                  addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
                  xtarget = change_address (target, mode, addr);
                  if (TREE_CODE (value) == CONSTRUCTOR)
                    store_constructor (value, xtarget, cleared);
                  else
                    store_expr (value, xtarget, 0);

                  expand_exit_loop_if_false (loop,
                                             build (LT_EXPR, integer_type_node,
                                                    index, hi_index));

                  expand_increment (build (PREINCREMENT_EXPR,
                                            TREE_TYPE (index),
                                            index, integer_one_node), 0, 0);
                  expand_end_loop ();
                  emit_label (loop_end);

                  /* Needed by stupid register allocation, to extend the
                     lifetime of pseudo-regs used by TARGET past the end
                     of the loop.  */
                  emit_insn (gen_rtx (USE, GET_MODE (target), target));
                }
            }
          else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
                   || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
            {
              rtx pos_rtx, addr;
              tree position;

              if (index == 0)
                index = size_int (i);

              if (minelt)
                index = size_binop (MINUS_EXPR, index,
                                    TYPE_MIN_VALUE (domain));
              position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
                                     size_int (BITS_PER_UNIT));
              position = size_binop (MULT_EXPR, index, position);
              pos_rtx = expand_expr (position, 0, VOIDmode, 0);
              addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
              xtarget = change_address (target, mode, addr);
              store_expr (value, xtarget, 0);
            }
          else
            {
              if (index != 0)
                bitpos = ((TREE_INT_CST_LOW (index) - minelt)
                          * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
              else
                bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
              store_constructor_field (target, bitsize, bitpos,
                                       mode, value, type, cleared);
            }
        }
    }
  /* set constructor assignments */
  else if (TREE_CODE (type) == SET_TYPE)
    {
      tree elt = CONSTRUCTOR_ELTS (exp);
      rtx xtarget = XEXP (target, 0);
      int set_word_size = TYPE_ALIGN (type);
      int nbytes = int_size_in_bytes (type), nbits;
      tree domain = TYPE_DOMAIN (type);
      tree domain_min, domain_max, bitlength;

      /* The default implementation strategy is to extract the constant
         parts of the constructor, use that to initialize the target,
         and then "or" in whatever non-constant ranges we need in addition.

         If a large set is all zero or all ones, it is
         probably better to set it using memset (if available) or bzero.
         Also, if a large set has just a single range, it may also be
         better to first clear the set (using bzero/memset)
         and then set the bits we want.  */

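      /* Illustrative example (not from the original source): for a
         Pascal-style set constructor such as [2, 8..11] over a domain
         0..31, the constant bits are assembled into words below and
         stored directly; a non-constant range like [i..j] instead
         falls through to the __setbits library call further down.  */
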
      /* Check for all zeros.  */
      if (elt == NULL_TREE)
        {
          if (!cleared)
            clear_storage (target, expr_size (exp),
                           TYPE_ALIGN (type) / BITS_PER_UNIT);
          return;
        }

      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
      bitlength = size_binop (PLUS_EXPR,
                              size_binop (MINUS_EXPR, domain_max, domain_min),
                              size_one_node);

      if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
        abort ();
      nbits = TREE_INT_CST_LOW (bitlength);

      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
         are "complicated" (more than one range), initialize (the
         constant parts) by copying from a constant.  */
      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
          || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
        {
          int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
          enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
          char *bit_buffer = (char *) alloca (nbits);
          HOST_WIDE_INT word = 0;
          int bit_pos = 0;
          int ibit = 0;
          int offset = 0;  /* In bytes from beginning of set.  */
          elt = get_set_constructor_bits (exp, bit_buffer, nbits);
          for (;;)
            {
              if (bit_buffer[ibit])
                {
                  if (BYTES_BIG_ENDIAN)
                    word |= (1 << (set_word_size - 1 - bit_pos));
                  else
                    word |= 1 << bit_pos;
                }
              bit_pos++;  ibit++;
              if (bit_pos >= set_word_size || ibit == nbits)
                {
                  if (word != 0 || ! cleared)
                    {
                      rtx datum = GEN_INT (word);
                      rtx to_rtx;
                      /* The assumption here is that it is safe to use
                         XEXP if the set is multi-word, but not if
                         it's single-word.  */
                      if (GET_CODE (target) == MEM)
                        {
                          to_rtx = plus_constant (XEXP (target, 0), offset);
                          to_rtx = change_address (target, mode, to_rtx);
                        }
                      else if (offset == 0)
                        to_rtx = target;
                      else
                        abort ();
                      emit_move_insn (to_rtx, datum);
                    }
                  if (ibit == nbits)
                    break;
                  word = 0;
                  bit_pos = 0;
                  offset += set_word_size / BITS_PER_UNIT;
                }
            }
        }
      else if (!cleared)
        {
          /* Don't bother clearing storage if the set is all ones.  */
          if (TREE_CHAIN (elt) != NULL_TREE
              || (TREE_PURPOSE (elt) == NULL_TREE
                  ? nbits != 1
                  : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
                     || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
                     || (TREE_INT_CST_LOW (TREE_VALUE (elt))
                         - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
                         != nbits))))
            clear_storage (target, expr_size (exp),
                           TYPE_ALIGN (type) / BITS_PER_UNIT);
        }

      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
        {
          /* start of range of element or NULL */
          tree startbit = TREE_PURPOSE (elt);
          /* end of range of element, or element value */
          tree endbit = TREE_VALUE (elt);
          HOST_WIDE_INT startb, endb;
          rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;

          bitlength_rtx = expand_expr (bitlength,
                                       NULL_RTX, MEM, EXPAND_CONST_ADDRESS);

          /* handle non-range tuple element like [ expr ]  */
          if (startbit == NULL_TREE)
            {
              startbit = save_expr (endbit);
              endbit = startbit;
            }
          startbit = convert (sizetype, startbit);
          endbit = convert (sizetype, endbit);
          if (! integer_zerop (domain_min))
            {
              startbit = size_binop (MINUS_EXPR, startbit, domain_min);
              endbit = size_binop (MINUS_EXPR, endbit, domain_min);
            }
          startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
                                      EXPAND_CONST_ADDRESS);
          endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
                                    EXPAND_CONST_ADDRESS);

          if (REG_P (target))
            {
              targetx = assign_stack_temp (GET_MODE (target),
                                           GET_MODE_SIZE (GET_MODE (target)),
                                           0);
              emit_move_insn (targetx, target);
            }
          else if (GET_CODE (target) == MEM)
            targetx = target;
          else
            abort ();

#ifdef TARGET_MEM_FUNCTIONS
          /* Optimization:  If startbit and endbit are
             constants divisible by BITS_PER_UNIT,
             call memset instead.  */
          if (TREE_CODE (startbit) == INTEGER_CST
              && TREE_CODE (endbit) == INTEGER_CST
              && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
              && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
            {
              emit_library_call (memset_libfunc, 0,
                                 VOIDmode, 3,
                                 plus_constant (XEXP (targetx, 0),
                                                startb / BITS_PER_UNIT),
                                 Pmode,
                                 constm1_rtx, TYPE_MODE (integer_type_node),
                                 GEN_INT ((endb - startb) / BITS_PER_UNIT),
                                 TYPE_MODE (sizetype));
            }
          else
#endif
            {
              emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
                                 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
                                 bitlength_rtx, TYPE_MODE (sizetype),
                                 startbit_rtx, TYPE_MODE (sizetype),
                                 endbit_rtx, TYPE_MODE (sizetype));
            }
          if (REG_P (target))
            emit_move_insn (target, targetx);
        }
    }

  else
    abort ();
}

/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.  */

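/* Illustrative example (not from the original source): for

       struct { unsigned int f : 3; } s;
       s.f = 5;

   store_field is called with BITSIZE 3, BITPOS 0 and MODE VOIDmode,
   so the value goes through store_bit_field below; a word-aligned int
   field would instead be stored through an ordinary memory reference
   in the else branch at the end.  */
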
static rtx
store_field (target, bitsize, bitpos, mode, exp, value_mode,
             unsignedp, align, total_size)
     rtx target;
     int bitsize, bitpos;
     enum machine_mode mode;
     tree exp;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
{
  HOST_WIDE_INT width_mask = 0;

  if (bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  */

  if (mode == BLKmode
      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_stack_temp (GET_MODE (target),
                                      GET_MODE_SIZE (GET_MODE (target)), 0);
      rtx blk_object = copy_rtx (object);

      MEM_IN_STRUCT_P (object) = 1;
      MEM_IN_STRUCT_P (blk_object) = 1;
      PUT_MODE (blk_object, BLKmode);

      if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
        emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
                   align, total_size);

      /* Even though we aren't returning target, we need to
         give it the updated value.  */
      emit_move_insn (target, object);

      return blk_object;
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode])
      || GET_CODE (target) == REG
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
         store it as a bit field.  */
      || (SLOW_UNALIGNED_ACCESS
          && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
      || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
    {
      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
         MODE.  */
      if (mode != VOIDmode && mode != BLKmode
          && mode != TYPE_MODE (TREE_TYPE (exp)))
        temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TARGET and TEMP are both BLKmode, both
         must be in memory and BITPOS must be aligned on a byte
         boundary.  If so, we simply do a block copy.  */
      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
        {
          if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
              || bitpos % BITS_PER_UNIT != 0)
            abort ();

          target = change_address (target, VOIDmode,
                                   plus_constant (XEXP (target, 0),
                                                  bitpos / BITS_PER_UNIT));

          emit_block_move (target, temp,
                           GEN_INT ((bitsize + BITS_PER_UNIT - 1)
                                    / BITS_PER_UNIT),
                           1);

          return value_mode == VOIDmode ? const0_rtx : target;
        }

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
      if (value_mode != VOIDmode)
        {
          /* The caller wants an rtx for the value.  */
          /* If possible, avoid refetching from the bitfield itself.  */
          if (width_mask != 0
              && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
            {
              tree count;
              enum machine_mode tmode;

              if (unsignedp)
                return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
              tmode = GET_MODE (temp);
              if (tmode == VOIDmode)
                tmode = value_mode;
              count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
              temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
              return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
            }
          return extract_bit_field (target, bitsize, bitpos, unsignedp,
                                    NULL_RTX, value_mode, 0, align,
                                    total_size);
        }
      return const0_rtx;
    }
  else
    {
      rtx addr = XEXP (target, 0);
      rtx to_rtx;

      /* If a value is wanted, it must be the lhs;
         so make the address stable for multiple use.  */

      if (value_mode != VOIDmode && GET_CODE (addr) != REG
          && ! CONSTANT_ADDRESS_P (addr)
          /* A frame-pointer reference is already stable.  */
          && ! (GET_CODE (addr) == PLUS
                && GET_CODE (XEXP (addr, 1)) == CONST_INT
                && (XEXP (addr, 0) == virtual_incoming_args_rtx
                    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
        addr = copy_to_reg (addr);

      /* Now build a reference to just the desired component.  */

      to_rtx = change_address (target, mode,
                               plus_constant (addr, (bitpos / BITS_PER_UNIT)));
      MEM_IN_STRUCT_P (to_rtx) = 1;

      return store_expr (exp, to_rtx, value_mode != VOIDmode);
    }
}
\f
/* Return true if any object containing the innermost array is an unaligned
   packed structure field.  */

static int
get_inner_unaligned_p (exp)
     tree exp;
{
  int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));

  while (1)
    {
      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
        {
          if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
              < needed_alignment)
            return 1;
        }
      else if (TREE_CODE (exp) != ARRAY_REF
               && TREE_CODE (exp) != NON_LVALUE_EXPR
               && ! ((TREE_CODE (exp) == NOP_EXPR
                      || TREE_CODE (exp) == CONVERT_EXPR)
                     && (TYPE_MODE (TREE_TYPE (exp))
                         == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
        break;

      exp = TREE_OPERAND (exp, 0);
    }

  return 0;
}

/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
   ARRAY_REFs and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */

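/* Illustrative example (not from the original source): for a reference
   like `s.arr[3].fld', successive iterations of the loop below peel off
   the COMPONENT_REF and the ARRAY_REF, accumulating the bit offset of
   `fld' within `s' into *PBITPOS, and the containing object `s' is
   returned; had the index been a variable `i', the element offset would
   have been returned as a tree in *POFFSET instead.  */
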
tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
                     punsignedp, pvolatilep)
     tree exp;
     int *pbitsize;
     int *pbitpos;
     tree *poffset;
     enum machine_mode *pmode;
     int *punsignedp;
     int *pvolatilep;
{
  tree orig_exp = exp;
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = integer_zero_node;

  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
        mode = DECL_MODE (TREE_OPERAND (exp, 1));
      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = TREE_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *pbitsize = GET_MODE_BITSIZE (mode);
      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
    }

  if (size_tree)
    {
      if (TREE_CODE (size_tree) != INTEGER_CST)
        mode = BLKmode, *pbitsize = -1;
      else
        *pbitsize = TREE_INT_CST_LOW (size_tree);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */

  *pbitpos = 0;

  while (1)
    {
      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
        {
          tree pos = (TREE_CODE (exp) == COMPONENT_REF
                      ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
                      : TREE_OPERAND (exp, 2));
          tree constant = integer_zero_node, var = pos;

          /* If this field hasn't been filled in yet, don't go
             past it.  This should only happen when folding expressions
             made during type construction.  */
          if (pos == 0)
            break;

          /* Assume here that the offset is a multiple of a unit.
             If not, there should be an explicitly added constant.  */
          if (TREE_CODE (pos) == PLUS_EXPR
              && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
            constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
          else if (TREE_CODE (pos) == INTEGER_CST)
            constant = pos, var = integer_zero_node;

          *pbitpos += TREE_INT_CST_LOW (constant);

          if (var)
            offset = size_binop (PLUS_EXPR, offset,
                                 size_binop (EXACT_DIV_EXPR, var,
                                             size_int (BITS_PER_UNIT)));
        }

      else if (TREE_CODE (exp) == ARRAY_REF)
        {
          /* This code is based on the code in case ARRAY_REF in expand_expr
             below.  We assume here that the size of an array element is
             always an integral multiple of BITS_PER_UNIT.  */

          tree index = TREE_OPERAND (exp, 1);
          tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
          tree low_bound
            = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
          tree index_type = TREE_TYPE (index);

          if (! integer_zerop (low_bound))
            index = fold (build (MINUS_EXPR, index_type, index, low_bound));

          if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
            {
              index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
                               index);
              index_type = TREE_TYPE (index);
            }

          index = fold (build (MULT_EXPR, index_type, index,
                               TYPE_SIZE (TREE_TYPE (exp))));

          if (TREE_CODE (index) == INTEGER_CST
              && TREE_INT_CST_HIGH (index) == 0)
            *pbitpos += TREE_INT_CST_LOW (index);
          else
            offset = size_binop (PLUS_EXPR, offset,
                                 size_binop (FLOOR_DIV_EXPR, index,
                                             size_int (BITS_PER_UNIT)));
        }
      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
               && ! ((TREE_CODE (exp) == NOP_EXPR
                      || TREE_CODE (exp) == CONVERT_EXPR)
                     && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
                           && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
                               != UNION_TYPE))
                     && (TYPE_MODE (TREE_TYPE (exp))
                         == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
        break;

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
        *pvolatilep = 1;
      exp = TREE_OPERAND (exp, 0);
    }

  /* If this was a bit-field, see if there is a mode that allows direct
     access in case EXP is in memory.  */
  if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
    {
      mode = mode_for_size (*pbitsize,
                            (TYPE_MODE (TREE_TYPE (orig_exp)) == BLKmode
                             ? MODE_INT
                             : GET_MODE_CLASS (TYPE_MODE
                                               (TREE_TYPE (orig_exp)))),
                            0);
      if (mode == BLKmode)
        mode = VOIDmode;
    }

  if (integer_zerop (offset))
    offset = 0;

  if (offset != 0 && contains_placeholder_p (offset))
    offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);

  *pmode = mode;
  *poffset = offset;
  return exp;
}
\f
/* Given an rtx VALUE that may contain additions and multiplications,
   return an equivalent value that just refers to a register or memory.
   This is done by generating instructions to perform the arithmetic
   and returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

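/* Illustrative example (not from the original source): given the rtx
   (plus:SI (reg:SI 100) (mult:SI (reg:SI 101) (const_int 4))),
   force_operand emits a multiply and an add and returns the pseudo
   register holding the result, so the caller is left with a plain
   register operand.  */
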
rtx
force_operand (value, target)
     rtx value, target;
{
  register optab binoptab = 0;
  /* Use a temporary to force order of execution of calls to
     `force_operand'.  */
  rtx tmp;
  register rtx op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);

  if (GET_CODE (value) == PLUS)
    binoptab = add_optab;
  else if (GET_CODE (value) == MINUS)
    binoptab = sub_optab;
  else if (GET_CODE (value) == MULT)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
          && !(GET_CODE (op2) == REG && op2 != subtarget))
        subtarget = 0;
      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_mult (GET_MODE (value), tmp,
                          force_operand (op2, NULL_RTX),
                          target, 0);
    }

  if (binoptab)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
          && !(GET_CODE (op2) == REG && op2 != subtarget))
        subtarget = 0;
      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
        {
          binoptab = add_optab;
          op2 = negate_rtx (GET_MODE (value), op2);
        }

      /* Check for an addition with OP2 a constant integer and our first
         operand a PLUS of a virtual register and something else.  In that
         case, we want to emit the sum of the virtual register and the
         constant first and then add the other value.  This allows virtual
         register instantiation to simply modify the constant rather than
         creating another one around this addition.  */
      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
          && GET_CODE (XEXP (value, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
          && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
          && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
        {
          rtx temp = expand_binop (GET_MODE (value), binoptab,
                                   XEXP (XEXP (value, 0), 0), op2,
                                   subtarget, 0, OPTAB_LIB_WIDEN);
          return expand_binop (GET_MODE (value), binoptab, temp,
                               force_operand (XEXP (XEXP (value, 0), 1), 0),
                               target, 0, OPTAB_LIB_WIDEN);
        }

      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_binop (GET_MODE (value), binoptab, tmp,
                           force_operand (op2, NULL_RTX),
                           target, 0, OPTAB_LIB_WIDEN);
      /* We give UNSIGNEDP = 0 to expand_binop
         because the only operations we are expanding here are signed ones.  */
    }
  return value;
}
\f
/* Subroutine of expand_expr:
   save the non-copied parts (LIST) of an expr (LHS), and return a list
   which can restore these values to their previous values,
   should something modify their storage.  */

static tree
save_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
        tree part = TREE_VALUE (tail);
        tree part_type = TREE_TYPE (part);
        tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
        rtx target = assign_temp (part_type, 0, 1, 1);
        if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
          target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
        parts = tree_cons (to_be_saved,
                           build (RTL_EXPR, part_type, NULL_TREE,
                                  (tree) target),
                           parts);
        store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
      }
  return parts;
}

/* Subroutine of expand_expr:
   record the non-copied parts (LIST) of an expr (LHS), and return a list
   which specifies the initial values of these parts.  */

static tree
init_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
        tree part = TREE_VALUE (tail);
        tree part_type = TREE_TYPE (part);
        tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
        parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
      }
  return parts;
}

/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  */

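/* Illustrative example (not from the original source): when expanding
   an assignment such as `a = b + c' directly into the rtx holding `a',
   safe_from_p (DECL_RTL of a, the PLUS_EXPR) reports whether the
   right-hand side could read `a'; a constant operand is always safe,
   while an INDIRECT_REF is treated as unsafe against any MEM target.  */
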
static int
safe_from_p (x, exp)
     rtx x;
     tree exp;
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
         have no way of allocating temporaries of variable size
         (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
         So we assume here that something at a higher level has prevented a
         clash.  This is somewhat bogus, but the best we can do.  Only
         do this when X is BLKmode.  */
      || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
          && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
              || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
              || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
              != INTEGER_CST)
          && GET_MODE (x) == BLKmode))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
        return 0;
    }

  /* If X is a location in the outgoing argument area, it is always safe.  */
  if (GET_CODE (x) == MEM
      && (XEXP (x, 0) == virtual_outgoing_args_rtx
          || (GET_CODE (XEXP (x, 0)) == PLUS
              && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
    return 1;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL (exp);
      break;

    case 'c':
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
        return ((TREE_VALUE (exp) == 0
                 || safe_from_p (x, TREE_VALUE (exp)))
                && (TREE_CHAIN (exp) == 0
                    || safe_from_p (x, TREE_CHAIN (exp))));
      else
        return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0));

    case '2':
    case '<':
      return (safe_from_p (x, TREE_OPERAND (exp, 0))
              && safe_from_p (x, TREE_OPERAND (exp, 1)));

    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
         the expression.  If it is set, we conflict iff we are that rtx or
         both are in memory.  Otherwise, we check all operands of the
         expression recursively.  */

      switch (TREE_CODE (exp))
        {
        case ADDR_EXPR:
          return (staticp (TREE_OPERAND (exp, 0))
                  || safe_from_p (x, TREE_OPERAND (exp, 0)));

        case INDIRECT_REF:
          if (GET_CODE (x) == MEM)
            return 0;
          break;

        case CALL_EXPR:
          exp_rtl = CALL_EXPR_RTL (exp);
          if (exp_rtl == 0)
            {
              /* Assume that the call will clobber all hard registers and
                 all of memory.  */
              if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
                  || GET_CODE (x) == MEM)
                return 0;
            }

          break;

        case RTL_EXPR:
          /* If a sequence exists, we would have to scan every instruction
             in the sequence to see if it was safe.  This is probably not
             worthwhile.  */
          if (RTL_EXPR_SEQUENCE (exp))
            return 0;

          exp_rtl = RTL_EXPR_RTL (exp);
          break;

        case WITH_CLEANUP_EXPR:
          exp_rtl = RTL_EXPR_RTL (exp);
          break;

        case CLEANUP_POINT_EXPR:
          return safe_from_p (x, TREE_OPERAND (exp, 0));

        case SAVE_EXPR:
          exp_rtl = SAVE_EXPR_RTL (exp);
          break;

        case BIND_EXPR:
          /* The only operand we look at is operand 1.  The rest aren't
             part of the expression.  */
          return safe_from_p (x, TREE_OPERAND (exp, 1));

        case METHOD_CALL_EXPR:
          /* This takes a rtx argument, but shouldn't appear here.  */
          abort ();
        }

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
        break;

      nops = tree_code_length[(int) TREE_CODE (exp)];
      for (i = 0; i < nops; i++)
        if (TREE_OPERAND (exp, i) != 0
            && ! safe_from_p (x, TREE_OPERAND (exp, i)))
          return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
        {
          exp_rtl = SUBREG_REG (exp_rtl);
          if (GET_CODE (exp_rtl) == REG
              && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
            return 0;
        }

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
         are memory and EXP is not readonly.  */
      return ! (rtx_equal_p (x, exp_rtl)
                || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
                    && ! TREE_READONLY (exp)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}

/* Subroutine of expand_expr: return nonzero iff EXP is an
   expression whose type is statically determinable.  */

static int
fixed_type_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == PARM_DECL
      || TREE_CODE (exp) == VAR_DECL
      || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
      || TREE_CODE (exp) == COMPONENT_REF
      || TREE_CODE (exp) == ARRAY_REF)
    return 1;
  return 0;
}
\f
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */

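/* Illustrative example (not from the original source): expanding the
   address computation `p + 4' with MODIFIER == EXPAND_SUM may simply
   return (plus:SI (reg:SI 100) (const_int 4)) for the caller to fold
   into an address, whereas the default modifier would emit an add insn
   and return the pseudo register holding the sum.  */
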
4672rtx
4673expand_expr (exp, target, tmode, modifier)
4674 register tree exp;
4675 rtx target;
4676 enum machine_mode tmode;
4677 enum expand_modifier modifier;
4678{
b50d17a1
RK
4679 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4680 This is static so it will be accessible to our recursive callees. */
4681 static tree placeholder_list = 0;
bbf6f052
RK
4682 register rtx op0, op1, temp;
4683 tree type = TREE_TYPE (exp);
4684 int unsignedp = TREE_UNSIGNED (type);
4685 register enum machine_mode mode = TYPE_MODE (type);
4686 register enum tree_code code = TREE_CODE (exp);
4687 optab this_optab;
4688 /* Use subtarget as the target for operand 0 of a binary operation. */
4689 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4690 rtx original_target = target;
ca695ac9 4691 /* Maybe defer this until sure not doing bytecode? */
dd27116b
RK
4692 int ignore = (target == const0_rtx
4693 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4d87de75
RS
4694 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4695 || code == COND_EXPR)
dd27116b 4696 && TREE_CODE (type) == VOID_TYPE));
bbf6f052
RK
4697 tree context;
4698
ca695ac9 4699
1d556704 4700 if (output_bytecode && modifier != EXPAND_INITIALIZER)
ca695ac9
JB
4701 {
4702 bc_expand_expr (exp);
4703 return NULL;
4704 }
4705
bbf6f052
RK
4706 /* Don't use hard regs as subtargets, because the combiner
4707 can only handle pseudo regs. */
4708 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4709 subtarget = 0;
4710 /* Avoid subtargets inside loops,
4711 since they hide some invariant expressions. */
4712 if (preserve_subexpressions_p ())
4713 subtarget = 0;
4714
dd27116b
RK
4715 /* If we are going to ignore this result, we need only do something
4716 if there is a side-effect somewhere in the expression. If there
b50d17a1
RK
4717 is, short-circuit the most common cases here. Note that we must
4718 not call expand_expr with anything but const0_rtx in case this
4719 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 4720
dd27116b
RK
4721 if (ignore)
4722 {
4723 if (! TREE_SIDE_EFFECTS (exp))
4724 return const0_rtx;
4725
4726 /* Ensure we reference a volatile object even if value is ignored. */
4727 if (TREE_THIS_VOLATILE (exp)
4728 && TREE_CODE (exp) != FUNCTION_DECL
4729 && mode != VOIDmode && mode != BLKmode)
4730 {
4731 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
4732 if (GET_CODE (temp) == MEM)
4733 temp = copy_to_reg (temp);
4734 return const0_rtx;
4735 }
4736
4737 if (TREE_CODE_CLASS (code) == '1')
4738 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4739 VOIDmode, modifier);
4740 else if (TREE_CODE_CLASS (code) == '2'
4741 || TREE_CODE_CLASS (code) == '<')
4742 {
4743 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4744 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
4745 return const0_rtx;
4746 }
4747 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4748 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4749 /* If the second operand has no side effects, just evaluate
4750 the first. */
4751 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4752 VOIDmode, modifier);
dd27116b 4753
90764a87 4754 target = 0;
dd27116b 4755 }
bbf6f052 4756
e44842fe
RK
4757 /* If will do cse, generate all results into pseudo registers
4758 since 1) that allows cse to find more things
4759 and 2) otherwise cse could produce an insn the machine
4760 cannot support. */
4761
bbf6f052
RK
4762 if (! cse_not_expected && mode != BLKmode && target
4763 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4764 target = subtarget;
4765
bbf6f052
RK
4766 switch (code)
4767 {
4768 case LABEL_DECL:
b552441b
RS
4769 {
4770 tree function = decl_function_context (exp);
4771 /* Handle using a label in a containing function. */
4772 if (function != current_function_decl && function != 0)
4773 {
4774 struct function *p = find_function_data (function);
4775 /* Allocate in the memory associated with the function
4776 that the label is in. */
4777 push_obstacks (p->function_obstack,
4778 p->function_maybepermanent_obstack);
4779
4780 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4781 label_rtx (exp), p->forced_labels);
4782 pop_obstacks ();
4783 }
4784 else if (modifier == EXPAND_INITIALIZER)
4785 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4786 label_rtx (exp), forced_labels);
26fcb35a 4787 temp = gen_rtx (MEM, FUNCTION_MODE,
b552441b 4788 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
26fcb35a
RS
4789 if (function != current_function_decl && function != 0)
4790 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4791 return temp;
b552441b 4792 }
bbf6f052
RK
4793
4794 case PARM_DECL:
4795 if (DECL_RTL (exp) == 0)
4796 {
4797 error_with_decl (exp, "prior parameter's size depends on `%s'");
4af3895e 4798 return CONST0_RTX (mode);
bbf6f052
RK
4799 }
4800
d6a5ac33
RK
4801 /* ... fall through ... */
4802
bbf6f052 4803 case VAR_DECL:
2dca20cd
RS
4804 /* If a static var's type was incomplete when the decl was written,
4805 but the type is complete now, lay out the decl now. */
4806 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4807 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4808 {
4809 push_obstacks_nochange ();
4810 end_temporary_allocation ();
4811 layout_decl (exp, 0);
4812 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4813 pop_obstacks ();
4814 }
d6a5ac33
RK
4815
4816 /* ... fall through ... */
4817
2dca20cd 4818 case FUNCTION_DECL:
bbf6f052
RK
4819 case RESULT_DECL:
4820 if (DECL_RTL (exp) == 0)
4821 abort ();
d6a5ac33 4822
e44842fe
RK
4823 /* Ensure variable marked as used even if it doesn't go through
4824 a parser. If it hasn't be used yet, write out an external
4825 definition. */
4826 if (! TREE_USED (exp))
4827 {
4828 assemble_external (exp);
4829 TREE_USED (exp) = 1;
4830 }
4831
dc6d66b3
RK
4832 /* Show we haven't gotten RTL for this yet. */
4833 temp = 0;
4834
bbf6f052
RK
4835 /* Handle variables inherited from containing functions. */
4836 context = decl_function_context (exp);
4837
4838 /* We treat inline_function_decl as an alias for the current function
4839 because that is the inline function whose vars, types, etc.
4840 are being merged into the current function.
4841 See expand_inline_function. */
d6a5ac33 4842
bbf6f052
RK
4843 if (context != 0 && context != current_function_decl
4844 && context != inline_function_decl
4845 /* If var is static, we don't need a static chain to access it. */
4846 && ! (GET_CODE (DECL_RTL (exp)) == MEM
4847 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
4848 {
4849 rtx addr;
4850
4851 /* Mark as non-local and addressable. */
81feeecb 4852 DECL_NONLOCAL (exp) = 1;
38ee6ed9
JM
4853 if (DECL_NO_STATIC_CHAIN (current_function_decl))
4854 abort ();
bbf6f052
RK
4855 mark_addressable (exp);
4856 if (GET_CODE (DECL_RTL (exp)) != MEM)
4857 abort ();
4858 addr = XEXP (DECL_RTL (exp), 0);
4859 if (GET_CODE (addr) == MEM)
d6a5ac33
RK
4860 addr = gen_rtx (MEM, Pmode,
4861 fix_lexical_addr (XEXP (addr, 0), exp));
bbf6f052
RK
4862 else
4863 addr = fix_lexical_addr (addr, exp);
dc6d66b3 4864 temp = change_address (DECL_RTL (exp), mode, addr);
bbf6f052 4865 }
4af3895e 4866
bbf6f052
RK
4867 /* This is the case of an array whose size is to be determined
4868 from its initializer, while the initializer is still being parsed.
4869 See expand_decl. */
d6a5ac33 4870
dc6d66b3
RK
4871 else if (GET_CODE (DECL_RTL (exp)) == MEM
4872 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
4873 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
bbf6f052 4874 XEXP (DECL_RTL (exp), 0));
d6a5ac33
RK
4875
4876 /* If DECL_RTL is memory, we are in the normal case and either
4877 the address is not valid or it is not a register and -fforce-addr
4878 is specified, get the address into a register. */
4879
dc6d66b3
RK
4880 else if (GET_CODE (DECL_RTL (exp)) == MEM
4881 && modifier != EXPAND_CONST_ADDRESS
4882 && modifier != EXPAND_SUM
4883 && modifier != EXPAND_INITIALIZER
4884 && (! memory_address_p (DECL_MODE (exp),
4885 XEXP (DECL_RTL (exp), 0))
4886 || (flag_force_addr
4887 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4888 temp = change_address (DECL_RTL (exp), VOIDmode,
d6a5ac33 4889 copy_rtx (XEXP (DECL_RTL (exp), 0)));
1499e0a8 4890
dc6d66b3
RK
4891 /* If we got something, return it. But first, set the alignment
4892 the address is a register. */
4893 if (temp != 0)
4894 {
4895 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
4896 mark_reg_pointer (XEXP (temp, 0),
4897 DECL_ALIGN (exp) / BITS_PER_UNIT);
4898
4899 return temp;
4900 }
4901
1499e0a8
RK
4902 /* If the mode of DECL_RTL does not match that of the decl, it
4903 must be a promoted value. We return a SUBREG of the wanted mode,
4904 but mark it so that we know that it was already extended. */
4905
4906 if (GET_CODE (DECL_RTL (exp)) == REG
4907 && GET_MODE (DECL_RTL (exp)) != mode)
4908 {
1499e0a8
RK
4909 /* Get the signedness used for this variable. Ensure we get the
4910 same mode we got when the variable was declared. */
78911e8b
RK
4911 if (GET_MODE (DECL_RTL (exp))
4912 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
1499e0a8
RK
4913 abort ();
4914
4915 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4916 SUBREG_PROMOTED_VAR_P (temp) = 1;
4917 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4918 return temp;
4919 }
4920
bbf6f052
RK
4921 return DECL_RTL (exp);
4922
4923 case INTEGER_CST:
4924 return immed_double_const (TREE_INT_CST_LOW (exp),
4925 TREE_INT_CST_HIGH (exp),
4926 mode);
4927
4928 case CONST_DECL:
4929 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
4930
4931 case REAL_CST:
4932 /* If optimized, generate immediate CONST_DOUBLE
4933 which will be turned into memory by reload if necessary.
4934
4935 We used to force a register so that loop.c could see it. But
4936 this does not allow gen_* patterns to perform optimizations with
4937 the constants. It also produces two insns in cases like "x = 1.0;".
4938 On most machines, floating-point constants are not permitted in
4939 many insns, so we'd end up copying it to a register in any case.
4940
4941 Now, we do the copying in expand_binop, if appropriate. */
4942 return immed_real_const (exp);
4943
4944 case COMPLEX_CST:
4945 case STRING_CST:
4946 if (! TREE_CST_RTL (exp))
4947 output_constant_def (exp);
4948
4949 /* TREE_CST_RTL probably contains a constant address.
4950 On RISC machines where a constant address isn't valid,
4951 make some insns to get that address into a register. */
4952 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
4953 && modifier != EXPAND_CONST_ADDRESS
4954 && modifier != EXPAND_INITIALIZER
4955 && modifier != EXPAND_SUM
d6a5ac33
RK
4956 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
4957 || (flag_force_addr
4958 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
bbf6f052
RK
4959 return change_address (TREE_CST_RTL (exp), VOIDmode,
4960 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
4961 return TREE_CST_RTL (exp);
4962
4963 case SAVE_EXPR:
4964 context = decl_function_context (exp);
d6a5ac33 4965
bbf6f052
RK
4966 /* We treat inline_function_decl as an alias for the current function
4967 because that is the inline function whose vars, types, etc.
4968 are being merged into the current function.
4969 See expand_inline_function. */
4970 if (context == current_function_decl || context == inline_function_decl)
4971 context = 0;
4972
4973 /* If this is non-local, handle it. */
4974 if (context)
4975 {
4976 temp = SAVE_EXPR_RTL (exp);
4977 if (temp && GET_CODE (temp) == REG)
4978 {
4979 put_var_into_stack (exp);
4980 temp = SAVE_EXPR_RTL (exp);
4981 }
4982 if (temp == 0 || GET_CODE (temp) != MEM)
4983 abort ();
4984 return change_address (temp, mode,
4985 fix_lexical_addr (XEXP (temp, 0), exp));
4986 }
4987 if (SAVE_EXPR_RTL (exp) == 0)
4988 {
06089a8b
RK
4989 if (mode == VOIDmode)
4990 temp = const0_rtx;
4991 else
4992 temp = assign_temp (type, 0, 0, 0);
1499e0a8 4993
bbf6f052 4994 SAVE_EXPR_RTL (exp) = temp;
bbf6f052
RK
4995 if (!optimize && GET_CODE (temp) == REG)
4996 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
4997 save_expr_regs);
ff78f773
RK
4998
4999 /* If the mode of TEMP does not match that of the expression, it
5000 must be a promoted value. We pass store_expr a SUBREG of the
5001 wanted mode but mark it so that we know that it was already
5002 extended. Note that `unsignedp' was modified above in
5003 this case. */
5004
5005 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5006 {
5007 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5008 SUBREG_PROMOTED_VAR_P (temp) = 1;
5009 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5010 }
5011
4c7a0be9
JW
5012 if (temp == const0_rtx)
5013 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5014 else
5015 store_expr (TREE_OPERAND (exp, 0), temp, 0);
bbf6f052 5016 }
1499e0a8
RK
5017
5018 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5019 must be a promoted value. We return a SUBREG of the wanted mode,
adc22a04 5020 but mark it so that we know that it was already extended. */
1499e0a8
RK
5021
5022 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5023 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5024 {
e70d22c8
RK
5025 /* Compute the signedness and make the proper SUBREG. */
5026 promote_mode (type, mode, &unsignedp, 0);
5027 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
1499e0a8
RK
5028 SUBREG_PROMOTED_VAR_P (temp) = 1;
5029 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5030 return temp;
5031 }
5032
bbf6f052
RK
5033 return SAVE_EXPR_RTL (exp);
5034
679163cf
MS
5035 case UNSAVE_EXPR:
5036 {
5037 rtx temp;
5038 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5039 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5040 return temp;
5041 }
5042
b50d17a1
RK
5043 case PLACEHOLDER_EXPR:
5044 /* If there is an object on the head of the placeholder list,
5045 see if some object in it's references is of type TYPE. For
5046 further information, see tree.def. */
5047 if (placeholder_list)
5048 {
5049 tree object;
f59d43a9 5050 tree old_list = placeholder_list;
b50d17a1
RK
5051
5052 for (object = TREE_PURPOSE (placeholder_list);
330446eb
RK
5053 (TYPE_MAIN_VARIANT (TREE_TYPE (object))
5054 != TYPE_MAIN_VARIANT (type))
5055 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
5056 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
5057 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
5058 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
5059 object = TREE_OPERAND (object, 0))
5060 ;
5061
5062 if (object != 0
5063 && (TYPE_MAIN_VARIANT (TREE_TYPE (object))
5064 == TYPE_MAIN_VARIANT (type)))
5065 {
5066 /* Expand this object skipping the list entries before
5067 it was found in case it is also a PLACEHOLDER_EXPR.
5068 In that case, we want to translate it using subsequent
5069 entries. */
5070 placeholder_list = TREE_CHAIN (placeholder_list);
5071 temp = expand_expr (object, original_target, tmode, modifier);
5072 placeholder_list = old_list;
5073 return temp;
5074 }
5075 }
5076
5077 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5078 abort ();
5079
5080 case WITH_RECORD_EXPR:
5081 /* Put the object on the placeholder list, expand our first operand,
5082 and pop the list. */
5083 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5084 placeholder_list);
5085 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5086 tmode, modifier);
5087 placeholder_list = TREE_CHAIN (placeholder_list);
5088 return target;
5089
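/* For illustration: these two codes implement self-referential types.
   A size or bound expression inside a record can refer to the record
   object only through a PLACEHOLDER_EXPR; WITH_RECORD_EXPR supplies
   the object by pushing it on placeholder_list while operand 0 is
   expanded, so the PLACEHOLDER_EXPR case above can look it up. See
   tree.def for the full contract. */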
5090 case EXIT_EXPR:
5091 expand_exit_loop_if_false (NULL_PTR,
5092 invert_truthvalue (TREE_OPERAND (exp, 0)));
5093 return const0_rtx;
5094
5095 case LOOP_EXPR:
5096 push_temp_slots ();
5097 expand_start_loop (1);
5098 expand_expr_stmt (TREE_OPERAND (exp, 0));
5099 expand_end_loop ();
5100 pop_temp_slots ();
5101
5102 return const0_rtx;
5103
5104 case BIND_EXPR:
5105 {
5106 tree vars = TREE_OPERAND (exp, 0);
5107 int vars_need_expansion = 0;
5108
5109 /* Need to open a binding contour here because
5110 if there are any cleanups they must be contained here. */
5111 expand_start_bindings (0);
5112
5113 /* Mark the corresponding BLOCK for output in its proper place. */
5114 if (TREE_OPERAND (exp, 2) != 0
5115 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5116 insert_block (TREE_OPERAND (exp, 2));
5117
5118 /* If VARS have not yet been expanded, expand them now. */
5119 while (vars)
5120 {
5121 if (DECL_RTL (vars) == 0)
5122 {
5123 vars_need_expansion = 1;
5124 expand_decl (vars);
5125 }
5126 expand_decl_init (vars);
5127 vars = TREE_CHAIN (vars);
5128 }
5129
5130 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
5131
5132 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5133
5134 return temp;
5135 }
5136
5137 case RTL_EXPR:
5138 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5139 abort ();
5140 emit_insns (RTL_EXPR_SEQUENCE (exp));
5141 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5142 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
5143 free_temps_for_rtl_expr (exp);
5144 return RTL_EXPR_RTL (exp);
5145
5146 case CONSTRUCTOR:
5147 /* If we don't need the result, just ensure we evaluate any
5148 subexpressions. */
5149 if (ignore)
5150 {
5151 tree elt;
5152 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5153 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
5154 return const0_rtx;
5155 }
5156
5157 /* All elts simple constants => refer to a constant in memory. But
5158 if this is a non-BLKmode mode, let it store a field at a time
5159 since that should make a CONST_INT or CONST_DOUBLE when we
5160 fold. Likewise, if we have a target we can use, it is best to
5161 store directly into the target unless the type is large enough
5162 that memcpy will be used. If we are making an initializer and
3207b172 5163 all operands are constant, put it in memory as well. */
5164 else if ((TREE_STATIC (exp)
5165 && ((mode == BLKmode
5166 && ! (target != 0 && safe_from_p (target, exp)))
5167 || TREE_ADDRESSABLE (exp)
5168 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5169 && (move_by_pieces_ninsns
5170 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5171 TYPE_ALIGN (type) / BITS_PER_UNIT)
5172 > MOVE_RATIO)
5173 && ! mostly_zeros_p (exp))))
5174 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
5175 {
5176 rtx constructor = output_constant_def (exp);
5177 if (modifier != EXPAND_CONST_ADDRESS
5178 && modifier != EXPAND_INITIALIZER
5179 && modifier != EXPAND_SUM
5180 && (! memory_address_p (GET_MODE (constructor),
5181 XEXP (constructor, 0))
5182 || (flag_force_addr
5183 && GET_CODE (XEXP (constructor, 0)) != REG)))
5184 constructor = change_address (constructor, VOIDmode,
5185 XEXP (constructor, 0));
5186 return constructor;
5187 }
5188
5189 else
5190 {
5191 if (target == 0 || ! safe_from_p (target, exp))
5192 {
5193 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5194 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5195 else
5196 target = assign_temp (type, 0, 1, 1);
5197 }
5198
5199 if (TREE_READONLY (exp))
5200 {
5201 if (GET_CODE (target) == MEM)
5202 target = change_address (target, GET_MODE (target),
5203 XEXP (target, 0));
5204 RTX_UNCHANGING_P (target) = 1;
5205 }
5206
5207 store_constructor (exp, target, 0);
5208 return target;
5209 }
5210
5211 case INDIRECT_REF:
5212 {
5213 tree exp1 = TREE_OPERAND (exp, 0);
5214 tree exp2;
5215
5216 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5217 op0 = memory_address (mode, op0);
5218
5219 temp = gen_rtx (MEM, mode, op0);
5220 /* If address was computed by addition,
5221 mark this as an element of an aggregate. */
5222 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5223 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5224 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
5225 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5226 || (TREE_CODE (exp1) == ADDR_EXPR
5227 && (exp2 = TREE_OPERAND (exp1, 0))
5228 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5229 MEM_IN_STRUCT_P (temp) = 1;
5230 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5231
5232 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5233 here, because, in C and C++, the fact that a location is accessed
5234 through a pointer to const does not mean that the value there can
5235 never change. Languages where it can never change should
5236 also set TREE_STATIC. */
5237 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
5238 return temp;
5239 }
5240
5241 case ARRAY_REF:
5242 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5243 abort ();
5244
5245 {
5246 tree array = TREE_OPERAND (exp, 0);
5247 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5248 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5249 tree index = TREE_OPERAND (exp, 1);
5250 tree index_type = TREE_TYPE (index);
5251 int i;
5252
5253 if (TREE_CODE (low_bound) != INTEGER_CST
5254 && contains_placeholder_p (low_bound))
5255 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
5256
5257 /* Optimize the special-case of a zero lower bound.
5258
5259 We convert the low_bound to sizetype to avoid some problems
5260 with constant folding. (E.g. suppose the lower bound is 1,
5261 and its mode is QI. Without the conversion, (ARRAY
5262 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5263 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5264
5265 But sizetype isn't quite right either (especially if
5266 the lowbound is negative). FIXME */
5267
5268 if (! integer_zerop (low_bound))
5269 index = fold (build (MINUS_EXPR, index_type, index,
5270 convert (sizetype, low_bound)));
5271
5272 if ((TREE_CODE (index) != INTEGER_CST
5273 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5274 && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
5275 {
5276 /* Nonconstant array index or nonconstant element size, and
5277 not an array in an unaligned (packed) structure field.
5278 Generate the tree for *(&array+index) and expand that,
5279 except do it in a language-independent way
5280 and don't complain about non-lvalue arrays.
5281 `mark_addressable' should already have been called
5282 for any array for which this case will be reached. */
5283
5284 /* Don't forget the const or volatile flag from the array
5285 element. */
5286 tree variant_type = build_type_variant (type,
5287 TREE_READONLY (exp),
5288 TREE_THIS_VOLATILE (exp));
5289 tree array_adr = build1 (ADDR_EXPR,
5290 build_pointer_type (variant_type), array);
5291 tree elt;
5292 tree size = size_in_bytes (type);
5293
5294 /* Convert the integer argument to a type the same size as sizetype
5295 so the multiply won't overflow spuriously. */
5296 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5297 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5298 index);
5299
5300 if (TREE_CODE (size) != INTEGER_CST
5301 && contains_placeholder_p (size))
5302 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
5303
5304 /* Don't think the address has side effects
5305 just because the array does.
5306 (In some cases the address might have side effects,
5307 and we fail to record that fact here. However, it should not
5308 matter, since expand_expr should not care.) */
5309 TREE_SIDE_EFFECTS (array_adr) = 0;
5310
5311 elt
5312 = build1
5313 (INDIRECT_REF, type,
5314 fold (build (PLUS_EXPR,
5315 TYPE_POINTER_TO (variant_type),
5316 array_adr,
5317 fold
5318 (build1
5319 (NOP_EXPR,
5320 TYPE_POINTER_TO (variant_type),
5321 fold (build (MULT_EXPR, TREE_TYPE (index),
5322 index,
5323 convert (TREE_TYPE (index),
5324 size))))))));
5325
5326 /* Volatility, etc., of new expression is same as old
5327 expression. */
5328 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
5329 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
5330 TREE_READONLY (elt) = TREE_READONLY (exp);
5331
5332 return expand_expr (elt, target, tmode, modifier);
5333 }
5334
5335 /* Fold an expression like: "foo"[2].
5336 This is not done in fold so it won't happen inside &.
5337 Don't fold if this is for wide characters since it's too
5338 difficult to do correctly and this is a very rare case. */
5339
5340 if (TREE_CODE (array) == STRING_CST
5341 && TREE_CODE (index) == INTEGER_CST
5342 && !TREE_INT_CST_HIGH (index)
5343 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5344 && GET_MODE_CLASS (mode) == MODE_INT
5345 && GET_MODE_SIZE (mode) == 1)
5346 return GEN_INT (TREE_STRING_POINTER (array)[i]);
5347
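/* For example, expanding "foo"[2] yields the character constant 'o'
   directly; no memory reference to the string constant is emitted. */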
5348 /* If this is a constant index into a constant array,
5349 just get the value from the array. Handle both the cases when
5350 we have an explicit constructor and when our operand is a variable
5351 that was declared const. */
4af3895e 5352
5353 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5354 {
5355 if (TREE_CODE (index) == INTEGER_CST
5356 && TREE_INT_CST_HIGH (index) == 0)
5357 {
5358 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5359
5360 i = TREE_INT_CST_LOW (index);
5361 while (elem && i--)
5362 elem = TREE_CHAIN (elem);
5363 if (elem)
5364 return expand_expr (fold (TREE_VALUE (elem)), target,
5365 tmode, modifier);
5366 }
5367 }
5368
5369 else if (optimize >= 1
5370 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5371 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5372 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5373 {
5374 if (TREE_CODE (index) == INTEGER_CST
5375 && TREE_INT_CST_HIGH (index) == 0)
5376 {
5377 tree init = DECL_INITIAL (array);
5378
5379 i = TREE_INT_CST_LOW (index);
5380 if (TREE_CODE (init) == CONSTRUCTOR)
5381 {
5382 tree elem = CONSTRUCTOR_ELTS (init);
5383
5384 while (elem
5385 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5386 elem = TREE_CHAIN (elem);
5387 if (elem)
5388 return expand_expr (fold (TREE_VALUE (elem)), target,
5389 tmode, modifier);
5390 }
5391 else if (TREE_CODE (init) == STRING_CST
5392 && i < TREE_STRING_LENGTH (init))
5393 return GEN_INT (TREE_STRING_POINTER (init)[i]);
5394 }
5395 }
5396 }
5397
5398 /* Treat array-ref with constant index as a component-ref. */
5399
5400 case COMPONENT_REF:
5401 case BIT_FIELD_REF:
5402 /* If the operand is a CONSTRUCTOR, we can just extract the
5403 appropriate field if it is present. Don't do this if we have
5404 already written the data since we want to refer to that copy
5405 and varasm.c assumes that's what we'll do. */
5406 if (code != ARRAY_REF
5407 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5408 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5409 {
5410 tree elt;
5411
5412 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5413 elt = TREE_CHAIN (elt))
5414 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
5415 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5416 }
5417
5418 {
5419 enum machine_mode mode1;
5420 int bitsize;
5421 int bitpos;
5422 tree offset;
5423 int volatilep = 0;
5424 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5425 &mode1, &unsignedp, &volatilep);
5426 int alignment;
5427
5428 /* If we got back the original object, something is wrong. Perhaps
5429 we are evaluating an expression too early. In any event, don't
5430 infinitely recurse. */
5431 if (tem == exp)
5432 abort ();
5433
5434 /* If TEM's type is a union of variable size, pass TARGET to the inner
5435 computation, since it will need a temporary and TARGET is known
5436 to suffice. This occurs in unchecked conversion in Ada. */
5437
5438 op0 = expand_expr (tem,
5439 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5440 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5441 != INTEGER_CST)
5442 ? target : NULL_RTX),
5443 VOIDmode,
5444 modifier == EXPAND_INITIALIZER ? modifier : 0);
5445
5446 /* If this is a constant, put it into a register if it is a
5447 legitimate constant and memory if it isn't. */
5448 if (CONSTANT_P (op0))
5449 {
5450 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5451 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5452 op0 = force_reg (mode, op0);
5453 else
5454 op0 = validize_mem (force_const_mem (mode, op0));
5455 }
5456
5457 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
5458 if (offset != 0)
5459 {
5460 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5461
5462 if (GET_CODE (op0) != MEM)
5463 abort ();
5464 op0 = change_address (op0, VOIDmode,
5465 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
5466 force_reg (ptr_mode, offset_rtx)));
5467 /* If we have a variable offset, the known alignment
5468 is only that of the innermost structure containing the field.
5469 (Actually, we could sometimes do better by using the
5470 size of an element of the innermost array, but no need.) */
5471 if (TREE_CODE (exp) == COMPONENT_REF
5472 || TREE_CODE (exp) == BIT_FIELD_REF)
5473 alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5474 / BITS_PER_UNIT);
5475 }
5476
5477 /* Don't forget about volatility even if this is a bitfield. */
5478 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5479 {
5480 op0 = copy_rtx (op0);
5481 MEM_VOLATILE_P (op0) = 1;
5482 }
5483
5484 /* In cases where an aligned union has an unaligned object
5485 as a field, we might be extracting a BLKmode value from
5486 an integer-mode (e.g., SImode) object. Handle this case
5487 by doing the extract into an object as wide as the field
5488 (which we know to be the width of a basic mode), then
5489 storing into memory, and changing the mode to BLKmode.
5490 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5491 EXPAND_INITIALIZER), then we must not copy to a temporary. */
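/* For illustration: a 2-byte BLKmode field stored inside an SImode
   register or aligned union is fetched below with extract_bit_field
   in HImode, spilled to a fresh stack temporary, and that temporary
   is returned with its mode rewritten to BLKmode, so the caller sees
   an ordinary memory reference. */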
5492 if (mode1 == VOIDmode
5493 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5494 || (modifier != EXPAND_CONST_ADDRESS
5495 && modifier != EXPAND_INITIALIZER
5496 && ((mode1 != BLKmode && ! direct_load[(int) mode1])
5497 /* If the field isn't aligned enough to fetch as a memref,
5498 fetch it as a bit field. */
5499 || (SLOW_UNALIGNED_ACCESS
5500 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5501 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5502 {
5503 enum machine_mode ext_mode = mode;
5504
5505 if (ext_mode == BLKmode)
5506 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5507
5508 if (ext_mode == BLKmode)
5509 {
5510 /* In this case, BITPOS must start at a byte boundary and
5511 TARGET, if specified, must be a MEM. */
5512 if (GET_CODE (op0) != MEM
5513 || (target != 0 && GET_CODE (target) != MEM)
5514 || bitpos % BITS_PER_UNIT != 0)
5515 abort ();
5516
5517 op0 = change_address (op0, VOIDmode,
5518 plus_constant (XEXP (op0, 0),
5519 bitpos / BITS_PER_UNIT));
5520 if (target == 0)
5521 target = assign_temp (type, 0, 1, 1);
5522
5523 emit_block_move (target, op0,
5524 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5525 / BITS_PER_UNIT),
5526 1);
5527
5528 return target;
5529 }
5530
5531 op0 = validize_mem (op0);
5532
5533 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5534 mark_reg_pointer (XEXP (op0, 0), alignment);
5535
5536 op0 = extract_bit_field (op0, bitsize, bitpos,
5537 unsignedp, target, ext_mode, ext_mode,
5538 alignment,
5539 int_size_in_bytes (TREE_TYPE (tem)));
5540 if (mode == BLKmode)
5541 {
5542 rtx new = assign_stack_temp (ext_mode,
5543 bitsize / BITS_PER_UNIT, 0);
5544
5545 emit_move_insn (new, op0);
5546 op0 = copy_rtx (new);
5547 PUT_MODE (op0, BLKmode);
5548 MEM_IN_STRUCT_P (op0) = 1;
5549 }
5550
5551 return op0;
5552 }
5553
5554 /* If the result is BLKmode, use that to access the object
5555 now as well. */
5556 if (mode == BLKmode)
5557 mode1 = BLKmode;
5558
5559 /* Get a reference to just this component. */
5560 if (modifier == EXPAND_CONST_ADDRESS
5561 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5562 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
5563 (bitpos / BITS_PER_UNIT)));
5564 else
5565 op0 = change_address (op0, mode1,
5566 plus_constant (XEXP (op0, 0),
5567 (bitpos / BITS_PER_UNIT)));
5568 if (GET_CODE (XEXP (op0, 0)) == REG)
5569 mark_reg_pointer (XEXP (op0, 0), alignment);
5570
5571 MEM_IN_STRUCT_P (op0) = 1;
5572 MEM_VOLATILE_P (op0) |= volatilep;
5573 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
5574 return op0;
5575 if (target == 0)
5576 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5577 convert_move (target, op0, unsignedp);
5578 return target;
5579 }
5580
5581 /* Intended for a reference to a buffer of a file-object in Pascal.
5582 But it's not certain that a special tree code will really be
5583 necessary for these. INDIRECT_REF might work for them. */
5584 case BUFFER_REF:
5585 abort ();
5586
5587 case IN_EXPR:
5588 {
5589 /* Pascal set IN expression.
5590
5591 Algorithm:
5592 rlo = set_low - (set_low%bits_per_word);
5593 the_word = set [ (index - rlo)/bits_per_word ];
5594 bit_index = index % bits_per_word;
5595 bitmask = 1 << bit_index;
5596 return !!(the_word & bitmask); */
5597
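/* A rough C model of the algorithm above (hypothetical helper, not
   part of this file; assumes the set is an array of bytes and the
   bounds are non-negative):

   static int
   set_contains (unsigned char *set, int set_low, int index)
   {
     int rlo = set_low - (set_low % BITS_PER_UNIT);
     unsigned char the_word = set[(index - rlo) / BITS_PER_UNIT];
     int bit_index = index % BITS_PER_UNIT;
     return (the_word >> bit_index) & 1;
   }

   The RTL emitted below performs the same divide, modulo, shift and
   mask steps, wrapped in range checks on INDEX. */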
5598 tree set = TREE_OPERAND (exp, 0);
5599 tree index = TREE_OPERAND (exp, 1);
5600 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
5601 tree set_type = TREE_TYPE (set);
5602 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5603 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
5604 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5605 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5606 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5607 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5608 rtx setaddr = XEXP (setval, 0);
5609 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
5610 rtx rlow;
5611 rtx diff, quo, rem, addr, bit, result;
5612
5613 preexpand_calls (exp);
5614
5615 /* If domain is empty, answer is no. Likewise if index is constant
5616 and out of bounds. */
5617 if (((TREE_CODE (set_high_bound) == INTEGER_CST
5618 && TREE_CODE (set_low_bound) == INTEGER_CST
5619 && tree_int_cst_lt (set_high_bound, set_low_bound))
5620 || (TREE_CODE (index) == INTEGER_CST
5621 && TREE_CODE (set_low_bound) == INTEGER_CST
5622 && tree_int_cst_lt (index, set_low_bound))
5623 || (TREE_CODE (set_high_bound) == INTEGER_CST
5624 && TREE_CODE (index) == INTEGER_CST
5625 && tree_int_cst_lt (set_high_bound, index))))
5626 return const0_rtx;
5627
5628 if (target == 0)
5629 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5630
5631 /* If we get here, we have to generate the code for both cases
5632 (in range and out of range). */
5633
5634 op0 = gen_label_rtx ();
5635 op1 = gen_label_rtx ();
5636
5637 if (! (GET_CODE (index_val) == CONST_INT
5638 && GET_CODE (lo_r) == CONST_INT))
5639 {
5640 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
5641 GET_MODE (index_val), iunsignedp, 0);
5642 emit_jump_insn (gen_blt (op1));
5643 }
5644
5645 if (! (GET_CODE (index_val) == CONST_INT
5646 && GET_CODE (hi_r) == CONST_INT))
5647 {
5648 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
5649 GET_MODE (index_val), iunsignedp, 0);
5650 emit_jump_insn (gen_bgt (op1));
5651 }
5652
5653 /* Calculate the element number of bit zero in the first word
5654 of the set. */
5655 if (GET_CODE (lo_r) == CONST_INT)
5656 rlow = GEN_INT (INTVAL (lo_r)
5657 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
5658 else
5659 rlow = expand_binop (index_mode, and_optab, lo_r,
5660 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
5661 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5662
5663 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5664 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5665
5666 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
5667 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5668 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
5669 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5670
5671 addr = memory_address (byte_mode,
5672 expand_binop (index_mode, add_optab, diff,
5673 setaddr, NULL_RTX, iunsignedp,
5674 OPTAB_LIB_WIDEN));
5675
5676 /* Extract the bit we want to examine */
5677 bit = expand_shift (RSHIFT_EXPR, byte_mode,
5678 gen_rtx (MEM, byte_mode, addr),
5679 make_tree (TREE_TYPE (index), rem),
5680 NULL_RTX, 1);
5681 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5682 GET_MODE (target) == byte_mode ? target : 0,
5683 1, OPTAB_LIB_WIDEN);
5684
5685 if (result != target)
5686 convert_move (target, result, 1);
5687
5688 /* Output the code to handle the out-of-range case. */
5689 emit_jump (op0);
5690 emit_label (op1);
5691 emit_move_insn (target, const0_rtx);
5692 emit_label (op0);
5693 return target;
5694 }
5695
5696 case WITH_CLEANUP_EXPR:
5697 if (RTL_EXPR_RTL (exp) == 0)
5698 {
5699 RTL_EXPR_RTL (exp)
5700 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5701 cleanups_this_call
5702 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
5703 /* That's it for this cleanup. */
5704 TREE_OPERAND (exp, 2) = 0;
5705 (*interim_eh_hook) (NULL_TREE);
5706 }
5707 return RTL_EXPR_RTL (exp);
5708
5709 case CLEANUP_POINT_EXPR:
5710 {
5711 extern int temp_slot_level;
5712 tree old_cleanups = cleanups_this_call;
5713 int old_temp_level = target_temp_slot_level;
5714 push_temp_slots ();
5715 target_temp_slot_level = temp_slot_level;
5716 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5717 /* If we're going to use this value, load it up now. */
5718 if (! ignore)
5719 op0 = force_not_mem (op0);
5720 expand_cleanups_to (old_cleanups);
5721 preserve_temp_slots (op0);
5722 free_temp_slots ();
5723 pop_temp_slots ();
5724 target_temp_slot_level = old_temp_level;
5725 }
5726 return op0;
5727
5728 case CALL_EXPR:
5729 /* Check for a built-in function. */
5730 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5731 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5732 == FUNCTION_DECL)
5733 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5734 return expand_builtin (exp, target, subtarget, tmode, ignore);
5735
5736 /* If this call was expanded already by preexpand_calls,
5737 just return the result we got. */
5738 if (CALL_EXPR_RTL (exp) != 0)
5739 return CALL_EXPR_RTL (exp);
5740
5741 return expand_call (exp, target, ignore);
5742
5743 case NON_LVALUE_EXPR:
5744 case NOP_EXPR:
5745 case CONVERT_EXPR:
5746 case REFERENCE_EXPR:
5747 if (TREE_CODE (type) == UNION_TYPE)
5748 {
5749 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
5750 if (target == 0)
5751 {
5752 if (mode != BLKmode)
5753 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5754 else
5755 target = assign_temp (type, 0, 1, 1);
5756 }
5757
5758 if (GET_CODE (target) == MEM)
5759 /* Store data into beginning of memory target. */
5760 store_expr (TREE_OPERAND (exp, 0),
5761 change_address (target, TYPE_MODE (valtype), 0), 0);
5762
5763 else if (GET_CODE (target) == REG)
5764 /* Store this field into a union of the proper type. */
5765 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5766 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5767 VOIDmode, 0, 1,
5768 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
5769 else
5770 abort ();
5771
5772 /* Return the entire union. */
5773 return target;
5774 }
5775
5776 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5777 {
5778 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5779 modifier);
5780
5781 /* If the signedness of the conversion differs and OP0 is
5782 a promoted SUBREG, clear that indication since we now
5783 have to do the proper extension. */
5784 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5785 && GET_CODE (op0) == SUBREG)
5786 SUBREG_PROMOTED_VAR_P (op0) = 0;
5787
5788 return op0;
5789 }
5790
5791 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
5792 if (GET_MODE (op0) == mode)
5793 return op0;
5794
5795 /* If OP0 is a constant, just convert it into the proper mode. */
5796 if (CONSTANT_P (op0))
5797 return
5798 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5799 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5800
5801 if (modifier == EXPAND_INITIALIZER)
5802 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
5803
5804 if (target == 0)
5805 return
5806 convert_to_mode (mode, op0,
5807 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5808 else
5809 convert_move (target, op0,
5810 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5811 return target;
5812
5813 case PLUS_EXPR:
5814 /* We come here from MINUS_EXPR when the second operand is a constant. */
5815 plus_expr:
5816 this_optab = add_optab;
5817
5818 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5819 something else, make sure we add the register to the constant and
5820 then to the other thing. This case can occur during strength
5821 reduction and doing it this way will produce better code if the
5822 frame pointer or argument pointer is eliminated.
5823
5824 fold-const.c will ensure that the constant is always in the inner
5825 PLUS_EXPR, so the only case we need to do anything about is if
5826 sp, ap, or fp is our second argument, in which case we must swap
5827 the innermost first argument and our second argument. */
5828
5829 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5830 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
5831 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
5832 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
5833 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
5834 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
5835 {
5836 tree t = TREE_OPERAND (exp, 1);
5837
5838 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5839 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
5840 }
5841
5842 /* If the result is to be ptr_mode and we are adding an integer to
5843 something, we might be forming a constant. So try to use
5844 plus_constant. If it produces a sum and we can't accept it,
5845 use force_operand. This allows P = &ARR[const] to generate
5846 efficient code on machines where a SYMBOL_REF is not a valid
5847 address.
5848
5849 If this is an EXPAND_SUM call, always return the sum. */
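/* For example, for P = &ARR[5] with 4-byte elements, plus_constant
   folds the address to (const (plus (symbol_ref ARR) 20)); only when
   the caller cannot accept such a sum does force_operand copy it
   into a register. */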
5850 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
5851 || mode == ptr_mode)
5852 {
5853 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
5854 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5855 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
5856 {
5857 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
5858 EXPAND_SUM);
5859 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
5860 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5861 op1 = force_operand (op1, target);
5862 return op1;
5863 }
5864
5865 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5866 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
5867 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
5868 {
5869 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
5870 EXPAND_SUM);
5871 if (! CONSTANT_P (op0))
5872 {
5873 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5874 VOIDmode, modifier);
5875 /* Don't go to both_summands if modifier
5876 says it's not right to return a PLUS. */
5877 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5878 goto binop2;
5879 goto both_summands;
5880 }
5881 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
5882 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5883 op0 = force_operand (op0, target);
5884 return op0;
5885 }
5886 }
5887
5888 /* No sense saving up arithmetic to be done
5889 if it's all in the wrong mode to form part of an address.
5890 And force_operand won't know whether to sign-extend or
5891 zero-extend. */
5892 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5893 || mode != ptr_mode)
5894 goto binop;
5895
5896 preexpand_calls (exp);
5897 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5898 subtarget = 0;
5899
5900 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
5901 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
5902
5903 both_summands:
5904 /* Make sure any term that's a sum with a constant comes last. */
5905 if (GET_CODE (op0) == PLUS
5906 && CONSTANT_P (XEXP (op0, 1)))
5907 {
5908 temp = op0;
5909 op0 = op1;
5910 op1 = temp;
5911 }
5912 /* If adding to a sum including a constant,
5913 associate it to put the constant outside. */
5914 if (GET_CODE (op1) == PLUS
5915 && CONSTANT_P (XEXP (op1, 1)))
5916 {
5917 rtx constant_term = const0_rtx;
5918
5919 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
5920 if (temp != 0)
5921 op0 = temp;
5922 /* Ensure that MULT comes first if there is one. */
5923 else if (GET_CODE (op0) == MULT)
5924 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
5925 else
5926 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
5927
5928 /* Let's also eliminate constants from op0 if possible. */
5929 op0 = eliminate_constant_term (op0, &constant_term);
5930
5931 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
5932 their sum should be a constant. Form it into OP1, since the
5933 result we want will then be OP0 + OP1. */
5934
5935 temp = simplify_binary_operation (PLUS, mode, constant_term,
5936 XEXP (op1, 1));
5937 if (temp != 0)
5938 op1 = temp;
5939 else
5940 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
5941 }
5942
5943 /* Put a constant term last and put a multiplication first. */
5944 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
5945 temp = op1, op1 = op0, op0 = temp;
5946
5947 temp = simplify_binary_operation (PLUS, mode, op0, op1);
5948 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
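/* Net effect, for illustration: with op0 = (plus X 4) and
   op1 = (plus Y 8), the code above returns (plus (plus X Y) 12),
   leaving a single trailing constant that memory_address can fold
   into a displacement. */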
5949
5950 case MINUS_EXPR:
5951 /* For initializers, we are allowed to return a MINUS of two
5952 symbolic constants. Here we handle all cases when both operands
5953 are constant. */
5954 /* Handle difference of two symbolic constants,
5955 for the sake of an initializer. */
5956 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5957 && really_constant_p (TREE_OPERAND (exp, 0))
5958 && really_constant_p (TREE_OPERAND (exp, 1)))
5959 {
5960 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
5961 VOIDmode, modifier);
5962 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5963 VOIDmode, modifier);
5964
5965 /* If the last operand is a CONST_INT, use plus_constant of
5966 the negated constant. Else make the MINUS. */
5967 if (GET_CODE (op1) == CONST_INT)
5968 return plus_constant (op0, - INTVAL (op1));
5969 else
5970 return gen_rtx (MINUS, mode, op0, op1);
5971 }
5972 /* Convert A - const to A + (-const). */
5973 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5974 {
5975 tree negated = fold (build1 (NEGATE_EXPR, type,
5976 TREE_OPERAND (exp, 1)));
5977
5978 /* Deal with the case where we can't negate the constant
5979 in TYPE. */
5980 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
5981 {
5982 tree newtype = signed_type (type);
5983 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
5984 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
5985 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
5986
5987 if (! TREE_OVERFLOW (newneg))
5988 return expand_expr (convert (type,
5989 build (PLUS_EXPR, newtype,
5990 newop0, newneg)),
5991 target, tmode, modifier);
5992 }
5993 else
5994 {
5995 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
5996 goto plus_expr;
5997 }
5998 }
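/* For example, with unsigned u, u - 3 cannot represent -3 in TYPE,
   so the sum is rebuilt in the corresponding signed type as
   u + (-3) and converted back; when negation is safe, control
   simply jumps to the PLUS_EXPR code via plus_expr. */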
5999 this_optab = sub_optab;
6000 goto binop;
6001
6002 case MULT_EXPR:
6003 preexpand_calls (exp);
6004 /* If first operand is constant, swap them.
6005 Thus the following special case checks need only
6006 check the second operand. */
6007 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6008 {
6009 register tree t1 = TREE_OPERAND (exp, 0);
6010 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6011 TREE_OPERAND (exp, 1) = t1;
6012 }
6013
6014 /* Attempt to return something suitable for generating an
6015 indexed address, for machines that support that. */
6016
6017 if (modifier == EXPAND_SUM && mode == ptr_mode
6018 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6019 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6020 {
6021 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
6022
6023 /* Apply distributive law if OP0 is x+c. */
6024 if (GET_CODE (op0) == PLUS
6025 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6026 return gen_rtx (PLUS, mode,
6027 gen_rtx (MULT, mode, XEXP (op0, 0),
6028 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6029 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6030 * INTVAL (XEXP (op0, 1))));
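/* I.e. (X + c) * n is returned as (X * n) + (c * n), keeping the
   constant term exposed so an enclosing address computation can
   fold it into a displacement. */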
6031
6032 if (GET_CODE (op0) != REG)
6033 op0 = force_operand (op0, NULL_RTX);
6034 if (GET_CODE (op0) != REG)
6035 op0 = copy_to_mode_reg (mode, op0);
6036
6037 return gen_rtx (MULT, mode, op0,
6038 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
6039 }
6040
6041 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6042 subtarget = 0;
6043
6044 /* Check for multiplying things that have been extended
6045 from a narrower type. If this machine supports multiplying
6046 in that narrower type with a result in the desired type,
6047 do it that way, and avoid the explicit type-conversion. */
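/* For example, on a machine providing a widening multiply pattern
   such as mulhisi3, the C fragment

   short a, b;
   int c = a * b;

   arrives here as (int) a * (int) b and is expanded as a single
   HImode x HImode -> SImode multiply instead of two sign extensions
   followed by a full SImode multiply. */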
6048 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6049 && TREE_CODE (type) == INTEGER_TYPE
6050 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6051 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6052 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6053 && int_fits_type_p (TREE_OPERAND (exp, 1),
6054 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6055 /* Don't use a widening multiply if a shift will do. */
6056 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
6057 > HOST_BITS_PER_WIDE_INT)
6058 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6059 ||
6060 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6061 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6062 ==
6063 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6064 /* If both operands are extended, they must either both
6065 be zero-extended or both be sign-extended. */
6066 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6067 ==
6068 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6069 {
6070 enum machine_mode innermode
6071 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
6072 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6073 ? smul_widen_optab : umul_widen_optab);
6074 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6075 ? umul_widen_optab : smul_widen_optab);
6076 if (mode == GET_MODE_WIDER_MODE (innermode))
6077 {
6078 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6079 {
6080 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6081 NULL_RTX, VOIDmode, 0);
6082 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6083 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6084 VOIDmode, 0);
6085 else
6086 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6087 NULL_RTX, VOIDmode, 0);
6088 goto binop2;
6089 }
6090 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6091 && innermode == word_mode)
6092 {
6093 rtx htem;
6094 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6095 NULL_RTX, VOIDmode, 0);
6096 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6097 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6098 VOIDmode, 0);
6099 else
6100 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6101 NULL_RTX, VOIDmode, 0);
6102 temp = expand_binop (mode, other_optab, op0, op1, target,
6103 unsignedp, OPTAB_LIB_WIDEN);
6104 htem = expand_mult_highpart_adjust (innermode,
6105 gen_highpart (innermode, temp),
6106 op0, op1,
6107 gen_highpart (innermode, temp),
6108 unsignedp);
6109 emit_move_insn (gen_highpart (innermode, temp), htem);
6110 return temp;
6111 }
6112 }
6113 }
6114 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6115 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6116 return expand_mult (mode, op0, op1, target, unsignedp);
6117
6118 case TRUNC_DIV_EXPR:
6119 case FLOOR_DIV_EXPR:
6120 case CEIL_DIV_EXPR:
6121 case ROUND_DIV_EXPR:
6122 case EXACT_DIV_EXPR:
6123 preexpand_calls (exp);
6124 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6125 subtarget = 0;
6126 /* Possible optimization: compute the dividend with EXPAND_SUM
6127 then if the divisor is constant can optimize the case
6128 where some terms of the dividend have coeffs divisible by it. */
6129 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6130 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6131 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6132
6133 case RDIV_EXPR:
6134 this_optab = flodiv_optab;
6135 goto binop;
6136
6137 case TRUNC_MOD_EXPR:
6138 case FLOOR_MOD_EXPR:
6139 case CEIL_MOD_EXPR:
6140 case ROUND_MOD_EXPR:
6141 preexpand_calls (exp);
6142 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6143 subtarget = 0;
6144 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6145 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6146 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6147
6148 case FIX_ROUND_EXPR:
6149 case FIX_FLOOR_EXPR:
6150 case FIX_CEIL_EXPR:
6151 abort (); /* Not used for C. */
6152
6153 case FIX_TRUNC_EXPR:
6154 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6155 if (target == 0)
6156 target = gen_reg_rtx (mode);
6157 expand_fix (target, op0, unsignedp);
6158 return target;
6159
6160 case FLOAT_EXPR:
6161 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6162 if (target == 0)
6163 target = gen_reg_rtx (mode);
6164 /* expand_float can't figure out what to do if FROM has VOIDmode.
6165 So give it the correct mode. With -O, cse will optimize this. */
6166 if (GET_MODE (op0) == VOIDmode)
6167 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6168 op0);
6169 expand_float (target, op0,
6170 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6171 return target;
6172
6173 case NEGATE_EXPR:
6174 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6175 temp = expand_unop (mode, neg_optab, op0, target, 0);
6176 if (temp == 0)
6177 abort ();
6178 return temp;
6179
6180 case ABS_EXPR:
6181 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6182
6183 /* Handle complex values specially. */
6184 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6185 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6186 return expand_complex_abs (mode, op0, target, unsignedp);
6187
6188 /* Unsigned abs is simply the operand. Testing here means we don't
6189 risk generating incorrect code below. */
6190 if (TREE_UNSIGNED (type))
6191 return op0;
6192
6193 return expand_abs (mode, op0, target, unsignedp,
6194 safe_from_p (target, TREE_OPERAND (exp, 0)));
6195
6196 case MAX_EXPR:
6197 case MIN_EXPR:
6198 target = original_target;
6199 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
6200 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6201 || GET_MODE (target) != mode
6202 || (GET_CODE (target) == REG
6203 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6204 target = gen_reg_rtx (mode);
6205 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6206 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6207
6208 /* First try to do it with a special MIN or MAX instruction.
6209 If that does not win, use a conditional jump to select the proper
6210 value. */
6211 this_optab = (TREE_UNSIGNED (type)
6212 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6213 : (code == MIN_EXPR ? smin_optab : smax_optab));
6214
6215 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6216 OPTAB_WIDEN);
6217 if (temp != 0)
6218 return temp;
6219
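/* When no MIN/MAX instruction matches, the fallback emitted below
   is, in outline:

   target = op0;
   if (target >= op1)   -- GE for MAX_EXPR, LE for MIN_EXPR
     goto done;
   target = op1;
   done:

   with the comparison done word by word when the integer mode is
   too wide for can_compare_p. */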
6220 /* At this point, a MEM target is no longer useful; we will get better
6221 code without it. */
6222
6223 if (GET_CODE (target) == MEM)
6224 target = gen_reg_rtx (mode);
6225
6226 if (target != op0)
6227 emit_move_insn (target, op0);
6228
6229 op0 = gen_label_rtx ();
6230
6231 /* If this mode is an integer too wide to compare properly,
6232 compare word by word. Rely on cse to optimize constant cases. */
6233 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6234 {
6235 if (code == MAX_EXPR)
6236 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6237 target, op1, NULL_RTX, op0);
6238 else
6239 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6240 op1, target, NULL_RTX, op0);
6241 emit_move_insn (target, op1);
6242 }
6243 else
6244 {
6245 if (code == MAX_EXPR)
6246 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6247 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6248 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6249 else
6250 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6251 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6252 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6253 if (temp == const0_rtx)
6254 emit_move_insn (target, op1);
6255 else if (temp != const_true_rtx)
6256 {
6257 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6258 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6259 else
6260 abort ();
6261 emit_move_insn (target, op1);
6262 }
6263 }
6264 emit_label (op0);
6265 return target;
6266
6267 case BIT_NOT_EXPR:
6268 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6269 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6270 if (temp == 0)
6271 abort ();
6272 return temp;
6273
6274 case FFS_EXPR:
6275 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6276 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6277 if (temp == 0)
6278 abort ();
6279 return temp;
6280
6281 /* ??? Can optimize bitwise operations with one arg constant.
6282 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6283 and (a bitwise1 b) bitwise2 b (etc)
6284 but that is probably not worth while. */
6285
6286 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6287 boolean values when we want in all cases to compute both of them. In
6288 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6289 as actual zero-or-1 values and then bitwise anding. In cases where
6290 there cannot be any side effects, better code would be made by
6291 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6292 how to recognize those cases. */
6293
6294 case TRUTH_AND_EXPR:
6295 case BIT_AND_EXPR:
6296 this_optab = and_optab;
6297 goto binop;
6298
6299 case TRUTH_OR_EXPR:
6300 case BIT_IOR_EXPR:
6301 this_optab = ior_optab;
6302 goto binop;
6303
6304 case TRUTH_XOR_EXPR:
6305 case BIT_XOR_EXPR:
6306 this_optab = xor_optab;
6307 goto binop;
6308
6309 case LSHIFT_EXPR:
6310 case RSHIFT_EXPR:
6311 case LROTATE_EXPR:
6312 case RROTATE_EXPR:
6313 preexpand_calls (exp);
6314 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6315 subtarget = 0;
6316 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6317 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6318 unsignedp);
6319
6320 /* Could determine the answer when only additive constants differ. Also,
6321 the addition of one can be handled by changing the condition. */
6322 case LT_EXPR:
6323 case LE_EXPR:
6324 case GT_EXPR:
6325 case GE_EXPR:
6326 case EQ_EXPR:
6327 case NE_EXPR:
6328 preexpand_calls (exp);
6329 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6330 if (temp != 0)
6331 return temp;
6332
6333 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6334 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6335 && original_target
6336 && GET_CODE (original_target) == REG
6337 && (GET_MODE (original_target)
6338 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6339 {
6340 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6341 VOIDmode, 0);
6342
6343 if (temp != original_target)
6344 temp = copy_to_reg (temp);
6345
6346 op1 = gen_label_rtx ();
6347 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6348 GET_MODE (temp), unsignedp, 0);
6349 emit_jump_insn (gen_beq (op1));
6350 emit_move_insn (temp, const1_rtx);
6351 emit_label (op1);
6352 return temp;
6353 }
6354
6355 /* If no set-flag instruction, must generate a conditional
6356 store into a temporary variable. Drop through
6357 and handle this like && and ||. */
6358
6359 case TRUTH_ANDIF_EXPR:
6360 case TRUTH_ORIF_EXPR:
6361 if (! ignore
6362 && (target == 0 || ! safe_from_p (target, exp)
6363 /* Make sure we don't have a hard reg (such as function's return
6364 value) live across basic blocks, if not optimizing. */
6365 || (!optimize && GET_CODE (target) == REG
6366 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6367 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6368
6369 if (target)
6370 emit_clr_insn (target);
6371
6372 op1 = gen_label_rtx ();
6373 jumpifnot (exp, op1);
6374
6375 if (target)
6376 emit_0_to_1_insn (target);
6377
6378 emit_label (op1);
6379 return ignore ? const0_rtx : target;
6380
6381 case TRUTH_NOT_EXPR:
6382 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6383 /* The parser is careful to generate TRUTH_NOT_EXPR
6384 only with operands that are always zero or one. */
6385 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6386 target, 1, OPTAB_LIB_WIDEN);
6387 if (temp == 0)
6388 abort ();
6389 return temp;
6390
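/* Since the operand is known to be 0 or 1, logical negation reduces
   to target = op0 ^ 1; no comparison or branch is needed. */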
6391 case COMPOUND_EXPR:
6392 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6393 emit_queue ();
6394 return expand_expr (TREE_OPERAND (exp, 1),
6395 (ignore ? const0_rtx : target),
6396 VOIDmode, 0);
6397
6398 case COND_EXPR:
6399 {
6400 rtx flag = NULL_RTX;
6401 tree left_cleanups = NULL_TREE;
6402 tree right_cleanups = NULL_TREE;
6403
6404 /* Used to save a pointer to the place to put the setting of
6405 the flag that indicates if this side of the conditional was
6406 taken. We backpatch the code, if we find out later that we
6407 have any conditional cleanups that need to be performed. */
6408 rtx dest_right_flag = NULL_RTX;
6409 rtx dest_left_flag = NULL_RTX;
6410
6411 /* Note that COND_EXPRs whose type is a structure or union
6412 are required to be constructed to contain assignments of
6413 a temporary variable, so that we can evaluate them here
6414 for side effect only. If type is void, we must do likewise. */
6415
6416 /* If an arm of the branch requires a cleanup,
6417 only that cleanup is performed. */
6418
6419 tree singleton = 0;
6420 tree binary_op = 0, unary_op = 0;
6421 tree old_cleanups = cleanups_this_call;
6422
6423 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6424 convert it to our mode, if necessary. */
6425 if (integer_onep (TREE_OPERAND (exp, 1))
6426 && integer_zerop (TREE_OPERAND (exp, 2))
6427 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6428 {
6429 if (ignore)
6430 {
6431 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6432 modifier);
6433 return const0_rtx;
6434 }
6435
6436 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
6437 if (GET_MODE (op0) == mode)
6438 return op0;
6439
6440 if (target == 0)
6441 target = gen_reg_rtx (mode);
6442 convert_move (target, op0, unsignedp);
6443 return target;
6444 }
6445
6446 /* If we are not to produce a result, we have no target. Otherwise,
6447 if a target was specified use it; it will not be used as an
6448 intermediate target unless it is safe. If no target, use a
6449 temporary. */
6450
6451 if (ignore)
6452 temp = 0;
6453 else if (original_target
6454 && safe_from_p (original_target, TREE_OPERAND (exp, 0))
6455 && GET_MODE (original_target) == mode
6456 && ! (GET_CODE (original_target) == MEM
6457 && MEM_VOLATILE_P (original_target)))
6458 temp = original_target;
6459 else
6460 temp = assign_temp (type, 0, 0, 1);
6461
6462 /* Check for X ? A + B : A. If we have this, we can copy
6463 A to the output and conditionally add B. Similarly for unary
6464 operations. Don't do this if X has side-effects because
6465 those side effects might affect A or B and the "?" operation is
6466 a sequence point in ANSI. (We test for side effects later.) */
6467
6468 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6469 && operand_equal_p (TREE_OPERAND (exp, 2),
6470 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6471 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6472 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6473 && operand_equal_p (TREE_OPERAND (exp, 1),
6474 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6475 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6476 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6477 && operand_equal_p (TREE_OPERAND (exp, 2),
6478 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6479 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6480 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6481 && operand_equal_p (TREE_OPERAND (exp, 1),
6482 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6483 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6484
6485 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
6486 operation, do this as A + (X != 0). Similarly for other simple
6487 binary operators. */
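/* For example, flag ? n + 1 : n becomes n + (flag != 0) when
   do_store_flag can materialize the comparison as a 0/1 value,
   trading the branch for a single add. */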
6488 if (temp && singleton && binary_op
6489 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6490 && (TREE_CODE (binary_op) == PLUS_EXPR
6491 || TREE_CODE (binary_op) == MINUS_EXPR
6492 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6493 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6494 && integer_onep (TREE_OPERAND (binary_op, 1))
6495 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6496 {
6497 rtx result;
6498 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6499 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6500 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6501 : xor_optab);
6502
6503 /* If we had X ? A : A + 1, do this as A + (X == 0).
6504
6505 We have to invert the truth value here and then put it
6506 back later if do_store_flag fails. We cannot simply copy
6507 TREE_OPERAND (exp, 0) to another variable and modify that
6508 because invert_truthvalue can modify the tree pointed to
6509 by its argument. */
6510 if (singleton == TREE_OPERAND (exp, 1))
6511 TREE_OPERAND (exp, 0)
6512 = invert_truthvalue (TREE_OPERAND (exp, 0));
6513
6514 result = do_store_flag (TREE_OPERAND (exp, 0),
6515 (safe_from_p (temp, singleton)
6516 ? temp : NULL_RTX),
6517 mode, BRANCH_COST <= 1);
6518
6519 if (result)
6520 {
6521 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6522 return expand_binop (mode, boptab, op1, result, temp,
6523 unsignedp, OPTAB_LIB_WIDEN);
6524 }
6525 else if (singleton == TREE_OPERAND (exp, 1))
6526 TREE_OPERAND (exp, 0)
6527 = invert_truthvalue (TREE_OPERAND (exp, 0));
6528 }
6529
6530 do_pending_stack_adjust ();
6531 NO_DEFER_POP;
6532 op0 = gen_label_rtx ();
6533
6534 flag = gen_reg_rtx (word_mode);
6535 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6536 {
6537 if (temp != 0)
6538 {
6539 /* If the target conflicts with the other operand of the
6540 binary op, we can't use it. Also, we can't use the target
6541 if it is a hard register, because evaluating the condition
6542 might clobber it. */
6543 if ((binary_op
6544 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
6545 || (GET_CODE (temp) == REG
6546 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6547 temp = gen_reg_rtx (mode);
6548 store_expr (singleton, temp, 0);
6549 }
6550 else
906c4e36 6551 expand_expr (singleton,
2937cf87 6552 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5dab5552 6553 dest_left_flag = get_last_insn ();
bbf6f052
RK
6554 if (singleton == TREE_OPERAND (exp, 1))
6555 jumpif (TREE_OPERAND (exp, 0), op0);
6556 else
6557 jumpifnot (TREE_OPERAND (exp, 0), op0);
6558
5dab5552
MS
6559 /* Allows cleanups up to here. */
6560 old_cleanups = cleanups_this_call;
bbf6f052
RK
6561 if (binary_op && temp == 0)
6562 /* Just touch the other operand. */
6563 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 6564 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6565 else if (binary_op)
6566 store_expr (build (TREE_CODE (binary_op), type,
6567 make_tree (type, temp),
6568 TREE_OPERAND (binary_op, 1)),
6569 temp, 0);
6570 else
6571 store_expr (build1 (TREE_CODE (unary_op), type,
6572 make_tree (type, temp)),
6573 temp, 0);
6574 op1 = op0;
5dab5552 6575 dest_right_flag = get_last_insn ();
bbf6f052
RK
6576 }
#if 0
	  /* This is now done in jump.c and is better done there because it
	     produces shorter register lifetimes.  */

	  /* Check for both possibilities, either constants or variables
	     in registers (but not the same as the target!).  If so, we can
	     save branches by assigning one, branching, and assigning the
	     other.  */
	  else if (temp && GET_MODE (temp) != BLKmode
		   && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
		       || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
			    || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
			   && DECL_RTL (TREE_OPERAND (exp, 1))
			   && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
			   && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
		   && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
		       || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
			    || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
			   && DECL_RTL (TREE_OPERAND (exp, 2))
			   && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
			   && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
	    {
	      if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
		temp = gen_reg_rtx (mode);
	      store_expr (TREE_OPERAND (exp, 2), temp, 0);
	      dest_left_flag = get_last_insn ();
	      jumpifnot (TREE_OPERAND (exp, 0), op0);

	      /* Allows cleanups up to here.  */
	      old_cleanups = cleanups_this_call;
	      store_expr (TREE_OPERAND (exp, 1), temp, 0);
	      op1 = op0;
	      dest_right_flag = get_last_insn ();
	    }
#endif
	  /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
	     comparison operator.  If we have one of these cases, set the
	     output to A, branch on A (cse will merge these two references),
	     then set the output to FOO.  */
	  else if (temp
		   && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		   && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		   && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				       TREE_OPERAND (exp, 1), 0)
		   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		   && safe_from_p (temp, TREE_OPERAND (exp, 2)))
	    {
	      if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
		temp = gen_reg_rtx (mode);
	      store_expr (TREE_OPERAND (exp, 1), temp, 0);
	      dest_left_flag = get_last_insn ();
	      jumpif (TREE_OPERAND (exp, 0), op0);

	      /* Allows cleanups up to here.  */
	      old_cleanups = cleanups_this_call;
	      store_expr (TREE_OPERAND (exp, 2), temp, 0);
	      op1 = op0;
	      dest_right_flag = get_last_insn ();
	    }
	  else if (temp
		   && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		   && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		   && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				       TREE_OPERAND (exp, 2), 0)
		   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		   && safe_from_p (temp, TREE_OPERAND (exp, 1)))
	    {
	      if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
		temp = gen_reg_rtx (mode);
	      store_expr (TREE_OPERAND (exp, 2), temp, 0);
	      dest_left_flag = get_last_insn ();
	      jumpifnot (TREE_OPERAND (exp, 0), op0);

	      /* Allows cleanups up to here.  */
	      old_cleanups = cleanups_this_call;
	      store_expr (TREE_OPERAND (exp, 1), temp, 0);
	      op1 = op0;
	      dest_right_flag = get_last_insn ();
	    }
	  else
	    {
	      op1 = gen_label_rtx ();
	      jumpifnot (TREE_OPERAND (exp, 0), op0);

	      /* Allows cleanups up to here.  */
	      old_cleanups = cleanups_this_call;
	      if (temp != 0)
		store_expr (TREE_OPERAND (exp, 1), temp, 0);
	      else
		expand_expr (TREE_OPERAND (exp, 1),
			     ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	      dest_left_flag = get_last_insn ();

	      /* Handle conditional cleanups, if any.  */
	      left_cleanups = defer_cleanups_to (old_cleanups);

	      emit_queue ();
	      emit_jump_insn (gen_jump (op1));
	      emit_barrier ();
	      emit_label (op0);
	      if (temp != 0)
		store_expr (TREE_OPERAND (exp, 2), temp, 0);
	      else
		expand_expr (TREE_OPERAND (exp, 2),
			     ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	      dest_right_flag = get_last_insn ();
	    }

	  /* Handle conditional cleanups, if any.  */
	  right_cleanups = defer_cleanups_to (old_cleanups);

	  emit_queue ();
	  emit_label (op1);
	  OK_DEFER_POP;
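
	  /* Implementation note (added for clarity): FLAG is a pseudo that
	     records at run time which arm of the conditional was executed
	     (1 for the first arm, 0 for the second).  The moves that set it
	     are spliced in below at DEST_LEFT_FLAG and DEST_RIGHT_FLAG, and
	     FLAG is then wrapped in an RTL_EXPR so the deferred cleanups can
	     be guarded by an ordinary COND_EXPR at cleanup time.  */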
	  /* Add back in any conditional cleanups.  */
	  if (left_cleanups || right_cleanups)
	    {
	      tree new_cleanups;
	      tree cond;
	      rtx last;

	      /* Now that we know that a flag is needed, go back and add in the
		 setting of the flag.  */

	      /* Do the left side flag.  */
	      last = get_last_insn ();
	      /* Flag left cleanups as needed.  */
	      emit_move_insn (flag, const1_rtx);
	      /* ??? deprecated, use sequences instead.  */
	      reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);

	      /* Do the right side flag.  */
	      last = get_last_insn ();
	      /* Flag right cleanups as needed.  */
	      emit_move_insn (flag, const0_rtx);
	      /* ??? deprecated, use sequences instead.  */
	      reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);

	      /* All cleanups must be on the function_obstack.  */
	      push_obstacks_nochange ();
	      resume_temporary_allocation ();

	      /* Convert FLAG, which is an rtx, into a tree.  */
	      cond = make_node (RTL_EXPR);
	      TREE_TYPE (cond) = integer_type_node;
	      RTL_EXPR_RTL (cond) = flag;
	      RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
	      cond = save_expr (cond);

	      if (! left_cleanups)
		left_cleanups = integer_zero_node;
	      if (! right_cleanups)
		right_cleanups = integer_zero_node;
	      new_cleanups = build (COND_EXPR, void_type_node,
				    truthvalue_conversion (cond),
				    left_cleanups, right_cleanups);
	      new_cleanups = fold (new_cleanups);

	      pop_obstacks ();

	      /* Now add in the conditionalized cleanups.  */
	      cleanups_this_call
		= tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
	      (*interim_eh_hook) (NULL_TREE);
	    }
	  return temp;
	}

    case TARGET_EXPR:
      {
	/* Something needs to be initialized, but we didn't know
	   where that thing was when building the tree.  For example,
	   it could be the return value of a function, or a parameter
	   to a function which is laid down in the stack, or a temporary
	   variable which must be passed by reference.

	   We guarantee that the expression will either be constructed
	   or copied into our original target.  */

	tree slot = TREE_OPERAND (exp, 0);
	tree cleanups = NULL_TREE;
	tree exp1;
	rtx temp;

	if (TREE_CODE (slot) != VAR_DECL)
	  abort ();

	if (! ignore)
	  target = original_target;

	if (target == 0)
	  {
	    if (DECL_RTL (slot) != 0)
	      {
		target = DECL_RTL (slot);
		/* If we have already expanded the slot, don't do
		   it again.  (mrs)  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      {
		target = assign_temp (type, 2, 1, 1);
		/* All temp slots at this level must not conflict.  */
		preserve_temp_slots (target);
		DECL_RTL (slot) = target;

		/* Since SLOT is not known to the called function
		   to belong to its stack frame, we must build an explicit
		   cleanup.  This case occurs when we must build up a reference
		   to pass the reference as an argument.  In this case,
		   it is very likely that such a reference need not be
		   built here.  */

		if (TREE_OPERAND (exp, 2) == 0)
		  TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
		cleanups = TREE_OPERAND (exp, 2);
	      }
	  }
	else
	  {
	    /* This case does occur, when expanding a parameter which
	       needs to be constructed on the stack.  The target
	       is the actual stack address that we want to initialize.
	       The function we call will perform the cleanup in this case.  */

	    /* If we have already assigned it space, use that space,
	       not the target that we were passed in, as our target
	       parameter is only a hint.  */
	    if (DECL_RTL (slot) != 0)
	      {
		target = DECL_RTL (slot);
		/* If we have already expanded the slot, don't do
		   it again.  (mrs)  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }

	    DECL_RTL (slot) = target;
	  }

	exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
	/* Mark it as expanded.  */
	TREE_OPERAND (exp, 1) = NULL_TREE;

	store_expr (exp1, target, 0);

	if (cleanups)
	  {
	    cleanups_this_call = tree_cons (NULL_TREE,
					    cleanups,
					    cleanups_this_call);
	    (*interim_eh_hook) (NULL_TREE);
	  }

	return target;
      }

    case INIT_EXPR:
      {
	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);
	tree noncopied_parts = 0;
	tree lhs_type = TREE_TYPE (lhs);

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
	  noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
						  TYPE_NONCOPIED_PARTS (lhs_type));
	while (noncopied_parts != 0)
	  {
	    expand_assignment (TREE_VALUE (noncopied_parts),
			       TREE_PURPOSE (noncopied_parts), 0, 0);
	    noncopied_parts = TREE_CHAIN (noncopied_parts);
	  }
	return temp;
      }

    case MODIFY_EXPR:
      {
	/* If LHS is complex, expand calls in RHS before computing it.
	   That's so we don't compute a pointer and save it over a call.
	   If LHS is simple, compute it first so we can give it as a
	   target if the RHS is just a call.  This avoids an extra temp and
	   copy, and that prevents a partial-subsumption which makes bad code.
	   Actually we could treat component_refs of vars like vars.  */

	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);
	tree noncopied_parts = 0;
	tree lhs_type = TREE_TYPE (lhs);

	temp = 0;

	if (TREE_CODE (lhs) != VAR_DECL
	    && TREE_CODE (lhs) != RESULT_DECL
	    && TREE_CODE (lhs) != PARM_DECL)
	  preexpand_calls (exp);

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
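	/* Illustrative example: for `s.x |= s.y' with two one-bit fields
	   and an ignored result, the code below emits the equivalent of
	   `if (s.y) s.x = 1;' -- a jump on the source bit plus a constant
	   store -- instead of a read-modify-write of the destination.  */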
	if (ignore
	    && TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
	    && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
	  {
	    rtx label = gen_label_rtx ();

	    do_jump (TREE_OPERAND (rhs, 1),
		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
					     (TREE_CODE (rhs) == BIT_IOR_EXPR
					      ? integer_one_node
					      : integer_zero_node)),
			       0, 0);
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
	    && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
	  noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
						  TYPE_NONCOPIED_PARTS (lhs_type));

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	while (noncopied_parts != 0)
	  {
	    expand_assignment (TREE_PURPOSE (noncopied_parts),
			       TREE_VALUE (noncopied_parts), 0, 0);
	    noncopied_parts = TREE_CHAIN (noncopied_parts);
	  }
	return temp;
      }

    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
      return expand_increment (exp, 0, ignore);

    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Faster to treat as pre-increment if result is not used.  */
      return expand_increment (exp, ! ignore, ignore);

    case ADDR_EXPR:
      /* If nonzero, TEMP will be set to the address of something that might
	 be a MEM corresponding to a stack slot.  */
      temp = 0;

      /* Are we taking the address of a nested function?  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
	  && decl_function_context (TREE_OPERAND (exp, 0)) != 0
	  && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0)))
	{
	  op0 = trampoline_address (TREE_OPERAND (exp, 0));
	  op0 = force_operand (op0, target);
	}
      /* If we are taking the address of something erroneous, just
	 return a zero.  */
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
	return const0_rtx;
      else
	{
	  /* We make sure to pass const0_rtx down if we came in with
	     ignore set, to avoid doing the cleanups twice for something.  */
	  op0 = expand_expr (TREE_OPERAND (exp, 0),
			     ignore ? const0_rtx : NULL_RTX, VOIDmode,
			     (modifier == EXPAND_INITIALIZER
			      ? modifier : EXPAND_CONST_ADDRESS));

	  /* If we are going to ignore the result, OP0 will have been set
	     to const0_rtx, so just return it.  Don't get confused and
	     think we are taking the address of the constant.  */
	  if (ignore)
	    return op0;

	  op0 = protect_from_queue (op0, 0);

	  /* We would like the object in memory.  If it is a constant,
	     we can have it be statically allocated into memory.  For
	     a non-constant (REG, SUBREG or CONCAT), we need to allocate some
	     memory and store the value into it.  */

	  if (CONSTANT_P (op0))
	    op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				   op0);
	  else if (GET_CODE (op0) == MEM)
	    {
	      mark_temp_addr_taken (op0);
	      temp = XEXP (op0, 0);
	    }
	  else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
		   || GET_CODE (op0) == CONCAT)
	    {
	      /* If this object is in a register, it must not
		 be BLKmode.  */
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      rtx memloc = assign_temp (inner_type, 1, 1, 1);

	      mark_temp_addr_taken (memloc);
	      emit_move_insn (memloc, op0);
	      op0 = memloc;
	    }

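	  /* Example of the path above (added note): taking `&x' where X has
	     been promoted to a REG forces the value into a fresh stack
	     temporary (MEMLOC) so that it has an address at all; the
	     temporary's address then stands in for `&x'.  */
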
	  if (GET_CODE (op0) != MEM)
	    abort ();

	  if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	    {
	      temp = XEXP (op0, 0);
#ifdef POINTERS_EXTEND_UNSIGNED
	      if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
		  && mode == ptr_mode)
		temp = convert_memory_address (ptr_mode, temp);
#endif
	      return temp;
	    }

	  op0 = force_operand (XEXP (op0, 0), target);
	}

      if (flag_force_addr && GET_CODE (op0) != REG)
	op0 = force_reg (Pmode, op0);

      if (GET_CODE (op0) == REG
	  && ! REG_USERVAR_P (op0))
	mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);

      /* If we might have had a temp slot, add an equivalent address
	 for it.  */
      if (temp != 0)
	update_temp_slot_address (temp, op0);

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
	  && mode == ptr_mode)
	op0 = convert_memory_address (ptr_mode, op0);
#endif

      return op0;

    case ENTRY_VALUE_EXPR:
      abort ();

    /* COMPLEX type for Extended Pascal & Fortran.  */
    case COMPLEX_EXPR:
      {
	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx insns;

	/* Get the rtx code of the operands.  */
	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

	start_sequence ();

	/* Move the real (op0) and imaginary (op1) parts to their location.  */
	emit_move_insn (gen_realpart (mode, target), op0);
	emit_move_insn (gen_imagpart (mode, target), op1);

	insns = get_insns ();
	end_sequence ();

	/* Complex construction should appear as a single unit.  */
	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
	else
	  emit_insns (insns);

	return target;
      }

    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_realpart (mode, op0);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_imagpart (mode, op0);

    case CONJ_EXPR:
      {
	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx imag_t;
	rtx insns;

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (mode);

	start_sequence ();

	/* Store the realpart and the negated imagpart to target.  */
	emit_move_insn (gen_realpart (partmode, target),
			gen_realpart (partmode, op0));

	imag_t = gen_imagpart (partmode, target);
	temp = expand_unop (partmode, neg_optab,
			    gen_imagpart (partmode, op0), imag_t, 0);
	if (temp != imag_t)
	  emit_move_insn (imag_t, temp);

	insns = get_insns ();
	end_sequence ();

	/* Conjugate should appear as a single unit.
	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
	else
	  emit_insns (insns);

	return target;
      }

    case ERROR_MARK:
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
	return op0;
      return const0_rtx;

    default:
      return (*lang_expand_expr) (exp, original_target, tmode, modifier);
    }

  /* Here to do an ordinary binary operator, generating an instruction
     from the optab already placed in `this_optab'.  */
 binop:
  preexpand_calls (exp);
  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
    subtarget = 0;
  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
 binop2:
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  if (temp == 0)
    abort ();
  return temp;
}

/* Emit bytecode to evaluate the given expression EXP to the stack.  */

void
bc_expand_expr (exp)
     tree exp;
{
  enum tree_code code;
  tree type, arg0;
  rtx r;
  struct binary_operator *binoptab;
  struct unary_operator *unoptab;
  struct increment_operator *incroptab;
  struct bc_label *lab, *lab1;
  enum bytecode_opcode opcode;

  code = TREE_CODE (exp);

  switch (code)
    {
    case PARM_DECL:

      if (DECL_RTL (exp) == 0)
	{
	  error_with_decl (exp, "prior parameter's size depends on `%s'");
	  return;
	}

      bc_load_parmaddr (DECL_RTL (exp));
      bc_load_memory (TREE_TYPE (exp), exp);

      return;

    case VAR_DECL:

      if (DECL_RTL (exp) == 0)
	abort ();

#if 0
      if (BYTECODE_LABEL (DECL_RTL (exp)))
	bc_load_externaddr (DECL_RTL (exp));
      else
	bc_load_localaddr (DECL_RTL (exp));
#endif
      if (TREE_PUBLIC (exp))
	bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
			       BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
      else
	bc_load_localaddr (DECL_RTL (exp));

      bc_load_memory (TREE_TYPE (exp), exp);
      return;

    case INTEGER_CST:

#ifdef DEBUG_PRINT_CODE
      fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
#endif
      bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
						    ? SImode
						    : TYPE_MODE (TREE_TYPE (exp)))],
			   (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
      return;

    case REAL_CST:

#if 0
#ifdef DEBUG_PRINT_CODE
      fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
#endif
      /* FIX THIS: find a better way to pass real_cst's. -bson  */
      bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
			   (double) TREE_REAL_CST (exp));
#else
      abort ();
#endif

      return;

    case CALL_EXPR:

      /* We build a call description vector describing the type of
	 the return value and of the arguments; this call vector,
	 together with a pointer to a location for the return value
	 and the base of the argument list, is passed to the low
	 level machine dependent call subroutine, which is responsible
	 for putting the arguments wherever real functions expect
	 them, as well as getting the return value back.  */
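      /* Illustrative layout (derived from the code below): the finished
	 calldesc vector reads

	     nargs,
	     typecode (ret), size (ret),
	     typecode (arg1), size (arg1), ... typecode (argN), size (argN)

	 since every entry is consed onto the front while the argument
	 list is temporarily reversed.  */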
      {
	tree calldesc = 0, arg;
	int nargs = 0, i;
	rtx retval;

	/* Push the evaluated args on the evaluation stack in reverse
	   order.  Also make an entry for each arg in the calldesc
	   vector while we're at it.  */

	TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));

	for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
	  {
	    ++nargs;
	    bc_expand_expr (TREE_VALUE (arg));

	    calldesc = tree_cons ((tree) 0,
				  size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
				  calldesc);
	    calldesc = tree_cons ((tree) 0,
				  bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
				  calldesc);
	  }

	TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));

	/* Allocate a location for the return value and push its
	   address on the evaluation stack.  Also make an entry
	   at the front of the calldesc for the return value type.  */

	type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
	bc_load_localaddr (retval);

	calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
	calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);

	/* Prepend the argument count.  */
	calldesc = tree_cons ((tree) 0,
			      build_int_2 (nargs, 0),
			      calldesc);

	/* Push the address of the call description vector on the stack.  */
	calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
	TREE_TYPE (calldesc) = build_array_type (integer_type_node,
						 build_index_type (build_int_2 (nargs * 2, 0)));
	r = output_constant_def (calldesc);
	bc_load_externaddr (r);

	/* Push the address of the function to be called.  */
	bc_expand_expr (TREE_OPERAND (exp, 0));

	/* Call the function, popping its address and the calldesc vector
	   address off the evaluation stack in the process.  */
	bc_emit_instruction (call);

	/* Pop the arguments off the stack.  */
	bc_adjust_stack (nargs);

	/* Load the return value onto the stack.  */
	bc_load_localaddr (retval);
	bc_load_memory (type, TREE_OPERAND (exp, 0));
      }
      return;

    case SAVE_EXPR:

      if (!SAVE_EXPR_RTL (exp))
	{
	  /* First time around: copy to local variable.  */
	  SAVE_EXPR_RTL (exp)
	    = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
				 TYPE_ALIGN (TREE_TYPE (exp)));
	  bc_expand_expr (TREE_OPERAND (exp, 0));
	  bc_emit_instruction (duplicate);

	  bc_load_localaddr (SAVE_EXPR_RTL (exp));
	  bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
	}
      else
	{
	  /* Consecutive reference: use the saved copy.  */
	  bc_load_localaddr (SAVE_EXPR_RTL (exp));
	  bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
	}
      return;

#if 0
      /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
	 how are they handled instead?  */
    case LET_STMT:

      TREE_USED (exp) = 1;
      bc_expand_expr (STMT_BODY (exp));
      return;
#endif

    case NOP_EXPR:
    case CONVERT_EXPR:

      bc_expand_expr (TREE_OPERAND (exp, 0));
      bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
      return;

    case MODIFY_EXPR:

      expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
      return;

    case ADDR_EXPR:

      bc_expand_address (TREE_OPERAND (exp, 0));
      return;

    case INDIRECT_REF:

      bc_expand_expr (TREE_OPERAND (exp, 0));
      bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
      return;

    case ARRAY_REF:

      bc_expand_expr (bc_canonicalize_array_ref (exp));
      return;

    case COMPONENT_REF:

      bc_expand_component_address (exp);

      /* If we have a bitfield, generate a proper load.  */
      bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
      return;

    case COMPOUND_EXPR:

      bc_expand_expr (TREE_OPERAND (exp, 0));
      bc_emit_instruction (drop);
      bc_expand_expr (TREE_OPERAND (exp, 1));
      return;

    case COND_EXPR:

      bc_expand_expr (TREE_OPERAND (exp, 0));
      bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
      lab = bc_get_bytecode_label ();
      bc_emit_bytecode (xjumpifnot);
      bc_emit_bytecode_labelref (lab);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);
#endif
      bc_expand_expr (TREE_OPERAND (exp, 1));
      lab1 = bc_get_bytecode_label ();
      bc_emit_bytecode (jump);
      bc_emit_bytecode_labelref (lab1);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);
#endif

      bc_emit_bytecode_labeldef (lab);
      bc_expand_expr (TREE_OPERAND (exp, 2));
      bc_emit_bytecode_labeldef (lab1);
      return;
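
      /* Shape of the bytecode emitted above for `c ? a : b' (added note):

	     <c> <truth conversion>  xjumpifnot L1
	     <a>  jump L2
	   L1: <b>
	   L2:  */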

    case TRUTH_ANDIF_EXPR:

      opcode = xjumpifnot;
      goto andorif;

    case TRUTH_ORIF_EXPR:

      opcode = xjumpif;
      goto andorif;

    case PLUS_EXPR:

      binoptab = optab_plus_expr;
      goto binop;

    case MINUS_EXPR:

      binoptab = optab_minus_expr;
      goto binop;

    case MULT_EXPR:

      binoptab = optab_mult_expr;
      goto binop;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:

      binoptab = optab_trunc_div_expr;
      goto binop;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:

      binoptab = optab_trunc_mod_expr;
      goto binop;

    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case MAX_EXPR:
    case MIN_EXPR:
    case FFS_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      abort ();			/* FIXME */

    case RDIV_EXPR:

      binoptab = optab_rdiv_expr;
      goto binop;

    case BIT_AND_EXPR:

      binoptab = optab_bit_and_expr;
      goto binop;

    case BIT_IOR_EXPR:

      binoptab = optab_bit_ior_expr;
      goto binop;

    case BIT_XOR_EXPR:

      binoptab = optab_bit_xor_expr;
      goto binop;

    case LSHIFT_EXPR:

      binoptab = optab_lshift_expr;
      goto binop;

    case RSHIFT_EXPR:

      binoptab = optab_rshift_expr;
      goto binop;

    case TRUTH_AND_EXPR:

      binoptab = optab_truth_and_expr;
      goto binop;

    case TRUTH_OR_EXPR:

      binoptab = optab_truth_or_expr;
      goto binop;

    case LT_EXPR:

      binoptab = optab_lt_expr;
      goto binop;

    case LE_EXPR:

      binoptab = optab_le_expr;
      goto binop;

    case GE_EXPR:

      binoptab = optab_ge_expr;
      goto binop;

    case GT_EXPR:

      binoptab = optab_gt_expr;
      goto binop;

    case EQ_EXPR:

      binoptab = optab_eq_expr;
      goto binop;

    case NE_EXPR:

      binoptab = optab_ne_expr;
      goto binop;

    case NEGATE_EXPR:

      unoptab = optab_negate_expr;
      goto unop;

    case BIT_NOT_EXPR:

      unoptab = optab_bit_not_expr;
      goto unop;

    case TRUTH_NOT_EXPR:

      unoptab = optab_truth_not_expr;
      goto unop;

    case PREDECREMENT_EXPR:

      incroptab = optab_predecrement_expr;
      goto increment;

    case PREINCREMENT_EXPR:

      incroptab = optab_preincrement_expr;
      goto increment;

    case POSTDECREMENT_EXPR:

      incroptab = optab_postdecrement_expr;
      goto increment;

    case POSTINCREMENT_EXPR:

      incroptab = optab_postincrement_expr;
      goto increment;

    case CONSTRUCTOR:

      bc_expand_constructor (exp);
      return;

    case ERROR_MARK:
    case RTL_EXPR:

      return;

    case BIND_EXPR:
      {
	tree vars = TREE_OPERAND (exp, 0);
	int vars_need_expansion = 0;

	/* Need to open a binding contour here because
	   if there are any cleanups they must be contained here.  */
	expand_start_bindings (0);

	/* Mark the corresponding BLOCK for output.  */
	if (TREE_OPERAND (exp, 2) != 0)
	  TREE_USED (TREE_OPERAND (exp, 2)) = 1;

	/* If VARS have not yet been expanded, expand them now.  */
	while (vars)
	  {
	    if (DECL_RTL (vars) == 0)
	      {
		vars_need_expansion = 1;
		expand_decl (vars);
	      }
	    expand_decl_init (vars);
	    vars = TREE_CHAIN (vars);
	  }

	bc_expand_expr (TREE_OPERAND (exp, 1));

	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

	return;
      }
    }

  abort ();

 binop:

  bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
			      TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
  return;

 unop:

  bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
  return;

 andorif:

  bc_expand_expr (TREE_OPERAND (exp, 0));
  bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
  lab = bc_get_bytecode_label ();

  bc_emit_instruction (duplicate);
  bc_emit_bytecode (opcode);
  bc_emit_bytecode_labelref (lab);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif

  bc_emit_instruction (drop);

  bc_expand_expr (TREE_OPERAND (exp, 1));
  bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
  bc_emit_bytecode_labeldef (lab);
  return;
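
  /* The short-circuit code emitted above for `a && b' (OPCODE ==
     xjumpifnot; `a || b' uses xjumpif) has the shape (added note):

	 <a> <truth conversion>  duplicate  xjumpifnot L
	 drop  <b> <truth conversion>
       L:

     The duplicated truth value is what remains on the stack when the
     jump is taken.  */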

 increment:

  type = TREE_TYPE (TREE_OPERAND (exp, 0));

  /* Push the quantum.  */
  bc_expand_expr (TREE_OPERAND (exp, 1));

  /* Convert it to the lvalue's type.  */
  bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);

  /* Push the address of the lvalue.  */
  bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
			  TREE_OPERAND (exp, 0)));

  /* Perform the actual increment.  */
  bc_expand_increment (incroptab, type);
  return;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to (if it is not a POINTER_TYPE, 0 is returned).

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */
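/* Worked example of the PLUS_EXPR case below (added note): for `p + 6' with
   MAX_ALIGN of 64 bits, the 6-byte offset is 48 bits; 48 has bits set below
   64, so MAX_ALIGN is halved to 32 and then to 16, i.e. the sum may only be
   assumed 16-bit (2-byte) aligned no matter how well P itself is aligned.  */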

static int
get_pointer_alignment (exp, max_align)
     tree exp;
     unsigned max_align;
{
  unsigned align, inner;

  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
    return 0;

  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case NOP_EXPR:
	case CONVERT_EXPR:
	case NON_LVALUE_EXPR:
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
	    return align;
	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
	    return align;

	  while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
		  & (max_align - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (exp) == FUNCTION_DECL)
	    align = FUNCTION_BOUNDARY;
	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
	    align = DECL_ALIGN (exp);
#ifdef CONSTANT_ALIGNMENT
	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
	    align = CONSTANT_ALIGNMENT (exp, align);
#endif
	  return MIN (align, max_align);

	default:
	  return align;
	}
    }
}

/* Return the tree node and offset if a given argument corresponds to
   a string constant.  */

static tree
string_constant (arg, ptr_offset)
     tree arg;
     tree *ptr_offset;
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = integer_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	{
	  *ptr_offset = arg1;
	  return TREE_OPERAND (arg0, 0);
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	{
	  *ptr_offset = arg0;
	  return TREE_OPERAND (arg1, 0);
	}
    }

  return 0;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

static tree
c_strlen (src)
     tree src;
{
  tree offset_node;
  int offset, max;
  char *ptr;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;
  max = TREE_STRING_LENGTH (src);
  ptr = TREE_STRING_POINTER (src);
  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;
      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return 0;
      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  */
      /* This would perhaps not be valid if we were dealing with named
	 arrays in addition to literal string constants.  */
      return size_binop (MINUS_EXPR, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character.  */
  if (offset_node == 0)
    offset = 0;
  else
    {
      /* Did we get a long long offset?  If so, punt.  */
      if (TREE_INT_CST_HIGH (offset_node) != 0)
	return 0;
      offset = TREE_INT_CST_LOW (offset_node);
    }
  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      warning ("offset outside bounds of constant string");
      return 0;
    }
  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return size_int (strlen (ptr + offset));
}
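
/* Example (added note): for the argument `"hello" + 2', string_constant
   returns the STRING_CST with a constant offset of 2, and the code above
   returns size_int (strlen ("llo")), i.e. 3.  */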

rtx
expand_builtin_return_addr (fndecl_code, count, tem)
     enum built_in_function fndecl_code;
     int count;
     rtx tem;
{
  int i;

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the sparc, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  SETUP_FRAME_ADDRESSES ();
#endif

  /* On the sparc, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
    }

  /* For __builtin_frame_address, return what we've got.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return tem;

  /* For __builtin_return_address, get the return address from that
     frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_rtx (MEM, Pmode, tem);
#endif
  return tem;
}

/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

#define CALLED_AS_BUILT_IN(NODE) \
   (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))

static rtx
expand_builtin (exp, target, subtarget, mode, ignore)
     tree exp;
     rtx target;
     rtx subtarget;
     enum machine_mode mode;
     int ignore;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  rtx op0;
  rtx lab1, insns;
  enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
  optab builtin_optab;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_FABS:
      /* build_function_call changes these into ABS_EXPR.  */
      abort ();

    case BUILT_IN_SIN:
    case BUILT_IN_COS:
      /* Treat these like sqrt, but only if the user asks for them.  */
      if (! flag_fast_math)
	break;
    case BUILT_IN_FSQRT:
      /* If not optimizing, call the library function.  */
      if (! optimize)
	break;

      if (arglist == 0
	  /* Arg could be wrong type if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
	break;

      /* Stabilize and compute the argument.  */
      if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
	  && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
	{
	  exp = copy_node (exp);
	  arglist = copy_node (arglist);
	  TREE_OPERAND (exp, 1) = arglist;
	  TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
	}
      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);

      /* Make a suitable register to place result in.  */
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      emit_queue ();
      start_sequence ();

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_SIN:
	  builtin_optab = sin_optab; break;
	case BUILT_IN_COS:
	  builtin_optab = cos_optab; break;
	case BUILT_IN_FSQRT:
	  builtin_optab = sqrt_optab; break;
	default:
	  abort ();
	}

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
			    builtin_optab, op0, target, 0);

      /* If we were unable to expand via the builtin, stop the
	 sequence (without outputting the insns) and break, causing
	 a call to the library function.  */
      if (target == 0)
	{
	  end_sequence ();
	  break;
	}

      /* Check the results by default.  But if flag_fast_math is turned on,
	 then assume sqrt will always be called with valid arguments.  */

      if (! flag_fast_math)
	{
	  /* Don't define the builtin FP instructions
	     if your machine is not IEEE.  */
	  if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
	    abort ();

	  lab1 = gen_label_rtx ();

	  /* Test the result; if it is NaN, set errno=EDOM because
	     the argument was not in the domain.  */
	  emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
	  emit_jump_insn (gen_beq (lab1));

#ifdef TARGET_EDOM
	  {
#ifdef GEN_ERRNO_RTX
	    rtx errno_rtx = GEN_ERRNO_RTX;
#else
	    rtx errno_rtx
	      = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
#endif

	    emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
	  }
#else
	  /* We can't set errno=EDOM directly; let the library call do it.
	     Pop the arguments right away in case the call gets deleted.  */
	  NO_DEFER_POP;
	  expand_call (exp, target, 0);
	  OK_DEFER_POP;
#endif

	  emit_label (lab1);
	}

      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insns (insns);

      return target;
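
      /* Note on the NaN test above: comparing TARGET with itself using EQ
	 succeeds for every non-NaN value under IEEE arithmetic, so the
	 branch to LAB1 is taken exactly when the result is well defined,
	 skipping the errno handling.  */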

      /* __builtin_apply_args returns block of memory allocated on
	 the stack into which is stored the arg pointer, structure
	 value address, static chain, and all the registers that might
	 possibly be used in performing a function call.  The code is
	 moved to the start of the function so the incoming values are
	 saved.  */
    case BUILT_IN_APPLY_ARGS:
      /* Don't do __builtin_apply_args more than once in a function.
	 Save the result of the first call and reuse it.  */
      if (apply_args_value != 0)
	return apply_args_value;
      {
	/* When this function is called, it means that registers must be
	   saved on entry to this function.  So we migrate the
	   call to the first insn of this function.  */
	rtx temp;
	rtx seq;

	start_sequence ();
	temp = expand_builtin_apply_args ();
	seq = get_insns ();
	end_sequence ();

	apply_args_value = temp;

	/* Put the sequence after the NOTE that starts the function.
	   If this is inside a SEQUENCE, make the outer-level insn
	   chain current, so the code is placed at the start of the
	   function.  */
	push_topmost_sequence ();
	emit_insns_before (seq, NEXT_INSN (get_insns ()));
	pop_topmost_sequence ();
	return temp;
      }

      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
	 FUNCTION with a copy of the parameters described by
	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
	 allocated on the stack into which is stored all the registers
	 that might possibly be used for returning the result of a
	 function.  ARGUMENTS is the value returned by
	 __builtin_apply_args.  ARGSIZE is the number of bytes of
	 arguments that must be copied.  ??? How should this value be
	 computed?  We'll also need a safe worst case value for varargs
	 functions.  */
    case BUILT_IN_APPLY:
      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
	return const0_rtx;
      else
	{
	  int i;
	  tree t;
	  rtx ops[3];

	  for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
	    ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);

	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
	}

      /* __builtin_return (RESULT) causes the function to return the
	 value described by RESULT.  RESULT is the address of the block of
	 memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (arglist
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
	expand_builtin_return (expand_expr (TREE_VALUE (arglist),
					    NULL_RTX, VOIDmode, 0));
      return const0_rtx;
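
      /* Illustrative use of the three builtins above, e.g. in a generic
	 forwarding stub (a sketch, not code from this file; the argument
	 size of 64 is a caller-supplied worst-case guess, as the ???
	 comment above notes):

	     void *args = __builtin_apply_args ();
	     void *result = __builtin_apply (target_fn, args, 64);
	     __builtin_return (result);  */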

    case BUILT_IN_SAVEREGS:
      /* Don't do __builtin_saveregs more than once in a function.
	 Save the result of the first call and reuse it.  */
      if (saveregs_value != 0)
	return saveregs_value;
      {
	/* When this function is called, it means that registers must be
	   saved on entry to this function.  So we migrate the
	   call to the first insn of this function.  */
	rtx temp;
	rtx seq;

	/* Now really call the function.  `expand_call' does not call
	   expand_builtin, so there is no danger of infinite recursion here.  */
	start_sequence ();

#ifdef EXPAND_BUILTIN_SAVEREGS
	/* Do whatever the machine needs done in this case.  */
	temp = EXPAND_BUILTIN_SAVEREGS (arglist);
#else
	/* The register where the function returns its value
	   is likely to have something else in it, such as an argument.
	   So preserve that register around the call.  */

	if (value_mode != VOIDmode)
	  {
	    rtx valreg = hard_libcall_value (value_mode);
	    rtx saved_valreg = gen_reg_rtx (value_mode);

	    emit_move_insn (saved_valreg, valreg);
	    temp = expand_call (exp, target, ignore);
	    emit_move_insn (valreg, saved_valreg);
	  }
	else
	  /* Generate the call, putting the value in a pseudo.  */
	  temp = expand_call (exp, target, ignore);
#endif

	seq = get_insns ();
	end_sequence ();

	saveregs_value = temp;

	/* Put the sequence after the NOTE that starts the function.
	   If this is inside a SEQUENCE, make the outer-level insn
	   chain current, so the code is placed at the start of the
	   function.  */
	push_topmost_sequence ();
	emit_insns_before (seq, NEXT_INSN (get_insns ()));
	pop_topmost_sequence ();
	return temp;
      }

      /* __builtin_args_info (N) returns word N of the arg space info
	 for the current function.  The number and meanings of words
	 are controlled by the definition of CUMULATIVE_ARGS.  */
    case BUILT_IN_ARGS_INFO:
      {
	int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
	int i;
	int *word_ptr = (int *) &current_function_args_info;
	tree type, elts, result;

	if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
	  fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
		 __FILE__, __LINE__);

	if (arglist != 0)
	  {
	    tree arg = TREE_VALUE (arglist);
	    if (TREE_CODE (arg) != INTEGER_CST)
	      error ("argument of `__builtin_args_info' must be constant");
	    else
	      {
		int wordnum = TREE_INT_CST_LOW (arg);

		if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
		  error ("argument of `__builtin_args_info' out of range");
		else
		  return GEN_INT (word_ptr[wordnum]);
	      }
	  }
	else
	  error ("missing argument in `__builtin_args_info'");

	return const0_rtx;

#if 0
	for (i = 0; i < nwords; i++)
	  elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));

	type = build_array_type (integer_type_node,
				 build_index_type (build_int_2 (nwords, 0)));
	result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
	TREE_CONSTANT (result) = 1;
	TREE_STATIC (result) = 1;
	result = build (INDIRECT_REF, build_pointer_type (type), result);
	TREE_CONSTANT (result) = 1;
	return expand_expr (result, NULL_RTX, VOIDmode, 0);
#endif
      }

      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      {
	tree fntype = TREE_TYPE (current_function_decl);

	if ((TYPE_ARG_TYPES (fntype) == 0
	     || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
		 == void_type_node))
	    && ! current_function_varargs)
	  {
	    error ("`va_start' used in function with fixed args");
	    return const0_rtx;
	  }

	if (arglist)
	  {
	    tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
	    tree arg = TREE_VALUE (arglist);

	    /* Strip off all nops for the sake of the comparison.  This
	       is not quite the same as STRIP_NOPS.  It does more.
	       We must also strip off INDIRECT_REF for C++ reference
	       parameters.  */
	    while (TREE_CODE (arg) == NOP_EXPR
		   || TREE_CODE (arg) == CONVERT_EXPR
		   || TREE_CODE (arg) == NON_LVALUE_EXPR
		   || TREE_CODE (arg) == INDIRECT_REF)
	      arg = TREE_OPERAND (arg, 0);
	    if (arg != last_parm)
	      warning ("second parameter of `va_start' not last named argument");
	  }
	else if (! current_function_varargs)
	  /* Evidently an out of date version of <stdarg.h>; can't validate
	     va_start's second argument, but can still work as intended.  */
	  warning ("`__builtin_next_arg' called without an argument");
      }

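      /* The result below is INTERNAL_ARG_POINTER plus the offset of the
	 first anonymous argument, i.e. the address just past the last
	 named parameter's slot (added note).  */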
      return expand_binop (Pmode, add_optab,
			   current_function_internal_arg_pointer,
			   current_function_arg_offset_rtx,
			   NULL_RTX, 0, OPTAB_LIB_WIDEN);

    case BUILT_IN_CLASSIFY_TYPE:
      if (arglist != 0)
	{
	  tree type = TREE_TYPE (TREE_VALUE (arglist));
	  enum tree_code code = TREE_CODE (type);
	  if (code == VOID_TYPE)
	    return GEN_INT (void_type_class);
	  if (code == INTEGER_TYPE)
	    return GEN_INT (integer_type_class);
	  if (code == CHAR_TYPE)
	    return GEN_INT (char_type_class);
	  if (code == ENUMERAL_TYPE)
	    return GEN_INT (enumeral_type_class);
	  if (code == BOOLEAN_TYPE)
	    return GEN_INT (boolean_type_class);
	  if (code == POINTER_TYPE)
	    return GEN_INT (pointer_type_class);
	  if (code == REFERENCE_TYPE)
	    return GEN_INT (reference_type_class);
	  if (code == OFFSET_TYPE)
	    return GEN_INT (offset_type_class);
	  if (code == REAL_TYPE)
	    return GEN_INT (real_type_class);
	  if (code == COMPLEX_TYPE)
	    return GEN_INT (complex_type_class);
	  if (code == FUNCTION_TYPE)
	    return GEN_INT (function_type_class);
	  if (code == METHOD_TYPE)
	    return GEN_INT (method_type_class);
	  if (code == RECORD_TYPE)
	    return GEN_INT (record_type_class);
	  if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
	    return GEN_INT (union_type_class);
	  if (code == ARRAY_TYPE)
	    {
	      if (TYPE_STRING_FLAG (type))
		return GEN_INT (string_type_class);
	      else
		return GEN_INT (array_type_class);
	    }
	  if (code == SET_TYPE)
	    return GEN_INT (set_type_class);
	  if (code == FILE_TYPE)
	    return GEN_INT (file_type_class);
	  if (code == LANG_TYPE)
	    return GEN_INT (lang_type_class);
	}
      return GEN_INT (no_type_class);

    case BUILT_IN_CONSTANT_P:
      if (arglist == 0)
	return const0_rtx;
      else
	{
	  tree arg = TREE_VALUE (arglist);

	  STRIP_NOPS (arg);
	  return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
		  || (TREE_CODE (arg) == ADDR_EXPR
		      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
		  ? const1_rtx : const0_rtx);
	}
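
      /* Note: at this stage anything that has not already folded to a
	 constant-class node (or the address of a string) yields 0, even
	 if later optimization could prove it constant.  */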
ca695ac9
JB
8298
8299 case BUILT_IN_FRAME_ADDRESS:
8300 /* The argument must be a nonnegative integer constant.
8301 It counts the number of frames to scan up the stack.
8302 The value is the address of that frame. */
8303 case BUILT_IN_RETURN_ADDRESS:
8304 /* The argument must be a nonnegative integer constant.
8305 It counts the number of frames to scan up the stack.
8306 The value is the return address saved in that frame. */
8307 if (arglist == 0)
8308 /* Warning about missing arg was already issued. */
8309 return const0_rtx;
8310 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
8311 {
8312 error ("invalid arg to `__builtin_return_address'");
8313 return const0_rtx;
8314 }
153c149b 8315 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
ca695ac9
JB
8316 {
8317 error ("invalid arg to `__builtin_return_address'");
8318 return const0_rtx;
8319 }
8320 else
8321 {
2bbf216f
RK
8322 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8323 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8324 hard_frame_pointer_rtx);
ca695ac9
JB
8325
8326 /* For __builtin_frame_address, return what we've got. */
8327 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8328 return tem;
8329
2bbf216f
RK
8330 if (GET_CODE (tem) != REG)
8331 tem = copy_to_reg (tem);
8332 return tem;
ca695ac9
JB
8333 }
8334
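      /* A caller-side sketch for the two cases above; the argument
	 counts frames to walk up and must be a nonnegative integer
	 constant.  */
      void *
      return_address_example (void)
      {
        return __builtin_return_address (0);	/* caller's resume address  */
      }

      void *
      frame_address_example (void)
      {
        return __builtin_frame_address (0);	/* this function's frame  */
      }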
8335 case BUILT_IN_ALLOCA:
8336 if (arglist == 0
8337 /* Arg could be non-integer if user redeclared this fcn wrong. */
8338 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7b073ca6 8339 break;
1ee86d15 8340
ca695ac9
JB
8341 /* Compute the argument. */
8342 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8343
8344 /* Allocate the desired space. */
1ee86d15 8345 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
ca695ac9
JB
8346
8347 case BUILT_IN_FFS:
8348 /* If not optimizing, call the library function. */
98aad286 8349 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
ca695ac9
JB
8350 break;
8351
8352 if (arglist == 0
8353 /* Arg could be non-integer if user redeclared this fcn wrong. */
8354 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7b073ca6 8355 break;
ca695ac9
JB
8356
8357 /* Compute the argument. */
8358 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8359 /* Compute ffs, into TARGET if possible.
8360 Set TARGET to wherever the result comes back. */
8361 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8362 ffs_optab, op0, target, 1);
8363 if (target == 0)
8364 abort ();
8365 return target;
8366
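      /* The semantics expanded through ffs_optab above, worked
	 through: ffs returns one plus the index of the least
	 significant set bit, or zero for a zero argument.  */
      int
      ffs_examples (void)
      {
        int a = __builtin_ffs (0);	/* 0: no bit set  */
        int b = __builtin_ffs (1);	/* 1: bit 0 set  */
        int c = __builtin_ffs (8);	/* 4: bit 3 set  */
        return a + b + c;
      }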
8367 case BUILT_IN_STRLEN:
8368 /* If not optimizing, call the library function. */
98aad286 8369 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
ca695ac9
JB
8370 break;
8371
8372 if (arglist == 0
8373 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8374 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
7b073ca6 8375 break;
ca695ac9
JB
8376 else
8377 {
8378 tree src = TREE_VALUE (arglist);
8379 tree len = c_strlen (src);
8380
8381 int align
8382 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8383
8384 rtx result, src_rtx, char_rtx;
8385 enum machine_mode insn_mode = value_mode, char_mode;
8386 enum insn_code icode;
8387
8388 /* If the length is known, just return it. */
8389 if (len != 0)
8390 return expand_expr (len, target, mode, 0);
8391
8392 /* If SRC is not a pointer type, don't do this operation inline. */
8393 if (align == 0)
8394 break;
8395
8396 /* Call a function if we can't compute strlen in the right mode. */
8397
8398 while (insn_mode != VOIDmode)
8399 {
8400 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8401 if (icode != CODE_FOR_nothing)
8402 break;
bbf6f052 8403
ca695ac9
JB
8404 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8405 }
8406 if (insn_mode == VOIDmode)
8407 break;
bbf6f052 8408
ca695ac9
JB
8409 /* Make a place to write the result of the instruction. */
8410 result = target;
8411 if (! (result != 0
8412 && GET_CODE (result) == REG
8413 && GET_MODE (result) == insn_mode
8414 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8415 result = gen_reg_rtx (insn_mode);
bbf6f052 8416
ca695ac9
JB
8417 /* Make sure the operands are acceptable to the predicates. */
8418
8419 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8420 result = gen_reg_rtx (insn_mode);
8421
8422 src_rtx = memory_address (BLKmode,
88f63c77 8423 expand_expr (src, NULL_RTX, ptr_mode,
ca695ac9
JB
8424 EXPAND_NORMAL));
8425 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8426 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8427
8428 char_rtx = const0_rtx;
8429 char_mode = insn_operand_mode[(int)icode][2];
8430 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8431 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8432
8433 emit_insn (GEN_FCN (icode) (result,
8434 gen_rtx (MEM, BLKmode, src_rtx),
8435 char_rtx, GEN_INT (align)));
8436
8437 /* Return the value in the proper mode for this function. */
8438 if (GET_MODE (result) == value_mode)
8439 return result;
8440 else if (target != 0)
8441 {
8442 convert_move (target, result, 0);
8443 return target;
8444 }
8445 else
8446 return convert_to_mode (value_mode, result, 0);
8447 }
8448
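      /* The two fast paths above, from the caller's side: a constant
	 string is folded by c_strlen with no code emitted, and a
	 non-constant argument uses the target's strlen insn when one
	 exists; otherwise the break above falls through to an
	 ordinary library call.  */
      int
      strlen_examples (char *s)
      {
        int k = __builtin_strlen ("hello");	/* folded to the constant 5  */
        int n = __builtin_strlen (s);		/* strlen insn or libcall  */
        return k + n;
      }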
8449 case BUILT_IN_STRCPY:
e87b4f3f 8450 /* If not optimizing, call the library function. */
98aad286 8451 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
e87b4f3f
RS
8452 break;
8453
8454 if (arglist == 0
ca695ac9
JB
8455 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8456 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8457 || TREE_CHAIN (arglist) == 0
8458 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7b073ca6 8459 break;
ca695ac9 8460 else
db0e6d01 8461 {
ca695ac9 8462 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
e7c33f54 8463
ca695ac9
JB
8464 if (len == 0)
8465 break;
e7c33f54 8466
ca695ac9 8467 len = size_binop (PLUS_EXPR, len, integer_one_node);
e7c33f54 8468
ca695ac9 8469 chainon (arglist, build_tree_list (NULL_TREE, len));
1bbddf11
JVA
8470 }
8471
ca695ac9
JB
8472 /* Drops in. */
8473 case BUILT_IN_MEMCPY:
8474 /* If not optimizing, call the library function. */
98aad286 8475 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
ca695ac9 8476 break;
e7c33f54 8477
ca695ac9
JB
8478 if (arglist == 0
8479 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8480 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8481 || TREE_CHAIN (arglist) == 0
8482 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8483 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8484 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7b073ca6 8485 break;
ca695ac9 8486 else
e7c33f54 8487 {
ca695ac9
JB
8488 tree dest = TREE_VALUE (arglist);
8489 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8490 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
e9cf6a97 8491 tree type;
e87b4f3f 8492
ca695ac9
JB
8493 int src_align
8494 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8495 int dest_align
8496 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8497 rtx dest_rtx, dest_mem, src_mem;
60bac6ea 8498
ca695ac9
JB
8499 /* If either SRC or DEST is not a pointer type, don't do
8500 this operation in-line. */
8501 if (src_align == 0 || dest_align == 0)
8502 {
8503 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8504 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8505 break;
8506 }
8507
88f63c77 8508 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
ca695ac9
JB
8509 dest_mem = gen_rtx (MEM, BLKmode,
8510 memory_address (BLKmode, dest_rtx));
e9cf6a97 8511 /* There could be a void* cast on top of the object. */
5480a90c
RK
8512 while (TREE_CODE (dest) == NOP_EXPR)
8513 dest = TREE_OPERAND (dest, 0);
8514 type = TREE_TYPE (TREE_TYPE (dest));
e9cf6a97 8515 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
ca695ac9
JB
8516 src_mem = gen_rtx (MEM, BLKmode,
8517 memory_address (BLKmode,
8518 expand_expr (src, NULL_RTX,
88f63c77
RK
8519 ptr_mode,
8520 EXPAND_SUM)));
e9cf6a97 8521 /* There could be a void* cast on top of the object. */
5480a90c
RK
8522 while (TREE_CODE (src) == NOP_EXPR)
8523 src = TREE_OPERAND (src, 0);
8524 type = TREE_TYPE (TREE_TYPE (src));
e9cf6a97 8525 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
ca695ac9
JB
8526
8527 /* Copy word part most expediently. */
8528 emit_block_move (dest_mem, src_mem,
8529 expand_expr (len, NULL_RTX, VOIDmode, 0),
8530 MIN (src_align, dest_align));
85c53d24 8531 return force_operand (dest_rtx, NULL_RTX);
ca695ac9
JB
8532 }
8533
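      /* A sketch of the transformation above: a strcpy whose source
	 length is known has strlen (src) + 1 appended to its argument
	 list and then drops into the memcpy case, so both end up as a
	 single block move.  */
      void
      copy_examples (char *dst, char *src)
      {
        __builtin_strcpy (dst, "abc");		/* becomes a 4-byte block move  */
        __builtin_memcpy (dst, src, 16);	/* block move of 16 bytes  */
      }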
8534/* These comparison functions need an instruction that returns an actual
8535 index. An ordinary compare that just sets the condition codes
8536 is not enough. */
8537#ifdef HAVE_cmpstrsi
8538 case BUILT_IN_STRCMP:
8539 /* If not optimizing, call the library function. */
98aad286 8540 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
ca695ac9
JB
8541 break;
8542
8543 if (arglist == 0
8544 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8545 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8546 || TREE_CHAIN (arglist) == 0
8547 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7b073ca6 8548 break;
ca695ac9
JB
8549 else if (!HAVE_cmpstrsi)
8550 break;
8551 {
8552 tree arg1 = TREE_VALUE (arglist);
8553 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8554 tree offset;
8555 tree len, len2;
8556
8557 len = c_strlen (arg1);
8558 if (len)
8559 len = size_binop (PLUS_EXPR, integer_one_node, len);
8560 len2 = c_strlen (arg2);
8561 if (len2)
8562 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
8563
8564 /* If we don't have a constant length for the first, use the length
8565 of the second, if we know it. We don't require a constant for
8566 this case; some cost analysis could be done if both are available
8567 but neither is constant. For now, assume they're equally cheap.
8568
8569 If both strings have constant lengths, use the smaller. This
8570	   could arise if optimization results in strcmp being called with
8571 two fixed strings, or if the code was machine-generated. We should
8572 add some code to the `memcmp' handler below to deal with such
8573 situations, someday. */
8574 if (!len || TREE_CODE (len) != INTEGER_CST)
8575 {
8576 if (len2)
8577 len = len2;
8578 else if (len == 0)
8579 break;
8580 }
8581 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8582 {
8583 if (tree_int_cst_lt (len2, len))
8584 len = len2;
8585 }
8586
8587 chainon (arglist, build_tree_list (NULL_TREE, len));
8588 }
8589
8590 /* Drops in. */
8591 case BUILT_IN_MEMCMP:
8592 /* If not optimizing, call the library function. */
98aad286 8593 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
ca695ac9
JB
8594 break;
8595
8596 if (arglist == 0
8597 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8598 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8599 || TREE_CHAIN (arglist) == 0
8600 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8601 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8602 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7b073ca6 8603 break;
ca695ac9
JB
8604 else if (!HAVE_cmpstrsi)
8605 break;
8606 {
8607 tree arg1 = TREE_VALUE (arglist);
8608 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8609 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8610 rtx result;
8611
8612 int arg1_align
8613 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8614 int arg2_align
8615 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8616 enum machine_mode insn_mode
8617 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
60bac6ea 8618
ca695ac9
JB
8619 /* If we don't have POINTER_TYPE, call the function. */
8620 if (arg1_align == 0 || arg2_align == 0)
8621 {
8622 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
8623 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8624 break;
8625 }
60bac6ea 8626
ca695ac9
JB
8627 /* Make a place to write the result of the instruction. */
8628 result = target;
8629 if (! (result != 0
8630 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
8631 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8632 result = gen_reg_rtx (insn_mode);
60bac6ea 8633
ca695ac9
JB
8634 emit_insn (gen_cmpstrsi (result,
8635 gen_rtx (MEM, BLKmode,
88f63c77
RK
8636 expand_expr (arg1, NULL_RTX,
8637 ptr_mode,
ca695ac9
JB
8638 EXPAND_NORMAL)),
8639 gen_rtx (MEM, BLKmode,
88f63c77
RK
8640 expand_expr (arg2, NULL_RTX,
8641 ptr_mode,
ca695ac9
JB
8642 EXPAND_NORMAL)),
8643 expand_expr (len, NULL_RTX, VOIDmode, 0),
8644 GEN_INT (MIN (arg1_align, arg2_align))));
60bac6ea 8645
ca695ac9
JB
8646 /* Return the value in the proper mode for this function. */
8647 mode = TYPE_MODE (TREE_TYPE (exp));
8648 if (GET_MODE (result) == mode)
8649 return result;
8650 else if (target != 0)
8651 {
8652 convert_move (target, result, 0);
8653 return target;
60bac6ea 8654 }
ca695ac9
JB
8655 else
8656 return convert_to_mode (mode, result, 0);
8657 }
60bac6ea 8658#else
ca695ac9
JB
8659 case BUILT_IN_STRCMP:
8660 case BUILT_IN_MEMCMP:
8661 break;
60bac6ea
RS
8662#endif
8663
4ed67205
RK
8664 /* __builtin_setjmp is passed a pointer to an array of five words
8665 (not all will be used on all machines). It operates similarly to
8666 the C library function of the same name, but is more efficient.
8667 Much of the code below (and for longjmp) is copied from the handling
8668 of non-local gotos.
8669
8670 NOTE: This is intended for use by GNAT and will only work in
8671 the method used by it. This code will likely NOT survive to
8672 the GCC 2.8.0 release. */
8673 case BUILT_IN_SETJMP:
8674 if (arglist == 0
8675 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8676 break;
8677
8678 {
8679 rtx buf_addr
0fedef28
RK
8680 = force_reg (Pmode,
8681 convert_modes (Pmode, ptr_mode,
8682 expand_expr (TREE_VALUE (arglist),
8683 subtarget,
8684 VOIDmode, 0),
8685 1));
4ed67205
RK
8686 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
8687 enum machine_mode sa_mode = Pmode;
8688 rtx stack_save;
7565a035
RK
8689 int old_inhibit_defer_pop = inhibit_defer_pop;
8690 int return_pops = RETURN_POPS_ARGS (get_identifier ("__dummy"),
8691 get_identifier ("__dummy"), 0);
8692 rtx next_arg_reg;
8693 CUMULATIVE_ARGS args_so_far;
a8a8cbb7 8694 int i;
4ed67205
RK
8695
8696 if (target == 0 || GET_CODE (target) != REG
8697 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8698 target = gen_reg_rtx (value_mode);
8699
8700 emit_queue ();
8701
0dddb42d 8702 CONST_CALL_P (emit_note (NULL_PTR, NOTE_INSN_SETJMP)) = 1;
4ed67205
RK
8703 current_function_calls_setjmp = 1;
8704
8705 /* We store the frame pointer and the address of lab1 in the buffer
8706 and use the rest of it for the stack save area, which is
8707 machine-dependent. */
8708 emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
8709 virtual_stack_vars_rtx);
8710 emit_move_insn
8711 (validize_mem (gen_rtx (MEM, Pmode,
8712 plus_constant (buf_addr,
8713 GET_MODE_SIZE (Pmode)))),
8714 gen_rtx (LABEL_REF, Pmode, lab1));
8715
8716#ifdef HAVE_save_stack_nonlocal
8717 if (HAVE_save_stack_nonlocal)
8718 sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
8719#endif
8720
8721 stack_save = gen_rtx (MEM, sa_mode,
8722 plus_constant (buf_addr,
8723 2 * GET_MODE_SIZE (Pmode)));
8724 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8725
7565a035
RK
8726#ifdef HAVE_setjmp
8727 if (HAVE_setjmp)
8728 emit_insn (gen_setjmp ());
8729#endif
8730
4ed67205
RK
8731 /* Set TARGET to zero and branch around the other case. */
8732 emit_move_insn (target, const0_rtx);
8733 emit_jump_insn (gen_jump (lab2));
8734 emit_barrier ();
8735 emit_label (lab1);
8736
a8a8cbb7 8737 /* Note that setjmp clobbers FP when we get here, so we have to
0dddb42d 8738 make sure it's marked as used by this function. */
a8a8cbb7
RK
8739 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8740
477efd50
RK
8741 /* Mark the static chain as clobbered here so life information
8742 doesn't get messed up for it. */
8743 emit_insn (gen_rtx (CLOBBER, VOIDmode, static_chain_rtx));
8744
4ed67205
RK
8745 /* Now put in the code to restore the frame pointer, and argument
8746 pointer, if needed. The code below is from expand_end_bindings
8747 in stmt.c; see detailed documentation there. */
8748#ifdef HAVE_nonlocal_goto
8749 if (! HAVE_nonlocal_goto)
8750#endif
8751 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8752
a8a8cbb7
RK
8753 current_function_has_nonlocal_goto = 1;
8754
4ed67205
RK
8755#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8756 if (fixed_regs[ARG_POINTER_REGNUM])
8757 {
8758#ifdef ELIMINABLE_REGS
8759 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
4ed67205
RK
8760
8761 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8762 if (elim_regs[i].from == ARG_POINTER_REGNUM
8763 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8764 break;
8765
8766 if (i == sizeof elim_regs / sizeof elim_regs [0])
8767#endif
8768 {
8769 /* Now restore our arg pointer from the address at which it
8770 was saved in our stack frame.
8771	       If there hasn't been space allocated for it yet, make
8772 some now. */
8773 if (arg_pointer_save_area == 0)
8774 arg_pointer_save_area
8775 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8776 emit_move_insn (virtual_incoming_args_rtx,
8777 copy_to_reg (arg_pointer_save_area));
8778 }
8779 }
8780#endif
8781
7565a035
RK
8782	/* The static chain pointer contains the address of the dummy function.
8783 We need to call it here to handle some PIC cases of restoring
8784 a global pointer. Then return 1. */
8785 op0 = copy_to_mode_reg (Pmode, static_chain_rtx);
8786
8787 /* We can't actually call emit_library_call here, so do everything
8788 it does, which isn't much for a libfunc with no args. */
8789 op0 = memory_address (FUNCTION_MODE, op0);
8790
8791 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
2c7ee1a6 8792 gen_rtx (SYMBOL_REF, Pmode, "__dummy"), 1);
7565a035
RK
8793 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);
8794
8795#ifndef ACCUMULATE_OUTGOING_ARGS
8796#ifdef HAVE_call_pop
8797 if (HAVE_call_pop)
8798 emit_call_insn (gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, op0),
8799 const0_rtx, next_arg_reg,
8800 GEN_INT (return_pops)));
8801 else
8802#endif
8803#endif
8804
8805#ifdef HAVE_call
8806 if (HAVE_call)
8807 emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, op0),
8808 const0_rtx, next_arg_reg, const0_rtx));
4ed67205 8809 else
7565a035
RK
8810#endif
8811 abort ();
4ed67205 8812
7565a035 8813 emit_move_insn (target, const1_rtx);
4ed67205
RK
8814 emit_label (lab2);
8815 return target;
8816 }
8817
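    /* The five-word buffer filled in above, as a layout sketch (word
       slots are Pmode-sized; the save area's true size and mode are
       machine-dependent):

	 buf[0]    frame pointer (virtual_stack_vars_rtx)
	 buf[1]    address of LAB1, the receiver label
	 buf[2..]  stack save area written by emit_stack_save

       A GNAT-style caller looks like this; `buf' is illustrative:  */
    void *buf[5];

    int
    setjmp_example (void)
    {
      if (__builtin_setjmp (buf))
        return 1;	/* reached via __builtin_longjmp  */
      return 0;	/* direct fall-through, target set to 0  */
    }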
8818 /* __builtin_longjmp is passed a pointer to an array of five words
7565a035 8819 and a value, which is a dummy. It's similar to the C library longjmp
4ed67205
RK
8820 function but works with __builtin_setjmp above. */
8821 case BUILT_IN_LONGJMP:
8822 if (arglist == 0 || TREE_CHAIN (arglist) == 0
8823 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8824 break;
8825
8826 {
b089937a
RK
8827 tree dummy_id = get_identifier ("__dummy");
8828 tree dummy_type = build_function_type (void_type_node, NULL_TREE);
8829 tree dummy_decl = build_decl (FUNCTION_DECL, dummy_id, dummy_type);
4ed67205 8830 rtx buf_addr
0fedef28
RK
8831 = force_reg (Pmode,
8832 convert_modes (Pmode, ptr_mode,
8833 expand_expr (TREE_VALUE (arglist),
8834 NULL_RTX,
8835 VOIDmode, 0),
8836 1));
4ed67205
RK
8837 rtx fp = gen_rtx (MEM, Pmode, buf_addr);
8838 rtx lab = gen_rtx (MEM, Pmode,
8839 plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
8840 enum machine_mode sa_mode
8841#ifdef HAVE_save_stack_nonlocal
8842 = (HAVE_save_stack_nonlocal
8843 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
8844 : Pmode);
8845#else
8846 = Pmode;
8847#endif
8848 rtx stack = gen_rtx (MEM, sa_mode,
8849 plus_constant (buf_addr,
8850 2 * GET_MODE_SIZE (Pmode)));
b089937a
RK
8851
8852 DECL_EXTERNAL (dummy_decl) = 1;
8853 TREE_PUBLIC (dummy_decl) = 1;
8854 make_decl_rtl (dummy_decl, NULL_PTR, 1);
7565a035
RK
8855
8856 /* Expand the second expression just for side-effects. */
8857 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
8858 const0_rtx, VOIDmode, 0);
8859
b089937a 8860 assemble_external (dummy_decl);
4ed67205
RK
8861
8862 /* Pick up FP, label, and SP from the block and jump. This code is
8863 from expand_goto in stmt.c; see there for detailed comments. */
8864#if HAVE_nonlocal_goto
8865 if (HAVE_nonlocal_goto)
b089937a
RK
8866 emit_insn (gen_nonlocal_goto (fp, lab, stack,
8867 XEXP (DECL_RTL (dummy_decl), 0)));
4ed67205
RK
8868 else
8869#endif
8870 {
7565a035 8871 lab = copy_to_reg (lab);
4ed67205
RK
8872 emit_move_insn (hard_frame_pointer_rtx, fp);
8873 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
8874
7565a035
RK
8875 /* Put in the static chain register the address of the dummy
8876 function. */
b089937a 8877 emit_move_insn (static_chain_rtx, XEXP (DECL_RTL (dummy_decl), 0));
4ed67205
RK
8878 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8879 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
8880 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
7565a035 8881 emit_indirect_jump (lab);
4ed67205
RK
8882 }
8883
8884 return const0_rtx;
8885 }
8886
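    /* The matching caller-side sketch for the case above; the value
       argument is expanded only for its side effects, and control
       always resumes at __builtin_setjmp's receiver label with the
       value 1.  */
    extern void *buf[5];

    void
    longjmp_example (void)
    {
      __builtin_longjmp (buf, 1);	/* second argument is a dummy  */
    }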
ca695ac9
JB
8887 default: /* just do library call, if unknown builtin */
8888 error ("built-in function `%s' not currently supported",
8889 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
8890 }
e87b4f3f 8891
ca695ac9
JB
8892 /* The switch statement above can drop through to cause the function
8893 to be called normally. */
e7c33f54 8894
ca695ac9
JB
8895 return expand_call (exp, target, ignore);
8896}
8897\f
8898/* Built-in functions to perform an untyped call and return. */
0006469d 8899
ca695ac9
JB
8900/* For each register that may be used for calling a function, this
8901 gives a mode used to copy the register's value. VOIDmode indicates
8902 the register is not used for calling a function. If the machine
8903 has register windows, this gives only the outbound registers.
8904 INCOMING_REGNO gives the corresponding inbound register. */
8905static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
0006469d 8906
ca695ac9
JB
8907/* For each register that may be used for returning values, this gives
8908 a mode used to copy the register's value. VOIDmode indicates the
8909 register is not used for returning values. If the machine has
8910 register windows, this gives only the outbound registers.
8911 INCOMING_REGNO gives the corresponding inbound register. */
8912static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
0006469d 8913
ca695ac9
JB
8914/* For each register that may be used for calling a function, this
8915 gives the offset of that register into the block returned by
9faa82d8 8916 __builtin_apply_args. 0 indicates that the register is not
ca695ac9
JB
8917 used for calling a function. */
8918static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
0006469d 8919
ca695ac9
JB
8920/* Return the offset of register REGNO into the block returned by
8921 __builtin_apply_args. This is not declared static, since it is
8922 needed in objc-act.c. */
0006469d 8923
ca695ac9
JB
8924int
8925apply_args_register_offset (regno)
8926 int regno;
8927{
8928 apply_args_size ();
0006469d 8929
ca695ac9
JB
8930 /* Arguments are always put in outgoing registers (in the argument
8931     block) when that makes sense. */
8932#ifdef OUTGOING_REGNO
8933 regno = OUTGOING_REGNO(regno);
8934#endif
8935 return apply_args_reg_offset[regno];
8936}
0006469d 8937
ca695ac9
JB
8938/* Return the size required for the block returned by __builtin_apply_args,
8939 and initialize apply_args_mode. */
0006469d 8940
ca695ac9
JB
8941static int
8942apply_args_size ()
8943{
8944 static int size = -1;
8945 int align, regno;
8946 enum machine_mode mode;
bbf6f052 8947
ca695ac9
JB
8948 /* The values computed by this function never change. */
8949 if (size < 0)
8950 {
8951 /* The first value is the incoming arg-pointer. */
8952 size = GET_MODE_SIZE (Pmode);
bbf6f052 8953
ca695ac9
JB
8954 /* The second value is the structure value address unless this is
8955 passed as an "invisible" first argument. */
8956 if (struct_value_rtx)
8957 size += GET_MODE_SIZE (Pmode);
8958
8959 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8960 if (FUNCTION_ARG_REGNO_P (regno))
bbf6f052 8961 {
ca695ac9
JB
8962 /* Search for the proper mode for copying this register's
8963 value. I'm not sure this is right, but it works so far. */
8964 enum machine_mode best_mode = VOIDmode;
8965
8966 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8967 mode != VOIDmode;
8968 mode = GET_MODE_WIDER_MODE (mode))
8969 if (HARD_REGNO_MODE_OK (regno, mode)
8970 && HARD_REGNO_NREGS (regno, mode) == 1)
8971 best_mode = mode;
8972
8973 if (best_mode == VOIDmode)
8974 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
8975 mode != VOIDmode;
8976 mode = GET_MODE_WIDER_MODE (mode))
8977 if (HARD_REGNO_MODE_OK (regno, mode)
8978 && (mov_optab->handlers[(int) mode].insn_code
8979 != CODE_FOR_nothing))
8980 best_mode = mode;
8981
8982 mode = best_mode;
8983 if (mode == VOIDmode)
8984 abort ();
8985
8986 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8987 if (size % align != 0)
8988 size = CEIL (size, align) * align;
8989 apply_args_reg_offset[regno] = size;
8990 size += GET_MODE_SIZE (mode);
8991 apply_args_mode[regno] = mode;
8992 }
8993 else
8994 {
8995 apply_args_mode[regno] = VOIDmode;
8996 apply_args_reg_offset[regno] = 0;
bbf6f052 8997 }
ca695ac9
JB
8998 }
8999 return size;
9000}
bbf6f052 9001
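/* The rounding used in these size computations, worked through once:
   with CEIL (x, y) = ((x) + (y) - 1) / (y), a running size of 10 and
   a register mode with 8-byte alignment gives

     size = CEIL (10, 8) * 8 = 2 * 8 = 16,

   so that register's slot starts at offset 16, and size then advances
   by GET_MODE_SIZE (mode).  */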
ca695ac9
JB
9002/* Return the size required for the block returned by __builtin_apply,
9003 and initialize apply_result_mode. */
bbf6f052 9004
ca695ac9
JB
9005static int
9006apply_result_size ()
9007{
9008 static int size = -1;
9009 int align, regno;
9010 enum machine_mode mode;
bbf6f052 9011
ca695ac9
JB
9012 /* The values computed by this function never change. */
9013 if (size < 0)
9014 {
9015 size = 0;
bbf6f052 9016
ca695ac9
JB
9017 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9018 if (FUNCTION_VALUE_REGNO_P (regno))
9019 {
9020 /* Search for the proper mode for copying this register's
9021 value. I'm not sure this is right, but it works so far. */
9022 enum machine_mode best_mode = VOIDmode;
bbf6f052 9023
ca695ac9
JB
9024 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9025 mode != TImode;
9026 mode = GET_MODE_WIDER_MODE (mode))
9027 if (HARD_REGNO_MODE_OK (regno, mode))
9028 best_mode = mode;
bbf6f052 9029
ca695ac9
JB
9030 if (best_mode == VOIDmode)
9031 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9032 mode != VOIDmode;
9033 mode = GET_MODE_WIDER_MODE (mode))
9034 if (HARD_REGNO_MODE_OK (regno, mode)
9035 && (mov_optab->handlers[(int) mode].insn_code
9036 != CODE_FOR_nothing))
9037 best_mode = mode;
bbf6f052 9038
ca695ac9
JB
9039 mode = best_mode;
9040 if (mode == VOIDmode)
9041 abort ();
bbf6f052 9042
ca695ac9
JB
9043 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9044 if (size % align != 0)
9045 size = CEIL (size, align) * align;
9046 size += GET_MODE_SIZE (mode);
9047 apply_result_mode[regno] = mode;
bbf6f052
RK
9048 }
9049 else
ca695ac9 9050 apply_result_mode[regno] = VOIDmode;
bbf6f052 9051
ca695ac9
JB
9052 /* Allow targets that use untyped_call and untyped_return to override
9053 the size so that machine-specific information can be stored here. */
9054#ifdef APPLY_RESULT_SIZE
9055 size = APPLY_RESULT_SIZE;
9056#endif
9057 }
9058 return size;
9059}
bbf6f052 9060
ca695ac9
JB
9061#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9062/* Create a vector describing the result block RESULT. If SAVEP is true,
9063 the result block is used to save the values; otherwise it is used to
9064 restore the values. */
bbf6f052 9065
ca695ac9
JB
9066static rtx
9067result_vector (savep, result)
9068 int savep;
9069 rtx result;
9070{
9071 int regno, size, align, nelts;
9072 enum machine_mode mode;
9073 rtx reg, mem;
9074 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9075
9076 size = nelts = 0;
9077 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9078 if ((mode = apply_result_mode[regno]) != VOIDmode)
9079 {
9080 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9081 if (size % align != 0)
9082 size = CEIL (size, align) * align;
18992995 9083 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
ca695ac9
JB
9084 mem = change_address (result, mode,
9085 plus_constant (XEXP (result, 0), size));
9086 savevec[nelts++] = (savep
9087 ? gen_rtx (SET, VOIDmode, mem, reg)
9088 : gen_rtx (SET, VOIDmode, reg, mem));
9089 size += GET_MODE_SIZE (mode);
bbf6f052 9090 }
ca695ac9
JB
9091 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
9092}
9093#endif /* HAVE_untyped_call or HAVE_untyped_return */
bbf6f052 9094
ca695ac9
JB
9095/* Save the state required to perform an untyped call with the same
9096 arguments as were passed to the current function. */
9097
9098static rtx
9099expand_builtin_apply_args ()
9100{
9101 rtx registers;
9102 int size, align, regno;
9103 enum machine_mode mode;
9104
9105 /* Create a block where the arg-pointer, structure value address,
9106 and argument registers can be saved. */
9107 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9108
9109 /* Walk past the arg-pointer and structure value address. */
9110 size = GET_MODE_SIZE (Pmode);
9111 if (struct_value_rtx)
9112 size += GET_MODE_SIZE (Pmode);
9113
c816db88
RK
9114 /* Save each register used in calling a function to the block. */
9115 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
ca695ac9 9116 if ((mode = apply_args_mode[regno]) != VOIDmode)
bbf6f052 9117 {
ee33823f
RK
9118 rtx tem;
9119
ca695ac9
JB
9120 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9121 if (size % align != 0)
9122 size = CEIL (size, align) * align;
ee33823f
RK
9123
9124 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9125
9126#ifdef STACK_REGS
9127 /* For reg-stack.c's stack register household.
9128 Compare with a similar piece of code in function.c. */
9129
9130 emit_insn (gen_rtx (USE, mode, tem));
9131#endif
9132
ca695ac9
JB
9133 emit_move_insn (change_address (registers, mode,
9134 plus_constant (XEXP (registers, 0),
9135 size)),
ee33823f 9136 tem);
ca695ac9 9137 size += GET_MODE_SIZE (mode);
bbf6f052
RK
9138 }
9139
ca695ac9
JB
9140 /* Save the arg pointer to the block. */
9141 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9142 copy_to_reg (virtual_incoming_args_rtx));
9143 size = GET_MODE_SIZE (Pmode);
bbf6f052 9144
ca695ac9
JB
9145 /* Save the structure value address unless this is passed as an
9146 "invisible" first argument. */
9147 if (struct_value_incoming_rtx)
9148 {
9149 emit_move_insn (change_address (registers, Pmode,
9150 plus_constant (XEXP (registers, 0),
9151 size)),
9152 copy_to_reg (struct_value_incoming_rtx));
9153 size += GET_MODE_SIZE (Pmode);
9154 }
9155
9156 /* Return the address of the block. */
9157 return copy_addr_to_reg (XEXP (registers, 0));
9158}
9159
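/* The block built above, as a layout sketch:

     offset 0                   incoming arg pointer (Pmode)
     + GET_MODE_SIZE (Pmode)    structure value address, unless it is
                                passed as an "invisible" first argument
     then, per argument reg     the register's value, aligned to its
                                saved mode, at the offset recorded in
                                apply_args_reg_offset[]  */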
9160/* Perform an untyped call and save the state required to perform an
9161 untyped return of whatever value was returned by the given function. */
9162
9163static rtx
9164expand_builtin_apply (function, arguments, argsize)
9165 rtx function, arguments, argsize;
9166{
9167 int size, align, regno;
9168 enum machine_mode mode;
9169 rtx incoming_args, result, reg, dest, call_insn;
9170 rtx old_stack_level = 0;
b3f8cf4a 9171 rtx call_fusage = 0;
bbf6f052 9172
ca695ac9
JB
9173 /* Create a block where the return registers can be saved. */
9174 result = assign_stack_local (BLKmode, apply_result_size (), -1);
bbf6f052 9175
ca695ac9 9176 /* ??? The argsize value should be adjusted here. */
bbf6f052 9177
ca695ac9
JB
9178 /* Fetch the arg pointer from the ARGUMENTS block. */
9179 incoming_args = gen_reg_rtx (Pmode);
9180 emit_move_insn (incoming_args,
9181 gen_rtx (MEM, Pmode, arguments));
9182#ifndef STACK_GROWS_DOWNWARD
9183 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9184 incoming_args, 0, OPTAB_LIB_WIDEN);
46b68a37
JW
9185#endif
9186
ca695ac9
JB
9187 /* Perform postincrements before actually calling the function. */
9188 emit_queue ();
46b68a37 9189
ca695ac9
JB
9190 /* Push a new argument block and copy the arguments. */
9191 do_pending_stack_adjust ();
9192 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
bbf6f052 9193
ca695ac9
JB
9194 /* Push a block of memory onto the stack to store the memory arguments.
9195 Save the address in a register, and copy the memory arguments. ??? I
9196     haven't figured out how the calling convention macros affect this,
9197 but it's likely that the source and/or destination addresses in
9198 the block copy will need updating in machine specific ways. */
9199 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
9200 emit_block_move (gen_rtx (MEM, BLKmode, dest),
9201 gen_rtx (MEM, BLKmode, incoming_args),
9202 argsize,
9203 PARM_BOUNDARY / BITS_PER_UNIT);
bbf6f052 9204
ca695ac9
JB
9205 /* Refer to the argument block. */
9206 apply_args_size ();
9207 arguments = gen_rtx (MEM, BLKmode, arguments);
9208
9209 /* Walk past the arg-pointer and structure value address. */
9210 size = GET_MODE_SIZE (Pmode);
9211 if (struct_value_rtx)
9212 size += GET_MODE_SIZE (Pmode);
9213
9214 /* Restore each of the registers previously saved. Make USE insns
c816db88
RK
9215 for each of these registers for use in making the call. */
9216 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
ca695ac9
JB
9217 if ((mode = apply_args_mode[regno]) != VOIDmode)
9218 {
9219 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9220 if (size % align != 0)
9221 size = CEIL (size, align) * align;
9222 reg = gen_rtx (REG, mode, regno);
9223 emit_move_insn (reg,
9224 change_address (arguments, mode,
9225 plus_constant (XEXP (arguments, 0),
9226 size)));
9227
b3f8cf4a 9228 use_reg (&call_fusage, reg);
ca695ac9
JB
9229 size += GET_MODE_SIZE (mode);
9230 }
9231
9232 /* Restore the structure value address unless this is passed as an
9233 "invisible" first argument. */
9234 size = GET_MODE_SIZE (Pmode);
9235 if (struct_value_rtx)
9236 {
9237 rtx value = gen_reg_rtx (Pmode);
9238 emit_move_insn (value,
9239 change_address (arguments, Pmode,
9240 plus_constant (XEXP (arguments, 0),
9241 size)));
9242 emit_move_insn (struct_value_rtx, value);
9243 if (GET_CODE (struct_value_rtx) == REG)
b3f8cf4a 9244 use_reg (&call_fusage, struct_value_rtx);
ca695ac9
JB
9245 size += GET_MODE_SIZE (Pmode);
9246 }
bbf6f052 9247
ca695ac9 9248 /* All arguments and registers used for the call are set up by now! */
b3f8cf4a 9249 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
bbf6f052 9250
ca695ac9
JB
9251 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9252 and we don't want to load it into a register as an optimization,
9253 because prepare_call_address already did it if it should be done. */
9254 if (GET_CODE (function) != SYMBOL_REF)
9255 function = memory_address (FUNCTION_MODE, function);
bbf6f052 9256
ca695ac9
JB
9257 /* Generate the actual call instruction and save the return value. */
9258#ifdef HAVE_untyped_call
9259 if (HAVE_untyped_call)
9260 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
9261 result, result_vector (1, result)));
9262 else
9263#endif
9264#ifdef HAVE_call_value
9265 if (HAVE_call_value)
9266 {
9267 rtx valreg = 0;
bbf6f052 9268
ca695ac9
JB
9269 /* Locate the unique return register. It is not possible to
9270 express a call that sets more than one return register using
9271 call_value; use untyped_call for that. In fact, untyped_call
9272 only needs to save the return registers in the given block. */
9273 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9274 if ((mode = apply_result_mode[regno]) != VOIDmode)
9275 {
9276 if (valreg)
9277 abort (); /* HAVE_untyped_call required. */
9278 valreg = gen_rtx (REG, mode, regno);
9279 }
bbf6f052 9280
ca695ac9
JB
9281 emit_call_insn (gen_call_value (valreg,
9282 gen_rtx (MEM, FUNCTION_MODE, function),
9283 const0_rtx, NULL_RTX, const0_rtx));
bbf6f052 9284
ca695ac9
JB
9285 emit_move_insn (change_address (result, GET_MODE (valreg),
9286 XEXP (result, 0)),
9287 valreg);
9288 }
9289 else
9290#endif
9291 abort ();
bbf6f052 9292
b3f8cf4a 9293 /* Find the CALL insn we just emitted. */
ca695ac9
JB
9294 for (call_insn = get_last_insn ();
9295 call_insn && GET_CODE (call_insn) != CALL_INSN;
9296 call_insn = PREV_INSN (call_insn))
9297 ;
bbf6f052 9298
ca695ac9
JB
9299 if (! call_insn)
9300 abort ();
bbf6f052 9301
6d100794
RK
9302 /* Put the register usage information on the CALL. If there is already
9303 some usage information, put ours at the end. */
9304 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9305 {
9306 rtx link;
9307
9308 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9309 link = XEXP (link, 1))
9310 ;
9311
9312 XEXP (link, 1) = call_fusage;
9313 }
9314 else
9315 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
e7c33f54 9316
ca695ac9
JB
9317 /* Restore the stack. */
9318 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
e7c33f54 9319
ca695ac9
JB
9320 /* Return the address of the result block. */
9321 return copy_addr_to_reg (XEXP (result, 0));
9322}
e7c33f54 9323
ca695ac9 9324/* Perform an untyped return. */
e7c33f54 9325
ca695ac9
JB
9326static void
9327expand_builtin_return (result)
9328 rtx result;
9329{
9330 int size, align, regno;
9331 enum machine_mode mode;
9332 rtx reg;
b3f8cf4a 9333 rtx call_fusage = 0;
e7c33f54 9334
ca695ac9
JB
9335 apply_result_size ();
9336 result = gen_rtx (MEM, BLKmode, result);
e7c33f54 9337
ca695ac9
JB
9338#ifdef HAVE_untyped_return
9339 if (HAVE_untyped_return)
9340 {
9341 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9342 emit_barrier ();
9343 return;
9344 }
9345#endif
e7c33f54 9346
ca695ac9
JB
9347 /* Restore the return value and note that each value is used. */
9348 size = 0;
9349 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9350 if ((mode = apply_result_mode[regno]) != VOIDmode)
9351 {
9352 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9353 if (size % align != 0)
9354 size = CEIL (size, align) * align;
9355 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9356 emit_move_insn (reg,
9357 change_address (result, mode,
9358 plus_constant (XEXP (result, 0),
9359 size)));
e7c33f54 9360
b3f8cf4a 9361 push_to_sequence (call_fusage);
ca695ac9 9362 emit_insn (gen_rtx (USE, VOIDmode, reg));
b3f8cf4a 9363 call_fusage = get_insns ();
ca695ac9
JB
9364 end_sequence ();
9365 size += GET_MODE_SIZE (mode);
9366 }
e7c33f54 9367
ca695ac9 9368 /* Put the USE insns before the return. */
b3f8cf4a 9369 emit_insns (call_fusage);
e7c33f54 9370
ca695ac9
JB
9371  /* Return whatever value was restored by jumping directly to the end
9372 of the function. */
9373 expand_null_return ();
9374}
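
/* A usage sketch tying the three builtins above together: a wrapper
   re-invokes a callee with the wrapper's own incoming arguments and
   returns whatever the callee returned, knowing neither signature.
   The argument-block size (64 here) is supplied by the caller, and
   `work' is illustrative.  */
extern double work ();

void
forward ()
{
  void *args = __builtin_apply_args ();
  void *result = __builtin_apply ((void (*) ()) work, args, 64);
  __builtin_return (result);
}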
9375\f
9376/* Expand code for a post- or pre- increment or decrement
9377 and return the RTX for the result.
9378 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
e7c33f54 9379
ca695ac9 9380static rtx
7b8b9722 9381expand_increment (exp, post, ignore)
ca695ac9 9382 register tree exp;
7b8b9722 9383 int post, ignore;
ca695ac9
JB
9384{
9385 register rtx op0, op1;
9386 register rtx temp, value;
9387 register tree incremented = TREE_OPERAND (exp, 0);
9388 optab this_optab = add_optab;
9389 int icode;
9390 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9391 int op0_is_copy = 0;
9392 int single_insn = 0;
a97f5a86
RS
9393 /* 1 means we can't store into OP0 directly,
9394 because it is a subreg narrower than a word,
9395 and we don't dare clobber the rest of the word. */
9396 int bad_subreg = 0;
e7c33f54 9397
ca695ac9 9398 if (output_bytecode)
c02bd5d9
JB
9399 {
9400 bc_expand_expr (exp);
9401 return NULL_RTX;
9402 }
e7c33f54 9403
ca695ac9
JB
9404 /* Stabilize any component ref that might need to be
9405 evaluated more than once below. */
9406 if (!post
9407 || TREE_CODE (incremented) == BIT_FIELD_REF
9408 || (TREE_CODE (incremented) == COMPONENT_REF
9409 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9410 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9411 incremented = stabilize_reference (incremented);
9412 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9413 ones into save exprs so that they don't accidentally get evaluated
9414 more than once by the code below. */
9415 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9416 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9417 incremented = save_expr (incremented);
bbf6f052 9418
ca695ac9
JB
9419 /* Compute the operands as RTX.
9420 Note whether OP0 is the actual lvalue or a copy of it:
9421 I believe it is a copy iff it is a register or subreg
9422 and insns were generated in computing it. */
bbf6f052 9423
ca695ac9
JB
9424 temp = get_last_insn ();
9425 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
bbf6f052 9426
ca695ac9 9427 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9faa82d8 9428 in place but instead must do sign- or zero-extension during assignment,
ca695ac9
JB
9429 so we copy it into a new register and let the code below use it as
9430 a copy.
bbf6f052 9431
ca695ac9
JB
9432     Note that we can safely modify this SUBREG since it is known not to be
9433 shared (it was made by the expand_expr call above). */
bbf6f052 9434
ca695ac9 9435 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
3e073e72
RK
9436 {
9437 if (post)
9438 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9439 else
9440 bad_subreg = 1;
9441 }
a97f5a86
RS
9442 else if (GET_CODE (op0) == SUBREG
9443 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
79777b79
RK
9444 {
9445 /* We cannot increment this SUBREG in place. If we are
9446 post-incrementing, get a copy of the old value. Otherwise,
9447 just mark that we cannot increment in place. */
9448 if (post)
9449 op0 = copy_to_reg (op0);
9450 else
9451 bad_subreg = 1;
9452 }
bbf6f052 9453
ca695ac9
JB
9454 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9455 && temp != get_last_insn ());
9456 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052 9457
ca695ac9
JB
9458 /* Decide whether incrementing or decrementing. */
9459 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9460 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9461 this_optab = sub_optab;
bbf6f052 9462
ca695ac9
JB
9463 /* Convert decrement by a constant into a negative increment. */
9464 if (this_optab == sub_optab
9465 && GET_CODE (op1) == CONST_INT)
9466 {
9467 op1 = GEN_INT (- INTVAL (op1));
9468 this_optab = add_optab;
9469 }
bbf6f052 9470
ca695ac9
JB
9471 /* For a preincrement, see if we can do this with a single instruction. */
9472 if (!post)
9473 {
9474 icode = (int) this_optab->handlers[(int) mode].insn_code;
9475 if (icode != (int) CODE_FOR_nothing
9476 /* Make sure that OP0 is valid for operands 0 and 1
9477 of the insn we want to queue. */
9478 && (*insn_operand_predicate[icode][0]) (op0, mode)
9479 && (*insn_operand_predicate[icode][1]) (op0, mode)
9480 && (*insn_operand_predicate[icode][2]) (op1, mode))
9481 single_insn = 1;
9482 }
bbf6f052 9483
ca695ac9
JB
9484 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9485 then we cannot just increment OP0. We must therefore contrive to
9486 increment the original value. Then, for postincrement, we can return
9487 OP0 since it is a copy of the old value. For preincrement, expand here
a97f5a86
RS
9488 unless we can do it with a single insn.
9489
9490 Likewise if storing directly into OP0 would clobber high bits
9491 we need to preserve (bad_subreg). */
9492 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
ca695ac9
JB
9493 {
9494 /* This is the easiest way to increment the value wherever it is.
9495 Problems with multiple evaluation of INCREMENTED are prevented
9496 because either (1) it is a component_ref or preincrement,
9497 in which case it was stabilized above, or (2) it is an array_ref
9498 with constant index in an array in a register, which is
9499 safe to reevaluate. */
9500 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9501 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9502 ? MINUS_EXPR : PLUS_EXPR),
9503 TREE_TYPE (exp),
9504 incremented,
9505 TREE_OPERAND (exp, 1));
e9cdf6e4
RK
9506
9507 while (TREE_CODE (incremented) == NOP_EXPR
9508 || TREE_CODE (incremented) == CONVERT_EXPR)
9509 {
9510 newexp = convert (TREE_TYPE (incremented), newexp);
9511 incremented = TREE_OPERAND (incremented, 0);
9512 }
9513
7b8b9722 9514 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
ca695ac9
JB
9515 return post ? op0 : temp;
9516 }
bbf6f052 9517
ca695ac9
JB
9518 if (post)
9519 {
9520 /* We have a true reference to the value in OP0.
9521 If there is an insn to add or subtract in this mode, queue it.
9522 Queueing the increment insn avoids the register shuffling
9523 that often results if we must increment now and first save
9524 the old value for subsequent use. */
bbf6f052 9525
ca695ac9
JB
9526#if 0 /* Turned off to avoid making extra insn for indexed memref. */
9527 op0 = stabilize (op0);
9528#endif
bbf6f052 9529
ca695ac9
JB
9530 icode = (int) this_optab->handlers[(int) mode].insn_code;
9531 if (icode != (int) CODE_FOR_nothing
9532 /* Make sure that OP0 is valid for operands 0 and 1
9533 of the insn we want to queue. */
9534 && (*insn_operand_predicate[icode][0]) (op0, mode)
9535 && (*insn_operand_predicate[icode][1]) (op0, mode))
9536 {
9537 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9538 op1 = force_reg (mode, op1);
bbf6f052 9539
ca695ac9
JB
9540 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9541 }
9542 }
bbf6f052 9543
ca695ac9
JB
9544 /* Preincrement, or we can't increment with one simple insn. */
9545 if (post)
9546 /* Save a copy of the value before inc or dec, to return it later. */
9547 temp = value = copy_to_reg (op0);
9548 else
9549 /* Arrange to return the incremented value. */
9550 /* Copy the rtx because expand_binop will protect from the queue,
9551 and the results of that would be invalid for us to return
9552 if our caller does emit_queue before using our result. */
9553 temp = copy_rtx (value = op0);
bbf6f052 9554
ca695ac9
JB
9555 /* Increment however we can. */
9556 op1 = expand_binop (mode, this_optab, value, op1, op0,
9557 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9558 /* Make sure the value is stored into OP0. */
9559 if (op1 != op0)
9560 emit_move_insn (op0, op1);
bbf6f052 9561
ca695ac9
JB
9562 return temp;
9563}
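
/* The distinction the code above preserves, reduced to source level:
   a postincrement must yield the old value, so when no single insn
   can be queued the expander copies the operand first.  */
int
increment_examples (int *p)
{
  int a = (*p)++;	/* temp = *p; *p = temp + 1; result is temp  */
  int b = ++*p;		/* *p = *p + 1; result is the new *p  */
  return a + b;
}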
9564\f
9565/* Expand all function calls contained within EXP, innermost ones first.
9566 But don't look within expressions that have sequence points.
9567 For each CALL_EXPR, record the rtx for its value
9568 in the CALL_EXPR_RTL field. */
bbf6f052 9569
ca695ac9
JB
9570static void
9571preexpand_calls (exp)
9572 tree exp;
9573{
9574 register int nops, i;
9575 int type = TREE_CODE_CLASS (TREE_CODE (exp));
bbf6f052 9576
ca695ac9
JB
9577 if (! do_preexpand_calls)
9578 return;
bbf6f052 9579
ca695ac9 9580 /* Only expressions and references can contain calls. */
bbf6f052 9581
ca695ac9
JB
9582 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9583 return;
bbf6f052 9584
ca695ac9
JB
9585 switch (TREE_CODE (exp))
9586 {
9587 case CALL_EXPR:
9588 /* Do nothing if already expanded. */
43198be7
RK
9589 if (CALL_EXPR_RTL (exp) != 0
9590 /* Do nothing if the call returns a variable-sized object. */
9591	  || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
9592 /* Do nothing to built-in functions. */
9593 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9594 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9595 == FUNCTION_DECL)
9596 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
ca695ac9 9597 return;
bbf6f052 9598
43198be7 9599 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
ca695ac9 9600 return;
bbf6f052 9601
ca695ac9
JB
9602 case COMPOUND_EXPR:
9603 case COND_EXPR:
9604 case TRUTH_ANDIF_EXPR:
9605 case TRUTH_ORIF_EXPR:
9606 /* If we find one of these, then we can be sure
9607 the adjust will be done for it (since it makes jumps).
9608 Do it now, so that if this is inside an argument
9609 of a function, we don't get the stack adjustment
9610 after some other args have already been pushed. */
9611 do_pending_stack_adjust ();
9612 return;
bbf6f052 9613
ca695ac9
JB
9614 case BLOCK:
9615 case RTL_EXPR:
9616 case WITH_CLEANUP_EXPR:
402c7311 9617 case CLEANUP_POINT_EXPR:
ca695ac9 9618 return;
bbf6f052 9619
ca695ac9
JB
9620 case SAVE_EXPR:
9621 if (SAVE_EXPR_RTL (exp) != 0)
9622 return;
9623 }
bbf6f052 9624
ca695ac9
JB
9625 nops = tree_code_length[(int) TREE_CODE (exp)];
9626 for (i = 0; i < nops; i++)
9627 if (TREE_OPERAND (exp, i) != 0)
9628 {
9629 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9630 if (type == 'e' || type == '<' || type == '1' || type == '2'
9631 || type == 'r')
9632 preexpand_calls (TREE_OPERAND (exp, i));
9633 }
bbf6f052
RK
9634}
9635\f
ca695ac9
JB
9636/* At the start of a function, record that we have no previously-pushed
9637 arguments waiting to be popped. */
0006469d 9638
ca695ac9
JB
9639void
9640init_pending_stack_adjust ()
9641{
9642 pending_stack_adjust = 0;
9643}
fb2ca25a 9644
ca695ac9
JB
9645/* When exiting from function, if safe, clear out any pending stack adjust
9646 so the adjustment won't get done. */
904762c8 9647
ca695ac9
JB
9648void
9649clear_pending_stack_adjust ()
fb2ca25a 9650{
ca695ac9 9651#ifdef EXIT_IGNORE_STACK
b7c2e1e2
RK
9652 if (optimize > 0
9653 && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
ca695ac9
JB
9654 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9655 && ! flag_inline_functions)
9656 pending_stack_adjust = 0;
fb2ca25a 9657#endif
fb2ca25a
KKT
9658}
9659
ca695ac9
JB
9660/* Pop any previously-pushed arguments that have not been popped yet. */
9661
9662void
9663do_pending_stack_adjust ()
9664{
9665 if (inhibit_defer_pop == 0)
9666 {
9667 if (pending_stack_adjust != 0)
9668 adjust_stack (GEN_INT (pending_stack_adjust));
9669 pending_stack_adjust = 0;
9670 }
9671}
9672
5dab5552
MS
9673/* Defer the expansion of all cleanups up to OLD_CLEANUPS.
9674 Returns the cleanups to be performed. */
9675
9676static tree
9677defer_cleanups_to (old_cleanups)
9678 tree old_cleanups;
9679{
9680 tree new_cleanups = NULL_TREE;
9681 tree cleanups = cleanups_this_call;
9682 tree last = NULL_TREE;
9683
9684 while (cleanups_this_call != old_cleanups)
9685 {
61d6b1cc 9686 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
4ea8537b 9687 last = cleanups_this_call;
5dab5552
MS
9688 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9689 }
9690
9691 if (last)
9692 {
9693 /* Remove the list from the chain of cleanups. */
9694 TREE_CHAIN (last) = NULL_TREE;
9695
9696       /* Reverse them so that we can build them in the right order. */
9697 cleanups = nreverse (cleanups);
9698
9ba73d38
MS
9699 /* All cleanups must be on the function_obstack. */
9700 push_obstacks_nochange ();
9701 resume_temporary_allocation ();
9702
5dab5552
MS
9703 while (cleanups)
9704 {
9705 if (new_cleanups)
9706 new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
9707 TREE_VALUE (cleanups), new_cleanups);
9708 else
9709 new_cleanups = TREE_VALUE (cleanups);
9710
9711 cleanups = TREE_CHAIN (cleanups);
9712 }
9ba73d38
MS
9713
9714 pop_obstacks ();
5dab5552
MS
9715 }
9716
9717 return new_cleanups;
9718}
9719
ca695ac9
JB
9720/* Expand all cleanups up to OLD_CLEANUPS.
9721 Needed here, and also for language-dependent calls. */
904762c8 9722
ca695ac9
JB
9723void
9724expand_cleanups_to (old_cleanups)
9725 tree old_cleanups;
0006469d 9726{
ca695ac9 9727 while (cleanups_this_call != old_cleanups)
0006469d 9728 {
61d6b1cc 9729 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
d3158f1a 9730 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
ca695ac9
JB
9731 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9732 }
9733}
9734\f
9735/* Expand conditional expressions. */
0006469d 9736
ca695ac9
JB
9737/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9738 LABEL is an rtx of code CODE_LABEL, in this function and all the
9739 functions here. */
0006469d 9740
ca695ac9
JB
9741void
9742jumpifnot (exp, label)
9743 tree exp;
9744 rtx label;
9745{
9746 do_jump (exp, label, NULL_RTX);
9747}
0006469d 9748
ca695ac9 9749/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
0006469d 9750
ca695ac9
JB
9751void
9752jumpif (exp, label)
9753 tree exp;
9754 rtx label;
9755{
9756 do_jump (exp, NULL_RTX, label);
9757}
0006469d 9758
ca695ac9
JB
9759/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9760 the result is zero, or IF_TRUE_LABEL if the result is one.
9761 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9762 meaning fall through in that case.
0006469d 9763
ca695ac9
JB
9764 do_jump always does any pending stack adjust except when it does not
9765 actually perform a jump. An example where there is no jump
9766 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
0006469d 9767
ca695ac9
JB
9768 This function is responsible for optimizing cases such as
9769 &&, || and comparison operators in EXP. */
904762c8 9770
ca695ac9
JB
9771void
9772do_jump (exp, if_false_label, if_true_label)
9773 tree exp;
9774 rtx if_false_label, if_true_label;
0006469d 9775{
ca695ac9
JB
9776 register enum tree_code code = TREE_CODE (exp);
9777 /* Some cases need to create a label to jump to
9778 in order to properly fall through.
9779 These cases set DROP_THROUGH_LABEL nonzero. */
9780 rtx drop_through_label = 0;
9781 rtx temp;
9782 rtx comparison = 0;
9783 int i;
9784 tree type;
2f6e6d22 9785 enum machine_mode mode;
0006469d 9786
ca695ac9 9787 emit_queue ();
0006469d 9788
ca695ac9
JB
9789 switch (code)
9790 {
9791 case ERROR_MARK:
9792 break;
0006469d 9793
ca695ac9
JB
9794 case INTEGER_CST:
9795 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9796 if (temp)
9797 emit_jump (temp);
9798 break;
0006469d 9799
ca695ac9
JB
9800#if 0
9801 /* This is not true with #pragma weak */
9802 case ADDR_EXPR:
9803 /* The address of something can never be zero. */
9804 if (if_true_label)
9805 emit_jump (if_true_label);
9806 break;
9807#endif
0006469d 9808
ca695ac9
JB
9809 case NOP_EXPR:
9810 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9811 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9812 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9813 goto normal;
9814 case CONVERT_EXPR:
9815 /* If we are narrowing the operand, we have to do the compare in the
9816 narrower mode. */
9817 if ((TYPE_PRECISION (TREE_TYPE (exp))
9818 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9819 goto normal;
9820 case NON_LVALUE_EXPR:
9821 case REFERENCE_EXPR:
9822 case ABS_EXPR:
9823 case NEGATE_EXPR:
9824 case LROTATE_EXPR:
9825 case RROTATE_EXPR:
9826 /* These cannot change zero->non-zero or vice versa. */
9827 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9828 break;
0006469d 9829
ca695ac9
JB
9830#if 0
9831  /* This never takes fewer insns than evaluating the PLUS_EXPR followed by
9832 a test and can be longer if the test is eliminated. */
9833 case PLUS_EXPR:
9834 /* Reduce to minus. */
9835 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9836 TREE_OPERAND (exp, 0),
9837 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9838 TREE_OPERAND (exp, 1))));
9839 /* Process as MINUS. */
0006469d 9840#endif
0006469d 9841
ca695ac9
JB
9842 case MINUS_EXPR:
9843 /* Non-zero iff operands of minus differ. */
9844 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
9845 TREE_OPERAND (exp, 0),
9846 TREE_OPERAND (exp, 1)),
9847 NE, NE);
9848 break;
904762c8 9849
ca695ac9
JB
9850 case BIT_AND_EXPR:
9851 /* If we are AND'ing with a small constant, do this comparison in the
9852 smallest type that fits. If the machine doesn't have comparisons
9853 that small, it will be converted back to the wider comparison.
9854 This helps if we are testing the sign bit of a narrower object.
9855 combine can't do this for us because it can't know whether a
9856 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
0006469d 9857
ca695ac9
JB
9858 if (! SLOW_BYTE_ACCESS
9859 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9860 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9861 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
2f6e6d22
RK
9862 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9863 && (type = type_for_mode (mode, 1)) != 0
ca695ac9
JB
9864 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9865 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9866 != CODE_FOR_nothing))
9867 {
9868 do_jump (convert (type, exp), if_false_label, if_true_label);
9869 break;
9870 }
9871 goto normal;
904762c8 9872
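      /* The narrowing above, from the source side: a mask that fits
	 in one byte lets the jump compare in QImode on a target with
	 a QImode compare, which helps when the bit tested is the sign
	 bit of the narrower object.  */
      int
      narrow_test_example (int x)
      {
        if (x & 0x80)	/* floor_log2 (0x80) = 7, so mode_for_size
			   picks QImode and one byte is compared  */
          return 1;
        return 0;
      }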
ca695ac9
JB
9873 case TRUTH_NOT_EXPR:
9874 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9875 break;
0006469d 9876
ca695ac9 9877 case TRUTH_ANDIF_EXPR:
7ee055f4
MS
9878 {
9879 rtx seq1, seq2;
9880 tree cleanups, old_cleanups;
9881
9882 if (if_false_label == 0)
9883 if_false_label = drop_through_label = gen_label_rtx ();
9884 start_sequence ();
9885 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9886 seq1 = get_insns ();
9887 end_sequence ();
9888
9889 old_cleanups = cleanups_this_call;
9890 start_sequence ();
9891 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9892 seq2 = get_insns ();
9893 end_sequence ();
9894
9895 cleanups = defer_cleanups_to (old_cleanups);
9896 if (cleanups)
9897 {
9898 rtx flag = gen_reg_rtx (word_mode);
9899 tree new_cleanups;
9900 tree cond;
9901
9902 /* Flag cleanups as not needed. */
9903 emit_move_insn (flag, const0_rtx);
9904 emit_insns (seq1);
9905
9906 /* Flag cleanups as needed. */
9907 emit_move_insn (flag, const1_rtx);
9908 emit_insns (seq2);
9909
9ba73d38
MS
9910 /* All cleanups must be on the function_obstack. */
9911 push_obstacks_nochange ();
9912 resume_temporary_allocation ();
9913
7ee055f4
MS
 9914 /* Convert flag, which is an rtx, into a tree. */
9915 cond = make_node (RTL_EXPR);
9916 TREE_TYPE (cond) = integer_type_node;
9917 RTL_EXPR_RTL (cond) = flag;
9918 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
01842234 9919 cond = save_expr (cond);
7ee055f4
MS
9920
9921 new_cleanups = build (COND_EXPR, void_type_node,
9922 truthvalue_conversion (cond),
9923 cleanups, integer_zero_node);
9924 new_cleanups = fold (new_cleanups);
9925
9ba73d38
MS
9926 pop_obstacks ();
9927
7ee055f4
MS
9928 /* Now add in the conditionalized cleanups. */
9929 cleanups_this_call
9930 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
9931 (*interim_eh_hook) (NULL_TREE);
9932 }
9933 else
9934 {
9935 emit_insns (seq1);
9936 emit_insns (seq2);
9937 }
9938 }
ca695ac9 9939 break;
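
The flag trick above (and in the TRUTH_ORIF_EXPR case that follows) can be
seen in miniature in plain C; the names below are illustrative only:

#include <stdio.h>

/* Cleanups belonging to the right-hand side of "a && b" must run only if
   that operand was actually evaluated, so a runtime flag records whether
   control reached it, mirroring the const0_rtx/const1_rtx stores above.  */
int
main (void)
{
  int a = 0, result;
  int flag = 0;                  /* cleanups not needed yet */

  if (a)
    {
      flag = 1;                  /* RHS evaluated: cleanups needed */
      result = 42;               /* stands in for the RHS and its temporary */
    }
  else
    result = 0;

  if (flag)                      /* the conditionalized cleanup */
    puts ("cleanup for the RHS temporary");

  printf ("%d\n", result);
  return 0;
}
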
0006469d 9940
ca695ac9 9941 case TRUTH_ORIF_EXPR:
7ee055f4
MS
9942 {
9943 rtx seq1, seq2;
9944 tree cleanups, old_cleanups;
9945
9946 if (if_true_label == 0)
9947 if_true_label = drop_through_label = gen_label_rtx ();
9948 start_sequence ();
9949 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9950 seq1 = get_insns ();
9951 end_sequence ();
9952
9953 old_cleanups = cleanups_this_call;
9954 start_sequence ();
9955 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9956 seq2 = get_insns ();
9957 end_sequence ();
9958
9959 cleanups = defer_cleanups_to (old_cleanups);
9960 if (cleanups)
9961 {
9962 rtx flag = gen_reg_rtx (word_mode);
9963 tree new_cleanups;
9964 tree cond;
9965
9966 /* Flag cleanups as not needed. */
9967 emit_move_insn (flag, const0_rtx);
9968 emit_insns (seq1);
9969
9970 /* Flag cleanups as needed. */
9971 emit_move_insn (flag, const1_rtx);
9972 emit_insns (seq2);
9973
9ba73d38
MS
9974 /* All cleanups must be on the function_obstack. */
9975 push_obstacks_nochange ();
9976 resume_temporary_allocation ();
9977
7ee055f4
MS
 9978 /* Convert flag, which is an rtx, into a tree. */
9979 cond = make_node (RTL_EXPR);
9980 TREE_TYPE (cond) = integer_type_node;
9981 RTL_EXPR_RTL (cond) = flag;
9982 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
01842234 9983 cond = save_expr (cond);
7ee055f4
MS
9984
9985 new_cleanups = build (COND_EXPR, void_type_node,
9986 truthvalue_conversion (cond),
9987 cleanups, integer_zero_node);
9988 new_cleanups = fold (new_cleanups);
9989
9ba73d38
MS
9990 pop_obstacks ();
9991
7ee055f4
MS
9992 /* Now add in the conditionalized cleanups. */
9993 cleanups_this_call
9994 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
9995 (*interim_eh_hook) (NULL_TREE);
9996 }
9997 else
9998 {
9999 emit_insns (seq1);
10000 emit_insns (seq2);
10001 }
10002 }
ca695ac9 10003 break;
0006469d 10004
ca695ac9 10005 case COMPOUND_EXPR:
0088fcb1 10006 push_temp_slots ();
ca695ac9 10007 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
d80f96e9 10008 preserve_temp_slots (NULL_RTX);
ca695ac9 10009 free_temp_slots ();
0088fcb1 10010 pop_temp_slots ();
ca695ac9
JB
10011 emit_queue ();
10012 do_pending_stack_adjust ();
10013 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10014 break;
0006469d 10015
ca695ac9
JB
10016 case COMPONENT_REF:
10017 case BIT_FIELD_REF:
10018 case ARRAY_REF:
10019 {
10020 int bitsize, bitpos, unsignedp;
10021 enum machine_mode mode;
10022 tree type;
10023 tree offset;
10024 int volatilep = 0;
0006469d 10025
ca695ac9
JB
10026 /* Get description of this reference. We don't actually care
10027 about the underlying object here. */
10028 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10029 &mode, &unsignedp, &volatilep);
0006469d 10030
ca695ac9
JB
10031 type = type_for_size (bitsize, unsignedp);
10032 if (! SLOW_BYTE_ACCESS
10033 && type != 0 && bitsize >= 0
10034 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10035 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10036 != CODE_FOR_nothing))
10037 {
10038 do_jump (convert (type, exp), if_false_label, if_true_label);
10039 break;
10040 }
10041 goto normal;
10042 }
0006469d 10043
ca695ac9
JB
10044 case COND_EXPR:
10045 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10046 if (integer_onep (TREE_OPERAND (exp, 1))
10047 && integer_zerop (TREE_OPERAND (exp, 2)))
10048 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
904762c8 10049
ca695ac9
JB
10050 else if (integer_zerop (TREE_OPERAND (exp, 1))
10051 && integer_onep (TREE_OPERAND (exp, 2)))
10052 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
0006469d 10053
ca695ac9
JB
10054 else
10055 {
10056 register rtx label1 = gen_label_rtx ();
10057 drop_through_label = gen_label_rtx ();
10058 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10059 /* Now the THEN-expression. */
10060 do_jump (TREE_OPERAND (exp, 1),
10061 if_false_label ? if_false_label : drop_through_label,
10062 if_true_label ? if_true_label : drop_through_label);
10063 /* In case the do_jump just above never jumps. */
10064 do_pending_stack_adjust ();
10065 emit_label (label1);
10066 /* Now the ELSE-expression. */
10067 do_jump (TREE_OPERAND (exp, 2),
10068 if_false_label ? if_false_label : drop_through_label,
10069 if_true_label ? if_true_label : drop_through_label);
10070 }
10071 break;
0006469d 10072
ca695ac9 10073 case EQ_EXPR:
0e8c9172
RK
10074 {
10075 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10076
10077 if (integer_zerop (TREE_OPERAND (exp, 1)))
10078 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10079 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
201012cb 10080 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
0e8c9172
RK
10081 do_jump
10082 (fold
10083 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10084 fold (build (EQ_EXPR, TREE_TYPE (exp),
c8465d86
RK
10085 fold (build1 (REALPART_EXPR,
10086 TREE_TYPE (inner_type),
0e8c9172 10087 TREE_OPERAND (exp, 0))),
c8465d86
RK
10088 fold (build1 (REALPART_EXPR,
10089 TREE_TYPE (inner_type),
0e8c9172
RK
10090 TREE_OPERAND (exp, 1))))),
10091 fold (build (EQ_EXPR, TREE_TYPE (exp),
c8465d86
RK
10092 fold (build1 (IMAGPART_EXPR,
10093 TREE_TYPE (inner_type),
0e8c9172 10094 TREE_OPERAND (exp, 0))),
c8465d86
RK
10095 fold (build1 (IMAGPART_EXPR,
10096 TREE_TYPE (inner_type),
0e8c9172
RK
10097 TREE_OPERAND (exp, 1))))))),
10098 if_false_label, if_true_label);
10099 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10100 && !can_compare_p (TYPE_MODE (inner_type)))
10101 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10102 else
10103 comparison = compare (exp, EQ, EQ);
10104 break;
10105 }
0006469d 10106
ca695ac9 10107 case NE_EXPR:
0e8c9172
RK
10108 {
10109 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10110
10111 if (integer_zerop (TREE_OPERAND (exp, 1)))
10112 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10113 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
201012cb 10114 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
0e8c9172
RK
10115 do_jump
10116 (fold
10117 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10118 fold (build (NE_EXPR, TREE_TYPE (exp),
c8465d86
RK
10119 fold (build1 (REALPART_EXPR,
10120 TREE_TYPE (inner_type),
0e8c9172 10121 TREE_OPERAND (exp, 0))),
c8465d86
RK
10122 fold (build1 (REALPART_EXPR,
10123 TREE_TYPE (inner_type),
0e8c9172
RK
10124 TREE_OPERAND (exp, 1))))),
10125 fold (build (NE_EXPR, TREE_TYPE (exp),
c8465d86
RK
10126 fold (build1 (IMAGPART_EXPR,
10127 TREE_TYPE (inner_type),
0e8c9172 10128 TREE_OPERAND (exp, 0))),
c8465d86
RK
10129 fold (build1 (IMAGPART_EXPR,
10130 TREE_TYPE (inner_type),
0e8c9172
RK
10131 TREE_OPERAND (exp, 1))))))),
10132 if_false_label, if_true_label);
10133 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10134 && !can_compare_p (TYPE_MODE (inner_type)))
10135 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10136 else
10137 comparison = compare (exp, NE, NE);
10138 break;
10139 }
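
Written out in source form, the complex lowering performed by the two cases
above is just this (a self-contained sketch; the struct stands in for a
complex mode):

#include <assert.h>

struct cplx { double re, im; };

/* EQ on a complex value becomes EQ(real) ANDIF EQ(imag); NE becomes
   NE(real) ORIF NE(imag), matching the trees built above.  */
static int
cplx_eq (struct cplx a, struct cplx b)
{
  return a.re == b.re && a.im == b.im;
}

static int
cplx_ne (struct cplx a, struct cplx b)
{
  return a.re != b.re || a.im != b.im;
}

int
main (void)
{
  struct cplx x = { 1.0, 2.0 }, y = { 1.0, 3.0 };
  assert (!cplx_eq (x, y) && cplx_ne (x, y));
  assert (cplx_eq (x, x) && !cplx_ne (x, x));
  return 0;
}
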
0006469d 10140
ca695ac9
JB
10141 case LT_EXPR:
10142 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10143 == MODE_INT)
10144 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10145 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10146 else
10147 comparison = compare (exp, LT, LTU);
10148 break;
0006469d 10149
ca695ac9
JB
10150 case LE_EXPR:
10151 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10152 == MODE_INT)
10153 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10154 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10155 else
10156 comparison = compare (exp, LE, LEU);
10157 break;
0006469d 10158
ca695ac9
JB
10159 case GT_EXPR:
10160 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10161 == MODE_INT)
10162 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10163 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10164 else
10165 comparison = compare (exp, GT, GTU);
10166 break;
0006469d 10167
ca695ac9
JB
10168 case GE_EXPR:
10169 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10170 == MODE_INT)
10171 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10172 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10173 else
10174 comparison = compare (exp, GE, GEU);
10175 break;
0006469d 10176
ca695ac9
JB
10177 default:
10178 normal:
10179 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10180#if 0
10181 /* This is not needed any more and causes poor code since it causes
10182 comparisons and tests from non-SI objects to have different code
10183 sequences. */
10184 /* Copy to register to avoid generating bad insns by cse
10185 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10186 if (!cse_not_expected && GET_CODE (temp) == MEM)
10187 temp = copy_to_reg (temp);
10188#endif
10189 do_pending_stack_adjust ();
10190 if (GET_CODE (temp) == CONST_INT)
10191 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10192 else if (GET_CODE (temp) == LABEL_REF)
10193 comparison = const_true_rtx;
10194 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10195 && !can_compare_p (GET_MODE (temp)))
10196 /* Note swapping the labels gives us not-equal. */
10197 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10198 else if (GET_MODE (temp) != VOIDmode)
10199 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10200 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10201 GET_MODE (temp), NULL_RTX, 0);
10202 else
10203 abort ();
10204 }
0006469d 10205
ca695ac9
JB
10206 /* Do any postincrements in the expression that was tested. */
10207 emit_queue ();
0006469d 10208
ca695ac9
JB
10209 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10210 straight into a conditional jump instruction as the jump condition.
10211 Otherwise, all the work has been done already. */
0006469d 10212
ca695ac9 10213 if (comparison == const_true_rtx)
0006469d 10214 {
ca695ac9
JB
10215 if (if_true_label)
10216 emit_jump (if_true_label);
0006469d 10217 }
ca695ac9
JB
10218 else if (comparison == const0_rtx)
10219 {
10220 if (if_false_label)
10221 emit_jump (if_false_label);
10222 }
10223 else if (comparison)
10224 do_jump_for_compare (comparison, if_false_label, if_true_label);
0006469d 10225
ca695ac9 10226 if (drop_through_label)
0006469d 10227 {
ca695ac9
JB
10228 /* If do_jump produces code that might be jumped around,
10229 do any stack adjusts from that code, before the place
10230 where control merges in. */
10231 do_pending_stack_adjust ();
10232 emit_label (drop_through_label);
10233 }
10234}
10235\f
10236/* Given a comparison expression EXP for values too wide to be compared
10237 with one insn, test the comparison and jump to the appropriate label.
10238 The code of EXP is ignored; we always test GT if SWAP is 0,
10239 and LT if SWAP is 1. */
0006469d 10240
ca695ac9
JB
10241static void
10242do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10243 tree exp;
10244 int swap;
10245 rtx if_false_label, if_true_label;
10246{
10247 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10248 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10249 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10250 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10251 rtx drop_through_label = 0;
10252 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10253 int i;
0006469d 10254
ca695ac9
JB
10255 if (! if_true_label || ! if_false_label)
10256 drop_through_label = gen_label_rtx ();
10257 if (! if_true_label)
10258 if_true_label = drop_through_label;
10259 if (! if_false_label)
10260 if_false_label = drop_through_label;
0006469d 10261
ca695ac9
JB
10262 /* Compare a word at a time, high order first. */
10263 for (i = 0; i < nwords; i++)
10264 {
10265 rtx comp;
10266 rtx op0_word, op1_word;
0006469d 10267
ca695ac9
JB
10268 if (WORDS_BIG_ENDIAN)
10269 {
10270 op0_word = operand_subword_force (op0, i, mode);
10271 op1_word = operand_subword_force (op1, i, mode);
10272 }
10273 else
10274 {
10275 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10276 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10277 }
0006469d 10278
ca695ac9
JB
10279 /* All but high-order word must be compared as unsigned. */
10280 comp = compare_from_rtx (op0_word, op1_word,
10281 (unsignedp || i > 0) ? GTU : GT,
10282 unsignedp, word_mode, NULL_RTX, 0);
10283 if (comp == const_true_rtx)
10284 emit_jump (if_true_label);
10285 else if (comp != const0_rtx)
10286 do_jump_for_compare (comp, NULL_RTX, if_true_label);
0006469d 10287
ca695ac9
JB
10288 /* Consider lower words only if these are equal. */
10289 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10290 NULL_RTX, 0);
10291 if (comp == const_true_rtx)
10292 emit_jump (if_false_label);
10293 else if (comp != const0_rtx)
10294 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10295 }
0006469d 10296
ca695ac9
JB
10297 if (if_false_label)
10298 emit_jump (if_false_label);
10299 if (drop_through_label)
10300 emit_label (drop_through_label);
0006469d
TW
10301}
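
The word-at-a-time loop above reduces to the following standalone sketch
(unsigned comparison only, words stored low-order first; the function above
also handles the signed high word and big-endian word order):

#include <assert.h>

static int
gt_by_parts (unsigned long *op0, unsigned long *op1, int nwords)
{
  int i;
  /* Compare a word at a time, high order first.  */
  for (i = nwords - 1; i >= 0; i--)
    {
      if (op0[i] > op1[i])
        return 1;                /* decided: jump to if_true_label */
      if (op0[i] != op1[i])
        return 0;                /* decided: jump to if_false_label */
      /* Words equal: consider the next lower word.  */
    }
  return 0;                      /* all words equal, so not greater */
}

int
main (void)
{
  unsigned long a[2] = { 5, 1 };    /* low word first */
  unsigned long b[2] = { 9, 0 };
  assert (gt_by_parts (a, b, 2) == 1);
  assert (gt_by_parts (b, a, 2) == 0);
  assert (gt_by_parts (a, a, 2) == 0);
  return 0;
}
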
10302
ca695ac9
JB
10303/* Compare OP0 with OP1, word at a time, in mode MODE.
10304 UNSIGNEDP says to do unsigned comparison.
10305 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
904762c8 10306
2e5ec6cf 10307void
ca695ac9
JB
10308do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10309 enum machine_mode mode;
10310 int unsignedp;
10311 rtx op0, op1;
10312 rtx if_false_label, if_true_label;
0006469d 10313{
ca695ac9
JB
10314 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10315 rtx drop_through_label = 0;
10316 int i;
0006469d 10317
ca695ac9
JB
10318 if (! if_true_label || ! if_false_label)
10319 drop_through_label = gen_label_rtx ();
10320 if (! if_true_label)
10321 if_true_label = drop_through_label;
10322 if (! if_false_label)
10323 if_false_label = drop_through_label;
0006469d 10324
ca695ac9
JB
10325 /* Compare a word at a time, high order first. */
10326 for (i = 0; i < nwords; i++)
0006469d 10327 {
ca695ac9
JB
10328 rtx comp;
10329 rtx op0_word, op1_word;
0006469d 10330
ca695ac9
JB
10331 if (WORDS_BIG_ENDIAN)
10332 {
10333 op0_word = operand_subword_force (op0, i, mode);
10334 op1_word = operand_subword_force (op1, i, mode);
10335 }
10336 else
10337 {
10338 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10339 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10340 }
0006469d 10341
ca695ac9
JB
10342 /* All but high-order word must be compared as unsigned. */
10343 comp = compare_from_rtx (op0_word, op1_word,
10344 (unsignedp || i > 0) ? GTU : GT,
10345 unsignedp, word_mode, NULL_RTX, 0);
10346 if (comp == const_true_rtx)
10347 emit_jump (if_true_label);
10348 else if (comp != const0_rtx)
10349 do_jump_for_compare (comp, NULL_RTX, if_true_label);
0006469d 10350
ca695ac9
JB
10351 /* Consider lower words only if these are equal. */
10352 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10353 NULL_RTX, 0);
10354 if (comp == const_true_rtx)
10355 emit_jump (if_false_label);
10356 else if (comp != const0_rtx)
10357 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10358 }
0006469d 10359
ca695ac9
JB
10360 if (if_false_label)
10361 emit_jump (if_false_label);
10362 if (drop_through_label)
10363 emit_label (drop_through_label);
0006469d 10364}
bbf6f052 10365
ca695ac9
JB
10366/* Given an EQ_EXPR expression EXP for values too wide to be compared
10367 with one insn, test the comparison and jump to the appropriate label. */
10368
10369static void
10370do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10371 tree exp;
10372 rtx if_false_label, if_true_label;
bbf6f052 10373{
ca695ac9
JB
10374 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10375 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10376 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10377 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10378 int i;
10379 rtx drop_through_label = 0;
bbf6f052 10380
ca695ac9
JB
10381 if (! if_false_label)
10382 drop_through_label = if_false_label = gen_label_rtx ();
bbf6f052 10383
ca695ac9
JB
10384 for (i = 0; i < nwords; i++)
10385 {
10386 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10387 operand_subword_force (op1, i, mode),
10388 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10389 word_mode, NULL_RTX, 0);
10390 if (comp == const_true_rtx)
10391 emit_jump (if_false_label);
10392 else if (comp != const0_rtx)
10393 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10394 }
1499e0a8 10395
ca695ac9
JB
10396 if (if_true_label)
10397 emit_jump (if_true_label);
10398 if (drop_through_label)
10399 emit_label (drop_through_label);
10400}
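
The equality variant is simpler; in miniature (a sketch, not the emitted
insn sequence):

/* Two multiword values are equal iff every word pair matches; the loop
   above jumps to if_false_label at the first mismatch and falls through
   to the if_true_label jump otherwise.  */
int
eq_by_parts (unsigned long *op0, unsigned long *op1, int nwords)
{
  int i;
  for (i = 0; i < nwords; i++)
    if (op0[i] != op1[i])
      return 0;                  /* "emit_jump (if_false_label)" */
  return 1;                      /* fell through every word: equal */
}
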
10401\f
10402/* Jump according to whether OP0 is 0.
10403 We assume that OP0 has an integer mode that is too wide
10404 for the available compare insns. */
1499e0a8 10405
ca695ac9
JB
10406static void
10407do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10408 rtx op0;
10409 rtx if_false_label, if_true_label;
10410{
10411 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10412 int i;
10413 rtx drop_through_label = 0;
1499e0a8 10414
ca695ac9
JB
10415 if (! if_false_label)
10416 drop_through_label = if_false_label = gen_label_rtx ();
1499e0a8 10417
ca695ac9
JB
10418 for (i = 0; i < nwords; i++)
10419 {
10420 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10421 GET_MODE (op0)),
10422 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10423 if (comp == const_true_rtx)
10424 emit_jump (if_false_label);
10425 else if (comp != const0_rtx)
10426 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10427 }
1499e0a8 10428
ca695ac9
JB
10429 if (if_true_label)
10430 emit_jump (if_true_label);
10431 if (drop_through_label)
10432 emit_label (drop_through_label);
10433}
bbf6f052 10434
ca695ac9
JB
10435/* Given a comparison expression in rtl form, output conditional branches to
10436 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
bbf6f052 10437
ca695ac9
JB
10438static void
10439do_jump_for_compare (comparison, if_false_label, if_true_label)
10440 rtx comparison, if_false_label, if_true_label;
10441{
10442 if (if_true_label)
a358cee0 10443 {
ca695ac9
JB
10444 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10445 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10446 else
10447 abort ();
a358cee0 10448
ca695ac9
JB
10449 if (if_false_label)
10450 emit_jump (if_false_label);
c980ac49 10451 }
ca695ac9 10452 else if (if_false_label)
bbf6f052 10453 {
ca695ac9 10454 rtx insn;
f12f485a 10455 rtx prev = get_last_insn ();
ca695ac9 10456 rtx branch = 0;
bbf6f052 10457
ca695ac9
JB
10458 /* Output the branch with the opposite condition. Then try to invert
10459 what is generated. If more than one insn is a branch, or if the
10460 branch is not the last insn written, abort. If we can't invert
10461 the branch, make a true label, redirect this jump to that,
10462 emit a jump to the false label and define the true label. */
bbf6f052 10463
ca695ac9 10464 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
34661f5c 10465 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
ca695ac9
JB
10466 else
10467 abort ();
bbf6f052 10468
41dfd40c
RK
10469 /* Here we get the first insn that was just emitted. It used to be the
10470 case that, on some machines, emitting the branch would discard
10471 the previous compare insn and emit a replacement. This isn't
10472 done anymore, but abort if we see that PREV is deleted. */
10473
ca695ac9 10474 if (prev == 0)
ca695ac9 10475 insn = get_insns ();
41dfd40c
RK
10476 else if (INSN_DELETED_P (prev))
10477 abort ();
ca695ac9 10478 else
41dfd40c 10479 insn = NEXT_INSN (prev);
bbf6f052 10480
34661f5c 10481 for (; insn; insn = NEXT_INSN (insn))
ca695ac9
JB
10482 if (GET_CODE (insn) == JUMP_INSN)
10483 {
10484 if (branch)
10485 abort ();
10486 branch = insn;
10487 }
10488
10489 if (branch != get_last_insn ())
10490 abort ();
10491
127e4d19 10492 JUMP_LABEL (branch) = if_false_label;
ca695ac9
JB
10493 if (! invert_jump (branch, if_false_label))
10494 {
10495 if_true_label = gen_label_rtx ();
10496 redirect_jump (branch, if_true_label);
10497 emit_jump (if_false_label);
10498 emit_label (if_true_label);
bbf6f052
RK
10499 }
10500 }
ca695ac9
JB
10501}
10502\f
10503/* Generate code for a comparison expression EXP
10504 (including code to compute the values to be compared)
10505 and set (CC0) according to the result.
10506 SIGNED_CODE should be the rtx operation for this comparison for
10507 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10508
10509 We force a stack adjustment unless there are currently
10510 things pushed on the stack that aren't yet used. */
10511
10512static rtx
10513compare (exp, signed_code, unsigned_code)
10514 register tree exp;
10515 enum rtx_code signed_code, unsigned_code;
10516{
10517 register rtx op0
10518 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10519 register rtx op1
10520 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10521 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10522 register enum machine_mode mode = TYPE_MODE (type);
10523 int unsignedp = TREE_UNSIGNED (type);
10524 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
bbf6f052 10525
ca695ac9
JB
10526 return compare_from_rtx (op0, op1, code, unsignedp, mode,
10527 ((mode == BLKmode)
10528 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10529 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
10530}
bbf6f052 10531
ca695ac9
JB
10532/* Like compare but expects the values to compare as two rtx's.
10533 The decision as to signed or unsigned comparison must be made by the caller.
bbf6f052 10534
ca695ac9
JB
10535 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10536 compared.
bbf6f052 10537
ca695ac9
JB
10538 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10539 size of MODE should be used. */
bbf6f052 10540
ca695ac9
JB
10541rtx
10542compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10543 register rtx op0, op1;
10544 enum rtx_code code;
10545 int unsignedp;
10546 enum machine_mode mode;
10547 rtx size;
10548 int align;
10549{
10550 rtx tem;
bbf6f052 10551
ca695ac9
JB
10552 /* If one operand is constant, make it the second one. Only do this
10553 if the other operand is not constant as well. */
bbf6f052 10554
ca695ac9
JB
10555 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10556 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10557 {
10558 tem = op0;
10559 op0 = op1;
10560 op1 = tem;
10561 code = swap_condition (code);
10562 }
bbf6f052 10563
ca695ac9 10564 if (flag_force_mem)
bbf6f052 10565 {
ca695ac9
JB
10566 op0 = force_not_mem (op0);
10567 op1 = force_not_mem (op1);
10568 }
bbf6f052 10569
ca695ac9 10570 do_pending_stack_adjust ();
bbf6f052 10571
ca695ac9
JB
10572 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10573 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10574 return tem;
bbf6f052 10575
ca695ac9
JB
10576#if 0
10577 /* There's no need to do this now that combine.c can eliminate lots of
10578 sign extensions. This can be less efficient in certain cases on other
10579 machines. */
bbf6f052 10580
ca695ac9
JB
10581 /* If this is a signed equality comparison, we can do it as an
10582 unsigned comparison since zero-extension is cheaper than sign
10583 extension and comparisons with zero are done as unsigned. This is
10584 the case even on machines that can do fast sign extension, since
10585 zero-extension is easier to combine with other operations than
10586 sign-extension is. If we are comparing against a constant, we must
10587 convert it to what it would look like unsigned. */
10588 if ((code == EQ || code == NE) && ! unsignedp
10589 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10590 {
10591 if (GET_CODE (op1) == CONST_INT
10592 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10593 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10594 unsignedp = 1;
bbf6f052 10595 }
ca695ac9
JB
10596#endif
10597
10598 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
bbf6f052 10599
ca695ac9 10600 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
bbf6f052
RK
10601}
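
The operand swap at the top of compare_from_rtx must reflect the condition
rather than negate it; a minimal sketch of what the swap_condition call
above does (enumerator names here are illustrative, not the rtl codes):

enum cmp { LT_, GT_, LE_, GE_, EQ_, NE_ };

enum cmp
swap_cmp (enum cmp code)
{
  switch (code)
    {
    case LT_: return GT_;        /* 5 <  x  is  x >  5 */
    case GT_: return LT_;
    case LE_: return GE_;        /* 5 <= x  is  x >= 5 */
    case GE_: return LE_;
    default:  return code;       /* EQ and NE are symmetric */
    }
}
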
10602\f
ca695ac9
JB
10603/* Generate code to calculate EXP using a store-flag instruction
10604 and return an rtx for the result. EXP is either a comparison
10605 or a TRUTH_NOT_EXPR whose operand is a comparison.
bbf6f052 10606
ca695ac9 10607 If TARGET is nonzero, store the result there if convenient.
bbf6f052 10608
ca695ac9
JB
10609 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10610 cheap.
bbf6f052 10611
ca695ac9
JB
10612 Return zero if there is no suitable set-flag instruction
10613 available on this machine.
bbf6f052 10614
ca695ac9
JB
10615 Once expand_expr has been called on the arguments of the comparison,
10616 we are committed to doing the store flag, since it is not safe to
10617 re-evaluate the expression. We emit the store-flag insn by calling
10618 emit_store_flag, but only expand the arguments if we have a reason
10619 to believe that emit_store_flag will be successful. If we think that
10620 it will, but it isn't, we have to simulate the store-flag with a
10621 set/jump/set sequence. */
bbf6f052 10622
ca695ac9
JB
10623static rtx
10624do_store_flag (exp, target, mode, only_cheap)
10625 tree exp;
10626 rtx target;
10627 enum machine_mode mode;
10628 int only_cheap;
bbf6f052 10629{
ca695ac9
JB
10630 enum rtx_code code;
10631 tree arg0, arg1, type;
10632 tree tem;
10633 enum machine_mode operand_mode;
10634 int invert = 0;
10635 int unsignedp;
10636 rtx op0, op1;
10637 enum insn_code icode;
10638 rtx subtarget = target;
10639 rtx result, label, pattern, jump_pat;
bbf6f052 10640
ca695ac9
JB
10641 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10642 result at the end. We can't simply invert the test since it would
10643 have already been inverted if it were valid. This case occurs for
10644 some floating-point comparisons. */
10645
10646 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10647 invert = 1, exp = TREE_OPERAND (exp, 0);
10648
10649 arg0 = TREE_OPERAND (exp, 0);
10650 arg1 = TREE_OPERAND (exp, 1);
10651 type = TREE_TYPE (arg0);
10652 operand_mode = TYPE_MODE (type);
10653 unsignedp = TREE_UNSIGNED (type);
10654
10655 /* We won't bother with BLKmode store-flag operations because it would mean
10656 passing a lot of information to emit_store_flag. */
10657 if (operand_mode == BLKmode)
10658 return 0;
10659
10660 STRIP_NOPS (arg0);
10661 STRIP_NOPS (arg1);
10662
10663 /* Get the rtx comparison code to use. We know that EXP is a comparison
10664 operation of some type. Some comparisons against 1 and -1 can be
10665 converted to comparisons with zero. Do so here so that the tests
10666 below will be aware that we have a comparison with zero. These
10667 tests will not catch constants in the first operand, but constants
10668 are rarely passed as the first operand. */
10669
10670 switch (TREE_CODE (exp))
10671 {
10672 case EQ_EXPR:
10673 code = EQ;
10674 break;
10675 case NE_EXPR:
10676 code = NE;
10677 break;
10678 case LT_EXPR:
10679 if (integer_onep (arg1))
10680 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10681 else
10682 code = unsignedp ? LTU : LT;
10683 break;
10684 case LE_EXPR:
10685 if (! unsignedp && integer_all_onesp (arg1))
10686 arg1 = integer_zero_node, code = LT;
10687 else
10688 code = unsignedp ? LEU : LE;
10689 break;
10690 case GT_EXPR:
10691 if (! unsignedp && integer_all_onesp (arg1))
10692 arg1 = integer_zero_node, code = GE;
10693 else
10694 code = unsignedp ? GTU : GT;
10695 break;
10696 case GE_EXPR:
10697 if (integer_onep (arg1))
10698 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10699 else
10700 code = unsignedp ? GEU : GE;
10701 break;
10702 default:
10703 abort ();
10704 }
bbf6f052 10705
ca695ac9
JB
10706 /* Put a constant second. */
10707 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
bbf6f052 10708 {
ca695ac9
JB
10709 tem = arg0; arg0 = arg1; arg1 = tem;
10710 code = swap_condition (code);
bbf6f052 10711 }
bbf6f052 10712
ca695ac9
JB
10713 /* If this is an equality or inequality test of a single bit, we can
10714 do this by shifting the bit being tested to the low-order bit and
10715 masking the result with the constant 1. If the condition was EQ,
10716 we xor it with 1. This does not require an scc insn and is faster
10717 than an scc insn even if we have it. */
bbf6f052 10718
ca695ac9
JB
10719 if ((code == NE || code == EQ)
10720 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10721 && integer_pow2p (TREE_OPERAND (arg0, 1))
10722 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
10723 {
10724 tree inner = TREE_OPERAND (arg0, 0);
21b2a157
JW
10725 HOST_WIDE_INT tem;
10726 int bitnum;
ca695ac9 10727 int ops_unsignedp;
bbf6f052 10728
21b2a157
JW
10729 tem = INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
10730 NULL_RTX, VOIDmode, 0));
10731 /* In this case, immed_double_const will sign extend the value to make
10732 it look the same on the host and target. We must remove the
10733 sign-extension before calling exact_log2, since exact_log2 will
10734 fail for negative values. */
10735 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
10736 && BITS_PER_WORD == GET_MODE_BITSIZE (TYPE_MODE (type)))
8b295000
JW
10737 /* We don't use the obvious constant shift to generate the mask,
10738 because that generates compiler warnings when BITS_PER_WORD is
10739 greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
10740 code is unreachable in that case. */
10741 tem = tem & GET_MODE_MASK (word_mode);
21b2a157
JW
10742 bitnum = exact_log2 (tem);
10743
ca695ac9
JB
10744 /* If INNER is a right shift of a constant and it plus BITNUM does
10745 not overflow, adjust BITNUM and INNER. */
bbf6f052 10746
ca695ac9
JB
10747 if (TREE_CODE (inner) == RSHIFT_EXPR
10748 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10749 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10750 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
10751 < TYPE_PRECISION (type)))
10752 {
10753 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10754 inner = TREE_OPERAND (inner, 0);
10755 }
bbf6f052 10756
ca695ac9
JB
10757 /* If we are going to be able to omit the AND below, we must do our
10758 operations as unsigned. If we must use the AND, we have a choice.
10759 Normally unsigned is faster, but for some machines signed is. */
10760 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
ad92c826
RK
10761#ifdef LOAD_EXTEND_OP
10762 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
ca695ac9
JB
10763#else
10764 : 1
10765#endif
10766 );
bbf6f052 10767
ca695ac9
JB
10768 if (subtarget == 0 || GET_CODE (subtarget) != REG
10769 || GET_MODE (subtarget) != operand_mode
10770 || ! safe_from_p (subtarget, inner))
10771 subtarget = 0;
e7c33f54 10772
ca695ac9 10773 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
bbf6f052 10774
ca695ac9
JB
10775 if (bitnum != 0)
10776 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
0c316b20 10777 size_int (bitnum), subtarget, ops_unsignedp);
bbf6f052 10778
ca695ac9
JB
10779 if (GET_MODE (op0) != mode)
10780 op0 = convert_to_mode (mode, op0, ops_unsignedp);
bbf6f052 10781
ca695ac9 10782 if ((code == EQ && ! invert) || (code == NE && invert))
0c316b20 10783 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
ca695ac9 10784 ops_unsignedp, OPTAB_LIB_WIDEN);
bbf6f052 10785
ca695ac9
JB
10786 /* Put the AND last so it can combine with more things. */
10787 if (bitnum != TYPE_PRECISION (type) - 1)
0c316b20 10788 op0 = expand_and (op0, const1_rtx, subtarget);
bbf6f052 10789
ca695ac9
JB
10790 return op0;
10791 }
bbf6f052 10792
ca695ac9
JB
10793 /* Now see if we are likely to be able to do this. Return if not. */
10794 if (! can_compare_p (operand_mode))
10795 return 0;
10796 icode = setcc_gen_code[(int) code];
10797 if (icode == CODE_FOR_nothing
10798 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
10799 {
10800 /* We can only do this if it is one of the special cases that
10801 can be handled without an scc insn. */
10802 if ((code == LT && integer_zerop (arg1))
10803 || (! only_cheap && code == GE && integer_zerop (arg1)))
10804 ;
10805 else if (BRANCH_COST >= 0
10806 && ! only_cheap && (code == NE || code == EQ)
10807 && TREE_CODE (type) != REAL_TYPE
10808 && ((abs_optab->handlers[(int) operand_mode].insn_code
10809 != CODE_FOR_nothing)
10810 || (ffs_optab->handlers[(int) operand_mode].insn_code
10811 != CODE_FOR_nothing)))
10812 ;
10813 else
10814 return 0;
10815 }
10816
10817 preexpand_calls (exp);
10818 if (subtarget == 0 || GET_CODE (subtarget) != REG
10819 || GET_MODE (subtarget) != operand_mode
10820 || ! safe_from_p (subtarget, arg1))
10821 subtarget = 0;
bbf6f052 10822
ca695ac9
JB
10823 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10824 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
bbf6f052 10825
ca695ac9
JB
10826 if (target == 0)
10827 target = gen_reg_rtx (mode);
bbf6f052 10828
ca695ac9
JB
10829 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10830 because, if the emit_store_flag does anything it will succeed and
10831 OP0 and OP1 will not be used subsequently. */
bbf6f052 10832
ca695ac9
JB
10833 result = emit_store_flag (target, code,
10834 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10835 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10836 operand_mode, unsignedp, 1);
bbf6f052 10837
ca695ac9
JB
10838 if (result)
10839 {
10840 if (invert)
10841 result = expand_binop (mode, xor_optab, result, const1_rtx,
10842 result, 0, OPTAB_LIB_WIDEN);
10843 return result;
10844 }
bbf6f052 10845
ca695ac9
JB
10846 /* If this failed, we have to do this with set/compare/jump/set code. */
10847 if (target == 0 || GET_CODE (target) != REG
10848 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10849 target = gen_reg_rtx (GET_MODE (target));
bbf6f052 10850
ca695ac9
JB
10851 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10852 result = compare_from_rtx (op0, op1, code, unsignedp,
10853 operand_mode, NULL_RTX, 0);
10854 if (GET_CODE (result) == CONST_INT)
10855 return (((result == const0_rtx && ! invert)
10856 || (result != const0_rtx && invert))
10857 ? const0_rtx : const1_rtx);
bbf6f052 10858
ca695ac9
JB
10859 label = gen_label_rtx ();
10860 if (bcc_gen_fctn[(int) code] == 0)
10861 abort ();
bbf6f052 10862
ca695ac9
JB
10863 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10864 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10865 emit_label (label);
bbf6f052 10866
ca695ac9
JB
10867 return target;
10868}
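
The single-bit fast path at the top of do_store_flag computes, in effect
(a standalone sketch of the shift/xor/mask sequence):

#include <assert.h>

unsigned
bit_ne (unsigned x, int bitnum)        /* (x & (1u << bitnum)) != 0 */
{
  return (x >> bitnum) & 1;
}

unsigned
bit_eq (unsigned x, int bitnum)        /* (x & (1u << bitnum)) == 0 */
{
  return ((x >> bitnum) ^ 1) & 1;      /* the xor with 1 inverts the bit */
}

int
main (void)
{
  assert (bit_ne (0x10, 4) == 1 && bit_eq (0x10, 4) == 0);
  assert (bit_ne (0x00, 4) == 0 && bit_eq (0x00, 4) == 1);
  return 0;
}
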
10869\f
10870/* Generate a tablejump instruction (used for switch statements). */
bbf6f052 10871
ca695ac9 10872#ifdef HAVE_tablejump
bbf6f052 10873
ca695ac9
JB
10874/* INDEX is the value being switched on, with the lowest value
10875 in the table already subtracted.
10876 MODE is its expected mode (needed if INDEX is constant).
10877 RANGE is the length of the jump table.
10878 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
bbf6f052 10879
ca695ac9
JB
10880 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10881 index value is out of range. */
bbf6f052 10882
ca695ac9
JB
10883void
10884do_tablejump (index, mode, range, table_label, default_label)
10885 rtx index, range, table_label, default_label;
10886 enum machine_mode mode;
10887{
10888 register rtx temp, vector;
bbf6f052 10889
ca695ac9
JB
10890 /* Do an unsigned comparison (in the proper mode) between the index
10891 expression and the value which represents the length of the range.
10892 Since we just finished subtracting the lower bound of the range
10893 from the index expression, this comparison allows us to simultaneously
10894 check that the original index expression value is both greater than
10895 or equal to the minimum value of the range and less than or equal to
10896 the maximum value of the range. */
bbf6f052 10897
bf500664
RK
10898 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
10899 emit_jump_insn (gen_bgtu (default_label));
bbf6f052 10900
ca695ac9
JB
10901 /* If index is in range, it must fit in Pmode.
10902 Convert to Pmode so we can index with it. */
10903 if (mode != Pmode)
10904 index = convert_to_mode (Pmode, index, 1);
bbf6f052 10905
ca695ac9
JB
10906 /* Don't let a MEM slip through, because then INDEX that comes
10907 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10908 and break_out_memory_refs will go to work on it and mess it up. */
10909#ifdef PIC_CASE_VECTOR_ADDRESS
10910 if (flag_pic && GET_CODE (index) != REG)
10911 index = copy_to_mode_reg (Pmode, index);
10912#endif
bbf6f052 10913
ca695ac9
JB
10914 /* If flag_force_addr were to affect this address
10915 it could interfere with the tricky assumptions made
10916 about addresses that contain label-refs,
10917 which may be valid only very near the tablejump itself. */
10918 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10919 GET_MODE_SIZE, because this indicates how large insns are. The other
10920 uses should all be Pmode, because they are addresses. This code
10921 could fail if addresses and insns are not the same size. */
10922 index = gen_rtx (PLUS, Pmode,
10923 gen_rtx (MULT, Pmode, index,
10924 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10925 gen_rtx (LABEL_REF, Pmode, table_label));
10926#ifdef PIC_CASE_VECTOR_ADDRESS
10927 if (flag_pic)
10928 index = PIC_CASE_VECTOR_ADDRESS (index);
10929 else
10930#endif
10931 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10932 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10933 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
10934 RTX_UNCHANGING_P (vector) = 1;
10935 convert_move (temp, vector, 0);
bbf6f052 10936
ca695ac9 10937 emit_jump_insn (gen_tablejump (temp, table_label));
bbf6f052 10938
ca695ac9
JB
10939#ifndef CASE_VECTOR_PC_RELATIVE
10940 /* If we are generating PIC code or if the table is PC-relative, the
10941 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10942 if (! flag_pic)
10943 emit_barrier ();
bbf6f052 10944#endif
ca695ac9 10945}
bbf6f052 10946
ca695ac9 10947#endif /* HAVE_tablejump */
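
The bounds check emitted by do_tablejump condenses both range tests into
one unsigned comparison; a self-contained restatement:

#include <assert.h>

/* After the low bound is subtracted, indices below LO wrap around to
   huge unsigned values, so a single GTU test against HI-LO catches
   out-of-range values on both sides.  */
int
in_switch_range (int x, int lo, int hi)
{
  return (unsigned) (x - lo) <= (unsigned) (hi - lo);
}

int
main (void)
{
  assert (in_switch_range (3, 1, 5));
  assert (!in_switch_range (0, 1, 5));     /* below the range: wraps */
  assert (!in_switch_range (6, 1, 5));     /* above the range */
  return 0;
}
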
bbf6f052 10948
bbf6f052 10949
ca695ac9
JB
10950/* Emit a suitable bytecode to load a value from memory, assuming a pointer
10951 to that value is on the top of the stack. The resulting type is TYPE, and
10952 the source declaration is DECL. */
bbf6f052 10953
ca695ac9
JB
10954void
10955bc_load_memory (type, decl)
10956 tree type, decl;
10957{
10958 enum bytecode_opcode opcode;
10959
10960
10961 /* Bit fields are special. We only know about signed and
10962 unsigned ints, and enums. The latter are treated as
10963 signed integers. */
10964
10965 if (DECL_BIT_FIELD (decl))
10966 if (TREE_CODE (type) == ENUMERAL_TYPE
10967 || TREE_CODE (type) == INTEGER_TYPE)
10968 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
10969 else
10970 abort ();
10971 else
10972 /* See corresponding comment in bc_store_memory(). */
10973 if (TYPE_MODE (type) == BLKmode
10974 || TYPE_MODE (type) == VOIDmode)
10975 return;
10976 else
6bd6178d 10977 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
bbf6f052 10978
ca695ac9
JB
10979 if (opcode == neverneverland)
10980 abort ();
10981
10982 bc_emit_bytecode (opcode);
10983
10984#ifdef DEBUG_PRINT_CODE
10985 fputc ('\n', stderr);
10986#endif
bbf6f052 10987}
bbf6f052 10988
bbf6f052 10989
ca695ac9
JB
10990/* Store the contents of the second stack slot to the address in the
10991 top stack slot. DECL is the declaration of the destination and is used
10992 to determine whether we're dealing with a bitfield. */
bbf6f052 10993
ca695ac9
JB
10994void
10995bc_store_memory (type, decl)
10996 tree type, decl;
10997{
10998 enum bytecode_opcode opcode;
10999
11000
11001 if (DECL_BIT_FIELD (decl))
f81497d9 11002 {
ca695ac9
JB
11003 if (TREE_CODE (type) == ENUMERAL_TYPE
11004 || TREE_CODE (type) == INTEGER_TYPE)
11005 opcode = sstoreBI;
f81497d9 11006 else
ca695ac9 11007 abort ();
f81497d9 11008 }
ca695ac9
JB
11009 else
11010 if (TYPE_MODE (type) == BLKmode)
11011 {
11012 /* Copy structure. This expands to a block copy instruction, storeBLK.
11013 In addition to the arguments expected by the other store instructions,
11014 it also expects a type size (SImode) on top of the stack, which is the
11015 structure size in size units (usually bytes). The two first arguments
11016 are already on the stack; so we just put the size on level 1. For some
11017 other languages, the size may be variable, this is why we don't encode
11018 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
11019
11020 bc_expand_expr (TYPE_SIZE (type));
11021 opcode = storeBLK;
11022 }
11023 else
6bd6178d 11024 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
f81497d9 11025
ca695ac9
JB
11026 if (opcode == neverneverland)
11027 abort ();
11028
11029 bc_emit_bytecode (opcode);
11030
11031#ifdef DEBUG_PRINT_CODE
11032 fputc ('\n', stderr);
11033#endif
f81497d9
RS
11034}
11035
f81497d9 11036
ca695ac9
JB
11037/* Allocate local stack space sufficient to hold a value of the given
11038 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
11039 integral power of 2. A special case is locals of type VOID, which
11040 have size 0 and alignment 1; any "voidish" SIZE or ALIGNMENT is
11041 remapped into the corresponding attribute of SI. */
11042
11043rtx
11044bc_allocate_local (size, alignment)
11045 int size, alignment;
f81497d9 11046{
ca695ac9
JB
11047 rtx retval;
11048 int byte_alignment;
f81497d9 11049
ca695ac9
JB
11050 if (size < 0)
11051 abort ();
f81497d9 11052
ca695ac9
JB
11053 /* Normalize size and alignment */
11054 if (!size)
11055 size = UNITS_PER_WORD;
bbf6f052 11056
ca695ac9
JB
11057 if (alignment < BITS_PER_UNIT)
11058 byte_alignment = 1 << (INT_ALIGN - 1);
11059 else
11060 /* Align */
11061 byte_alignment = alignment / BITS_PER_UNIT;
bbf6f052 11062
ca695ac9
JB
11063 if (local_vars_size & (byte_alignment - 1))
11064 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
bbf6f052 11065
ca695ac9
JB
11066 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11067 local_vars_size += size;
bbf6f052 11068
ca695ac9 11069 return retval;
bbf6f052
RK
11070}
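
The alignment step in bc_allocate_local is the standard round-up-to-a-
power-of-2 idiom; restated on its own:

#include <assert.h>

int
round_up (int offset, int align)       /* align must be a power of 2 */
{
  if (offset & (align - 1))
    offset += align - (offset & (align - 1));
  return offset;                       /* same as (offset + align - 1) & -align */
}

int
main (void)
{
  assert (round_up (0, 4) == 0);
  assert (round_up (1, 4) == 4);
  assert (round_up (4, 4) == 4);
  assert (round_up (13, 8) == 16);
  return 0;
}
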
11071
bbf6f052 11072
ca695ac9
JB
11073/* Allocate variable-sized local array. Variable-sized arrays are
11074 actually pointers to the address in memory where they are stored. */
11075
11076rtx
11077bc_allocate_variable_array (size)
11078 tree size;
bbf6f052 11079{
ca695ac9
JB
11080 rtx retval;
11081 const int ptralign = (1 << (PTR_ALIGN - 1));
bbf6f052 11082
ca695ac9
JB
11083 /* Align pointer */
11084 if (local_vars_size & ptralign)
11085 local_vars_size += ptralign - (local_vars_size & ptralign);
bbf6f052 11086
ca695ac9
JB
11087 /* Note down local space needed: pointer to block; also return
11088 dummy rtx */
bbf6f052 11089
ca695ac9
JB
11090 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11091 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
11092 return retval;
bbf6f052 11093}
bbf6f052 11094
bbf6f052 11095
ca695ac9
JB
11096/* Push the machine address for the given external variable offset. */
11097void
11098bc_load_externaddr (externaddr)
11099 rtx externaddr;
11100{
11101 bc_emit_bytecode (constP);
e7a42772
JB
11102 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
11103 BYTECODE_BC_LABEL (externaddr)->offset);
bbf6f052 11104
ca695ac9
JB
11105#ifdef DEBUG_PRINT_CODE
11106 fputc ('\n', stderr);
11107#endif
bbf6f052
RK
11108}
11109
bbf6f052 11110
ca695ac9
JB
11111/* Like above, but expects an IDENTIFIER. */
11112void
11113bc_load_externaddr_id (id, offset)
11114 tree id;
11115 int offset;
11116{
11117 if (!IDENTIFIER_POINTER (id))
11118 abort ();
bbf6f052 11119
ca695ac9 11120 bc_emit_bytecode (constP);
3d8e9bc2 11121 bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id)), offset);
bbf6f052 11122
ca695ac9
JB
11123#ifdef DEBUG_PRINT_CODE
11124 fputc ('\n', stderr);
11125#endif
11126}
bbf6f052 11127
bbf6f052 11128
ca695ac9
JB
11129/* Push the machine address for the given local variable offset. */
11130void
11131bc_load_localaddr (localaddr)
11132 rtx localaddr;
11133{
e7a42772 11134 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
bbf6f052 11135}
bbf6f052 11136
bbf6f052 11137
ca695ac9
JB
11138/* Push the machine address for the given parameter offset.
11139 NOTE: offset is in bits. */
11140void
11141bc_load_parmaddr (parmaddr)
11142 rtx parmaddr;
bbf6f052 11143{
e7a42772
JB
11144 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
11145 / BITS_PER_UNIT));
ca695ac9 11146}
bbf6f052 11147
ca695ac9
JB
11148
11149/* Convert a[i] into *(a + i). */
11150tree
11151bc_canonicalize_array_ref (exp)
11152 tree exp;
11153{
11154 tree type = TREE_TYPE (exp);
11155 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
11156 TREE_OPERAND (exp, 0));
11157 tree index = TREE_OPERAND (exp, 1);
11158
11159
11160 /* Convert the integer argument to a type the same size as a pointer
11161 so the multiply won't overflow spuriously. */
11162
11163 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
11164 index = convert (type_for_size (POINTER_SIZE, 0), index);
11165
11166 /* The array address isn't volatile even if the array is.
11167 (Of course this isn't terribly relevant since the bytecode
11168 translator treats nearly everything as volatile anyway.) */
11169 TREE_THIS_VOLATILE (array_adr) = 0;
11170
11171 return build1 (INDIRECT_REF, type,
11172 fold (build (PLUS_EXPR,
11173 TYPE_POINTER_TO (type),
11174 array_adr,
11175 fold (build (MULT_EXPR,
11176 TYPE_POINTER_TO (type),
11177 index,
11178 size_in_bytes (type))))));
bbf6f052
RK
11179}
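
The tree built by bc_canonicalize_array_ref corresponds to the following
source-level identity (the long cast stands in for the pointer-sized index
conversion):

#include <assert.h>

int
main (void)
{
  int a[4] = { 10, 20, 30, 40 };
  long i = 2;                          /* index widened to pointer size */
  int *elt = (int *) ((char *) a + i * sizeof (int));
  assert (*elt == a[i]);               /* a[i] is *(a + i), scaled */
  return 0;
}
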
11180
bbf6f052 11181
ca695ac9
JB
11182/* Load the address of the component referenced by the given
11183 COMPONENT_REF expression.
bbf6f052 11184
ca695ac9 11185 Returns innermost lvalue. */
bbf6f052 11186
ca695ac9
JB
11187tree
11188bc_expand_component_address (exp)
11189 tree exp;
bbf6f052 11190{
ca695ac9
JB
11191 tree tem, chain;
11192 enum machine_mode mode;
11193 int bitpos = 0;
11194 HOST_WIDE_INT SIval;
a7c5971a 11195
bbf6f052 11196
ca695ac9
JB
11197 tem = TREE_OPERAND (exp, 1);
11198 mode = DECL_MODE (tem);
bbf6f052 11199
ca695ac9
JB
11200
11201 /* Compute cumulative bit offset for nested component refs
11202 and array refs, and find the ultimate containing object. */
11203
11204 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
bbf6f052 11205 {
ca695ac9
JB
11206 if (TREE_CODE (tem) == COMPONENT_REF)
11207 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
11208 else
11209 if (TREE_CODE (tem) == ARRAY_REF
11210 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11211 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
bbf6f052 11212
ca695ac9
JB
11213 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
11214 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
11215 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
11216 else
11217 break;
11218 }
bbf6f052 11219
c02bd5d9 11220 bc_expand_expr (tem);
bbf6f052 11221
cd1b4b44 11222
ca695ac9
JB
11223 /* For bitfields also push their offset and size */
11224 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
11225 bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
11226 else
11227 if ((SIval = bitpos / BITS_PER_UNIT) != 0)
11228 bc_emit_instruction (addconstPSI, SIval);
bbf6f052 11229
ca695ac9 11230 return (TREE_OPERAND (exp, 1));
bbf6f052 11231}
e7c33f54 11232
bbf6f052 11233
ca695ac9
JB
11234/* Emit code to push two SI constants */
11235void
11236bc_push_offset_and_size (offset, size)
11237 HOST_WIDE_INT offset, size;
11238{
11239 bc_emit_instruction (constSI, offset);
11240 bc_emit_instruction (constSI, size);
11241}
bbf6f052 11242
bbf6f052 11243
ca695ac9
JB
11244/* Emit byte code to push the address of the given lvalue expression to
11245 the stack. If it's a bit field, we also push offset and size info.
bbf6f052 11246
ca695ac9
JB
11247 Returns innermost component, which allows us to determine not only
11248 its type, but also whether it's a bitfield. */
11249
11250tree
11251bc_expand_address (exp)
bbf6f052 11252 tree exp;
bbf6f052 11253{
ca695ac9
JB
11254 /* Safeguard */
11255 if (!exp || TREE_CODE (exp) == ERROR_MARK)
11256 return (exp);
bbf6f052 11257
e7c33f54 11258
ca695ac9
JB
11259 switch (TREE_CODE (exp))
11260 {
11261 case ARRAY_REF:
e7c33f54 11262
ca695ac9 11263 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
e7c33f54 11264
ca695ac9 11265 case COMPONENT_REF:
bbf6f052 11266
ca695ac9 11267 return (bc_expand_component_address (exp));
bbf6f052 11268
ca695ac9 11269 case INDIRECT_REF:
bbf6f052 11270
ca695ac9
JB
11271 bc_expand_expr (TREE_OPERAND (exp, 0));
11272
11273 /* For variable-sized types: retrieve pointer. Sometimes the
11274 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
11275 also make sure we have an operand, just in case... */
11276
11277 if (TREE_OPERAND (exp, 0)
11278 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
11279 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
11280 bc_emit_instruction (loadP);
11281
11282 /* If packed, also return offset and size */
11283 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
11284
11285 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
11286 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
11287
11288 return (TREE_OPERAND (exp, 0));
11289
11290 case FUNCTION_DECL:
11291
e7a42772
JB
11292 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11293 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
bbf6f052 11294 break;
ca695ac9
JB
11295
11296 case PARM_DECL:
11297
11298 bc_load_parmaddr (DECL_RTL (exp));
11299
11300 /* For variable-sized types: retrieve pointer */
11301 if (TYPE_SIZE (TREE_TYPE (exp))
11302 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11303 bc_emit_instruction (loadP);
11304
11305 /* If packed, also return offset and size */
11306 if (DECL_BIT_FIELD (exp))
11307 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11308 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11309
bbf6f052 11310 break;
ca695ac9
JB
11311
11312 case RESULT_DECL:
11313
11314 bc_emit_instruction (returnP);
bbf6f052 11315 break;
ca695ac9
JB
11316
11317 case VAR_DECL:
11318
11319#if 0
e7a42772 11320 if (BYTECODE_LABEL (DECL_RTL (exp)))
ca695ac9
JB
11321 bc_load_externaddr (DECL_RTL (exp));
11322#endif
11323
11324 if (DECL_EXTERNAL (exp))
e7a42772 11325 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
eb862a37 11326 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
bbf6f052 11327 else
ca695ac9
JB
11328 bc_load_localaddr (DECL_RTL (exp));
11329
11330 /* For variable-sized types: retrieve pointer */
11331 if (TYPE_SIZE (TREE_TYPE (exp))
11332 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11333 bc_emit_instruction (loadP);
11334
11335 /* If packed, also return offset and size */
11336 if (DECL_BIT_FIELD (exp))
11337 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11338 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11339
bbf6f052 11340 break;
ca695ac9
JB
11341
11342 case STRING_CST:
11343 {
11344 rtx r;
11345
11346 bc_emit_bytecode (constP);
11347 r = output_constant_def (exp);
e7a42772 11348 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
ca695ac9
JB
11349
11350#ifdef DEBUG_PRINT_CODE
11351 fputc ('\n', stderr);
11352#endif
11353 }
bbf6f052 11354 break;
ca695ac9 11355
bbf6f052 11356 default:
bbf6f052 11357
ca695ac9
JB
11358 abort ();
11359 break;
bbf6f052
RK
11360 }
11361
ca695ac9
JB
11362 /* Most lvalues don't have components. */
11363 return (exp);
11364}
bbf6f052 11365
ca695ac9
JB
11366
11367/* Emit a type code to be used by the runtime support in handling
11368 parameter passing. The type code consists of the machine mode
11369 plus the minimal alignment shifted left 8 bits. */
11370
11371tree
11372bc_runtime_type_code (type)
11373 tree type;
11374{
11375 int val;
11376
11377 switch (TREE_CODE (type))
bbf6f052 11378 {
ca695ac9
JB
11379 case VOID_TYPE:
11380 case INTEGER_TYPE:
11381 case REAL_TYPE:
11382 case COMPLEX_TYPE:
11383 case ENUMERAL_TYPE:
11384 case POINTER_TYPE:
11385 case RECORD_TYPE:
11386
6bd6178d 11387 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
ca695ac9
JB
11388 break;
11389
11390 case ERROR_MARK:
11391
11392 val = 0;
11393 break;
11394
11395 default:
af508edd 11396
ca695ac9
JB
11397 abort ();
11398 }
11399 return build_int_2 (val, 0);
11400}
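
Decoding the value built above is a mask and a shift; a small sketch with
made-up mode and alignment numbers:

#include <assert.h>

int
main (void)
{
  int mode = 7, align = 32;            /* hypothetical values */
  int code = mode | align << 8;        /* the encoding used above */

  assert ((code & 0xff) == mode);      /* low 8 bits: machine mode */
  assert ((code >> 8) == align);       /* upper bits: minimal alignment */
  return 0;
}
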
af508edd 11401
af508edd 11402
ca695ac9
JB
11403/* Generate constructor label */
11404char *
11405bc_gen_constr_label ()
11406{
11407 static int label_counter;
11408 static char label[20];
bbf6f052 11409
ca695ac9 11410 sprintf (label, "*LR%d", label_counter++);
bbf6f052 11411
ca695ac9
JB
11412 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
11413}
bbf6f052 11414
bbf6f052 11415
ca695ac9
JB
11416/* Evaluate constructor CONSTR and return pointer to it on level one. We
11417 expand the constructor data as static data, and push a pointer to it.
11418 The pointer is put in the pointer table and is retrieved by a constP
11419 bytecode instruction. We then loop and store each constructor member in
11420 the corresponding component. Finally, we return the original pointer on
11421 the stack. */
af508edd 11422
ca695ac9
JB
11423void
11424bc_expand_constructor (constr)
11425 tree constr;
11426{
11427 char *l;
11428 HOST_WIDE_INT ptroffs;
11429 rtx constr_rtx;
bbf6f052 11430
ca695ac9
JB
11431
11432 /* Literal constructors are handled as constants, whereas
11433 non-literals are evaluated and stored element by element
11434 into the data segment. */
11435
11436 /* Allocate space in proper segment and push pointer to space on stack.
11437 */
bbf6f052 11438
ca695ac9 11439 l = bc_gen_constr_label ();
bbf6f052 11440
ca695ac9 11441 if (TREE_CONSTANT (constr))
bbf6f052 11442 {
ca695ac9
JB
11443 text_section ();
11444
11445 bc_emit_const_labeldef (l);
11446 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
bbf6f052 11447 }
ca695ac9
JB
11448 else
11449 {
11450 data_section ();
bbf6f052 11451
ca695ac9
JB
11452 bc_emit_data_labeldef (l);
11453 bc_output_data_constructor (constr);
11454 }
bbf6f052 11455
ca695ac9
JB
11456
11457 /* Add reference to pointer table and recall pointer to stack;
11458 this code is common for both types of constructors: literals
11459 and non-literals. */
bbf6f052 11460
de7d9320
JB
11461 ptroffs = bc_define_pointer (l);
11462 bc_emit_instruction (constP, ptroffs);
d39985fa 11463
ca695ac9
JB
11464 /* This is all that has to be done if it's a literal. */
11465 if (TREE_CONSTANT (constr))
11466 return;
bbf6f052 11467
ca695ac9
JB
11468
11469 /* At this point, we have the pointer to the structure on top of the stack.
11470 Generate sequences of store_memory calls for the constructor. */
11471
11472 /* constructor type is structure */
11473 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
e7c33f54 11474 {
ca695ac9
JB
11475 register tree elt;
11476
11477 /* If the constructor has fewer fields than the structure,
11478 clear the whole structure first. */
11479
11480 if (list_length (CONSTRUCTOR_ELTS (constr))
11481 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
11482 {
6d6e61ce 11483 bc_emit_instruction (duplicate);
ca695ac9
JB
11484 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11485 bc_emit_instruction (clearBLK);
11486 }
11487
11488      /* Store each element of the constructor into the corresponding
11489	 field of the structure.  */
11490
11491 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
11492 {
11493 register tree field = TREE_PURPOSE (elt);
11494 register enum machine_mode mode;
11495 int bitsize;
11496 int bitpos;
11497 int unsignedp;
11498
11499 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
11500 mode = DECL_MODE (field);
11501 unsignedp = TREE_UNSIGNED (field);
11502
11503 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
11504
11505 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11506 /* The alignment of TARGET is
11507 at least what its type requires. */
11508 VOIDmode, 0,
11509 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11510 int_size_in_bytes (TREE_TYPE (constr)));
11511 }
e7c33f54 11512 }
ca695ac9
JB
11513 else
11514
11515  /* The constructor's type is an array.  */
11516 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
11517 {
11518 register tree elt;
11519 register int i;
11520 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
11521 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
11522 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
11523 tree elttype = TREE_TYPE (TREE_TYPE (constr));
11524
11525      /* If the constructor has fewer elements than the array,
11526	 clear the whole array first.  */
11527
11528 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
11529 {
6d6e61ce 11530 bc_emit_instruction (duplicate);
ca695ac9
JB
11531 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11532 bc_emit_instruction (clearBLK);
11533 }
11534
11535
11536      /* Store each element of the constructor into the corresponding
11537	 element of the array, determined by counting the elements.  */
11538
11539 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
11540 elt;
11541 elt = TREE_CHAIN (elt), i++)
11542 {
11543 register enum machine_mode mode;
11544 int bitsize;
11545 int bitpos;
11546 int unsignedp;
11547
11548 mode = TYPE_MODE (elttype);
11549 bitsize = GET_MODE_BITSIZE (mode);
11550 unsignedp = TREE_UNSIGNED (elttype);
11551
11552 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
11553 /* * TYPE_SIZE_UNIT (elttype) */ );
11554
11555 bc_store_field (elt, bitsize, bitpos, mode,
11556 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11557 /* The alignment of TARGET is
11558 at least what its type requires. */
11559 VOIDmode, 0,
11560 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11561 int_size_in_bytes (TREE_TYPE (constr)));
11562 }
11563
11564 }
11565}
bbf6f052 11566
bbf6f052 11567
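/* Editorial aside -- a minimal sketch, not part of expr.c: the
   clear-then-store strategy bc_expand_constructor uses, in plain C on a
   hypothetical int array.  Zeroing the whole block first makes any
   elements the initializer does not list come out zero.  */

#include <string.h>

static void init_array (int *dest, int nelts, const int *vals, int nvals)
{
  int i;

  /* Like the duplicate/constSI/clearBLK sequence above.  */
  if (nvals < nelts)
    memset (dest, 0, nelts * sizeof *dest);

  /* Like the bc_store_field loop: store each listed element.  */
  for (i = 0; i < nvals; i++)
    dest[i] = vals[i];
}

/* End of editorial aside.  */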
ca695ac9
JB
11568/* Store the value of EXP (an expression tree) into member FIELD of the
11569   structure whose address is on the stack.  The member has type TYPE and
11570   mode MODE, and occupies BITSIZE bits, starting BITPOS bits from the
11571   beginning of the structure.
bbf6f052 11572
ca695ac9
JB
11573   ALIGN is the alignment the structure is known to have, measured in
11574   bytes.  TOTAL_SIZE is its size in bytes, or -1 if variable. */
bbf6f052 11575
ca695ac9
JB
11576void
11577bc_store_field (field, bitsize, bitpos, mode, exp, type,
11578 value_mode, unsignedp, align, total_size)
11579 int bitsize, bitpos;
11580 enum machine_mode mode;
11581 tree field, exp, type;
11582 enum machine_mode value_mode;
11583 int unsignedp;
11584 int align;
11585 int total_size;
11586{
bbf6f052 11587
ca695ac9
JB
11588  /* Expand the expression, then copy the structure pointer on top of it.  */
11589 bc_expand_expr (exp);
11590 bc_emit_instruction (over);
bbf6f052 11591
bbf6f052 11592
ca695ac9
JB
11593 /* If the component is a bit field, we cannot use addressing to access
11594 it. Use bit-field techniques to store in it. */
bbf6f052 11595
ca695ac9
JB
11596 if (DECL_BIT_FIELD (field))
11597 {
11598 bc_store_bit_field (bitpos, bitsize, unsignedp);
11599 return;
11600 }
11601 else
11602    /* Not a bit field.  */
11603 {
11604 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
11605
11606      /* Advance the pointer to the desired member.  */
11607 if (offset)
11608 bc_emit_instruction (addconstPSI, offset);
11609
11610      /* Store the value.  */
11611 bc_store_memory (type, field);
11612 }
11613}
bbf6f052 11614
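/* Editorial aside -- a minimal sketch, not part of expr.c: the
   non-bit-field path of bc_store_field, in plain C.  The names are
   hypothetical; memcpy stands in for bc_store_memory.  */

#include <string.h>

static void store_member (void *struct_ptr, long bitpos,
			  const void *value, long size)
{
  /* Like addconstPSI: advance the pointer by the byte offset.
     Non-bit-field members are byte aligned, so the division by
     BITS_PER_UNIT (8 here) is exact.  */
  char *p = (char *) struct_ptr + bitpos / 8;

  /* Like bc_store_memory: copy the value into place.  */
  memcpy (p, value, (size_t) size);
}

/* End of editorial aside.  */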
ca695ac9
JB
11615
11616/* Store SI/SU in a bit field.  */
bbf6f052 11617void
ca695ac9
JB
11618bc_store_bit_field (offset, size, unsignedp)
11619 int offset, size, unsignedp;
bbf6f052 11620{
ca695ac9
JB
11621  /* Push the bit-field offset and size.  */
11622 bc_push_offset_and_size (offset, size);
bbf6f052 11623
ca695ac9
JB
11624  /* Store the value.  */
11625 bc_emit_instruction (sstoreBI);
11626}
e87b4f3f 11627
88d3b7f0 11628
ca695ac9
JB
11629/* Load SI/SU from a bit field.  */
11630void
11631bc_load_bit_field (offset, size, unsignedp)
11632 int offset, size, unsignedp;
11633{
11634  /* Push the bit-field offset and size.  */
11635 bc_push_offset_and_size (offset, size);
88d3b7f0 11636
ca695ac9
JB
11637  /* Load: sign-extend if signed, else zero-extend.  */
11638 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
11639}
709f5be1 11640
bbf6f052 11641
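/* Editorial aside -- a minimal sketch, not part of expr.c: what the
   zxloadBI/sxloadBI pair above does, expressed in C on a 32-bit word.
   It assumes the bit offset is counted from the least significant bit;
   the real bytecodes take the offset and size pushed by
   bc_push_offset_and_size.  */

#include <stdint.h>

static int32_t load_bits (uint32_t w, int offset, int size, int unsignedp)
{
  uint32_t mask = size < 32 ? (1u << size) - 1 : ~0u;
  uint32_t v = (w >> offset) & mask;	/* extract, zero-extended */

  /* sxloadBI: if the field is signed and its top bit is set,
     replicate that bit upwards.  */
  if (!unsignedp && size < 32 && (v & (1u << (size - 1))))
    v |= ~0u << size;

  return (int32_t) v;
}

/* End of editorial aside.  */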
ca695ac9
JB
11642/* Adjust the interpreter stack by NLEVELS.  Positive means drop NLEVELS
11643   levels (adjust the stack pointer upwards); negative means push that
11644   many levels (adjust the stack pointer downwards).  Only positive
11645   values normally make sense. */
bbf6f052 11646
ca695ac9
JB
11647void
11648bc_adjust_stack (nlevels)
11649 int nlevels;
11650{
11651 switch (nlevels)
11652 {
11653 case 0:
11654 break;
11655
11656 case 2:
11657      bc_emit_instruction (drop);
11658      /* Fall through and drop a second level.  */
11659 case 1:
11660 bc_emit_instruction (drop);
11661 break;
11662
11663 default:
11664
11665 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
11666 stack_depth -= nlevels;
11667 }
11668
a68c7608
RS
11669#if defined (VALIDATE_STACK_FOR_BC)
11670 VALIDATE_STACK_FOR_BC ();
bbf6f052
RK
11671#endif
11672}
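/* Editorial aside -- a minimal sketch, not part of expr.c: the dispatch
   bc_adjust_stack performs, in plain C.  The hypothetical emit() stands
   in for bc_emit_instruction.  The point is that the common one- and
   two-level pops use the operand-less `drop' (case 2 falling through to
   case 1 above), while larger adjustments pay for an explicit
   adjstackSI operand.  */

#include <stdio.h>

static void emit (const char *insn, long arg)
{
  printf ("\t%s %ld\n", insn, arg);
}

static void adjust_stack (int nlevels)
{
  if (nlevels == 1 || nlevels == 2)
    while (nlevels--)
      emit ("drop", 0);		/* cheap operand-less pops */
  else if (nlevels != 0)
    emit ("adjstackSI", nlevels);	/* general adjustment */
}

/* End of editorial aside.  */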