gcc/expr.c  (blob ee68dff23ed13535ad56e64a191b9e2c343db274)
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
3 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "recog.h"
39 #include "reload.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "defaults.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "intl.h"
46 #include "tm_p.h"
47
48 #ifndef ACCUMULATE_OUTGOING_ARGS
49 #define ACCUMULATE_OUTGOING_ARGS 0
50 #endif
51
52 /* Supply a default definition for PUSH_ARGS. */
53 #ifndef PUSH_ARGS
54 #ifdef PUSH_ROUNDING
55 #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
56 #else
57 #define PUSH_ARGS 0
58 #endif
59 #endif
60
61 /* Decide whether a function's arguments should be processed
62 from first to last or from last to first.
63
64 They should if the stack and args grow in opposite directions, but
65 only if we have push insns. */
66
67 #ifdef PUSH_ROUNDING
68
69 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
70 #define PUSH_ARGS_REVERSED /* If it's last to first. */
71 #endif
72
73 #endif
74
75 #ifndef STACK_PUSH_CODE
76 #ifdef STACK_GROWS_DOWNWARD
77 #define STACK_PUSH_CODE PRE_DEC
78 #else
79 #define STACK_PUSH_CODE PRE_INC
80 #endif
81 #endif
82
83 /* Assume that case vectors are not pc-relative. */
84 #ifndef CASE_VECTOR_PC_RELATIVE
85 #define CASE_VECTOR_PC_RELATIVE 0
86 #endif
87
88 /* Hook called by safe_from_p for language-specific tree codes. It is
89 up to the language front-end to install a hook if it has any such
90 codes that safe_from_p needs to know about.  Since safe_from_p will
91 recursively explore the TREE_OPERANDs of an expression, this hook
92 should not reexamine those pieces. This routine may recursively
93 call safe_from_p; it should always pass `0' as the TOP_P
94 parameter. */
95 int (*lang_safe_from_p) PARAMS ((rtx, tree));
96
97 /* If this is nonzero, we do not bother generating VOLATILE
98 around volatile memory references, and we are willing to
99 output indirect addresses. If cse is to follow, we reject
100 indirect addresses so a useful potential cse is generated;
101 if it is used only once, instruction combination will produce
102 the same indirect address eventually. */
103 int cse_not_expected;
104
105 /* Nonzero to generate code for all the subroutines within an
106 expression before generating the upper levels of the expression.
107 Nowadays this is never zero. */
108 int do_preexpand_calls = 1;
109
110 /* Don't check memory usage, since code is being emitted to check memory
111 usage. Used when current_function_check_memory_usage is true, to avoid
112 infinite recursion. */
113 static int in_check_memory_usage;
114
115 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
116 static tree placeholder_list = 0;
117
118 /* This structure is used by move_by_pieces to describe the move to
119 be performed. */
120 struct move_by_pieces
121 {
122 rtx to;
123 rtx to_addr;
124 int autinc_to;
125 int explicit_inc_to;
126 rtx from;
127 rtx from_addr;
128 int autinc_from;
129 int explicit_inc_from;
130 unsigned HOST_WIDE_INT len;
131 HOST_WIDE_INT offset;
132 int reverse;
133 };
134
135 /* This structure is used by clear_by_pieces to describe the clear to
136 be performed. */
137
138 struct clear_by_pieces
139 {
140 rtx to;
141 rtx to_addr;
142 int autinc_to;
143 int explicit_inc_to;
144 unsigned HOST_WIDE_INT len;
145 HOST_WIDE_INT offset;
146 int reverse;
147 };
148
149 extern struct obstack permanent_obstack;
150
151 static rtx get_push_address PARAMS ((int));
152
153 static rtx enqueue_insn PARAMS ((rtx, rtx));
154 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
155 PARAMS ((unsigned HOST_WIDE_INT,
156 unsigned int));
157 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
158 struct move_by_pieces *));
159 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
160 unsigned int));
161 static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
162 enum machine_mode,
163 struct clear_by_pieces *));
164 static rtx get_subtarget PARAMS ((rtx));
165 static int is_zeros_p PARAMS ((tree));
166 static int mostly_zeros_p PARAMS ((tree));
167 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
168 HOST_WIDE_INT, enum machine_mode,
169 tree, tree, unsigned int, int,
170 int));
171 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
172 HOST_WIDE_INT));
173 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
174 HOST_WIDE_INT, enum machine_mode,
175 tree, enum machine_mode, int,
176 unsigned int, HOST_WIDE_INT, int));
177 static enum memory_use_mode
178 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
179 static tree save_noncopied_parts PARAMS ((tree, tree));
180 static tree init_noncopied_parts PARAMS ((tree, tree));
181 static int fixed_type_p PARAMS ((tree));
182 static rtx var_rtx PARAMS ((tree));
183 static int readonly_fields_p PARAMS ((tree));
184 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
185 static rtx expand_increment PARAMS ((tree, int, int));
186 static void preexpand_calls PARAMS ((tree));
187 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
188 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
189 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
190 rtx, rtx));
191 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
192
193 /* Record for each mode whether we can move a register directly to or
194 from an object of that mode in memory. If we can't, we won't try
195 to use that mode directly when accessing a field of that mode. */
196
197 static char direct_load[NUM_MACHINE_MODES];
198 static char direct_store[NUM_MACHINE_MODES];
199
200 /* If a memory-to-memory move would take MOVE_RATIO or more simple
201 move-instruction sequences, we will do a movstr or libcall instead. */
202
203 #ifndef MOVE_RATIO
204 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
205 #define MOVE_RATIO 2
206 #else
207 /* If we are optimizing for space (-Os), cut down the default move ratio. */
208 #define MOVE_RATIO (optimize_size ? 3 : 15)
209 #endif
210 #endif
211
212 /* This macro is used to determine whether move_by_pieces should be called
213 to perform a structure copy. */
214 #ifndef MOVE_BY_PIECES_P
215 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
216 (move_by_pieces_ninsns (SIZE, ALIGN) < MOVE_RATIO)
217 #endif
218
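/* Illustrative note (not part of the original source): a rough sketch of
   how the MOVE_RATIO / MOVE_BY_PIECES_P heuristic plays out.  On a
   hypothetical 32-bit target (MOVE_MAX == 4) with no movstr pattern and
   not optimizing for size, MOVE_RATIO is 15; a word-aligned 16-byte copy
   costs move_by_pieces_ninsns (16, 32) == 4 SImode moves, and 4 < 15, so
   the copy is done inline by move_by_pieces.  If the port defines
   movstrsi, MOVE_RATIO drops to 2 and the same copy is handed to the
   movstr pattern or a library call instead, since 4 < 2 fails.  */
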
219 /* This array records the insn_code of insns to perform block moves. */
220 enum insn_code movstr_optab[NUM_MACHINE_MODES];
221
222 /* This array records the insn_code of insns to perform block clears. */
223 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
224
225 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
226
227 #ifndef SLOW_UNALIGNED_ACCESS
228 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
229 #endif
230 \f
231 /* This is run once per compilation to set up which modes can be used
232 directly in memory and to initialize the block move optab. */
233
234 void
235 init_expr_once ()
236 {
237 rtx insn, pat;
238 enum machine_mode mode;
239 int num_clobbers;
240 rtx mem, mem1;
241 char *free_point;
242
243 start_sequence ();
244
245 /* Since we are on the permanent obstack, we must be sure we save this
246 spot AFTER we call start_sequence, since it will reuse the rtl it
247 makes. */
248 free_point = (char *) oballoc (0);
249
250 /* Try indexing by frame ptr and try by stack ptr.
251 It is known that on the Convex the stack ptr isn't a valid index.
252 With luck, one or the other is valid on any machine. */
253 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
254 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
255
256 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
257 pat = PATTERN (insn);
258
259 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
260 mode = (enum machine_mode) ((int) mode + 1))
261 {
262 int regno;
263 rtx reg;
264
265 direct_load[(int) mode] = direct_store[(int) mode] = 0;
266 PUT_MODE (mem, mode);
267 PUT_MODE (mem1, mode);
268
269 /* See if there is some register that can be used in this mode and
270 directly loaded or stored from memory. */
271
272 if (mode != VOIDmode && mode != BLKmode)
273 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
274 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
275 regno++)
276 {
277 if (! HARD_REGNO_MODE_OK (regno, mode))
278 continue;
279
280 reg = gen_rtx_REG (mode, regno);
281
282 SET_SRC (pat) = mem;
283 SET_DEST (pat) = reg;
284 if (recog (pat, insn, &num_clobbers) >= 0)
285 direct_load[(int) mode] = 1;
286
287 SET_SRC (pat) = mem1;
288 SET_DEST (pat) = reg;
289 if (recog (pat, insn, &num_clobbers) >= 0)
290 direct_load[(int) mode] = 1;
291
292 SET_SRC (pat) = reg;
293 SET_DEST (pat) = mem;
294 if (recog (pat, insn, &num_clobbers) >= 0)
295 direct_store[(int) mode] = 1;
296
297 SET_SRC (pat) = reg;
298 SET_DEST (pat) = mem1;
299 if (recog (pat, insn, &num_clobbers) >= 0)
300 direct_store[(int) mode] = 1;
301 }
302 }
303
304 end_sequence ();
305 obfree (free_point);
306 }
307
308 /* This is run at the start of compiling a function. */
309
310 void
311 init_expr ()
312 {
313 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
314
315 pending_chain = 0;
316 pending_stack_adjust = 0;
317 stack_pointer_delta = 0;
318 inhibit_defer_pop = 0;
319 saveregs_value = 0;
320 apply_args_value = 0;
321 forced_labels = 0;
322 }
323
324 void
325 mark_expr_status (p)
326 struct expr_status *p;
327 {
328 if (p == NULL)
329 return;
330
331 ggc_mark_rtx (p->x_saveregs_value);
332 ggc_mark_rtx (p->x_apply_args_value);
333 ggc_mark_rtx (p->x_forced_labels);
334 }
335
336 void
337 free_expr_status (f)
338 struct function *f;
339 {
340 free (f->expr);
341 f->expr = NULL;
342 }
343
344 /* Small sanity check that the queue is empty at the end of a function. */
345
346 void
347 finish_expr_for_function ()
348 {
349 if (pending_chain)
350 abort ();
351 }
352 \f
353 /* Manage the queue of increment instructions to be output
354 for POSTINCREMENT_EXPR expressions, etc. */
355
356 /* Queue up to increment (or change) VAR later. BODY says how:
357 BODY should be the same thing you would pass to emit_insn
358 to increment right away. It will go to emit_insn later on.
359
360 The value is a QUEUED expression to be used in place of VAR
361 where you want to guarantee the pre-incrementation value of VAR. */
362
363 static rtx
364 enqueue_insn (var, body)
365 rtx var, body;
366 {
367 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
368 body, pending_chain);
369 return pending_chain;
370 }
371
372 /* Use protect_from_queue to convert a QUEUED expression
373 into something that you can put immediately into an instruction.
374 If the queued incrementation has not happened yet,
375 protect_from_queue returns the variable itself.
376 If the incrementation has happened, protect_from_queue returns a temp
377 that contains a copy of the old value of the variable.
378
379 Any time an rtx which might possibly be a QUEUED is to be put
380 into an instruction, it must be passed through protect_from_queue first.
381 QUEUED expressions are not meaningful in instructions.
382
383 Do not pass a value through protect_from_queue and then hold
384 on to it for a while before putting it in an instruction!
385 If the queue is flushed in between, incorrect code will result. */
386
387 rtx
388 protect_from_queue (x, modify)
389 register rtx x;
390 int modify;
391 {
392 register RTX_CODE code = GET_CODE (x);
393
394 #if 0 /* A QUEUED can hang around after the queue is forced out. */
395 /* Shortcut for most common case. */
396 if (pending_chain == 0)
397 return x;
398 #endif
399
400 if (code != QUEUED)
401 {
402 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
403 use of autoincrement. Make a copy of the contents of the memory
404 location rather than a copy of the address, but not if the value is
405 of mode BLKmode. Don't modify X in place since it might be
406 shared. */
407 if (code == MEM && GET_MODE (x) != BLKmode
408 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
409 {
410 register rtx y = XEXP (x, 0);
411 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
412
413 MEM_COPY_ATTRIBUTES (new, x);
414
415 if (QUEUED_INSN (y))
416 {
417 register rtx temp = gen_reg_rtx (GET_MODE (new));
418 emit_insn_before (gen_move_insn (temp, new),
419 QUEUED_INSN (y));
420 return temp;
421 }
422 return new;
423 }
424 /* Otherwise, recursively protect the subexpressions of all
425 the kinds of rtx's that can contain a QUEUED. */
426 if (code == MEM)
427 {
428 rtx tem = protect_from_queue (XEXP (x, 0), 0);
429 if (tem != XEXP (x, 0))
430 {
431 x = copy_rtx (x);
432 XEXP (x, 0) = tem;
433 }
434 }
435 else if (code == PLUS || code == MULT)
436 {
437 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
438 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
439 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
440 {
441 x = copy_rtx (x);
442 XEXP (x, 0) = new0;
443 XEXP (x, 1) = new1;
444 }
445 }
446 return x;
447 }
448 /* If the increment has not happened, use the variable itself. */
449 if (QUEUED_INSN (x) == 0)
450 return QUEUED_VAR (x);
451 /* If the increment has happened and a pre-increment copy exists,
452 use that copy. */
453 if (QUEUED_COPY (x) != 0)
454 return QUEUED_COPY (x);
455 /* The increment has happened but we haven't set up a pre-increment copy.
456 Set one up now, and use it. */
457 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
458 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
459 QUEUED_INSN (x));
460 return QUEUED_COPY (x);
461 }
462
463 /* Return nonzero if X contains a QUEUED expression:
464 if it contains anything that will be altered by a queued increment.
465 We handle only combinations of MEM, PLUS, MINUS and MULT operators
466 since memory addresses generally contain only those. */
467
468 int
469 queued_subexp_p (x)
470 rtx x;
471 {
472 register enum rtx_code code = GET_CODE (x);
473 switch (code)
474 {
475 case QUEUED:
476 return 1;
477 case MEM:
478 return queued_subexp_p (XEXP (x, 0));
479 case MULT:
480 case PLUS:
481 case MINUS:
482 return (queued_subexp_p (XEXP (x, 0))
483 || queued_subexp_p (XEXP (x, 1)));
484 default:
485 return 0;
486 }
487 }
488
489 /* Perform all the pending incrementations. */
490
491 void
492 emit_queue ()
493 {
494 register rtx p;
495 while ((p = pending_chain))
496 {
497 rtx body = QUEUED_BODY (p);
498
499 if (GET_CODE (body) == SEQUENCE)
500 {
501 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
502 emit_insn (QUEUED_BODY (p));
503 }
504 else
505 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
506 pending_chain = QUEUED_NEXT (p);
507 }
508 }
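
/* Illustrative sketch (not part of the original source): the protocol a
   caller typically follows around the increment queue.  Any rtx that
   might contain a QUEUED must be passed through protect_from_queue
   before it is put into an insn, and emit_queue is called at a statement
   boundary to perform the pending side effects:

	x = protect_from_queue (x, 0);		-- read access
	y = protect_from_queue (y, 1);		-- y will be stored into
	emit_move_insn (y, x);
	emit_queue ();				-- flush pending increments

   emit_block_move below protects its operands in just this way before
   using them.  */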
509 \f
510 /* Copy data from FROM to TO, where the machine modes are not the same.
511 Both modes may be integer, or both may be floating.
512 UNSIGNEDP should be nonzero if FROM is an unsigned type.
513 This causes zero-extension instead of sign-extension. */
514
515 void
516 convert_move (to, from, unsignedp)
517 register rtx to, from;
518 int unsignedp;
519 {
520 enum machine_mode to_mode = GET_MODE (to);
521 enum machine_mode from_mode = GET_MODE (from);
522 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
523 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
524 enum insn_code code;
525 rtx libcall;
526
527 /* rtx code for making an equivalent value. */
528 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
529
530 to = protect_from_queue (to, 1);
531 from = protect_from_queue (from, 0);
532
533 if (to_real != from_real)
534 abort ();
535
536 /* If FROM is a SUBREG that indicates that we have already done at least
537 the required extension, strip it. We don't handle such SUBREGs as
538 TO here. */
539
540 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
541 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
542 >= GET_MODE_SIZE (to_mode))
543 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
544 from = gen_lowpart (to_mode, from), from_mode = to_mode;
545
546 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
547 abort ();
548
549 if (to_mode == from_mode
550 || (from_mode == VOIDmode && CONSTANT_P (from)))
551 {
552 emit_move_insn (to, from);
553 return;
554 }
555
556 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
557 {
558 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
559 abort ();
560
561 if (VECTOR_MODE_P (to_mode))
562 from = gen_rtx_SUBREG (to_mode, from, 0);
563 else
564 to = gen_rtx_SUBREG (from_mode, to, 0);
565
566 emit_move_insn (to, from);
567 return;
568 }
569
570 if (to_real != from_real)
571 abort ();
572
573 if (to_real)
574 {
575 rtx value, insns;
576
577 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
578 {
579 /* Try converting directly if the insn is supported. */
580 if ((code = can_extend_p (to_mode, from_mode, 0))
581 != CODE_FOR_nothing)
582 {
583 emit_unop_insn (code, to, from, UNKNOWN);
584 return;
585 }
586 }
587
588 #ifdef HAVE_trunchfqf2
589 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
590 {
591 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
592 return;
593 }
594 #endif
595 #ifdef HAVE_trunctqfqf2
596 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
597 {
598 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
599 return;
600 }
601 #endif
602 #ifdef HAVE_truncsfqf2
603 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
604 {
605 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
606 return;
607 }
608 #endif
609 #ifdef HAVE_truncdfqf2
610 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
611 {
612 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
613 return;
614 }
615 #endif
616 #ifdef HAVE_truncxfqf2
617 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
618 {
619 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
620 return;
621 }
622 #endif
623 #ifdef HAVE_trunctfqf2
624 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
625 {
626 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
627 return;
628 }
629 #endif
630
631 #ifdef HAVE_trunctqfhf2
632 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
633 {
634 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
635 return;
636 }
637 #endif
638 #ifdef HAVE_truncsfhf2
639 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
640 {
641 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
642 return;
643 }
644 #endif
645 #ifdef HAVE_truncdfhf2
646 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
647 {
648 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
649 return;
650 }
651 #endif
652 #ifdef HAVE_truncxfhf2
653 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
654 {
655 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
656 return;
657 }
658 #endif
659 #ifdef HAVE_trunctfhf2
660 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
661 {
662 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
663 return;
664 }
665 #endif
666
667 #ifdef HAVE_truncsftqf2
668 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
669 {
670 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
671 return;
672 }
673 #endif
674 #ifdef HAVE_truncdftqf2
675 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
676 {
677 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
678 return;
679 }
680 #endif
681 #ifdef HAVE_truncxftqf2
682 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
683 {
684 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
685 return;
686 }
687 #endif
688 #ifdef HAVE_trunctftqf2
689 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
690 {
691 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
692 return;
693 }
694 #endif
695
696 #ifdef HAVE_truncdfsf2
697 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
698 {
699 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
700 return;
701 }
702 #endif
703 #ifdef HAVE_truncxfsf2
704 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
705 {
706 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
707 return;
708 }
709 #endif
710 #ifdef HAVE_trunctfsf2
711 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
712 {
713 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
714 return;
715 }
716 #endif
717 #ifdef HAVE_truncxfdf2
718 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
719 {
720 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
721 return;
722 }
723 #endif
724 #ifdef HAVE_trunctfdf2
725 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
726 {
727 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
728 return;
729 }
730 #endif
731
732 libcall = (rtx) 0;
733 switch (from_mode)
734 {
735 case SFmode:
736 switch (to_mode)
737 {
738 case DFmode:
739 libcall = extendsfdf2_libfunc;
740 break;
741
742 case XFmode:
743 libcall = extendsfxf2_libfunc;
744 break;
745
746 case TFmode:
747 libcall = extendsftf2_libfunc;
748 break;
749
750 default:
751 break;
752 }
753 break;
754
755 case DFmode:
756 switch (to_mode)
757 {
758 case SFmode:
759 libcall = truncdfsf2_libfunc;
760 break;
761
762 case XFmode:
763 libcall = extenddfxf2_libfunc;
764 break;
765
766 case TFmode:
767 libcall = extenddftf2_libfunc;
768 break;
769
770 default:
771 break;
772 }
773 break;
774
775 case XFmode:
776 switch (to_mode)
777 {
778 case SFmode:
779 libcall = truncxfsf2_libfunc;
780 break;
781
782 case DFmode:
783 libcall = truncxfdf2_libfunc;
784 break;
785
786 default:
787 break;
788 }
789 break;
790
791 case TFmode:
792 switch (to_mode)
793 {
794 case SFmode:
795 libcall = trunctfsf2_libfunc;
796 break;
797
798 case DFmode:
799 libcall = trunctfdf2_libfunc;
800 break;
801
802 default:
803 break;
804 }
805 break;
806
807 default:
808 break;
809 }
810
811 if (libcall == (rtx) 0)
812 /* This conversion is not implemented yet. */
813 abort ();
814
815 start_sequence ();
816 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
817 1, from, from_mode);
818 insns = get_insns ();
819 end_sequence ();
820 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
821 from));
822 return;
823 }
824
825 /* Now both modes are integers. */
826
827 /* Handle expanding beyond a word. */
828 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
829 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
830 {
831 rtx insns;
832 rtx lowpart;
833 rtx fill_value;
834 rtx lowfrom;
835 int i;
836 enum machine_mode lowpart_mode;
837 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
838
839 /* Try converting directly if the insn is supported. */
840 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
841 != CODE_FOR_nothing)
842 {
843 /* If FROM is a SUBREG, put it into a register. Do this
844 so that we always generate the same set of insns for
845 better cse'ing; if an intermediate assignment occurred,
846 we won't be doing the operation directly on the SUBREG. */
847 if (optimize > 0 && GET_CODE (from) == SUBREG)
848 from = force_reg (from_mode, from);
849 emit_unop_insn (code, to, from, equiv_code);
850 return;
851 }
852 /* Next, try converting via full word. */
853 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
854 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
855 != CODE_FOR_nothing))
856 {
857 if (GET_CODE (to) == REG)
858 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
859 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
860 emit_unop_insn (code, to,
861 gen_lowpart (word_mode, to), equiv_code);
862 return;
863 }
864
865 /* No special multiword conversion insn; do it by hand. */
866 start_sequence ();
867
868 /* Since we will turn this into a no conflict block, we must ensure
869 that the source does not overlap the target. */
870
871 if (reg_overlap_mentioned_p (to, from))
872 from = force_reg (from_mode, from);
873
874 /* Get a copy of FROM widened to a word, if necessary. */
875 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
876 lowpart_mode = word_mode;
877 else
878 lowpart_mode = from_mode;
879
880 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
881
882 lowpart = gen_lowpart (lowpart_mode, to);
883 emit_move_insn (lowpart, lowfrom);
884
885 /* Compute the value to put in each remaining word. */
886 if (unsignedp)
887 fill_value = const0_rtx;
888 else
889 {
890 #ifdef HAVE_slt
891 if (HAVE_slt
892 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
893 && STORE_FLAG_VALUE == -1)
894 {
895 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
896 lowpart_mode, 0, 0);
897 fill_value = gen_reg_rtx (word_mode);
898 emit_insn (gen_slt (fill_value));
899 }
900 else
901 #endif
902 {
903 fill_value
904 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
905 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
906 NULL_RTX, 0);
907 fill_value = convert_to_mode (word_mode, fill_value, 1);
908 }
909 }
910
911 /* Fill the remaining words. */
912 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
913 {
914 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
915 rtx subword = operand_subword (to, index, 1, to_mode);
916
917 if (subword == 0)
918 abort ();
919
920 if (fill_value != subword)
921 emit_move_insn (subword, fill_value);
922 }
923
924 insns = get_insns ();
925 end_sequence ();
926
927 emit_no_conflict_block (insns, to, from, NULL_RTX,
928 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
929 return;
930 }
931
932 /* Truncating multi-word to a word or less. */
933 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
934 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
935 {
936 if (!((GET_CODE (from) == MEM
937 && ! MEM_VOLATILE_P (from)
938 && direct_load[(int) to_mode]
939 && ! mode_dependent_address_p (XEXP (from, 0)))
940 || GET_CODE (from) == REG
941 || GET_CODE (from) == SUBREG))
942 from = force_reg (from_mode, from);
943 convert_move (to, gen_lowpart (word_mode, from), 0);
944 return;
945 }
946
947 /* Handle pointer conversion. */ /* SPEE 900220. */
948 if (to_mode == PQImode)
949 {
950 if (from_mode != QImode)
951 from = convert_to_mode (QImode, from, unsignedp);
952
953 #ifdef HAVE_truncqipqi2
954 if (HAVE_truncqipqi2)
955 {
956 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
957 return;
958 }
959 #endif /* HAVE_truncqipqi2 */
960 abort ();
961 }
962
963 if (from_mode == PQImode)
964 {
965 if (to_mode != QImode)
966 {
967 from = convert_to_mode (QImode, from, unsignedp);
968 from_mode = QImode;
969 }
970 else
971 {
972 #ifdef HAVE_extendpqiqi2
973 if (HAVE_extendpqiqi2)
974 {
975 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
976 return;
977 }
978 #endif /* HAVE_extendpqiqi2 */
979 abort ();
980 }
981 }
982
983 if (to_mode == PSImode)
984 {
985 if (from_mode != SImode)
986 from = convert_to_mode (SImode, from, unsignedp);
987
988 #ifdef HAVE_truncsipsi2
989 if (HAVE_truncsipsi2)
990 {
991 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
992 return;
993 }
994 #endif /* HAVE_truncsipsi2 */
995 abort ();
996 }
997
998 if (from_mode == PSImode)
999 {
1000 if (to_mode != SImode)
1001 {
1002 from = convert_to_mode (SImode, from, unsignedp);
1003 from_mode = SImode;
1004 }
1005 else
1006 {
1007 #ifdef HAVE_extendpsisi2
1008 if (! unsignedp && HAVE_extendpsisi2)
1009 {
1010 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1011 return;
1012 }
1013 #endif /* HAVE_extendpsisi2 */
1014 #ifdef HAVE_zero_extendpsisi2
1015 if (unsignedp && HAVE_zero_extendpsisi2)
1016 {
1017 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1018 return;
1019 }
1020 #endif /* HAVE_zero_extendpsisi2 */
1021 abort ();
1022 }
1023 }
1024
1025 if (to_mode == PDImode)
1026 {
1027 if (from_mode != DImode)
1028 from = convert_to_mode (DImode, from, unsignedp);
1029
1030 #ifdef HAVE_truncdipdi2
1031 if (HAVE_truncdipdi2)
1032 {
1033 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1034 return;
1035 }
1036 #endif /* HAVE_truncdipdi2 */
1037 abort ();
1038 }
1039
1040 if (from_mode == PDImode)
1041 {
1042 if (to_mode != DImode)
1043 {
1044 from = convert_to_mode (DImode, from, unsignedp);
1045 from_mode = DImode;
1046 }
1047 else
1048 {
1049 #ifdef HAVE_extendpdidi2
1050 if (HAVE_extendpdidi2)
1051 {
1052 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1053 return;
1054 }
1055 #endif /* HAVE_extendpdidi2 */
1056 abort ();
1057 }
1058 }
1059
1060 /* Now follow all the conversions between integers
1061 no more than a word long. */
1062
1063 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1064 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1065 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1066 GET_MODE_BITSIZE (from_mode)))
1067 {
1068 if (!((GET_CODE (from) == MEM
1069 && ! MEM_VOLATILE_P (from)
1070 && direct_load[(int) to_mode]
1071 && ! mode_dependent_address_p (XEXP (from, 0)))
1072 || GET_CODE (from) == REG
1073 || GET_CODE (from) == SUBREG))
1074 from = force_reg (from_mode, from);
1075 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1076 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1077 from = copy_to_reg (from);
1078 emit_move_insn (to, gen_lowpart (to_mode, from));
1079 return;
1080 }
1081
1082 /* Handle extension. */
1083 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1084 {
1085 /* Convert directly if that works. */
1086 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1087 != CODE_FOR_nothing)
1088 {
1089 emit_unop_insn (code, to, from, equiv_code);
1090 return;
1091 }
1092 else
1093 {
1094 enum machine_mode intermediate;
1095 rtx tmp;
1096 tree shift_amount;
1097
1098 /* Search for a mode to convert via. */
1099 for (intermediate = from_mode; intermediate != VOIDmode;
1100 intermediate = GET_MODE_WIDER_MODE (intermediate))
1101 if (((can_extend_p (to_mode, intermediate, unsignedp)
1102 != CODE_FOR_nothing)
1103 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1104 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1105 GET_MODE_BITSIZE (intermediate))))
1106 && (can_extend_p (intermediate, from_mode, unsignedp)
1107 != CODE_FOR_nothing))
1108 {
1109 convert_move (to, convert_to_mode (intermediate, from,
1110 unsignedp), unsignedp);
1111 return;
1112 }
1113
1114 /* No suitable intermediate mode.
1115 Generate what we need with shifts. */
1116 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1117 - GET_MODE_BITSIZE (from_mode), 0);
1118 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1119 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1120 to, unsignedp);
1121 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1122 to, unsignedp);
1123 if (tmp != to)
1124 emit_move_insn (to, tmp);
1125 return;
1126 }
1127 }
1128
1129 /* Support special truncate insns for certain modes. */
1130
1131 if (from_mode == DImode && to_mode == SImode)
1132 {
1133 #ifdef HAVE_truncdisi2
1134 if (HAVE_truncdisi2)
1135 {
1136 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1137 return;
1138 }
1139 #endif
1140 convert_move (to, force_reg (from_mode, from), unsignedp);
1141 return;
1142 }
1143
1144 if (from_mode == DImode && to_mode == HImode)
1145 {
1146 #ifdef HAVE_truncdihi2
1147 if (HAVE_truncdihi2)
1148 {
1149 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1150 return;
1151 }
1152 #endif
1153 convert_move (to, force_reg (from_mode, from), unsignedp);
1154 return;
1155 }
1156
1157 if (from_mode == DImode && to_mode == QImode)
1158 {
1159 #ifdef HAVE_truncdiqi2
1160 if (HAVE_truncdiqi2)
1161 {
1162 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1163 return;
1164 }
1165 #endif
1166 convert_move (to, force_reg (from_mode, from), unsignedp);
1167 return;
1168 }
1169
1170 if (from_mode == SImode && to_mode == HImode)
1171 {
1172 #ifdef HAVE_truncsihi2
1173 if (HAVE_truncsihi2)
1174 {
1175 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1176 return;
1177 }
1178 #endif
1179 convert_move (to, force_reg (from_mode, from), unsignedp);
1180 return;
1181 }
1182
1183 if (from_mode == SImode && to_mode == QImode)
1184 {
1185 #ifdef HAVE_truncsiqi2
1186 if (HAVE_truncsiqi2)
1187 {
1188 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1189 return;
1190 }
1191 #endif
1192 convert_move (to, force_reg (from_mode, from), unsignedp);
1193 return;
1194 }
1195
1196 if (from_mode == HImode && to_mode == QImode)
1197 {
1198 #ifdef HAVE_trunchiqi2
1199 if (HAVE_trunchiqi2)
1200 {
1201 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1202 return;
1203 }
1204 #endif
1205 convert_move (to, force_reg (from_mode, from), unsignedp);
1206 return;
1207 }
1208
1209 if (from_mode == TImode && to_mode == DImode)
1210 {
1211 #ifdef HAVE_trunctidi2
1212 if (HAVE_trunctidi2)
1213 {
1214 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1215 return;
1216 }
1217 #endif
1218 convert_move (to, force_reg (from_mode, from), unsignedp);
1219 return;
1220 }
1221
1222 if (from_mode == TImode && to_mode == SImode)
1223 {
1224 #ifdef HAVE_trunctisi2
1225 if (HAVE_trunctisi2)
1226 {
1227 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1228 return;
1229 }
1230 #endif
1231 convert_move (to, force_reg (from_mode, from), unsignedp);
1232 return;
1233 }
1234
1235 if (from_mode == TImode && to_mode == HImode)
1236 {
1237 #ifdef HAVE_trunctihi2
1238 if (HAVE_trunctihi2)
1239 {
1240 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1241 return;
1242 }
1243 #endif
1244 convert_move (to, force_reg (from_mode, from), unsignedp);
1245 return;
1246 }
1247
1248 if (from_mode == TImode && to_mode == QImode)
1249 {
1250 #ifdef HAVE_trunctiqi2
1251 if (HAVE_trunctiqi2)
1252 {
1253 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1254 return;
1255 }
1256 #endif
1257 convert_move (to, force_reg (from_mode, from), unsignedp);
1258 return;
1259 }
1260
1261 /* Handle truncation of volatile memrefs, and so on;
1262 the things that couldn't be truncated directly,
1263 and for which there was no special instruction. */
1264 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1265 {
1266 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1267 emit_move_insn (to, temp);
1268 return;
1269 }
1270
1271 /* Mode combination is not recognized. */
1272 abort ();
1273 }
1274
1275 /* Return an rtx for a value that would result
1276 from converting X to mode MODE.
1277 Both X and MODE may be floating, or both integer.
1278 UNSIGNEDP is nonzero if X is an unsigned value.
1279 This can be done by referring to a part of X in place
1280 or by copying to a new temporary with conversion.
1281
1282 This function *must not* call protect_from_queue
1283 except when putting X into an insn (in which case convert_move does it). */
1284
1285 rtx
1286 convert_to_mode (mode, x, unsignedp)
1287 enum machine_mode mode;
1288 rtx x;
1289 int unsignedp;
1290 {
1291 return convert_modes (mode, VOIDmode, x, unsignedp);
1292 }
1293
1294 /* Return an rtx for a value that would result
1295 from converting X from mode OLDMODE to mode MODE.
1296 Both modes may be floating, or both integer.
1297 UNSIGNEDP is nonzero if X is an unsigned value.
1298
1299 This can be done by referring to a part of X in place
1300 or by copying to a new temporary with conversion.
1301
1302 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1303
1304 This function *must not* call protect_from_queue
1305 except when putting X into an insn (in which case convert_move does it). */
1306
1307 rtx
1308 convert_modes (mode, oldmode, x, unsignedp)
1309 enum machine_mode mode, oldmode;
1310 rtx x;
1311 int unsignedp;
1312 {
1313 register rtx temp;
1314
1315 /* If FROM is a SUBREG that indicates that we have already done at least
1316 the required extension, strip it. */
1317
1318 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1319 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1320 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1321 x = gen_lowpart (mode, x);
1322
1323 if (GET_MODE (x) != VOIDmode)
1324 oldmode = GET_MODE (x);
1325
1326 if (mode == oldmode)
1327 return x;
1328
1329 /* There is one case that we must handle specially: If we are converting
1330 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1331 we are to interpret the constant as unsigned, gen_lowpart will do
1332 the wrong thing if the constant appears negative.  What we want to do is
1333 make the high-order word of the constant zero, not all ones. */
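
/* Worked example (illustrative, not from the original source): with a
   32-bit HOST_WIDE_INT, converting (const_int -1) whose OLDMODE is
   SImode to an unsigned DImode value takes this path; VAL stays -1
   (the masking below is skipped because the host word is no wider than
   SImode), and immed_double_const (-1, 0, DImode) yields the 64-bit
   value 0x00000000ffffffff, i.e. the zero-extension rather than the
   sign-extension of the constant.  */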
1334
1335 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1336 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1337 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1338 {
1339 HOST_WIDE_INT val = INTVAL (x);
1340
1341 if (oldmode != VOIDmode
1342 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1343 {
1344 int width = GET_MODE_BITSIZE (oldmode);
1345
1346 /* We need to zero extend VAL. */
1347 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1348 }
1349
1350 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1351 }
1352
1353 /* We can do this with a gen_lowpart if both desired and current modes
1354 are integer, and this is either a constant integer, a register, or a
1355 non-volatile MEM. Except for the constant case where MODE is no
1356 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1357
1358 if ((GET_CODE (x) == CONST_INT
1359 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1360 || (GET_MODE_CLASS (mode) == MODE_INT
1361 && GET_MODE_CLASS (oldmode) == MODE_INT
1362 && (GET_CODE (x) == CONST_DOUBLE
1363 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1364 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1365 && direct_load[(int) mode])
1366 || (GET_CODE (x) == REG
1367 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1368 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1369 {
1370 /* ?? If we don't know OLDMODE, we have to assume here that
1371 X does not need sign- or zero-extension. This may not be
1372 the case, but it's the best we can do. */
1373 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1374 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1375 {
1376 HOST_WIDE_INT val = INTVAL (x);
1377 int width = GET_MODE_BITSIZE (oldmode);
1378
1379 /* We must sign or zero-extend in this case. Start by
1380 zero-extending, then sign extend if we need to. */
1381 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1382 if (! unsignedp
1383 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1384 val |= (HOST_WIDE_INT) (-1) << width;
1385
1386 return GEN_INT (val);
1387 }
1388
1389 return gen_lowpart (mode, x);
1390 }
1391
1392 temp = gen_reg_rtx (mode);
1393 convert_move (temp, x, unsignedp);
1394 return temp;
1395 }
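
/* Illustrative sketch (not part of the original source): typical uses of
   these helpers, assuming `narrow' is an SImode rtx:

	rtx wide  = convert_to_mode (DImode, narrow, 0);	-- sign extend
	rtx uwide = convert_to_mode (DImode, narrow, 1);	-- zero extend
	rtx low   = convert_to_mode (SImode, wide, 1);		-- truncate

   convert_modes is used instead when the operand may be a VOIDmode
   constant and the caller knows the mode it originally had.  */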
1396 \f
1397 /* This macro determines the largest unit size that move_by_pieces
1398    can use. */
1399
1400 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1401 move efficiently, as opposed to MOVE_MAX which is the maximum
1402 number of bytes we can move with a single instruction. */
1403
1404 #ifndef MOVE_MAX_PIECES
1405 #define MOVE_MAX_PIECES MOVE_MAX
1406 #endif
1407
1408 /* Generate several move instructions to copy LEN bytes
1409 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1410 The caller must pass FROM and TO
1411 through protect_from_queue before calling.
1412 ALIGN is the maximum alignment (in bits) we can assume. */
1413
1414 void
1415 move_by_pieces (to, from, len, align)
1416 rtx to, from;
1417 unsigned HOST_WIDE_INT len;
1418 unsigned int align;
1419 {
1420 struct move_by_pieces data;
1421 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1422 unsigned int max_size = MOVE_MAX_PIECES + 1;
1423 enum machine_mode mode = VOIDmode, tmode;
1424 enum insn_code icode;
1425
1426 data.offset = 0;
1427 data.to_addr = to_addr;
1428 data.from_addr = from_addr;
1429 data.to = to;
1430 data.from = from;
1431 data.autinc_to
1432 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1433 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1434 data.autinc_from
1435 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1436 || GET_CODE (from_addr) == POST_INC
1437 || GET_CODE (from_addr) == POST_DEC);
1438
1439 data.explicit_inc_from = 0;
1440 data.explicit_inc_to = 0;
1441 data.reverse
1442 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1443 if (data.reverse) data.offset = len;
1444 data.len = len;
1445
1446 /* If copying requires more than two move insns,
1447 copy addresses to registers (to make displacements shorter)
1448 and use post-increment if available. */
1449 if (!(data.autinc_from && data.autinc_to)
1450 && move_by_pieces_ninsns (len, align) > 2)
1451 {
1452 /* Find the mode of the largest move... */
1453 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1454 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1455 if (GET_MODE_SIZE (tmode) < max_size)
1456 mode = tmode;
1457
1458 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1459 {
1460 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1461 data.autinc_from = 1;
1462 data.explicit_inc_from = -1;
1463 }
1464 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1465 {
1466 data.from_addr = copy_addr_to_reg (from_addr);
1467 data.autinc_from = 1;
1468 data.explicit_inc_from = 1;
1469 }
1470 if (!data.autinc_from && CONSTANT_P (from_addr))
1471 data.from_addr = copy_addr_to_reg (from_addr);
1472 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1473 {
1474 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1475 data.autinc_to = 1;
1476 data.explicit_inc_to = -1;
1477 }
1478 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1479 {
1480 data.to_addr = copy_addr_to_reg (to_addr);
1481 data.autinc_to = 1;
1482 data.explicit_inc_to = 1;
1483 }
1484 if (!data.autinc_to && CONSTANT_P (to_addr))
1485 data.to_addr = copy_addr_to_reg (to_addr);
1486 }
1487
1488 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1489 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1490 align = MOVE_MAX * BITS_PER_UNIT;
1491
1492 /* First move what we can in the largest integer mode, then go to
1493 successively smaller modes. */
1494
1495 while (max_size > 1)
1496 {
1497 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1498 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1499 if (GET_MODE_SIZE (tmode) < max_size)
1500 mode = tmode;
1501
1502 if (mode == VOIDmode)
1503 break;
1504
1505 icode = mov_optab->handlers[(int) mode].insn_code;
1506 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1507 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1508
1509 max_size = GET_MODE_SIZE (mode);
1510 }
1511
1512 /* The code above should have handled everything. */
1513 if (data.len > 0)
1514 abort ();
1515 }
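
/* Illustrative walk-through (not part of the original source): for a
   well-aligned 11-byte copy on a hypothetical 64-bit target with
   MOVE_MAX_PIECES == 8, the loop above first emits one DImode move
   (8 bytes, 3 left), finds SImode too wide for the remainder, then emits
   one HImode move (1 left) and finally one QImode move -- three move
   insns in all.  */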
1516
1517 /* Return number of insns required to move L bytes by pieces.
1518 ALIGN (in bits) is the maximum alignment we can assume. */
1519
1520 static unsigned HOST_WIDE_INT
1521 move_by_pieces_ninsns (l, align)
1522 unsigned HOST_WIDE_INT l;
1523 unsigned int align;
1524 {
1525 unsigned HOST_WIDE_INT n_insns = 0;
1526 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1527
1528 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1529 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1530 align = MOVE_MAX * BITS_PER_UNIT;
1531
1532 while (max_size > 1)
1533 {
1534 enum machine_mode mode = VOIDmode, tmode;
1535 enum insn_code icode;
1536
1537 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1538 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1539 if (GET_MODE_SIZE (tmode) < max_size)
1540 mode = tmode;
1541
1542 if (mode == VOIDmode)
1543 break;
1544
1545 icode = mov_optab->handlers[(int) mode].insn_code;
1546 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1547 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1548
1549 max_size = GET_MODE_SIZE (mode);
1550 }
1551
1552 return n_insns;
1553 }
1554
1555 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1556 with move instructions for mode MODE. GENFUN is the gen_... function
1557 to make a move insn for that mode. DATA has all the other info. */
1558
1559 static void
1560 move_by_pieces_1 (genfun, mode, data)
1561 rtx (*genfun) PARAMS ((rtx, ...));
1562 enum machine_mode mode;
1563 struct move_by_pieces *data;
1564 {
1565 unsigned int size = GET_MODE_SIZE (mode);
1566 rtx to1, from1;
1567
1568 while (data->len >= size)
1569 {
1570 if (data->reverse)
1571 data->offset -= size;
1572
1573 if (data->autinc_to)
1574 {
1575 to1 = gen_rtx_MEM (mode, data->to_addr);
1576 MEM_COPY_ATTRIBUTES (to1, data->to);
1577 }
1578 else
1579 to1 = change_address (data->to, mode,
1580 plus_constant (data->to_addr, data->offset));
1581
1582 if (data->autinc_from)
1583 {
1584 from1 = gen_rtx_MEM (mode, data->from_addr);
1585 MEM_COPY_ATTRIBUTES (from1, data->from);
1586 }
1587 else
1588 from1 = change_address (data->from, mode,
1589 plus_constant (data->from_addr, data->offset));
1590
1591 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1592 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1593 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1594 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1595
1596 emit_insn ((*genfun) (to1, from1));
1597
1598 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1599 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1600 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1601 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1602
1603 if (! data->reverse)
1604 data->offset += size;
1605
1606 data->len -= size;
1607 }
1608 }
1609 \f
1610 /* Emit code to move a block Y to a block X.
1611 This may be done with string-move instructions,
1612 with multiple scalar move instructions, or with a library call.
1613
1614 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1615 with mode BLKmode.
1616 SIZE is an rtx that says how long they are.
1617 ALIGN is the maximum alignment we can assume they have.
1618
1619 Return the address of the new block, if memcpy is called and returns it,
1620 0 otherwise. */
1621
1622 rtx
1623 emit_block_move (x, y, size, align)
1624 rtx x, y;
1625 rtx size;
1626 unsigned int align;
1627 {
1628 rtx retval = 0;
1629 #ifdef TARGET_MEM_FUNCTIONS
1630 static tree fn;
1631 tree call_expr, arg_list;
1632 #endif
1633
1634 if (GET_MODE (x) != BLKmode)
1635 abort ();
1636
1637 if (GET_MODE (y) != BLKmode)
1638 abort ();
1639
1640 x = protect_from_queue (x, 1);
1641 y = protect_from_queue (y, 0);
1642 size = protect_from_queue (size, 0);
1643
1644 if (GET_CODE (x) != MEM)
1645 abort ();
1646 if (GET_CODE (y) != MEM)
1647 abort ();
1648 if (size == 0)
1649 abort ();
1650
1651 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1652 move_by_pieces (x, y, INTVAL (size), align);
1653 else
1654 {
1655 /* Try the most limited insn first, because there's no point
1656 including more than one in the machine description unless
1657 the more limited one has some advantage. */
1658
1659 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1660 enum machine_mode mode;
1661
1662 /* Since this is a move insn, we don't care about volatility. */
1663 volatile_ok = 1;
1664
1665 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1666 mode = GET_MODE_WIDER_MODE (mode))
1667 {
1668 enum insn_code code = movstr_optab[(int) mode];
1669 insn_operand_predicate_fn pred;
1670
1671 if (code != CODE_FOR_nothing
1672 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1673 here because if SIZE is less than the mode mask, as it is
1674 returned by the macro, it will definitely be less than the
1675 actual mode mask. */
1676 && ((GET_CODE (size) == CONST_INT
1677 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1678 <= (GET_MODE_MASK (mode) >> 1)))
1679 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1680 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1681 || (*pred) (x, BLKmode))
1682 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1683 || (*pred) (y, BLKmode))
1684 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1685 || (*pred) (opalign, VOIDmode)))
1686 {
1687 rtx op2;
1688 rtx last = get_last_insn ();
1689 rtx pat;
1690
1691 op2 = convert_to_mode (mode, size, 1);
1692 pred = insn_data[(int) code].operand[2].predicate;
1693 if (pred != 0 && ! (*pred) (op2, mode))
1694 op2 = copy_to_mode_reg (mode, op2);
1695
1696 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1697 if (pat)
1698 {
1699 emit_insn (pat);
1700 volatile_ok = 0;
1701 return 0;
1702 }
1703 else
1704 delete_insns_since (last);
1705 }
1706 }
1707
1708 volatile_ok = 0;
1709
1710 /* X, Y, or SIZE may have been passed through protect_from_queue.
1711
1712 It is unsafe to save the value generated by protect_from_queue
1713 and reuse it later. Consider what happens if emit_queue is
1714 called before the return value from protect_from_queue is used.
1715
1716 Expansion of the CALL_EXPR below will call emit_queue before
1717 we are finished emitting RTL for argument setup. So if we are
1718 not careful we could get the wrong value for an argument.
1719
1720 To avoid this problem we go ahead and emit code to copy X, Y &
1721 SIZE into new pseudos. We can then place those new pseudos
1722 into an RTL_EXPR and use them later, even after a call to
1723 emit_queue.
1724
1725 Note this is not strictly needed for library calls since they
1726 do not call emit_queue before loading their arguments. However,
1727 we may need to have library calls call emit_queue in the future
1728 since failing to do so could cause problems for targets which
1729 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1730 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1731 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1732
1733 #ifdef TARGET_MEM_FUNCTIONS
1734 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1735 #else
1736 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1737 TREE_UNSIGNED (integer_type_node));
1738 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1739 #endif
1740
1741 #ifdef TARGET_MEM_FUNCTIONS
1742 /* It is incorrect to use the libcall calling conventions to call
1743 memcpy in this context.
1744
1745 This could be a user call to memcpy and the user may wish to
1746 examine the return value from memcpy.
1747
1748 For targets where libcalls and normal calls have different conventions
1749 for returning pointers, we could end up generating incorrect code.
1750
1751 So instead of using a libcall sequence we build up a suitable
1752 CALL_EXPR and expand the call in the normal fashion. */
1753 if (fn == NULL_TREE)
1754 {
1755 tree fntype;
1756
1757 /* This was copied from except.c, I don't know if all this is
1758 necessary in this context or not. */
1759 fn = get_identifier ("memcpy");
1760 push_obstacks_nochange ();
1761 end_temporary_allocation ();
1762 fntype = build_pointer_type (void_type_node);
1763 fntype = build_function_type (fntype, NULL_TREE);
1764 fn = build_decl (FUNCTION_DECL, fn, fntype);
1765 ggc_add_tree_root (&fn, 1);
1766 DECL_EXTERNAL (fn) = 1;
1767 TREE_PUBLIC (fn) = 1;
1768 DECL_ARTIFICIAL (fn) = 1;
1769 make_decl_rtl (fn, NULL_PTR, 1);
1770 assemble_external (fn);
1771 pop_obstacks ();
1772 }
1773
1774 /* We need to make an argument list for the function call.
1775
1776 memcpy has three arguments, the first two are void * addresses and
1777 the last is a size_t byte count for the copy. */
1778 arg_list
1779 = build_tree_list (NULL_TREE,
1780 make_tree (build_pointer_type (void_type_node), x));
1781 TREE_CHAIN (arg_list)
1782 = build_tree_list (NULL_TREE,
1783 make_tree (build_pointer_type (void_type_node), y));
1784 TREE_CHAIN (TREE_CHAIN (arg_list))
1785 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1786 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1787
1788 /* Now we have to build up the CALL_EXPR itself. */
1789 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1790 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1791 call_expr, arg_list, NULL_TREE);
1792 TREE_SIDE_EFFECTS (call_expr) = 1;
1793
1794 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1795 #else
1796 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1797 VOIDmode, 3, y, Pmode, x, Pmode,
1798 convert_to_mode (TYPE_MODE (integer_type_node), size,
1799 TREE_UNSIGNED (integer_type_node)),
1800 TYPE_MODE (integer_type_node));
1801 #endif
1802 }
1803
1804 return retval;
1805 }
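
/* Illustrative sketch (not part of the original source): a typical call,
   where DST_ADDR and SRC_ADDR are placeholder Pmode address rtxes and
   ALIGN is expressed in bits in this revision:

	rtx dst = gen_rtx_MEM (BLKmode, dst_addr);
	rtx src = gen_rtx_MEM (BLKmode, src_addr);
	emit_block_move (dst, src, GEN_INT (16), 32);

   With a constant 16-byte size and 32-bit alignment this normally
   resolves to move_by_pieces or a movstr pattern; otherwise it falls
   back to the memcpy (or bcopy) call expanded above.  */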
1806 \f
1807 /* Copy all or part of a value X into registers starting at REGNO.
1808 The number of registers to be filled is NREGS. */
1809
1810 void
1811 move_block_to_reg (regno, x, nregs, mode)
1812 int regno;
1813 rtx x;
1814 int nregs;
1815 enum machine_mode mode;
1816 {
1817 int i;
1818 #ifdef HAVE_load_multiple
1819 rtx pat;
1820 rtx last;
1821 #endif
1822
1823 if (nregs == 0)
1824 return;
1825
1826 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1827 x = validize_mem (force_const_mem (mode, x));
1828
1829 /* See if the machine can do this with a load multiple insn. */
1830 #ifdef HAVE_load_multiple
1831 if (HAVE_load_multiple)
1832 {
1833 last = get_last_insn ();
1834 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1835 GEN_INT (nregs));
1836 if (pat)
1837 {
1838 emit_insn (pat);
1839 return;
1840 }
1841 else
1842 delete_insns_since (last);
1843 }
1844 #endif
1845
1846 for (i = 0; i < nregs; i++)
1847 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1848 operand_subword_force (x, i, mode));
1849 }
1850
1851 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1852 The number of registers to be filled is NREGS. SIZE indicates the number
1853 of bytes in the object X. */
1854
1855 void
1856 move_block_from_reg (regno, x, nregs, size)
1857 int regno;
1858 rtx x;
1859 int nregs;
1860 int size;
1861 {
1862 int i;
1863 #ifdef HAVE_store_multiple
1864 rtx pat;
1865 rtx last;
1866 #endif
1867 enum machine_mode mode;
1868
1869 /* If SIZE is that of a mode no bigger than a word, just use that
1870 mode's store operation. */
1871 if (size <= UNITS_PER_WORD
1872 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1873 {
1874 emit_move_insn (change_address (x, mode, NULL),
1875 gen_rtx_REG (mode, regno));
1876 return;
1877 }
1878
1879 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1880 to the left before storing to memory. Note that the previous test
1881 doesn't handle all cases (e.g. SIZE == 3). */
1882 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1883 {
1884 rtx tem = operand_subword (x, 0, 1, BLKmode);
1885 rtx shift;
1886
1887 if (tem == 0)
1888 abort ();
1889
1890 shift = expand_shift (LSHIFT_EXPR, word_mode,
1891 gen_rtx_REG (word_mode, regno),
1892 build_int_2 ((UNITS_PER_WORD - size)
1893 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1894 emit_move_insn (tem, shift);
1895 return;
1896 }
1897
1898 /* See if the machine can do this with a store multiple insn. */
1899 #ifdef HAVE_store_multiple
1900 if (HAVE_store_multiple)
1901 {
1902 last = get_last_insn ();
1903 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1904 GEN_INT (nregs));
1905 if (pat)
1906 {
1907 emit_insn (pat);
1908 return;
1909 }
1910 else
1911 delete_insns_since (last);
1912 }
1913 #endif
1914
1915 for (i = 0; i < nregs; i++)
1916 {
1917 rtx tem = operand_subword (x, i, 1, BLKmode);
1918
1919 if (tem == 0)
1920 abort ();
1921
1922 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1923 }
1924 }
1925
1926 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1927 registers represented by a PARALLEL. SSIZE represents the total size of
1928 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1929 SRC in bits. */
1930 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1931 the balance will be in what would be the low-order memory addresses, i.e.
1932 left justified for big endian, right justified for little endian. This
1933 happens to be true for the targets currently using this support. If this
1934 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1935 would be needed. */
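/* A hedged illustration (not taken from any particular target): a 16-byte
   structure returned in two 8-byte registers might be described by
     DST = (parallel [(expr_list (reg:DI 4) (const_int 0))
                      (expr_list (reg:DI 5) (const_int 8))])
   where each element pairs a destination register with the byte offset of
   the piece it receives; a leading element whose register is null marks a
   parameter that also lives on the stack.  */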
1936
1937 void
1938 emit_group_load (dst, orig_src, ssize, align)
1939 rtx dst, orig_src;
1940 int ssize;
1941 unsigned int align;
1942 {
1943 rtx *tmps, src;
1944 int start, i;
1945
1946 if (GET_CODE (dst) != PARALLEL)
1947 abort ();
1948
1949 /* Check for a NULL entry, used to indicate that the parameter goes
1950 both on the stack and in registers. */
1951 if (XEXP (XVECEXP (dst, 0, 0), 0))
1952 start = 0;
1953 else
1954 start = 1;
1955
1956 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1957
1958 /* If we won't be loading directly from memory, protect the real source
1959 from strange tricks we might play. */
1960 src = orig_src;
1961 if (GET_CODE (src) != MEM && ! CONSTANT_P (src))
1962 {
1963 if (GET_MODE (src) == VOIDmode)
1964 src = gen_reg_rtx (GET_MODE (dst));
1965 else
1966 src = gen_reg_rtx (GET_MODE (orig_src));
1967 emit_move_insn (src, orig_src);
1968 }
1969
1970 /* Process the pieces. */
1971 for (i = start; i < XVECLEN (dst, 0); i++)
1972 {
1973 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1974 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1975 unsigned int bytelen = GET_MODE_SIZE (mode);
1976 int shift = 0;
1977
1978 /* Handle trailing fragments that run over the size of the struct. */
1979 if (ssize >= 0 && bytepos + bytelen > ssize)
1980 {
1981 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1982 bytelen = ssize - bytepos;
1983 if (bytelen <= 0)
1984 abort ();
1985 }
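/* Example of the adjustment above, with hypothetical numbers: for
   SSIZE == 6 and an 8-byte piece at BYTEPOS == 0, BYTELEN shrinks to 6 and
   SHIFT becomes 16 bits; on a big-endian target the shift emitted at the
   bottom of this loop then left-justifies the six meaningful bytes within
   the destination register.  */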
1986
1987 /* Optimize the access just a bit. */
1988 if (GET_CODE (src) == MEM
1989 && align >= GET_MODE_ALIGNMENT (mode)
1990 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1991 && bytelen == GET_MODE_SIZE (mode))
1992 {
1993 tmps[i] = gen_reg_rtx (mode);
1994 emit_move_insn (tmps[i],
1995 change_address (src, mode,
1996 plus_constant (XEXP (src, 0),
1997 bytepos)));
1998 }
1999 else if (GET_CODE (src) == CONCAT)
2000 {
2001 if (bytepos == 0
2002 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2003 tmps[i] = XEXP (src, 0);
2004 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2005 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2006 tmps[i] = XEXP (src, 1);
2007 else
2008 abort ();
2009 }
2010 else if ((CONSTANT_P (src)
2011 && (GET_MODE (src) == VOIDmode || GET_MODE (src) == mode))
2012 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2013 tmps[i] = src;
2014 else
2015 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2016 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2017 mode, mode, align, ssize);
2018
2019 if (BYTES_BIG_ENDIAN && shift)
2020 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2021 tmps[i], 0, OPTAB_WIDEN);
2022 }
2023
2024 emit_queue ();
2025
2026 /* Copy the extracted pieces into the proper (probable) hard regs. */
2027 for (i = start; i < XVECLEN (dst, 0); i++)
2028 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2029 }
2030
2031 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2032 registers represented by a PARALLEL. SSIZE represents the total size of
2033 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2034
2035 void
2036 emit_group_store (orig_dst, src, ssize, align)
2037 rtx orig_dst, src;
2038 int ssize;
2039 unsigned int align;
2040 {
2041 rtx *tmps, dst;
2042 int start, i;
2043
2044 if (GET_CODE (src) != PARALLEL)
2045 abort ();
2046
2047 /* Check for a NULL entry, used to indicate that the parameter goes
2048 both on the stack and in registers. */
2049 if (XEXP (XVECEXP (src, 0, 0), 0))
2050 start = 0;
2051 else
2052 start = 1;
2053
2054 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2055
2056 /* Copy the (probable) hard regs into pseudos. */
2057 for (i = start; i < XVECLEN (src, 0); i++)
2058 {
2059 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2060 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2061 emit_move_insn (tmps[i], reg);
2062 }
2063 emit_queue ();
2064
2065 /* If we won't be storing directly into memory, protect the real destination
2066 from strange tricks we might play. */
2067 dst = orig_dst;
2068 if (GET_CODE (dst) == PARALLEL)
2069 {
2070 rtx temp;
2071
2072 /* We can get a PARALLEL dst if there is a conditional expression in
2073 a return statement. In that case, the dst and src are the same,
2074 so no action is necessary. */
2075 if (rtx_equal_p (dst, src))
2076 return;
2077
2078 /* It is unclear if we can ever reach here, but we may as well handle
2079 it. Allocate a temporary, and split this into a store/load to/from
2080 the temporary. */
2081
2082 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2083 emit_group_store (temp, src, ssize, align);
2084 emit_group_load (dst, temp, ssize, align);
2085 return;
2086 }
2087 else if (GET_CODE (dst) != MEM)
2088 {
2089 dst = gen_reg_rtx (GET_MODE (orig_dst));
2090 /* Make life a bit easier for combine. */
2091 emit_move_insn (dst, const0_rtx);
2092 }
2093
2094 /* Process the pieces. */
2095 for (i = start; i < XVECLEN (src, 0); i++)
2096 {
2097 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2098 enum machine_mode mode = GET_MODE (tmps[i]);
2099 unsigned int bytelen = GET_MODE_SIZE (mode);
2100
2101 /* Handle trailing fragments that run over the size of the struct. */
2102 if (ssize >= 0 && bytepos + bytelen > ssize)
2103 {
2104 if (BYTES_BIG_ENDIAN)
2105 {
2106 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2107 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2108 tmps[i], 0, OPTAB_WIDEN);
2109 }
2110 bytelen = ssize - bytepos;
2111 }
2112
2113 /* Optimize the access just a bit. */
2114 if (GET_CODE (dst) == MEM
2115 && align >= GET_MODE_ALIGNMENT (mode)
2116 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2117 && bytelen == GET_MODE_SIZE (mode))
2118 emit_move_insn (change_address (dst, mode,
2119 plus_constant (XEXP (dst, 0),
2120 bytepos)),
2121 tmps[i]);
2122 else
2123 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2124 mode, tmps[i], align, ssize);
2125 }
2126
2127 emit_queue ();
2128
2129 /* Copy from the pseudo into the (probable) hard reg. */
2130 if (GET_CODE (dst) == REG)
2131 emit_move_insn (orig_dst, dst);
2132 }
2133
2134 /* Generate code to copy a BLKmode object of TYPE out of a
2135 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2136 is null, a stack temporary is created. TGTBLK is returned.
2137
2138 The primary purpose of this routine is to handle functions
2139 that return BLKmode structures in registers. Some machines
2140 (the PA for example) want to return all small structures
2141 in registers regardless of the structure's alignment. */
2142
2143 rtx
2144 copy_blkmode_from_reg (tgtblk, srcreg, type)
2145 rtx tgtblk;
2146 rtx srcreg;
2147 tree type;
2148 {
2149 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2150 rtx src = NULL, dst = NULL;
2151 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2152 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2153
2154 if (tgtblk == 0)
2155 {
2156 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2157 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2158 preserve_temp_slots (tgtblk);
2159 }
2160
2161 /* This code assumes srcreg is at least a full word. If it isn't,
2162 copy it into a new pseudo which is a full word. */
2163 if (GET_MODE (srcreg) != BLKmode
2164 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2165 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2166
2167 /* Structures whose size is not a multiple of a word are aligned
2168 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2169 machine, this means we must skip the empty high order bytes when
2170 calculating the bit offset. */
2171 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2172 big_endian_correction
2173 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
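/* Worked example, assuming 32-bit words: for a 3-byte structure,
   bytes % UNITS_PER_WORD is 3, so big_endian_correction is 32 - 3 * 8 = 8
   bits; the extraction below then starts 8 bits into the source word,
   skipping the unused high-order byte on a big-endian target.  */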
2174
2175 /* Copy the structure BITSIZE bits at a time.
2176
2177 We could probably emit more efficient code for machines which do not use
2178 strict alignment, but it doesn't seem worth the effort at the current
2179 time. */
2180 for (bitpos = 0, xbitpos = big_endian_correction;
2181 bitpos < bytes * BITS_PER_UNIT;
2182 bitpos += bitsize, xbitpos += bitsize)
2183 {
2184 /* We need a new source operand each time xbitpos is on a
2185 word boundary and when xbitpos == big_endian_correction
2186 (the first time through). */
2187 if (xbitpos % BITS_PER_WORD == 0
2188 || xbitpos == big_endian_correction)
2189 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2190
2191 /* We need a new destination operand each time bitpos is on
2192 a word boundary. */
2193 if (bitpos % BITS_PER_WORD == 0)
2194 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2195
2196 /* Use xbitpos for the source extraction (right justified) and
2197 bitpos for the destination store (left justified). */
2198 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2199 extract_bit_field (src, bitsize,
2200 xbitpos % BITS_PER_WORD, 1,
2201 NULL_RTX, word_mode, word_mode,
2202 bitsize, BITS_PER_WORD),
2203 bitsize, BITS_PER_WORD);
2204 }
2205
2206 return tgtblk;
2207 }
2208
2209 /* Add a USE expression for REG to the (possibly empty) list pointed
2210 to by CALL_FUSAGE. REG must denote a hard register. */
2211
2212 void
2213 use_reg (call_fusage, reg)
2214 rtx *call_fusage, reg;
2215 {
2216 if (GET_CODE (reg) != REG
2217 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2218 abort ();
2219
2220 *call_fusage
2221 = gen_rtx_EXPR_LIST (VOIDmode,
2222 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2223 }
2224
2225 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2226 starting at REGNO. All of these registers must be hard registers. */
2227
2228 void
2229 use_regs (call_fusage, regno, nregs)
2230 rtx *call_fusage;
2231 int regno;
2232 int nregs;
2233 {
2234 int i;
2235
2236 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2237 abort ();
2238
2239 for (i = 0; i < nregs; i++)
2240 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2241 }
2242
2243 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2244 PARALLEL REGS. This is for calls that pass values in multiple
2245 non-contiguous locations. The Irix 6 ABI has examples of this. */
2246
2247 void
2248 use_group_regs (call_fusage, regs)
2249 rtx *call_fusage;
2250 rtx regs;
2251 {
2252 int i;
2253
2254 for (i = 0; i < XVECLEN (regs, 0); i++)
2255 {
2256 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2257
2258 /* A NULL entry means the parameter goes both on the stack and in
2259 registers. This can also be a MEM for targets that pass values
2260 partially on the stack and partially in registers. */
2261 if (reg != 0 && GET_CODE (reg) == REG)
2262 use_reg (call_fusage, reg);
2263 }
2264 }
2265 \f
2266 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2267 rtx with BLKmode). The caller must pass TO through protect_from_queue
2268 before calling. ALIGN is the maximum alignment we can assume. */
2269
2270 static void
2271 clear_by_pieces (to, len, align)
2272 rtx to;
2273 unsigned HOST_WIDE_INT len;
2274 unsigned int align;
2275 {
2276 struct clear_by_pieces data;
2277 rtx to_addr = XEXP (to, 0);
2278 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2279 enum machine_mode mode = VOIDmode, tmode;
2280 enum insn_code icode;
2281
2282 data.offset = 0;
2283 data.to_addr = to_addr;
2284 data.to = to;
2285 data.autinc_to
2286 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2287 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2288
2289 data.explicit_inc_to = 0;
2290 data.reverse
2291 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2292 if (data.reverse)
2293 data.offset = len;
2294 data.len = len;
2295
2296 /* If clearing requires more than two move insns,
2297 copy the address to a register (to make displacements shorter)
2298 and use post-increment if available. */
2299 if (!data.autinc_to
2300 && move_by_pieces_ninsns (len, align) > 2)
2301 {
2302 /* Determine the main mode we'll be using. */
2303 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2304 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2305 if (GET_MODE_SIZE (tmode) < max_size)
2306 mode = tmode;
2307
2308 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2309 {
2310 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2311 data.autinc_to = 1;
2312 data.explicit_inc_to = -1;
2313 }
2314
2315 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse
2316 && ! data.autinc_to)
2317 {
2318 data.to_addr = copy_addr_to_reg (to_addr);
2319 data.autinc_to = 1;
2320 data.explicit_inc_to = 1;
2321 }
2322
2323 if ( !data.autinc_to && CONSTANT_P (to_addr))
2324 data.to_addr = copy_addr_to_reg (to_addr);
2325 }
2326
2327 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2328 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2329 align = MOVE_MAX * BITS_PER_UNIT;
2330
2331 /* First move what we can in the largest integer mode, then go to
2332 successively smaller modes. */
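/* A hedged walk-through, assuming MOVE_MAX_PIECES == 4 and the usual
   QI/HI/SI integer modes: for LEN == 13 the loop first picks SImode and
   emits three 4-byte clears, max_size then drops to 4, HImode is skipped
   because only one byte remains, and a final QImode clear handles the
   last byte.  */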
2333
2334 while (max_size > 1)
2335 {
2336 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2337 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2338 if (GET_MODE_SIZE (tmode) < max_size)
2339 mode = tmode;
2340
2341 if (mode == VOIDmode)
2342 break;
2343
2344 icode = mov_optab->handlers[(int) mode].insn_code;
2345 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2346 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2347
2348 max_size = GET_MODE_SIZE (mode);
2349 }
2350
2351 /* The code above should have handled everything. */
2352 if (data.len != 0)
2353 abort ();
2354 }
2355
2356 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2357 with move instructions for mode MODE. GENFUN is the gen_... function
2358 to make a move insn for that mode. DATA has all the other info. */
2359
2360 static void
2361 clear_by_pieces_1 (genfun, mode, data)
2362 rtx (*genfun) PARAMS ((rtx, ...));
2363 enum machine_mode mode;
2364 struct clear_by_pieces *data;
2365 {
2366 unsigned int size = GET_MODE_SIZE (mode);
2367 rtx to1;
2368
2369 while (data->len >= size)
2370 {
2371 if (data->reverse)
2372 data->offset -= size;
2373
2374 if (data->autinc_to)
2375 {
2376 to1 = gen_rtx_MEM (mode, data->to_addr);
2377 MEM_COPY_ATTRIBUTES (to1, data->to);
2378 }
2379 else
2380 to1 = change_address (data->to, mode,
2381 plus_constant (data->to_addr, data->offset));
2382
2383 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2384 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2385
2386 emit_insn ((*genfun) (to1, const0_rtx));
2387
2388 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2389 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2390
2391 if (! data->reverse)
2392 data->offset += size;
2393
2394 data->len -= size;
2395 }
2396 }
2397 \f
2398 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2399 its length in bytes and ALIGN is the maximum alignment we can assume.
2400
2401 If we call a function that returns the length of the block, return it. */
2402
2403 rtx
2404 clear_storage (object, size, align)
2405 rtx object;
2406 rtx size;
2407 unsigned int align;
2408 {
2409 #ifdef TARGET_MEM_FUNCTIONS
2410 static tree fn;
2411 tree call_expr, arg_list;
2412 #endif
2413 rtx retval = 0;
2414
2415 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2416 just move a zero. Otherwise, do this a piece at a time. */
2417 if (GET_MODE (object) != BLKmode
2418 && GET_CODE (size) == CONST_INT
2419 && GET_MODE_SIZE (GET_MODE (object)) == INTVAL (size))
2420 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2421 else
2422 {
2423 object = protect_from_queue (object, 1);
2424 size = protect_from_queue (size, 0);
2425
2426 if (GET_CODE (size) == CONST_INT
2427 && MOVE_BY_PIECES_P (INTVAL (size), align))
2428 clear_by_pieces (object, INTVAL (size), align);
2429 else
2430 {
2431 /* Try the most limited insn first, because there's no point
2432 including more than one in the machine description unless
2433 the more limited one has some advantage. */
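/* For example, a port might provide both a narrow `clrstrqi' pattern
   restricted to small constant lengths and a more general `clrstrsi'
   pattern; iterating from the narrowest mode tries the more restricted
   (and presumably cheaper) variant first.  (The pattern names here are
   only illustrative.)  */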
2434
2435 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2436 enum machine_mode mode;
2437
2438 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2439 mode = GET_MODE_WIDER_MODE (mode))
2440 {
2441 enum insn_code code = clrstr_optab[(int) mode];
2442 insn_operand_predicate_fn pred;
2443
2444 if (code != CODE_FOR_nothing
2445 /* We don't need MODE to be narrower than
2446 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2447 the mode mask, as it is returned by the macro, it will
2448 definitely be less than the actual mode mask. */
2449 && ((GET_CODE (size) == CONST_INT
2450 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2451 <= (GET_MODE_MASK (mode) >> 1)))
2452 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2453 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2454 || (*pred) (object, BLKmode))
2455 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2456 || (*pred) (opalign, VOIDmode)))
2457 {
2458 rtx op1;
2459 rtx last = get_last_insn ();
2460 rtx pat;
2461
2462 op1 = convert_to_mode (mode, size, 1);
2463 pred = insn_data[(int) code].operand[1].predicate;
2464 if (pred != 0 && ! (*pred) (op1, mode))
2465 op1 = copy_to_mode_reg (mode, op1);
2466
2467 pat = GEN_FCN ((int) code) (object, op1, opalign);
2468 if (pat)
2469 {
2470 emit_insn (pat);
2471 return 0;
2472 }
2473 else
2474 delete_insns_since (last);
2475 }
2476 }
2477
2478 /* OBJECT or SIZE may have been passed through protect_from_queue.
2479
2480 It is unsafe to save the value generated by protect_from_queue
2481 and reuse it later. Consider what happens if emit_queue is
2482 called before the return value from protect_from_queue is used.
2483
2484 Expansion of the CALL_EXPR below will call emit_queue before
2485 we are finished emitting RTL for argument setup. So if we are
2486 not careful we could get the wrong value for an argument.
2487
2488 To avoid this problem we go ahead and emit code to copy OBJECT
2489 and SIZE into new pseudos. We can then place those new pseudos
2490 into an RTL_EXPR and use them later, even after a call to
2491 emit_queue.
2492
2493 Note this is not strictly needed for library calls since they
2494 do not call emit_queue before loading their arguments. However,
2495 we may need to have library calls call emit_queue in the future
2496 since failing to do so could cause problems for targets which
2497 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2498 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2499
2500 #ifdef TARGET_MEM_FUNCTIONS
2501 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2502 #else
2503 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2504 TREE_UNSIGNED (integer_type_node));
2505 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2506 #endif
2507
2508 #ifdef TARGET_MEM_FUNCTIONS
2509 /* It is incorrect to use the libcall calling conventions to call
2510 memset in this context.
2511
2512 This could be a user call to memset and the user may wish to
2513 examine the return value from memset.
2514
2515 For targets where libcalls and normal calls have different
2516 conventions for returning pointers, we could end up generating
2517 incorrect code.
2518
2519 So instead of using a libcall sequence we build up a suitable
2520 CALL_EXPR and expand the call in the normal fashion. */
2521 if (fn == NULL_TREE)
2522 {
2523 tree fntype;
2524
2525 /* This was copied from except.c; I don't know whether all of this is
2526 necessary in this context. */
2527 fn = get_identifier ("memset");
2528 push_obstacks_nochange ();
2529 end_temporary_allocation ();
2530 fntype = build_pointer_type (void_type_node);
2531 fntype = build_function_type (fntype, NULL_TREE);
2532 fn = build_decl (FUNCTION_DECL, fn, fntype);
2533 ggc_add_tree_root (&fn, 1);
2534 DECL_EXTERNAL (fn) = 1;
2535 TREE_PUBLIC (fn) = 1;
2536 DECL_ARTIFICIAL (fn) = 1;
2537 make_decl_rtl (fn, NULL_PTR, 1);
2538 assemble_external (fn);
2539 pop_obstacks ();
2540 }
2541
2542 /* We need to make an argument list for the function call.
2543
2544 memset has three arguments, the first is a void * address, the
2545 second an integer with the initialization value, the last is a
2546 size_t byte count for the fill. */
2547 arg_list
2548 = build_tree_list (NULL_TREE,
2549 make_tree (build_pointer_type (void_type_node),
2550 object));
2551 TREE_CHAIN (arg_list)
2552 = build_tree_list (NULL_TREE,
2553 make_tree (integer_type_node, const0_rtx));
2554 TREE_CHAIN (TREE_CHAIN (arg_list))
2555 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2556 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2557
2558 /* Now we have to build up the CALL_EXPR itself. */
2559 call_expr = build1 (ADDR_EXPR,
2560 build_pointer_type (TREE_TYPE (fn)), fn);
2561 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2562 call_expr, arg_list, NULL_TREE);
2563 TREE_SIDE_EFFECTS (call_expr) = 1;
2564
2565 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2566 #else
2567 emit_library_call (bzero_libfunc, LCT_NORMAL,
2568 VOIDmode, 2, object, Pmode, size,
2569 TYPE_MODE (integer_type_node));
2570 #endif
2571 }
2572 }
2573
2574 return retval;
2575 }
2576
2577 /* Generate code to copy Y into X.
2578 Both Y and X must have the same mode, except that
2579 Y can be a constant with VOIDmode.
2580 This mode cannot be BLKmode; use emit_block_move for that.
2581
2582 Return the last instruction emitted. */
2583
2584 rtx
2585 emit_move_insn (x, y)
2586 rtx x, y;
2587 {
2588 enum machine_mode mode = GET_MODE (x);
2589
2590 x = protect_from_queue (x, 1);
2591 y = protect_from_queue (y, 0);
2592
2593 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2594 abort ();
2595
2596 /* Never force constant_p_rtx to memory. */
2597 if (GET_CODE (y) == CONSTANT_P_RTX)
2598 ;
2599 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2600 y = force_const_mem (mode, y);
2601
2602 /* If X or Y are memory references, verify that their addresses are valid
2603 for the machine. */
2604 if (GET_CODE (x) == MEM
2605 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2606 && ! push_operand (x, GET_MODE (x)))
2607 || (flag_force_addr
2608 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2609 x = change_address (x, VOIDmode, XEXP (x, 0));
2610
2611 if (GET_CODE (y) == MEM
2612 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2613 || (flag_force_addr
2614 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2615 y = change_address (y, VOIDmode, XEXP (y, 0));
2616
2617 if (mode == BLKmode)
2618 abort ();
2619
2620 return emit_move_insn_1 (x, y);
2621 }
2622
2623 /* Low level part of emit_move_insn.
2624 Called just like emit_move_insn, but assumes X and Y
2625 are basically valid. */
2626
2627 rtx
2628 emit_move_insn_1 (x, y)
2629 rtx x, y;
2630 {
2631 enum machine_mode mode = GET_MODE (x);
2632 enum machine_mode submode;
2633 enum mode_class class = GET_MODE_CLASS (mode);
2634 unsigned int i;
2635
2636 if (mode >= MAX_MACHINE_MODE)
2637 abort ();
2638
2639 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2640 return
2641 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2642
2643 /* Expand complex moves by moving real part and imag part, if possible. */
2644 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2645 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2646 * BITS_PER_UNIT),
2647 (class == MODE_COMPLEX_INT
2648 ? MODE_INT : MODE_FLOAT),
2649 0))
2650 && (mov_optab->handlers[(int) submode].insn_code
2651 != CODE_FOR_nothing))
2652 {
2653 /* Don't split destination if it is a stack push. */
2654 int stack = push_operand (x, GET_MODE (x));
2655
2656 /* If this is a stack, push the highpart first, so it
2657 will be in the argument order.
2658
2659 In that case, change_address is used only to convert
2660 the mode, not to change the address. */
2661 if (stack)
2662 {
2663 /* Note that the real part always precedes the imag part in memory
2664 regardless of machine's endianness. */
2665 #ifdef STACK_GROWS_DOWNWARD
2666 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2667 (gen_rtx_MEM (submode, XEXP (x, 0)),
2668 gen_imagpart (submode, y)));
2669 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2670 (gen_rtx_MEM (submode, XEXP (x, 0)),
2671 gen_realpart (submode, y)));
2672 #else
2673 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2674 (gen_rtx_MEM (submode, XEXP (x, 0)),
2675 gen_realpart (submode, y)));
2676 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2677 (gen_rtx_MEM (submode, XEXP (x, 0)),
2678 gen_imagpart (submode, y)));
2679 #endif
2680 }
2681 else
2682 {
2683 rtx realpart_x, realpart_y;
2684 rtx imagpart_x, imagpart_y;
2685
2686 /* If this is a complex value with each part being smaller than a
2687 word, the usual calling sequence will likely pack the pieces into
2688 a single register. Unfortunately, SUBREG of hard registers only
2689 deals in terms of words, so we have a problem converting input
2690 arguments to the CONCAT of two registers that is used elsewhere
2691 for complex values. If this is before reload, we can copy it into
2692 memory and reload. FIXME, we should see about using extract and
2693 insert on integer registers, but complex short and complex char
2694 variables should be rarely used. */
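/* A hypothetical instance of the problem described above: a complex char
   value occupies only two bytes, so an ABI may pack both parts into a
   single hard register; since SUBREGs of hard registers work only in word
   units, the code below bounces the value through a stack temporary in an
   ordinary integer mode (HImode in this example) instead.  */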
2695 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2696 && (reload_in_progress | reload_completed) == 0)
2697 {
2698 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2699 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2700
2701 if (packed_dest_p || packed_src_p)
2702 {
2703 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2704 ? MODE_FLOAT : MODE_INT);
2705
2706 enum machine_mode reg_mode =
2707 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2708
2709 if (reg_mode != BLKmode)
2710 {
2711 rtx mem = assign_stack_temp (reg_mode,
2712 GET_MODE_SIZE (mode), 0);
2713
2714 rtx cmem = change_address (mem, mode, NULL_RTX);
2715
2716 cfun->cannot_inline = N_("function using short complex types cannot be inline");
2717
2718 if (packed_dest_p)
2719 {
2720 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2721 emit_move_insn_1 (cmem, y);
2722 return emit_move_insn_1 (sreg, mem);
2723 }
2724 else
2725 {
2726 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2727 emit_move_insn_1 (mem, sreg);
2728 return emit_move_insn_1 (x, cmem);
2729 }
2730 }
2731 }
2732 }
2733
2734 realpart_x = gen_realpart (submode, x);
2735 realpart_y = gen_realpart (submode, y);
2736 imagpart_x = gen_imagpart (submode, x);
2737 imagpart_y = gen_imagpart (submode, y);
2738
2739 /* Show the output dies here. This is necessary for SUBREGs
2740 of pseudos since we cannot track their lifetimes correctly;
2741 hard regs shouldn't appear here except as return values.
2742 We never want to emit such a clobber after reload. */
2743 if (x != y
2744 && ! (reload_in_progress || reload_completed)
2745 && (GET_CODE (realpart_x) == SUBREG
2746 || GET_CODE (imagpart_x) == SUBREG))
2747 {
2748 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2749 }
2750
2751 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2752 (realpart_x, realpart_y));
2753 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2754 (imagpart_x, imagpart_y));
2755 }
2756
2757 return get_last_insn ();
2758 }
2759
2760 /* This will handle any multi-word mode that lacks a move_insn pattern.
2761 However, you will get better code if you define such patterns,
2762 even if they must turn into multiple assembler instructions. */
2763 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2764 {
2765 rtx last_insn = 0;
2766 rtx seq, inner;
2767 int need_clobber;
2768
2769 #ifdef PUSH_ROUNDING
2770
2771 /* If X is a push on the stack, do the push now and replace
2772 X with a reference to the stack pointer. */
2773 if (push_operand (x, GET_MODE (x)))
2774 {
2775 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2776 x = change_address (x, VOIDmode, stack_pointer_rtx);
2777 }
2778 #endif
2779
2780 /* If we are in reload, see if either operand is a MEM whose address
2781 is scheduled for replacement. */
2782 if (reload_in_progress && GET_CODE (x) == MEM
2783 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2784 {
2785 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2786
2787 MEM_COPY_ATTRIBUTES (new, x);
2788 x = new;
2789 }
2790 if (reload_in_progress && GET_CODE (y) == MEM
2791 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2792 {
2793 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2794
2795 MEM_COPY_ATTRIBUTES (new, y);
2796 y = new;
2797 }
2798
2799 start_sequence ();
2800
2801 need_clobber = 0;
2802 for (i = 0;
2803 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2804 i++)
2805 {
2806 rtx xpart = operand_subword (x, i, 1, mode);
2807 rtx ypart = operand_subword (y, i, 1, mode);
2808
2809 /* If we can't get a part of Y, put Y into memory if it is a
2810 constant. Otherwise, force it into a register. If we still
2811 can't get a part of Y, abort. */
2812 if (ypart == 0 && CONSTANT_P (y))
2813 {
2814 y = force_const_mem (mode, y);
2815 ypart = operand_subword (y, i, 1, mode);
2816 }
2817 else if (ypart == 0)
2818 ypart = operand_subword_force (y, i, mode);
2819
2820 if (xpart == 0 || ypart == 0)
2821 abort ();
2822
2823 need_clobber |= (GET_CODE (xpart) == SUBREG);
2824
2825 last_insn = emit_move_insn (xpart, ypart);
2826 }
2827
2828 seq = gen_sequence ();
2829 end_sequence ();
2830
2831 /* Show the output dies here. This is necessary for SUBREGs
2832 of pseudos since we cannot track their lifetimes correctly;
2833 hard regs shouldn't appear here except as return values.
2834 We never want to emit such a clobber after reload. */
2835 if (x != y
2836 && ! (reload_in_progress || reload_completed)
2837 && need_clobber != 0)
2838 {
2839 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2840 }
2841
2842 emit_insn (seq);
2843
2844 return last_insn;
2845 }
2846 else
2847 abort ();
2848 }
2849 \f
2850 /* Pushing data onto the stack. */
2851
2852 /* Push a block of length SIZE (perhaps variable)
2853 and return an rtx to address the beginning of the block.
2854 Note that it is not possible for the value returned to be a QUEUED.
2855 The value may be virtual_outgoing_args_rtx.
2856
2857 EXTRA is the number of bytes of padding to push in addition to SIZE.
2858 BELOW nonzero means this padding comes at low addresses;
2859 otherwise, the padding comes at high addresses. */
2860
2861 rtx
2862 push_block (size, extra, below)
2863 rtx size;
2864 int extra, below;
2865 {
2866 register rtx temp;
2867
2868 size = convert_modes (Pmode, ptr_mode, size, 1);
2869 if (CONSTANT_P (size))
2870 anti_adjust_stack (plus_constant (size, extra));
2871 else if (GET_CODE (size) == REG && extra == 0)
2872 anti_adjust_stack (size);
2873 else
2874 {
2875 temp = copy_to_mode_reg (Pmode, size);
2876 if (extra != 0)
2877 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2878 temp, 0, OPTAB_LIB_WIDEN);
2879 anti_adjust_stack (temp);
2880 }
2881
2882 #ifndef STACK_GROWS_DOWNWARD
2883 #ifdef ARGS_GROW_DOWNWARD
2884 if (!ACCUMULATE_OUTGOING_ARGS)
2885 #else
2886 if (0)
2887 #endif
2888 #else
2889 if (1)
2890 #endif
2891 {
2892 /* Return the lowest stack address when STACK or ARGS grow downward and
2893 we are not accumulating outgoing arguments (the c4x port uses such
2894 conventions). */
2895 temp = virtual_outgoing_args_rtx;
2896 if (extra != 0 && below)
2897 temp = plus_constant (temp, extra);
2898 }
2899 else
2900 {
2901 if (GET_CODE (size) == CONST_INT)
2902 temp = plus_constant (virtual_outgoing_args_rtx,
2903 -INTVAL (size) - (below ? 0 : extra));
2904 else if (extra != 0 && !below)
2905 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2906 negate_rtx (Pmode, plus_constant (size, extra)));
2907 else
2908 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2909 negate_rtx (Pmode, size));
2910 }
2911
2912 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2913 }
2914
2915 rtx
2916 gen_push_operand ()
2917 {
2918 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2919 }
2920
2921 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2922 block of SIZE bytes. */
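/* Reasoning behind the arithmetic below: with POST_DEC the store happens
   at the old stack pointer, so after pushing SIZE bytes the block begins
   SIZE bytes above the updated pointer; with POST_INC it begins SIZE bytes
   below it; with a pre-modifying push code the updated stack pointer itself
   already addresses the block.  */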
2923
2924 static rtx
2925 get_push_address (size)
2926 int size;
2927 {
2928 register rtx temp;
2929
2930 if (STACK_PUSH_CODE == POST_DEC)
2931 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2932 else if (STACK_PUSH_CODE == POST_INC)
2933 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2934 else
2935 temp = stack_pointer_rtx;
2936
2937 return copy_to_reg (temp);
2938 }
2939
2940 /* Generate code to push X onto the stack, assuming it has mode MODE and
2941 type TYPE.
2942 MODE is redundant except when X is a CONST_INT (since they don't
2943 carry mode info).
2944 SIZE is an rtx for the size of data to be copied (in bytes),
2945 needed only if X is BLKmode.
2946
2947 ALIGN is maximum alignment we can assume.
2948
2949 If PARTIAL and REG are both nonzero, then copy that many of the first
2950 words of X into registers starting with REG, and push the rest of X.
2951 The amount of space pushed is decreased by PARTIAL words,
2952 rounded *down* to a multiple of PARM_BOUNDARY.
2953 REG must be a hard register in this case.
2954 If REG is zero but PARTIAL is not, take all other actions for an
2955 argument partially in registers, but do not actually load any
2956 registers.
2957
2958 EXTRA is the amount in bytes of extra space to leave next to this arg.
2959 This is ignored if an argument block has already been allocated.
2960
2961 On a machine that lacks real push insns, ARGS_ADDR is the address of
2962 the bottom of the argument block for this call. We use indexing off there
2963 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2964 argument block has not been preallocated.
2965
2966 ARGS_SO_FAR is the size of args previously pushed for this call.
2967
2968 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2969 for arguments passed in registers. If nonzero, it will be the number
2970 of bytes required. */
2971
2972 void
2973 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2974 args_addr, args_so_far, reg_parm_stack_space,
2975 alignment_pad)
2976 register rtx x;
2977 enum machine_mode mode;
2978 tree type;
2979 rtx size;
2980 unsigned int align;
2981 int partial;
2982 rtx reg;
2983 int extra;
2984 rtx args_addr;
2985 rtx args_so_far;
2986 int reg_parm_stack_space;
2987 rtx alignment_pad;
2988 {
2989 rtx xinner;
2990 enum direction stack_direction
2991 #ifdef STACK_GROWS_DOWNWARD
2992 = downward;
2993 #else
2994 = upward;
2995 #endif
2996
2997 /* Decide where to pad the argument: `downward' for below,
2998 `upward' for above, or `none' for don't pad it.
2999 Default is below for small data on big-endian machines; else above. */
3000 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3001
3002 /* Invert direction if stack is post-update. */
3003 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
3004 if (where_pad != none)
3005 where_pad = (where_pad == downward ? upward : downward);
3006
3007 xinner = x = protect_from_queue (x, 0);
3008
3009 if (mode == BLKmode)
3010 {
3011 /* Copy a block into the stack, entirely or partially. */
3012
3013 register rtx temp;
3014 int used = partial * UNITS_PER_WORD;
3015 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3016 int skip;
3017
3018 if (size == 0)
3019 abort ();
3020
3021 used -= offset;
3022
3023 /* USED is now the # of bytes we need not copy to the stack
3024 because registers will take care of them. */
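/* Worked example on a hypothetical 32-bit target with PARM_BOUNDARY == 64:
   for PARTIAL == 2, USED is 2 * 4 == 8 bytes and OFFSET is 0, so the first
   eight bytes of X are skipped here and loaded into registers at the end of
   this function.  For PARTIAL == 1, OFFSET is 4 and USED drops back to 0;
   the number of bytes skipped is always rounded down to a whole multiple of
   the parameter boundary.  */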
3025
3026 if (partial != 0)
3027 xinner = change_address (xinner, BLKmode,
3028 plus_constant (XEXP (xinner, 0), used));
3029
3030 /* If the partial register-part of the arg counts in its stack size,
3031 skip the part of stack space corresponding to the registers.
3032 Otherwise, start copying to the beginning of the stack space,
3033 by setting SKIP to 0. */
3034 skip = (reg_parm_stack_space == 0) ? 0 : used;
3035
3036 #ifdef PUSH_ROUNDING
3037 /* Do it with several push insns if that doesn't take lots of insns
3038 and if there is no difficulty with push insns that skip bytes
3039 on the stack for alignment purposes. */
3040 if (args_addr == 0
3041 && PUSH_ARGS
3042 && GET_CODE (size) == CONST_INT
3043 && skip == 0
3044 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3045 /* Here we avoid the case of a structure whose weak alignment
3046 forces many pushes of a small amount of data,
3047 and such small pushes do rounding that causes trouble. */
3048 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3049 || align >= BIGGEST_ALIGNMENT
3050 || PUSH_ROUNDING (align) == align)
3051 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3052 {
3053 /* Push padding now if padding above and stack grows down,
3054 or if padding below and stack grows up.
3055 But if space already allocated, this has already been done. */
3056 if (extra && args_addr == 0
3057 && where_pad != none && where_pad != stack_direction)
3058 anti_adjust_stack (GEN_INT (extra));
3059
3060 stack_pointer_delta += INTVAL (size) - used;
3061 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3062 INTVAL (size) - used, align);
3063
3064 if (current_function_check_memory_usage && ! in_check_memory_usage)
3065 {
3066 rtx temp;
3067
3068 in_check_memory_usage = 1;
3069 temp = get_push_address (INTVAL (size) - used);
3070 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3071 emit_library_call (chkr_copy_bitmap_libfunc,
3072 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3073 Pmode, XEXP (xinner, 0), Pmode,
3074 GEN_INT (INTVAL (size) - used),
3075 TYPE_MODE (sizetype));
3076 else
3077 emit_library_call (chkr_set_right_libfunc,
3078 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3079 Pmode, GEN_INT (INTVAL (size) - used),
3080 TYPE_MODE (sizetype),
3081 GEN_INT (MEMORY_USE_RW),
3082 TYPE_MODE (integer_type_node));
3083 in_check_memory_usage = 0;
3084 }
3085 }
3086 else
3087 #endif /* PUSH_ROUNDING */
3088 {
3089 rtx target;
3090
3091 /* Otherwise make space on the stack and copy the data
3092 to the address of that space. */
3093
3094 /* Deduct words put into registers from the size we must copy. */
3095 if (partial != 0)
3096 {
3097 if (GET_CODE (size) == CONST_INT)
3098 size = GEN_INT (INTVAL (size) - used);
3099 else
3100 size = expand_binop (GET_MODE (size), sub_optab, size,
3101 GEN_INT (used), NULL_RTX, 0,
3102 OPTAB_LIB_WIDEN);
3103 }
3104
3105 /* Get the address of the stack space.
3106 In this case, we do not deal with EXTRA separately.
3107 A single stack adjust will do. */
3108 if (! args_addr)
3109 {
3110 temp = push_block (size, extra, where_pad == downward);
3111 extra = 0;
3112 }
3113 else if (GET_CODE (args_so_far) == CONST_INT)
3114 temp = memory_address (BLKmode,
3115 plus_constant (args_addr,
3116 skip + INTVAL (args_so_far)));
3117 else
3118 temp = memory_address (BLKmode,
3119 plus_constant (gen_rtx_PLUS (Pmode,
3120 args_addr,
3121 args_so_far),
3122 skip));
3123 if (current_function_check_memory_usage && ! in_check_memory_usage)
3124 {
3125 in_check_memory_usage = 1;
3126 target = copy_to_reg (temp);
3127 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3128 emit_library_call (chkr_copy_bitmap_libfunc,
3129 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3130 target, Pmode,
3131 XEXP (xinner, 0), Pmode,
3132 size, TYPE_MODE (sizetype));
3133 else
3134 emit_library_call (chkr_set_right_libfunc,
3135 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3136 target, Pmode,
3137 size, TYPE_MODE (sizetype),
3138 GEN_INT (MEMORY_USE_RW),
3139 TYPE_MODE (integer_type_node));
3140 in_check_memory_usage = 0;
3141 }
3142
3143 target = gen_rtx_MEM (BLKmode, temp);
3144
3145 if (type != 0)
3146 {
3147 set_mem_attributes (target, type, 1);
3148 /* Function incoming arguments may overlap with sibling call
3149 outgoing arguments and we cannot allow reordering of reads
3150 from function arguments with stores to outgoing arguments
3151 of sibling calls. */
3152 MEM_ALIAS_SET (target) = 0;
3153 }
3154
3155 /* TEMP is the address of the block. Copy the data there. */
3156 if (GET_CODE (size) == CONST_INT
3157 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3158 {
3159 move_by_pieces (target, xinner, INTVAL (size), align);
3160 goto ret;
3161 }
3162 else
3163 {
3164 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3165 enum machine_mode mode;
3166
3167 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3168 mode != VOIDmode;
3169 mode = GET_MODE_WIDER_MODE (mode))
3170 {
3171 enum insn_code code = movstr_optab[(int) mode];
3172 insn_operand_predicate_fn pred;
3173
3174 if (code != CODE_FOR_nothing
3175 && ((GET_CODE (size) == CONST_INT
3176 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3177 <= (GET_MODE_MASK (mode) >> 1)))
3178 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3179 && (!(pred = insn_data[(int) code].operand[0].predicate)
3180 || ((*pred) (target, BLKmode)))
3181 && (!(pred = insn_data[(int) code].operand[1].predicate)
3182 || ((*pred) (xinner, BLKmode)))
3183 && (!(pred = insn_data[(int) code].operand[3].predicate)
3184 || ((*pred) (opalign, VOIDmode))))
3185 {
3186 rtx op2 = convert_to_mode (mode, size, 1);
3187 rtx last = get_last_insn ();
3188 rtx pat;
3189
3190 pred = insn_data[(int) code].operand[2].predicate;
3191 if (pred != 0 && ! (*pred) (op2, mode))
3192 op2 = copy_to_mode_reg (mode, op2);
3193
3194 pat = GEN_FCN ((int) code) (target, xinner,
3195 op2, opalign);
3196 if (pat)
3197 {
3198 emit_insn (pat);
3199 goto ret;
3200 }
3201 else
3202 delete_insns_since (last);
3203 }
3204 }
3205 }
3206
3207 if (!ACCUMULATE_OUTGOING_ARGS)
3208 {
3209 /* If the source is referenced relative to the stack pointer,
3210 copy it to another register to stabilize it. We do not need
3211 to do this if we know that we won't be changing sp. */
3212
3213 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3214 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3215 temp = copy_to_reg (temp);
3216 }
3217
3218 /* Make inhibit_defer_pop nonzero around the library call
3219 to force it to pop the bcopy-arguments right away. */
3220 NO_DEFER_POP;
3221 #ifdef TARGET_MEM_FUNCTIONS
3222 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3223 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3224 convert_to_mode (TYPE_MODE (sizetype),
3225 size, TREE_UNSIGNED (sizetype)),
3226 TYPE_MODE (sizetype));
3227 #else
3228 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3229 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3230 convert_to_mode (TYPE_MODE (integer_type_node),
3231 size,
3232 TREE_UNSIGNED (integer_type_node)),
3233 TYPE_MODE (integer_type_node));
3234 #endif
3235 OK_DEFER_POP;
3236 }
3237 }
3238 else if (partial > 0)
3239 {
3240 /* Scalar partly in registers. */
3241
3242 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3243 int i;
3244 int not_stack;
3245 /* # words of start of argument
3246 that we must make space for but need not store. */
3247 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3248 int args_offset = INTVAL (args_so_far);
3249 int skip;
3250
3251 /* Push padding now if padding above and stack grows down,
3252 or if padding below and stack grows up.
3253 But if space already allocated, this has already been done. */
3254 if (extra && args_addr == 0
3255 && where_pad != none && where_pad != stack_direction)
3256 anti_adjust_stack (GEN_INT (extra));
3257
3258 /* If we make space by pushing it, we might as well push
3259 the real data. Otherwise, we can leave OFFSET nonzero
3260 and leave the space uninitialized. */
3261 if (args_addr == 0)
3262 offset = 0;
3263
3264 /* Now NOT_STACK gets the number of words that we don't need to
3265 allocate on the stack. */
3266 not_stack = partial - offset;
3267
3268 /* If the partial register-part of the arg counts in its stack size,
3269 skip the part of stack space corresponding to the registers.
3270 Otherwise, start copying to the beginning of the stack space,
3271 by setting SKIP to 0. */
3272 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3273
3274 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3275 x = validize_mem (force_const_mem (mode, x));
3276
3277 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3278 SUBREGs of such registers are not allowed. */
3279 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3280 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3281 x = copy_to_reg (x);
3282
3283 /* Loop over all the words allocated on the stack for this arg. */
3284 /* We can do it by words, because any scalar bigger than a word
3285 has a size that is a multiple of a word. */
3286 #ifndef PUSH_ARGS_REVERSED
3287 for (i = not_stack; i < size; i++)
3288 #else
3289 for (i = size - 1; i >= not_stack; i--)
3290 #endif
3291 if (i >= not_stack + offset)
3292 emit_push_insn (operand_subword_force (x, i, mode),
3293 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3294 0, args_addr,
3295 GEN_INT (args_offset + ((i - not_stack + skip)
3296 * UNITS_PER_WORD)),
3297 reg_parm_stack_space, alignment_pad);
3298 }
3299 else
3300 {
3301 rtx addr;
3302 rtx target = NULL_RTX;
3303 rtx dest;
3304
3305 /* Push padding now if padding above and stack grows down,
3306 or if padding below and stack grows up.
3307 But if space already allocated, this has already been done. */
3308 if (extra && args_addr == 0
3309 && where_pad != none && where_pad != stack_direction)
3310 anti_adjust_stack (GEN_INT (extra));
3311
3312 #ifdef PUSH_ROUNDING
3313 if (args_addr == 0 && PUSH_ARGS)
3314 {
3315 addr = gen_push_operand ();
3316 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3317 }
3318 else
3319 #endif
3320 {
3321 if (GET_CODE (args_so_far) == CONST_INT)
3322 addr
3323 = memory_address (mode,
3324 plus_constant (args_addr,
3325 INTVAL (args_so_far)));
3326 else
3327 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3328 args_so_far));
3329 target = addr;
3330 }
3331
3332 dest = gen_rtx_MEM (mode, addr);
3333 if (type != 0)
3334 {
3335 set_mem_attributes (dest, type, 1);
3336 /* Function incoming arguments may overlap with sibling call
3337 outgoing arguments and we cannot allow reordering of reads
3338 from function arguments with stores to outgoing arguments
3339 of sibling calls. */
3340 MEM_ALIAS_SET (dest) = 0;
3341 }
3342
3343 emit_move_insn (dest, x);
3344
3345 if (current_function_check_memory_usage && ! in_check_memory_usage)
3346 {
3347 in_check_memory_usage = 1;
3348 if (target == 0)
3349 target = get_push_address (GET_MODE_SIZE (mode));
3350
3351 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3352 emit_library_call (chkr_copy_bitmap_libfunc,
3353 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3354 Pmode, XEXP (x, 0), Pmode,
3355 GEN_INT (GET_MODE_SIZE (mode)),
3356 TYPE_MODE (sizetype));
3357 else
3358 emit_library_call (chkr_set_right_libfunc,
3359 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3360 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3361 TYPE_MODE (sizetype),
3362 GEN_INT (MEMORY_USE_RW),
3363 TYPE_MODE (integer_type_node));
3364 in_check_memory_usage = 0;
3365 }
3366 }
3367
3368 ret:
3369 /* If part should go in registers, copy that part
3370 into the appropriate registers. Do this now, at the end,
3371 since mem-to-mem copies above may do function calls. */
3372 if (partial > 0 && reg != 0)
3373 {
3374 /* Handle calls that pass values in multiple non-contiguous locations.
3375 The Irix 6 ABI has examples of this. */
3376 if (GET_CODE (reg) == PARALLEL)
3377 emit_group_load (reg, x, -1, align); /* ??? size? */
3378 else
3379 move_block_to_reg (REGNO (reg), x, partial, mode);
3380 }
3381
3382 if (extra && args_addr == 0 && where_pad == stack_direction)
3383 anti_adjust_stack (GEN_INT (extra));
3384
3385 if (alignment_pad && args_addr == 0)
3386 anti_adjust_stack (alignment_pad);
3387 }
3388 \f
3389 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3390 operations. */
3391
3392 static rtx
3393 get_subtarget (x)
3394 rtx x;
3395 {
3396 return ((x == 0
3397 /* Only registers can be subtargets. */
3398 || GET_CODE (x) != REG
3399 /* If the register is readonly, it can't be set more than once. */
3400 || RTX_UNCHANGING_P (x)
3401 /* Don't use hard regs to avoid extending their life. */
3402 || REGNO (x) < FIRST_PSEUDO_REGISTER
3403 /* Avoid subtargets inside loops,
3404 since they hide some invariant expressions. */
3405 || preserve_subexpressions_p ())
3406 ? 0 : x);
3407 }
3408
3409 /* Expand an assignment that stores the value of FROM into TO.
3410 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3411 (This may contain a QUEUED rtx;
3412 if the value is constant, this rtx is a constant.)
3413 Otherwise, the returned value is NULL_RTX.
3414
3415 SUGGEST_REG is no longer actually used.
3416 It used to mean, copy the value through a register
3417 and return that register, if that is possible.
3418 We now use WANT_VALUE to decide whether to do this. */
3419
3420 rtx
3421 expand_assignment (to, from, want_value, suggest_reg)
3422 tree to, from;
3423 int want_value;
3424 int suggest_reg ATTRIBUTE_UNUSED;
3425 {
3426 register rtx to_rtx = 0;
3427 rtx result;
3428
3429 /* Don't crash if the lhs of the assignment was erroneous. */
3430
3431 if (TREE_CODE (to) == ERROR_MARK)
3432 {
3433 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3434 return want_value ? result : NULL_RTX;
3435 }
3436
3437 /* Assignment of a structure component needs special treatment
3438 if the structure component's rtx is not simply a MEM.
3439 Assignment of an array element at a constant index, and assignment of
3440 an array element in an unaligned packed structure field, have the same
3441 problem. */
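/* For instance (a hypothetical case), an assignment such as `s.f = x' where
   F is a bit-field: get_inner_reference below decomposes the reference into
   a byte offset plus BITSIZE/BITPOS, and the store is then done with
   store_field rather than a plain move.  */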
3442
3443 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3444 || TREE_CODE (to) == ARRAY_REF)
3445 {
3446 enum machine_mode mode1;
3447 HOST_WIDE_INT bitsize, bitpos;
3448 tree offset;
3449 int unsignedp;
3450 int volatilep = 0;
3451 tree tem;
3452 unsigned int alignment;
3453
3454 push_temp_slots ();
3455 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3456 &unsignedp, &volatilep, &alignment);
3457
3458 /* If we are going to use store_bit_field and extract_bit_field,
3459 make sure to_rtx will be safe for multiple use. */
3460
3461 if (mode1 == VOIDmode && want_value)
3462 tem = stabilize_reference (tem);
3463
3464 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3465 if (offset != 0)
3466 {
3467 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3468
3469 if (GET_CODE (to_rtx) != MEM)
3470 abort ();
3471
3472 if (GET_MODE (offset_rtx) != ptr_mode)
3473 {
3474 #ifdef POINTERS_EXTEND_UNSIGNED
3475 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3476 #else
3477 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3478 #endif
3479 }
3480
3481 /* A constant address in TO_RTX can have VOIDmode; we must not try
3482 to call force_reg in that case, so avoid it. */
3483 if (GET_CODE (to_rtx) == MEM
3484 && GET_MODE (to_rtx) == BLKmode
3485 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3486 && bitsize
3487 && (bitpos % bitsize) == 0
3488 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3489 && alignment == GET_MODE_ALIGNMENT (mode1))
3490 {
3491 rtx temp = change_address (to_rtx, mode1,
3492 plus_constant (XEXP (to_rtx, 0),
3493 (bitpos /
3494 BITS_PER_UNIT)));
3495 if (GET_CODE (XEXP (temp, 0)) == REG)
3496 to_rtx = temp;
3497 else
3498 to_rtx = change_address (to_rtx, mode1,
3499 force_reg (GET_MODE (XEXP (temp, 0)),
3500 XEXP (temp, 0)));
3501 bitpos = 0;
3502 }
3503
3504 to_rtx = change_address (to_rtx, VOIDmode,
3505 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3506 force_reg (ptr_mode,
3507 offset_rtx)));
3508 }
3509
3510 if (volatilep)
3511 {
3512 if (GET_CODE (to_rtx) == MEM)
3513 {
3514 /* When the offset is zero, to_rtx is the address of the
3515 structure we are storing into, and hence may be shared.
3516 We must make a new MEM before setting the volatile bit. */
3517 if (offset == 0)
3518 to_rtx = copy_rtx (to_rtx);
3519
3520 MEM_VOLATILE_P (to_rtx) = 1;
3521 }
3522 #if 0 /* This was turned off because, when a field is volatile
3523 in an object which is not volatile, the object may be in a register,
3524 and then we would abort over here. */
3525 else
3526 abort ();
3527 #endif
3528 }
3529
3530 if (TREE_CODE (to) == COMPONENT_REF
3531 && TREE_READONLY (TREE_OPERAND (to, 1)))
3532 {
3533 if (offset == 0)
3534 to_rtx = copy_rtx (to_rtx);
3535
3536 RTX_UNCHANGING_P (to_rtx) = 1;
3537 }
3538
3539 /* Check the access. */
3540 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3541 {
3542 rtx to_addr;
3543 int size;
3544 int best_mode_size;
3545 enum machine_mode best_mode;
3546
3547 best_mode = get_best_mode (bitsize, bitpos,
3548 TYPE_ALIGN (TREE_TYPE (tem)),
3549 mode1, volatilep);
3550 if (best_mode == VOIDmode)
3551 best_mode = QImode;
3552
3553 best_mode_size = GET_MODE_BITSIZE (best_mode);
3554 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3555 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3556 size *= GET_MODE_SIZE (best_mode);
3557
3558 /* Check the access right of the pointer. */
3559 in_check_memory_usage = 1;
3560 if (size)
3561 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3562 VOIDmode, 3, to_addr, Pmode,
3563 GEN_INT (size), TYPE_MODE (sizetype),
3564 GEN_INT (MEMORY_USE_WO),
3565 TYPE_MODE (integer_type_node));
3566 in_check_memory_usage = 0;
3567 }
3568
3569 /* If this is a varying-length object, we must get the address of
3570 the source and do an explicit block move. */
3571 if (bitsize < 0)
3572 {
3573 unsigned int from_align;
3574 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3575 rtx inner_to_rtx
3576 = change_address (to_rtx, VOIDmode,
3577 plus_constant (XEXP (to_rtx, 0),
3578 bitpos / BITS_PER_UNIT));
3579
3580 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3581 MIN (alignment, from_align));
3582 free_temp_slots ();
3583 pop_temp_slots ();
3584 return to_rtx;
3585 }
3586 else
3587 {
3588 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3589 (want_value
3590 /* Spurious cast for HPUX compiler. */
3591 ? ((enum machine_mode)
3592 TYPE_MODE (TREE_TYPE (to)))
3593 : VOIDmode),
3594 unsignedp,
3595 alignment,
3596 int_size_in_bytes (TREE_TYPE (tem)),
3597 get_alias_set (to));
3598
3599 preserve_temp_slots (result);
3600 free_temp_slots ();
3601 pop_temp_slots ();
3602
3603 /* If the value is meaningful, convert RESULT to the proper mode.
3604 Otherwise, return nothing. */
3605 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3606 TYPE_MODE (TREE_TYPE (from)),
3607 result,
3608 TREE_UNSIGNED (TREE_TYPE (to)))
3609 : NULL_RTX);
3610 }
3611 }
3612
3613 /* If the rhs is a function call and its value is not an aggregate,
3614 call the function before we start to compute the lhs.
3615 This is needed for correct code for cases such as
3616 val = setjmp (buf) on machines where reference to val
3617 requires loading up part of an address in a separate insn.
3618
3619 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3620 since it might be a promoted variable where the zero- or sign- extension
3621 needs to be done. Handling this in the normal way is safe because no
3622 computation is done before the call. */
3623 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3624 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3625 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3626 && GET_CODE (DECL_RTL (to)) == REG))
3627 {
3628 rtx value;
3629
3630 push_temp_slots ();
3631 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3632 if (to_rtx == 0)
3633 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3634
3635 /* Handle calls that return values in multiple non-contiguous locations.
3636 The Irix 6 ABI has examples of this. */
3637 if (GET_CODE (to_rtx) == PARALLEL)
3638 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3639 TYPE_ALIGN (TREE_TYPE (from)));
3640 else if (GET_MODE (to_rtx) == BLKmode)
3641 emit_block_move (to_rtx, value, expr_size (from),
3642 TYPE_ALIGN (TREE_TYPE (from)));
3643 else
3644 {
3645 #ifdef POINTERS_EXTEND_UNSIGNED
3646 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3647 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3648 value = convert_memory_address (GET_MODE (to_rtx), value);
3649 #endif
3650 emit_move_insn (to_rtx, value);
3651 }
3652 preserve_temp_slots (to_rtx);
3653 free_temp_slots ();
3654 pop_temp_slots ();
3655 return want_value ? to_rtx : NULL_RTX;
3656 }
3657
3658 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3659 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3660
3661 if (to_rtx == 0)
3662 {
3663 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3664 if (GET_CODE (to_rtx) == MEM)
3665 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3666 }
3667
3668 /* Don't move directly into a return register. */
3669 if (TREE_CODE (to) == RESULT_DECL
3670 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3671 {
3672 rtx temp;
3673
3674 push_temp_slots ();
3675 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3676
3677 if (GET_CODE (to_rtx) == PARALLEL)
3678 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3679 TYPE_ALIGN (TREE_TYPE (from)));
3680 else
3681 emit_move_insn (to_rtx, temp);
3682
3683 preserve_temp_slots (to_rtx);
3684 free_temp_slots ();
3685 pop_temp_slots ();
3686 return want_value ? to_rtx : NULL_RTX;
3687 }
3688
3689 /* In case we are returning the contents of an object which overlaps
3690 the place the value is being stored, use a safe function when copying
3691 a value through a pointer into a structure value return block. */
3692 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3693 && current_function_returns_struct
3694 && !current_function_returns_pcc_struct)
3695 {
3696 rtx from_rtx, size;
3697
3698 push_temp_slots ();
3699 size = expr_size (from);
3700 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3701 EXPAND_MEMORY_USE_DONT);
3702
3703 /* Copy the rights of the bitmap. */
3704 if (current_function_check_memory_usage)
3705 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3706 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3707 XEXP (from_rtx, 0), Pmode,
3708 convert_to_mode (TYPE_MODE (sizetype),
3709 size, TREE_UNSIGNED (sizetype)),
3710 TYPE_MODE (sizetype));
3711
3712 #ifdef TARGET_MEM_FUNCTIONS
3713 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3714 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3715 XEXP (from_rtx, 0), Pmode,
3716 convert_to_mode (TYPE_MODE (sizetype),
3717 size, TREE_UNSIGNED (sizetype)),
3718 TYPE_MODE (sizetype));
3719 #else
3720 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3721 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3722 XEXP (to_rtx, 0), Pmode,
3723 convert_to_mode (TYPE_MODE (integer_type_node),
3724 size, TREE_UNSIGNED (integer_type_node)),
3725 TYPE_MODE (integer_type_node));
3726 #endif
3727
3728 preserve_temp_slots (to_rtx);
3729 free_temp_slots ();
3730 pop_temp_slots ();
3731 return want_value ? to_rtx : NULL_RTX;
3732 }
3733
3734 /* Compute FROM and store the value in the rtx we got. */
3735
3736 push_temp_slots ();
3737 result = store_expr (from, to_rtx, want_value);
3738 preserve_temp_slots (result);
3739 free_temp_slots ();
3740 pop_temp_slots ();
3741 return want_value ? result : NULL_RTX;
3742 }
3743
3744 /* Generate code for computing expression EXP,
3745 and storing the value into TARGET.
3746 TARGET may contain a QUEUED rtx.
3747
3748 If WANT_VALUE is nonzero, return a copy of the value
3749 not in TARGET, so that we can be sure to use the proper
3750 value in a containing expression even if TARGET has something
3751 else stored in it. If possible, we copy the value through a pseudo
3752 and return that pseudo. Or, if the value is constant, we try to
3753 return the constant. In some cases, we return a pseudo
3754 copied *from* TARGET.
3755
3756 If the mode is BLKmode then we may return TARGET itself.
3757 It turns out that in BLKmode it doesn't cause a problem,
3758 because C has no operators that could combine two different
3759 assignments into the same BLKmode object with different values
3760 with no sequence point. Will other languages need this to
3761 be more thorough?
3762
3763 If WANT_VALUE is 0, we return NULL, to make sure
3764 to catch quickly any cases where the caller uses the value
3765 and fails to set WANT_VALUE. */
3766
3767 rtx
3768 store_expr (exp, target, want_value)
3769 register tree exp;
3770 register rtx target;
3771 int want_value;
3772 {
3773 register rtx temp;
3774 int dont_return_target = 0;
3775
3776 if (TREE_CODE (exp) == COMPOUND_EXPR)
3777 {
3778 /* Perform first part of compound expression, then assign from second
3779 part. */
3780 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3781 emit_queue ();
3782 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3783 }
3784 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3785 {
3786 /* For a conditional expression, get a safe form of the target. Then
3787 test the condition, doing the appropriate assignment on either
3788 side. This avoids the creation of unnecessary temporaries.
3789 For non-BLKmode, it is more efficient not to do this. */
3790
3791 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3792
3793 emit_queue ();
3794 target = protect_from_queue (target, 1);
3795
3796 do_pending_stack_adjust ();
3797 NO_DEFER_POP;
3798 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3799 start_cleanup_deferral ();
3800 store_expr (TREE_OPERAND (exp, 1), target, 0);
3801 end_cleanup_deferral ();
3802 emit_queue ();
3803 emit_jump_insn (gen_jump (lab2));
3804 emit_barrier ();
3805 emit_label (lab1);
3806 start_cleanup_deferral ();
3807 store_expr (TREE_OPERAND (exp, 2), target, 0);
3808 end_cleanup_deferral ();
3809 emit_queue ();
3810 emit_label (lab2);
3811 OK_DEFER_POP;
3812
3813 return want_value ? target : NULL_RTX;
3814 }
3815 else if (queued_subexp_p (target))
3816 /* If target contains a postincrement, let's not risk
3817 using it as the place to generate the rhs. */
3818 {
3819 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3820 {
3821 /* Expand EXP into a new pseudo. */
3822 temp = gen_reg_rtx (GET_MODE (target));
3823 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3824 }
3825 else
3826 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3827
3828 /* If target is volatile, ANSI requires accessing the value
3829 *from* the target, if it is accessed. So make that happen.
3830 In no case return the target itself. */
3831 if (! MEM_VOLATILE_P (target) && want_value)
3832 dont_return_target = 1;
3833 }
3834 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3835 && GET_MODE (target) != BLKmode)
3836 /* If target is in memory and caller wants value in a register instead,
3837 arrange that. Pass TARGET as target for expand_expr so that,
3838 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3839 We know expand_expr will not use the target in that case.
3840 Don't do this if TARGET is volatile because we are supposed
3841 to write it and then read it. */
3842 {
3843 temp = expand_expr (exp, target, GET_MODE (target), 0);
3844 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3845 temp = copy_to_reg (temp);
3846 dont_return_target = 1;
3847 }
3848 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3849 /* If this is a scalar in a register that is stored in a wider mode
3850 than the declared mode, compute the result into its declared mode
3851 and then convert to the wider mode. Our value is the computed
3852 expression. */
3853 {
3854 /* If we don't want a value, we can do the conversion inside EXP,
3855 which will often result in some optimizations. Do the conversion
3856 in two steps: first change the signedness, if needed, then
3857 the extend. But don't do this if the type of EXP is a subtype
3858 of something else since then the conversion might involve
3859 more than just converting modes. */
3860 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3861 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3862 {
3863 if (TREE_UNSIGNED (TREE_TYPE (exp))
3864 != SUBREG_PROMOTED_UNSIGNED_P (target))
3865 exp
3866 = convert
3867 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3868 TREE_TYPE (exp)),
3869 exp);
3870
3871 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3872 SUBREG_PROMOTED_UNSIGNED_P (target)),
3873 exp);
3874 }
3875
3876 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3877
3878 /* If TEMP is a volatile MEM and we want a result value, make
3879 the access now so it gets done only once. Likewise if
3880 it contains TARGET. */
3881 if (GET_CODE (temp) == MEM && want_value
3882 && (MEM_VOLATILE_P (temp)
3883 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3884 temp = copy_to_reg (temp);
3885
3886 /* If TEMP is a VOIDmode constant, use convert_modes to make
3887 sure that we properly convert it. */
3888 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3889 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3890 TYPE_MODE (TREE_TYPE (exp)), temp,
3891 SUBREG_PROMOTED_UNSIGNED_P (target));
3892
3893 convert_move (SUBREG_REG (target), temp,
3894 SUBREG_PROMOTED_UNSIGNED_P (target));
3895
3896 /* If we promoted a constant, change the mode back down to match
3897 target. Otherwise, the caller might get confused by a result whose
3898 mode is larger than expected. */
3899
3900 if (want_value && GET_MODE (temp) != GET_MODE (target)
3901 && GET_MODE (temp) != VOIDmode)
3902 {
3903 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3904 SUBREG_PROMOTED_VAR_P (temp) = 1;
3905 SUBREG_PROMOTED_UNSIGNED_P (temp)
3906 = SUBREG_PROMOTED_UNSIGNED_P (target);
3907 }
3908
3909 return want_value ? temp : NULL_RTX;
3910 }
3911 else
3912 {
3913 temp = expand_expr (exp, target, GET_MODE (target), 0);
3914 /* Return TARGET if it's a specified hardware register.
3915 If TARGET is a volatile mem ref, either return TARGET
3916 or return a reg copied *from* TARGET; ANSI requires this.
3917
3918 Otherwise, if TEMP is not TARGET, return TEMP
3919 if it is constant (for efficiency),
3920 or if we really want the correct value. */
3921 if (!(target && GET_CODE (target) == REG
3922 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3923 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3924 && ! rtx_equal_p (temp, target)
3925 && (CONSTANT_P (temp) || want_value))
3926 dont_return_target = 1;
3927 }
3928
3929 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3930 the same as that of TARGET, adjust the constant. This is needed, for
3931 example, in case it is a CONST_DOUBLE and we want only a word-sized
3932 value. */
3933 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3934 && TREE_CODE (exp) != ERROR_MARK
3935 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3936 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3937 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3938
3939 if (current_function_check_memory_usage
3940 && GET_CODE (target) == MEM
3941 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3942 {
3943 in_check_memory_usage = 1;
3944 if (GET_CODE (temp) == MEM)
3945 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3946 VOIDmode, 3, XEXP (target, 0), Pmode,
3947 XEXP (temp, 0), Pmode,
3948 expr_size (exp), TYPE_MODE (sizetype));
3949 else
3950 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3951 VOIDmode, 3, XEXP (target, 0), Pmode,
3952 expr_size (exp), TYPE_MODE (sizetype),
3953 GEN_INT (MEMORY_USE_WO),
3954 TYPE_MODE (integer_type_node));
3955 in_check_memory_usage = 0;
3956 }
3957
3958 /* If value was not generated in the target, store it there.
3959 Convert the value to TARGET's type first if necessary. */
3960 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3961 one or both of them are volatile memory refs, we have to distinguish
3962 two cases:
3963 - expand_expr has used TARGET. In this case, we must not generate
3964 another copy. This can be detected by TEMP and TARGET comparing
3965 equal according to ==.
3966 - expand_expr has not used TARGET - that means that the source just
3967 happens to have the same RTX form. Since temp will have been created
3968 by expand_expr, it will compare unequal according to == .
3969 We must generate a copy in this case, to reach the correct number
3970 of volatile memory references. */
3971
3972 if ((! rtx_equal_p (temp, target)
3973 || (temp != target && (side_effects_p (temp)
3974 || side_effects_p (target))))
3975 && TREE_CODE (exp) != ERROR_MARK)
3976 {
3977 target = protect_from_queue (target, 1);
3978 if (GET_MODE (temp) != GET_MODE (target)
3979 && GET_MODE (temp) != VOIDmode)
3980 {
3981 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3982 if (dont_return_target)
3983 {
3984 /* In this case, we will return TEMP,
3985 so make sure it has the proper mode.
3986 But don't forget to store the value into TARGET. */
3987 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3988 emit_move_insn (target, temp);
3989 }
3990 else
3991 convert_move (target, temp, unsignedp);
3992 }
3993
3994 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3995 {
3996 /* Handle copying a string constant into an array.
3997 The string constant may be shorter than the array.
3998 So copy just the string's actual length, and clear the rest. */
3999 rtx size;
4000 rtx addr;
4001
4002 /* Get the size of the data type of the string,
4003 which is actually the size of the target. */
4004 size = expr_size (exp);
4005 if (GET_CODE (size) == CONST_INT
4006 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4007 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
4008 else
4009 {
4010 /* Compute the size of the data to copy from the string. */
4011 tree copy_size
4012 = size_binop (MIN_EXPR,
4013 make_tree (sizetype, size),
4014 size_int (TREE_STRING_LENGTH (exp)));
4015 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
4016 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4017 VOIDmode, 0);
4018 rtx label = 0;
4019
4020 /* Copy that much. */
4021 emit_block_move (target, temp, copy_size_rtx,
4022 TYPE_ALIGN (TREE_TYPE (exp)));
4023
4024 /* Figure out how much is left in TARGET that we have to clear.
4025 Do all calculations in ptr_mode. */
4026
4027 addr = XEXP (target, 0);
4028 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4029
4030 if (GET_CODE (copy_size_rtx) == CONST_INT)
4031 {
4032 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4033 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
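/* The part still to be cleared starts COPY_SIZE bytes into TARGET,
   so it is only known to be aligned to the largest power of two
   that divides COPY_SIZE.  */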
4034 align = MIN (align, (BITS_PER_UNIT
4035 * (INTVAL (copy_size_rtx)
4036 & - INTVAL (copy_size_rtx))));
4037 }
4038 else
4039 {
4040 addr = force_reg (ptr_mode, addr);
4041 addr = expand_binop (ptr_mode, add_optab, addr,
4042 copy_size_rtx, NULL_RTX, 0,
4043 OPTAB_LIB_WIDEN);
4044
4045 size = expand_binop (ptr_mode, sub_optab, size,
4046 copy_size_rtx, NULL_RTX, 0,
4047 OPTAB_LIB_WIDEN);
4048
4049 align = BITS_PER_UNIT;
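/* SIZE now holds the number of bytes left to clear.  Branch around
   the clearing code below if that count came out negative.  */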
4050 label = gen_label_rtx ();
4051 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4052 GET_MODE (size), 0, 0, label);
4053 }
4054 align = MIN (align, expr_align (copy_size));
4055
4056 if (size != const0_rtx)
4057 {
4058 rtx dest = gen_rtx_MEM (BLKmode, addr);
4059
4060 MEM_COPY_ATTRIBUTES (dest, target);
4061
4062 /* Be sure we can write on ADDR. */
4063 in_check_memory_usage = 1;
4064 if (current_function_check_memory_usage)
4065 emit_library_call (chkr_check_addr_libfunc,
4066 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4067 addr, Pmode,
4068 size, TYPE_MODE (sizetype),
4069 GEN_INT (MEMORY_USE_WO),
4070 TYPE_MODE (integer_type_node));
4071 in_check_memory_usage = 0;
4072 clear_storage (dest, size, align);
4073 }
4074
4075 if (label)
4076 emit_label (label);
4077 }
4078 }
4079 /* Handle calls that return values in multiple non-contiguous locations.
4080 The Irix 6 ABI has examples of this. */
4081 else if (GET_CODE (target) == PARALLEL)
4082 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4083 TYPE_ALIGN (TREE_TYPE (exp)));
4084 else if (GET_MODE (temp) == BLKmode)
4085 emit_block_move (target, temp, expr_size (exp),
4086 TYPE_ALIGN (TREE_TYPE (exp)));
4087 else
4088 emit_move_insn (target, temp);
4089 }
4090
4091 /* If we don't want a value, return NULL_RTX. */
4092 if (! want_value)
4093 return NULL_RTX;
4094
4095 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4096 ??? The latter test doesn't seem to make sense. */
4097 else if (dont_return_target && GET_CODE (temp) != MEM)
4098 return temp;
4099
4100 /* Return TARGET itself if it is a hard register. */
4101 else if (want_value && GET_MODE (target) != BLKmode
4102 && ! (GET_CODE (target) == REG
4103 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4104 return copy_to_reg (target);
4105
4106 else
4107 return target;
4108 }
4109 \f
4110 /* Return 1 if EXP just contains zeros. */
4111
4112 static int
4113 is_zeros_p (exp)
4114 tree exp;
4115 {
4116 tree elt;
4117
4118 switch (TREE_CODE (exp))
4119 {
4120 case CONVERT_EXPR:
4121 case NOP_EXPR:
4122 case NON_LVALUE_EXPR:
4123 return is_zeros_p (TREE_OPERAND (exp, 0));
4124
4125 case INTEGER_CST:
4126 return integer_zerop (exp);
4127
4128 case COMPLEX_CST:
4129 return
4130 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4131
4132 case REAL_CST:
4133 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4134
4135 case CONSTRUCTOR:
4136 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4137 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4138 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4139 if (! is_zeros_p (TREE_VALUE (elt)))
4140 return 0;
4141
4142 return 1;
4143
4144 default:
4145 return 0;
4146 }
4147 }
4148
4149 /* Return 1 if EXP contains mostly (3/4) zeros. */
4150
4151 static int
4152 mostly_zeros_p (exp)
4153 tree exp;
4154 {
4155 if (TREE_CODE (exp) == CONSTRUCTOR)
4156 {
4157 int elts = 0, zeros = 0;
4158 tree elt = CONSTRUCTOR_ELTS (exp);
4159 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4160 {
4161 /* If there are no ranges of true bits, it is all zero. */
4162 return elt == NULL_TREE;
4163 }
4164 for (; elt; elt = TREE_CHAIN (elt))
4165 {
4166 /* We do not handle the case where the index is a RANGE_EXPR,
4167 so the statistic will be somewhat inaccurate.
4168 We do make a more accurate count in store_constructor itself,
4169 so since this function is only used for nested array elements,
4170 this should be close enough. */
4171 if (mostly_zeros_p (TREE_VALUE (elt)))
4172 zeros++;
4173 elts++;
4174 }
4175
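/* True when at least three quarters of the elements are themselves
   mostly zero.  */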
4176 return 4 * zeros >= 3 * elts;
4177 }
4178
4179 return is_zeros_p (exp);
4180 }
4181 \f
4182 /* Helper function for store_constructor.
4183 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4184 TYPE is the type of the CONSTRUCTOR, not the element type.
4185 ALIGN and CLEARED are as for store_constructor.
4186 ALIAS_SET is the alias set to use for any stores.
4187
4188 This provides a recursive shortcut back to store_constructor when it isn't
4189 necessary to go through store_field. This is so that we can pass through
4190 the cleared field to let store_constructor know that we may not have to
4191 clear a substructure if the outer structure has already been cleared. */
4192
4193 static void
4194 store_constructor_field (target, bitsize, bitpos,
4195 mode, exp, type, align, cleared, alias_set)
4196 rtx target;
4197 unsigned HOST_WIDE_INT bitsize;
4198 HOST_WIDE_INT bitpos;
4199 enum machine_mode mode;
4200 tree exp, type;
4201 unsigned int align;
4202 int cleared;
4203 int alias_set;
4204 {
4205 if (TREE_CODE (exp) == CONSTRUCTOR
4206 && bitpos % BITS_PER_UNIT == 0
4207 /* If we have a non-zero bitpos for a register target, then we just
4208 let store_field do the bitfield handling. This is unlikely to
4209 generate unnecessary clear instructions anyway. */
4210 && (bitpos == 0 || GET_CODE (target) == MEM))
4211 {
4212 if (bitpos != 0)
4213 target
4214 = change_address (target,
4215 GET_MODE (target) == BLKmode
4216 || 0 != (bitpos
4217 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4218 ? BLKmode : VOIDmode,
4219 plus_constant (XEXP (target, 0),
4220 bitpos / BITS_PER_UNIT));
4221
4222 MEM_ALIAS_SET (target) = alias_set;
4223 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4224 }
4225 else
4226 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4227 int_size_in_bytes (type), alias_set);
4228 }
4229
4230 /* Store the value of constructor EXP into the rtx TARGET.
4231 TARGET is either a REG or a MEM.
4232 ALIGN is the maximum known alignment for TARGET.
4233 CLEARED is true if TARGET is known to have been zero'd.
4234 SIZE is the number of bytes of TARGET we are allowed to modify: this
4235 may not be the same as the size of EXP if we are assigning to a field
4236 which has been packed to exclude padding bits. */
4237
4238 static void
4239 store_constructor (exp, target, align, cleared, size)
4240 tree exp;
4241 rtx target;
4242 unsigned int align;
4243 int cleared;
4244 HOST_WIDE_INT size;
4245 {
4246 tree type = TREE_TYPE (exp);
4247 #ifdef WORD_REGISTER_OPERATIONS
4248 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4249 #endif
4250
4251 /* We know our target cannot conflict, since safe_from_p has been called. */
4252 #if 0
4253 /* Don't try copying piece by piece into a hard register
4254 since that is vulnerable to being clobbered by EXP.
4255 Instead, construct in a pseudo register and then copy it all. */
4256 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4257 {
4258 rtx temp = gen_reg_rtx (GET_MODE (target));
4259 store_constructor (exp, temp, align, cleared, size);
4260 emit_move_insn (target, temp);
4261 return;
4262 }
4263 #endif
4264
4265 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4266 || TREE_CODE (type) == QUAL_UNION_TYPE)
4267 {
4268 register tree elt;
4269
4270 /* Inform later passes that the whole union value is dead. */
4271 if ((TREE_CODE (type) == UNION_TYPE
4272 || TREE_CODE (type) == QUAL_UNION_TYPE)
4273 && ! cleared)
4274 {
4275 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4276
4277 /* If the constructor is empty, clear the union. */
4278 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4279 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4280 }
4281
4282 /* If we are building a static constructor into a register,
4283 set the initial value as zero so we can fold the value into
4284 a constant. But if more than one register is involved,
4285 this probably loses. */
4286 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4287 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4288 {
4289 if (! cleared)
4290 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4291
4292 cleared = 1;
4293 }
4294
4295 /* If the constructor has fewer fields than the structure
4296 or if we are initializing the structure to mostly zeros,
4297 clear the whole structure first. Don't do this if TARGET is a
4298 register whose mode size isn't equal to SIZE, since clear_storage
4299 can't handle this case. */
4300 else if (size > 0
4301 && ((list_length (CONSTRUCTOR_ELTS (exp))
4302 != fields_length (type))
4303 || mostly_zeros_p (exp))
4304 && (GET_CODE (target) != REG
4305 || GET_MODE_SIZE (GET_MODE (target)) == size))
4306 {
4307 if (! cleared)
4308 clear_storage (target, GEN_INT (size), align);
4309
4310 cleared = 1;
4311 }
4312 else if (! cleared)
4313 /* Inform later passes that the old value is dead. */
4314 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4315
4316 /* Store each element of the constructor into
4317 the corresponding field of TARGET. */
4318
4319 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4320 {
4321 register tree field = TREE_PURPOSE (elt);
4322 #ifdef WORD_REGISTER_OPERATIONS
4323 tree value = TREE_VALUE (elt);
4324 #endif
4325 register enum machine_mode mode;
4326 HOST_WIDE_INT bitsize;
4327 HOST_WIDE_INT bitpos = 0;
4328 int unsignedp;
4329 tree offset;
4330 rtx to_rtx = target;
4331
4332 /* Just ignore missing fields.
4333 We cleared the whole structure, above,
4334 if any fields are missing. */
4335 if (field == 0)
4336 continue;
4337
4338 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4339 continue;
4340
4341 if (host_integerp (DECL_SIZE (field), 1))
4342 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4343 else
4344 bitsize = -1;
4345
4346 unsignedp = TREE_UNSIGNED (field);
4347 mode = DECL_MODE (field);
4348 if (DECL_BIT_FIELD (field))
4349 mode = VOIDmode;
4350
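/* If the field's position is a compile-time constant, fold it
   entirely into BITPOS; otherwise keep the variable byte offset in
   OFFSET and only the residual bit offset in BITPOS.  */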
4351 offset = DECL_FIELD_OFFSET (field);
4352 if (host_integerp (offset, 0)
4353 && host_integerp (bit_position (field), 0))
4354 {
4355 bitpos = int_bit_position (field);
4356 offset = 0;
4357 }
4358 else
4359 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4360
4361 if (offset)
4362 {
4363 rtx offset_rtx;
4364
4365 if (contains_placeholder_p (offset))
4366 offset = build (WITH_RECORD_EXPR, sizetype,
4367 offset, make_tree (TREE_TYPE (exp), target));
4368
4369 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4370 if (GET_CODE (to_rtx) != MEM)
4371 abort ();
4372
4373 if (GET_MODE (offset_rtx) != ptr_mode)
4374 {
4375 #ifdef POINTERS_EXTEND_UNSIGNED
4376 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4377 #else
4378 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4379 #endif
4380 }
4381
4382 to_rtx
4383 = change_address (to_rtx, VOIDmode,
4384 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4385 force_reg (ptr_mode,
4386 offset_rtx)));
4387 align = DECL_OFFSET_ALIGN (field);
4388 }
4389
4390 if (TREE_READONLY (field))
4391 {
4392 if (GET_CODE (to_rtx) == MEM)
4393 to_rtx = copy_rtx (to_rtx);
4394
4395 RTX_UNCHANGING_P (to_rtx) = 1;
4396 }
4397
4398 #ifdef WORD_REGISTER_OPERATIONS
4399 /* If this initializes a field that is smaller than a word, at the
4400 start of a word, try to widen it to a full word.
4401 This special case allows us to output C++ member function
4402 initializations in a form that the optimizers can understand. */
4403 if (GET_CODE (target) == REG
4404 && bitsize < BITS_PER_WORD
4405 && bitpos % BITS_PER_WORD == 0
4406 && GET_MODE_CLASS (mode) == MODE_INT
4407 && TREE_CODE (value) == INTEGER_CST
4408 && exp_size >= 0
4409 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4410 {
4411 tree type = TREE_TYPE (value);
4412 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4413 {
4414 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4415 value = convert (type, value);
4416 }
4417 if (BYTES_BIG_ENDIAN)
4418 value
4419 = fold (build (LSHIFT_EXPR, type, value,
4420 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4421 bitsize = BITS_PER_WORD;
4422 mode = word_mode;
4423 }
4424 #endif
4425 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4426 TREE_VALUE (elt), type, align, cleared,
4427 DECL_NONADDRESSABLE_P (field)
4428 ? MEM_ALIAS_SET (to_rtx)
4429 : get_alias_set (TREE_TYPE (field)));
4430 }
4431 }
4432 else if (TREE_CODE (type) == ARRAY_TYPE)
4433 {
4434 register tree elt;
4435 register int i;
4436 int need_to_clear;
4437 tree domain = TYPE_DOMAIN (type);
4438 tree elttype = TREE_TYPE (type);
4439 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4440 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4441 HOST_WIDE_INT minelt;
4442 HOST_WIDE_INT maxelt;
4443
4444 /* If we have constant bounds for the range of the type, get them. */
4445 if (const_bounds_p)
4446 {
4447 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4448 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4449 }
4450
4451 /* If the constructor has fewer elements than the array,
4452 clear the whole array first. Similarly if this is a
4453 static constructor of a non-BLKmode object. */
4454 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4455 need_to_clear = 1;
4456 else
4457 {
4458 HOST_WIDE_INT count = 0, zero_count = 0;
4459 need_to_clear = ! const_bounds_p;
4460
4461 /* This loop is a more accurate version of the loop in
4462 mostly_zeros_p (it handles RANGE_EXPR in an index).
4463 It is also needed to check for missing elements. */
4464 for (elt = CONSTRUCTOR_ELTS (exp);
4465 elt != NULL_TREE && ! need_to_clear;
4466 elt = TREE_CHAIN (elt))
4467 {
4468 tree index = TREE_PURPOSE (elt);
4469 HOST_WIDE_INT this_node_count;
4470
4471 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4472 {
4473 tree lo_index = TREE_OPERAND (index, 0);
4474 tree hi_index = TREE_OPERAND (index, 1);
4475
4476 if (! host_integerp (lo_index, 1)
4477 || ! host_integerp (hi_index, 1))
4478 {
4479 need_to_clear = 1;
4480 break;
4481 }
4482
4483 this_node_count = (tree_low_cst (hi_index, 1)
4484 - tree_low_cst (lo_index, 1) + 1);
4485 }
4486 else
4487 this_node_count = 1;
4488
4489 count += this_node_count;
4490 if (mostly_zeros_p (TREE_VALUE (elt)))
4491 zero_count += this_node_count;
4492 }
4493
4494 /* Clear the entire array first if there are any missing elements,
4495 or if the incidence of zero elements is >= 75%. */
4496 if (! need_to_clear
4497 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4498 need_to_clear = 1;
4499 }
4500
4501 if (need_to_clear && size > 0)
4502 {
4503 if (! cleared)
4504 clear_storage (target, GEN_INT (size), align);
4505 cleared = 1;
4506 }
4507 else
4508 /* Inform later passes that the old value is dead. */
4509 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4510
4511 /* Store each element of the constructor into
4512 the corresponding element of TARGET, determined
4513 by counting the elements. */
4514 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4515 elt;
4516 elt = TREE_CHAIN (elt), i++)
4517 {
4518 register enum machine_mode mode;
4519 HOST_WIDE_INT bitsize;
4520 HOST_WIDE_INT bitpos;
4521 int unsignedp;
4522 tree value = TREE_VALUE (elt);
4523 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4524 tree index = TREE_PURPOSE (elt);
4525 rtx xtarget = target;
4526
4527 if (cleared && is_zeros_p (value))
4528 continue;
4529
4530 unsignedp = TREE_UNSIGNED (elttype);
4531 mode = TYPE_MODE (elttype);
4532 if (mode == BLKmode)
4533 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4534 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4535 : -1);
4536 else
4537 bitsize = GET_MODE_BITSIZE (mode);
4538
4539 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4540 {
4541 tree lo_index = TREE_OPERAND (index, 0);
4542 tree hi_index = TREE_OPERAND (index, 1);
4543 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4544 struct nesting *loop;
4545 HOST_WIDE_INT lo, hi, count;
4546 tree position;
4547
4548 /* If the range is constant and "small", unroll the loop. */
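/* Here "small" means the target is not in memory, the range has at
   most two elements, or the unrolled data totals at most 40 bytes.  */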
4549 if (const_bounds_p
4550 && host_integerp (lo_index, 0)
4551 && host_integerp (hi_index, 0)
4552 && (lo = tree_low_cst (lo_index, 0),
4553 hi = tree_low_cst (hi_index, 0),
4554 count = hi - lo + 1,
4555 (GET_CODE (target) != MEM
4556 || count <= 2
4557 || (host_integerp (TYPE_SIZE (elttype), 1)
4558 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4559 <= 40 * 8)))))
4560 {
4561 lo -= minelt; hi -= minelt;
4562 for (; lo <= hi; lo++)
4563 {
4564 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4565 store_constructor_field
4566 (target, bitsize, bitpos, mode, value, type, align,
4567 cleared,
4568 TYPE_NONALIASED_COMPONENT (type)
4569 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4570 }
4571 }
4572 else
4573 {
4574 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4575 loop_top = gen_label_rtx ();
4576 loop_end = gen_label_rtx ();
4577
4578 unsignedp = TREE_UNSIGNED (domain);
4579
4580 index = build_decl (VAR_DECL, NULL_TREE, domain);
4581
4582 DECL_RTL (index) = index_r
4583 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4584 &unsignedp, 0));
4585
4586 if (TREE_CODE (value) == SAVE_EXPR
4587 && SAVE_EXPR_RTL (value) == 0)
4588 {
4589 /* Make sure value gets expanded once before the
4590 loop. */
4591 expand_expr (value, const0_rtx, VOIDmode, 0);
4592 emit_queue ();
4593 }
4594 store_expr (lo_index, index_r, 0);
4595 loop = expand_start_loop (0);
4596
4597 /* Assign value to element index. */
4598 position
4599 = convert (ssizetype,
4600 fold (build (MINUS_EXPR, TREE_TYPE (index),
4601 index, TYPE_MIN_VALUE (domain))));
4602 position = size_binop (MULT_EXPR, position,
4603 convert (ssizetype,
4604 TYPE_SIZE_UNIT (elttype)));
4605
4606 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4607 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4608 xtarget = change_address (target, mode, addr);
4609 if (TREE_CODE (value) == CONSTRUCTOR)
4610 store_constructor (value, xtarget, align, cleared,
4611 bitsize / BITS_PER_UNIT);
4612 else
4613 store_expr (value, xtarget, 0);
4614
4615 expand_exit_loop_if_false (loop,
4616 build (LT_EXPR, integer_type_node,
4617 index, hi_index));
4618
4619 expand_increment (build (PREINCREMENT_EXPR,
4620 TREE_TYPE (index),
4621 index, integer_one_node), 0, 0);
4622 expand_end_loop ();
4623 emit_label (loop_end);
4624 }
4625 }
4626 else if ((index != 0 && ! host_integerp (index, 0))
4627 || ! host_integerp (TYPE_SIZE (elttype), 1))
4628 {
4629 rtx pos_rtx, addr;
4630 tree position;
4631
4632 if (index == 0)
4633 index = ssize_int (1);
4634
4635 if (minelt)
4636 index = convert (ssizetype,
4637 fold (build (MINUS_EXPR, TREE_TYPE (index), index,
4638 TYPE_MIN_VALUE (domain))));
4639
4640 position = size_binop (MULT_EXPR, index,
4641 convert (ssizetype,
4642 TYPE_SIZE_UNIT (elttype)));
4643 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4644 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4645 xtarget = change_address (target, mode, addr);
4646 store_expr (value, xtarget, 0);
4647 }
4648 else
4649 {
4650 if (index != 0)
4651 bitpos = ((tree_low_cst (index, 0) - minelt)
4652 * tree_low_cst (TYPE_SIZE (elttype), 1));
4653 else
4654 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4655
4656 store_constructor_field (target, bitsize, bitpos, mode, value,
4657 type, align, cleared,
4658 TYPE_NONALIASED_COMPONENT (type)
4659 ? MEM_ALIAS_SET (target) :
4660 get_alias_set (elttype));
4661
4662 }
4663 }
4664 }
4665
4666 /* Set constructor assignments. */
4667 else if (TREE_CODE (type) == SET_TYPE)
4668 {
4669 tree elt = CONSTRUCTOR_ELTS (exp);
4670 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4671 tree domain = TYPE_DOMAIN (type);
4672 tree domain_min, domain_max, bitlength;
4673
4674 /* The default implementation strategy is to extract the constant
4675 parts of the constructor, use that to initialize the target,
4676 and then "or" in whatever non-constant ranges we need in addition.
4677
4678 If a large set is all zero or all ones, it is
4679 probably better to set it using memset (if available) or bzero.
4680 Also, if a large set has just a single range, it may also be
4681 better to first clear the whole set (using
4682 bzero/memset), and then set the bits we want.
4683
4684 /* Check for all zeros. */
4685 if (elt == NULL_TREE && size > 0)
4686 {
4687 if (!cleared)
4688 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4689 return;
4690 }
4691
4692 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4693 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4694 bitlength = size_binop (PLUS_EXPR,
4695 size_diffop (domain_max, domain_min),
4696 ssize_int (1));
4697
4698 nbits = tree_low_cst (bitlength, 1);
4699
4700 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4701 are "complicated" (more than one range), initialize (the
4702 constant parts) by copying from a constant. */
4703 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4704 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4705 {
4706 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4707 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4708 char *bit_buffer = (char *) alloca (nbits);
4709 HOST_WIDE_INT word = 0;
4710 unsigned int bit_pos = 0;
4711 unsigned int ibit = 0;
4712 unsigned int offset = 0; /* In bytes from beginning of set. */
4713
4714 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
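/* Assemble the constant bits into SET_WORD_SIZE-bit words and store
   each completed (or final partial) word into the set.  */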
4715 for (;;)
4716 {
4717 if (bit_buffer[ibit])
4718 {
4719 if (BYTES_BIG_ENDIAN)
4720 word |= (1 << (set_word_size - 1 - bit_pos));
4721 else
4722 word |= 1 << bit_pos;
4723 }
4724
4725 bit_pos++; ibit++;
4726 if (bit_pos >= set_word_size || ibit == nbits)
4727 {
4728 if (word != 0 || ! cleared)
4729 {
4730 rtx datum = GEN_INT (word);
4731 rtx to_rtx;
4732
4733 /* The assumption here is that it is safe to use
4734 XEXP if the set is multi-word, but not if
4735 it's single-word. */
4736 if (GET_CODE (target) == MEM)
4737 {
4738 to_rtx = plus_constant (XEXP (target, 0), offset);
4739 to_rtx = change_address (target, mode, to_rtx);
4740 }
4741 else if (offset == 0)
4742 to_rtx = target;
4743 else
4744 abort ();
4745 emit_move_insn (to_rtx, datum);
4746 }
4747
4748 if (ibit == nbits)
4749 break;
4750 word = 0;
4751 bit_pos = 0;
4752 offset += set_word_size / BITS_PER_UNIT;
4753 }
4754 }
4755 }
4756 else if (!cleared)
4757 /* Don't bother clearing storage if the set is all ones. */
4758 if (TREE_CHAIN (elt) != NULL_TREE
4759 || (TREE_PURPOSE (elt) == NULL_TREE
4760 ? nbits != 1
4761 : ( ! host_integerp (TREE_VALUE (elt), 0)
4762 || ! host_integerp (TREE_PURPOSE (elt), 0)
4763 || (tree_low_cst (TREE_VALUE (elt), 0)
4764 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4765 != (HOST_WIDE_INT) nbits))))
4766 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4767
4768 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4769 {
4770 /* Start of range of element or NULL. */
4771 tree startbit = TREE_PURPOSE (elt);
4772 /* End of range of element, or element value. */
4773 tree endbit = TREE_VALUE (elt);
4774 #ifdef TARGET_MEM_FUNCTIONS
4775 HOST_WIDE_INT startb, endb;
4776 #endif
4777 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4778
4779 bitlength_rtx = expand_expr (bitlength,
4780 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4781
4782 /* Handle non-range tuple element like [ expr ]. */
4783 if (startbit == NULL_TREE)
4784 {
4785 startbit = save_expr (endbit);
4786 endbit = startbit;
4787 }
4788
4789 startbit = convert (sizetype, startbit);
4790 endbit = convert (sizetype, endbit);
4791 if (! integer_zerop (domain_min))
4792 {
4793 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4794 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4795 }
4796 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4797 EXPAND_CONST_ADDRESS);
4798 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4799 EXPAND_CONST_ADDRESS);
4800
4801 if (REG_P (target))
4802 {
4803 targetx = assign_stack_temp (GET_MODE (target),
4804 GET_MODE_SIZE (GET_MODE (target)),
4805 0);
4806 emit_move_insn (targetx, target);
4807 }
4808
4809 else if (GET_CODE (target) == MEM)
4810 targetx = target;
4811 else
4812 abort ();
4813
4814 #ifdef TARGET_MEM_FUNCTIONS
4815 /* Optimization: If startbit and endbit are
4816 constants divisible by BITS_PER_UNIT,
4817 call memset instead. */
4818 if (TREE_CODE (startbit) == INTEGER_CST
4819 && TREE_CODE (endbit) == INTEGER_CST
4820 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4821 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4822 {
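/* Fill the byte-aligned range with all-ones bytes.  */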
4823 emit_library_call (memset_libfunc, LCT_NORMAL,
4824 VOIDmode, 3,
4825 plus_constant (XEXP (targetx, 0),
4826 startb / BITS_PER_UNIT),
4827 Pmode,
4828 constm1_rtx, TYPE_MODE (integer_type_node),
4829 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4830 TYPE_MODE (sizetype));
4831 }
4832 else
4833 #endif
4834 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4835 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4836 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4837 startbit_rtx, TYPE_MODE (sizetype),
4838 endbit_rtx, TYPE_MODE (sizetype));
4839
4840 if (REG_P (target))
4841 emit_move_insn (target, targetx);
4842 }
4843 }
4844
4845 else
4846 abort ();
4847 }
4848
4849 /* Store the value of EXP (an expression tree)
4850 into a subfield of TARGET which has mode MODE and occupies
4851 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4852 If MODE is VOIDmode, it means that we are storing into a bit-field.
4853
4854 If VALUE_MODE is VOIDmode, return nothing in particular.
4855 UNSIGNEDP is not used in this case.
4856
4857 Otherwise, return an rtx for the value stored. This rtx
4858 has mode VALUE_MODE if that is convenient to do.
4859 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4860
4861 ALIGN is the alignment that TARGET is known to have.
4862 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4863
4864 ALIAS_SET is the alias set for the destination. This value will
4865 (in general) be different from that for TARGET, since TARGET is a
4866 reference to the containing structure. */
4867
4868 static rtx
4869 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4870 unsignedp, align, total_size, alias_set)
4871 rtx target;
4872 HOST_WIDE_INT bitsize;
4873 HOST_WIDE_INT bitpos;
4874 enum machine_mode mode;
4875 tree exp;
4876 enum machine_mode value_mode;
4877 int unsignedp;
4878 unsigned int align;
4879 HOST_WIDE_INT total_size;
4880 int alias_set;
4881 {
4882 HOST_WIDE_INT width_mask = 0;
4883
4884 if (TREE_CODE (exp) == ERROR_MARK)
4885 return const0_rtx;
4886
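/* WIDTH_MASK has the low BITSIZE bits set; it is used below to avoid
   refetching a just-stored value from the bit-field.  */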
4887 if (bitsize < HOST_BITS_PER_WIDE_INT)
4888 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4889
4890 /* If we are storing into an unaligned field of an aligned union that is
4891 in a register, we may have the mode of TARGET being an integer mode but
4892 MODE == BLKmode. In that case, get an aligned object whose size and
4893 alignment are the same as TARGET and store TARGET into it (we can avoid
4894 the store if the field being stored is the entire width of TARGET). Then
4895 call ourselves recursively to store the field into a BLKmode version of
4896 that object. Finally, load from the object into TARGET. This is not
4897 very efficient in general, but should only be slightly more expensive
4898 than the otherwise-required unaligned accesses. Perhaps this can be
4899 cleaned up later. */
4900
4901 if (mode == BLKmode
4902 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4903 {
4904 rtx object = assign_stack_temp (GET_MODE (target),
4905 GET_MODE_SIZE (GET_MODE (target)), 0);
4906 rtx blk_object = copy_rtx (object);
4907
4908 MEM_SET_IN_STRUCT_P (object, 1);
4909 MEM_SET_IN_STRUCT_P (blk_object, 1);
4910 PUT_MODE (blk_object, BLKmode);
4911
4912 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4913 emit_move_insn (object, target);
4914
4915 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4916 align, total_size, alias_set);
4917
4918 /* Even though we aren't returning target, we need to
4919 give it the updated value. */
4920 emit_move_insn (target, object);
4921
4922 return blk_object;
4923 }
4924
4925 if (GET_CODE (target) == CONCAT)
4926 {
4927 /* We're storing into a struct containing a single __complex. */
4928
4929 if (bitpos != 0)
4930 abort ();
4931 return store_expr (exp, target, 0);
4932 }
4933
4934 /* If the structure is in a register or if the component
4935 is a bit field, we cannot use addressing to access it.
4936 Use bit-field techniques or SUBREG to store in it. */
4937
4938 if (mode == VOIDmode
4939 || (mode != BLKmode && ! direct_store[(int) mode]
4940 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4941 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4942 || GET_CODE (target) == REG
4943 || GET_CODE (target) == SUBREG
4944 /* If the field isn't aligned enough to store as an ordinary memref,
4945 store it as a bit field. */
4946 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4947 && (align < GET_MODE_ALIGNMENT (mode)
4948 || bitpos % GET_MODE_ALIGNMENT (mode)))
4949 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4950 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
4951 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4952 /* If the RHS and field are a constant size and the size of the
4953 RHS isn't the same size as the bitfield, we must use bitfield
4954 operations. */
4955 || (bitsize >= 0
4956 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
4957 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
4958 {
4959 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4960
4961 /* If BITSIZE is narrower than the size of the type of EXP
4962 we will be narrowing TEMP. Normally, what's wanted are the
4963 low-order bits. However, if EXP's type is a record and this is a
4964 big-endian machine, we want the upper BITSIZE bits. */
4965 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4966 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4967 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4968 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4969 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4970 - bitsize),
4971 temp, 1);
4972
4973 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4974 MODE. */
4975 if (mode != VOIDmode && mode != BLKmode
4976 && mode != TYPE_MODE (TREE_TYPE (exp)))
4977 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4978
4979 /* If the modes of TARGET and TEMP are both BLKmode, both
4980 must be in memory and BITPOS must be aligned on a byte
4981 boundary. If so, we simply do a block copy. */
4982 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4983 {
4984 unsigned int exp_align = expr_align (exp);
4985
4986 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4987 || bitpos % BITS_PER_UNIT != 0)
4988 abort ();
4989
4990 target = change_address (target, VOIDmode,
4991 plus_constant (XEXP (target, 0),
4992 bitpos / BITS_PER_UNIT));
4993
4994 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4995 align = MIN (exp_align, align);
4996
4997 /* Find an alignment that is consistent with the bit position. */
4998 while ((bitpos % align) != 0)
4999 align >>= 1;
5000
5001 emit_block_move (target, temp,
5002 bitsize == -1 ? expr_size (exp)
5003 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5004 / BITS_PER_UNIT),
5005 align);
5006
5007 return value_mode == VOIDmode ? const0_rtx : target;
5008 }
5009
5010 /* Store the value in the bitfield. */
5011 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5012 if (value_mode != VOIDmode)
5013 {
5014 /* The caller wants an rtx for the value. */
5015 /* If possible, avoid refetching from the bitfield itself. */
5016 if (width_mask != 0
5017 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5018 {
5019 tree count;
5020 enum machine_mode tmode;
5021
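/* An unsigned field only needs its upper bits masked off; a signed
   field is sign-extended by shifting left and then right
   arithmetically.  */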
5022 if (unsignedp)
5023 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
5024 tmode = GET_MODE (temp);
5025 if (tmode == VOIDmode)
5026 tmode = value_mode;
5027 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5028 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5029 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5030 }
5031 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5032 NULL_RTX, value_mode, 0, align,
5033 total_size);
5034 }
5035 return const0_rtx;
5036 }
5037 else
5038 {
5039 rtx addr = XEXP (target, 0);
5040 rtx to_rtx;
5041
5042 /* If a value is wanted, it must be the lhs;
5043 so make the address stable for multiple use. */
5044
5045 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5046 && ! CONSTANT_ADDRESS_P (addr)
5047 /* A frame-pointer reference is already stable. */
5048 && ! (GET_CODE (addr) == PLUS
5049 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5050 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5051 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5052 addr = copy_to_reg (addr);
5053
5054 /* Now build a reference to just the desired component. */
5055
5056 to_rtx = copy_rtx (change_address (target, mode,
5057 plus_constant (addr,
5058 (bitpos
5059 / BITS_PER_UNIT))));
5060 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5061 MEM_ALIAS_SET (to_rtx) = alias_set;
5062
5063 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5064 }
5065 }
5066 \f
5067 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5068 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
5069 ARRAY_REFs and find the ultimate containing object, which we return.
5070
5071 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5072 bit position, and *PUNSIGNEDP to the signedness of the field.
5073 If the position of the field is variable, we store a tree
5074 giving the variable offset (in units) in *POFFSET.
5075 This offset is in addition to the bit position.
5076 If the position is not variable, we store 0 in *POFFSET.
5077 We set *PALIGNMENT to the alignment of the address that will be
5078 computed. This is the alignment of the thing we return if *POFFSET
5079 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5080
5081 If any of the extraction expressions is volatile,
5082 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5083
5084 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5085 is a mode that can be used to access the field. In that case, *PBITSIZE
5086 is redundant.
5087
5088 If the field describes a variable-sized object, *PMODE is set to
5089 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5090 this case, but the address of the object can be found. */
5091
5092 tree
5093 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5094 punsignedp, pvolatilep, palignment)
5095 tree exp;
5096 HOST_WIDE_INT *pbitsize;
5097 HOST_WIDE_INT *pbitpos;
5098 tree *poffset;
5099 enum machine_mode *pmode;
5100 int *punsignedp;
5101 int *pvolatilep;
5102 unsigned int *palignment;
5103 {
5104 tree size_tree = 0;
5105 enum machine_mode mode = VOIDmode;
5106 tree offset = size_zero_node;
5107 tree bit_offset = bitsize_zero_node;
5108 unsigned int alignment = BIGGEST_ALIGNMENT;
5109 tree tem;
5110
5111 /* First get the mode, signedness, and size. We do this from just the
5112 outermost expression. */
5113 if (TREE_CODE (exp) == COMPONENT_REF)
5114 {
5115 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5116 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5117 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5118
5119 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5120 }
5121 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5122 {
5123 size_tree = TREE_OPERAND (exp, 1);
5124 *punsignedp = TREE_UNSIGNED (exp);
5125 }
5126 else
5127 {
5128 mode = TYPE_MODE (TREE_TYPE (exp));
5129 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5130
5131 if (mode == BLKmode)
5132 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5133 else
5134 *pbitsize = GET_MODE_BITSIZE (mode);
5135 }
5136
5137 if (size_tree != 0)
5138 {
5139 if (! host_integerp (size_tree, 1))
5140 mode = BLKmode, *pbitsize = -1;
5141 else
5142 *pbitsize = tree_low_cst (size_tree, 1);
5143 }
5144
5145 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5146 and find the ultimate containing object. */
5147 while (1)
5148 {
5149 if (TREE_CODE (exp) == BIT_FIELD_REF)
5150 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5151 else if (TREE_CODE (exp) == COMPONENT_REF)
5152 {
5153 tree field = TREE_OPERAND (exp, 1);
5154 tree this_offset = DECL_FIELD_OFFSET (field);
5155
5156 /* If this field hasn't been filled in yet, don't go
5157 past it. This should only happen when folding expressions
5158 made during type construction. */
5159 if (this_offset == 0)
5160 break;
5161 else if (! TREE_CONSTANT (this_offset)
5162 && contains_placeholder_p (this_offset))
5163 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5164
5165 offset = size_binop (PLUS_EXPR, offset, this_offset);
5166 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5167 DECL_FIELD_BIT_OFFSET (field));
5168
5169 if (! host_integerp (offset, 0))
5170 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5171 }
5172
5173 else if (TREE_CODE (exp) == ARRAY_REF)
5174 {
5175 tree index = TREE_OPERAND (exp, 1);
5176 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5177 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5178 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5179
5180 /* We assume all arrays have sizes that are a multiple of a byte.
5181 First subtract the lower bound, if any, in the type of the
5182 index, then convert to sizetype and multiply by the size of the
5183 array element. */
5184 if (low_bound != 0 && ! integer_zerop (low_bound))
5185 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5186 index, low_bound));
5187
5188 /* If the index has a self-referential type, pass it to a
5189 WITH_RECORD_EXPR; if the component size does, pass our
5190 component to one. */
5191 if (! TREE_CONSTANT (index)
5192 && contains_placeholder_p (index))
5193 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5194 if (! TREE_CONSTANT (unit_size)
5195 && contains_placeholder_p (unit_size))
5196 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5197 TREE_OPERAND (exp, 0));
5198
5199 offset = size_binop (PLUS_EXPR, offset,
5200 size_binop (MULT_EXPR,
5201 convert (sizetype, index),
5202 unit_size));
5203 }
5204
5205 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5206 && ! ((TREE_CODE (exp) == NOP_EXPR
5207 || TREE_CODE (exp) == CONVERT_EXPR)
5208 && (TYPE_MODE (TREE_TYPE (exp))
5209 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5210 break;
5211
5212 /* If any reference in the chain is volatile, the effect is volatile. */
5213 if (TREE_THIS_VOLATILE (exp))
5214 *pvolatilep = 1;
5215
5216 /* If the offset is non-constant already, then we can't assume any
5217 alignment more than the alignment here. */
5218 if (! TREE_CONSTANT (offset))
5219 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5220
5221 exp = TREE_OPERAND (exp, 0);
5222 }
5223
5224 if (DECL_P (exp))
5225 alignment = MIN (alignment, DECL_ALIGN (exp));
5226 else if (TREE_TYPE (exp) != 0)
5227 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5228
5229 /* If OFFSET is constant, see if we can return the whole thing as a
5230 constant bit position. Otherwise, split it up. */
5231 if (host_integerp (offset, 0)
5232 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5233 bitsize_unit_node))
5234 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5235 && host_integerp (tem, 0))
5236 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5237 else
5238 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5239
5240 *pmode = mode;
5241 *palignment = alignment;
5242 return exp;
5243 }
5244
5245 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5246
5247 static enum memory_use_mode
5248 get_memory_usage_from_modifier (modifier)
5249 enum expand_modifier modifier;
5250 {
5251 switch (modifier)
5252 {
5253 case EXPAND_NORMAL:
5254 case EXPAND_SUM:
5255 return MEMORY_USE_RO;
5256 break;
5257 case EXPAND_MEMORY_USE_WO:
5258 return MEMORY_USE_WO;
5259 break;
5260 case EXPAND_MEMORY_USE_RW:
5261 return MEMORY_USE_RW;
5262 break;
5263 case EXPAND_MEMORY_USE_DONT:
5264 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5265 MEMORY_USE_DONT, because they are modifiers to a call of
5266 expand_expr in the ADDR_EXPR case of expand_expr. */
5267 case EXPAND_CONST_ADDRESS:
5268 case EXPAND_INITIALIZER:
5269 return MEMORY_USE_DONT;
5270 case EXPAND_MEMORY_USE_BAD:
5271 default:
5272 abort ();
5273 }
5274 }
5275 \f
5276 /* Given an rtx VALUE that may contain additions and multiplications,
5277 return an equivalent value that just refers to a register or memory.
5278 This is done by generating instructions to perform the arithmetic
5279 and returning a pseudo-register containing the value.
5280
5281 The returned value may be a REG, SUBREG, MEM or constant. */
5282
5283 rtx
5284 force_operand (value, target)
5285 rtx value, target;
5286 {
5287 register optab binoptab = 0;
5288 /* Use a temporary to force order of execution of calls to
5289 `force_operand'. */
5290 rtx tmp;
5291 register rtx op2;
5292 /* Use subtarget as the target for operand 0 of a binary operation. */
5293 register rtx subtarget = get_subtarget (target);
5294
5295 /* Check for a PIC address load. */
5296 if (flag_pic
5297 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5298 && XEXP (value, 0) == pic_offset_table_rtx
5299 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5300 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5301 || GET_CODE (XEXP (value, 1)) == CONST))
5302 {
5303 if (!subtarget)
5304 subtarget = gen_reg_rtx (GET_MODE (value));
5305 emit_move_insn (subtarget, value);
5306 return subtarget;
5307 }
5308
5309 if (GET_CODE (value) == PLUS)
5310 binoptab = add_optab;
5311 else if (GET_CODE (value) == MINUS)
5312 binoptab = sub_optab;
5313 else if (GET_CODE (value) == MULT)
5314 {
5315 op2 = XEXP (value, 1);
5316 if (!CONSTANT_P (op2)
5317 && !(GET_CODE (op2) == REG && op2 != subtarget))
5318 subtarget = 0;
5319 tmp = force_operand (XEXP (value, 0), subtarget);
5320 return expand_mult (GET_MODE (value), tmp,
5321 force_operand (op2, NULL_RTX),
5322 target, 0);
5323 }
5324
5325 if (binoptab)
5326 {
5327 op2 = XEXP (value, 1);
5328 if (!CONSTANT_P (op2)
5329 && !(GET_CODE (op2) == REG && op2 != subtarget))
5330 subtarget = 0;
5331 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5332 {
5333 binoptab = add_optab;
5334 op2 = negate_rtx (GET_MODE (value), op2);
5335 }
5336
5337 /* Check for an addition with OP2 a constant integer and our first
5338 operand a PLUS of a virtual register and something else. In that
5339 case, we want to emit the sum of the virtual register and the
5340 constant first and then add the other value. This allows virtual
5341 register instantiation to simply modify the constant rather than
5342 creating another one around this addition. */
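/* For instance (an assumed shape, for illustration only): with
     VALUE = (plus:SI (plus:SI (reg:SI virtual-stack-vars) (reg:SI 100))
                      (const_int 8))
   we first form the sum of virtual-stack-vars and 8, which instantiation
   can later rewrite as a single frame-pointer offset, and only then add
   (reg:SI 100).  */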
5343 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5344 && GET_CODE (XEXP (value, 0)) == PLUS
5345 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5346 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5347 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5348 {
5349 rtx temp = expand_binop (GET_MODE (value), binoptab,
5350 XEXP (XEXP (value, 0), 0), op2,
5351 subtarget, 0, OPTAB_LIB_WIDEN);
5352 return expand_binop (GET_MODE (value), binoptab, temp,
5353 force_operand (XEXP (XEXP (value, 0), 1), 0),
5354 target, 0, OPTAB_LIB_WIDEN);
5355 }
5356
5357 tmp = force_operand (XEXP (value, 0), subtarget);
5358 /* We give UNSIGNEDP = 0 to expand_binop
5359 because the only operations we are expanding here are signed ones. */
5360 return expand_binop (GET_MODE (value), binoptab, tmp,
5361 force_operand (op2, NULL_RTX),
5362 target, 0, OPTAB_LIB_WIDEN);
5363 }
5364 return value;
5365 }
5366 \f
5367 /* Subroutine of expand_expr:
5368 save the non-copied parts (LIST) of an expr (LHS), and return a list
5369 which can restore these values to their previous values,
5370 should something modify their storage. */
5371
5372 static tree
5373 save_noncopied_parts (lhs, list)
5374 tree lhs;
5375 tree list;
5376 {
5377 tree tail;
5378 tree parts = 0;
5379
5380 for (tail = list; tail; tail = TREE_CHAIN (tail))
5381 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5382 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5383 else
5384 {
5385 tree part = TREE_VALUE (tail);
5386 tree part_type = TREE_TYPE (part);
5387 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5388 rtx target = assign_temp (part_type, 0, 1, 1);
5389 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5390 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5391 parts = tree_cons (to_be_saved,
5392 build (RTL_EXPR, part_type, NULL_TREE,
5393 (tree) target),
5394 parts);
5395 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5396 }
5397 return parts;
5398 }
5399
5400 /* Subroutine of expand_expr:
5401 record the non-copied parts (LIST) of an expr (LHS), and return a list
5402 which specifies the initial values of these parts. */
5403
5404 static tree
5405 init_noncopied_parts (lhs, list)
5406 tree lhs;
5407 tree list;
5408 {
5409 tree tail;
5410 tree parts = 0;
5411
5412 for (tail = list; tail; tail = TREE_CHAIN (tail))
5413 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5414 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5415 else if (TREE_PURPOSE (tail))
5416 {
5417 tree part = TREE_VALUE (tail);
5418 tree part_type = TREE_TYPE (part);
5419 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5420 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5421 }
5422 return parts;
5423 }
5424
5425 /* Subroutine of expand_expr: return nonzero iff there is no way that
5426 EXP can reference X, which is being modified. TOP_P is nonzero if this
5427 call is going to be used to determine whether we need a temporary
5428 for EXP, as opposed to a recursive call to this function.
5429
5430 It is always safe for this routine to return zero since it merely
5431 searches for optimization opportunities. */
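/* A sketch of the typical calling pattern (not a quotation from a caller):
   before computing an expression directly into the rtx that will also
   hold the result, code in this file does something like
     if (target == 0 || ! safe_from_p (target, exp, 1))
       target = assign_temp (type, 0, 1, 1);
   falling back to a fresh temporary whenever a clash is possible.  */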
5432
5433 int
5434 safe_from_p (x, exp, top_p)
5435 rtx x;
5436 tree exp;
5437 int top_p;
5438 {
5439 rtx exp_rtl = 0;
5440 int i, nops;
5441 static int save_expr_count;
5442 static int save_expr_size = 0;
5443 static tree *save_expr_rewritten;
5444 static tree save_expr_trees[256];
5445
5446 if (x == 0
5447 /* If EXP has varying size, we MUST use a target since we currently
5448 have no way of allocating temporaries of variable size
5449 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5450 So we assume here that something at a higher level has prevented a
5451 clash. This is somewhat bogus, but the best we can do. Only
5452 do this when X is BLKmode and when we are at the top level. */
5453 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5454 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5455 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5456 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5457 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5458 != INTEGER_CST)
5459 && GET_MODE (x) == BLKmode))
5460 return 1;
5461
5462 if (top_p && save_expr_size == 0)
5463 {
5464 int rtn;
5465
5466 save_expr_count = 0;
5467 save_expr_size = ARRAY_SIZE (save_expr_trees);
5468 save_expr_rewritten = &save_expr_trees[0];
5469
5470 rtn = safe_from_p (x, exp, 1);
5471
5472 for (i = 0; i < save_expr_count; ++i)
5473 {
5474 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5475 abort ();
5476 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5477 }
5478
5479 save_expr_size = 0;
5480
5481 return rtn;
5482 }
5483
5484 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
5485 find the underlying pseudo. */
5486 if (GET_CODE (x) == SUBREG)
5487 {
5488 x = SUBREG_REG (x);
5489 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5490 return 0;
5491 }
5492
5493 /* If X is a location in the outgoing argument area, it is always safe. */
5494 if (GET_CODE (x) == MEM
5495 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5496 || (GET_CODE (XEXP (x, 0)) == PLUS
5497 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5498 return 1;
5499
5500 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5501 {
5502 case 'd':
5503 exp_rtl = DECL_RTL (exp);
5504 break;
5505
5506 case 'c':
5507 return 1;
5508
5509 case 'x':
5510 if (TREE_CODE (exp) == TREE_LIST)
5511 return ((TREE_VALUE (exp) == 0
5512 || safe_from_p (x, TREE_VALUE (exp), 0))
5513 && (TREE_CHAIN (exp) == 0
5514 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5515 else if (TREE_CODE (exp) == ERROR_MARK)
5516 return 1; /* An already-visited SAVE_EXPR? */
5517 else
5518 return 0;
5519
5520 case '1':
5521 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5522
5523 case '2':
5524 case '<':
5525 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5526 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5527
5528 case 'e':
5529 case 'r':
5530 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5531 the expression. If it is set, we conflict iff we are that rtx or
5532 both are in memory. Otherwise, we check all operands of the
5533 expression recursively. */
5534
5535 switch (TREE_CODE (exp))
5536 {
5537 case ADDR_EXPR:
5538 return (staticp (TREE_OPERAND (exp, 0))
5539 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5540 || TREE_STATIC (exp));
5541
5542 case INDIRECT_REF:
5543 if (GET_CODE (x) == MEM)
5544 return 0;
5545 break;
5546
5547 case CALL_EXPR:
5548 exp_rtl = CALL_EXPR_RTL (exp);
5549 if (exp_rtl == 0)
5550 {
5551 /* Assume that the call will clobber all hard registers and
5552 all of memory. */
5553 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5554 || GET_CODE (x) == MEM)
5555 return 0;
5556 }
5557
5558 break;
5559
5560 case RTL_EXPR:
5561 /* If a sequence exists, we would have to scan every instruction
5562 in the sequence to see if it was safe. This is probably not
5563 worthwhile. */
5564 if (RTL_EXPR_SEQUENCE (exp))
5565 return 0;
5566
5567 exp_rtl = RTL_EXPR_RTL (exp);
5568 break;
5569
5570 case WITH_CLEANUP_EXPR:
5571 exp_rtl = RTL_EXPR_RTL (exp);
5572 break;
5573
5574 case CLEANUP_POINT_EXPR:
5575 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5576
5577 case SAVE_EXPR:
5578 exp_rtl = SAVE_EXPR_RTL (exp);
5579 if (exp_rtl)
5580 break;
5581
5582 /* This SAVE_EXPR might appear many times in the top-level
5583 safe_from_p() expression, and if it has a complex
5584 subexpression, examining it multiple times could result
5585 in a combinatorial explosion. E.g. on an Alpha
5586 running at least 200MHz, a Fortran test case compiled with
5587 optimization took about 28 minutes to compile -- even though
5588 it was only a few lines long, and the complicated line causing
5589 so much time to be spent in the earlier version of safe_from_p()
5590 had only 293 or so unique nodes.
5591
5592 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5593 where it is so we can turn it back in the top-level safe_from_p()
5594 when we're done. */
5595
5596 /* For now, don't bother re-sizing the array. */
5597 if (save_expr_count >= save_expr_size)
5598 return 0;
5599 save_expr_rewritten[save_expr_count++] = exp;
5600
5601 nops = TREE_CODE_LENGTH (SAVE_EXPR);
5602 for (i = 0; i < nops; i++)
5603 {
5604 tree operand = TREE_OPERAND (exp, i);
5605 if (operand == NULL_TREE)
5606 continue;
5607 TREE_SET_CODE (exp, ERROR_MARK);
5608 if (!safe_from_p (x, operand, 0))
5609 return 0;
5610 TREE_SET_CODE (exp, SAVE_EXPR);
5611 }
5612 TREE_SET_CODE (exp, ERROR_MARK);
5613 return 1;
5614
5615 case BIND_EXPR:
5616 /* The only operand we look at is operand 1. The rest aren't
5617 part of the expression. */
5618 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5619
5620 case METHOD_CALL_EXPR:
5621 /* This takes an rtx argument, but shouldn't appear here. */
5622 abort ();
5623
5624 default:
5625 break;
5626 }
5627
5628 /* If we have an rtx, we do not need to scan our operands. */
5629 if (exp_rtl)
5630 break;
5631
5632 nops = first_rtl_op (TREE_CODE (exp));
5633 for (i = 0; i < nops; i++)
5634 if (TREE_OPERAND (exp, i) != 0
5635 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5636 return 0;
5637
5638 /* If this is a language-specific tree code, it may require
5639 special handling. */
5640 if (TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE
5641 && lang_safe_from_p
5642 && !(*lang_safe_from_p) (x, exp))
5643 return 0;
5644 }
5645
5646 /* If we have an rtl, find any enclosed object. Then see if we conflict
5647 with it. */
5648 if (exp_rtl)
5649 {
5650 if (GET_CODE (exp_rtl) == SUBREG)
5651 {
5652 exp_rtl = SUBREG_REG (exp_rtl);
5653 if (GET_CODE (exp_rtl) == REG
5654 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5655 return 0;
5656 }
5657
5658 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5659 are memory and EXP is not readonly. */
5660 return ! (rtx_equal_p (x, exp_rtl)
5661 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5662 && ! TREE_READONLY (exp)));
5663 }
5664
5665 /* If we reach here, it is safe. */
5666 return 1;
5667 }
5668
5669 /* Subroutine of expand_expr: return nonzero iff EXP is an
5670 expression whose type is statically determinable. */
5671
5672 static int
5673 fixed_type_p (exp)
5674 tree exp;
5675 {
5676 if (TREE_CODE (exp) == PARM_DECL
5677 || TREE_CODE (exp) == VAR_DECL
5678 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5679 || TREE_CODE (exp) == COMPONENT_REF
5680 || TREE_CODE (exp) == ARRAY_REF)
5681 return 1;
5682 return 0;
5683 }
5684
5685 /* Subroutine of expand_expr: return rtx if EXP is a
5686 variable or parameter; else return 0. */
5687
5688 static rtx
5689 var_rtx (exp)
5690 tree exp;
5691 {
5692 STRIP_NOPS (exp);
5693 switch (TREE_CODE (exp))
5694 {
5695 case PARM_DECL:
5696 case VAR_DECL:
5697 return DECL_RTL (exp);
5698 default:
5699 return 0;
5700 }
5701 }
5702
5703 #ifdef MAX_INTEGER_COMPUTATION_MODE
5704 void
5705 check_max_integer_computation_mode (exp)
5706 tree exp;
5707 {
5708 enum tree_code code;
5709 enum machine_mode mode;
5710
5711 /* Strip any NOPs that don't change the mode. */
5712 STRIP_NOPS (exp);
5713 code = TREE_CODE (exp);
5714
5715 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5716 if (code == NOP_EXPR
5717 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5718 return;
5719
5720 /* First check the type of the overall operation. We need only look at
5721 unary, binary and relational operations. */
5722 if (TREE_CODE_CLASS (code) == '1'
5723 || TREE_CODE_CLASS (code) == '2'
5724 || TREE_CODE_CLASS (code) == '<')
5725 {
5726 mode = TYPE_MODE (TREE_TYPE (exp));
5727 if (GET_MODE_CLASS (mode) == MODE_INT
5728 && mode > MAX_INTEGER_COMPUTATION_MODE)
5729 fatal ("unsupported wide integer operation");
5730 }
5731
5732 /* Check operand of a unary op. */
5733 if (TREE_CODE_CLASS (code) == '1')
5734 {
5735 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5736 if (GET_MODE_CLASS (mode) == MODE_INT
5737 && mode > MAX_INTEGER_COMPUTATION_MODE)
5738 fatal ("unsupported wide integer operation");
5739 }
5740
5741 /* Check operands of a binary/comparison op. */
5742 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5743 {
5744 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5745 if (GET_MODE_CLASS (mode) == MODE_INT
5746 && mode > MAX_INTEGER_COMPUTATION_MODE)
5747 fatal ("unsupported wide integer operation");
5748
5749 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5750 if (GET_MODE_CLASS (mode) == MODE_INT
5751 && mode > MAX_INTEGER_COMPUTATION_MODE)
5752 fatal ("unsupported wide integer operation");
5753 }
5754 }
5755 #endif
5756 \f
5757 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5758 has any readonly fields. If any of the fields have types that
5759 contain readonly fields, return true as well. */
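/* For example (hypothetical C source): given
     struct s { const int a; };
     struct t { struct s inner; };
   this returns nonzero for struct s because of the const field, and for
   struct t because one of its fields has a type containing such a field.  */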
5760
5761 static int
5762 readonly_fields_p (type)
5763 tree type;
5764 {
5765 tree field;
5766
5767 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5768 if (TREE_CODE (field) == FIELD_DECL
5769 && (TREE_READONLY (field)
5770 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5771 && readonly_fields_p (TREE_TYPE (field)))))
5772 return 1;
5773
5774 return 0;
5775 }
5776 \f
5777 /* expand_expr: generate code for computing expression EXP.
5778 An rtx for the computed value is returned. The value is never null.
5779 In the case of a void EXP, const0_rtx is returned.
5780
5781 The value may be stored in TARGET if TARGET is nonzero.
5782 TARGET is just a suggestion; callers must assume that
5783 the rtx returned may not be the same as TARGET.
5784
5785 If TARGET is CONST0_RTX, it means that the value will be ignored.
5786
5787 If TMODE is not VOIDmode, it suggests generating the
5788 result in mode TMODE. But this is done only when convenient.
5789 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5790 TMODE is just a suggestion; callers must assume that
5791 the rtx returned may not have mode TMODE.
5792
5793 Note that TARGET may have neither TMODE nor MODE. In that case, it
5794 probably will not be used.
5795
5796 If MODIFIER is EXPAND_SUM then when EXP is an addition
5797 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5798 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5799 products as above, or REG or MEM, or constant.
5800 Ordinarily in such cases we would output mul or add instructions
5801 and then return a pseudo reg containing the sum.
5802
5803 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5804 it also marks a label as absolutely required (it can't be dead).
5805 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5806 This is used for outputting expressions used in initializers.
5807
5808 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5809 with a constant address even if that address is not normally legitimate.
5810 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
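/* A minimal calling sketch (illustrative; the helpers named here are the
   ones used elsewhere in this file): a caller that simply wants the value
   somewhere convenient writes
     op0 = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
   and, since TARGET and TMODE are only suggestions, must apply force_reg
   or convert_move itself if it needs the result in a specific register
   or mode.  */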
5811
5812 rtx
5813 expand_expr (exp, target, tmode, modifier)
5814 register tree exp;
5815 rtx target;
5816 enum machine_mode tmode;
5817 enum expand_modifier modifier;
5818 {
5819 register rtx op0, op1, temp;
5820 tree type = TREE_TYPE (exp);
5821 int unsignedp = TREE_UNSIGNED (type);
5822 register enum machine_mode mode;
5823 register enum tree_code code = TREE_CODE (exp);
5824 optab this_optab;
5825 rtx subtarget, original_target;
5826 int ignore;
5827 tree context;
5828 /* Used by check-memory-usage to make modifier read only. */
5829 enum expand_modifier ro_modifier;
5830
5831 /* Handle ERROR_MARK before anybody tries to access its type. */
5832 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
5833 {
5834 op0 = CONST0_RTX (tmode);
5835 if (op0 != 0)
5836 return op0;
5837 return const0_rtx;
5838 }
5839
5840 mode = TYPE_MODE (type);
5841 /* Use subtarget as the target for operand 0 of a binary operation. */
5842 subtarget = get_subtarget (target);
5843 original_target = target;
5844 ignore = (target == const0_rtx
5845 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5846 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5847 || code == COND_EXPR)
5848 && TREE_CODE (type) == VOID_TYPE));
5849
5850 /* Make a read-only version of the modifier. */
5851 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5852 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5853 ro_modifier = modifier;
5854 else
5855 ro_modifier = EXPAND_NORMAL;
5856
5857 /* If we are going to ignore this result, we need only do something
5858 if there is a side-effect somewhere in the expression. If there
5859 is, short-circuit the most common cases here. Note that we must
5860 not call expand_expr with anything but const0_rtx in case this
5861 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5862
5863 if (ignore)
5864 {
5865 if (! TREE_SIDE_EFFECTS (exp))
5866 return const0_rtx;
5867
5868 /* Ensure we reference a volatile object even if value is ignored, but
5869 don't do this if all we are doing is taking its address. */
5870 if (TREE_THIS_VOLATILE (exp)
5871 && TREE_CODE (exp) != FUNCTION_DECL
5872 && mode != VOIDmode && mode != BLKmode
5873 && modifier != EXPAND_CONST_ADDRESS)
5874 {
5875 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5876 if (GET_CODE (temp) == MEM)
5877 temp = copy_to_reg (temp);
5878 return const0_rtx;
5879 }
5880
5881 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5882 || code == INDIRECT_REF || code == BUFFER_REF)
5883 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5884 VOIDmode, ro_modifier);
5885 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5886 || code == ARRAY_REF)
5887 {
5888 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5889 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5890 return const0_rtx;
5891 }
5892 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5893 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5894 /* If the second operand has no side effects, just evaluate
5895 the first. */
5896 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5897 VOIDmode, ro_modifier);
5898 else if (code == BIT_FIELD_REF)
5899 {
5900 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5901 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5902 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5903 return const0_rtx;
5904 }
5905 ;
5906 target = 0;
5907 }
5908
5909 #ifdef MAX_INTEGER_COMPUTATION_MODE
5910 /* Only check stuff here if the mode we want is different from the mode
5911 of the expression; if it's the same, check_max_integer_computation_mode
5912 will handle it. Do we really need to check this stuff at all? */
5913
5914 if (target
5915 && GET_MODE (target) != mode
5916 && TREE_CODE (exp) != INTEGER_CST
5917 && TREE_CODE (exp) != PARM_DECL
5918 && TREE_CODE (exp) != ARRAY_REF
5919 && TREE_CODE (exp) != COMPONENT_REF
5920 && TREE_CODE (exp) != BIT_FIELD_REF
5921 && TREE_CODE (exp) != INDIRECT_REF
5922 && TREE_CODE (exp) != CALL_EXPR
5923 && TREE_CODE (exp) != VAR_DECL
5924 && TREE_CODE (exp) != RTL_EXPR)
5925 {
5926 enum machine_mode mode = GET_MODE (target);
5927
5928 if (GET_MODE_CLASS (mode) == MODE_INT
5929 && mode > MAX_INTEGER_COMPUTATION_MODE)
5930 fatal ("unsupported wide integer operation");
5931 }
5932
5933 if (tmode != mode
5934 && TREE_CODE (exp) != INTEGER_CST
5935 && TREE_CODE (exp) != PARM_DECL
5936 && TREE_CODE (exp) != ARRAY_REF
5937 && TREE_CODE (exp) != COMPONENT_REF
5938 && TREE_CODE (exp) != BIT_FIELD_REF
5939 && TREE_CODE (exp) != INDIRECT_REF
5940 && TREE_CODE (exp) != VAR_DECL
5941 && TREE_CODE (exp) != CALL_EXPR
5942 && TREE_CODE (exp) != RTL_EXPR
5943 && GET_MODE_CLASS (tmode) == MODE_INT
5944 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5945 fatal ("unsupported wide integer operation");
5946
5947 check_max_integer_computation_mode (exp);
5948 #endif
5949
5950 /* If we will do cse, generate all results into pseudo registers
5951 since 1) that allows cse to find more things
5952 and 2) otherwise cse could produce an insn the machine
5953 cannot support. */
5954
5955 if (! cse_not_expected && mode != BLKmode && target
5956 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5957 target = subtarget;
5958
5959 switch (code)
5960 {
5961 case LABEL_DECL:
5962 {
5963 tree function = decl_function_context (exp);
5964 /* Handle using a label in a containing function. */
5965 if (function != current_function_decl
5966 && function != inline_function_decl && function != 0)
5967 {
5968 struct function *p = find_function_data (function);
5969 /* Allocate in the memory associated with the function
5970 that the label is in. */
5971 push_obstacks (p->function_obstack,
5972 p->function_maybepermanent_obstack);
5973
5974 p->expr->x_forced_labels
5975 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5976 p->expr->x_forced_labels);
5977 pop_obstacks ();
5978 }
5979 else
5980 {
5981 if (modifier == EXPAND_INITIALIZER)
5982 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5983 label_rtx (exp),
5984 forced_labels);
5985 }
5986
5987 temp = gen_rtx_MEM (FUNCTION_MODE,
5988 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5989 if (function != current_function_decl
5990 && function != inline_function_decl && function != 0)
5991 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5992 return temp;
5993 }
5994
5995 case PARM_DECL:
5996 if (DECL_RTL (exp) == 0)
5997 {
5998 error_with_decl (exp, "prior parameter's size depends on `%s'");
5999 return CONST0_RTX (mode);
6000 }
6001
6002 /* ... fall through ... */
6003
6004 case VAR_DECL:
6005 /* If a static var's type was incomplete when the decl was written,
6006 but the type is complete now, lay out the decl now. */
6007 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6008 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6009 {
6010 push_obstacks_nochange ();
6011 end_temporary_allocation ();
6012 layout_decl (exp, 0);
6013 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6014 pop_obstacks ();
6015 }
6016
6017 /* Although static-storage variables start off initialized, according to
6018 ANSI C, a memcpy could overwrite them with uninitialized values. So
6019 we check them too. This also lets us check for read-only variables
6020 accessed via a non-const declaration, in case it won't be detected
6021 any other way (e.g., in an embedded system or OS kernel without
6022 memory protection).
6023
6024 Aggregates are not checked here; they're handled elsewhere. */
6025 if (cfun && current_function_check_memory_usage
6026 && code == VAR_DECL
6027 && GET_CODE (DECL_RTL (exp)) == MEM
6028 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6029 {
6030 enum memory_use_mode memory_usage;
6031 memory_usage = get_memory_usage_from_modifier (modifier);
6032
6033 in_check_memory_usage = 1;
6034 if (memory_usage != MEMORY_USE_DONT)
6035 emit_library_call (chkr_check_addr_libfunc,
6036 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6037 XEXP (DECL_RTL (exp), 0), Pmode,
6038 GEN_INT (int_size_in_bytes (type)),
6039 TYPE_MODE (sizetype),
6040 GEN_INT (memory_usage),
6041 TYPE_MODE (integer_type_node));
6042 in_check_memory_usage = 0;
6043 }
6044
6045 /* ... fall through ... */
6046
6047 case FUNCTION_DECL:
6048 case RESULT_DECL:
6049 if (DECL_RTL (exp) == 0)
6050 abort ();
6051
6052 /* Ensure the variable is marked as used even if it doesn't go through
6053 a parser. If it hasn't been used yet, write out an external
6054 definition. */
6055 if (! TREE_USED (exp))
6056 {
6057 assemble_external (exp);
6058 TREE_USED (exp) = 1;
6059 }
6060
6061 /* Show we haven't gotten RTL for this yet. */
6062 temp = 0;
6063
6064 /* Handle variables inherited from containing functions. */
6065 context = decl_function_context (exp);
6066
6067 /* We treat inline_function_decl as an alias for the current function
6068 because that is the inline function whose vars, types, etc.
6069 are being merged into the current function.
6070 See expand_inline_function. */
6071
6072 if (context != 0 && context != current_function_decl
6073 && context != inline_function_decl
6074 /* If var is static, we don't need a static chain to access it. */
6075 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6076 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6077 {
6078 rtx addr;
6079
6080 /* Mark as non-local and addressable. */
6081 DECL_NONLOCAL (exp) = 1;
6082 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6083 abort ();
6084 mark_addressable (exp);
6085 if (GET_CODE (DECL_RTL (exp)) != MEM)
6086 abort ();
6087 addr = XEXP (DECL_RTL (exp), 0);
6088 if (GET_CODE (addr) == MEM)
6089 addr = change_address (addr, Pmode,
6090 fix_lexical_addr (XEXP (addr, 0), exp));
6091 else
6092 addr = fix_lexical_addr (addr, exp);
6093
6094 temp = change_address (DECL_RTL (exp), mode, addr);
6095 }
6096
6097 /* This is the case of an array whose size is to be determined
6098 from its initializer, while the initializer is still being parsed.
6099 See expand_decl. */
6100
6101 else if (GET_CODE (DECL_RTL (exp)) == MEM
6102 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6103 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
6104 XEXP (DECL_RTL (exp), 0));
6105
6106 /* If DECL_RTL is memory, we are in the normal case; if either
6107 the address is not valid, or it is not a register and -fforce-addr
6108 is specified, get the address into a register. */
6109
6110 else if (GET_CODE (DECL_RTL (exp)) == MEM
6111 && modifier != EXPAND_CONST_ADDRESS
6112 && modifier != EXPAND_SUM
6113 && modifier != EXPAND_INITIALIZER
6114 && (! memory_address_p (DECL_MODE (exp),
6115 XEXP (DECL_RTL (exp), 0))
6116 || (flag_force_addr
6117 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6118 temp = change_address (DECL_RTL (exp), VOIDmode,
6119 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6120
6121 /* If we got something, return it. But first, set the alignment
6122 if the address is a register. */
6123 if (temp != 0)
6124 {
6125 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6126 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6127
6128 return temp;
6129 }
6130
6131 /* If the mode of DECL_RTL does not match that of the decl, it
6132 must be a promoted value. We return a SUBREG of the wanted mode,
6133 but mark it so that we know that it was already extended. */
6134
6135 if (GET_CODE (DECL_RTL (exp)) == REG
6136 && GET_MODE (DECL_RTL (exp)) != mode)
6137 {
6138 /* Get the signedness used for this variable. Ensure we get the
6139 same mode we got when the variable was declared. */
6140 if (GET_MODE (DECL_RTL (exp))
6141 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6142 abort ();
6143
6144 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6145 SUBREG_PROMOTED_VAR_P (temp) = 1;
6146 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6147 return temp;
6148 }
6149
6150 return DECL_RTL (exp);
6151
6152 case INTEGER_CST:
6153 return immed_double_const (TREE_INT_CST_LOW (exp),
6154 TREE_INT_CST_HIGH (exp), mode);
6155
6156 case CONST_DECL:
6157 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6158 EXPAND_MEMORY_USE_BAD);
6159
6160 case REAL_CST:
6161 /* If optimized, generate immediate CONST_DOUBLE
6162 which will be turned into memory by reload if necessary.
6163
6164 We used to force a register so that loop.c could see it. But
6165 this does not allow gen_* patterns to perform optimizations with
6166 the constants. It also produces two insns in cases like "x = 1.0;".
6167 On most machines, floating-point constants are not permitted in
6168 many insns, so we'd end up copying it to a register in any case.
6169
6170 Now, we do the copying in expand_binop, if appropriate. */
6171 return immed_real_const (exp);
6172
6173 case COMPLEX_CST:
6174 case STRING_CST:
6175 if (! TREE_CST_RTL (exp))
6176 output_constant_def (exp);
6177
6178 /* TREE_CST_RTL probably contains a constant address.
6179 On RISC machines where a constant address isn't valid,
6180 make some insns to get that address into a register. */
6181 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6182 && modifier != EXPAND_CONST_ADDRESS
6183 && modifier != EXPAND_INITIALIZER
6184 && modifier != EXPAND_SUM
6185 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6186 || (flag_force_addr
6187 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6188 return change_address (TREE_CST_RTL (exp), VOIDmode,
6189 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6190 return TREE_CST_RTL (exp);
6191
6192 case EXPR_WITH_FILE_LOCATION:
6193 {
6194 rtx to_return;
6195 const char *saved_input_filename = input_filename;
6196 int saved_lineno = lineno;
6197 input_filename = EXPR_WFL_FILENAME (exp);
6198 lineno = EXPR_WFL_LINENO (exp);
6199 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6200 emit_line_note (input_filename, lineno);
6201 /* Possibly avoid switching back and forth here. */
6202 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6203 input_filename = saved_input_filename;
6204 lineno = saved_lineno;
6205 return to_return;
6206 }
6207
6208 case SAVE_EXPR:
6209 context = decl_function_context (exp);
6210
6211 /* If this SAVE_EXPR was at global context, assume we are an
6212 initialization function and move it into our context. */
6213 if (context == 0)
6214 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6215
6216 /* We treat inline_function_decl as an alias for the current function
6217 because that is the inline function whose vars, types, etc.
6218 are being merged into the current function.
6219 See expand_inline_function. */
6220 if (context == current_function_decl || context == inline_function_decl)
6221 context = 0;
6222
6223 /* If this is non-local, handle it. */
6224 if (context)
6225 {
6226 /* The following call just exists to abort if the context is
6227 not of a containing function. */
6228 find_function_data (context);
6229
6230 temp = SAVE_EXPR_RTL (exp);
6231 if (temp && GET_CODE (temp) == REG)
6232 {
6233 put_var_into_stack (exp);
6234 temp = SAVE_EXPR_RTL (exp);
6235 }
6236 if (temp == 0 || GET_CODE (temp) != MEM)
6237 abort ();
6238 return change_address (temp, mode,
6239 fix_lexical_addr (XEXP (temp, 0), exp));
6240 }
6241 if (SAVE_EXPR_RTL (exp) == 0)
6242 {
6243 if (mode == VOIDmode)
6244 temp = const0_rtx;
6245 else
6246 temp = assign_temp (type, 3, 0, 0);
6247
6248 SAVE_EXPR_RTL (exp) = temp;
6249 if (!optimize && GET_CODE (temp) == REG)
6250 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6251 save_expr_regs);
6252
6253 /* If the mode of TEMP does not match that of the expression, it
6254 must be a promoted value. We pass store_expr a SUBREG of the
6255 wanted mode but mark it so that we know that it was already
6256 extended. Note that `unsignedp' was modified above in
6257 this case. */
6258
6259 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6260 {
6261 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6262 SUBREG_PROMOTED_VAR_P (temp) = 1;
6263 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6264 }
6265
6266 if (temp == const0_rtx)
6267 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6268 EXPAND_MEMORY_USE_BAD);
6269 else
6270 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6271
6272 TREE_USED (exp) = 1;
6273 }
6274
6275 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6276 must be a promoted value. We return a SUBREG of the wanted mode,
6277 but mark it so that we know that it was already extended. */
6278
6279 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6280 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6281 {
6282 /* Compute the signedness and make the proper SUBREG. */
6283 promote_mode (type, mode, &unsignedp, 0);
6284 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6285 SUBREG_PROMOTED_VAR_P (temp) = 1;
6286 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6287 return temp;
6288 }
6289
6290 return SAVE_EXPR_RTL (exp);
6291
6292 case UNSAVE_EXPR:
6293 {
6294 rtx temp;
6295 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6296 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6297 return temp;
6298 }
6299
6300 case PLACEHOLDER_EXPR:
6301 {
6302 tree placeholder_expr;
6303
6304 /* If there is an object on the head of the placeholder list,
6305 see if some object in it is of type TYPE or a pointer to it. For
6306 further information, see tree.def. */
6307 for (placeholder_expr = placeholder_list;
6308 placeholder_expr != 0;
6309 placeholder_expr = TREE_CHAIN (placeholder_expr))
6310 {
6311 tree need_type = TYPE_MAIN_VARIANT (type);
6312 tree object = 0;
6313 tree old_list = placeholder_list;
6314 tree elt;
6315
6316 /* Find the outermost reference that is of the type we want.
6317 If none, see if any object has a type that is a pointer to
6318 the type we want. */
6319 for (elt = TREE_PURPOSE (placeholder_expr);
6320 elt != 0 && object == 0;
6321 elt
6322 = ((TREE_CODE (elt) == COMPOUND_EXPR
6323 || TREE_CODE (elt) == COND_EXPR)
6324 ? TREE_OPERAND (elt, 1)
6325 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6326 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6327 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6328 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6329 ? TREE_OPERAND (elt, 0) : 0))
6330 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6331 object = elt;
6332
6333 for (elt = TREE_PURPOSE (placeholder_expr);
6334 elt != 0 && object == 0;
6335 elt
6336 = ((TREE_CODE (elt) == COMPOUND_EXPR
6337 || TREE_CODE (elt) == COND_EXPR)
6338 ? TREE_OPERAND (elt, 1)
6339 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6340 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6341 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6342 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6343 ? TREE_OPERAND (elt, 0) : 0))
6344 if (POINTER_TYPE_P (TREE_TYPE (elt))
6345 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6346 == need_type))
6347 object = build1 (INDIRECT_REF, need_type, elt);
6348
6349 if (object != 0)
6350 {
6351 /* Expand this object skipping the list entries before
6352 it was found in case it is also a PLACEHOLDER_EXPR.
6353 In that case, we want to translate it using subsequent
6354 entries. */
6355 placeholder_list = TREE_CHAIN (placeholder_expr);
6356 temp = expand_expr (object, original_target, tmode,
6357 ro_modifier);
6358 placeholder_list = old_list;
6359 return temp;
6360 }
6361 }
6362 }
6363
6364 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6365 abort ();
6366
6367 case WITH_RECORD_EXPR:
6368 /* Put the object on the placeholder list, expand our first operand,
6369 and pop the list. */
6370 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6371 placeholder_list);
6372 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6373 tmode, ro_modifier);
6374 placeholder_list = TREE_CHAIN (placeholder_list);
6375 return target;
6376
6377 case GOTO_EXPR:
6378 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6379 expand_goto (TREE_OPERAND (exp, 0));
6380 else
6381 expand_computed_goto (TREE_OPERAND (exp, 0));
6382 return const0_rtx;
6383
6384 case EXIT_EXPR:
6385 expand_exit_loop_if_false (NULL_PTR,
6386 invert_truthvalue (TREE_OPERAND (exp, 0)));
6387 return const0_rtx;
6388
6389 case LABELED_BLOCK_EXPR:
6390 if (LABELED_BLOCK_BODY (exp))
6391 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6392 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6393 return const0_rtx;
6394
6395 case EXIT_BLOCK_EXPR:
6396 if (EXIT_BLOCK_RETURN (exp))
6397 sorry ("returned value in block_exit_expr");
6398 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6399 return const0_rtx;
6400
6401 case LOOP_EXPR:
6402 push_temp_slots ();
6403 expand_start_loop (1);
6404 expand_expr_stmt (TREE_OPERAND (exp, 0));
6405 expand_end_loop ();
6406 pop_temp_slots ();
6407
6408 return const0_rtx;
6409
6410 case BIND_EXPR:
6411 {
6412 tree vars = TREE_OPERAND (exp, 0);
6413 int vars_need_expansion = 0;
6414
6415 /* Need to open a binding contour here because
6416 if there are any cleanups they must be contained here. */
6417 expand_start_bindings (2);
6418
6419 /* Mark the corresponding BLOCK for output in its proper place. */
6420 if (TREE_OPERAND (exp, 2) != 0
6421 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6422 insert_block (TREE_OPERAND (exp, 2));
6423
6424 /* If VARS have not yet been expanded, expand them now. */
6425 while (vars)
6426 {
6427 if (DECL_RTL (vars) == 0)
6428 {
6429 vars_need_expansion = 1;
6430 expand_decl (vars);
6431 }
6432 expand_decl_init (vars);
6433 vars = TREE_CHAIN (vars);
6434 }
6435
6436 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6437
6438 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6439
6440 return temp;
6441 }
6442
6443 case RTL_EXPR:
6444 if (RTL_EXPR_SEQUENCE (exp))
6445 {
6446 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6447 abort ();
6448 emit_insns (RTL_EXPR_SEQUENCE (exp));
6449 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6450 }
6451 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6452 free_temps_for_rtl_expr (exp);
6453 return RTL_EXPR_RTL (exp);
6454
6455 case CONSTRUCTOR:
6456 /* If we don't need the result, just ensure we evaluate any
6457 subexpressions. */
6458 if (ignore)
6459 {
6460 tree elt;
6461 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6462 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6463 EXPAND_MEMORY_USE_BAD);
6464 return const0_rtx;
6465 }
6466
6467 /* All elts simple constants => refer to a constant in memory. But
6468 if this is a non-BLKmode mode, let it store a field at a time
6469 since that should make a CONST_INT or CONST_DOUBLE when we
6470 fold. Likewise, if we have a target we can use, it is best to
6471 store directly into the target unless the type is large enough
6472 that memcpy will be used. If we are making an initializer and
6473 all operands are constant, put it in memory as well. */
6474 else if ((TREE_STATIC (exp)
6475 && ((mode == BLKmode
6476 && ! (target != 0 && safe_from_p (target, exp, 1)))
6477 || TREE_ADDRESSABLE (exp)
6478 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6479 && (! MOVE_BY_PIECES_P
6480 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6481 TYPE_ALIGN (type)))
6482 && ! mostly_zeros_p (exp))))
6483 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6484 {
6485 rtx constructor = output_constant_def (exp);
6486
6487 if (modifier != EXPAND_CONST_ADDRESS
6488 && modifier != EXPAND_INITIALIZER
6489 && modifier != EXPAND_SUM
6490 && (! memory_address_p (GET_MODE (constructor),
6491 XEXP (constructor, 0))
6492 || (flag_force_addr
6493 && GET_CODE (XEXP (constructor, 0)) != REG)))
6494 constructor = change_address (constructor, VOIDmode,
6495 XEXP (constructor, 0));
6496 return constructor;
6497 }
6498
6499 else
6500 {
6501 /* Handle calls that pass values in multiple non-contiguous
6502 locations. The Irix 6 ABI has examples of this. */
6503 if (target == 0 || ! safe_from_p (target, exp, 1)
6504 || GET_CODE (target) == PARALLEL)
6505 {
6506 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6507 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6508 else
6509 target = assign_temp (type, 0, 1, 1);
6510 }
6511
6512 if (TREE_READONLY (exp))
6513 {
6514 if (GET_CODE (target) == MEM)
6515 target = copy_rtx (target);
6516
6517 RTX_UNCHANGING_P (target) = 1;
6518 }
6519
6520 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6521 int_size_in_bytes (TREE_TYPE (exp)));
6522 return target;
6523 }
6524
6525 case INDIRECT_REF:
6526 {
6527 tree exp1 = TREE_OPERAND (exp, 0);
6528 tree index;
6529 tree string = string_constant (exp1, &index);
6530
6531 /* Try to optimize reads from const strings. */
6532 if (string
6533 && TREE_CODE (string) == STRING_CST
6534 && TREE_CODE (index) == INTEGER_CST
6535 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6536 && GET_MODE_CLASS (mode) == MODE_INT
6537 && GET_MODE_SIZE (mode) == 1
6538 && modifier != EXPAND_MEMORY_USE_WO)
6539 return
6540 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6541
6542 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6543 op0 = memory_address (mode, op0);
6544
6545 if (cfun && current_function_check_memory_usage
6546 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6547 {
6548 enum memory_use_mode memory_usage;
6549 memory_usage = get_memory_usage_from_modifier (modifier);
6550
6551 if (memory_usage != MEMORY_USE_DONT)
6552 {
6553 in_check_memory_usage = 1;
6554 emit_library_call (chkr_check_addr_libfunc,
6555 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6556 Pmode, GEN_INT (int_size_in_bytes (type)),
6557 TYPE_MODE (sizetype),
6558 GEN_INT (memory_usage),
6559 TYPE_MODE (integer_type_node));
6560 in_check_memory_usage = 0;
6561 }
6562 }
6563
6564 temp = gen_rtx_MEM (mode, op0);
6565 set_mem_attributes (temp, exp, 0);
6566
6567 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6568 here, because, in C and C++, the fact that a location is accessed
6569 through a pointer to const does not mean that the value there can
6570 never change. Languages where it can never change should
6571 also set TREE_STATIC. */
6572 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6573
6574 /* If we are writing to this object and its type is a record with
6575 readonly fields, we must mark it as readonly so it will
6576 conflict with readonly references to those fields. */
6577 if (modifier == EXPAND_MEMORY_USE_WO
6578 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6579 RTX_UNCHANGING_P (temp) = 1;
6580
6581 return temp;
6582 }
6583
6584 case ARRAY_REF:
6585 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6586 abort ();
6587
6588 {
6589 tree array = TREE_OPERAND (exp, 0);
6590 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6591 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6592 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6593 HOST_WIDE_INT i;
6594
6595 /* Optimize the special-case of a zero lower bound.
6596
6597 We convert the low_bound to sizetype to avoid some problems
6598 with constant folding. (E.g. suppose the lower bound is 1,
6599 and its mode is QI. Without the conversion, (ARRAY
6600 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6601 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6602
6603 if (! integer_zerop (low_bound))
6604 index = size_diffop (index, convert (sizetype, low_bound));
6605
6606 /* Fold an expression like: "foo"[2].
6607 This is not done in fold so it won't happen inside &.
6608 Don't fold if this is for wide characters since it's too
6609 difficult to do correctly and this is a very rare case. */
6610
6611 if (TREE_CODE (array) == STRING_CST
6612 && TREE_CODE (index) == INTEGER_CST
6613 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6614 && GET_MODE_CLASS (mode) == MODE_INT
6615 && GET_MODE_SIZE (mode) == 1)
6616 return
6617 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6618
6619 /* If this is a constant index into a constant array,
6620 just get the value from the array. Handle both the cases when
6621 we have an explicit constructor and when our operand is a variable
6622 that was declared const. */
6623
6624 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6625 && TREE_CODE (index) == INTEGER_CST
6626 && 0 > compare_tree_int (index,
6627 list_length (CONSTRUCTOR_ELTS
6628 (TREE_OPERAND (exp, 0)))))
6629 {
6630 tree elem;
6631
6632 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6633 i = TREE_INT_CST_LOW (index);
6634 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6635 ;
6636
6637 if (elem)
6638 return expand_expr (fold (TREE_VALUE (elem)), target,
6639 tmode, ro_modifier);
6640 }
6641
6642 else if (optimize >= 1
6643 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6644 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6645 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6646 {
6647 if (TREE_CODE (index) == INTEGER_CST)
6648 {
6649 tree init = DECL_INITIAL (array);
6650
6651 if (TREE_CODE (init) == CONSTRUCTOR)
6652 {
6653 tree elem;
6654
6655 for (elem = CONSTRUCTOR_ELTS (init);
6656 (elem
6657 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6658 elem = TREE_CHAIN (elem))
6659 ;
6660
6661 if (elem)
6662 return expand_expr (fold (TREE_VALUE (elem)), target,
6663 tmode, ro_modifier);
6664 }
6665 else if (TREE_CODE (init) == STRING_CST
6666 && 0 > compare_tree_int (index,
6667 TREE_STRING_LENGTH (init)))
6668 {
6669 tree type = TREE_TYPE (TREE_TYPE (init));
6670 enum machine_mode mode = TYPE_MODE (type);
6671
6672 if (GET_MODE_CLASS (mode) == MODE_INT
6673 && GET_MODE_SIZE (mode) == 1)
6674 return (GEN_INT
6675 (TREE_STRING_POINTER
6676 (init)[TREE_INT_CST_LOW (index)]));
6677 }
6678 }
6679 }
6680 }
6681 /* Fall through. */
6682
6683 case COMPONENT_REF:
6684 case BIT_FIELD_REF:
6685 /* If the operand is a CONSTRUCTOR, we can just extract the
6686 appropriate field if it is present. Don't do this if we have
6687 already written the data since we want to refer to that copy
6688 and varasm.c assumes that's what we'll do. */
6689 if (code != ARRAY_REF
6690 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6691 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6692 {
6693 tree elt;
6694
6695 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6696 elt = TREE_CHAIN (elt))
6697 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6698 /* We can normally use the value of the field in the
6699 CONSTRUCTOR. However, if this is a bitfield in
6700 an integral mode that we can fit in a HOST_WIDE_INT,
6701 we must mask only the number of bits in the bitfield,
6702 since this is done implicitly by the constructor. If
6703 the bitfield does not meet either of those conditions,
6704 we can't do this optimization. */
6705 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6706 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6707 == MODE_INT)
6708 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6709 <= HOST_BITS_PER_WIDE_INT))))
6710 {
6711 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6712 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6713 {
6714 HOST_WIDE_INT bitsize
6715 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6716
6717 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6718 {
6719 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6720 op0 = expand_and (op0, op1, target);
6721 }
6722 else
6723 {
6724 enum machine_mode imode
6725 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6726 tree count
6727 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6728 0);
6729
6730 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6731 target, 0);
6732 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6733 target, 0);
6734 }
6735 }
6736
6737 return op0;
6738 }
6739 }
6740
6741 {
6742 enum machine_mode mode1;
6743 HOST_WIDE_INT bitsize, bitpos;
6744 tree offset;
6745 int volatilep = 0;
6746 unsigned int alignment;
6747 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6748 &mode1, &unsignedp, &volatilep,
6749 &alignment);
6750
6751 /* If we got back the original object, something is wrong. Perhaps
6752 we are evaluating an expression too early. In any event, don't
6753 infinitely recurse. */
6754 if (tem == exp)
6755 abort ();
6756
6757 /* If TEM's type is a union of variable size, pass TARGET to the inner
6758 computation, since it will need a temporary and TARGET is known
6759 to suffice. This occurs in unchecked conversion in Ada. */
6760
6761 op0 = expand_expr (tem,
6762 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6763 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6764 != INTEGER_CST)
6765 ? target : NULL_RTX),
6766 VOIDmode,
6767 (modifier == EXPAND_INITIALIZER
6768 || modifier == EXPAND_CONST_ADDRESS)
6769 ? modifier : EXPAND_NORMAL);
6770
6771 /* If this is a constant, put it into a register if it is a
6772 legitimate constant and OFFSET is 0; otherwise put it into memory. */
6773 if (CONSTANT_P (op0))
6774 {
6775 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6776 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6777 && offset == 0)
6778 op0 = force_reg (mode, op0);
6779 else
6780 op0 = validize_mem (force_const_mem (mode, op0));
6781 }
6782
6783 if (offset != 0)
6784 {
6785 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6786
6787 /* If this object is in memory, put it into a register.
6788 This case can't occur in C, but can in Ada if we have
6789 unchecked conversion of an expression from a scalar type to
6790 an array or record type. */
6791 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6792 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6793 {
6794 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6795
6796 mark_temp_addr_taken (memloc);
6797 emit_move_insn (memloc, op0);
6798 op0 = memloc;
6799 }
6800
6801 if (GET_CODE (op0) != MEM)
6802 abort ();
6803
6804 if (GET_MODE (offset_rtx) != ptr_mode)
6805 {
6806 #ifdef POINTERS_EXTEND_UNSIGNED
6807 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6808 #else
6809 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6810 #endif
6811 }
6812
6813 /* A constant address in OP0 can have VOIDmode; we must not try
6814 to call force_reg in that case, so avoid it here. */
6815 if (GET_CODE (op0) == MEM
6816 && GET_MODE (op0) == BLKmode
6817 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6818 && bitsize != 0
6819 && (bitpos % bitsize) == 0
6820 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6821 && alignment == GET_MODE_ALIGNMENT (mode1))
6822 {
6823 rtx temp = change_address (op0, mode1,
6824 plus_constant (XEXP (op0, 0),
6825 (bitpos /
6826 BITS_PER_UNIT)));
6827 if (GET_CODE (XEXP (temp, 0)) == REG)
6828 op0 = temp;
6829 else
6830 op0 = change_address (op0, mode1,
6831 force_reg (GET_MODE (XEXP (temp, 0)),
6832 XEXP (temp, 0)));
6833 bitpos = 0;
6834 }
6835
6836 op0 = change_address (op0, VOIDmode,
6837 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6838 force_reg (ptr_mode,
6839 offset_rtx)));
6840 }
6841
6842 /* Don't forget about volatility even if this is a bitfield. */
6843 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6844 {
6845 op0 = copy_rtx (op0);
6846 MEM_VOLATILE_P (op0) = 1;
6847 }
6848
6849 /* Check the access. */
6850 if (cfun != 0 && current_function_check_memory_usage
6851 && GET_CODE (op0) == MEM)
6852 {
6853 enum memory_use_mode memory_usage;
6854 memory_usage = get_memory_usage_from_modifier (modifier);
6855
6856 if (memory_usage != MEMORY_USE_DONT)
6857 {
6858 rtx to;
6859 int size;
6860
6861 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6862 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6863
6864 /* Check the access right of the pointer. */
6865 in_check_memory_usage = 1;
6866 if (size > BITS_PER_UNIT)
6867 emit_library_call (chkr_check_addr_libfunc,
6868 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
6869 Pmode, GEN_INT (size / BITS_PER_UNIT),
6870 TYPE_MODE (sizetype),
6871 GEN_INT (memory_usage),
6872 TYPE_MODE (integer_type_node));
6873 in_check_memory_usage = 0;
6874 }
6875 }
6876
6877 /* In cases where an aligned union has an unaligned object
6878 as a field, we might be extracting a BLKmode value from
6879 an integer-mode (e.g., SImode) object. Handle this case
6880 by doing the extract into an object as wide as the field
6881 (which we know to be the width of a basic mode), then
6882 storing into memory, and changing the mode to BLKmode.
6883 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6884 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6885 if (mode1 == VOIDmode
6886 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6887 || (modifier != EXPAND_CONST_ADDRESS
6888 && modifier != EXPAND_INITIALIZER
6889 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6890 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6891 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6892 /* If the field isn't aligned enough to fetch as a memref,
6893 fetch it as a bit field. */
6894 || (mode1 != BLKmode
6895 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6896 && ((TYPE_ALIGN (TREE_TYPE (tem))
6897 < GET_MODE_ALIGNMENT (mode))
6898 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6899 /* If the type and the field are a constant size and the
6900 size of the type isn't the same size as the bitfield,
6901 we must use bitfield operations. */
6902 || ((bitsize >= 0
6903 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6904 == INTEGER_CST)
6905 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6906 bitsize)))))
6907 || (modifier != EXPAND_CONST_ADDRESS
6908 && modifier != EXPAND_INITIALIZER
6909 && mode == BLKmode
6910 && SLOW_UNALIGNED_ACCESS (mode, alignment)
6911 && (TYPE_ALIGN (type) > alignment
6912 || bitpos % TYPE_ALIGN (type) != 0)))
6913 {
6914 enum machine_mode ext_mode = mode;
6915
6916 if (ext_mode == BLKmode
6917 && ! (target != 0 && GET_CODE (op0) == MEM
6918 && GET_CODE (target) == MEM
6919 && bitpos % BITS_PER_UNIT == 0))
6920 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6921
6922 if (ext_mode == BLKmode)
6923 {
6924 /* In this case, BITPOS must start at a byte boundary and
6925 TARGET, if specified, must be a MEM. */
6926 if (GET_CODE (op0) != MEM
6927 || (target != 0 && GET_CODE (target) != MEM)
6928 || bitpos % BITS_PER_UNIT != 0)
6929 abort ();
6930
6931 op0 = change_address (op0, VOIDmode,
6932 plus_constant (XEXP (op0, 0),
6933 bitpos / BITS_PER_UNIT));
6934 if (target == 0)
6935 target = assign_temp (type, 0, 1, 1);
6936
6937 emit_block_move (target, op0,
6938 bitsize == -1 ? expr_size (exp)
6939 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6940 / BITS_PER_UNIT),
6941 BITS_PER_UNIT);
6942
6943 return target;
6944 }
6945
6946 op0 = validize_mem (op0);
6947
6948 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6949 mark_reg_pointer (XEXP (op0, 0), alignment);
6950
6951 op0 = extract_bit_field (op0, bitsize, bitpos,
6952 unsignedp, target, ext_mode, ext_mode,
6953 alignment,
6954 int_size_in_bytes (TREE_TYPE (tem)));
6955
6956 /* If the result is a record type and BITSIZE is narrower than
6957 the mode of OP0, an integral mode, and this is a big endian
6958 machine, we must put the field into the high-order bits. */
6959 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6960 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6961 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6962 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6963 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6964 - bitsize),
6965 op0, 1);
6966
6967 if (mode == BLKmode)
6968 {
6969 rtx new = assign_stack_temp (ext_mode,
6970 bitsize / BITS_PER_UNIT, 0);
6971
6972 emit_move_insn (new, op0);
6973 op0 = copy_rtx (new);
6974 PUT_MODE (op0, BLKmode);
6975 MEM_SET_IN_STRUCT_P (op0, 1);
6976 }
6977
6978 return op0;
6979 }
6980
6981 /* If the result is BLKmode, use that to access the object
6982 now as well. */
6983 if (mode == BLKmode)
6984 mode1 = BLKmode;
6985
6986 /* Get a reference to just this component. */
6987 if (modifier == EXPAND_CONST_ADDRESS
6988 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6989 {
6990 rtx new = gen_rtx_MEM (mode1,
6991 plus_constant (XEXP (op0, 0),
6992 (bitpos / BITS_PER_UNIT)));
6993
6994 MEM_COPY_ATTRIBUTES (new, op0);
6995 op0 = new;
6996 }
6997 else
6998 op0 = change_address (op0, mode1,
6999 plus_constant (XEXP (op0, 0),
7000 (bitpos / BITS_PER_UNIT)));
7001
7002 set_mem_attributes (op0, exp, 0);
7003 if (GET_CODE (XEXP (op0, 0)) == REG)
7004 mark_reg_pointer (XEXP (op0, 0), alignment);
7005
7006 MEM_VOLATILE_P (op0) |= volatilep;
7007 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7008 || modifier == EXPAND_CONST_ADDRESS
7009 || modifier == EXPAND_INITIALIZER)
7010 return op0;
7011 else if (target == 0)
7012 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7013
7014 convert_move (target, op0, unsignedp);
7015 return target;
7016 }
7017
7018 /* Intended for a reference to a buffer of a file-object in Pascal.
7019 But it's not certain that a special tree code will really be
7020 necessary for these. INDIRECT_REF might work for them. */
7021 case BUFFER_REF:
7022 abort ();
7023
7024 case IN_EXPR:
7025 {
7026 /* Pascal set IN expression.
7027
7028 Algorithm:
7029 rlo = set_low - (set_low%bits_per_word);
7030 the_word = set [ (index - rlo)/bits_per_word ];
7031 bit_index = index % bits_per_word;
7032 bitmask = 1 << bit_index;
7033 return !!(the_word & bitmask); */
7034
7035 tree set = TREE_OPERAND (exp, 0);
7036 tree index = TREE_OPERAND (exp, 1);
7037 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7038 tree set_type = TREE_TYPE (set);
7039 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7040 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7041 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7042 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7043 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7044 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7045 rtx setaddr = XEXP (setval, 0);
7046 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7047 rtx rlow;
7048 rtx diff, quo, rem, addr, bit, result;
7049
7050 preexpand_calls (exp);
7051
7052 /* If domain is empty, answer is no. Likewise if index is constant
7053 and out of bounds. */
7054 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7055 && TREE_CODE (set_low_bound) == INTEGER_CST
7056 && tree_int_cst_lt (set_high_bound, set_low_bound))
7057 || (TREE_CODE (index) == INTEGER_CST
7058 && TREE_CODE (set_low_bound) == INTEGER_CST
7059 && tree_int_cst_lt (index, set_low_bound))
7060 || (TREE_CODE (set_high_bound) == INTEGER_CST
7061 && TREE_CODE (index) == INTEGER_CST
7062 && tree_int_cst_lt (set_high_bound, index))))
7063 return const0_rtx;
7064
7065 if (target == 0)
7066 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7067
7068 /* If we get here, we have to generate the code for both cases
7069 (in range and out of range). */
7070
7071 op0 = gen_label_rtx ();
7072 op1 = gen_label_rtx ();
7073
7074 if (! (GET_CODE (index_val) == CONST_INT
7075 && GET_CODE (lo_r) == CONST_INT))
7076 {
7077 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7078 GET_MODE (index_val), iunsignedp, 0, op1);
7079 }
7080
7081 if (! (GET_CODE (index_val) == CONST_INT
7082 && GET_CODE (hi_r) == CONST_INT))
7083 {
7084 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7085 GET_MODE (index_val), iunsignedp, 0, op1);
7086 }
7087
7088 /* Calculate the element number of bit zero in the first word
7089 of the set. */
7090 if (GET_CODE (lo_r) == CONST_INT)
7091 rlow = GEN_INT (INTVAL (lo_r)
7092 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7093 else
7094 rlow = expand_binop (index_mode, and_optab, lo_r,
7095 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7096 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7097
7098 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7099 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7100
7101 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7102 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7103 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7104 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7105
7106 addr = memory_address (byte_mode,
7107 expand_binop (index_mode, add_optab, diff,
7108 setaddr, NULL_RTX, iunsignedp,
7109 OPTAB_LIB_WIDEN));
7110
7111 /* Extract the bit we want to examine. */
7112 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7113 gen_rtx_MEM (byte_mode, addr),
7114 make_tree (TREE_TYPE (index), rem),
7115 NULL_RTX, 1);
7116 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7117 GET_MODE (target) == byte_mode ? target : 0,
7118 1, OPTAB_LIB_WIDEN);
7119
7120 if (result != target)
7121 convert_move (target, result, 1);
7122
7123 /* Output the code to handle the out-of-range case. */
7124 emit_jump (op0);
7125 emit_label (op1);
7126 emit_move_insn (target, const0_rtx);
7127 emit_label (op0);
7128 return target;
7129 }
7130
7131 case WITH_CLEANUP_EXPR:
7132 if (RTL_EXPR_RTL (exp) == 0)
7133 {
7134 RTL_EXPR_RTL (exp)
7135 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7136 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7137
7138 /* That's it for this cleanup. */
7139 TREE_OPERAND (exp, 2) = 0;
7140 }
7141 return RTL_EXPR_RTL (exp);
7142
7143 case CLEANUP_POINT_EXPR:
7144 {
7145 /* Start a new binding layer that will keep track of all cleanup
7146 actions to be performed. */
7147 expand_start_bindings (2);
7148
7149 target_temp_slot_level = temp_slot_level;
7150
7151 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7152 /* If we're going to use this value, load it up now. */
7153 if (! ignore)
7154 op0 = force_not_mem (op0);
7155 preserve_temp_slots (op0);
7156 expand_end_bindings (NULL_TREE, 0, 0);
7157 }
7158 return op0;
7159
7160 case CALL_EXPR:
7161 /* Check for a built-in function. */
7162 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7163 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7164 == FUNCTION_DECL)
7165 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7166 return expand_builtin (exp, target, subtarget, tmode, ignore);
7167
7168 /* If this call was expanded already by preexpand_calls,
7169 just return the result we got. */
7170 if (CALL_EXPR_RTL (exp) != 0)
7171 return CALL_EXPR_RTL (exp);
7172
7173 return expand_call (exp, target, ignore);
7174
7175 case NON_LVALUE_EXPR:
7176 case NOP_EXPR:
7177 case CONVERT_EXPR:
7178 case REFERENCE_EXPR:
7179 if (TREE_OPERAND (exp, 0) == error_mark_node)
7180 return const0_rtx;
7181
7182 if (TREE_CODE (type) == UNION_TYPE)
7183 {
7184 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7185
7186 /* If both input and output are BLKmode, this conversion
7187 isn't actually doing anything unless we need to make the
7188 alignment stricter. */
7189 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7190 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7191 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7192 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7193 modifier);
7194
7195 if (target == 0)
7196 {
7197 if (mode != BLKmode)
7198 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7199 else
7200 target = assign_temp (type, 0, 1, 1);
7201 }
7202
7203 if (GET_CODE (target) == MEM)
7204 /* Store data into beginning of memory target. */
7205 store_expr (TREE_OPERAND (exp, 0),
7206 change_address (target, TYPE_MODE (valtype), 0), 0);
7207
7208 else if (GET_CODE (target) == REG)
7209 /* Store this field into a union of the proper type. */
7210 store_field (target,
7211 MIN ((int_size_in_bytes (TREE_TYPE
7212 (TREE_OPERAND (exp, 0)))
7213 * BITS_PER_UNIT),
7214 GET_MODE_BITSIZE (mode)),
7215 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7216 VOIDmode, 0, BITS_PER_UNIT,
7217 int_size_in_bytes (type), 0);
7218 else
7219 abort ();
7220
7221 /* Return the entire union. */
7222 return target;
7223 }
7224
7225 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7226 {
7227 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7228 ro_modifier);
7229
7230 /* If the signedness of the conversion differs and OP0 is
7231 a promoted SUBREG, clear that indication since we now
7232 have to do the proper extension. */
7233 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7234 && GET_CODE (op0) == SUBREG)
7235 SUBREG_PROMOTED_VAR_P (op0) = 0;
7236
7237 return op0;
7238 }
7239
7240 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7241 if (GET_MODE (op0) == mode)
7242 return op0;
7243
7244 /* If OP0 is a constant, just convert it into the proper mode. */
7245 if (CONSTANT_P (op0))
7246 return
7247 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7248 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7249
7250 if (modifier == EXPAND_INITIALIZER)
7251 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7252
7253 if (target == 0)
7254 return
7255 convert_to_mode (mode, op0,
7256 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7257 else
7258 convert_move (target, op0,
7259 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7260 return target;
7261
7262 case PLUS_EXPR:
7263 /* We come here from MINUS_EXPR when the second operand is a
7264 constant. */
7265 plus_expr:
7266 this_optab = add_optab;
7267
7268 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7269 something else, make sure we add the register to the constant and
7270 then to the other thing. This case can occur during strength
7271 reduction and doing it this way will produce better code if the
7272 frame pointer or argument pointer is eliminated.
7273
7274 fold-const.c will ensure that the constant is always in the inner
7275 PLUS_EXPR, so the only case we need to do anything about is if
7276 sp, ap, or fp is our second argument, in which case we must swap
7277 the innermost first argument and our second argument. */
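     /* For example, (X + C) + FP is rearranged below into (FP + C) + X,
        so that FP + C can later collapse to a single offset once the
        frame pointer is eliminated.  */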
7278
7279 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7280 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7281 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7282 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7283 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7284 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7285 {
7286 tree t = TREE_OPERAND (exp, 1);
7287
7288 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7289 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7290 }
7291
7292 /* If the result is to be ptr_mode and we are adding an integer to
7293 something, we might be forming a constant. So try to use
7294 plus_constant. If it produces a sum and we can't accept it,
7295 use force_operand. This allows P = &ARR[const] to generate
7296 efficient code on machines where a SYMBOL_REF is not a valid
7297 address.
7298
7299 If this is an EXPAND_SUM call, always return the sum. */
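     /* For example, P = &ARR[const] can come out as
        (const (plus (symbol_ref ARR) (const_int N))), N being const times
        the element size, with no run-time addition emitted.  */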
7300 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7301 || mode == ptr_mode)
7302 {
7303 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7304 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7305 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7306 {
7307 rtx constant_part;
7308
7309 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7310 EXPAND_SUM);
7311 /* Use immed_double_const to ensure that the constant is
7312 truncated according to the mode of OP1, then sign extended
7313 to a HOST_WIDE_INT. Using the constant directly can result
7314 in non-canonical RTL in a 64x32 cross compile. */
7315 constant_part
7316 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7317 (HOST_WIDE_INT) 0,
7318 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7319 op1 = plus_constant (op1, INTVAL (constant_part));
7320 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7321 op1 = force_operand (op1, target);
7322 return op1;
7323 }
7324
7325 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7326 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7327 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7328 {
7329 rtx constant_part;
7330
7331 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7332 EXPAND_SUM);
7333 if (! CONSTANT_P (op0))
7334 {
7335 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7336 VOIDmode, modifier);
7337 /* Don't go to both_summands if modifier
7338 says it's not right to return a PLUS. */
7339 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7340 goto binop2;
7341 goto both_summands;
7342 }
7343 /* Use immed_double_const to ensure that the constant is
7344 truncated according to the mode of OP0, then sign extended
7345 to a HOST_WIDE_INT. Using the constant directly can result
7346 in non-canonical RTL in a 64x32 cross compile. */
7347 constant_part
7348 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7349 (HOST_WIDE_INT) 0,
7350 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7351 op0 = plus_constant (op0, INTVAL (constant_part));
7352 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7353 op0 = force_operand (op0, target);
7354 return op0;
7355 }
7356 }
7357
7358 /* No sense saving up arithmetic to be done
7359 if it's all in the wrong mode to form part of an address.
7360 And force_operand won't know whether to sign-extend or
7361 zero-extend. */
7362 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7363 || mode != ptr_mode)
7364 goto binop;
7365
7366 preexpand_calls (exp);
7367 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7368 subtarget = 0;
7369
7370 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7371 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7372
7373 both_summands:
7374 /* Make sure any term that's a sum with a constant comes last. */
7375 if (GET_CODE (op0) == PLUS
7376 && CONSTANT_P (XEXP (op0, 1)))
7377 {
7378 temp = op0;
7379 op0 = op1;
7380 op1 = temp;
7381 }
7382 /* If adding to a sum including a constant,
7383 associate it to put the constant outside. */
7384 if (GET_CODE (op1) == PLUS
7385 && CONSTANT_P (XEXP (op1, 1)))
7386 {
7387 rtx constant_term = const0_rtx;
7388
7389 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7390 if (temp != 0)
7391 op0 = temp;
7392 /* Ensure that MULT comes first if there is one. */
7393 else if (GET_CODE (op0) == MULT)
7394 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7395 else
7396 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7397
7398 /* Let's also eliminate constants from op0 if possible. */
7399 op0 = eliminate_constant_term (op0, &constant_term);
7400
7401 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7402 their sum should be a constant. Form it into OP1, since the
7403 result we want will then be OP0 + OP1. */
7404
7405 temp = simplify_binary_operation (PLUS, mode, constant_term,
7406 XEXP (op1, 1));
7407 if (temp != 0)
7408 op1 = temp;
7409 else
7410 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7411 }
7412
7413 /* Put a constant term last and put a multiplication first. */
7414 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7415 temp = op1, op1 = op0, op0 = temp;
7416
7417 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7418 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7419
7420 case MINUS_EXPR:
7421 /* For initializers, we are allowed to return a MINUS of two
7422 symbolic constants. Here we handle all cases when both operands
7423 are constant. */
7424 /* Handle difference of two symbolic constants,
7425 for the sake of an initializer. */
7426 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7427 && really_constant_p (TREE_OPERAND (exp, 0))
7428 && really_constant_p (TREE_OPERAND (exp, 1)))
7429 {
7430 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7431 VOIDmode, ro_modifier);
7432 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7433 VOIDmode, ro_modifier);
7434
7435 /* If the last operand is a CONST_INT, use plus_constant of
7436 the negated constant. Else make the MINUS. */
7437 if (GET_CODE (op1) == CONST_INT)
7438 return plus_constant (op0, - INTVAL (op1));
7439 else
7440 return gen_rtx_MINUS (mode, op0, op1);
7441 }
7442 /* Convert A - const to A + (-const). */
7443 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7444 {
7445 tree negated = fold (build1 (NEGATE_EXPR, type,
7446 TREE_OPERAND (exp, 1)));
7447
7448 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7449 /* If we can't negate the constant in TYPE, leave it alone and
7450 expand_binop will negate it for us. We used to try to do it
7451 here in the signed version of TYPE, but that doesn't work
7452 on POINTER_TYPEs. */;
7453 else
7454 {
7455 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7456 goto plus_expr;
7457 }
7458 }
7459 this_optab = sub_optab;
7460 goto binop;
7461
7462 case MULT_EXPR:
7463 preexpand_calls (exp);
7464 /* If first operand is constant, swap them.
7465 Thus the following special case checks need only
7466 check the second operand. */
7467 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7468 {
7469 register tree t1 = TREE_OPERAND (exp, 0);
7470 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7471 TREE_OPERAND (exp, 1) = t1;
7472 }
7473
7474 /* Attempt to return something suitable for generating an
7475 indexed address, for machines that support that. */
7476
7477 if (modifier == EXPAND_SUM && mode == ptr_mode
7478 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7479 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7480 {
7481 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7482 EXPAND_SUM);
7483
7484 /* Apply distributive law if OP0 is x+c. */
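         /* For example, (X + 4) * 3 becomes (X * 3) + 12, a form that can
            be used directly as an indexed address.  */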
7485 if (GET_CODE (op0) == PLUS
7486 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7487 return
7488 gen_rtx_PLUS
7489 (mode,
7490 gen_rtx_MULT
7491 (mode, XEXP (op0, 0),
7492 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7493 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7494 * INTVAL (XEXP (op0, 1))));
7495
7496 if (GET_CODE (op0) != REG)
7497 op0 = force_operand (op0, NULL_RTX);
7498 if (GET_CODE (op0) != REG)
7499 op0 = copy_to_mode_reg (mode, op0);
7500
7501 return
7502 gen_rtx_MULT (mode, op0,
7503 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7504 }
7505
7506 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7507 subtarget = 0;
7508
7509 /* Check for multiplying things that have been extended
7510 from a narrower type. If this machine supports multiplying
7511 in that narrower type with a result in the desired type,
7512 do it that way, and avoid the explicit type-conversion. */
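     /* For example, the product of two values extended from HImode can be
        computed with a widening HImode-to-SImode multiply (a pattern such
        as mulhisi3) on targets that provide one, instead of extending both
        operands and doing a full SImode multiply.  */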
7513 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7514 && TREE_CODE (type) == INTEGER_TYPE
7515 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7516 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7517 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7518 && int_fits_type_p (TREE_OPERAND (exp, 1),
7519 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7520 /* Don't use a widening multiply if a shift will do. */
7521 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7522 > HOST_BITS_PER_WIDE_INT)
7523 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7524 ||
7525 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7526 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7527 ==
7528 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7529 /* If both operands are extended, they must either both
7530 be zero-extended or both be sign-extended. */
7531 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7532 ==
7533 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7534 {
7535 enum machine_mode innermode
7536 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7537 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7538 ? smul_widen_optab : umul_widen_optab);
7539 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7540 ? umul_widen_optab : smul_widen_optab);
7541 if (mode == GET_MODE_WIDER_MODE (innermode))
7542 {
7543 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7544 {
7545 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7546 NULL_RTX, VOIDmode, 0);
7547 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7548 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7549 VOIDmode, 0);
7550 else
7551 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7552 NULL_RTX, VOIDmode, 0);
7553 goto binop2;
7554 }
7555 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7556 && innermode == word_mode)
7557 {
7558 rtx htem;
7559 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7560 NULL_RTX, VOIDmode, 0);
7561 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7562 op1 = convert_modes (innermode, mode,
7563 expand_expr (TREE_OPERAND (exp, 1),
7564 NULL_RTX, VOIDmode, 0),
7565 unsignedp);
7566 else
7567 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7568 NULL_RTX, VOIDmode, 0);
7569 temp = expand_binop (mode, other_optab, op0, op1, target,
7570 unsignedp, OPTAB_LIB_WIDEN);
7571 htem = expand_mult_highpart_adjust (innermode,
7572 gen_highpart (innermode, temp),
7573 op0, op1,
7574 gen_highpart (innermode, temp),
7575 unsignedp);
7576 emit_move_insn (gen_highpart (innermode, temp), htem);
7577 return temp;
7578 }
7579 }
7580 }
7581 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7582 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7583 return expand_mult (mode, op0, op1, target, unsignedp);
7584
7585 case TRUNC_DIV_EXPR:
7586 case FLOOR_DIV_EXPR:
7587 case CEIL_DIV_EXPR:
7588 case ROUND_DIV_EXPR:
7589 case EXACT_DIV_EXPR:
7590 preexpand_calls (exp);
7591 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7592 subtarget = 0;
7593 /* Possible optimization: compute the dividend with EXPAND_SUM;
7594 then, if the divisor is constant, optimize the case where some
7595 terms of the dividend have coefficients divisible by it.  */
7596 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7597 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7598 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7599
7600 case RDIV_EXPR:
7601 this_optab = flodiv_optab;
7602 goto binop;
7603
7604 case TRUNC_MOD_EXPR:
7605 case FLOOR_MOD_EXPR:
7606 case CEIL_MOD_EXPR:
7607 case ROUND_MOD_EXPR:
7608 preexpand_calls (exp);
7609 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7610 subtarget = 0;
7611 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7612 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7613 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7614
7615 case FIX_ROUND_EXPR:
7616 case FIX_FLOOR_EXPR:
7617 case FIX_CEIL_EXPR:
7618 abort (); /* Not used for C. */
7619
7620 case FIX_TRUNC_EXPR:
7621 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7622 if (target == 0)
7623 target = gen_reg_rtx (mode);
7624 expand_fix (target, op0, unsignedp);
7625 return target;
7626
7627 case FLOAT_EXPR:
7628 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7629 if (target == 0)
7630 target = gen_reg_rtx (mode);
7631 /* expand_float can't figure out what to do if FROM has VOIDmode.
7632 So give it the correct mode. With -O, cse will optimize this. */
7633 if (GET_MODE (op0) == VOIDmode)
7634 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7635 op0);
7636 expand_float (target, op0,
7637 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7638 return target;
7639
7640 case NEGATE_EXPR:
7641 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7642 temp = expand_unop (mode, neg_optab, op0, target, 0);
7643 if (temp == 0)
7644 abort ();
7645 return temp;
7646
7647 case ABS_EXPR:
7648 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7649
7650 /* Handle complex values specially. */
7651 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7652 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7653 return expand_complex_abs (mode, op0, target, unsignedp);
7654
7655 /* Unsigned abs is simply the operand. Testing here means we don't
7656 risk generating incorrect code below. */
7657 if (TREE_UNSIGNED (type))
7658 return op0;
7659
7660 return expand_abs (mode, op0, target,
7661 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7662
7663 case MAX_EXPR:
7664 case MIN_EXPR:
7665 target = original_target;
7666 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7667 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7668 || GET_MODE (target) != mode
7669 || (GET_CODE (target) == REG
7670 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7671 target = gen_reg_rtx (mode);
7672 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7673 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7674
7675 /* First try to do it with a special MIN or MAX instruction.
7676 If that does not win, use a conditional jump to select the proper
7677 value. */
7678 this_optab = (TREE_UNSIGNED (type)
7679 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7680 : (code == MIN_EXPR ? smin_optab : smax_optab));
7681
7682 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7683 OPTAB_WIDEN);
7684 if (temp != 0)
7685 return temp;
7686
7687 /* At this point, a MEM target is no longer useful; we will get better
7688 code without it. */
7689
7690 if (GET_CODE (target) == MEM)
7691 target = gen_reg_rtx (mode);
7692
7693 if (target != op0)
7694 emit_move_insn (target, op0);
7695
7696 op0 = gen_label_rtx ();
7697
7698 /* If this mode is an integer too wide to compare properly,
7699 compare word by word. Rely on cse to optimize constant cases. */
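     /* For example, a DImode MIN/MAX on a 32-bit target with no direct
        DImode comparison is handled by comparing the operands word by
        word via do_jump_by_parts_greater_rtx.  */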
7700 if (GET_MODE_CLASS (mode) == MODE_INT
7701 && ! can_compare_p (GE, mode, ccp_jump))
7702 {
7703 if (code == MAX_EXPR)
7704 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7705 target, op1, NULL_RTX, op0);
7706 else
7707 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7708 op1, target, NULL_RTX, op0);
7709 }
7710 else
7711 {
7712 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7713 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7714 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7715 op0);
7716 }
7717 emit_move_insn (target, op1);
7718 emit_label (op0);
7719 return target;
7720
7721 case BIT_NOT_EXPR:
7722 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7723 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7724 if (temp == 0)
7725 abort ();
7726 return temp;
7727
7728 case FFS_EXPR:
7729 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7730 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7731 if (temp == 0)
7732 abort ();
7733 return temp;
7734
7735 /* ??? Can optimize bitwise operations with one arg constant.
7736 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7737 and (a bitwise1 b) bitwise2 b (etc)
7738 but that is probably not worth while. */
7739
7740 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7741 boolean values when we want in all cases to compute both of them. In
7742 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7743 as actual zero-or-1 values and then bitwise anding. In cases where
7744 there cannot be any side effects, better code would be made by
7745 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7746 how to recognize those cases. */
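     /* Concretely, a TRUTH_AND_EXPR of two comparisons expands each
        comparison to a 0-or-1 value and combines them with and_optab
        below, whereas TRUTH_ANDIF_EXPR (C's &&) may skip evaluating the
        second operand entirely.  */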
7747
7748 case TRUTH_AND_EXPR:
7749 case BIT_AND_EXPR:
7750 this_optab = and_optab;
7751 goto binop;
7752
7753 case TRUTH_OR_EXPR:
7754 case BIT_IOR_EXPR:
7755 this_optab = ior_optab;
7756 goto binop;
7757
7758 case TRUTH_XOR_EXPR:
7759 case BIT_XOR_EXPR:
7760 this_optab = xor_optab;
7761 goto binop;
7762
7763 case LSHIFT_EXPR:
7764 case RSHIFT_EXPR:
7765 case LROTATE_EXPR:
7766 case RROTATE_EXPR:
7767 preexpand_calls (exp);
7768 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7769 subtarget = 0;
7770 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7771 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7772 unsignedp);
7773
7774 /* Could determine the answer when only additive constants differ. Also,
7775 the addition of one can be handled by changing the condition. */
7776 case LT_EXPR:
7777 case LE_EXPR:
7778 case GT_EXPR:
7779 case GE_EXPR:
7780 case EQ_EXPR:
7781 case NE_EXPR:
7782 case UNORDERED_EXPR:
7783 case ORDERED_EXPR:
7784 case UNLT_EXPR:
7785 case UNLE_EXPR:
7786 case UNGT_EXPR:
7787 case UNGE_EXPR:
7788 case UNEQ_EXPR:
7789 preexpand_calls (exp);
7790 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7791 if (temp != 0)
7792 return temp;
7793
7794 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7795 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7796 && original_target
7797 && GET_CODE (original_target) == REG
7798 && (GET_MODE (original_target)
7799 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7800 {
7801 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7802 VOIDmode, 0);
7803
7804 if (temp != original_target)
7805 temp = copy_to_reg (temp);
7806
7807 op1 = gen_label_rtx ();
7808 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7809 GET_MODE (temp), unsignedp, 0, op1);
7810 emit_move_insn (temp, const1_rtx);
7811 emit_label (op1);
7812 return temp;
7813 }
7814
7815 /* If no set-flag instruction, must generate a conditional
7816 store into a temporary variable. Drop through
7817 and handle this like && and ||. */
7818
7819 case TRUTH_ANDIF_EXPR:
7820 case TRUTH_ORIF_EXPR:
7821 if (! ignore
7822 && (target == 0 || ! safe_from_p (target, exp, 1)
7823 /* Make sure we don't have a hard reg (such as function's return
7824 value) live across basic blocks, if not optimizing. */
7825 || (!optimize && GET_CODE (target) == REG
7826 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7827 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7828
7829 if (target)
7830 emit_clr_insn (target);
7831
7832 op1 = gen_label_rtx ();
7833 jumpifnot (exp, op1);
7834
7835 if (target)
7836 emit_0_to_1_insn (target);
7837
7838 emit_label (op1);
7839 return ignore ? const0_rtx : target;
7840
7841 case TRUTH_NOT_EXPR:
7842 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7843 /* The parser is careful to generate TRUTH_NOT_EXPR
7844 only with operands that are always zero or one. */
7845 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7846 target, 1, OPTAB_LIB_WIDEN);
7847 if (temp == 0)
7848 abort ();
7849 return temp;
7850
7851 case COMPOUND_EXPR:
7852 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7853 emit_queue ();
7854 return expand_expr (TREE_OPERAND (exp, 1),
7855 (ignore ? const0_rtx : target),
7856 VOIDmode, 0);
7857
7858 case COND_EXPR:
7859 /* If we would have a "singleton" (see below) were it not for a
7860 conversion in each arm, bring that conversion back out. */
7861 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7862 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7863 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7864 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7865 {
7866 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7867 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7868
7869 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7870 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7871 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7872 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7873 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7874 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7875 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7876 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7877 return expand_expr (build1 (NOP_EXPR, type,
7878 build (COND_EXPR, TREE_TYPE (true),
7879 TREE_OPERAND (exp, 0),
7880 true, false)),
7881 target, tmode, modifier);
7882 }
7883
7884 {
7885 /* Note that COND_EXPRs whose type is a structure or union
7886 are required to be constructed to contain assignments of
7887 a temporary variable, so that we can evaluate them here
7888 for side effect only. If type is void, we must do likewise. */
7889
7890 /* If an arm of the branch requires a cleanup,
7891 only that cleanup is performed. */
7892
7893 tree singleton = 0;
7894 tree binary_op = 0, unary_op = 0;
7895
7896 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7897 convert it to our mode, if necessary. */
7898 if (integer_onep (TREE_OPERAND (exp, 1))
7899 && integer_zerop (TREE_OPERAND (exp, 2))
7900 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7901 {
7902 if (ignore)
7903 {
7904 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7905 ro_modifier);
7906 return const0_rtx;
7907 }
7908
7909 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7910 if (GET_MODE (op0) == mode)
7911 return op0;
7912
7913 if (target == 0)
7914 target = gen_reg_rtx (mode);
7915 convert_move (target, op0, unsignedp);
7916 return target;
7917 }
7918
7919 /* Check for X ? A + B : A. If we have this, we can copy A to the
7920 output and conditionally add B. Similarly for unary operations.
7921 Don't do this if X has side-effects because those side effects
7922 might affect A or B and the "?" operation is a sequence point in
7923 ANSI. (operand_equal_p tests for side effects.) */
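       /* For example, X ? A + B : A can be done by storing A into the
          target, jumping around the addition when X is false, and adding
          B in place otherwise.  */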
7924
7925 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7926 && operand_equal_p (TREE_OPERAND (exp, 2),
7927 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7928 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7929 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7930 && operand_equal_p (TREE_OPERAND (exp, 1),
7931 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7932 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7933 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7934 && operand_equal_p (TREE_OPERAND (exp, 2),
7935 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7936 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7937 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7938 && operand_equal_p (TREE_OPERAND (exp, 1),
7939 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7940 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7941
7942 /* If we are not to produce a result, we have no target. Otherwise,
7943 if a target was specified use it; it will not be used as an
7944 intermediate target unless it is safe. If no target, use a
7945 temporary. */
7946
7947 if (ignore)
7948 temp = 0;
7949 else if (original_target
7950 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7951 || (singleton && GET_CODE (original_target) == REG
7952 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7953 && original_target == var_rtx (singleton)))
7954 && GET_MODE (original_target) == mode
7955 #ifdef HAVE_conditional_move
7956 && (! can_conditionally_move_p (mode)
7957 || GET_CODE (original_target) == REG
7958 || TREE_ADDRESSABLE (type))
7959 #endif
7960 && ! (GET_CODE (original_target) == MEM
7961 && MEM_VOLATILE_P (original_target)))
7962 temp = original_target;
7963 else if (TREE_ADDRESSABLE (type))
7964 abort ();
7965 else
7966 temp = assign_temp (type, 0, 0, 1);
7967
7968 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7969 do the test of X as a store-flag operation, do this as
7970 A + ((X != 0) << log C). Similarly for other simple binary
7971 operators. Only do for C == 1 if BRANCH_COST is low. */
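       /* For example, X ? A + 4 : A becomes A + ((X != 0) << 2), avoiding
          a branch whenever X can be computed as a store-flag value.  */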
7972 if (temp && singleton && binary_op
7973 && (TREE_CODE (binary_op) == PLUS_EXPR
7974 || TREE_CODE (binary_op) == MINUS_EXPR
7975 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7976 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7977 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7978 : integer_onep (TREE_OPERAND (binary_op, 1)))
7979 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7980 {
7981 rtx result;
7982 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7983 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7984 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7985 : xor_optab);
7986
7987 /* If we had X ? A : A + 1, do this as A + (X == 0).
7988
7989 We have to invert the truth value here and then put it
7990 back later if do_store_flag fails. We cannot simply copy
7991 TREE_OPERAND (exp, 0) to another variable and modify that
7992 because invert_truthvalue can modify the tree pointed to
7993 by its argument. */
7994 if (singleton == TREE_OPERAND (exp, 1))
7995 TREE_OPERAND (exp, 0)
7996 = invert_truthvalue (TREE_OPERAND (exp, 0));
7997
7998 result = do_store_flag (TREE_OPERAND (exp, 0),
7999 (safe_from_p (temp, singleton, 1)
8000 ? temp : NULL_RTX),
8001 mode, BRANCH_COST <= 1);
8002
8003 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8004 result = expand_shift (LSHIFT_EXPR, mode, result,
8005 build_int_2 (tree_log2
8006 (TREE_OPERAND
8007 (binary_op, 1)),
8008 0),
8009 (safe_from_p (temp, singleton, 1)
8010 ? temp : NULL_RTX), 0);
8011
8012 if (result)
8013 {
8014 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8015 return expand_binop (mode, boptab, op1, result, temp,
8016 unsignedp, OPTAB_LIB_WIDEN);
8017 }
8018 else if (singleton == TREE_OPERAND (exp, 1))
8019 TREE_OPERAND (exp, 0)
8020 = invert_truthvalue (TREE_OPERAND (exp, 0));
8021 }
8022
8023 do_pending_stack_adjust ();
8024 NO_DEFER_POP;
8025 op0 = gen_label_rtx ();
8026
8027 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8028 {
8029 if (temp != 0)
8030 {
8031 /* If the target conflicts with the other operand of the
8032 binary op, we can't use it. Also, we can't use the target
8033 if it is a hard register, because evaluating the condition
8034 might clobber it. */
8035 if ((binary_op
8036 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8037 || (GET_CODE (temp) == REG
8038 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8039 temp = gen_reg_rtx (mode);
8040 store_expr (singleton, temp, 0);
8041 }
8042 else
8043 expand_expr (singleton,
8044 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8045 if (singleton == TREE_OPERAND (exp, 1))
8046 jumpif (TREE_OPERAND (exp, 0), op0);
8047 else
8048 jumpifnot (TREE_OPERAND (exp, 0), op0);
8049
8050 start_cleanup_deferral ();
8051 if (binary_op && temp == 0)
8052 /* Just touch the other operand. */
8053 expand_expr (TREE_OPERAND (binary_op, 1),
8054 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8055 else if (binary_op)
8056 store_expr (build (TREE_CODE (binary_op), type,
8057 make_tree (type, temp),
8058 TREE_OPERAND (binary_op, 1)),
8059 temp, 0);
8060 else
8061 store_expr (build1 (TREE_CODE (unary_op), type,
8062 make_tree (type, temp)),
8063 temp, 0);
8064 op1 = op0;
8065 }
8066 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8067 comparison operator. If we have one of these cases, set the
8068 output to A, branch on A (cse will merge these two references),
8069 then set the output to FOO. */
8070 else if (temp
8071 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8072 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8073 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8074 TREE_OPERAND (exp, 1), 0)
8075 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8076 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8077 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8078 {
8079 if (GET_CODE (temp) == REG
8080 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8081 temp = gen_reg_rtx (mode);
8082 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8083 jumpif (TREE_OPERAND (exp, 0), op0);
8084
8085 start_cleanup_deferral ();
8086 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8087 op1 = op0;
8088 }
8089 else if (temp
8090 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8091 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8092 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8093 TREE_OPERAND (exp, 2), 0)
8094 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8095 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8096 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8097 {
8098 if (GET_CODE (temp) == REG
8099 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8100 temp = gen_reg_rtx (mode);
8101 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8102 jumpifnot (TREE_OPERAND (exp, 0), op0);
8103
8104 start_cleanup_deferral ();
8105 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8106 op1 = op0;
8107 }
8108 else
8109 {
8110 op1 = gen_label_rtx ();
8111 jumpifnot (TREE_OPERAND (exp, 0), op0);
8112
8113 start_cleanup_deferral ();
8114
8115 /* One branch of the cond can be void, if it never returns. For
8116 example, A ? throw : E.  */
8117 if (temp != 0
8118 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8119 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8120 else
8121 expand_expr (TREE_OPERAND (exp, 1),
8122 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8123 end_cleanup_deferral ();
8124 emit_queue ();
8125 emit_jump_insn (gen_jump (op1));
8126 emit_barrier ();
8127 emit_label (op0);
8128 start_cleanup_deferral ();
8129 if (temp != 0
8130 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8131 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8132 else
8133 expand_expr (TREE_OPERAND (exp, 2),
8134 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8135 }
8136
8137 end_cleanup_deferral ();
8138
8139 emit_queue ();
8140 emit_label (op1);
8141 OK_DEFER_POP;
8142
8143 return temp;
8144 }
8145
8146 case TARGET_EXPR:
8147 {
8148 /* Something needs to be initialized, but we didn't know
8149 where that thing was when building the tree. For example,
8150 it could be the return value of a function, or a parameter
8151 to a function which is laid down on the stack, or a temporary
8152 variable which must be passed by reference.
8153
8154 We guarantee that the expression will either be constructed
8155 or copied into our original target. */
8156
8157 tree slot = TREE_OPERAND (exp, 0);
8158 tree cleanups = NULL_TREE;
8159 tree exp1;
8160
8161 if (TREE_CODE (slot) != VAR_DECL)
8162 abort ();
8163
8164 if (! ignore)
8165 target = original_target;
8166
8167 /* Set this here so that if we get a target that refers to a
8168 register variable that's already been used, put_reg_into_stack
8169 knows that it should fix up those uses. */
8170 TREE_USED (slot) = 1;
8171
8172 if (target == 0)
8173 {
8174 if (DECL_RTL (slot) != 0)
8175 {
8176 target = DECL_RTL (slot);
8177 /* If we have already expanded the slot, don't do
8178 it again. (mrs) */
8179 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8180 return target;
8181 }
8182 else
8183 {
8184 target = assign_temp (type, 2, 0, 1);
8185 /* All temp slots at this level must not conflict. */
8186 preserve_temp_slots (target);
8187 DECL_RTL (slot) = target;
8188 if (TREE_ADDRESSABLE (slot))
8189 put_var_into_stack (slot);
8190
8191 /* Since SLOT is not known to the called function
8192 to belong to its stack frame, we must build an explicit
8193 cleanup. This case occurs when we must build up a reference
8194 to pass the reference as an argument. In this case,
8195 it is very likely that such a reference need not be
8196 built here. */
8197
8198 if (TREE_OPERAND (exp, 2) == 0)
8199 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8200 cleanups = TREE_OPERAND (exp, 2);
8201 }
8202 }
8203 else
8204 {
8205 /* This case does occur, when expanding a parameter which
8206 needs to be constructed on the stack. The target
8207 is the actual stack address that we want to initialize.
8208 The function we call will perform the cleanup in this case. */
8209
8210 /* If we have already assigned it space, use that space,
8211 not the target that we were passed in, as our target
8212 parameter is only a hint. */
8213 if (DECL_RTL (slot) != 0)
8214 {
8215 target = DECL_RTL (slot);
8216 /* If we have already expanded the slot, don't do
8217 it again. (mrs) */
8218 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8219 return target;
8220 }
8221 else
8222 {
8223 DECL_RTL (slot) = target;
8224 /* If we must have an addressable slot, then make sure that
8225 the RTL that we just stored in slot is OK. */
8226 if (TREE_ADDRESSABLE (slot))
8227 put_var_into_stack (slot);
8228 }
8229 }
8230
8231 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8232 /* Mark it as expanded. */
8233 TREE_OPERAND (exp, 1) = NULL_TREE;
8234
8235 store_expr (exp1, target, 0);
8236
8237 expand_decl_cleanup (NULL_TREE, cleanups);
8238
8239 return target;
8240 }
8241
8242 case INIT_EXPR:
8243 {
8244 tree lhs = TREE_OPERAND (exp, 0);
8245 tree rhs = TREE_OPERAND (exp, 1);
8246 tree noncopied_parts = 0;
8247 tree lhs_type = TREE_TYPE (lhs);
8248
8249 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8250 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8251 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8252 TYPE_NONCOPIED_PARTS (lhs_type));
8253 while (noncopied_parts != 0)
8254 {
8255 expand_assignment (TREE_VALUE (noncopied_parts),
8256 TREE_PURPOSE (noncopied_parts), 0, 0);
8257 noncopied_parts = TREE_CHAIN (noncopied_parts);
8258 }
8259 return temp;
8260 }
8261
8262 case MODIFY_EXPR:
8263 {
8264 /* If lhs is complex, expand calls in rhs before computing it.
8265 That's so we don't compute a pointer and save it over a call.
8266 If lhs is simple, compute it first so we can give it as a
8267 target if the rhs is just a call. This avoids an extra temp and copy
8268 and that prevents a partial-subsumption which makes bad code.
8269 Actually we could treat component_ref's of vars like vars. */
8270
8271 tree lhs = TREE_OPERAND (exp, 0);
8272 tree rhs = TREE_OPERAND (exp, 1);
8273 tree noncopied_parts = 0;
8274 tree lhs_type = TREE_TYPE (lhs);
8275
8276 temp = 0;
8277
8278 if (TREE_CODE (lhs) != VAR_DECL
8279 && TREE_CODE (lhs) != RESULT_DECL
8280 && TREE_CODE (lhs) != PARM_DECL
8281 && ! (TREE_CODE (lhs) == INDIRECT_REF
8282 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8283 preexpand_calls (exp);
8284
8285 /* Check for |= or &= of a bitfield of size one into another bitfield
8286 of size 1. In this case, (unless we need the result of the
8287 assignment) we can do this more efficiently with a
8288 test followed by an assignment, if necessary.
8289
8290 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8291 things change so we do, this code should be enhanced to
8292 support it. */
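       /* For example, for S.A |= S.B with two one-bit fields, we can jump
          around the store when S.B is zero and otherwise store a constant
          1, instead of reading, OR-ing and rewriting the destination
          field.  */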
8293 if (ignore
8294 && TREE_CODE (lhs) == COMPONENT_REF
8295 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8296 || TREE_CODE (rhs) == BIT_AND_EXPR)
8297 && TREE_OPERAND (rhs, 0) == lhs
8298 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8299 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8300 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8301 {
8302 rtx label = gen_label_rtx ();
8303
8304 do_jump (TREE_OPERAND (rhs, 1),
8305 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8306 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8307 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8308 (TREE_CODE (rhs) == BIT_IOR_EXPR
8309 ? integer_one_node
8310 : integer_zero_node)),
8311 0, 0);
8312 do_pending_stack_adjust ();
8313 emit_label (label);
8314 return const0_rtx;
8315 }
8316
8317 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8318 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8319 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8320 TYPE_NONCOPIED_PARTS (lhs_type));
8321
8322 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8323 while (noncopied_parts != 0)
8324 {
8325 expand_assignment (TREE_PURPOSE (noncopied_parts),
8326 TREE_VALUE (noncopied_parts), 0, 0);
8327 noncopied_parts = TREE_CHAIN (noncopied_parts);
8328 }
8329 return temp;
8330 }
8331
8332 case RETURN_EXPR:
8333 if (!TREE_OPERAND (exp, 0))
8334 expand_null_return ();
8335 else
8336 expand_return (TREE_OPERAND (exp, 0));
8337 return const0_rtx;
8338
8339 case PREINCREMENT_EXPR:
8340 case PREDECREMENT_EXPR:
8341 return expand_increment (exp, 0, ignore);
8342
8343 case POSTINCREMENT_EXPR:
8344 case POSTDECREMENT_EXPR:
8345 /* Faster to treat as pre-increment if result is not used. */
8346 return expand_increment (exp, ! ignore, ignore);
8347
8348 case ADDR_EXPR:
8349 /* If nonzero, TEMP will be set to the address of something that might
8350 be a MEM corresponding to a stack slot. */
8351 temp = 0;
8352
8353 /* Are we taking the address of a nested function? */
8354 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8355 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8356 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8357 && ! TREE_STATIC (exp))
8358 {
8359 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8360 op0 = force_operand (op0, target);
8361 }
8362 /* If we are taking the address of something erroneous, just
8363 return a zero. */
8364 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8365 return const0_rtx;
8366 else
8367 {
8368 /* We make sure to pass const0_rtx down if we came in with
8369 ignore set, to avoid doing the cleanups twice for something. */
8370 op0 = expand_expr (TREE_OPERAND (exp, 0),
8371 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8372 (modifier == EXPAND_INITIALIZER
8373 ? modifier : EXPAND_CONST_ADDRESS));
8374
8375 /* If we are going to ignore the result, OP0 will have been set
8376 to const0_rtx, so just return it. Don't get confused and
8377 think we are taking the address of the constant. */
8378 if (ignore)
8379 return op0;
8380
8381 op0 = protect_from_queue (op0, 0);
8382
8383 /* We would like the object in memory. If it is a constant, we can
8384 have it be statically allocated into memory. For a non-constant,
8385 we need to allocate some memory and store the value into it. */
8386
8387 if (CONSTANT_P (op0))
8388 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8389 op0);
8390 else if (GET_CODE (op0) == MEM)
8391 {
8392 mark_temp_addr_taken (op0);
8393 temp = XEXP (op0, 0);
8394 }
8395
8396 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8397 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8398 {
8399 /* If this object is in a register, it must not
8400 be BLKmode. */
8401 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8402 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8403
8404 mark_temp_addr_taken (memloc);
8405 emit_move_insn (memloc, op0);
8406 op0 = memloc;
8407 }
8408
8409 if (GET_CODE (op0) != MEM)
8410 abort ();
8411
8412 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8413 {
8414 temp = XEXP (op0, 0);
8415 #ifdef POINTERS_EXTEND_UNSIGNED
8416 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8417 && mode == ptr_mode)
8418 temp = convert_memory_address (ptr_mode, temp);
8419 #endif
8420 return temp;
8421 }
8422
8423 op0 = force_operand (XEXP (op0, 0), target);
8424 }
8425
8426 if (flag_force_addr && GET_CODE (op0) != REG)
8427 op0 = force_reg (Pmode, op0);
8428
8429 if (GET_CODE (op0) == REG
8430 && ! REG_USERVAR_P (op0))
8431 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8432
8433 /* If we might have had a temp slot, add an equivalent address
8434 for it. */
8435 if (temp != 0)
8436 update_temp_slot_address (temp, op0);
8437
8438 #ifdef POINTERS_EXTEND_UNSIGNED
8439 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8440 && mode == ptr_mode)
8441 op0 = convert_memory_address (ptr_mode, op0);
8442 #endif
8443
8444 return op0;
8445
8446 case ENTRY_VALUE_EXPR:
8447 abort ();
8448
8449 /* COMPLEX type for Extended Pascal & Fortran */
8450 case COMPLEX_EXPR:
8451 {
8452 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8453 rtx insns;
8454
8455 /* Get the rtx code of the operands. */
8456 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8457 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8458
8459 if (! target)
8460 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8461
8462 start_sequence ();
8463
8464 /* Move the real (op0) and imaginary (op1) parts to their location. */
8465 emit_move_insn (gen_realpart (mode, target), op0);
8466 emit_move_insn (gen_imagpart (mode, target), op1);
8467
8468 insns = get_insns ();
8469 end_sequence ();
8470
8471 /* Complex construction should appear as a single unit. */
8472 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8473 each with a separate pseudo as destination.
8474 It's not correct for flow to treat them as a unit. */
8475 if (GET_CODE (target) != CONCAT)
8476 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8477 else
8478 emit_insns (insns);
8479
8480 return target;
8481 }
8482
8483 case REALPART_EXPR:
8484 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8485 return gen_realpart (mode, op0);
8486
8487 case IMAGPART_EXPR:
8488 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8489 return gen_imagpart (mode, op0);
8490
8491 case CONJ_EXPR:
8492 {
8493 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8494 rtx imag_t;
8495 rtx insns;
8496
8497 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8498
8499 if (! target)
8500 target = gen_reg_rtx (mode);
8501
8502 start_sequence ();
8503
8504 /* Store the realpart and the negated imagpart to target. */
8505 emit_move_insn (gen_realpart (partmode, target),
8506 gen_realpart (partmode, op0));
8507
8508 imag_t = gen_imagpart (partmode, target);
8509 temp = expand_unop (partmode, neg_optab,
8510 gen_imagpart (partmode, op0), imag_t, 0);
8511 if (temp != imag_t)
8512 emit_move_insn (imag_t, temp);
8513
8514 insns = get_insns ();
8515 end_sequence ();
8516
8517 /* Conjugate should appear as a single unit.
8518 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8519 each with a separate pseudo as destination.
8520 It's not correct for flow to treat them as a unit. */
8521 if (GET_CODE (target) != CONCAT)
8522 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8523 else
8524 emit_insns (insns);
8525
8526 return target;
8527 }
8528
8529 case TRY_CATCH_EXPR:
8530 {
8531 tree handler = TREE_OPERAND (exp, 1);
8532
8533 expand_eh_region_start ();
8534
8535 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8536
8537 expand_eh_region_end (handler);
8538
8539 return op0;
8540 }
8541
8542 case TRY_FINALLY_EXPR:
8543 {
8544 tree try_block = TREE_OPERAND (exp, 0);
8545 tree finally_block = TREE_OPERAND (exp, 1);
8546 rtx finally_label = gen_label_rtx ();
8547 rtx done_label = gen_label_rtx ();
8548 rtx return_link = gen_reg_rtx (Pmode);
8549 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8550 (tree) finally_label, (tree) return_link);
8551 TREE_SIDE_EFFECTS (cleanup) = 1;
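     /* The FINALLY block is emitted once, out of line, at FINALLY_LABEL
        below.  The cleanup registered just below is a GOTO_SUBROUTINE_EXPR,
        so each path leaving the binding contour loads a return address into
        RETURN_LINK, jumps to FINALLY_LABEL, and resumes after the finally
        block's indirect jump; the fall-through path then skips the
        out-of-line copy via DONE_LABEL.  */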
8552
8553 /* Start a new binding layer that will keep track of all cleanup
8554 actions to be performed. */
8555 expand_start_bindings (2);
8556
8557 target_temp_slot_level = temp_slot_level;
8558
8559 expand_decl_cleanup (NULL_TREE, cleanup);
8560 op0 = expand_expr (try_block, target, tmode, modifier);
8561
8562 preserve_temp_slots (op0);
8563 expand_end_bindings (NULL_TREE, 0, 0);
8564 emit_jump (done_label);
8565 emit_label (finally_label);
8566 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8567 emit_indirect_jump (return_link);
8568 emit_label (done_label);
8569 return op0;
8570 }
8571
8572 case GOTO_SUBROUTINE_EXPR:
8573 {
8574 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8575 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8576 rtx return_address = gen_label_rtx ();
8577 emit_move_insn (return_link,
8578 gen_rtx_LABEL_REF (Pmode, return_address));
8579 emit_jump (subr);
8580 emit_label (return_address);
8581 return const0_rtx;
8582 }
8583
8584 case POPDCC_EXPR:
8585 {
8586 rtx dcc = get_dynamic_cleanup_chain ();
8587 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8588 return const0_rtx;
8589 }
8590
8591 case POPDHC_EXPR:
8592 {
8593 rtx dhc = get_dynamic_handler_chain ();
8594 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8595 return const0_rtx;
8596 }
8597
8598 case VA_ARG_EXPR:
8599 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8600
8601 default:
8602 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8603 }
8604
8605 /* Here to do an ordinary binary operator, generating an instruction
8606 from the optab already placed in `this_optab'. */
8607 binop:
8608 preexpand_calls (exp);
8609 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8610 subtarget = 0;
8611 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8612 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8613 binop2:
8614 temp = expand_binop (mode, this_optab, op0, op1, target,
8615 unsignedp, OPTAB_LIB_WIDEN);
8616 if (temp == 0)
8617 abort ();
8618 return temp;
8619 }
8620 \f
8621 /* Similar to expand_expr, except that we don't specify a target, target
8622 mode, or modifier and we return the alignment of the inner type. This is
8623 used in cases where it is not necessary to align the result to the
8624 alignment of its type as long as we know the alignment of the result, for
8625 example for comparisons of BLKmode values. */
8626
8627 static rtx
8628 expand_expr_unaligned (exp, palign)
8629 register tree exp;
8630 unsigned int *palign;
8631 {
8632 register rtx op0;
8633 tree type = TREE_TYPE (exp);
8634 register enum machine_mode mode = TYPE_MODE (type);
8635
8636 /* Default the alignment we return to that of the type. */
8637 *palign = TYPE_ALIGN (type);
8638
8639   /* The only case in which we do anything special is when the resulting
8640      mode is BLKmode.  */
8641 if (mode != BLKmode)
8642 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8643
8644 switch (TREE_CODE (exp))
8645 {
8646 case CONVERT_EXPR:
8647 case NOP_EXPR:
8648 case NON_LVALUE_EXPR:
8649 /* Conversions between BLKmode values don't change the underlying
8650 alignment or value. */
8651 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8652 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8653 break;
8654
8655 case ARRAY_REF:
8656 /* Much of the code for this case is copied directly from expand_expr.
8657 We need to duplicate it here because we will do something different
8658 in the fall-through case, so we need to handle the same exceptions
8659 it does. */
8660 {
8661 tree array = TREE_OPERAND (exp, 0);
8662 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8663 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8664 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8665 HOST_WIDE_INT i;
8666
8667 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8668 abort ();
8669
8670 /* Optimize the special-case of a zero lower bound.
8671
8672 We convert the low_bound to sizetype to avoid some problems
8673 with constant folding. (E.g. suppose the lower bound is 1,
8674 and its mode is QI. Without the conversion, (ARRAY
8675 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8676 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8677
8678 if (! integer_zerop (low_bound))
8679 index = size_diffop (index, convert (sizetype, low_bound));
8680
8681 /* If this is a constant index into a constant array,
8682 just get the value from the array. Handle both the cases when
8683 we have an explicit constructor and when our operand is a variable
8684 that was declared const. */
8685
8686 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8687 && 0 > compare_tree_int (index,
8688 list_length (CONSTRUCTOR_ELTS
8689 (TREE_OPERAND (exp, 0)))))
8690 {
8691 tree elem;
8692
8693 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8694 i = TREE_INT_CST_LOW (index);
8695 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8696 ;
8697
8698 if (elem)
8699 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8700 }
8701
8702 else if (optimize >= 1
8703 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8704 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8705 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8706 {
8707 if (TREE_CODE (index) == INTEGER_CST)
8708 {
8709 tree init = DECL_INITIAL (array);
8710
8711 if (TREE_CODE (init) == CONSTRUCTOR)
8712 {
8713 tree elem;
8714
8715 for (elem = CONSTRUCTOR_ELTS (init);
8716 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8717 elem = TREE_CHAIN (elem))
8718 ;
8719
8720 if (elem)
8721 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8722 palign);
8723 }
8724 }
8725 }
8726 }
8727 /* Fall through. */
8728
8729 case COMPONENT_REF:
8730 case BIT_FIELD_REF:
8731 /* If the operand is a CONSTRUCTOR, we can just extract the
8732 appropriate field if it is present. Don't do this if we have
8733 already written the data since we want to refer to that copy
8734 and varasm.c assumes that's what we'll do. */
8735 if (TREE_CODE (exp) != ARRAY_REF
8736 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8737 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8738 {
8739 tree elt;
8740
8741 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8742 elt = TREE_CHAIN (elt))
8743 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8744 /* Note that unlike the case in expand_expr, we know this is
8745 BLKmode and hence not an integer. */
8746 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8747 }
8748
8749 {
8750 enum machine_mode mode1;
8751 HOST_WIDE_INT bitsize, bitpos;
8752 tree offset;
8753 int volatilep = 0;
8754 unsigned int alignment;
8755 int unsignedp;
8756 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8757 &mode1, &unsignedp, &volatilep,
8758 &alignment);
8759
8760 /* If we got back the original object, something is wrong. Perhaps
8761 we are evaluating an expression too early. In any event, don't
8762 infinitely recurse. */
8763 if (tem == exp)
8764 abort ();
8765
8766 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8767
8768 	  /* If this is a constant, put it in a register if it is a legitimate
8769 	     constant and OFFSET is 0; otherwise force it into memory.  */
8770 if (CONSTANT_P (op0))
8771 {
8772 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8773
8774 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8775 && offset == 0)
8776 op0 = force_reg (inner_mode, op0);
8777 else
8778 op0 = validize_mem (force_const_mem (inner_mode, op0));
8779 }
8780
8781 if (offset != 0)
8782 {
8783 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8784
8785 /* If this object is in a register, put it into memory.
8786 This case can't occur in C, but can in Ada if we have
8787 unchecked conversion of an expression from a scalar type to
8788 an array or record type. */
8789 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8790 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8791 {
8792 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8793
8794 mark_temp_addr_taken (memloc);
8795 emit_move_insn (memloc, op0);
8796 op0 = memloc;
8797 }
8798
8799 if (GET_CODE (op0) != MEM)
8800 abort ();
8801
8802 if (GET_MODE (offset_rtx) != ptr_mode)
8803 {
8804 #ifdef POINTERS_EXTEND_UNSIGNED
8805 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8806 #else
8807 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8808 #endif
8809 }
8810
8811 op0 = change_address (op0, VOIDmode,
8812 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8813 force_reg (ptr_mode,
8814 offset_rtx)));
8815 }
8816
8817 /* Don't forget about volatility even if this is a bitfield. */
8818 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8819 {
8820 op0 = copy_rtx (op0);
8821 MEM_VOLATILE_P (op0) = 1;
8822 }
8823
8824 /* Check the access. */
8825 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8826 {
8827 rtx to;
8828 int size;
8829
8830 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8831 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8832
8833 /* Check the access right of the pointer. */
8834 in_check_memory_usage = 1;
8835 if (size > BITS_PER_UNIT)
8836 emit_library_call (chkr_check_addr_libfunc,
8837 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
8838 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8839 TYPE_MODE (sizetype),
8840 GEN_INT (MEMORY_USE_RO),
8841 TYPE_MODE (integer_type_node));
8842 in_check_memory_usage = 0;
8843 }
8844
8845 /* In cases where an aligned union has an unaligned object
8846 as a field, we might be extracting a BLKmode value from
8847 an integer-mode (e.g., SImode) object. Handle this case
8848 by doing the extract into an object as wide as the field
8849 (which we know to be the width of a basic mode), then
8850 storing into memory, and changing the mode to BLKmode.
8851 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8852 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8853 if (mode1 == VOIDmode
8854 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8855 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8856 && (TYPE_ALIGN (type) > alignment
8857 || bitpos % TYPE_ALIGN (type) != 0)))
8858 {
8859 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8860
8861 if (ext_mode == BLKmode)
8862 {
8863 /* In this case, BITPOS must start at a byte boundary. */
8864 if (GET_CODE (op0) != MEM
8865 || bitpos % BITS_PER_UNIT != 0)
8866 abort ();
8867
8868 op0 = change_address (op0, VOIDmode,
8869 plus_constant (XEXP (op0, 0),
8870 bitpos / BITS_PER_UNIT));
8871 }
8872 else
8873 {
8874 rtx new = assign_stack_temp (ext_mode,
8875 bitsize / BITS_PER_UNIT, 0);
8876
8877 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8878 unsignedp, NULL_RTX, ext_mode,
8879 ext_mode, alignment,
8880 int_size_in_bytes (TREE_TYPE (tem)));
8881
8882 /* If the result is a record type and BITSIZE is narrower than
8883 the mode of OP0, an integral mode, and this is a big endian
8884 machine, we must put the field into the high-order bits. */
8885 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8886 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8887 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8888 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8889 size_int (GET_MODE_BITSIZE
8890 (GET_MODE (op0))
8891 - bitsize),
8892 op0, 1);
8893
8894 emit_move_insn (new, op0);
8895 op0 = copy_rtx (new);
8896 PUT_MODE (op0, BLKmode);
8897 }
8898 }
8899 else
8900 /* Get a reference to just this component. */
8901 op0 = change_address (op0, mode1,
8902 plus_constant (XEXP (op0, 0),
8903 (bitpos / BITS_PER_UNIT)));
8904
8905 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8906
8907 /* Adjust the alignment in case the bit position is not
8908 a multiple of the alignment of the inner object. */
8909 while (bitpos % alignment != 0)
8910 alignment >>= 1;
8911
8912 if (GET_CODE (XEXP (op0, 0)) == REG)
8913 mark_reg_pointer (XEXP (op0, 0), alignment);
8914
8915 MEM_IN_STRUCT_P (op0) = 1;
8916 MEM_VOLATILE_P (op0) |= volatilep;
8917
8918 *palign = alignment;
8919 return op0;
8920 }
8921
8922 default:
8923 break;
8924
8925 }
8926
8927 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8928 }
8929 \f
8930 /* Return the tree node if ARG corresponds to a string constant or zero
8931 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8932 in bytes within the string that ARG is accessing. The type of the
8933 offset will be `sizetype'. */
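/* For example, for `"hello" + 2' -- a PLUS_EXPR of an ADDR_EXPR of the
   STRING_CST and the constant 2 -- this returns the STRING_CST and sets
   *PTR_OFFSET to 2.  */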
8934
8935 tree
8936 string_constant (arg, ptr_offset)
8937 tree arg;
8938 tree *ptr_offset;
8939 {
8940 STRIP_NOPS (arg);
8941
8942 if (TREE_CODE (arg) == ADDR_EXPR
8943 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8944 {
8945 *ptr_offset = size_zero_node;
8946 return TREE_OPERAND (arg, 0);
8947 }
8948 else if (TREE_CODE (arg) == PLUS_EXPR)
8949 {
8950 tree arg0 = TREE_OPERAND (arg, 0);
8951 tree arg1 = TREE_OPERAND (arg, 1);
8952
8953 STRIP_NOPS (arg0);
8954 STRIP_NOPS (arg1);
8955
8956 if (TREE_CODE (arg0) == ADDR_EXPR
8957 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8958 {
8959 *ptr_offset = convert (sizetype, arg1);
8960 return TREE_OPERAND (arg0, 0);
8961 }
8962 else if (TREE_CODE (arg1) == ADDR_EXPR
8963 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8964 {
8965 *ptr_offset = convert (sizetype, arg0);
8966 return TREE_OPERAND (arg1, 0);
8967 }
8968 }
8969
8970 return 0;
8971 }
8972 \f
8973 /* Expand code for a post- or pre- increment or decrement
8974 and return the RTX for the result.
8975 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8976
8977 static rtx
8978 expand_increment (exp, post, ignore)
8979 register tree exp;
8980 int post, ignore;
8981 {
8982 register rtx op0, op1;
8983 register rtx temp, value;
8984 register tree incremented = TREE_OPERAND (exp, 0);
8985 optab this_optab = add_optab;
8986 int icode;
8987 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8988 int op0_is_copy = 0;
8989 int single_insn = 0;
8990 /* 1 means we can't store into OP0 directly,
8991 because it is a subreg narrower than a word,
8992 and we don't dare clobber the rest of the word. */
8993 int bad_subreg = 0;
8994
8995 /* Stabilize any component ref that might need to be
8996 evaluated more than once below. */
8997 if (!post
8998 || TREE_CODE (incremented) == BIT_FIELD_REF
8999 || (TREE_CODE (incremented) == COMPONENT_REF
9000 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9001 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9002 incremented = stabilize_reference (incremented);
9003 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9004 ones into save exprs so that they don't accidentally get evaluated
9005 more than once by the code below. */
9006 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9007 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9008 incremented = save_expr (incremented);
9009
9010 /* Compute the operands as RTX.
9011 Note whether OP0 is the actual lvalue or a copy of it:
9012 I believe it is a copy iff it is a register or subreg
9013 and insns were generated in computing it. */
9014
9015 temp = get_last_insn ();
9016 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9017
9018 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9019 in place but instead must do sign- or zero-extension during assignment,
9020 so we copy it into a new register and let the code below use it as
9021 a copy.
9022
9023    Note that we can safely modify this SUBREG since it is known not to be
9024 shared (it was made by the expand_expr call above). */
9025
9026 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9027 {
9028 if (post)
9029 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9030 else
9031 bad_subreg = 1;
9032 }
9033 else if (GET_CODE (op0) == SUBREG
9034 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9035 {
9036 /* We cannot increment this SUBREG in place. If we are
9037 post-incrementing, get a copy of the old value. Otherwise,
9038 just mark that we cannot increment in place. */
9039 if (post)
9040 op0 = copy_to_reg (op0);
9041 else
9042 bad_subreg = 1;
9043 }
9044
9045 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9046 && temp != get_last_insn ());
9047 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9048 EXPAND_MEMORY_USE_BAD);
9049
9050 /* Decide whether incrementing or decrementing. */
9051 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9052 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9053 this_optab = sub_optab;
9054
9055 /* Convert decrement by a constant into a negative increment. */
9056 if (this_optab == sub_optab
9057 && GET_CODE (op1) == CONST_INT)
9058 {
9059 op1 = GEN_INT (-INTVAL (op1));
9060 this_optab = add_optab;
9061 }
9062
9063 /* For a preincrement, see if we can do this with a single instruction. */
9064 if (!post)
9065 {
9066 icode = (int) this_optab->handlers[(int) mode].insn_code;
9067 if (icode != (int) CODE_FOR_nothing
9068 /* Make sure that OP0 is valid for operands 0 and 1
9069 of the insn we want to queue. */
9070 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9071 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9072 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9073 single_insn = 1;
9074 }
9075
9076 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9077 then we cannot just increment OP0. We must therefore contrive to
9078 increment the original value. Then, for postincrement, we can return
9079 OP0 since it is a copy of the old value. For preincrement, expand here
9080 unless we can do it with a single insn.
9081
9082 Likewise if storing directly into OP0 would clobber high bits
9083 we need to preserve (bad_subreg). */
9084 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9085 {
9086 /* This is the easiest way to increment the value wherever it is.
9087 Problems with multiple evaluation of INCREMENTED are prevented
9088 because either (1) it is a component_ref or preincrement,
9089 in which case it was stabilized above, or (2) it is an array_ref
9090 with constant index in an array in a register, which is
9091 safe to reevaluate. */
9092 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9093 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9094 ? MINUS_EXPR : PLUS_EXPR),
9095 TREE_TYPE (exp),
9096 incremented,
9097 TREE_OPERAND (exp, 1));
9098
9099 while (TREE_CODE (incremented) == NOP_EXPR
9100 || TREE_CODE (incremented) == CONVERT_EXPR)
9101 {
9102 newexp = convert (TREE_TYPE (incremented), newexp);
9103 incremented = TREE_OPERAND (incremented, 0);
9104 }
9105
9106       temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9107 return post ? op0 : temp;
9108 }
9109
9110 if (post)
9111 {
9112 /* We have a true reference to the value in OP0.
9113 If there is an insn to add or subtract in this mode, queue it.
9114 Queueing the increment insn avoids the register shuffling
9115 that often results if we must increment now and first save
9116 the old value for subsequent use. */
9117
9118 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9119 op0 = stabilize (op0);
9120 #endif
9121
9122 icode = (int) this_optab->handlers[(int) mode].insn_code;
9123 if (icode != (int) CODE_FOR_nothing
9124 /* Make sure that OP0 is valid for operands 0 and 1
9125 of the insn we want to queue. */
9126 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9127 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9128 {
9129 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9130 op1 = force_reg (mode, op1);
9131
9132 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9133 }
9134 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9135 {
9136 rtx addr = (general_operand (XEXP (op0, 0), mode)
9137 ? force_reg (Pmode, XEXP (op0, 0))
9138 : copy_to_reg (XEXP (op0, 0)));
9139 rtx temp, result;
9140
9141 op0 = change_address (op0, VOIDmode, addr);
9142 temp = force_reg (GET_MODE (op0), op0);
9143 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9144 op1 = force_reg (mode, op1);
9145
9146 /* The increment queue is LIFO, thus we have to `queue'
9147 the instructions in reverse order. */
9148 enqueue_insn (op0, gen_move_insn (op0, temp));
9149 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9150 return result;
9151 }
9152 }
9153
9154 /* Preincrement, or we can't increment with one simple insn. */
9155 if (post)
9156 /* Save a copy of the value before inc or dec, to return it later. */
9157 temp = value = copy_to_reg (op0);
9158 else
9159 /* Arrange to return the incremented value. */
9160 /* Copy the rtx because expand_binop will protect from the queue,
9161 and the results of that would be invalid for us to return
9162 if our caller does emit_queue before using our result. */
9163 temp = copy_rtx (value = op0);
9164
9165 /* Increment however we can. */
9166 op1 = expand_binop (mode, this_optab, value, op1,
9167 current_function_check_memory_usage ? NULL_RTX : op0,
9168 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9169 /* Make sure the value is stored into OP0. */
9170 if (op1 != op0)
9171 emit_move_insn (op0, op1);
9172
9173 return temp;
9174 }
9175 \f
9176 /* Expand all function calls contained within EXP, innermost ones first.
9177 But don't look within expressions that have sequence points.
9178 For each CALL_EXPR, record the rtx for its value
9179 in the CALL_EXPR_RTL field. */
9180
9181 static void
9182 preexpand_calls (exp)
9183 tree exp;
9184 {
9185 register int nops, i;
9186 int class = TREE_CODE_CLASS (TREE_CODE (exp));
9187
9188 if (! do_preexpand_calls)
9189 return;
9190
9191 /* Only expressions and references can contain calls. */
9192
9193 if (! IS_EXPR_CODE_CLASS (class) && class != 'r')
9194 return;
9195
9196 switch (TREE_CODE (exp))
9197 {
9198 case CALL_EXPR:
9199 /* Do nothing if already expanded. */
9200 if (CALL_EXPR_RTL (exp) != 0
9201 /* Do nothing if the call returns a variable-sized object. */
9202 || (TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE
9203 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9204 /* Do nothing to built-in functions. */
9205 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9206 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9207 == FUNCTION_DECL)
9208 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9209 return;
9210
9211 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9212 return;
9213
9214 case COMPOUND_EXPR:
9215 case COND_EXPR:
9216 case TRUTH_ANDIF_EXPR:
9217 case TRUTH_ORIF_EXPR:
9218 /* If we find one of these, then we can be sure
9219 the adjust will be done for it (since it makes jumps).
9220 Do it now, so that if this is inside an argument
9221 of a function, we don't get the stack adjustment
9222 after some other args have already been pushed. */
9223 do_pending_stack_adjust ();
9224 return;
9225
9226 case BLOCK:
9227 case RTL_EXPR:
9228 case WITH_CLEANUP_EXPR:
9229 case CLEANUP_POINT_EXPR:
9230 case TRY_CATCH_EXPR:
9231 return;
9232
9233 case SAVE_EXPR:
9234 if (SAVE_EXPR_RTL (exp) != 0)
9235 return;
9236
9237 default:
9238 break;
9239 }
9240
9241 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
9242 for (i = 0; i < nops; i++)
9243 if (TREE_OPERAND (exp, i) != 0)
9244 {
9245 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9246 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9247 It doesn't happen before the call is made. */
9248 ;
9249 else
9250 {
9251 class = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9252 if (IS_EXPR_CODE_CLASS (class) || class == 'r')
9253 preexpand_calls (TREE_OPERAND (exp, i));
9254 }
9255 }
9256 }
9257 \f
9258 /* At the start of a function, record that we have no previously-pushed
9259 arguments waiting to be popped. */
9260
9261 void
9262 init_pending_stack_adjust ()
9263 {
9264 pending_stack_adjust = 0;
9265 }
9266
9267 /* When exiting from a function, if safe, clear out any pending stack adjust
9268 so the adjustment won't get done.
9269
9270 Note, if the current function calls alloca, then it must have a
9271 frame pointer regardless of the value of flag_omit_frame_pointer. */
9272
9273 void
9274 clear_pending_stack_adjust ()
9275 {
9276 #ifdef EXIT_IGNORE_STACK
9277 if (optimize > 0
9278 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9279 && EXIT_IGNORE_STACK
9280 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9281 && ! flag_inline_functions)
9282 {
9283       stack_pointer_delta -= pending_stack_adjust;
9284       pending_stack_adjust = 0;
9285 }
9286 #endif
9287 }
9288
9289 /* Pop any previously-pushed arguments that have not been popped yet. */
9290
9291 void
9292 do_pending_stack_adjust ()
9293 {
9294 if (inhibit_defer_pop == 0)
9295 {
9296 if (pending_stack_adjust != 0)
9297 adjust_stack (GEN_INT (pending_stack_adjust));
9298 pending_stack_adjust = 0;
9299 }
9300 }
9301 \f
9302 /* Expand conditional expressions. */
9303
9304 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9305 LABEL is an rtx of code CODE_LABEL, in this function and all the
9306 functions here. */
9307
9308 void
9309 jumpifnot (exp, label)
9310 tree exp;
9311 rtx label;
9312 {
9313 do_jump (exp, label, NULL_RTX);
9314 }
9315
9316 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9317
9318 void
9319 jumpif (exp, label)
9320 tree exp;
9321 rtx label;
9322 {
9323 do_jump (exp, NULL_RTX, label);
9324 }
9325
9326 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9327 the result is zero, or IF_TRUE_LABEL if the result is one.
9328 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9329 meaning fall through in that case.
9330
9331 do_jump always does any pending stack adjust except when it does not
9332 actually perform a jump. An example where there is no jump
9333 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9334
9335 This function is responsible for optimizing cases such as
9336 &&, || and comparison operators in EXP. */
9337
9338 void
9339 do_jump (exp, if_false_label, if_true_label)
9340 tree exp;
9341 rtx if_false_label, if_true_label;
9342 {
9343 register enum tree_code code = TREE_CODE (exp);
9344 /* Some cases need to create a label to jump to
9345 in order to properly fall through.
9346 These cases set DROP_THROUGH_LABEL nonzero. */
9347 rtx drop_through_label = 0;
9348 rtx temp;
9349 int i;
9350 tree type;
9351 enum machine_mode mode;
9352
9353 #ifdef MAX_INTEGER_COMPUTATION_MODE
9354 check_max_integer_computation_mode (exp);
9355 #endif
9356
9357 emit_queue ();
9358
9359 switch (code)
9360 {
9361 case ERROR_MARK:
9362 break;
9363
9364 case INTEGER_CST:
9365 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9366 if (temp)
9367 emit_jump (temp);
9368 break;
9369
9370 #if 0
9371   /* This is not true with #pragma weak.  */
9372 case ADDR_EXPR:
9373 /* The address of something can never be zero. */
9374 if (if_true_label)
9375 emit_jump (if_true_label);
9376 break;
9377 #endif
9378
9379 case NOP_EXPR:
9380 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9381 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9382 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9383 goto normal;
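      /* ... fall through ...  */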
9384 case CONVERT_EXPR:
9385 /* If we are narrowing the operand, we have to do the compare in the
9386 narrower mode. */
9387 if ((TYPE_PRECISION (TREE_TYPE (exp))
9388 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9389 goto normal;
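      /* ... fall through ...  */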
9390 case NON_LVALUE_EXPR:
9391 case REFERENCE_EXPR:
9392 case ABS_EXPR:
9393 case NEGATE_EXPR:
9394 case LROTATE_EXPR:
9395 case RROTATE_EXPR:
9396 /* These cannot change zero->non-zero or vice versa. */
9397 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9398 break;
9399
9400 case WITH_RECORD_EXPR:
9401 /* Put the object on the placeholder list, recurse through our first
9402 operand, and pop the list. */
9403 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9404 placeholder_list);
9405 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9406 placeholder_list = TREE_CHAIN (placeholder_list);
9407 break;
9408
9409 #if 0
9410   /* This never takes fewer insns than evaluating the PLUS_EXPR followed by
9411      a test, and can be longer if the test is eliminated.  */
9412 case PLUS_EXPR:
9413 /* Reduce to minus. */
9414 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9415 TREE_OPERAND (exp, 0),
9416 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9417 TREE_OPERAND (exp, 1))));
9418 /* Process as MINUS. */
9419 #endif
9420
9421 case MINUS_EXPR:
9422 /* Non-zero iff operands of minus differ. */
9423 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9424 TREE_OPERAND (exp, 0),
9425 TREE_OPERAND (exp, 1)),
9426 NE, NE, if_false_label, if_true_label);
9427 break;
9428
9429 case BIT_AND_EXPR:
9430 /* If we are AND'ing with a small constant, do this comparison in the
9431 smallest type that fits. If the machine doesn't have comparisons
9432 that small, it will be converted back to the wider comparison.
9433 This helps if we are testing the sign bit of a narrower object.
9434 combine can't do this for us because it can't know whether a
9435 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
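      /* For example, `(c & 0x80) != 0' where C is a char can then be
	 tested with a comparison in the char-sized mode (typically QImode)
	 rather than in the promoted mode of the expression.  */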
9436
9437 if (! SLOW_BYTE_ACCESS
9438 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9439 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9440 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9441 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9442 && (type = type_for_mode (mode, 1)) != 0
9443 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9444 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9445 != CODE_FOR_nothing))
9446 {
9447 do_jump (convert (type, exp), if_false_label, if_true_label);
9448 break;
9449 }
9450 goto normal;
9451
9452 case TRUTH_NOT_EXPR:
9453 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9454 break;
9455
9456 case TRUTH_ANDIF_EXPR:
9457 if (if_false_label == 0)
9458 if_false_label = drop_through_label = gen_label_rtx ();
9459 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9460 start_cleanup_deferral ();
9461 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9462 end_cleanup_deferral ();
9463 break;
9464
9465 case TRUTH_ORIF_EXPR:
9466 if (if_true_label == 0)
9467 if_true_label = drop_through_label = gen_label_rtx ();
9468 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9469 start_cleanup_deferral ();
9470 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9471 end_cleanup_deferral ();
9472 break;
9473
9474 case COMPOUND_EXPR:
9475 push_temp_slots ();
9476 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9477 preserve_temp_slots (NULL_RTX);
9478 free_temp_slots ();
9479 pop_temp_slots ();
9480 emit_queue ();
9481 do_pending_stack_adjust ();
9482 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9483 break;
9484
9485 case COMPONENT_REF:
9486 case BIT_FIELD_REF:
9487 case ARRAY_REF:
9488 {
9489 HOST_WIDE_INT bitsize, bitpos;
9490 int unsignedp;
9491 enum machine_mode mode;
9492 tree type;
9493 tree offset;
9494 int volatilep = 0;
9495 unsigned int alignment;
9496
9497 /* Get description of this reference. We don't actually care
9498 about the underlying object here. */
9499 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9500 &unsignedp, &volatilep, &alignment);
9501
9502 type = type_for_size (bitsize, unsignedp);
9503 if (! SLOW_BYTE_ACCESS
9504 && type != 0 && bitsize >= 0
9505 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9506 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9507 != CODE_FOR_nothing))
9508 {
9509 do_jump (convert (type, exp), if_false_label, if_true_label);
9510 break;
9511 }
9512 goto normal;
9513 }
9514
9515 case COND_EXPR:
9516 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9517 if (integer_onep (TREE_OPERAND (exp, 1))
9518 && integer_zerop (TREE_OPERAND (exp, 2)))
9519 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9520
9521 else if (integer_zerop (TREE_OPERAND (exp, 1))
9522 && integer_onep (TREE_OPERAND (exp, 2)))
9523 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9524
9525 else
9526 {
9527 register rtx label1 = gen_label_rtx ();
9528 drop_through_label = gen_label_rtx ();
9529
9530 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9531
9532 start_cleanup_deferral ();
9533 /* Now the THEN-expression. */
9534 do_jump (TREE_OPERAND (exp, 1),
9535 if_false_label ? if_false_label : drop_through_label,
9536 if_true_label ? if_true_label : drop_through_label);
9537 /* In case the do_jump just above never jumps. */
9538 do_pending_stack_adjust ();
9539 emit_label (label1);
9540
9541 /* Now the ELSE-expression. */
9542 do_jump (TREE_OPERAND (exp, 2),
9543 if_false_label ? if_false_label : drop_through_label,
9544 if_true_label ? if_true_label : drop_through_label);
9545 end_cleanup_deferral ();
9546 }
9547 break;
9548
9549 case EQ_EXPR:
9550 {
9551 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9552
9553 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9554 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9555 {
9556 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9557 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9558 do_jump
9559 (fold
9560 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9561 fold (build (EQ_EXPR, TREE_TYPE (exp),
9562 fold (build1 (REALPART_EXPR,
9563 TREE_TYPE (inner_type),
9564 exp0)),
9565 fold (build1 (REALPART_EXPR,
9566 TREE_TYPE (inner_type),
9567 exp1)))),
9568 fold (build (EQ_EXPR, TREE_TYPE (exp),
9569 fold (build1 (IMAGPART_EXPR,
9570 TREE_TYPE (inner_type),
9571 exp0)),
9572 fold (build1 (IMAGPART_EXPR,
9573 TREE_TYPE (inner_type),
9574 exp1)))))),
9575 if_false_label, if_true_label);
9576 }
9577
9578 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9579 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9580
9581 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9582 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9583 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9584 else
9585 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9586 break;
9587 }
9588
9589 case NE_EXPR:
9590 {
9591 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9592
9593 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9594 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9595 {
9596 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9597 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9598 do_jump
9599 (fold
9600 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9601 fold (build (NE_EXPR, TREE_TYPE (exp),
9602 fold (build1 (REALPART_EXPR,
9603 TREE_TYPE (inner_type),
9604 exp0)),
9605 fold (build1 (REALPART_EXPR,
9606 TREE_TYPE (inner_type),
9607 exp1)))),
9608 fold (build (NE_EXPR, TREE_TYPE (exp),
9609 fold (build1 (IMAGPART_EXPR,
9610 TREE_TYPE (inner_type),
9611 exp0)),
9612 fold (build1 (IMAGPART_EXPR,
9613 TREE_TYPE (inner_type),
9614 exp1)))))),
9615 if_false_label, if_true_label);
9616 }
9617
9618 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9619 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9620
9621 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9622 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9623 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9624 else
9625 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9626 break;
9627 }
9628
9629 case LT_EXPR:
9630 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9631 if (GET_MODE_CLASS (mode) == MODE_INT
9632 && ! can_compare_p (LT, mode, ccp_jump))
9633 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9634 else
9635 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9636 break;
9637
9638 case LE_EXPR:
9639 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9640 if (GET_MODE_CLASS (mode) == MODE_INT
9641 && ! can_compare_p (LE, mode, ccp_jump))
9642 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9643 else
9644 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9645 break;
9646
9647 case GT_EXPR:
9648 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9649 if (GET_MODE_CLASS (mode) == MODE_INT
9650 && ! can_compare_p (GT, mode, ccp_jump))
9651 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9652 else
9653 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9654 break;
9655
9656 case GE_EXPR:
9657 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9658 if (GET_MODE_CLASS (mode) == MODE_INT
9659 && ! can_compare_p (GE, mode, ccp_jump))
9660 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9661 else
9662 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9663 break;
9664
9665 case UNORDERED_EXPR:
9666 case ORDERED_EXPR:
9667 {
9668 enum rtx_code cmp, rcmp;
9669 int do_rev;
9670
9671 if (code == UNORDERED_EXPR)
9672 cmp = UNORDERED, rcmp = ORDERED;
9673 else
9674 cmp = ORDERED, rcmp = UNORDERED;
9675 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9676
9677 do_rev = 0;
9678 if (! can_compare_p (cmp, mode, ccp_jump)
9679 && (can_compare_p (rcmp, mode, ccp_jump)
9680 /* If the target doesn't provide either UNORDERED or ORDERED
9681 comparisons, canonicalize on UNORDERED for the library. */
9682 || rcmp == UNORDERED))
9683 do_rev = 1;
9684
9685 if (! do_rev)
9686 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9687 else
9688 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9689 }
9690 break;
9691
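    /* The unordered relational codes below share one expansion: RCODE1 is
       the rtx code used when the target can do the combined comparison
       directly, and TCODE2 is the ordered tree comparison used in the
       UNORDERED || TCODE2 fallback.  */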
9692 {
9693 enum rtx_code rcode1;
9694 enum tree_code tcode2;
9695
9696 case UNLT_EXPR:
9697 rcode1 = UNLT;
9698 tcode2 = LT_EXPR;
9699 goto unordered_bcc;
9700 case UNLE_EXPR:
9701 rcode1 = UNLE;
9702 tcode2 = LE_EXPR;
9703 goto unordered_bcc;
9704 case UNGT_EXPR:
9705 rcode1 = UNGT;
9706 tcode2 = GT_EXPR;
9707 goto unordered_bcc;
9708 case UNGE_EXPR:
9709 rcode1 = UNGE;
9710 tcode2 = GE_EXPR;
9711 goto unordered_bcc;
9712 case UNEQ_EXPR:
9713 rcode1 = UNEQ;
9714 tcode2 = EQ_EXPR;
9715 goto unordered_bcc;
9716
9717 unordered_bcc:
9718 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9719 if (can_compare_p (rcode1, mode, ccp_jump))
9720 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9721 if_true_label);
9722 else
9723 {
9724 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9725 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9726 tree cmp0, cmp1;
9727
9728 /* If the target doesn't support combined unordered
9729 compares, decompose into UNORDERED + comparison. */
9730 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9731 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9732 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9733 do_jump (exp, if_false_label, if_true_label);
9734 }
9735 }
9736 break;
9737
9738 default:
9739 normal:
9740 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9741 #if 0
9742 /* This is not needed any more and causes poor code since it causes
9743 comparisons and tests from non-SI objects to have different code
9744 sequences. */
9745 /* Copy to register to avoid generating bad insns by cse
9746 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9747 if (!cse_not_expected && GET_CODE (temp) == MEM)
9748 temp = copy_to_reg (temp);
9749 #endif
9750 do_pending_stack_adjust ();
9751 /* Do any postincrements in the expression that was tested. */
9752 emit_queue ();
9753
9754 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9755 {
9756 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9757 if (target)
9758 emit_jump (target);
9759 }
9760 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9761 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9762 /* Note swapping the labels gives us not-equal. */
9763 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9764 else if (GET_MODE (temp) != VOIDmode)
9765 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9766 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9767 GET_MODE (temp), NULL_RTX, 0,
9768 if_false_label, if_true_label);
9769 else
9770 abort ();
9771 }
9772
9773 if (drop_through_label)
9774 {
9775 /* If do_jump produces code that might be jumped around,
9776 do any stack adjusts from that code, before the place
9777 where control merges in. */
9778 do_pending_stack_adjust ();
9779 emit_label (drop_through_label);
9780 }
9781 }
9782 \f
9783 /* Given a comparison expression EXP for values too wide to be compared
9784 with one insn, test the comparison and jump to the appropriate label.
9785 The code of EXP is ignored; we always test GT if SWAP is 0,
9786 and LT if SWAP is 1. */
9787
9788 static void
9789 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9790 tree exp;
9791 int swap;
9792 rtx if_false_label, if_true_label;
9793 {
9794 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9795 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9796 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9797 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9798
9799 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9800 }
9801
9802 /* Compare OP0 with OP1, word at a time, in mode MODE.
9803 UNSIGNEDP says to do unsigned comparison.
9804 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9805
9806 void
9807 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9808 enum machine_mode mode;
9809 int unsignedp;
9810 rtx op0, op1;
9811 rtx if_false_label, if_true_label;
9812 {
9813 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9814 rtx drop_through_label = 0;
9815 int i;
9816
9817 if (! if_true_label || ! if_false_label)
9818 drop_through_label = gen_label_rtx ();
9819 if (! if_true_label)
9820 if_true_label = drop_through_label;
9821 if (! if_false_label)
9822 if_false_label = drop_through_label;
9823
9824 /* Compare a word at a time, high order first. */
9825 for (i = 0; i < nwords; i++)
9826 {
9827 rtx op0_word, op1_word;
9828
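	  /* operand_subword numbers words in memory order, so word 0 is the
	     most significant only when WORDS_BIG_ENDIAN; otherwise count
	     down from the top so the high-order word is still compared
	     first.  */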
9829 if (WORDS_BIG_ENDIAN)
9830 {
9831 op0_word = operand_subword_force (op0, i, mode);
9832 op1_word = operand_subword_force (op1, i, mode);
9833 }
9834 else
9835 {
9836 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9837 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9838 }
9839
9840 	  /* All but the high-order word must be compared as unsigned.  */
9841 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9842 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9843 NULL_RTX, if_true_label);
9844
9845 /* Consider lower words only if these are equal. */
9846 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9847 NULL_RTX, 0, NULL_RTX, if_false_label);
9848 }
9849
9850 if (if_false_label)
9851 emit_jump (if_false_label);
9852 if (drop_through_label)
9853 emit_label (drop_through_label);
9854 }
9855
9856 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9857 with one insn, test the comparison and jump to the appropriate label. */
9858
9859 static void
9860 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9861 tree exp;
9862 rtx if_false_label, if_true_label;
9863 {
9864 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9865 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9866 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9867 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9868 int i;
9869 rtx drop_through_label = 0;
9870
9871 if (! if_false_label)
9872 drop_through_label = if_false_label = gen_label_rtx ();
9873
9874 for (i = 0; i < nwords; i++)
9875 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9876 operand_subword_force (op1, i, mode),
9877 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9878 word_mode, NULL_RTX, 0, if_false_label,
9879 NULL_RTX);
9880
9881 if (if_true_label)
9882 emit_jump (if_true_label);
9883 if (drop_through_label)
9884 emit_label (drop_through_label);
9885 }
9886 \f
9887 /* Jump according to whether OP0 is 0.
9888 We assume that OP0 has an integer mode that is too wide
9889 for the available compare insns. */
9890
9891 void
9892 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9893 rtx op0;
9894 rtx if_false_label, if_true_label;
9895 {
9896 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9897 rtx part;
9898 int i;
9899 rtx drop_through_label = 0;
9900
9901 /* The fastest way of doing this comparison on almost any machine is to
9902 "or" all the words and compare the result. If all have to be loaded
9903 from memory and this is a very wide item, it's possible this may
9904 be slower, but that's highly unlikely. */
9905
9906 part = gen_reg_rtx (word_mode);
9907 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9908 for (i = 1; i < nwords && part != 0; i++)
9909 part = expand_binop (word_mode, ior_optab, part,
9910 operand_subword_force (op0, i, GET_MODE (op0)),
9911 part, 1, OPTAB_WIDEN);
9912
9913 if (part != 0)
9914 {
9915 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9916 NULL_RTX, 0, if_false_label, if_true_label);
9917
9918 return;
9919 }
9920
9921 /* If we couldn't do the "or" simply, do this with a series of compares. */
9922 if (! if_false_label)
9923 drop_through_label = if_false_label = gen_label_rtx ();
9924
9925 for (i = 0; i < nwords; i++)
9926 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9927 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9928 if_false_label, NULL_RTX);
9929
9930 if (if_true_label)
9931 emit_jump (if_true_label);
9932
9933 if (drop_through_label)
9934 emit_label (drop_through_label);
9935 }
9936 \f
9937 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
9938    (including code to compute the values to be compared)
9939    and set (CC0) according to the result.
9940 The decision as to signed or unsigned comparison must be made by the caller.
9941
9942 We force a stack adjustment unless there are currently
9943 things pushed on the stack that aren't yet used.
9944
9945 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9946 compared.
9947
9948 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9949 size of MODE should be used. */
9950
9951 rtx
9952 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9953 register rtx op0, op1;
9954 enum rtx_code code;
9955 int unsignedp;
9956 enum machine_mode mode;
9957 rtx size;
9958 unsigned int align;
9959 {
9960 rtx tem;
9961
9962 /* If one operand is constant, make it the second one. Only do this
9963 if the other operand is not constant as well. */
9964
9965 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9966 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9967 {
9968 tem = op0;
9969 op0 = op1;
9970 op1 = tem;
9971 code = swap_condition (code);
9972 }
9973
9974 if (flag_force_mem)
9975 {
9976 op0 = force_not_mem (op0);
9977 op1 = force_not_mem (op1);
9978 }
9979
9980 do_pending_stack_adjust ();
9981
9982 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9983 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9984 return tem;
9985
9986 #if 0
9987 /* There's no need to do this now that combine.c can eliminate lots of
9988 sign extensions. This can be less efficient in certain cases on other
9989 machines. */
9990
9991 /* If this is a signed equality comparison, we can do it as an
9992 unsigned comparison since zero-extension is cheaper than sign
9993 extension and comparisons with zero are done as unsigned. This is
9994 the case even on machines that can do fast sign extension, since
9995 zero-extension is easier to combine with other operations than
9996 sign-extension is. If we are comparing against a constant, we must
9997 convert it to what it would look like unsigned. */
9998 if ((code == EQ || code == NE) && ! unsignedp
9999 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10000 {
10001 if (GET_CODE (op1) == CONST_INT
10002 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10003 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10004 unsignedp = 1;
10005 }
10006 #endif
10007
10008 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10009
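  /* Return an rtx of the form (CODE cc0 0) describing the comparison
     just emitted.  */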
10010 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10011 }
10012
10013 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10014 The decision as to signed or unsigned comparison must be made by the caller.
10015
10016 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10017 compared.
10018
10019 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10020 size of MODE should be used. */
10021
10022 void
10023 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
10024 if_false_label, if_true_label)
10025 register rtx op0, op1;
10026 enum rtx_code code;
10027 int unsignedp;
10028 enum machine_mode mode;
10029 rtx size;
10030 unsigned int align;
10031 rtx if_false_label, if_true_label;
10032 {
10033 rtx tem;
10034 int dummy_true_label = 0;
10035
10036 /* Reverse the comparison if that is safe and we want to jump if it is
10037 false. */
10038 if (! if_true_label && ! FLOAT_MODE_P (mode))
10039 {
10040 if_true_label = if_false_label;
10041 if_false_label = 0;
10042 code = reverse_condition (code);
10043 }
10044
10045 /* If one operand is constant, make it the second one. Only do this
10046 if the other operand is not constant as well. */
10047
10048 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10049 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10050 {
10051 tem = op0;
10052 op0 = op1;
10053 op1 = tem;
10054 code = swap_condition (code);
10055 }
10056
10057 if (flag_force_mem)
10058 {
10059 op0 = force_not_mem (op0);
10060 op1 = force_not_mem (op1);
10061 }
10062
10063 do_pending_stack_adjust ();
10064
10065 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10066 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10067 {
10068 if (tem == const_true_rtx)
10069 {
10070 if (if_true_label)
10071 emit_jump (if_true_label);
10072 }
10073 else
10074 {
10075 if (if_false_label)
10076 emit_jump (if_false_label);
10077 }
10078 return;
10079 }
10080
10081 #if 0
10082 /* There's no need to do this now that combine.c can eliminate lots of
10083 sign extensions. This can be less efficient in certain cases on other
10084 machines. */
10085
10086 /* If this is a signed equality comparison, we can do it as an
10087 unsigned comparison since zero-extension is cheaper than sign
10088 extension and comparisons with zero are done as unsigned. This is
10089 the case even on machines that can do fast sign extension, since
10090 zero-extension is easier to combine with other operations than
10091 sign-extension is. If we are comparing against a constant, we must
10092 convert it to what it would look like unsigned. */
10093 if ((code == EQ || code == NE) && ! unsignedp
10094 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10095 {
10096 if (GET_CODE (op1) == CONST_INT
10097 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10098 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10099 unsignedp = 1;
10100 }
10101 #endif
10102
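  /* emit_cmp_and_jump_insns needs a label to branch to when the comparison
     is true, so make a dummy one if the caller supplied only a false label
     and define it right after the jump.  */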
10103 if (! if_true_label)
10104 {
10105 dummy_true_label = 1;
10106 if_true_label = gen_label_rtx ();
10107 }
10108
10109 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10110 if_true_label);
10111
10112 if (if_false_label)
10113 emit_jump (if_false_label);
10114 if (dummy_true_label)
10115 emit_label (if_true_label);
10116 }
10117
10118 /* Generate code for a comparison expression EXP (including code to compute
10119 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10120 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10121 generated code will drop through.
10122 SIGNED_CODE should be the rtx operation for this comparison for
10123 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10124
10125 We force a stack adjustment unless there are currently
10126 things pushed on the stack that aren't yet used. */
10127
10128 static void
10129 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10130 if_true_label)
10131 register tree exp;
10132 enum rtx_code signed_code, unsigned_code;
10133 rtx if_false_label, if_true_label;
10134 {
10135 unsigned int align0, align1;
10136 register rtx op0, op1;
10137 register tree type;
10138 register enum machine_mode mode;
10139 int unsignedp;
10140 enum rtx_code code;
10141
10142 /* Don't crash if the comparison was erroneous. */
10143 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10144 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10145 return;
10146
10147 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10148 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10149 mode = TYPE_MODE (type);
10150 unsignedp = TREE_UNSIGNED (type);
10151 code = unsignedp ? unsigned_code : signed_code;
10152
10153 #ifdef HAVE_canonicalize_funcptr_for_compare
10154 /* If function pointers need to be "canonicalized" before they can
10155 be reliably compared, then canonicalize them. */
10156 if (HAVE_canonicalize_funcptr_for_compare
10157 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10158 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10159 == FUNCTION_TYPE))
10160 {
10161 rtx new_op0 = gen_reg_rtx (mode);
10162
10163 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10164 op0 = new_op0;
10165 }
10166
10167 if (HAVE_canonicalize_funcptr_for_compare
10168 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10169 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10170 == FUNCTION_TYPE))
10171 {
10172 rtx new_op1 = gen_reg_rtx (mode);
10173
10174 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10175 op1 = new_op1;
10176 }
10177 #endif
10178
10179 /* Do any postincrements in the expression that was tested. */
10180 emit_queue ();
10181
10182 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10183 ((mode == BLKmode)
10184 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10185 MIN (align0, align1),
10186 if_false_label, if_true_label);
10187 }
10188 \f
10189 /* Generate code to calculate EXP using a store-flag instruction
10190 and return an rtx for the result. EXP is either a comparison
10191 or a TRUTH_NOT_EXPR whose operand is a comparison.
10192
10193 If TARGET is nonzero, store the result there if convenient.
10194
10195 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10196 cheap.
10197
10198 Return zero if there is no suitable set-flag instruction
10199 available on this machine.
10200
10201 Once expand_expr has been called on the arguments of the comparison,
10202 we are committed to doing the store flag, since it is not safe to
10203 re-evaluate the expression. We emit the store-flag insn by calling
10204 emit_store_flag, but only expand the arguments if we have a reason
10205 to believe that emit_store_flag will be successful. If we think that
10206 it will, but it isn't, we have to simulate the store-flag with a
10207 set/jump/set sequence. */
10208
10209 static rtx
10210 do_store_flag (exp, target, mode, only_cheap)
10211 tree exp;
10212 rtx target;
10213 enum machine_mode mode;
10214 int only_cheap;
10215 {
10216 enum rtx_code code;
10217 tree arg0, arg1, type;
10218 tree tem;
10219 enum machine_mode operand_mode;
10220 int invert = 0;
10221 int unsignedp;
10222 rtx op0, op1;
10223 enum insn_code icode;
10224 rtx subtarget = target;
10225 rtx result, label;
10226
10227 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10228 result at the end. We can't simply invert the test since it would
10229 have already been inverted if it were valid. This case occurs for
10230 some floating-point comparisons. */
10231
10232 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10233 invert = 1, exp = TREE_OPERAND (exp, 0);
10234
10235 arg0 = TREE_OPERAND (exp, 0);
10236 arg1 = TREE_OPERAND (exp, 1);
10237 type = TREE_TYPE (arg0);
10238 operand_mode = TYPE_MODE (type);
10239 unsignedp = TREE_UNSIGNED (type);
10240
10241 /* We won't bother with BLKmode store-flag operations because it would mean
10242 passing a lot of information to emit_store_flag. */
10243 if (operand_mode == BLKmode)
10244 return 0;
10245
10246 /* We won't bother with store-flag operations involving function pointers
10247 when function pointers must be canonicalized before comparisons. */
10248 #ifdef HAVE_canonicalize_funcptr_for_compare
10249 if (HAVE_canonicalize_funcptr_for_compare
10250 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10251 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10252 == FUNCTION_TYPE))
10253 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10254 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10255 == FUNCTION_TYPE))))
10256 return 0;
10257 #endif
10258
10259 STRIP_NOPS (arg0);
10260 STRIP_NOPS (arg1);
10261
10262 /* Get the rtx comparison code to use. We know that EXP is a comparison
10263 operation of some type. Some comparisons against 1 and -1 can be
10264 converted to comparisons with zero. Do so here so that the tests
10265 below will be aware that we have a comparison with zero. These
10266 tests will not catch constants in the first operand, but constants
10267 are rarely passed as the first operand. */
10268
10269 switch (TREE_CODE (exp))
10270 {
10271 case EQ_EXPR:
10272 code = EQ;
10273 break;
10274 case NE_EXPR:
10275 code = NE;
10276 break;
10277 case LT_EXPR:
10278 if (integer_onep (arg1))
10279 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10280 else
10281 code = unsignedp ? LTU : LT;
10282 break;
10283 case LE_EXPR:
10284 if (! unsignedp && integer_all_onesp (arg1))
10285 arg1 = integer_zero_node, code = LT;
10286 else
10287 code = unsignedp ? LEU : LE;
10288 break;
10289 case GT_EXPR:
10290 if (! unsignedp && integer_all_onesp (arg1))
10291 arg1 = integer_zero_node, code = GE;
10292 else
10293 code = unsignedp ? GTU : GT;
10294 break;
10295 case GE_EXPR:
10296 if (integer_onep (arg1))
10297 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10298 else
10299 code = unsignedp ? GEU : GE;
10300 break;
10301
10302 case UNORDERED_EXPR:
10303 code = UNORDERED;
10304 break;
10305 case ORDERED_EXPR:
10306 code = ORDERED;
10307 break;
10308 case UNLT_EXPR:
10309 code = UNLT;
10310 break;
10311 case UNLE_EXPR:
10312 code = UNLE;
10313 break;
10314 case UNGT_EXPR:
10315 code = UNGT;
10316 break;
10317 case UNGE_EXPR:
10318 code = UNGE;
10319 break;
10320 case UNEQ_EXPR:
10321 code = UNEQ;
10322 break;
10323
10324 default:
10325 abort ();
10326 }
10327
10328 /* Put a constant second. */
10329 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10330 {
10331 tem = arg0; arg0 = arg1; arg1 = tem;
10332 code = swap_condition (code);
10333 }
10334
10335 /* If this is an equality or inequality test of a single bit, we can
10336 do this by shifting the bit being tested to the low-order bit and
10337 masking the result with the constant 1. If the condition was EQ,
10338 we xor it with 1. This does not require an scc insn and is faster
10339 than an scc insn even if we have it. */
10340
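/* For example, (X & 0x20) != 0 is computed as (X >> 5) & 1, and
(X & 0x20) == 0 as ((X >> 5) ^ 1) & 1. When the tested bit is the
sign bit, the final AND with 1 can be omitted. */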
10341 if ((code == NE || code == EQ)
10342 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10343 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10344 {
10345 tree inner = TREE_OPERAND (arg0, 0);
10346 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10347 int ops_unsignedp;
10348
10349 /* If INNER is a right shift by a constant and the shift count plus
10350 BITNUM does not overflow, adjust BITNUM and INNER. */
10351
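/* For example, ((X >> 3) & 2) != 0 really tests bit 4 of X, so we can
test X directly with BITNUM adjusted to 4. */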
10352 if (TREE_CODE (inner) == RSHIFT_EXPR
10353 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10354 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10355 && bitnum < TYPE_PRECISION (type)
10356 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10357 bitnum - TYPE_PRECISION (type)))
10358 {
10359 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10360 inner = TREE_OPERAND (inner, 0);
10361 }
10362
10363 /* If we are going to be able to omit the AND below, we must do our
10364 operations as unsigned. If we must use the AND, we have a choice.
10365 Normally unsigned is faster, but on some machines signed is faster. */
10366 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10367 #ifdef LOAD_EXTEND_OP
10368 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10369 #else
10370 : 1
10371 #endif
10372 );
10373
10374 if (! get_subtarget (subtarget)
10375 || GET_MODE (subtarget) != operand_mode
10376 || ! safe_from_p (subtarget, inner, 1))
10377 subtarget = 0;
10378
10379 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10380
10381 if (bitnum != 0)
10382 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10383 size_int (bitnum), subtarget, ops_unsignedp);
10384
10385 if (GET_MODE (op0) != mode)
10386 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10387
10388 if ((code == EQ && ! invert) || (code == NE && invert))
10389 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10390 ops_unsignedp, OPTAB_LIB_WIDEN);
10391
10392 /* Put the AND last so it can combine with more things. */
10393 if (bitnum != TYPE_PRECISION (type) - 1)
10394 op0 = expand_and (op0, const1_rtx, subtarget);
10395
10396 return op0;
10397 }
10398
10399 /* Now see if we are likely to be able to do this. Return if not. */
10400 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10401 return 0;
10402
10403 icode = setcc_gen_code[(int) code];
10404 if (icode == CODE_FOR_nothing
10405 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10406 {
10407 /* We can only do this if it is one of the special cases that
10408 can be handled without an scc insn. */
10409 if ((code == LT && integer_zerop (arg1))
10410 || (! only_cheap && code == GE && integer_zerop (arg1)))
10411 ;
10412 else if (BRANCH_COST >= 0
10413 && ! only_cheap && (code == NE || code == EQ)
10414 && TREE_CODE (type) != REAL_TYPE
10415 && ((abs_optab->handlers[(int) operand_mode].insn_code
10416 != CODE_FOR_nothing)
10417 || (ffs_optab->handlers[(int) operand_mode].insn_code
10418 != CODE_FOR_nothing)))
10419 ;
10420 else
10421 return 0;
10422 }
10423
10424 preexpand_calls (exp);
10425 if (! get_subtarget (target)
10426 || GET_MODE (subtarget) != operand_mode
10427 || ! safe_from_p (subtarget, arg1, 1))
10428 subtarget = 0;
10429
10430 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10431 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10432
10433 if (target == 0)
10434 target = gen_reg_rtx (mode);
10435
10436 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10437 because, if emit_store_flag does anything, it will succeed and
10438 OP0 and OP1 will not be used subsequently. */
10439
10440 result = emit_store_flag (target, code,
10441 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10442 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10443 operand_mode, unsignedp, 1);
10444
10445 if (result)
10446 {
10447 if (invert)
10448 result = expand_binop (mode, xor_optab, result, const1_rtx,
10449 result, 0, OPTAB_LIB_WIDEN);
10450 return result;
10451 }
10452
10453 /* If this failed, we have to do this with set/compare/jump/set code. */
10454 if (GET_CODE (target) != REG
10455 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10456 target = gen_reg_rtx (GET_MODE (target));
10457
10458 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10459 result = compare_from_rtx (op0, op1, code, unsignedp,
10460 operand_mode, NULL_RTX, 0);
10461 if (GET_CODE (result) == CONST_INT)
10462 return (((result == const0_rtx && ! invert)
10463 || (result != const0_rtx && invert))
10464 ? const0_rtx : const1_rtx);
10465
10466 label = gen_label_rtx ();
10467 if (bcc_gen_fctn[(int) code] == 0)
10468 abort ();
10469
10470 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10471 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10472 emit_label (label);
10473
10474 return target;
10475 }
10476 \f
10477 /* Generate a tablejump instruction (used for switch statements). */
10478
10479 #ifdef HAVE_tablejump
10480
10481 /* INDEX is the value being switched on, with the lowest value
10482 in the table already subtracted.
10483 MODE is its expected mode (needed if INDEX is constant).
10484 RANGE is the length of the jump table.
10485 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10486
10487 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10488 index value is out of range. */
10489
10490 void
10491 do_tablejump (index, mode, range, table_label, default_label)
10492 rtx index, range, table_label, default_label;
10493 enum machine_mode mode;
10494 {
10495 register rtx temp, vector;
10496
10497 /* Do an unsigned comparison (in the proper mode) between the index
10498 expression and the value which represents the length of the range.
10499 Since we just finished subtracting the lower bound of the range
10500 from the index expression, this comparison allows us to simultaneously
10501 check that the original index expression value is both greater than
10502 or equal to the minimum value of the range and less than or equal to
10503 the maximum value of the range. */
10504
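/* For example, with RANGE == 7 and the lowest case value already
subtracted from INDEX, an original index below the minimum wraps around
to a huge unsigned value, and an index more than 7 above the minimum
yields a value greater than 7, so both compare GTU to RANGE and branch
to DEFAULT_LABEL. */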
10505 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10506 0, default_label);
10507
10508 /* If index is in range, it must fit in Pmode.
10509 Convert to Pmode so we can index with it. */
10510 if (mode != Pmode)
10511 index = convert_to_mode (Pmode, index, 1);
10512
10513 /* Don't let a MEM slip through, because then the INDEX that comes
10514 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10515 and break_out_memory_refs will go to work on it and mess it up. */
10516 #ifdef PIC_CASE_VECTOR_ADDRESS
10517 if (flag_pic && GET_CODE (index) != REG)
10518 index = copy_to_mode_reg (Pmode, index);
10519 #endif
10520
10521 /* If flag_force_addr were to affect this address
10522 it could interfere with the tricky assumptions made
10523 about addresses that contain label-refs,
10524 which may be valid only very near the tablejump itself. */
10525 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10526 GET_MODE_SIZE, because this indicates how large insns are. The other
10527 uses should all be Pmode, because they are addresses. This code
10528 could fail if addresses and insns are not the same size. */
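/* The address built here has the form
(plus (mult INDEX (const_int SIZE)) (label_ref TABLE_LABEL)),
where SIZE is the byte size of one jump-table entry. */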
10529 index = gen_rtx_PLUS (Pmode,
10530 gen_rtx_MULT (Pmode, index,
10531 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10532 gen_rtx_LABEL_REF (Pmode, table_label));
10533 #ifdef PIC_CASE_VECTOR_ADDRESS
10534 if (flag_pic)
10535 index = PIC_CASE_VECTOR_ADDRESS (index);
10536 else
10537 #endif
10538 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10539 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10540 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10541 RTX_UNCHANGING_P (vector) = 1;
10542 convert_move (temp, vector, 0);
10543
10544 emit_jump_insn (gen_tablejump (temp, table_label));
10545
10546 /* If we are generating PIC code or if the table is PC-relative, the
10547 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10548 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10549 emit_barrier ();
10550 }
10551
10552 #endif /* HAVE_tablejump */