1 /* Subroutines for manipulating rtx's in semantically interesting ways.
2 Copyright (C) 1987, 1991 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21 #include "config.h"
22 #include "rtl.h"
23 #include "tree.h"
24 #include "flags.h"
25 #include "expr.h"
26 #include "hard-reg-set.h"
27 #include "insn-config.h"
28 #include "recog.h"
29 #include "insn-flags.h"
30 #include "insn-codes.h"
31
32 /* Return an rtx for the sum of X and the integer C. */
33
34 rtx
35 plus_constant (x, c)
36 register rtx x;
37 register int c;
38 {
39 register RTX_CODE code;
40 register enum machine_mode mode;
41 register rtx tem;
42 int all_constant = 0;
43
44 if (c == 0)
45 return x;
46
47 restart:
48
49 code = GET_CODE (x);
50 mode = GET_MODE (x);
51 switch (code)
52 {
53 case CONST_INT:
54 return gen_rtx (CONST_INT, VOIDmode, (INTVAL (x) + c));
55
56 case CONST_DOUBLE:
57 {
58 int l1 = CONST_DOUBLE_LOW (x);
59 int h1 = CONST_DOUBLE_HIGH (x);
60 int l2 = c;
61 int h2 = c < 0 ? ~0 : 0;
62 int lv, hv;
63
64 add_double (l1, h1, l2, h2, &lv, &hv);
65
66 return immed_double_const (lv, hv, VOIDmode);
67 }
68
69 case MEM:
70 /* If this is a reference to the constant pool, try replacing it with
71 a reference to a new constant. If the resulting address isn't
72 valid, don't return it because we have no way to validize it. */
73 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
74 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
75 {
76 tem
77 = force_const_mem (GET_MODE (x),
78 plus_constant (get_pool_constant (XEXP (x, 0)),
79 c));
80 if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
81 return tem;
82 }
83 break;
84
85 case CONST:
86 /* If adding to something entirely constant, set a flag
87 so that we can add a CONST around the result. */
88 x = XEXP (x, 0);
89 all_constant = 1;
90 goto restart;
91
92 case SYMBOL_REF:
93 case LABEL_REF:
94 all_constant = 1;
95 break;
96
97 case PLUS:
98 /* The interesting case is adding the integer to a sum.
99 Look for constant term in the sum and combine
100 with C. For an integer constant term, we make a combined
101 integer. For a constant term that is not an explicit integer,
102 we cannot really combine, but group them together anyway. */
103 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
104 {
105 c += INTVAL (XEXP (x, 0));
106 x = XEXP (x, 1);
107 }
108 else if (GET_CODE (XEXP (x, 1)) == CONST_INT)
109 {
110 c += INTVAL (XEXP (x, 1));
111 x = XEXP (x, 0);
112 }
113 else if (CONSTANT_P (XEXP (x, 0)))
114 return gen_rtx (PLUS, mode,
115 plus_constant (XEXP (x, 0), c),
116 XEXP (x, 1));
117 else if (CONSTANT_P (XEXP (x, 1)))
118 return gen_rtx (PLUS, mode,
119 XEXP (x, 0),
120 plus_constant (XEXP (x, 1), c));
121 }
122
123 if (c != 0)
124 x = gen_rtx (PLUS, mode, x, gen_rtx (CONST_INT, VOIDmode, c));
125
126 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
127 return x;
128 else if (all_constant)
129 return gen_rtx (CONST, mode, x);
130 else
131 return x;
132 }
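
/* Illustrative example (not part of the original source): given an
   address like (plus (reg fp) (const_int 8)), built as

       rtx x = gen_rtx (PLUS, Pmode, frame_pointer_rtx,
                        gen_rtx (CONST_INT, VOIDmode, 8));

   the call plus_constant (x, 4) takes the PLUS case above, folds the
   two integer terms, and returns (plus (reg fp) (const_int 12))
   instead of wrapping another PLUS around the old sum.  */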
133
134 /* This is the same as `plus_constant', except that it handles LO_SUM. */
135
136 rtx
137 plus_constant_for_output (x, c)
138 register rtx x;
139 register int c;
140 {
141 register RTX_CODE code = GET_CODE (x);
142 register enum machine_mode mode = GET_MODE (x);
143 int all_constant = 0;
144
145 if (GET_CODE (x) == LO_SUM)
146 return gen_rtx (LO_SUM, mode, XEXP (x, 0),
147 plus_constant_for_output (XEXP (x, 1), c));
148
149 else
150 return plus_constant (x, c);
151 }
152 \f
153 /* If X is a sum, return a new sum like X but lacking any constant terms.
154 Add all the removed constant terms into *CONSTPTR.
155 X itself is not altered. The result != X if and only if
156 it is not isomorphic to X. */
157
158 rtx
159 eliminate_constant_term (x, constptr)
160 rtx x;
161 rtx *constptr;
162 {
163 register rtx x0, x1;
164 rtx tem;
165
166 if (GET_CODE (x) != PLUS)
167 return x;
168
169 /* First handle constants appearing at this level explicitly. */
170 if (GET_CODE (XEXP (x, 1)) == CONST_INT
171 && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
172 XEXP (x, 1)))
173 && GET_CODE (tem) == CONST_INT)
174 {
175 *constptr = tem;
176 return eliminate_constant_term (XEXP (x, 0), constptr);
177 }
178
179 tem = const0_rtx;
180 x0 = eliminate_constant_term (XEXP (x, 0), &tem);
181 x1 = eliminate_constant_term (XEXP (x, 1), &tem);
182 if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
183 && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
184 *constptr, tem))
185 && GET_CODE (tem) == CONST_INT)
186 {
187 *constptr = tem;
188 return gen_rtx (PLUS, GET_MODE (x), x0, x1);
189 }
190
191 return x;
192 }
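
/* Worked example (illustrative, not from the original source): with
   *CONSTPTR starting at const0_rtx and
   X = (plus (plus (reg) (const_int 8)) (const_int 4)),
   the first call folds the 4 into *CONSTPTR and recurses on
   (plus (reg) (const_int 8)), which folds the 8 as well; the final
   result is (reg), with *CONSTPTR holding (const_int 12).  */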
193
194 /* Returns the insn that next references REG after INSN, or 0
195 if REG is clobbered before it is next referenced or we cannot find
196 an insn that references REG in a straight-line piece of code. */
197
198 rtx
199 find_next_ref (reg, insn)
200 rtx reg;
201 rtx insn;
202 {
203 rtx next;
204
205 for (insn = NEXT_INSN (insn); insn; insn = next)
206 {
207 next = NEXT_INSN (insn);
208 if (GET_CODE (insn) == NOTE)
209 continue;
210 if (GET_CODE (insn) == CODE_LABEL
211 || GET_CODE (insn) == BARRIER)
212 return 0;
213 if (GET_CODE (insn) == INSN
214 || GET_CODE (insn) == JUMP_INSN
215 || GET_CODE (insn) == CALL_INSN)
216 {
217 if (reg_set_p (reg, insn))
218 return 0;
219 if (reg_mentioned_p (reg, PATTERN (insn)))
220 return insn;
221 if (GET_CODE (insn) == JUMP_INSN)
222 {
223 if (simplejump_p (insn))
224 next = JUMP_LABEL (insn);
225 else
226 return 0;
227 }
228 if (GET_CODE (insn) == CALL_INSN
229 && REGNO (reg) < FIRST_PSEUDO_REGISTER
230 && call_used_regs[REGNO (reg)])
231 return 0;
232 }
233 else
234 abort ();
235 }
236 return 0;
237 }
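
/* Usage sketch (hypothetical, for illustration only): a caller that has
   just emitted INSN setting REG might write

       rtx use = find_next_ref (reg, insn);

   and treat USE == 0 as meaning either that REG is dead on this
   straight-line path or that the scan gave up at a CODE_LABEL,
   BARRIER, or call that clobbers REG.  */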
238
239 /* Return an rtx for the size in bytes of the value of EXP. */
240
241 rtx
242 expr_size (exp)
243 tree exp;
244 {
245 return expand_expr (size_in_bytes (TREE_TYPE (exp)),
246 0, TYPE_MODE (sizetype), 0);
247 }
248 \f
249 /* Return a copy of X in which all memory references
250 and all constants that involve symbol refs
251 have been replaced with new temporary registers.
252 Also emit code to load the memory locations and constants
253 into those registers.
254
255 If X contains no such constants or memory references,
256 X itself (not a copy) is returned.
257
258 If a constant is found in the address that is not a legitimate constant
259 in an insn, it is left alone in the hope that it might be valid in the
260 address.
261
262 X may contain no arithmetic except addition, subtraction and multiplication.
263 Values returned by expand_expr with 1 for sum_ok fit this constraint. */
264
265 static rtx
266 break_out_memory_refs (x)
267 register rtx x;
268 {
269 if (GET_CODE (x) == MEM
270 || (CONSTANT_P (x) && LEGITIMATE_CONSTANT_P (x)
271 && GET_MODE (x) != VOIDmode))
272 {
273 register rtx temp = force_reg (GET_MODE (x), x);
274 mark_reg_pointer (temp);
275 x = temp;
276 }
277 else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
278 || GET_CODE (x) == MULT)
279 {
280 register rtx op0 = break_out_memory_refs (XEXP (x, 0));
281 register rtx op1 = break_out_memory_refs (XEXP (x, 1));
282 if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
283 x = gen_rtx (GET_CODE (x), Pmode, op0, op1);
284 }
285 return x;
286 }
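
/* Illustrative example (not in the original source): an address such as
   (plus (mem (reg)) (const_int 4)) comes back as
   (plus (reg NEW) (const_int 4)), with a load of the old MEM into the
   new pseudo emitted as a side effect; the CONST_INT is left alone
   because its mode is VOIDmode.  */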
287
288 /* Given a memory address or facsimile X, construct a new address,
289 currently equivalent, that is stable: future stores won't change it.
290
291 X must be composed of constants, register and memory references
292 combined with addition, subtraction and multiplication:
293 in other words, just what you can get from expand_expr if sum_ok is 1.
294
295 Works by making copies of all regs and memory locations used
296 by X and combining them the same way X does.
297 You could also stabilize the reference to this address
298 by copying the address to a register with copy_to_reg;
299 but then you wouldn't get indexed addressing in the reference. */
300
301 rtx
302 copy_all_regs (x)
303 register rtx x;
304 {
305 if (GET_CODE (x) == REG)
306 {
307 if (REGNO (x) != FRAME_POINTER_REGNUM)
308 x = copy_to_reg (x);
309 }
310 else if (GET_CODE (x) == MEM)
311 x = copy_to_reg (x);
312 else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
313 || GET_CODE (x) == MULT)
314 {
315 register rtx op0 = copy_all_regs (XEXP (x, 0));
316 register rtx op1 = copy_all_regs (XEXP (x, 1));
317 if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
318 x = gen_rtx (GET_CODE (x), Pmode, op0, op1);
319 }
320 return x;
321 }
322 \f
323 /* Return something equivalent to X but valid as a memory address
324 for something of mode MODE. When X is not itself valid, this
325 works by copying X or subexpressions of it into registers. */
326
327 rtx
328 memory_address (mode, x)
329 enum machine_mode mode;
330 register rtx x;
331 {
332 register rtx oldx;
333
334 /* By passing constant addresses thru registers
335 we get a chance to cse them. */
336 if (! cse_not_expected && CONSTANT_P (x) && LEGITIMATE_CONSTANT_P (x))
337 return force_reg (Pmode, x);
338
339 /* Accept a QUEUED that refers to a REG
340 even though that isn't a valid address.
341 On attempting to put this in an insn we will call protect_from_queue
342 which will turn it into a REG, which is valid. */
343 if (GET_CODE (x) == QUEUED
344 && GET_CODE (QUEUED_VAR (x)) == REG)
345 return x;
346
347 /* We get better cse by rejecting indirect addressing at this stage.
348 Let the combiner create indirect addresses where appropriate.
349 For now, generate the code so that the subexpressions useful to share
350 are visible. But not if cse won't be done! */
351 oldx = x;
352 if (! cse_not_expected && GET_CODE (x) != REG)
353 x = break_out_memory_refs (x);
354
355 /* At this point, any valid address is accepted. */
356 GO_IF_LEGITIMATE_ADDRESS (mode, x, win);
357
358 /* If it was valid before but breaking out memory refs invalidated it,
359 use it the old way. */
360 if (memory_address_p (mode, oldx))
361 goto win2;
362
363 /* Perform machine-dependent transformations on X
364 in certain cases. This is not necessary since the code
365 below can handle all possible cases, but machine-dependent
366 transformations can make better code. */
367 LEGITIMIZE_ADDRESS (x, oldx, mode, win);
368
369 /* PLUS and MULT can appear in special ways
370 as the result of attempts to make an address usable for indexing.
371 Usually they are dealt with by calling force_operand, below.
372 But a sum containing constant terms is special
373 if removing them makes the sum a valid address:
374 then we generate that address in a register
375 and index off of it. We do this because it often makes
376 shorter code, and because the addresses thus generated
377 in registers often become common subexpressions. */
378 if (GET_CODE (x) == PLUS)
379 {
380 rtx constant_term = const0_rtx;
381 rtx y = eliminate_constant_term (x, &constant_term);
382 if (constant_term == const0_rtx
383 || ! memory_address_p (mode, y))
384 return force_operand (x, 0);
385
386 y = gen_rtx (PLUS, GET_MODE (x), copy_to_reg (y), constant_term);
387 if (! memory_address_p (mode, y))
388 return force_operand (x, 0);
389 return y;
390 }
391 if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
392 return force_operand (x, 0);
393
394 /* If we have a register that's an invalid address,
395 it must be a hard reg of the wrong class. Copy it to a pseudo. */
396 if (GET_CODE (x) == REG)
397 return copy_to_reg (x);
398
399 /* Last resort: copy the value to a register, since
400 the register is a valid address. */
401 return force_reg (Pmode, x);
402
403 win2:
404 x = oldx;
405 win:
406 if (flag_force_addr && ! cse_not_expected && GET_CODE (x) != REG
407 /* Don't copy an addr via a reg if it is one of our stack slots. */
408 && ! (GET_CODE (x) == PLUS
409 && (XEXP (x, 0) == virtual_stack_vars_rtx
410 || XEXP (x, 0) == virtual_incoming_args_rtx)))
411 {
412 if (general_operand (x, Pmode))
413 return force_reg (Pmode, x);
414 else
415 return force_operand (x, 0);
416 }
417 return x;
418 }
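
/* Usage sketch (illustrative): code that builds a MEM from an arbitrary
   address expression typically writes something like

       rtx mem = gen_rtx (MEM, SImode, memory_address (SImode, addr));

   so that whatever form ADDR had, the address actually stored in the
   MEM is one that is valid for SImode references on this machine.  */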
419
420 /* Like `memory_address' but pretend `flag_force_addr' is 0. */
421
422 rtx
423 memory_address_noforce (mode, x)
424 enum machine_mode mode;
425 rtx x;
426 {
427 int ambient_force_addr = flag_force_addr;
428 rtx val;
429
430 flag_force_addr = 0;
431 val = memory_address (mode, x);
432 flag_force_addr = ambient_force_addr;
433 return val;
434 }
435
436 /* Convert a mem ref into one with a valid memory address.
437 Pass through anything else unchanged. */
438
439 rtx
440 validize_mem (ref)
441 rtx ref;
442 {
443 if (GET_CODE (ref) != MEM)
444 return ref;
445 if (memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
446 return ref;
447 /* Don't alter REF itself, since that is probably a stack slot. */
448 return change_address (ref, GET_MODE (ref), XEXP (ref, 0));
449 }
450 \f
451 /* Return a modified copy of X with its memory address copied
452 into a temporary register to protect it from side effects.
453 If X is not a MEM, it is returned unchanged (and not copied).
454 It may be returned unchanged even if it is a MEM, when there is no need to change it. */
455
456 rtx
457 stabilize (x)
458 rtx x;
459 {
460 register rtx addr;
461 if (GET_CODE (x) != MEM)
462 return x;
463 addr = XEXP (x, 0);
464 if (rtx_unstable_p (addr))
465 {
466 rtx temp = copy_all_regs (addr);
467 rtx mem;
468 if (GET_CODE (temp) != REG)
469 temp = copy_to_reg (temp);
470 mem = gen_rtx (MEM, GET_MODE (x), temp);
471
472 /* Mark returned memref with in_struct if it's in an array or
473 structure. Copy const and volatile from original memref. */
474
475 MEM_IN_STRUCT_P (mem) = MEM_IN_STRUCT_P (x) || GET_CODE (addr) == PLUS;
476 RTX_UNCHANGING_P (mem) = RTX_UNCHANGING_P (x);
477 MEM_VOLATILE_P (mem) = MEM_VOLATILE_P (x);
478 return mem;
479 }
480 return x;
481 }
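
/* Illustrative example (not part of the original source): if X is
   (mem (plus (reg PSEUDO) (mem (reg)))), rtx_unstable_p is true, so
   stabilize copies the inner register and memory reference via
   copy_all_regs, forces the rebuilt sum into a fresh pseudo, and
   returns a new MEM through that pseudo; later stores can no longer
   change which location the returned MEM refers to.  */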
482 \f
483 /* Copy the value or contents of X to a new temp reg and return that reg. */
484
485 rtx
486 copy_to_reg (x)
487 rtx x;
488 {
489 register rtx temp = gen_reg_rtx (GET_MODE (x));
490
491 /* If not an operand, must be an address with PLUS and MULT so
492 do the computation. */
493 if (! general_operand (x, VOIDmode))
494 x = force_operand (x, temp);
495
496 if (x != temp)
497 emit_move_insn (temp, x);
498
499 return temp;
500 }
501
502 /* Like copy_to_reg but always give the new register mode Pmode
503 in case X is a constant. */
504
505 rtx
506 copy_addr_to_reg (x)
507 rtx x;
508 {
509 return copy_to_mode_reg (Pmode, x);
510 }
511
512 /* Like copy_to_reg but always give the new register mode MODE
513 in case X is a constant. */
514
515 rtx
516 copy_to_mode_reg (mode, x)
517 enum machine_mode mode;
518 rtx x;
519 {
520 register rtx temp = gen_reg_rtx (mode);
521
522 /* If not an operand, must be an address with PLUS and MULT so
523 do the computation. */
524 if (! general_operand (x, VOIDmode))
525 x = force_operand (x, temp);
526
527 if (GET_MODE (x) != mode && GET_MODE (x) != VOIDmode)
528 abort ();
529 if (x != temp)
530 emit_move_insn (temp, x);
531 return temp;
532 }
533
534 /* Load X into a register if it is not already one.
535 Use mode MODE for the register.
536 X should be valid for mode MODE, but it may be a constant which
537 is valid for all integer modes; that's why the caller must specify MODE.
538
539 The caller must not alter the value in the register we return,
540 since we mark it as a "constant" register. */
541
542 rtx
543 force_reg (mode, x)
544 enum machine_mode mode;
545 rtx x;
546 {
547 register rtx temp, insn;
548
549 if (GET_CODE (x) == REG)
550 return x;
551 temp = gen_reg_rtx (mode);
552 insn = emit_move_insn (temp, x);
553 /* Let optimizers know that TEMP's value never changes
554 and that X can be substituted for it. */
555 if (CONSTANT_P (x))
556 {
557 rtx note = find_reg_note (insn, REG_EQUAL, 0);
558
559 if (note)
560 XEXP (note, 0) = x;
561 else
562 REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_EQUAL, x, REG_NOTES (insn));
563 }
564 return temp;
565 }
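
/* Illustrative example (not in the original source): forcing a constant,

       rtx r = force_reg (SImode, gen_rtx (CONST_INT, VOIDmode, 42));

   emits a move of 42 into a fresh pseudo and attaches a REG_EQUAL note
   recording 42 to that insn, so later passes may substitute the
   constant for the register.  */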
566
567 /* If X is a memory ref, copy its contents to a new temp reg and return
568 that reg. Otherwise, return X. */
569
570 rtx
571 force_not_mem (x)
572 rtx x;
573 {
574 register rtx temp;
575 if (GET_CODE (x) != MEM || GET_MODE (x) == BLKmode)
576 return x;
577 temp = gen_reg_rtx (GET_MODE (x));
578 emit_move_insn (temp, x);
579 return temp;
580 }
581
582 /* Copy X to TARGET (if it's nonzero and a reg)
583 or to a new temp reg and return that reg.
584 MODE is the mode to use for X in case it is a constant. */
585
586 rtx
587 copy_to_suggested_reg (x, target, mode)
588 rtx x, target;
589 enum machine_mode mode;
590 {
591 register rtx temp;
592
593 if (target && GET_CODE (target) == REG)
594 temp = target;
595 else
596 temp = gen_reg_rtx (mode);
597
598 emit_move_insn (temp, x);
599 return temp;
600 }
601 \f
602 /* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
603 This pops when ADJUST is positive. ADJUST need not be constant. */
604
605 void
606 adjust_stack (adjust)
607 rtx adjust;
608 {
609 rtx temp;
610 adjust = protect_from_queue (adjust, 0);
611
612 if (adjust == const0_rtx)
613 return;
614
615 temp = expand_binop (Pmode,
616 #ifdef STACK_GROWS_DOWNWARD
617 add_optab,
618 #else
619 sub_optab,
620 #endif
621 stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
622 OPTAB_LIB_WIDEN);
623
624 if (temp != stack_pointer_rtx)
625 emit_move_insn (stack_pointer_rtx, temp);
626 }
627
628 /* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
629 This pushes when ADJUST is positive. ADJUST need not be constant. */
630
631 void
632 anti_adjust_stack (adjust)
633 rtx adjust;
634 {
635 rtx temp;
636 adjust = protect_from_queue (adjust, 0);
637
638 if (adjust == const0_rtx)
639 return;
640
641 temp = expand_binop (Pmode,
642 #ifdef STACK_GROWS_DOWNWARD
643 sub_optab,
644 #else
645 add_optab,
646 #endif
647 stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
648 OPTAB_LIB_WIDEN);
649
650 if (temp != stack_pointer_rtx)
651 emit_move_insn (stack_pointer_rtx, temp);
652 }
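
/* Usage sketch (illustrative): to reserve 16 bytes of stack space and
   later release them, independent of which way the stack grows, a
   caller could write

       rtx amount = gen_rtx (CONST_INT, VOIDmode, 16);
       anti_adjust_stack (amount);
       ...
       adjust_stack (amount);

   the STACK_GROWS_DOWNWARD conditionals above select the proper optab
   in each direction.  */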
653
654 /* Round the size of a block to be pushed up to the boundary required
655 by this machine. SIZE is the desired size, which need not be constant. */
656
657 rtx
658 round_push (size)
659 rtx size;
660 {
661 #ifdef STACK_BOUNDARY
662 int align = STACK_BOUNDARY / BITS_PER_UNIT;
663 if (align == 1)
664 return size;
665 if (GET_CODE (size) == CONST_INT)
666 {
667 int new = (INTVAL (size) + align - 1) / align * align;
668 if (INTVAL (size) != new)
669 size = gen_rtx (CONST_INT, VOIDmode, new);
670 }
671 else
672 {
673 size = expand_divmod (0, CEIL_DIV_EXPR, Pmode, size,
674 gen_rtx (CONST_INT, VOIDmode, align),
675 0, 1);
676 size = expand_mult (Pmode, size,
677 gen_rtx (CONST_INT, VOIDmode, align),
678 0, 1);
679 }
680 #endif /* STACK_BOUNDARY */
681 return size;
682 }
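
/* Worked example (illustrative): with STACK_BOUNDARY of 64 bits, ALIGN
   is 8 bytes, so a constant SIZE of 13 becomes (13 + 8 - 1) / 8 * 8
   = 16; for a non-constant SIZE the same ceiling-division and multiply
   are emitted as insns via expand_divmod and expand_mult.  */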
683 \f
684 /* Return an rtx representing the address of an area of memory dynamically
685 pushed on the stack. This region of memory is always aligned to
686 a multiple of BIGGEST_ALIGNMENT.
687
688 Any required stack pointer alignment is preserved.
689
690 SIZE is an rtx representing the size of the area.
691 TARGET is a place in which the address can be placed.
692
693 KNOWN_ALIGN is the alignment (in bits) that we know SIZE has. */
694
695 rtx
696 allocate_dynamic_stack_space (size, target, known_align)
697 rtx size;
698 rtx target;
699 int known_align;
700 {
701 /* Ensure the size is in the proper mode. */
702 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
703 size = convert_to_mode (Pmode, size, 1);
704
705 /* We will need to ensure that the address we return is aligned to
706 BIGGEST_ALIGNMENT. If STACK_DYNAMIC_OFFSET is defined, we don't
707 always know its final value at this point in the compilation (it
708 might depend on the size of the outgoing parameter lists, for
709 example), so we must align the value to be returned in that case.
710 (Note that STACK_DYNAMIC_OFFSET will have a default non-zero value if
711 STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
712 We must also do an alignment operation on the returned value if
713 the stack pointer alignment is less strict than BIGGEST_ALIGNMENT.
714
715 If we have to align, we must leave space in SIZE for the hole
716 that might result from the alignment operation. */
717
718 #if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET) || defined (ACCUMULATE_OUTGOING_ARGS)
719 #define MUST_ALIGN
720 #endif
721
722 #if ! defined (MUST_ALIGN) && (!defined(STACK_BOUNDARY) || STACK_BOUNDARY < BIGGEST_ALIGNMENT)
723 #define MUST_ALIGN
724 #endif
725
726 #ifdef MUST_ALIGN
727
728 if (GET_CODE (size) == CONST_INT)
729 size = gen_rtx (CONST_INT, VOIDmode,
730 INTVAL (size) + (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1));
731 else
732 size = expand_binop (Pmode, add_optab, size,
733 gen_rtx (CONST_INT, VOIDmode,
734 BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
735 0, 1, OPTAB_LIB_WIDEN);
736 #endif
737
738 #ifdef SETJMP_VIA_SAVE_AREA
739 /* If setjmp restores regs from a save area in the stack frame,
740 avoid clobbering the reg save area. Note that the offset of
741 virtual_incoming_args_rtx includes the preallocated stack args space.
742 It would be no problem to clobber that, but it's on the wrong side
743 of the old save area. */
744 {
745 rtx dynamic_offset
746 = expand_binop (Pmode, sub_optab, virtual_stack_dynamic_rtx,
747 stack_pointer_rtx, 0, 1, OPTAB_LIB_WIDEN);
748 size = expand_binop (Pmode, add_optab, size, dynamic_offset,
749 0, 1, OPTAB_LIB_WIDEN);
750 }
751 #endif /* SETJMP_VIA_SAVE_AREA */
752
753 /* Round the size to a multiple of the required stack alignment.
754 Since the stack is presumed to be rounded before this allocation,
755 this will maintain the required alignment.
756
757 If the stack grows downward, we could save an insn by subtracting
758 SIZE from the stack pointer and then aligning the stack pointer.
759 The problem with this is that the stack pointer may be unaligned
760 between the execution of the subtraction and alignment insns, and
761 some machines do not allow this. Even on those that do, some
762 signal handlers malfunction if a signal should occur between those
763 insns. Since this is an extremely rare event, we have no reliable
764 way of knowing which systems have this problem. So we avoid even
765 momentarily mis-aligning the stack. */
766
767 if (known_align % STACK_BOUNDARY != 0)
768 size = round_push (size);
769
770 do_pending_stack_adjust ();
771
772 /* Don't use a TARGET that isn't a pseudo. */
773 if (target == 0 || GET_CODE (target) != REG
774 || REGNO (target) < FIRST_PSEUDO_REGISTER)
775 target = gen_reg_rtx (Pmode);
776
777 #ifndef STACK_GROWS_DOWNWARD
778 emit_move_insn (target, virtual_stack_dynamic_rtx);
779 #endif
780
781 /* Perform the required allocation from the stack. Some systems do
782 this differently than simply incrementing/decrementing from the
783 stack pointer. */
784 #ifdef HAVE_allocate_stack
785 if (HAVE_allocate_stack)
786 {
787 enum machine_mode mode
788 = insn_operand_mode[(int) CODE_FOR_allocate_stack][0];
789
790 if (insn_operand_predicate[(int) CODE_FOR_allocate_stack][0]
791 && ! ((*insn_operand_predicate[(int) CODE_FOR_allocate_stack][0])
792 (size, mode)))
793 size = copy_to_mode_reg (mode, size);
794
795 emit_insn (gen_allocate_stack (size));
796 }
797 else
798 #endif
799 anti_adjust_stack (size);
800
801 #ifdef STACK_GROWS_DOWNWARD
802 emit_move_insn (target, virtual_stack_dynamic_rtx);
803 #endif
804
805 #ifdef MUST_ALIGN
806 if (known_align % BIGGEST_ALIGNMENT != 0)
807 {
808 target = expand_divmod (0, CEIL_DIV_EXPR, Pmode, target,
809 gen_rtx (CONST_INT, VOIDmode,
810 BIGGEST_ALIGNMENT / BITS_PER_UNIT),
811 0, 1);
812
813 target = expand_mult (Pmode, target,
814 gen_rtx (CONST_INT, VOIDmode,
815 BIGGEST_ALIGNMENT / BITS_PER_UNIT),
816 0, 1);
817 }
818 #endif
819
820 /* Some systems require a particular insn to refer to the stack
821 to make the pages exist. */
822 #ifdef HAVE_probe
823 if (HAVE_probe)
824 emit_insn (gen_probe ());
825 #endif
826
827 return target;
828 }
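
/* Usage sketch (hypothetical, for illustration): an alloca-style
   expansion might do

       rtx size = expand_expr (size_tree, 0, Pmode, 0);
       rtx addr = allocate_dynamic_stack_space (size, 0, BITS_PER_UNIT);

   where SIZE_TREE is a hypothetical tree for the byte count; passing
   BITS_PER_UNIT as KNOWN_ALIGN when nothing better is known makes the
   function round SIZE up to the stack boundary and, when MUST_ALIGN is
   in effect, realign the returned address to BIGGEST_ALIGNMENT.  */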
829 \f
830 /* Return an rtx representing the register or memory location
831 in which a scalar value of data type VALTYPE
832 was returned by a function call to function FUNC.
833 FUNC is a FUNCTION_DECL node if the precise function is known,
834 otherwise 0. */
835
836 rtx
837 hard_function_value (valtype, func)
838 tree valtype;
839 tree func;
840 {
841 return FUNCTION_VALUE (valtype, func);
842 }
843
844 /* Return an rtx representing the register or memory location
845 in which a scalar value of mode MODE was returned by a library call. */
846
847 rtx
848 hard_libcall_value (mode)
849 enum machine_mode mode;
850 {
851 return LIBCALL_VALUE (mode);
852 }