]> gcc.gnu.org Git - gcc.git/blame - gcc/explow.c
Merge in gcc2-ss-010999
[gcc.git] / gcc / explow.c
CommitLineData
18ca7dab 1/* Subroutines for manipulating rtx's in semantically interesting ways.
747215f1 2 Copyright (C) 1987, 91, 94-97, 1998, 1999 Free Software Foundation, Inc.
18ca7dab
RK
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
940d9d63
RK
18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA. */
18ca7dab
RK
20
21
22#include "config.h"
670ee920 23#include "system.h"
01198c2f 24#include "toplev.h"
18ca7dab
RK
25#include "rtl.h"
26#include "tree.h"
27#include "flags.h"
49ad7cfa 28#include "function.h"
18ca7dab
RK
29#include "expr.h"
30#include "hard-reg-set.h"
31#include "insn-config.h"
32#include "recog.h"
33#include "insn-flags.h"
34#include "insn-codes.h"
35
c795bca9
BS
36#if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
37#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
38#endif
39
ea534b63 40static rtx break_out_memory_refs PROTO((rtx));
edff2491 41static void emit_stack_probe PROTO((rtx));
7e4ce834
RH
42
43
44/* Truncate and perhaps sign-extend C as appropriate for MODE. */
45
46HOST_WIDE_INT
47trunc_int_for_mode (c, mode)
48 HOST_WIDE_INT c;
49 enum machine_mode mode;
50{
51 int width = GET_MODE_BITSIZE (mode);
52
53 /* We clear out all bits that don't belong in MODE, unless they and our
54 sign bit are all one. So we get either a reasonable negative
55 value or a reasonable unsigned value. */
56
57 if (width < HOST_BITS_PER_WIDE_INT
58 && ((c & ((HOST_WIDE_INT) (-1) << (width - 1)))
59 != ((HOST_WIDE_INT) (-1) << (width - 1))))
60 c &= ((HOST_WIDE_INT) 1 << width) - 1;
61
62 /* If this would be an entire word for the target, but is not for
63 the host, then sign-extend on the host so that the number will look
64 the same way on the host that it would on the target.
65
66 For example, when building a 64 bit alpha hosted 32 bit sparc
67 targeted compiler, then we want the 32 bit unsigned value -1 to be
68 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
69 The later confuses the sparc backend. */
70
71 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
72 && BITS_PER_WORD == width
73 && (c & ((HOST_WIDE_INT) 1 << (width - 1))))
74 c |= ((HOST_WIDE_INT) (-1) << width);
75
76 return c;
77}
78
b1ec3c92
CH
79/* Return an rtx for the sum of X and the integer C.
80
8008b228 81 This function should be used via the `plus_constant' macro. */
18ca7dab
RK
82
83rtx
b1ec3c92 84plus_constant_wide (x, c)
18ca7dab 85 register rtx x;
b1ec3c92 86 register HOST_WIDE_INT c;
18ca7dab
RK
87{
88 register RTX_CODE code;
89 register enum machine_mode mode;
90 register rtx tem;
91 int all_constant = 0;
92
93 if (c == 0)
94 return x;
95
96 restart:
97
98 code = GET_CODE (x);
99 mode = GET_MODE (x);
100 switch (code)
101 {
102 case CONST_INT:
b1ec3c92 103 return GEN_INT (INTVAL (x) + c);
18ca7dab
RK
104
105 case CONST_DOUBLE:
106 {
b1ec3c92
CH
107 HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
108 HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
109 HOST_WIDE_INT l2 = c;
110 HOST_WIDE_INT h2 = c < 0 ? ~0 : 0;
111 HOST_WIDE_INT lv, hv;
18ca7dab
RK
112
113 add_double (l1, h1, l2, h2, &lv, &hv);
114
115 return immed_double_const (lv, hv, VOIDmode);
116 }
117
118 case MEM:
119 /* If this is a reference to the constant pool, try replacing it with
120 a reference to a new constant. If the resulting address isn't
121 valid, don't return it because we have no way to validize it. */
122 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
123 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
124 {
38a448ca
RH
125 /* Any rtl we create here must go in a saveable obstack, since
126 we might have been called from within combine. */
127 push_obstacks_nochange ();
128 rtl_in_saveable_obstack ();
18ca7dab
RK
129 tem
130 = force_const_mem (GET_MODE (x),
131 plus_constant (get_pool_constant (XEXP (x, 0)),
132 c));
38a448ca 133 pop_obstacks ();
18ca7dab
RK
134 if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
135 return tem;
136 }
137 break;
138
139 case CONST:
140 /* If adding to something entirely constant, set a flag
141 so that we can add a CONST around the result. */
142 x = XEXP (x, 0);
143 all_constant = 1;
144 goto restart;
145
146 case SYMBOL_REF:
147 case LABEL_REF:
148 all_constant = 1;
149 break;
150
151 case PLUS:
152 /* The interesting case is adding the integer to a sum.
153 Look for constant term in the sum and combine
154 with C. For an integer constant term, we make a combined
155 integer. For a constant term that is not an explicit integer,
e5671f2b
RK
156 we cannot really combine, but group them together anyway.
157
03d937fc
R
158 Restart or use a recursive call in case the remaining operand is
159 something that we handle specially, such as a SYMBOL_REF.
160
161 We may not immediately return from the recursive call here, lest
162 all_constant gets lost. */
e5671f2b
RK
163
164 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
03d937fc
R
165 {
166 c += INTVAL (XEXP (x, 1));
7e4ce834
RH
167
168 if (GET_MODE (x) != VOIDmode)
169 c = trunc_int_for_mode (c, GET_MODE (x));
170
03d937fc
R
171 x = XEXP (x, 0);
172 goto restart;
173 }
18ca7dab 174 else if (CONSTANT_P (XEXP (x, 0)))
03d937fc
R
175 {
176 x = gen_rtx_PLUS (mode,
177 plus_constant (XEXP (x, 0), c),
178 XEXP (x, 1));
179 c = 0;
180 }
18ca7dab 181 else if (CONSTANT_P (XEXP (x, 1)))
03d937fc
R
182 {
183 x = gen_rtx_PLUS (mode,
184 XEXP (x, 0),
185 plus_constant (XEXP (x, 1), c));
186 c = 0;
187 }
38a448ca
RH
188 break;
189
190 default:
191 break;
18ca7dab
RK
192 }
193
194 if (c != 0)
38a448ca 195 x = gen_rtx_PLUS (mode, x, GEN_INT (c));
18ca7dab
RK
196
197 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
198 return x;
199 else if (all_constant)
38a448ca 200 return gen_rtx_CONST (mode, x);
18ca7dab
RK
201 else
202 return x;
203}
204
b1ec3c92
CH
205/* This is the same as `plus_constant', except that it handles LO_SUM.
206
207 This function should be used via the `plus_constant_for_output' macro. */
18ca7dab
RK
208
209rtx
b1ec3c92 210plus_constant_for_output_wide (x, c)
18ca7dab 211 register rtx x;
b1ec3c92 212 register HOST_WIDE_INT c;
18ca7dab 213{
18ca7dab 214 register enum machine_mode mode = GET_MODE (x);
18ca7dab
RK
215
216 if (GET_CODE (x) == LO_SUM)
38a448ca 217 return gen_rtx_LO_SUM (mode, XEXP (x, 0),
c5c76735 218 plus_constant_for_output (XEXP (x, 1), c));
18ca7dab
RK
219
220 else
221 return plus_constant (x, c);
222}
223\f
224/* If X is a sum, return a new sum like X but lacking any constant terms.
225 Add all the removed constant terms into *CONSTPTR.
226 X itself is not altered. The result != X if and only if
227 it is not isomorphic to X. */
228
229rtx
230eliminate_constant_term (x, constptr)
231 rtx x;
232 rtx *constptr;
233{
234 register rtx x0, x1;
235 rtx tem;
236
237 if (GET_CODE (x) != PLUS)
238 return x;
239
240 /* First handle constants appearing at this level explicitly. */
241 if (GET_CODE (XEXP (x, 1)) == CONST_INT
242 && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
243 XEXP (x, 1)))
244 && GET_CODE (tem) == CONST_INT)
245 {
246 *constptr = tem;
247 return eliminate_constant_term (XEXP (x, 0), constptr);
248 }
249
250 tem = const0_rtx;
251 x0 = eliminate_constant_term (XEXP (x, 0), &tem);
252 x1 = eliminate_constant_term (XEXP (x, 1), &tem);
253 if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
254 && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
255 *constptr, tem))
256 && GET_CODE (tem) == CONST_INT)
257 {
258 *constptr = tem;
38a448ca 259 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
18ca7dab
RK
260 }
261
262 return x;
263}
264
265/* Returns the insn that next references REG after INSN, or 0
266 if REG is clobbered before next referenced or we cannot find
267 an insn that references REG in a straight-line piece of code. */
268
269rtx
270find_next_ref (reg, insn)
271 rtx reg;
272 rtx insn;
273{
274 rtx next;
275
276 for (insn = NEXT_INSN (insn); insn; insn = next)
277 {
278 next = NEXT_INSN (insn);
279 if (GET_CODE (insn) == NOTE)
280 continue;
281 if (GET_CODE (insn) == CODE_LABEL
282 || GET_CODE (insn) == BARRIER)
283 return 0;
284 if (GET_CODE (insn) == INSN
285 || GET_CODE (insn) == JUMP_INSN
286 || GET_CODE (insn) == CALL_INSN)
287 {
288 if (reg_set_p (reg, insn))
289 return 0;
290 if (reg_mentioned_p (reg, PATTERN (insn)))
291 return insn;
292 if (GET_CODE (insn) == JUMP_INSN)
293 {
294 if (simplejump_p (insn))
295 next = JUMP_LABEL (insn);
296 else
297 return 0;
298 }
299 if (GET_CODE (insn) == CALL_INSN
300 && REGNO (reg) < FIRST_PSEUDO_REGISTER
301 && call_used_regs[REGNO (reg)])
302 return 0;
303 }
304 else
305 abort ();
306 }
307 return 0;
308}
309
310/* Return an rtx for the size in bytes of the value of EXP. */
311
312rtx
313expr_size (exp)
314 tree exp;
315{
99098c66
RK
316 tree size = size_in_bytes (TREE_TYPE (exp));
317
318 if (TREE_CODE (size) != INTEGER_CST
319 && contains_placeholder_p (size))
320 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
321
8fbea4dc
RK
322 return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype),
323 EXPAND_MEMORY_USE_BAD);
18ca7dab
RK
324}
325\f
326/* Return a copy of X in which all memory references
327 and all constants that involve symbol refs
328 have been replaced with new temporary registers.
329 Also emit code to load the memory locations and constants
330 into those registers.
331
332 If X contains no such constants or memory references,
333 X itself (not a copy) is returned.
334
335 If a constant is found in the address that is not a legitimate constant
336 in an insn, it is left alone in the hope that it might be valid in the
337 address.
338
339 X may contain no arithmetic except addition, subtraction and multiplication.
340 Values returned by expand_expr with 1 for sum_ok fit this constraint. */
341
342static rtx
343break_out_memory_refs (x)
344 register rtx x;
345{
346 if (GET_CODE (x) == MEM
cabeca29 347 || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
18ca7dab 348 && GET_MODE (x) != VOIDmode))
2cca6e3f 349 x = force_reg (GET_MODE (x), x);
18ca7dab
RK
350 else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
351 || GET_CODE (x) == MULT)
352 {
353 register rtx op0 = break_out_memory_refs (XEXP (x, 0));
354 register rtx op1 = break_out_memory_refs (XEXP (x, 1));
2cca6e3f 355
18ca7dab 356 if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
38a448ca 357 x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
18ca7dab 358 }
2cca6e3f 359
18ca7dab
RK
360 return x;
361}
362
ea534b63
RK
#ifdef POINTERS_EXTEND_UNSIGNED

/* Given X, a memory address in ptr_mode, convert it to an address
   in Pmode, or vice versa (TO_MODE says which way).  We take advantage
   of the fact that pointers are not allowed to overflow by commuting
   arithmetic operations over conversions so that address arithmetic
   insns can be used.  */

rtx
convert_memory_address (to_mode, x)
     enum machine_mode to_mode;
     rtx x;
{
  enum machine_mode from_mode = to_mode == ptr_mode ? Pmode : ptr_mode;
  rtx temp;

  /* Here we handle some special cases.  If none of them apply, fall
     through to the default case.  */
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      return x;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;

    case SYMBOL_REF:
      temp = gen_rtx_SYMBOL_REF (to_mode, XSTR (x, 0));
      SYMBOL_REF_FLAG (temp) = SYMBOL_REF_FLAG (x);
      CONSTANT_POOL_ADDRESS_P (temp) = CONSTANT_POOL_ADDRESS_P (x);
      return temp;

    case CONST:
      return gen_rtx_CONST (to_mode,
			    convert_memory_address (to_mode, XEXP (x, 0)));

    case PLUS:
    case MULT:
      /* For addition when the second operand is a small constant, we can
	 safely permute the conversion and addition operation.  We can
	 always safely permute them if we are making the address
	 narrower.  In addition, always permute the operations if this is
	 a constant.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
	  || (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && (INTVAL (XEXP (x, 1)) + 20000 < 40000
		  || CONSTANT_P (XEXP (x, 0)))))
	return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
			       convert_memory_address (to_mode, XEXP (x, 0)),
			       convert_memory_address (to_mode, XEXP (x, 1)));
      break;

    default:
      break;
    }

  return convert_modes (to_mode, from_mode,
			x, POINTERS_EXTEND_UNSIGNED);
}
#endif
425
18ca7dab
RK
426/* Given a memory address or facsimile X, construct a new address,
427 currently equivalent, that is stable: future stores won't change it.
428
429 X must be composed of constants, register and memory references
430 combined with addition, subtraction and multiplication:
431 in other words, just what you can get from expand_expr if sum_ok is 1.
432
433 Works by making copies of all regs and memory locations used
434 by X and combining them the same way X does.
435 You could also stabilize the reference to this address
436 by copying the address to a register with copy_to_reg;
437 but then you wouldn't get indexed addressing in the reference. */
438
439rtx
440copy_all_regs (x)
441 register rtx x;
442{
443 if (GET_CODE (x) == REG)
444 {
11c50c5e
DE
445 if (REGNO (x) != FRAME_POINTER_REGNUM
446#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
447 && REGNO (x) != HARD_FRAME_POINTER_REGNUM
448#endif
449 )
18ca7dab
RK
450 x = copy_to_reg (x);
451 }
452 else if (GET_CODE (x) == MEM)
453 x = copy_to_reg (x);
454 else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
455 || GET_CODE (x) == MULT)
456 {
457 register rtx op0 = copy_all_regs (XEXP (x, 0));
458 register rtx op1 = copy_all_regs (XEXP (x, 1));
459 if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
38a448ca 460 x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
18ca7dab
RK
461 }
462 return x;
463}
464\f
465/* Return something equivalent to X but valid as a memory address
466 for something of mode MODE. When X is not itself valid, this
467 works by copying X or subexpressions of it into registers. */
468
469rtx
470memory_address (mode, x)
471 enum machine_mode mode;
472 register rtx x;
473{
18b9ca6f 474 register rtx oldx = x;
18ca7dab 475
38a448ca
RH
476 if (GET_CODE (x) == ADDRESSOF)
477 return x;
478
ea534b63
RK
479#ifdef POINTERS_EXTEND_UNSIGNED
480 if (GET_MODE (x) == ptr_mode)
498b529f 481 x = convert_memory_address (Pmode, x);
ea534b63
RK
482#endif
483
18ca7dab
RK
484 /* By passing constant addresses thru registers
485 we get a chance to cse them. */
cabeca29 486 if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
18b9ca6f 487 x = force_reg (Pmode, x);
18ca7dab
RK
488
489 /* Accept a QUEUED that refers to a REG
490 even though that isn't a valid address.
491 On attempting to put this in an insn we will call protect_from_queue
492 which will turn it into a REG, which is valid. */
18b9ca6f 493 else if (GET_CODE (x) == QUEUED
18ca7dab 494 && GET_CODE (QUEUED_VAR (x)) == REG)
18b9ca6f 495 ;
18ca7dab
RK
496
497 /* We get better cse by rejecting indirect addressing at this stage.
498 Let the combiner create indirect addresses where appropriate.
499 For now, generate the code so that the subexpressions useful to share
500 are visible. But not if cse won't be done! */
18b9ca6f 501 else
18ca7dab 502 {
18b9ca6f
RK
503 if (! cse_not_expected && GET_CODE (x) != REG)
504 x = break_out_memory_refs (x);
505
506 /* At this point, any valid address is accepted. */
507 GO_IF_LEGITIMATE_ADDRESS (mode, x, win);
508
509 /* If it was valid before but breaking out memory refs invalidated it,
510 use it the old way. */
511 if (memory_address_p (mode, oldx))
512 goto win2;
513
514 /* Perform machine-dependent transformations on X
515 in certain cases. This is not necessary since the code
516 below can handle all possible cases, but machine-dependent
517 transformations can make better code. */
518 LEGITIMIZE_ADDRESS (x, oldx, mode, win);
519
520 /* PLUS and MULT can appear in special ways
521 as the result of attempts to make an address usable for indexing.
522 Usually they are dealt with by calling force_operand, below.
523 But a sum containing constant terms is special
524 if removing them makes the sum a valid address:
525 then we generate that address in a register
526 and index off of it. We do this because it often makes
527 shorter code, and because the addresses thus generated
528 in registers often become common subexpressions. */
529 if (GET_CODE (x) == PLUS)
530 {
531 rtx constant_term = const0_rtx;
532 rtx y = eliminate_constant_term (x, &constant_term);
533 if (constant_term == const0_rtx
534 || ! memory_address_p (mode, y))
535 x = force_operand (x, NULL_RTX);
536 else
537 {
38a448ca 538 y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
18b9ca6f
RK
539 if (! memory_address_p (mode, y))
540 x = force_operand (x, NULL_RTX);
541 else
542 x = y;
543 }
544 }
18ca7dab 545
e475ed2a 546 else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
18b9ca6f 547 x = force_operand (x, NULL_RTX);
18ca7dab 548
18b9ca6f
RK
549 /* If we have a register that's an invalid address,
550 it must be a hard reg of the wrong class. Copy it to a pseudo. */
551 else if (GET_CODE (x) == REG)
552 x = copy_to_reg (x);
553
554 /* Last resort: copy the value to a register, since
555 the register is a valid address. */
556 else
557 x = force_reg (Pmode, x);
558
559 goto done;
18ca7dab 560
c02a7fbb
RK
561 win2:
562 x = oldx;
563 win:
564 if (flag_force_addr && ! cse_not_expected && GET_CODE (x) != REG
565 /* Don't copy an addr via a reg if it is one of our stack slots. */
566 && ! (GET_CODE (x) == PLUS
567 && (XEXP (x, 0) == virtual_stack_vars_rtx
568 || XEXP (x, 0) == virtual_incoming_args_rtx)))
569 {
570 if (general_operand (x, Pmode))
571 x = force_reg (Pmode, x);
572 else
573 x = force_operand (x, NULL_RTX);
574 }
18ca7dab 575 }
18b9ca6f
RK
576
577 done:
578
2cca6e3f
RK
579 /* If we didn't change the address, we are done. Otherwise, mark
580 a reg as a pointer if we have REG or REG + CONST_INT. */
581 if (oldx == x)
582 return x;
583 else if (GET_CODE (x) == REG)
305f22b5 584 mark_reg_pointer (x, 1);
2cca6e3f
RK
585 else if (GET_CODE (x) == PLUS
586 && GET_CODE (XEXP (x, 0)) == REG
587 && GET_CODE (XEXP (x, 1)) == CONST_INT)
305f22b5 588 mark_reg_pointer (XEXP (x, 0), 1);
2cca6e3f 589
18b9ca6f
RK
590 /* OLDX may have been the address on a temporary. Update the address
591 to indicate that X is now used. */
592 update_temp_slot_address (oldx, x);
593
18ca7dab
RK
594 return x;
595}
596
597/* Like `memory_address' but pretend `flag_force_addr' is 0. */
598
599rtx
600memory_address_noforce (mode, x)
601 enum machine_mode mode;
602 rtx x;
603{
604 int ambient_force_addr = flag_force_addr;
605 rtx val;
606
607 flag_force_addr = 0;
608 val = memory_address (mode, x);
609 flag_force_addr = ambient_force_addr;
610 return val;
611}
612
613/* Convert a mem ref into one with a valid memory address.
614 Pass through anything else unchanged. */
615
616rtx
617validize_mem (ref)
618 rtx ref;
619{
620 if (GET_CODE (ref) != MEM)
621 return ref;
622 if (memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
623 return ref;
624 /* Don't alter REF itself, since that is probably a stack slot. */
625 return change_address (ref, GET_MODE (ref), XEXP (ref, 0));
626}
627\f
628/* Return a modified copy of X with its memory address copied
629 into a temporary register to protect it from side effects.
630 If X is not a MEM, it is returned unchanged (and not copied).
631 Perhaps even if it is a MEM, if there is no need to change it. */
632
633rtx
634stabilize (x)
635 rtx x;
636{
637 register rtx addr;
638 if (GET_CODE (x) != MEM)
639 return x;
640 addr = XEXP (x, 0);
641 if (rtx_unstable_p (addr))
642 {
643 rtx temp = copy_all_regs (addr);
644 rtx mem;
645 if (GET_CODE (temp) != REG)
646 temp = copy_to_reg (temp);
38a448ca 647 mem = gen_rtx_MEM (GET_MODE (x), temp);
18ca7dab
RK
648
649 /* Mark returned memref with in_struct if it's in an array or
650 structure. Copy const and volatile from original memref. */
651
18ca7dab 652 RTX_UNCHANGING_P (mem) = RTX_UNCHANGING_P (x);
c6df88cb
MM
653 MEM_COPY_ATTRIBUTES (mem, x);
654 if (GET_CODE (addr) == PLUS)
655 MEM_SET_IN_STRUCT_P (mem, 1);
41472af8
MM
656
657 /* Since the new MEM is just like the old X, it can alias only
658 the things that X could. */
659 MEM_ALIAS_SET (mem) = MEM_ALIAS_SET (x);
660
18ca7dab
RK
661 return mem;
662 }
663 return x;
664}
665\f
666/* Copy the value or contents of X to a new temp reg and return that reg. */
667
668rtx
669copy_to_reg (x)
670 rtx x;
671{
672 register rtx temp = gen_reg_rtx (GET_MODE (x));
673
674 /* If not an operand, must be an address with PLUS and MULT so
675 do the computation. */
676 if (! general_operand (x, VOIDmode))
677 x = force_operand (x, temp);
678
679 if (x != temp)
680 emit_move_insn (temp, x);
681
682 return temp;
683}
684
685/* Like copy_to_reg but always give the new register mode Pmode
686 in case X is a constant. */
687
688rtx
689copy_addr_to_reg (x)
690 rtx x;
691{
692 return copy_to_mode_reg (Pmode, x);
693}
694
695/* Like copy_to_reg but always give the new register mode MODE
696 in case X is a constant. */
697
698rtx
699copy_to_mode_reg (mode, x)
700 enum machine_mode mode;
701 rtx x;
702{
703 register rtx temp = gen_reg_rtx (mode);
704
705 /* If not an operand, must be an address with PLUS and MULT so
706 do the computation. */
707 if (! general_operand (x, VOIDmode))
708 x = force_operand (x, temp);
709
710 if (GET_MODE (x) != mode && GET_MODE (x) != VOIDmode)
711 abort ();
712 if (x != temp)
713 emit_move_insn (temp, x);
714 return temp;
715}
716
717/* Load X into a register if it is not already one.
718 Use mode MODE for the register.
719 X should be valid for mode MODE, but it may be a constant which
720 is valid for all integer modes; that's why caller must specify MODE.
721
722 The caller must not alter the value in the register we return,
723 since we mark it as a "constant" register. */
724
725rtx
726force_reg (mode, x)
727 enum machine_mode mode;
728 rtx x;
729{
62874575 730 register rtx temp, insn, set;
18ca7dab
RK
731
732 if (GET_CODE (x) == REG)
733 return x;
96843fa2 734
18ca7dab 735 temp = gen_reg_rtx (mode);
96843fa2
NC
736
737 if (! general_operand (x, mode))
738 x = force_operand (x, NULL_RTX);
739
18ca7dab 740 insn = emit_move_insn (temp, x);
62874575 741
18ca7dab 742 /* Let optimizers know that TEMP's value never changes
62874575
RK
743 and that X can be substituted for it. Don't get confused
744 if INSN set something else (such as a SUBREG of TEMP). */
745 if (CONSTANT_P (x)
746 && (set = single_set (insn)) != 0
747 && SET_DEST (set) == temp)
18ca7dab 748 {
b1ec3c92 749 rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
18ca7dab
RK
750
751 if (note)
752 XEXP (note, 0) = x;
753 else
38a448ca 754 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, x, REG_NOTES (insn));
18ca7dab
RK
755 }
756 return temp;
757}
758
759/* If X is a memory ref, copy its contents to a new temp reg and return
760 that reg. Otherwise, return X. */
761
762rtx
763force_not_mem (x)
764 rtx x;
765{
766 register rtx temp;
767 if (GET_CODE (x) != MEM || GET_MODE (x) == BLKmode)
768 return x;
769 temp = gen_reg_rtx (GET_MODE (x));
770 emit_move_insn (temp, x);
771 return temp;
772}
773
774/* Copy X to TARGET (if it's nonzero and a reg)
775 or to a new temp reg and return that reg.
776 MODE is the mode to use for X in case it is a constant. */
777
778rtx
779copy_to_suggested_reg (x, target, mode)
780 rtx x, target;
781 enum machine_mode mode;
782{
783 register rtx temp;
784
785 if (target && GET_CODE (target) == REG)
786 temp = target;
787 else
788 temp = gen_reg_rtx (mode);
789
790 emit_move_insn (temp, x);
791 return temp;
792}
793\f
9ff65789
RK
794/* Return the mode to use to store a scalar of TYPE and MODE.
795 PUNSIGNEDP points to the signedness of the type and may be adjusted
796 to show what signedness to use on extension operations.
797
798 FOR_CALL is non-zero if this call is promoting args for a call. */
799
800enum machine_mode
801promote_mode (type, mode, punsignedp, for_call)
802 tree type;
803 enum machine_mode mode;
804 int *punsignedp;
c84e2712 805 int for_call ATTRIBUTE_UNUSED;
9ff65789
RK
806{
807 enum tree_code code = TREE_CODE (type);
808 int unsignedp = *punsignedp;
809
810#ifdef PROMOTE_FOR_CALL_ONLY
811 if (! for_call)
812 return mode;
813#endif
814
815 switch (code)
816 {
817#ifdef PROMOTE_MODE
818 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
819 case CHAR_TYPE: case REAL_TYPE: case OFFSET_TYPE:
820 PROMOTE_MODE (mode, unsignedp, type);
821 break;
822#endif
823
ea534b63 824#ifdef POINTERS_EXTEND_UNSIGNED
56a4c9e2 825 case REFERENCE_TYPE:
9ff65789 826 case POINTER_TYPE:
ea534b63
RK
827 mode = Pmode;
828 unsignedp = POINTERS_EXTEND_UNSIGNED;
9ff65789 829 break;
ea534b63 830#endif
38a448ca
RH
831
832 default:
833 break;
9ff65789
RK
834 }
835
836 *punsignedp = unsignedp;
837 return mode;
838}
839\f
18ca7dab
RK
840/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
841 This pops when ADJUST is positive. ADJUST need not be constant. */
842
843void
844adjust_stack (adjust)
845 rtx adjust;
846{
847 rtx temp;
848 adjust = protect_from_queue (adjust, 0);
849
850 if (adjust == const0_rtx)
851 return;
852
853 temp = expand_binop (Pmode,
854#ifdef STACK_GROWS_DOWNWARD
855 add_optab,
856#else
857 sub_optab,
858#endif
859 stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
860 OPTAB_LIB_WIDEN);
861
862 if (temp != stack_pointer_rtx)
863 emit_move_insn (stack_pointer_rtx, temp);
864}
865
866/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
867 This pushes when ADJUST is positive. ADJUST need not be constant. */
868
869void
870anti_adjust_stack (adjust)
871 rtx adjust;
872{
873 rtx temp;
874 adjust = protect_from_queue (adjust, 0);
875
876 if (adjust == const0_rtx)
877 return;
878
879 temp = expand_binop (Pmode,
880#ifdef STACK_GROWS_DOWNWARD
881 sub_optab,
882#else
883 add_optab,
884#endif
885 stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
886 OPTAB_LIB_WIDEN);
887
888 if (temp != stack_pointer_rtx)
889 emit_move_insn (stack_pointer_rtx, temp);
890}
891
892/* Round the size of a block to be pushed up to the boundary required
893 by this machine. SIZE is the desired size, which need not be constant. */
894
895rtx
896round_push (size)
897 rtx size;
898{
c795bca9
BS
899#ifdef PREFERRED_STACK_BOUNDARY
900 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
18ca7dab
RK
901 if (align == 1)
902 return size;
903 if (GET_CODE (size) == CONST_INT)
904 {
905 int new = (INTVAL (size) + align - 1) / align * align;
906 if (INTVAL (size) != new)
b1ec3c92 907 size = GEN_INT (new);
18ca7dab
RK
908 }
909 else
910 {
5244db05 911 /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
0f41302f
MS
912 but we know it can't. So add ourselves and then do
913 TRUNC_DIV_EXPR. */
5244db05
RK
914 size = expand_binop (Pmode, add_optab, size, GEN_INT (align - 1),
915 NULL_RTX, 1, OPTAB_LIB_WIDEN);
916 size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, GEN_INT (align),
b1ec3c92
CH
917 NULL_RTX, 1);
918 size = expand_mult (Pmode, size, GEN_INT (align), NULL_RTX, 1);
18ca7dab 919 }
c795bca9 920#endif /* PREFERRED_STACK_BOUNDARY */
18ca7dab
RK
921 return size;
922}
923\f
59257ff7
RK
924/* Save the stack pointer for the purpose in SAVE_LEVEL. PSAVE is a pointer
925 to a previously-created save area. If no save area has been allocated,
926 this function will allocate one. If a save area is specified, it
927 must be of the proper mode.
928
929 The insns are emitted after insn AFTER, if nonzero, otherwise the insns
930 are emitted at the current position. */
931
932void
933emit_stack_save (save_level, psave, after)
934 enum save_level save_level;
935 rtx *psave;
936 rtx after;
937{
938 rtx sa = *psave;
939 /* The default is that we use a move insn and save in a Pmode object. */
0ddc9a94 940 rtx (*fcn) PROTO ((rtx, rtx)) = gen_move_insn;
a260abc9 941 enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);
59257ff7
RK
942
943 /* See if this machine has anything special to do for this kind of save. */
944 switch (save_level)
945 {
946#ifdef HAVE_save_stack_block
947 case SAVE_BLOCK:
948 if (HAVE_save_stack_block)
a260abc9 949 fcn = gen_save_stack_block;
59257ff7
RK
950 break;
951#endif
952#ifdef HAVE_save_stack_function
953 case SAVE_FUNCTION:
954 if (HAVE_save_stack_function)
a260abc9 955 fcn = gen_save_stack_function;
59257ff7
RK
956 break;
957#endif
958#ifdef HAVE_save_stack_nonlocal
959 case SAVE_NONLOCAL:
960 if (HAVE_save_stack_nonlocal)
a260abc9 961 fcn = gen_save_stack_nonlocal;
59257ff7
RK
962 break;
963#endif
38a448ca
RH
964 default:
965 break;
59257ff7
RK
966 }
967
968 /* If there is no save area and we have to allocate one, do so. Otherwise
969 verify the save area is the proper mode. */
970
971 if (sa == 0)
972 {
973 if (mode != VOIDmode)
974 {
975 if (save_level == SAVE_NONLOCAL)
976 *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
977 else
978 *psave = sa = gen_reg_rtx (mode);
979 }
980 }
981 else
982 {
983 if (mode == VOIDmode || GET_MODE (sa) != mode)
984 abort ();
985 }
986
987 if (after)
700f6f98
RK
988 {
989 rtx seq;
990
991 start_sequence ();
5460015d
JW
992 /* We must validize inside the sequence, to ensure that any instructions
993 created by the validize call also get moved to the right place. */
994 if (sa != 0)
995 sa = validize_mem (sa);
d072107f 996 emit_insn (fcn (sa, stack_pointer_rtx));
700f6f98
RK
997 seq = gen_sequence ();
998 end_sequence ();
999 emit_insn_after (seq, after);
1000 }
59257ff7 1001 else
5460015d
JW
1002 {
1003 if (sa != 0)
1004 sa = validize_mem (sa);
1005 emit_insn (fcn (sa, stack_pointer_rtx));
1006 }
59257ff7
RK
1007}
1008
1009/* Restore the stack pointer for the purpose in SAVE_LEVEL. SA is the save
1010 area made by emit_stack_save. If it is zero, we have nothing to do.
1011
1012 Put any emitted insns after insn AFTER, if nonzero, otherwise at
1013 current position. */
1014
1015void
1016emit_stack_restore (save_level, sa, after)
1017 enum save_level save_level;
1018 rtx after;
1019 rtx sa;
1020{
1021 /* The default is that we use a move insn. */
0ddc9a94 1022 rtx (*fcn) PROTO ((rtx, rtx)) = gen_move_insn;
59257ff7
RK
1023
1024 /* See if this machine has anything special to do for this kind of save. */
1025 switch (save_level)
1026 {
1027#ifdef HAVE_restore_stack_block
1028 case SAVE_BLOCK:
1029 if (HAVE_restore_stack_block)
1030 fcn = gen_restore_stack_block;
1031 break;
1032#endif
1033#ifdef HAVE_restore_stack_function
1034 case SAVE_FUNCTION:
1035 if (HAVE_restore_stack_function)
1036 fcn = gen_restore_stack_function;
1037 break;
1038#endif
1039#ifdef HAVE_restore_stack_nonlocal
59257ff7
RK
1040 case SAVE_NONLOCAL:
1041 if (HAVE_restore_stack_nonlocal)
1042 fcn = gen_restore_stack_nonlocal;
1043 break;
1044#endif
38a448ca
RH
1045 default:
1046 break;
59257ff7
RK
1047 }
1048
d072107f
RK
1049 if (sa != 0)
1050 sa = validize_mem (sa);
1051
59257ff7 1052 if (after)
700f6f98
RK
1053 {
1054 rtx seq;
1055
1056 start_sequence ();
d072107f 1057 emit_insn (fcn (stack_pointer_rtx, sa));
700f6f98
RK
1058 seq = gen_sequence ();
1059 end_sequence ();
1060 emit_insn_after (seq, after);
1061 }
59257ff7 1062 else
d072107f 1063 emit_insn (fcn (stack_pointer_rtx, sa));
59257ff7
RK
1064}
1065\f
c9ec4f99
DM
1066#ifdef SETJMP_VIA_SAVE_AREA
1067/* Optimize RTL generated by allocate_dynamic_stack_space for targets
1068 where SETJMP_VIA_SAVE_AREA is true. The problem is that on these
1069 platforms, the dynamic stack space used can corrupt the original
1070 frame, thus causing a crash if a longjmp unwinds to it. */
1071
1072void
1073optimize_save_area_alloca (insns)
1074 rtx insns;
1075{
1076 rtx insn;
1077
1078 for (insn = insns; insn; insn = NEXT_INSN(insn))
1079 {
1080 rtx note;
1081
1082 if (GET_CODE (insn) != INSN)
1083 continue;
1084
1085 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1086 {
1087 if (REG_NOTE_KIND (note) != REG_SAVE_AREA)
1088 continue;
1089
1090 if (!current_function_calls_setjmp)
1091 {
1092 rtx pat = PATTERN (insn);
1093
1094 /* If we do not see the note in a pattern matching
1095 these precise characteristics, we did something
1096 entirely wrong in allocate_dynamic_stack_space.
1097
38e01259 1098 Note, one way this could happen is if SETJMP_VIA_SAVE_AREA
c9ec4f99
DM
1099 was defined on a machine where stacks grow towards higher
1100 addresses.
1101
1102 Right now only supported port with stack that grow upward
1103 is the HPPA and it does not define SETJMP_VIA_SAVE_AREA. */
1104 if (GET_CODE (pat) != SET
1105 || SET_DEST (pat) != stack_pointer_rtx
1106 || GET_CODE (SET_SRC (pat)) != MINUS
1107 || XEXP (SET_SRC (pat), 0) != stack_pointer_rtx)
1108 abort ();
1109
1110 /* This will now be transformed into a (set REG REG)
1111 so we can just blow away all the other notes. */
1112 XEXP (SET_SRC (pat), 1) = XEXP (note, 0);
1113 REG_NOTES (insn) = NULL_RTX;
1114 }
1115 else
1116 {
1117 /* setjmp was called, we must remove the REG_SAVE_AREA
1118 note so that later passes do not get confused by its
1119 presence. */
1120 if (note == REG_NOTES (insn))
1121 {
1122 REG_NOTES (insn) = XEXP (note, 1);
1123 }
1124 else
1125 {
1126 rtx srch;
1127
1128 for (srch = REG_NOTES (insn); srch; srch = XEXP (srch, 1))
1129 if (XEXP (srch, 1) == note)
1130 break;
1131
1132 if (srch == NULL_RTX)
1133 abort();
1134
1135 XEXP (srch, 1) = XEXP (note, 1);
1136 }
1137 }
1138 /* Once we've seen the note of interest, we need not look at
1139 the rest of them. */
1140 break;
1141 }
1142 }
1143}
1144#endif /* SETJMP_VIA_SAVE_AREA */
1145
18ca7dab
RK
1146/* Return an rtx representing the address of an area of memory dynamically
1147 pushed on the stack. This region of memory is always aligned to
1148 a multiple of BIGGEST_ALIGNMENT.
1149
1150 Any required stack pointer alignment is preserved.
1151
1152 SIZE is an rtx representing the size of the area.
091ad0b9
RK
1153 TARGET is a place in which the address can be placed.
1154
1155 KNOWN_ALIGN is the alignment (in bits) that we know SIZE has. */
18ca7dab
RK
1156
rtx
allocate_dynamic_stack_space (size, target, known_align)
     rtx size;
     rtx target;
     int known_align;
{
#ifdef SETJMP_VIA_SAVE_AREA
  /* Rounded-down size used by the setjmp save-area optimization;
     see optimize_save_area_alloca for how the REG_SAVE_AREA note
     emitted below is consumed.  */
  rtx setjmpless_size = NULL_RTX;
#endif

  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  current_function_calls_alloca = 1;

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* We will need to ensure that the address we return is aligned to
     BIGGEST_ALIGNMENT.  If STACK_DYNAMIC_OFFSET is defined, we don't
     always know its final value at this point in the compilation (it
     might depend on the size of the outgoing parameter lists, for
     example), so we must align the value to be returned in that case.
     (Note that STACK_DYNAMIC_OFFSET will have a default non-zero value if
     STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
     We must also do an alignment operation on the returned value if
     the stack pointer alignment is less strict than BIGGEST_ALIGNMENT.

     If we have to align, we must leave space in SIZE for the hole
     that might result from the alignment operation.  */

#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET) || ! defined (PREFERRED_STACK_BOUNDARY)
#define MUST_ALIGN 1
#else
#define MUST_ALIGN (PREFERRED_STACK_BOUNDARY < BIGGEST_ALIGNMENT)
#endif

  if (MUST_ALIGN)
    {
      /* Pad SIZE so that rounding TARGET up to BIGGEST_ALIGNMENT at the
	 end of this function cannot run past the allocated block.  */
      if (GET_CODE (size) == CONST_INT)
	size = GEN_INT (INTVAL (size)
			+ (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1));
      else
	size = expand_binop (Pmode, add_optab, size,
			     GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
			     NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

#ifdef SETJMP_VIA_SAVE_AREA
  /* If setjmp restores regs from a save area in the stack frame,
     avoid clobbering the reg save area.  Note that the offset of
     virtual_incoming_args_rtx includes the preallocated stack args space.
     It would be no problem to clobber that, but it's on the wrong side
     of the old save area.  */
  {
    rtx dynamic_offset
      = expand_binop (Pmode, sub_optab, virtual_stack_dynamic_rtx,
		      stack_pointer_rtx, NULL_RTX, 1, OPTAB_LIB_WIDEN);

    if (!current_function_calls_setjmp)
      {
	int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

	/* See optimize_save_area_alloca to understand what is being
	   set up here.  */

#if !defined(PREFERRED_STACK_BOUNDARY) || !defined(MUST_ALIGN) || (PREFERRED_STACK_BOUNDARY != BIGGEST_ALIGNMENT)
	/* If anyone creates a target with these characteristics, let them
	   know that our optimization cannot work correctly in such a case.  */
	abort();
#endif

	if (GET_CODE (size) == CONST_INT)
	  {
	    /* Round the constant size down to a multiple of ALIGN,
	       undoing the padding added above when possible.  */
	    int new = INTVAL (size) / align * align;

	    if (INTVAL (size) != new)
	      setjmpless_size = GEN_INT (new);
	    else
	      setjmpless_size = size;
	  }
	else
	  {
	    /* Since we know overflow is not possible, we avoid using
	       CEIL_DIV_EXPR and use TRUNC_DIV_EXPR instead.  */
	    setjmpless_size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size,
					     GEN_INT (align), NULL_RTX, 1);
	    setjmpless_size = expand_mult (Pmode, setjmpless_size,
					   GEN_INT (align), NULL_RTX, 1);
	  }
	/* Our optimization works based upon being able to perform a simple
	   transformation of this RTL into a (set REG REG) so make sure things
	   did in fact end up in a REG.  */
	if (!register_operand (setjmpless_size, Pmode))
	  setjmpless_size = force_reg (Pmode, setjmpless_size);
      }

    /* Grow the request so the allocation starts past the register
       save area.  */
    size = expand_binop (Pmode, add_optab, size, dynamic_offset,
			 NULL_RTX, 1, OPTAB_LIB_WIDEN);
  }
#endif /* SETJMP_VIA_SAVE_AREA */

  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */

#ifdef PREFERRED_STACK_BOUNDARY
  /* If we added a variable amount to SIZE,
     we can no longer assume it is aligned.  */
#if !defined (SETJMP_VIA_SAVE_AREA)
  if (MUST_ALIGN || known_align % PREFERRED_STACK_BOUNDARY != 0)
#endif
    size = round_push (size);
#endif

  do_pending_stack_adjust ();

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    probe_stack_range (STACK_CHECK_MAX_FRAME_SIZE + STACK_CHECK_PROTECT, size);

  /* Don't use a TARGET that isn't a pseudo.  */
  if (target == 0 || GET_CODE (target) != REG
      || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (Pmode);

  /* TARGET will hold an address; record what we know about its
     alignment for the benefit of later optimizers.  */
  mark_reg_pointer (target, known_align / BITS_PER_UNIT);

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
#ifdef HAVE_allocate_stack
  if (HAVE_allocate_stack)
    {
      enum machine_mode mode = STACK_SIZE_MODE;

      /* Make sure both operands satisfy the allocate_stack pattern's
	 predicates before emitting it.  */
      if (insn_operand_predicate[(int) CODE_FOR_allocate_stack][0]
	  && ! ((*insn_operand_predicate[(int) CODE_FOR_allocate_stack][0])
		(target, Pmode)))
#ifdef POINTERS_EXTEND_UNSIGNED
	target = convert_memory_address (Pmode, target);
#else
	target = copy_to_mode_reg (Pmode, target);
#endif

      if (mode == VOIDmode)
	mode = Pmode;

      size = convert_modes (mode, ptr_mode, size, 1);
      if (insn_operand_predicate[(int) CODE_FOR_allocate_stack][1]
	  && ! ((*insn_operand_predicate[(int) CODE_FOR_allocate_stack][1])
		(size, mode)))
	size = copy_to_mode_reg (mode, size);

      emit_insn (gen_allocate_stack (target, size));
    }
  else
#endif
    {
      /* When the stack grows upward, the new block starts at the current
	 dynamic top, so capture the address BEFORE adjusting; when it
	 grows downward, capture it AFTER (see the mirrored move below).  */
#ifndef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
      size = convert_modes (Pmode, ptr_mode, size, 1);
      anti_adjust_stack (size);
#ifdef SETJMP_VIA_SAVE_AREA
      if (setjmpless_size != NULL_RTX)
	{
	  /* Tag the stack-adjust insn just emitted so that
	     optimize_save_area_alloca can find and rewrite it.  */
	  rtx note_target = get_last_insn ();

	  REG_NOTES (note_target)
	    = gen_rtx_EXPR_LIST (REG_SAVE_AREA, setjmpless_size,
				 REG_NOTES (note_target));
	}
#endif /* SETJMP_VIA_SAVE_AREA */
#ifdef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
    }

  if (MUST_ALIGN)
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
	 but we know it can't.  So add ourselves and then do
	 TRUNC_DIV_EXPR.  */
      target = expand_binop (Pmode, add_optab, target,
			     GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
			     NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
			      GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
			      NULL_RTX, 1);
      target = expand_mult (Pmode, target,
			    GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
			    NULL_RTX, 1);
    }

  /* Some systems require a particular insn to refer to the stack
     to make the pages exist.  */
#ifdef HAVE_probe
  if (HAVE_probe)
    emit_insn (gen_probe ());
#endif

  /* Record the new stack level for nonlocal gotos.  */
  if (nonlocal_goto_handler_slots != 0)
    emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);

  return target;
}
1381\f
edff2491
RK
1382/* Emit one stack probe at ADDRESS, an address within the stack. */
1383
1384static void
1385emit_stack_probe (address)
1386 rtx address;
1387{
38a448ca 1388 rtx memref = gen_rtx_MEM (word_mode, address);
edff2491
RK
1389
1390 MEM_VOLATILE_P (memref) = 1;
1391
1392 if (STACK_CHECK_PROBE_LOAD)
1393 emit_move_insn (gen_reg_rtx (word_mode), memref);
1394 else
1395 emit_move_insn (memref, const0_rtx);
1396}
1397
1398/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
1399 FIRST is a constant and size is a Pmode RTX. These are offsets from the
1400 current stack pointer. STACK_GROWS_DOWNWARD says whether to add or
1401 subtract from the stack. If SIZE is constant, this is done
1402 with a fixed number of probes. Otherwise, we must make a loop. */
1403
1404#ifdef STACK_GROWS_DOWNWARD
1405#define STACK_GROW_OP MINUS
1406#else
1407#define STACK_GROW_OP PLUS
1408#endif
1409
1410void
1411probe_stack_range (first, size)
1412 HOST_WIDE_INT first;
1413 rtx size;
1414{
1415 /* First see if we have an insn to check the stack. Use it if so. */
1416#ifdef HAVE_check_stack
1417 if (HAVE_check_stack)
1418 {
38a448ca
RH
1419 rtx last_addr
1420 = force_operand (gen_rtx_STACK_GROW_OP (Pmode,
1421 stack_pointer_rtx,
1422 plus_constant (size, first)),
1423 NULL_RTX);
edff2491
RK
1424
1425 if (insn_operand_predicate[(int) CODE_FOR_check_stack][0]
1426 && ! ((*insn_operand_predicate[(int) CODE_FOR_check_stack][0])
c5c76735
JL
1427 (last_addr, Pmode)))
1428 last_addr = copy_to_mode_reg (Pmode, last_addr);
edff2491 1429
c5c76735 1430 emit_insn (gen_check_stack (last_addr));
edff2491
RK
1431 return;
1432 }
1433#endif
1434
1435 /* If we have to generate explicit probes, see if we have a constant
95a086b1 1436 small number of them to generate. If so, that's the easy case. */
e5e809f4
JL
1437 if (GET_CODE (size) == CONST_INT
1438 && INTVAL (size) < 10 * STACK_CHECK_PROBE_INTERVAL)
edff2491
RK
1439 {
1440 HOST_WIDE_INT offset;
1441
1442 /* Start probing at FIRST + N * STACK_CHECK_PROBE_INTERVAL
1443 for values of N from 1 until it exceeds LAST. If only one
1444 probe is needed, this will not generate any code. Then probe
1445 at LAST. */
1446 for (offset = first + STACK_CHECK_PROBE_INTERVAL;
1447 offset < INTVAL (size);
1448 offset = offset + STACK_CHECK_PROBE_INTERVAL)
38a448ca
RH
1449 emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1450 stack_pointer_rtx,
1451 GEN_INT (offset)));
edff2491 1452
38a448ca
RH
1453 emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1454 stack_pointer_rtx,
1455 plus_constant (size, first)));
edff2491
RK
1456 }
1457
1458 /* In the variable case, do the same as above, but in a loop. We emit loop
1459 notes so that loop optimization can be done. */
1460 else
1461 {
1462 rtx test_addr
38a448ca
RH
1463 = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1464 stack_pointer_rtx,
1465 GEN_INT (first + STACK_CHECK_PROBE_INTERVAL)),
edff2491
RK
1466 NULL_RTX);
1467 rtx last_addr
38a448ca
RH
1468 = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1469 stack_pointer_rtx,
1470 plus_constant (size, first)),
edff2491
RK
1471 NULL_RTX);
1472 rtx incr = GEN_INT (STACK_CHECK_PROBE_INTERVAL);
1473 rtx loop_lab = gen_label_rtx ();
1474 rtx test_lab = gen_label_rtx ();
1475 rtx end_lab = gen_label_rtx ();
1476 rtx temp;
1477
1478 if (GET_CODE (test_addr) != REG
1479 || REGNO (test_addr) < FIRST_PSEUDO_REGISTER)
1480 test_addr = force_reg (Pmode, test_addr);
1481
1482 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
1483 emit_jump (test_lab);
1484
1485 emit_label (loop_lab);
1486 emit_stack_probe (test_addr);
1487
1488 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
1489
1490#ifdef STACK_GROWS_DOWNWARD
1491#define CMP_OPCODE GTU
1492 temp = expand_binop (Pmode, sub_optab, test_addr, incr, test_addr,
1493 1, OPTAB_WIDEN);
1494#else
1495#define CMP_OPCODE LTU
1496 temp = expand_binop (Pmode, add_optab, test_addr, incr, test_addr,
1497 1, OPTAB_WIDEN);
1498#endif
1499
1500 if (temp != test_addr)
1501 abort ();
1502
1503 emit_label (test_lab);
c5d5d461
JL
1504 emit_cmp_and_jump_insns (test_addr, last_addr, CMP_OPCODE,
1505 NULL_RTX, Pmode, 1, 0, loop_lab);
edff2491
RK
1506 emit_jump (end_lab);
1507 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
1508 emit_label (end_lab);
1509
38a448ca
RH
1510 /* If will be doing stupid optimization, show test_addr is still live. */
1511 if (obey_regdecls)
1512 emit_insn (gen_rtx_USE (VOIDmode, test_addr));
1513
edff2491
RK
1514 emit_stack_probe (last_addr);
1515 }
1516}
1517\f
18ca7dab
RK
1518/* Return an rtx representing the register or memory location
1519 in which a scalar value of data type VALTYPE
1520 was returned by a function call to function FUNC.
1521 FUNC is a FUNCTION_DECL node if the precise function is known,
1522 otherwise 0. */
1523
1524rtx
1525hard_function_value (valtype, func)
1526 tree valtype;
91813b28 1527 tree func ATTRIBUTE_UNUSED;
18ca7dab 1528{
e1a4071f
JL
1529 rtx val = FUNCTION_VALUE (valtype, func);
1530 if (GET_CODE (val) == REG
1531 && GET_MODE (val) == BLKmode)
1532 {
1533 int bytes = int_size_in_bytes (valtype);
1534 enum machine_mode tmpmode;
1535 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
0c61f541 1536 tmpmode != VOIDmode;
e1a4071f
JL
1537 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
1538 {
1539 /* Have we found a large enough mode? */
1540 if (GET_MODE_SIZE (tmpmode) >= bytes)
1541 break;
1542 }
1543
1544 /* No suitable mode found. */
0c61f541 1545 if (tmpmode == VOIDmode)
e1a4071f
JL
1546 abort ();
1547
1548 PUT_MODE (val, tmpmode);
1549 }
1550 return val;
18ca7dab
RK
1551}
1552
1553/* Return an rtx representing the register or memory location
1554 in which a scalar value of mode MODE was returned by a library call. */
1555
1556rtx
1557hard_libcall_value (mode)
1558 enum machine_mode mode;
1559{
1560 return LIBCALL_VALUE (mode);
1561}
0c5e217d
RS
1562
1563/* Look up the tree code for a given rtx code
1564 to provide the arithmetic operation for REAL_ARITHMETIC.
1565 The function returns an int because the caller may not know
1566 what `enum tree_code' means. */
1567
1568int
1569rtx_to_tree_code (code)
1570 enum rtx_code code;
1571{
1572 enum tree_code tcode;
1573
1574 switch (code)
1575 {
1576 case PLUS:
1577 tcode = PLUS_EXPR;
1578 break;
1579 case MINUS:
1580 tcode = MINUS_EXPR;
1581 break;
1582 case MULT:
1583 tcode = MULT_EXPR;
1584 break;
1585 case DIV:
1586 tcode = RDIV_EXPR;
1587 break;
1588 case SMIN:
1589 tcode = MIN_EXPR;
1590 break;
1591 case SMAX:
1592 tcode = MAX_EXPR;
1593 break;
1594 default:
1595 tcode = LAST_AND_UNUSED_TREE_CODE;
1596 break;
1597 }
1598 return ((int) tcode);
1599}
This page took 0.912621 seconds and 5 git commands to generate.