/* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
   Copyright (C) 2001 Free Software Foundation, Inc.
   Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
21
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "regs.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "insn-attr.h"
#include "insn-codes.h"
#include "recog.h"
#include "output.h"
#include "tree.h"
#include "expr.h"
#include "flags.h"
#include "reload.h"
#include "tm_p.h"
#include "function.h"
#include "toplev.h"
#include "optabs.h"
#include "libfuncs.h"
#include "target.h"
#include "target-def.h"
48
/* Enumeration of all the relational tests, so that arrays can be
   indexed by test type without depending on the order of EQ, NE, etc.
   ITEST_MAX doubles as the "no match" sentinel and the table size.  */

enum internal_test
{
  ITEST_EQ,
  ITEST_NE,
  ITEST_GT,
  ITEST_GE,
  ITEST_LT,
  ITEST_LE,
  ITEST_GTU,
  ITEST_GEU,
  ITEST_LTU,
  ITEST_LEU,
  ITEST_MAX
};
66
67/* Cached operands, and operator to compare for use in set/branch on
68 condition codes. */
69rtx branch_cmp[2];
70
71/* what type of branch to use */
72enum cmp_type branch_type;
73
74/* Array giving truth value on whether or not a given hard register
75 can support a given mode. */
76char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
77
78/* Current frame size calculated by compute_frame_size. */
79unsigned xtensa_current_frame_size;
80
81/* Tables of ld/st opcode names for block moves */
82const char *xtensa_ld_opcodes[(int) MAX_MACHINE_MODE];
83const char *xtensa_st_opcodes[(int) MAX_MACHINE_MODE];
84#define LARGEST_MOVE_RATIO 15
85
86/* Define the structure for the machine field in struct function. */
87struct machine_function
88{
89 int accesses_prev_frame;
90};
91
92/* Vector, indexed by hard register number, which contains 1 for a
93 register that is allowable in a candidate for leaf function
94 treatment. */
95
96const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
97{
98 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
99 1, 1, 1,
100 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
101 1
102};
103
104/* Map hard register number to register class */
105const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
106{
107 GR_REGS, SP_REG, GR_REGS, GR_REGS,
108 GR_REGS, GR_REGS, GR_REGS, GR_REGS,
109 GR_REGS, GR_REGS, GR_REGS, GR_REGS,
110 GR_REGS, GR_REGS, GR_REGS, GR_REGS,
111 AR_REGS, AR_REGS, BR_REGS,
112 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
113 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
114 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
115 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
116 ACC_REG,
117};
118
119/* Map register constraint character to register class. */
120enum reg_class xtensa_char_to_class[256] =
121{
122 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
123 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
124 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
125 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
126 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
127 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
128 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
129 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
130 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
131 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
132 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
133 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
134 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
135 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
136 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
137 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
138 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
139 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
140 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
141 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
142 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
143 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
144 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
145 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
146 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
147 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
148 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
149 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
150 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
151 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
152 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
153 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
154 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
155 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
156 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
157 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
158 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
159 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
160 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
161 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
162 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
163 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
164 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
165 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
166 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
167 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
168 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
169 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
170 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
171 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
172 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
173 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
174 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
175 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
176 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
177 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
178 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
179 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
180 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
181 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
182 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
183 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
184 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
185 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
186};
187
188/* This macro generates the assembly code for function entry.
189 FILE is a stdio stream to output the code to.
190 SIZE is an int: how many units of temporary storage to allocate.
191 Refer to the array 'regs_ever_live' to determine which registers
192 to save; 'regs_ever_live[I]' is nonzero if register number I
193 is ever used in the function. This macro is responsible for
194 knowing which registers should not be saved even if used. */
195
196#undef TARGET_ASM_FUNCTION_PROLOGUE
197#define TARGET_ASM_FUNCTION_PROLOGUE xtensa_function_prologue
198
199/* This macro generates the assembly code for function exit,
200 on machines that need it. If FUNCTION_EPILOGUE is not defined
201 then individual return instructions are generated for each
202 return statement. Args are same as for FUNCTION_PROLOGUE. */
203
204#undef TARGET_ASM_FUNCTION_EPILOGUE
205#define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
206
207/* These hooks specify assembly directives for creating certain kinds
208 of integer object. */
209
210#undef TARGET_ASM_ALIGNED_SI_OP
211#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
212
213struct gcc_target targetm = TARGET_INITIALIZER;
214
215static int b4const_or_zero PARAMS ((int));
216static enum internal_test map_test_to_internal_test PARAMS ((enum rtx_code));
217static rtx gen_int_relational PARAMS ((enum rtx_code, rtx, rtx, int *));
218static rtx gen_float_relational PARAMS ((enum rtx_code, rtx, rtx));
219static rtx gen_conditional_move PARAMS ((rtx));
220static rtx fixup_subreg_mem PARAMS ((rtx x));
221static enum machine_mode xtensa_find_mode_for_size PARAMS ((unsigned));
222static void xtensa_init_machine_status PARAMS ((struct function *p));
223static void xtensa_free_machine_status PARAMS ((struct function *p));
224static void printx PARAMS ((FILE *, signed int));
225static rtx frame_size_const;
226static int current_function_arg_words;
227static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
228 REG_ALLOC_ORDER;
229
230
231/*
232 * Functions to test Xtensa immediate operand validity.
233 */
234
/* Return nonzero if V is one of the encodable "b4constu" unsigned
   branch-immediate values (used by ubranch_operand below).  */

int
xtensa_b4constu (v)
     int v;
{
  static const int valid[] =
    { 32768, 65536, 2, 3, 4, 5, 6, 7, 8, 10, 12, 16, 32, 64, 128, 256 };
  int i;

  for (i = 0; i < (int) (sizeof (valid) / sizeof (valid[0])); i++)
    if (v == valid[i])
      return 1;
  return 0;
}
261
/* Return nonzero if V is a multiple of 256 in [-32768, 32512], i.e. a
   signed 8-bit value scaled by 256.  */

int
xtensa_simm8x256 (v)
     int v;
{
  if (v < -32768 || v > 32512)
    return 0;
  return (v & 255) == 0;
}
268
/* Return nonzero if V is -1 or any value in [1, 15] (zero excluded).  */

int
xtensa_ai4const (v)
     int v;
{
  if (v == -1)
    return 1;
  return v >= 1 && v <= 15;
}
275
/* Return nonzero if V fits in the asymmetric 7-bit range [-32, 95].  */

int
xtensa_simm7 (v)
     int v;
{
  return !(v < -32 || v > 95);
}
282
/* Return nonzero if V is one of the encodable "b4const" signed
   branch-immediate values (used by branch_operand below).  */

int
xtensa_b4const (v)
     int v;
{
  static const int valid[] =
    { -1, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 16, 32, 64, 128, 256 };
  int i;

  for (i = 0; i < (int) (sizeof (valid) / sizeof (valid[0])); i++)
    if (v == valid[i])
      return 1;
  return 0;
}
309
/* Return nonzero if V fits in a signed 8-bit field.  */

int
xtensa_simm8 (v)
     int v;
{
  return !(v < -128 || v > 127);
}
316
/* Return nonzero if V is in [7, 22].  */

int
xtensa_tp7 (v)
     int v;
{
  if (v < 7)
    return 0;
  return v <= 22;
}
323
/* Return nonzero if V is a multiple of 4 in [0, 60] (a 4-bit offset
   scaled by 4).  */

int
xtensa_lsi4x4 (v)
     int v;
{
  if (v < 0 || v > 60)
    return 0;
  return (v & 3) == 0;
}
330
/* Return nonzero if V fits in a signed 12-bit field.  */

int
xtensa_simm12b (v)
     int v;
{
  return !(v < -2048 || v > 2047);
}
337
/* Return nonzero if V fits in an unsigned 8-bit field.  */

int
xtensa_uimm8 (v)
     int v;
{
  /* Negative values become huge when viewed as unsigned.  */
  return (unsigned) v <= 255;
}
344
/* Return nonzero if V is an even value in [0, 510] (an unsigned 8-bit
   field scaled by 2).  */

int
xtensa_uimm8x2 (v)
     int v;
{
  return (v & 1) == 0 && (unsigned) v <= 510;
}
351
/* Return nonzero if V is a multiple of 4 in [0, 1020] (an unsigned
   8-bit field scaled by 4).  */

int
xtensa_uimm8x4 (v)
     int v;
{
  return (v & 3) == 0 && (unsigned) v <= 1020;
}
358
359
360/* This is just like the standard true_regnum() function except that it
361 works even when reg_renumber is not initialized. */
362
363int
364xt_true_regnum (x)
365 rtx x;
366{
367 if (GET_CODE (x) == REG)
368 {
369 if (reg_renumber
370 && REGNO (x) >= FIRST_PSEUDO_REGISTER
371 && reg_renumber[REGNO (x)] >= 0)
372 return reg_renumber[REGNO (x)];
373 return REGNO (x);
374 }
375 if (GET_CODE (x) == SUBREG)
376 {
377 int base = xt_true_regnum (SUBREG_REG (x));
378 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
379 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
380 GET_MODE (SUBREG_REG (x)),
381 SUBREG_BYTE (x), GET_MODE (x));
382 }
383 return -1;
384}
385
386
387int
388add_operand (op, mode)
389 rtx op;
390 enum machine_mode mode;
391{
392 if (GET_CODE (op) == CONST_INT)
393 return (xtensa_simm8 (INTVAL (op)) ||
394 xtensa_simm8x256 (INTVAL (op)));
395
396 return register_operand (op, mode);
397}
398
399
400int
401arith_operand (op, mode)
402 rtx op;
403 enum machine_mode mode;
404{
405 if (GET_CODE (op) == CONST_INT)
406 return xtensa_simm8 (INTVAL (op));
407
408 return register_operand (op, mode);
409}
410
411
412int
413nonimmed_operand (op, mode)
414 rtx op;
415 enum machine_mode mode;
416{
417 /* We cannot use the standard nonimmediate_operand() predicate because
418 it includes constant pool memory operands. */
419
420 if (memory_operand (op, mode))
421 return !constantpool_address_p (XEXP (op, 0));
422
423 return register_operand (op, mode);
424}
425
426
427int
428mem_operand (op, mode)
429 rtx op;
430 enum machine_mode mode;
431{
432 /* We cannot use the standard memory_operand() predicate because
433 it includes constant pool memory operands. */
434
435 if (memory_operand (op, mode))
436 return !constantpool_address_p (XEXP (op, 0));
437
438 return FALSE;
439}
440
441
442int
443non_acc_reg_operand (op, mode)
444 rtx op;
445 enum machine_mode mode;
446{
447 if (register_operand (op, mode))
448 return !ACC_REG_P (xt_true_regnum (op));
449 return FALSE;
450}
451
452
453int
454mask_operand (op, mode)
455 rtx op;
456 enum machine_mode mode;
457{
458 if (GET_CODE (op) == CONST_INT)
459 return xtensa_mask_immediate (INTVAL (op));
460
461 return register_operand (op, mode);
462}
463
464
465int
466extui_fldsz_operand (op, mode)
467 rtx op;
468 enum machine_mode mode ATTRIBUTE_UNUSED;
469{
470 return ((GET_CODE (op) == CONST_INT)
471 && xtensa_mask_immediate ((1 << INTVAL (op)) - 1));
472}
473
474
475int
476sext_operand (op, mode)
477 rtx op;
478 enum machine_mode mode;
479{
480 if (TARGET_SEXT)
481 return nonimmed_operand (op, mode);
482 return mem_operand (op, mode);
483}
484
485
486int
487sext_fldsz_operand (op, mode)
488 rtx op;
489 enum machine_mode mode ATTRIBUTE_UNUSED;
490{
491 return ((GET_CODE (op) == CONST_INT) && xtensa_tp7 (INTVAL (op) - 1));
492}
493
494
495int
496lsbitnum_operand (op, mode)
497 rtx op;
498 enum machine_mode mode ATTRIBUTE_UNUSED;
499{
500 if (GET_CODE (op) == CONST_INT)
501 {
502 return (BITS_BIG_ENDIAN
503 ? (INTVAL (op) == BITS_PER_WORD-1)
504 : (INTVAL (op) == 0));
505 }
506 return FALSE;
507}
508
509
510static int
511b4const_or_zero (v)
512 int v;
513{
514 if (v == 0)
515 return TRUE;
516 return xtensa_b4const (v);
517}
518
519
520int
521branch_operand (op, mode)
522 rtx op;
523 enum machine_mode mode;
524{
525 if (GET_CODE (op) == CONST_INT)
526 return b4const_or_zero (INTVAL (op));
527
528 return register_operand (op, mode);
529}
530
531
532int
533ubranch_operand (op, mode)
534 rtx op;
535 enum machine_mode mode;
536{
537 if (GET_CODE (op) == CONST_INT)
538 return xtensa_b4constu (INTVAL (op));
539
540 return register_operand (op, mode);
541}
542
543
544int
545call_insn_operand (op, mode)
546 rtx op;
547 enum machine_mode mode ATTRIBUTE_UNUSED;
548{
549 if ((GET_CODE (op) == REG)
550 && (op != arg_pointer_rtx)
551 && ((REGNO (op) < FRAME_POINTER_REGNUM)
552 || (REGNO (op) > LAST_VIRTUAL_REGISTER)))
553 return TRUE;
554
555 if (CONSTANT_ADDRESS_P (op))
556 {
557 /* Direct calls only allowed to static functions with PIC. */
558 return (!flag_pic || (GET_CODE (op) == SYMBOL_REF
559 && SYMBOL_REF_FLAG (op)));
560 }
561
562 return FALSE;
563}
564
565
566int
567move_operand (op, mode)
568 rtx op;
569 enum machine_mode mode;
570{
571 if (register_operand (op, mode))
572 return TRUE;
573
574 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
575 result in 0/1. */
576 if (GET_CODE (op) == CONSTANT_P_RTX)
577 return TRUE;
578
579 if (GET_CODE (op) == CONST_INT)
580 return xtensa_simm12b (INTVAL (op));
581
582 if (GET_CODE (op) == MEM)
583 return memory_address_p (mode, XEXP (op, 0));
584
585 return FALSE;
586}
587
588
589int
590smalloffset_mem_p (op)
591 rtx op;
592{
593 if (GET_CODE (op) == MEM)
594 {
595 rtx addr = XEXP (op, 0);
596 if (GET_CODE (addr) == REG)
597 return REG_OK_FOR_BASE_P (addr);
598 if (GET_CODE (addr) == PLUS)
599 {
600 rtx offset = XEXP (addr, 0);
601 if (GET_CODE (offset) != CONST_INT)
602 offset = XEXP (addr, 1);
603 if (GET_CODE (offset) != CONST_INT)
604 return FALSE;
605 return xtensa_lsi4x4 (INTVAL (offset));
606 }
607 }
608 return FALSE;
609}
610
611
612int
613smalloffset_double_mem_p (op)
614 rtx op;
615{
616 if (!smalloffset_mem_p (op))
617 return FALSE;
618 return smalloffset_mem_p (adjust_address (op, GET_MODE (op), 4));
619}
620
621
622int
623constantpool_address_p (addr)
624 rtx addr;
625{
626 rtx sym = addr;
627
628 if (GET_CODE (addr) == CONST)
629 {
630 rtx offset;
631
632 /* only handle (PLUS (SYM, OFFSET)) form */
633 addr = XEXP (addr, 0);
634 if (GET_CODE (addr) != PLUS)
635 return FALSE;
636
637 /* make sure the address is word aligned */
638 offset = XEXP (addr, 1);
639 if ((GET_CODE (offset) != CONST_INT)
640 || ((INTVAL (offset) & 3) != 0))
641 return FALSE;
642
643 sym = XEXP (addr, 0);
644 }
645
646 if ((GET_CODE (sym) == SYMBOL_REF)
647 && CONSTANT_POOL_ADDRESS_P (sym))
648 return TRUE;
649 return FALSE;
650}
651
652
653int
654constantpool_mem_p (op)
655 rtx op;
656{
657 if (GET_CODE (op) == MEM)
658 return constantpool_address_p (XEXP (op, 0));
659 return FALSE;
660}
661
662
663int
664non_const_move_operand (op, mode)
665 rtx op;
666 enum machine_mode mode;
667{
668 if (register_operand (op, mode))
669 return 1;
670 if (GET_CODE (op) == SUBREG)
671 op = SUBREG_REG (op);
672 if (GET_CODE (op) == MEM)
673 return memory_address_p (mode, XEXP (op, 0));
674 return FALSE;
675}
676
677
678/* Accept the floating point constant 1 in the appropriate mode. */
679
680int
681const_float_1_operand (op, mode)
682 rtx op;
683 enum machine_mode mode;
684{
685 REAL_VALUE_TYPE d;
686 static REAL_VALUE_TYPE onedf;
687 static REAL_VALUE_TYPE onesf;
688 static int one_initialized;
689
690 if ((GET_CODE (op) != CONST_DOUBLE)
691 || (mode != GET_MODE (op))
692 || (mode != DFmode && mode != SFmode))
693 return FALSE;
694
695 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
696
697 if (! one_initialized)
698 {
699 onedf = REAL_VALUE_ATOF ("1.0", DFmode);
700 onesf = REAL_VALUE_ATOF ("1.0", SFmode);
701 one_initialized = TRUE;
702 }
703
704 if (mode == DFmode)
705 return REAL_VALUES_EQUAL (d, onedf);
706 else
707 return REAL_VALUES_EQUAL (d, onesf);
708}
709
710
711int
712fpmem_offset_operand (op, mode)
713 rtx op;
714 enum machine_mode mode ATTRIBUTE_UNUSED;
715{
716 if (GET_CODE (op) == CONST_INT)
717 return xtensa_mem_offset (INTVAL (op), SFmode);
718 return 0;
719}
720
721
722void
723xtensa_extend_reg (dst, src)
724 rtx dst;
725 rtx src;
726{
727 rtx temp = gen_reg_rtx (SImode);
728 rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
729
730 /* generate paradoxical subregs as needed so that the modes match */
731 src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
732 dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
733
734 emit_insn (gen_ashlsi3 (temp, src, shift));
735 emit_insn (gen_ashrsi3 (dst, temp, shift));
736}
737
738
739void
740xtensa_load_constant (dst, src)
741 rtx dst;
742 rtx src;
743{
744 enum machine_mode mode = GET_MODE (dst);
745 src = force_const_mem (SImode, src);
746
747 /* PC-relative loads are always SImode so we have to add a SUBREG if that
748 is not the desired mode */
749
750 if (mode != SImode)
751 {
752 if (register_operand (dst, mode))
753 dst = simplify_gen_subreg (SImode, dst, mode, 0);
754 else
755 {
756 src = force_reg (SImode, src);
757 src = gen_lowpart_SUBREG (mode, src);
758 }
759 }
760
761 emit_move_insn (dst, src);
762}
763
764
765int
766branch_operator (x, mode)
767 rtx x;
768 enum machine_mode mode;
769{
770 if (GET_MODE (x) != mode)
771 return FALSE;
772
773 switch (GET_CODE (x))
774 {
775 case EQ:
776 case NE:
777 case LT:
778 case GE:
779 return TRUE;
780 default:
781 break;
782 }
783 return FALSE;
784}
785
786
787int
788ubranch_operator (x, mode)
789 rtx x;
790 enum machine_mode mode;
791{
792 if (GET_MODE (x) != mode)
793 return FALSE;
794
795 switch (GET_CODE (x))
796 {
797 case LTU:
798 case GEU:
799 return TRUE;
800 default:
801 break;
802 }
803 return FALSE;
804}
805
806
807int
808boolean_operator (x, mode)
809 rtx x;
810 enum machine_mode mode;
811{
812 if (GET_MODE (x) != mode)
813 return FALSE;
814
815 switch (GET_CODE (x))
816 {
817 case EQ:
818 case NE:
819 return TRUE;
820 default:
821 break;
822 }
823 return FALSE;
824}
825
826
/* Return nonzero if V is a string of 1 to MAX_MASK_SIZE consecutive
   one bits starting at bit 0 (1, 3, 7, ..., 0xffff).  */

int
xtensa_mask_immediate (v)
     int v;
{
#define MAX_MASK_SIZE 16
  int width;

  for (width = 0; width < MAX_MASK_SIZE; width++)
    {
      if ((v & 1) == 0)
	return 0;
      v >>= 1;
      if (v == 0)
	return 1;
    }

  /* More than MAX_MASK_SIZE low one bits.  */
  return 0;
}
845
846
847int
848xtensa_mem_offset (v, mode)
849 unsigned v;
850 enum machine_mode mode;
851{
852 switch (mode)
853 {
854 case BLKmode:
855 /* Handle the worst case for block moves. See xtensa_expand_block_move
856 where we emit an optimized block move operation if the block can be
857 moved in < "move_ratio" pieces. The worst case is when the block is
858 aligned but has a size of (3 mod 4) (does this happen?) so that the
859 last piece requires a byte load/store. */
860 return (xtensa_uimm8 (v) &&
861 xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
862
863 case QImode:
864 return xtensa_uimm8 (v);
865
866 case HImode:
867 return xtensa_uimm8x2 (v);
868
869 case DFmode:
870 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
871
872 default:
873 break;
874 }
875
876 return xtensa_uimm8x4 (v);
877}
878
879
880/* Make normal rtx_code into something we can index from an array */
881
882static enum internal_test
883map_test_to_internal_test (test_code)
884 enum rtx_code test_code;
885{
886 enum internal_test test = ITEST_MAX;
887
888 switch (test_code)
889 {
890 default: break;
891 case EQ: test = ITEST_EQ; break;
892 case NE: test = ITEST_NE; break;
893 case GT: test = ITEST_GT; break;
894 case GE: test = ITEST_GE; break;
895 case LT: test = ITEST_LT; break;
896 case LE: test = ITEST_LE; break;
897 case GTU: test = ITEST_GTU; break;
898 case GEU: test = ITEST_GEU; break;
899 case LTU: test = ITEST_LTU; break;
900 case LEU: test = ITEST_LEU; break;
901 }
902
903 return test;
904}
905
906
907/* Generate the code to compare two integer values. The return value is
908 the comparison expression. */
909
910static rtx
911gen_int_relational (test_code, cmp0, cmp1, p_invert)
912 enum rtx_code test_code; /* relational test (EQ, etc) */
913 rtx cmp0; /* first operand to compare */
914 rtx cmp1; /* second operand to compare */
915 int *p_invert; /* whether branch needs to reverse its test */
916{
917 struct cmp_info {
918 enum rtx_code test_code; /* test code to use in insn */
919 int (*const_range_p) PARAMS ((int)); /* predicate function to check range */
920 int const_add; /* constant to add (convert LE -> LT) */
921 int reverse_regs; /* reverse registers in test */
922 int invert_const; /* != 0 if invert value if cmp1 is constant */
923 int invert_reg; /* != 0 if invert value if cmp1 is register */
924 int unsignedp; /* != 0 for unsigned comparisons. */
925 };
926
927 static struct cmp_info info[ (int)ITEST_MAX ] = {
928
929 { EQ, b4const_or_zero, 0, 0, 0, 0, 0 }, /* EQ */
930 { NE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* NE */
931
932 { LT, b4const_or_zero, 1, 1, 1, 0, 0 }, /* GT */
933 { GE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* GE */
934 { LT, b4const_or_zero, 0, 0, 0, 0, 0 }, /* LT */
935 { GE, b4const_or_zero, 1, 1, 1, 0, 0 }, /* LE */
936
937 { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* GTU */
938 { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* GEU */
939 { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* LTU */
940 { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* LEU */
941 };
942
943 enum internal_test test;
944 enum machine_mode mode;
945 struct cmp_info *p_info;
946
947 test = map_test_to_internal_test (test_code);
948 if (test == ITEST_MAX)
949 abort ();
950
951 p_info = &info[ (int)test ];
952
953 mode = GET_MODE (cmp0);
954 if (mode == VOIDmode)
955 mode = GET_MODE (cmp1);
956
957 /* Make sure we can handle any constants given to us. */
958 if (GET_CODE (cmp1) == CONST_INT)
959 {
960 HOST_WIDE_INT value = INTVAL (cmp1);
961 unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
962
963 /* if the immediate overflows or does not fit in the immediate field,
964 spill it to a register */
965
966 if ((p_info->unsignedp ?
967 (uvalue + p_info->const_add > uvalue) :
968 (value + p_info->const_add > value)) != (p_info->const_add > 0))
969 {
970 cmp1 = force_reg (mode, cmp1);
971 }
972 else if (!(p_info->const_range_p) (value + p_info->const_add))
973 {
974 cmp1 = force_reg (mode, cmp1);
975 }
976 }
977 else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
978 {
979 cmp1 = force_reg (mode, cmp1);
980 }
981
982 /* See if we need to invert the result. */
983 *p_invert = ((GET_CODE (cmp1) == CONST_INT)
984 ? p_info->invert_const
985 : p_info->invert_reg);
986
987 /* Comparison to constants, may involve adding 1 to change a LT into LE.
988 Comparison between two registers, may involve switching operands. */
989 if (GET_CODE (cmp1) == CONST_INT)
990 {
991 if (p_info->const_add != 0)
992 cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);
993
994 }
995 else if (p_info->reverse_regs)
996 {
997 rtx temp = cmp0;
998 cmp0 = cmp1;
999 cmp1 = temp;
1000 }
1001
1002 return gen_rtx (p_info->test_code, VOIDmode, cmp0, cmp1);
1003}
1004
1005
1006/* Generate the code to compare two float values. The return value is
1007 the comparison expression. */
1008
1009static rtx
1010gen_float_relational (test_code, cmp0, cmp1)
1011 enum rtx_code test_code; /* relational test (EQ, etc) */
1012 rtx cmp0; /* first operand to compare */
1013 rtx cmp1; /* second operand to compare */
1014{
1015 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx));
1016 rtx brtmp;
1017 int reverse_regs, invert;
1018
1019 switch (test_code)
1020 {
1021 case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
1022 case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
1023 case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
1024 case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
1025 case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
1026 case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
1027 default:
1028 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1029 reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
1030 }
1031
1032 if (reverse_regs)
1033 {
1034 rtx temp = cmp0;
1035 cmp0 = cmp1;
1036 cmp1 = temp;
1037 }
1038
1039 brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
1040 emit_insn (gen_fn (brtmp, cmp0, cmp1));
1041
1042 return gen_rtx (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
1043}
1044
1045
1046void
1047xtensa_expand_conditional_branch (operands, test_code)
1048 rtx *operands;
1049 enum rtx_code test_code;
1050{
1051 enum cmp_type type = branch_type;
1052 rtx cmp0 = branch_cmp[0];
1053 rtx cmp1 = branch_cmp[1];
1054 rtx cmp;
1055 int invert;
1056 rtx label1, label2;
1057
1058 switch (type)
1059 {
1060 case CMP_DF:
1061 default:
1062 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1063
1064 case CMP_SI:
1065 invert = FALSE;
1066 cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
1067 break;
1068
1069 case CMP_SF:
1070 if (!TARGET_HARD_FLOAT)
1071 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1072 invert = FALSE;
1073 cmp = gen_float_relational (test_code, cmp0, cmp1);
1074 break;
1075 }
1076
1077 /* Generate the branch. */
1078
1079 label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
1080 label2 = pc_rtx;
1081
1082 if (invert)
1083 {
1084 label2 = label1;
1085 label1 = pc_rtx;
1086 }
1087
1088 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
1089 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
1090 label1,
1091 label2)));
1092}
1093
1094
1095static rtx
1096gen_conditional_move (cmp)
1097 rtx cmp;
1098{
1099 enum rtx_code code = GET_CODE (cmp);
1100 rtx op0 = branch_cmp[0];
1101 rtx op1 = branch_cmp[1];
1102
1103 if (branch_type == CMP_SI)
1104 {
1105 /* Jump optimization calls get_condition() which canonicalizes
1106 comparisons like (GE x <const>) to (GT x <const-1>).
1107 Transform those comparisons back to GE, since that is the
1108 comparison supported in Xtensa. We shouldn't have to
1109 transform <LE x const> comparisons, because neither
1110 xtensa_expand_conditional_branch() nor get_condition() will
1111 produce them. */
1112
1113 if ((code == GT) && (op1 == constm1_rtx))
1114 {
1115 code = GE;
1116 op1 = const0_rtx;
1117 }
1118 cmp = gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
1119
1120 if (boolean_operator (cmp, VOIDmode))
1121 {
1122 /* swap the operands to make const0 second */
1123 if (op0 == const0_rtx)
1124 {
1125 op0 = op1;
1126 op1 = const0_rtx;
1127 }
1128
1129 /* if not comparing against zero, emit a comparison (subtract) */
1130 if (op1 != const0_rtx)
1131 {
1132 op0 = expand_binop (SImode, sub_optab, op0, op1,
1133 0, 0, OPTAB_LIB_WIDEN);
1134 op1 = const0_rtx;
1135 }
1136 }
1137 else if (branch_operator (cmp, VOIDmode))
1138 {
1139 /* swap the operands to make const0 second */
1140 if (op0 == const0_rtx)
1141 {
1142 op0 = op1;
1143 op1 = const0_rtx;
1144
1145 switch (code)
1146 {
1147 case LT: code = GE; break;
1148 case GE: code = LT; break;
1149 default: abort ();
1150 }
1151 }
1152
1153 if (op1 != const0_rtx)
1154 return 0;
1155 }
1156 else
1157 return 0;
1158
1159 return gen_rtx (code, VOIDmode, op0, op1);
1160 }
1161
1162 if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
1163 return gen_float_relational (code, op0, op1);
1164
1165 return 0;
1166}
1167
1168
1169int
1170xtensa_expand_conditional_move (operands, isflt)
1171 rtx *operands;
1172 int isflt;
1173{
1174 rtx cmp;
1175 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1176
1177 if (!(cmp = gen_conditional_move (operands[1])))
1178 return 0;
1179
1180 if (isflt)
1181 gen_fn = (branch_type == CMP_SI
1182 ? gen_movsfcc_internal0
1183 : gen_movsfcc_internal1);
1184 else
1185 gen_fn = (branch_type == CMP_SI
1186 ? gen_movsicc_internal0
1187 : gen_movsicc_internal1);
1188
1189 emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
1190 operands[2], operands[3], cmp));
1191 return 1;
1192}
1193
1194
1195int
1196xtensa_expand_scc (operands)
1197 rtx *operands;
1198{
1199 rtx dest = operands[0];
1200 rtx cmp = operands[1];
1201 rtx one_tmp, zero_tmp;
1202 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1203
1204 if (!(cmp = gen_conditional_move (cmp)))
1205 return 0;
1206
1207 one_tmp = gen_reg_rtx (SImode);
1208 zero_tmp = gen_reg_rtx (SImode);
1209 emit_insn (gen_movsi (one_tmp, const_true_rtx));
1210 emit_insn (gen_movsi (zero_tmp, const0_rtx));
1211
1212 gen_fn = (branch_type == CMP_SI
1213 ? gen_movsicc_internal0
1214 : gen_movsicc_internal1);
1215 emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
1216 return 1;
1217}
1218
1219
1220/* Emit insns to move operands[1] into operands[0].
1221
1222 Return 1 if we have written out everything that needs to be done to
1223 do the move. Otherwise, return 0 and the caller will emit the move
1224 normally. */
1225
1226int
xtensa_emit_move_sequence (operands, mode)
     rtx *operands;
     enum machine_mode mode;
{
  /* A constant source that is not a valid signed 12-bit immediate
     (and not the special CONSTANT_P_RTX marker) cannot be moved
     directly; load it from the constant pool instead.  */
  if (CONSTANT_P (operands[1])
      && GET_CODE (operands[1]) != CONSTANT_P_RTX
      && (GET_CODE (operands[1]) != CONST_INT
	  || !xtensa_simm12b (INTVAL (operands[1]))))
    {
      xtensa_load_constant (operands[0], operands[1]);
      return 1;
    }

  if (!(reload_in_progress | reload_completed))
    {
      /* Before reload, make sure at least one operand is a register
	 (excluding the MAC16 accumulator).  */
      if (!non_acc_reg_operand (operands[0], mode)
	  && !non_acc_reg_operand (operands[1], mode))
	operands[1] = force_reg (mode, operands[1]);

      /* Check if this move is copying an incoming argument in a7.  If
	 so, emit the move, followed by the special "set_frame_ptr"
	 unspec_volatile insn, at the very beginning of the function.
	 This is necessary because the register allocator will ignore
	 conflicts with a7 and may assign some other pseudo to a7.  If
	 that pseudo was assigned prior to this move, it would clobber
	 the incoming argument in a7.  By copying the argument out of
	 a7 as the very first thing, and then immediately following
	 that with an unspec_volatile to keep the scheduler away, we
	 should avoid any problems.  */

      if (a7_overlap_mentioned_p (operands[1]))
	{
	  rtx mov;
	  switch (mode)
	    {
	    case SImode:
	      mov = gen_movsi_internal (operands[0], operands[1]);
	      break;
	    case HImode:
	      mov = gen_movhi_internal (operands[0], operands[1]);
	      break;
	    case QImode:
	      mov = gen_movqi_internal (operands[0], operands[1]);
	      break;
	    default:
	      abort ();
	    }

	  /* Insert the instructions before any other argument copies.
	     (The set_frame_ptr insn comes _after_ the move, so push it
	     out first.) */
	  push_topmost_sequence ();
	  emit_insn_after (gen_set_frame_ptr (), get_insns ());
	  emit_insn_after (mov, get_insns ());
	  pop_topmost_sequence ();

	  /* The move has been emitted by hand; tell the caller not to
	     emit it again.  */
	  return 1;
	}
    }

  /* During reload we don't want to emit (subreg:X (mem:Y)) since that
     instruction won't be recognized after reload.  So we remove the
     subreg and adjust mem accordingly.  */
  if (reload_in_progress)
    {
      operands[0] = fixup_subreg_mem (operands[0]);
      operands[1] = fixup_subreg_mem (operands[1]);
    }
  return 0;
}
1297
1298static rtx
1299fixup_subreg_mem (x)
1300 rtx x;
1301{
1302 if (GET_CODE (x) == SUBREG
1303 && GET_CODE (SUBREG_REG (x)) == REG
1304 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1305 {
1306 rtx temp =
1307 gen_rtx_SUBREG (GET_MODE (x),
1308 reg_equiv_mem [REGNO (SUBREG_REG (x))],
1309 SUBREG_BYTE (x));
1310 x = alter_subreg (&temp);
1311 }
1312 return x;
1313}
1314
1315
1316/* Try to expand a block move operation to an RTL block move instruction.
1317 If not optimizing or if the block size is not a constant or if the
1318 block is small, the expansion fails and GCC falls back to calling
1319 memcpy().
1320
1321 operands[0] is the destination
1322 operands[1] is the source
1323 operands[2] is the length
1324 operands[3] is the alignment */
1325
1326int
1327xtensa_expand_block_move (operands)
1328 rtx *operands;
1329{
1330 rtx dest = operands[0];
1331 rtx src = operands[1];
1332 int bytes = INTVAL (operands[2]);
1333 int align = XINT (operands[3], 0);
1334 int num_pieces, move_ratio;
1335
1336 /* If this is not a fixed size move, just call memcpy */
1337 if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1338 return 0;
1339
1340 /* Anything to move? */
1341 if (bytes <= 0)
1342 return 1;
1343
1344 if (align > MOVE_MAX)
1345 align = MOVE_MAX;
1346
1347 /* decide whether to expand inline based on the optimization level */
1348 move_ratio = 4;
1349 if (optimize > 2)
1350 move_ratio = LARGEST_MOVE_RATIO;
1351 num_pieces = (bytes / align) + (bytes % align); /* close enough anyway */
1352 if (num_pieces >= move_ratio)
1353 return 0;
1354
1355 /* make sure the memory addresses are valid */
1356 operands[0] = change_address (dest, VOIDmode, NULL);
1357 operands[1] = change_address (src, VOIDmode, NULL);
1358
1359 emit_insn (gen_movstrsi_internal (operands[0], operands[1],
1360 operands[2], operands[3]));
1361 return 1;
1362}
1363
1364
1365/* Emit a sequence of instructions to implement a block move, trying
1366 to hide load delay slots as much as possible. Load N values into
1367 temporary registers, store those N values, and repeat until the
1368 complete block has been moved. N=delay_slots+1 */
1369
/* One queued load or store: the assembler template (e.g. "l32i\t%0, %1")
   plus its two operands (operand 0 = register, operand 1 = memory).  */
struct meminsnbuf {
  char template[30];
  rtx operands[2];
};
1374
void
xtensa_emit_block_move (operands, tmpregs, delay_slots)
     rtx *operands;		/* dest mem, src mem, length, alignment */
     rtx *tmpregs;		/* scratch regs; at least delay_slots+1 of them */
     int delay_slots;
{
  rtx dest = operands[0];
  rtx src = operands[1];
  int bytes = INTVAL (operands[2]);
  int align = XINT (operands[3], 0);
  rtx from_addr = XEXP (src, 0);
  rtx to_addr = XEXP (dest, 0);
  int from_struct = MEM_IN_STRUCT_P (src);
  int to_struct = MEM_IN_STRUCT_P (dest);
  int offset = 0;
  int chunk_size, item_size;
  struct meminsnbuf *ldinsns, *stinsns;
  const char *ldname, *stname;
  enum machine_mode mode;

  /* Items are moved align bytes at a time, N = delay_slots+1 items
     per chunk so the stores can hide the load delay slots.  */
  if (align > MOVE_MAX)
    align = MOVE_MAX;
  item_size = align;
  chunk_size = delay_slots + 1;

  /* Per-chunk buffers of queued loads and stores.  */
  ldinsns = (struct meminsnbuf *)
    alloca (chunk_size * sizeof (struct meminsnbuf));
  stinsns = (struct meminsnbuf *)
    alloca (chunk_size * sizeof (struct meminsnbuf));

  /* Pick the widest loadable/storable mode for the item size, and the
     matching opcode names from the tables set up in override_options.  */
  mode = xtensa_find_mode_for_size (item_size);
  item_size = GET_MODE_SIZE (mode);
  ldname = xtensa_ld_opcodes[(int) mode];
  stname = xtensa_st_opcodes[(int) mode];

  while (bytes > 0)
    {
      int n;

      /* Queue up to chunk_size load/store pairs.  */
      for (n = 0; n < chunk_size; n++)
	{
	  rtx addr, mem;

	  if (bytes == 0)
	    {
	      /* Partial final chunk: only emit the first n pairs.  */
	      chunk_size = n;
	      break;
	    }

	  if (bytes < item_size)
	    {
	      /* find a smaller item_size which we can load & store */
	      item_size = bytes;
	      mode = xtensa_find_mode_for_size (item_size);
	      item_size = GET_MODE_SIZE (mode);
	      ldname = xtensa_ld_opcodes[(int) mode];
	      stname = xtensa_st_opcodes[(int) mode];
	    }

	  /* record the load instruction opcode and operands */
	  addr = plus_constant (from_addr, offset);
	  mem = gen_rtx_MEM (mode, addr);
	  if (! memory_address_p (mode, addr))
	    abort ();
	  MEM_IN_STRUCT_P (mem) = from_struct;
	  ldinsns[n].operands[0] = tmpregs[n];
	  ldinsns[n].operands[1] = mem;
	  sprintf (ldinsns[n].template, "%s\t%%0, %%1", ldname);

	  /* record the store instruction opcode and operands */
	  addr = plus_constant (to_addr, offset);
	  mem = gen_rtx_MEM (mode, addr);
	  if (! memory_address_p (mode, addr))
	    abort ();
	  MEM_IN_STRUCT_P (mem) = to_struct;
	  stinsns[n].operands[0] = tmpregs[n];
	  stinsns[n].operands[1] = mem;
	  sprintf (stinsns[n].template, "%s\t%%0, %%1", stname);

	  offset += item_size;
	  bytes -= item_size;
	}

      /* now output the loads followed by the stores */
      for (n = 0; n < chunk_size; n++)
	output_asm_insn (ldinsns[n].template, ldinsns[n].operands);
      for (n = 0; n < chunk_size; n++)
	output_asm_insn (stinsns[n].template, stinsns[n].operands);
    }
}
1465
1466
1467static enum machine_mode
1468xtensa_find_mode_for_size (item_size)
1469 unsigned item_size;
1470{
1471 enum machine_mode mode, tmode;
1472
1473 while (1)
1474 {
1475 mode = VOIDmode;
1476
1477 /* find mode closest to but not bigger than item_size */
1478 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1479 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1480 if (GET_MODE_SIZE (tmode) <= item_size)
1481 mode = tmode;
1482 if (mode == VOIDmode)
1483 abort ();
1484
1485 item_size = GET_MODE_SIZE (mode);
1486
1487 if (xtensa_ld_opcodes[(int) mode]
1488 && xtensa_st_opcodes[(int) mode])
1489 break;
1490
1491 /* cannot load & store this mode; try something smaller */
1492 item_size -= 1;
1493 }
1494
1495 return mode;
1496}
1497
1498
1499void
1500xtensa_expand_nonlocal_goto (operands)
1501 rtx *operands;
1502{
1503 rtx goto_handler = operands[1];
1504 rtx containing_fp = operands[3];
1505
1506 /* generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1507 is too big to generate in-line */
1508
1509 if (GET_CODE (containing_fp) != REG)
1510 containing_fp = force_reg (Pmode, containing_fp);
1511
1512 goto_handler = replace_rtx (copy_rtx (goto_handler),
1513 virtual_stack_vars_rtx,
1514 containing_fp);
1515
1516 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1517 0, VOIDmode, 2,
1518 containing_fp, Pmode,
1519 goto_handler, Pmode);
1520}
1521
1522
/* Allocate the zero-initialized Xtensa-specific data for function P.
   Installed as init_machine_status in override_options.  */

static void
xtensa_init_machine_status (p)
     struct function *p;
{
  p->machine = (struct machine_function *)
    xcalloc (1, sizeof (struct machine_function));
}
1530
1531
/* Release the Xtensa-specific data for function P and clear the
   pointer to guard against stale use.  Installed as
   free_machine_status in override_options.  */

static void
xtensa_free_machine_status (p)
     struct function *p;
{
  free (p->machine);
  p->machine = NULL;
}
1539
1540
void
xtensa_setup_frame_addresses ()
{
  /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
  cfun->machine->accesses_prev_frame = 1;

  /* NOTE(review): presumably this libgcc helper spills live register
     windows to the stack so that outer frames can be addressed --
     confirm against libgcc's xtensa support code.  */
  emit_library_call
    (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
     0, VOIDmode, 0);
}
1551
1552
/* Emit the assembly for the end of a zero-cost loop. Normally we just emit
   a comment showing where the end of the loop is. However, if there is a
   label or a branch at the end of the loop then we need to place a nop
   there. If the loop ends with a label we need the nop so that branches
   targeting that label will target the nop (and thus remain in the loop),
   instead of targeting the instruction after the loop (and thus exiting
   the loop). If the loop ends with a branch, we need the nop in case the
   branch is targeting a location inside the loop. When the branch
   executes it will cause the loop count to be decremented even if it is
   taken (because it is the last instruction in the loop), so we need to
   nop after the branch to prevent the loop count from being decremented
   when the branch is taken. */
1565
1566void
1567xtensa_emit_loop_end (insn, operands)
1568 rtx insn;
1569 rtx *operands;
1570{
1571 char done = 0;
1572
1573 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1574 {
1575 switch (GET_CODE (insn))
1576 {
1577 case NOTE:
1578 case BARRIER:
1579 break;
1580
1581 case CODE_LABEL:
1582 output_asm_insn ("nop.n", operands);
1583 done = 1;
1584 break;
1585
1586 default:
1587 {
1588 rtx body = PATTERN (insn);
1589
1590 if (GET_CODE (body) == JUMP_INSN)
1591 {
1592 output_asm_insn ("nop.n", operands);
1593 done = 1;
1594 }
1595 else if ((GET_CODE (body) != USE)
1596 && (GET_CODE (body) != CLOBBER))
1597 done = 1;
1598 }
1599 break;
1600 }
1601 }
1602
1603 output_asm_insn ("# loop end for %0", operands);
1604}
1605
1606
1607char *
1608xtensa_emit_call (callop, operands)
1609 int callop;
1610 rtx *operands;
1611{
1612 char *result = (char *) malloc (64);
1613 rtx tgt = operands[callop];
1614
1615 if (GET_CODE (tgt) == CONST_INT)
1616 sprintf (result, "call8\t0x%x", INTVAL (tgt));
1617 else if (register_operand (tgt, VOIDmode))
1618 sprintf (result, "callx8\t%%%d", callop);
1619 else
1620 sprintf (result, "call8\t%%%d", callop);
1621
1622 return result;
1623}
1624
1625
1626/* Return the stabs register number to use for 'regno'. */
1627
1628int
1629xtensa_dbx_register_number (regno)
1630 int regno;
1631{
1632 int first = -1;
1633
1634 if (GP_REG_P (regno)) {
1635 regno -= GP_REG_FIRST;
1636 first = 0;
1637 }
1638 else if (BR_REG_P (regno)) {
1639 regno -= BR_REG_FIRST;
1640 first = 16;
1641 }
1642 else if (FP_REG_P (regno)) {
1643 regno -= FP_REG_FIRST;
1644 /* The current numbering convention is that TIE registers are
1645 numbered in libcc order beginning with 256. We can't guarantee
1646 that the FP registers will come first, so the following is just
1647 a guess. It seems like we should make a special case for FP
1648 registers and give them fixed numbers < 256. */
1649 first = 256;
1650 }
1651 else if (ACC_REG_P (regno))
1652 {
1653 first = 0;
1654 regno = -1;
1655 }
1656
1657 /* When optimizing, we sometimes get asked about pseudo-registers
1658 that don't represent hard registers. Return 0 for these. */
1659 if (first == -1)
1660 return 0;
1661
1662 return first + regno;
1663}
1664
1665
1666/* Argument support functions. */
1667
1668/* Initialize CUMULATIVE_ARGS for a function. */
1669
void
init_cumulative_args (cum, fntype, libname)
     CUMULATIVE_ARGS *cum;		/* argument info to initialize */
     tree fntype ATTRIBUTE_UNUSED;	/* tree ptr for function decl */
     rtx libname ATTRIBUTE_UNUSED;	/* SYMBOL_REF of library name or 0 */
{
  /* No argument words have been consumed yet.  */
  cum->arg_words = 0;
}
1678
1679/* Advance the argument to the next argument position. */
1680
1681void
1682function_arg_advance (cum, mode, type)
1683 CUMULATIVE_ARGS *cum; /* current arg information */
1684 enum machine_mode mode; /* current arg mode */
1685 tree type; /* type of the argument or 0 if lib support */
1686{
1687 int words, max;
1688 int *arg_words;
1689
1690 arg_words = &cum->arg_words;
1691 max = MAX_ARGS_IN_REGISTERS;
1692
1693 words = (((mode != BLKmode)
1694 ? (int) GET_MODE_SIZE (mode)
1695 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1696
1697 if ((*arg_words + words > max) && (*arg_words < max))
1698 *arg_words = max;
1699
1700 *arg_words += words;
1701}
1702
1703
1704/* Return an RTL expression containing the register for the given mode,
1705 or 0 if the argument is to be passed on the stack. */
1706
rtx
function_arg (cum, mode, type, incoming_p)
     CUMULATIVE_ARGS *cum;	/* current arg information */
     enum machine_mode mode;	/* current arg mode */
     tree type;		/* type of the argument or 0 if lib support */
     int incoming_p;		/* computing the incoming registers? */
{
  int regbase, words, max;
  int *arg_words;
  int regno;
  enum machine_mode result_mode;

  arg_words = &cum->arg_words;
  regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
  max = MAX_ARGS_IN_REGISTERS;

  /* Argument size in words, rounded up.  */
  words = (((mode != BLKmode)
	    ? (int) GET_MODE_SIZE (mode)
	    : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  /* Arguments with more than word alignment start on an even word
     boundary (note: this rounds up the caller's cumulative count).  */
  if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
    *arg_words += (*arg_words & 1);

  /* Out of argument registers: this argument goes on the stack.  */
  if (*arg_words + words > max)
    return (rtx)0;

  regno = regbase + *arg_words;
  result_mode = (mode == BLKmode ? TYPE_MODE (type) : mode);

  /* We need to make sure that references to a7 are represented with
     rtx that is not equal to hard_frame_pointer_rtx.  For BLKmode and
     modes bigger than 2 words (because we only have patterns for
     modes of 2 words or smaller), we can't control the expansion
     unless we explicitly list the individual registers in a PARALLEL.  */

  if ((mode == BLKmode || words > 2)
      && regno < A7_REG
      && regno + words > A7_REG)
    {
      rtx result;
      int n;

      /* One EXPR_LIST entry per word; gen_raw_REG gives each word a
	 distinct REG rtx so the a7 entry is never shared with
	 hard_frame_pointer_rtx.  */
      result = gen_rtx_PARALLEL (result_mode, rtvec_alloc (words));
      for (n = 0; n < words; n++)
	{
	  XVECEXP (result, 0, n) =
	    gen_rtx_EXPR_LIST (VOIDmode,
			       gen_raw_REG (SImode, regno + n),
			       GEN_INT (n * UNITS_PER_WORD));
	}
      return result;
    }

  return gen_raw_REG (result_mode, regno);
}
1762
1763
void
override_options ()
{
  int regno;
  enum machine_mode mode;

  /* The FP option's compare results live in boolean registers.  */
  if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
    error ("boolean registers required for the floating-point option");

  /* set up the tables of ld/st opcode names for block moves */
  xtensa_ld_opcodes[(int) SImode] = "l32i";
  xtensa_ld_opcodes[(int) HImode] = "l16ui";
  xtensa_ld_opcodes[(int) QImode] = "l8ui";
  xtensa_st_opcodes[(int) SImode] = "s32i";
  xtensa_st_opcodes[(int) HImode] = "s16i";
  xtensa_st_opcodes[(int) QImode] = "s8i";

  /* Map constraint letters to register classes; letters for optional
     ISA features degrade to NO_REGS when the feature is disabled.  */
  xtensa_char_to_class['q'] = SP_REG;
  xtensa_char_to_class['a'] = GR_REGS;
  xtensa_char_to_class['b'] = ((TARGET_BOOLEANS) ? BR_REGS : NO_REGS);
  xtensa_char_to_class['f'] = ((TARGET_HARD_FLOAT) ? FP_REGS : NO_REGS);
  xtensa_char_to_class['A'] = ((TARGET_MAC16) ? ACC_REG : NO_REGS);
  xtensa_char_to_class['B'] = ((TARGET_SEXT) ? GR_REGS : NO_REGS);
  xtensa_char_to_class['C'] = ((TARGET_MUL16) ? GR_REGS: NO_REGS);
  xtensa_char_to_class['D'] = ((TARGET_DENSITY) ? GR_REGS: NO_REGS);
  xtensa_char_to_class['d'] = ((TARGET_DENSITY) ? AR_REGS: NO_REGS);

  /* Set up array giving whether a given register can hold a given mode. */
  for (mode = VOIDmode;
       mode != MAX_MACHINE_MODE;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int size = GET_MODE_SIZE (mode);
      enum mode_class class = GET_MODE_CLASS (mode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	{
	  int temp;

	  if (ACC_REG_P (regno))
	    /* The MAC16 accumulator holds word-sized (or smaller)
	       integer modes only.  */
	    temp = (TARGET_MAC16 &&
		    (class == MODE_INT) && (size <= UNITS_PER_WORD));
	  else if (GP_REG_P (regno))
	    /* Multi-word values must start on an even register.  */
	    temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
	  else if (FP_REG_P (regno))
	    temp = (TARGET_HARD_FLOAT && (mode == SFmode));
	  else if (BR_REG_P (regno))
	    temp = (TARGET_BOOLEANS && (mode == CCmode));
	  else
	    temp = FALSE;

	  xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
	}
    }

  init_machine_status = xtensa_init_machine_status;
  free_machine_status = xtensa_free_machine_status;

  /* Check PIC settings.  There's no need for -fPIC on Xtensa and
     some targets need to always use PIC.  */
  if (XTENSA_ALWAYS_PIC)
    {
      if (flag_pic)
	warning ("-f%s ignored (all code is position independent)",
		 (flag_pic > 1 ? "PIC" : "pic"));
      flag_pic = 1;
    }
  if (flag_pic > 1)
    flag_pic = 1;
}
1834
1835
1836/* A C compound statement to output to stdio stream STREAM the
1837 assembler syntax for an instruction operand X. X is an RTL
1838 expression.
1839
1840 CODE is a value that can be used to specify one of several ways
1841 of printing the operand. It is used when identical operands
1842 must be printed differently depending on the context. CODE
1843 comes from the '%' specification that was used to request
1844 printing of the operand. If the specification was just '%DIGIT'
1845 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1846 is the ASCII code for LTR.
1847
1848 If X is a register, this macro should print the register's name.
1849 The names can be found in an array 'reg_names' whose type is
1850 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
1851
1852 When the machine description has a specification '%PUNCT' (a '%'
1853 followed by a punctuation character), this macro is called with
1854 a null pointer for X and the punctuation character for CODE.
1855
1856 'a', 'c', 'l', and 'n' are reserved.
1857
1858 The Xtensa specific codes are:
1859
1860 'd' CONST_INT, print as signed decimal
1861 'x' CONST_INT, print as signed hexadecimal
1862 'K' CONST_INT, print number of bits in mask for EXTUI
1863 'R' CONST_INT, print (X & 0x1f)
1864 'L' CONST_INT, print ((32 - X) & 0x1f)
1865 'D' REG, print second register of double-word register operand
1866 'N' MEM, print address of next word following a memory operand
1867 'v' MEM, if memory reference is volatile, output a MEMW before it
1868*/
1869
/* Print VAL to FILE as a "nice" hexadecimal constant: magnitudes
   below 0xa are printed in plain decimal, and negative values keep an
   explicit minus sign in front of the "0x" prefix.  */

static void
printx (file, val)
     FILE *file;
     signed int val;
{
  if (val > -0xa && val < 0xa)
    fprintf (file, "%d", val);
  else if (val >= 0)
    fprintf (file, "0x%x", val);
  else
    fprintf (file, "-0x%x", -val);
}
1883
1884
void
print_operand (file, op, letter)
     FILE *file;		/* file to write to */
     rtx op;		/* operand to print */
     int letter;		/* %<letter> or 0 */
{
  enum rtx_code code;

  if (! op)
    error ("PRINT_OPERAND null pointer");

  code = GET_CODE (op);
  switch (code)
    {
    case REG:
    case SUBREG:
      {
	int regnum = xt_true_regnum (op);
	/* 'D': the second register of a double-word operand.  */
	if (letter == 'D')
	  regnum++;
	fprintf (file, "%s", reg_names[regnum]);
	break;
      }

    case MEM:
      /*
       * For a volatile memory reference, emit a MEMW before the
       * load or store.
       */
      if (letter == 'v')
	{
	  if (MEM_VOLATILE_P (op) && TARGET_SERIALIZE_VOLATILE)
	    fprintf (file, "memw\n\t");
	  break;
	}
      else if (letter == 'N')
	/* 'N': address of the next word after this memory operand.  */
	op = adjust_address (op, GET_MODE (op), 4);

      output_address (XEXP (op, 0));
      break;

    case CONST_INT:
      switch (letter)
	{
	case 'K':
	  {
	    /* 'K': number of low-order one bits, for the EXTUI mask;
	       the value must be a contiguous mask of 1-16 bits.  */
	    int num_bits = 0;
	    unsigned val = INTVAL (op);
	    while (val & 1)
	      {
		num_bits += 1;
		val = val >> 1;
	      }
	    if ((val != 0) || (num_bits == 0) || (num_bits > 16))
	      fatal_insn ("invalid mask", op);

	    fprintf (file, "%d", num_bits);
	    break;
	  }

	case 'L':
	  /* 'L': 32 minus the value, modulo 32 (rotate complement).  */
	  fprintf (file, "%d", (32 - INTVAL (op)) & 0x1f);
	  break;

	case 'R':
	  /* 'R': value modulo 32 (shift amount).  */
	  fprintf (file, "%d", INTVAL (op) & 0x1f);
	  break;

	case 'x':
	  printx (file, INTVAL (op));
	  break;

	case 'd':
	default:
	  fprintf (file, "%d", INTVAL (op));
	  break;

	}
      break;

    default:
      /* Symbolic constants, labels, etc.  */
      output_addr_const (file, op);
    }
}
1969
1970
1971/* A C compound statement to output to stdio stream STREAM the
1972 assembler syntax for an instruction operand that is a memory
1973 reference whose address is ADDR. ADDR is an RTL expression.
1974
1975 On some machines, the syntax for a symbolic address depends on
1976 the section that the address refers to. On these machines,
1977 define the macro 'ENCODE_SECTION_INFO' to store the information
1978 into the 'symbol_ref', and then check for it here. */
1979
1980void
1981print_operand_address (file, addr)
1982 FILE *file;
1983 rtx addr;
1984{
1985 if (!addr)
1986 error ("PRINT_OPERAND_ADDRESS, null pointer");
1987
1988 switch (GET_CODE (addr))
1989 {
1990 default:
1991 fatal_insn ("invalid address", addr);
1992 break;
1993
1994 case REG:
1995 fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
1996 break;
1997
1998 case PLUS:
1999 {
2000 rtx reg = (rtx)0;
2001 rtx offset = (rtx)0;
2002 rtx arg0 = XEXP (addr, 0);
2003 rtx arg1 = XEXP (addr, 1);
2004
2005 if (GET_CODE (arg0) == REG)
2006 {
2007 reg = arg0;
2008 offset = arg1;
2009 }
2010 else if (GET_CODE (arg1) == REG)
2011 {
2012 reg = arg1;
2013 offset = arg0;
2014 }
2015 else
2016 fatal_insn ("no register in address", addr);
2017
2018 if (CONSTANT_P (offset))
2019 {
2020 fprintf (file, "%s, ", reg_names [REGNO (reg)]);
2021 output_addr_const (file, offset);
2022 }
2023 else
2024 fatal_insn ("address offset not a constant", addr);
2025 }
2026 break;
2027
2028 case LABEL_REF:
2029 case SYMBOL_REF:
2030 case CONST_INT:
2031 case CONST:
2032 output_addr_const (file, addr);
2033 break;
2034 }
2035}
2036
2037
2038/* Emit either a label, .comm, or .lcomm directive. */
2039
void
xtensa_declare_object (file, name, init_string, final_string, size)
     FILE *file;
     char *name;
     char *init_string;		/* emitted before the name */
     char *final_string;	/* printf format consuming SIZE */
     int size;
{
  fputs (init_string, file);		/* "", "\t.comm\t", or "\t.lcomm\t" */
  assemble_name (file, name);
  fprintf (file, final_string, size);	/* ":\n", ",%u\n", ",%u\n" */
}
2052
2053
/* Emit a ".literal" directive for constant X of the given MODE,
   labeled .LC<labelno>, into the literal pool.  */

void
xtensa_output_literal (file, x, mode, labelno)
     FILE *file;
     rtx x;
     enum machine_mode mode;
     int labelno;
{
  long value_long[2];
  union real_extract u;
  int size;

  fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_FLOAT:
      if (GET_CODE (x) != CONST_DOUBLE)
	abort ();

      /* Convert the host-format constant into target word images.  */
      memcpy ((char *) &u, (char *) &CONST_DOUBLE_LOW (x), sizeof u);
      switch (mode)
	{
	case SFmode:
	  REAL_VALUE_TO_TARGET_SINGLE (u.d, value_long[0]);
	  fprintf (file, "0x%08lx\t\t# %.12g (float)\n", value_long[0], u.d);
	  break;

	case DFmode:
	  REAL_VALUE_TO_TARGET_DOUBLE (u.d, value_long);
	  fprintf (file, "0x%08lx, 0x%08lx # %.20g (double)\n",
		   value_long[0], value_long[1], u.d);
	  break;

	default:
	  abort ();
	}

      break;

    case MODE_INT:
    case MODE_PARTIAL_INT:
      size = GET_MODE_SIZE (mode);
      if (size == 4)
	{
	  output_addr_const (file, x);
	  fputs ("\n", file);
	}
      else if (size == 8)
	{
	  /* Double-word constant: emit as two comma-separated words.  */
	  output_addr_const (file, operand_subword (x, 0, 0, DImode));
	  fputs (", ", file);
	  output_addr_const (file, operand_subword (x, 1, 0, DImode));
	  fputs ("\n", file);
	}
      else
	abort ();
      break;

    default:
      abort ();
    }
}
2116
2117
2118/* Return the bytes needed to compute the frame pointer from the current
2119 stack pointer. */
2120
2121#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2122#define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
2123
2124long
2125compute_frame_size (size)
2126 int size; /* # of var. bytes allocated */
2127{
2128 /* add space for the incoming static chain value */
2129 if (current_function_needs_context)
2130 size += (1 * UNITS_PER_WORD);
2131
2132 xtensa_current_frame_size =
2133 XTENSA_STACK_ALIGN (size
2134 + current_function_outgoing_args_size
2135 + (WINDOW_SIZE * UNITS_PER_WORD));
2136 return xtensa_current_frame_size;
2137}
2138
2139
2140int
2141xtensa_frame_pointer_required ()
2142{
2143 /* The code to expand builtin_frame_addr and builtin_return_addr
2144 currently uses the hard_frame_pointer instead of frame_pointer.
2145 This seems wrong but maybe it's necessary for other architectures.
2146 This function is derived from the i386 code. */
2147
2148 if (cfun->machine->accesses_prev_frame)
2149 return 1;
2150
2151 return 0;
2152}
2153
2154
2155void
2156xtensa_reorg (first)
2157 rtx first;
2158{
2159 rtx insn, set_frame_ptr_insn = 0;
2160
2161 unsigned long tsize = compute_frame_size (get_frame_size ());
2162 if (tsize < (1 << (12+3)))
2163 frame_size_const = 0;
2164 else
2165 {
2166 frame_size_const = force_const_mem (SImode, GEN_INT (tsize - 16));;
2167
2168 /* make sure the constant is used so it doesn't get eliminated
2169 from the constant pool */
2170 emit_insn_before (gen_rtx_USE (SImode, frame_size_const), first);
2171 }
2172
2173 if (!frame_pointer_needed)
2174 return;
2175
2176 /* Search all instructions, looking for the insn that sets up the
2177 frame pointer. This search will fail if the function does not
2178 have an incoming argument in $a7, but in that case, we can just
2179 set up the frame pointer at the very beginning of the
2180 function. */
2181
2182 for (insn = first; insn; insn = NEXT_INSN (insn))
2183 {
2184 rtx pat;
2185
2186 if (!INSN_P (insn))
2187 continue;
2188
2189 pat = PATTERN (insn);
2190 if (GET_CODE (pat) == UNSPEC_VOLATILE
2191 && (XINT (pat, 1) == UNSPECV_SET_FP))
2192 {
2193 set_frame_ptr_insn = insn;
2194 break;
2195 }
2196 }
2197
2198 if (set_frame_ptr_insn)
2199 {
2200 /* for all instructions prior to set_frame_ptr_insn, replace
2201 hard_frame_pointer references with stack_pointer */
2202 for (insn = first; insn != set_frame_ptr_insn; insn = NEXT_INSN (insn))
2203 {
2204 if (INSN_P (insn))
2205 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2206 hard_frame_pointer_rtx,
2207 stack_pointer_rtx);
2208 }
2209 }
2210 else
2211 {
2212 /* emit the frame pointer move immediately after the NOTE that starts
2213 the function */
2214 emit_insn_after (gen_movsi (hard_frame_pointer_rtx,
2215 stack_pointer_rtx), first);
2216 }
2217}
2218
2219
2220/* Set up the stack and frame (if desired) for the function. */
2221
void
xtensa_function_prologue (file, size)
     FILE *file;
     int size ATTRIBUTE_UNUSED;
{
  unsigned long tsize = compute_frame_size (get_frame_size ());

  /* Emit the .frame directive naming the register used to address
     this function's frame.  */
  if (frame_pointer_needed)
    fprintf (file, "\t.frame\ta7, %ld\n", tsize);
  else
    fprintf (file, "\t.frame\tsp, %ld\n", tsize);


  if (tsize < (1 << (12+3)))
    {
      /* The frame size fits in the "entry" immediate.  */
      fprintf (file, "\tentry\tsp, %ld\n", tsize);
    }
  else
    {
      /* Frame too big for "entry": allocate a minimal 16-byte frame,
	 then load the full size from the constant-pool entry created
	 by xtensa_reorg and adjust the stack explicitly.  */
      fprintf (file, "\tentry\tsp, 16\n");

      /* use a8 as a temporary since a0-a7 may be live */
      fprintf (file, "\tl32r\ta8, ");
      print_operand (file, frame_size_const, 0);
      fprintf (file, "\n\tsub\ta8, sp, a8\n");
      fprintf (file, "\tmovsp\tsp, a8\n");
    }
}
2250
2251
2252/* Do any necessary cleanup after a function to restore
2253 stack, frame, and regs. */
2254
2255void
2256xtensa_function_epilogue (file, size)
2257 FILE *file;
2258 int size ATTRIBUTE_UNUSED;
2259{
2260 rtx insn = get_last_insn ();
2261 /* If the last insn was a BARRIER, we don't have to write anything. */
2262 if (GET_CODE (insn) == NOTE)
2263 insn = prev_nonnote_insn (insn);
2264 if (insn == 0 || GET_CODE (insn) != BARRIER)
2265 fprintf (file, TARGET_DENSITY ? "\tretw.n\n" : "\tretw\n");
2266
2267 xtensa_current_frame_size = 0;
2268}
2269
2270
2271/* Create the va_list data type.
2272 This structure is set up by __builtin_saveregs. The __va_reg
2273 field points to a stack-allocated region holding the contents of the
2274 incoming argument registers. The __va_ndx field is an index initialized
2275 to the position of the first unnamed (variable) argument. This same index
2276 is also used to address the arguments passed in memory. Thus, the
2277 __va_stk field is initialized to point to the position of the first
2278 argument in memory offset to account for the arguments passed in
2279 registers. E.G., if there are 6 argument registers, and each register is
2280 4 bytes, then __va_stk is set to $sp - (6 * 4); then __va_reg[N*4]
2281 references argument word N for 0 <= N < 6, and __va_stk[N*4] references
2282 argument word N for N >= 6. */
2283
2284tree
2285xtensa_build_va_list (void)
2286{
2287 tree f_stk, f_reg, f_ndx, record;
2288
2289 record = make_node (RECORD_TYPE);
2290
2291 f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
2292 ptr_type_node);
2293 f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
2294 ptr_type_node);
2295 f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
2296 integer_type_node);
2297
2298 DECL_FIELD_CONTEXT (f_stk) = record;
2299 DECL_FIELD_CONTEXT (f_reg) = record;
2300 DECL_FIELD_CONTEXT (f_ndx) = record;
2301
2302 TYPE_FIELDS (record) = f_stk;
2303 TREE_CHAIN (f_stk) = f_reg;
2304 TREE_CHAIN (f_reg) = f_ndx;
2305
2306 layout_type (record);
2307 return record;
2308}
2309
2310
2311/* Save the incoming argument registers on the stack. Returns the
2312 address of the saved registers. */
2313
rtx
xtensa_builtin_saveregs ()
{
  rtx gp_regs, dest;
  int arg_words = current_function_arg_words;
  int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
  int i;

  /* All argument registers hold named arguments; nothing to save.  */
  if (gp_left == 0)
    return const0_rtx;

  /* allocate the general-purpose register space */
  gp_regs = assign_stack_local
    (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
  MEM_IN_STRUCT_P (gp_regs) = 1;
  RTX_UNCHANGING_P (gp_regs) = 1;
  RTX_UNCHANGING_P (XEXP (gp_regs, 0)) = 1;

  /* Now store the incoming registers, starting at the slot for the
     first unnamed argument.  */
  dest = change_address (gp_regs, SImode,
			 plus_constant (XEXP (gp_regs, 0),
					arg_words * UNITS_PER_WORD));

  /* Note: Don't use move_block_from_reg() here because the incoming
     argument in a7 cannot be represented by hard_frame_pointer_rtx.
     Instead, call gen_raw_REG() directly so that we get a distinct
     instance of (REG:SI 7).  */
  for (i = 0; i < gp_left; i++)
    {
      emit_move_insn (operand_subword (dest, i, 1, BLKmode),
		      gen_raw_REG (SImode, GP_ARG_FIRST + arg_words + i));
    }

  /* Return the address of the whole saved-register area.  */
  return XEXP (gp_regs, 0);
}
2349
2350
2351/* Implement `va_start' for varargs and stdarg. We look at the
2352 current function to fill in an initial va_list. */
2353
void
xtensa_va_start (stdarg_p, valist, nextarg)
     int stdarg_p ATTRIBUTE_UNUSED;
     tree valist;
     rtx nextarg ATTRIBUTE_UNUSED;
{
  tree f_stk, stk;
  tree f_reg, reg;
  tree f_ndx, ndx;
  tree t, u;
  int arg_words;

  arg_words = current_function_args_info.arg_words;

  /* The three va_list fields, in the order xtensa_build_va_list
     chained them: __va_stk, __va_reg, __va_ndx.  */
  f_stk = TYPE_FIELDS (va_list_type_node);
  f_reg = TREE_CHAIN (f_stk);
  f_ndx = TREE_CHAIN (f_reg);

  stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
  reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
  ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);

  /* Call __builtin_saveregs; save the result in __va_reg */
  current_function_arg_words = arg_words;
  u = make_tree (ptr_type_node, expand_builtin_saveregs ());
  t = build (MODIFY_EXPR, ptr_type_node, reg, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Set the __va_stk member to $arg_ptr - (size of __va_reg area) */
  u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
  u = fold (build (PLUS_EXPR, ptr_type_node, u,
		   build_int_2 (-MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1)));
  t = build (MODIFY_EXPR, ptr_type_node, stk, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Set the __va_ndx member: index of the first unnamed argument.  */
  u = build_int_2 (arg_words * UNITS_PER_WORD, 0);
  t = build (MODIFY_EXPR, integer_type_node, ndx, u);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
2397
2398
2399/* Implement `va_arg'. */
2400
rtx
xtensa_va_arg (valist, type)
     tree valist, type;
{
  tree f_stk, stk;
  tree f_reg, reg;
  tree f_ndx, ndx;
  tree tmp, addr_tree;
  rtx array, orig_ndx, r, addr;
  HOST_WIDE_INT size, va_size;
  rtx lab_false, lab_over, lab_false2;

  /* Size of the argument in bytes, and that size rounded up to a
     whole number of words (every argument occupies full words).  */
  size = int_size_in_bytes (type);
  va_size = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;

  /* The va_list record fields, in order: __va_stk, __va_reg, __va_ndx.  */
  f_stk = TYPE_FIELDS (va_list_type_node);
  f_reg = TREE_CHAIN (f_stk);
  f_ndx = TREE_CHAIN (f_reg);

  stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
  reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
  ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);


  /* First align __va_ndx to a double word boundary if necessary for this arg:

     if (__alignof__ (TYPE) > 4)
       (AP).__va_ndx = (((AP).__va_ndx + 7) & -8)
  */

  if (TYPE_ALIGN (type) > BITS_PER_WORD)
    {
      tmp = build (PLUS_EXPR, integer_type_node, ndx,
		   build_int_2 ((2 * UNITS_PER_WORD) - 1, 0));
      tmp = build (BIT_AND_EXPR, integer_type_node, tmp,
		   build_int_2 (-2 * UNITS_PER_WORD, -1));
      tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
      TREE_SIDE_EFFECTS (tmp) = 1;
      expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }


  /* Increment __va_ndx to point past the argument:

     orig_ndx = (AP).__va_ndx;
     (AP).__va_ndx += __va_size (TYPE);
  */

  /* Save the pre-increment index; it is needed below to detect an
     argument that would straddle the register/stack boundary.  */
  orig_ndx = gen_reg_rtx (SImode);
  r = expand_expr (ndx, orig_ndx, SImode, EXPAND_NORMAL);
  if (r != orig_ndx)
    emit_move_insn (orig_ndx, r);

  tmp = build (PLUS_EXPR, integer_type_node, ndx, build_int_2 (va_size, 0));
  tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
  TREE_SIDE_EFFECTS (tmp) = 1;
  expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);


  /* Check if the argument is in registers:

     if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4)
        __array = (AP).__va_reg;
  */

  lab_false = gen_label_rtx ();
  lab_over = gen_label_rtx ();
  array = gen_reg_rtx (Pmode);

  /* Branch to lab_false when the (post-increment) index is past the
     end of the register-save area (signed compare, per unsignedp=0).  */
  emit_cmp_and_jump_insns (expand_expr (ndx, NULL_RTX, SImode, EXPAND_NORMAL),
			   GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
			   GT, const1_rtx, SImode, 0, lab_false);

  r = expand_expr (reg, array, Pmode, EXPAND_NORMAL);
  if (r != array)
    emit_move_insn (array, r);

  emit_jump_insn (gen_jump (lab_over));
  emit_barrier ();
  emit_label (lab_false);


  /* ...otherwise, the argument is on the stack (never split between
     registers and the stack -- change __va_ndx if necessary):

     else
       {
	 if (orig_ndx < __MAX_ARGS_IN_REGISTERS * 4)
	     (AP).__va_ndx = __MAX_ARGS_IN_REGISTERS * 4 + __va_size (TYPE);
	 __array = (AP).__va_stk;
       }
  */

  lab_false2 = gen_label_rtx ();
  emit_cmp_and_jump_insns (orig_ndx,
			   GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
			   GE, const1_rtx, SImode, 0, lab_false2);

  /* The argument started inside the register area but no longer fits:
     bump __va_ndx so it points just past the argument's stack slot.  */
  tmp = build_int_2 ((MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD) + va_size, 0);
  tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
  TREE_SIDE_EFFECTS (tmp) = 1;
  expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_false2);

  r = expand_expr (stk, array, Pmode, EXPAND_NORMAL);
  if (r != array)
    emit_move_insn (array, r);


  /* Given the base array pointer (__array) and index to the subsequent
     argument (__va_ndx), find the address:

     Big-endian:
	 __array + (AP).__va_ndx - sizeof (TYPE)

     Little-endian:
	 __array + (AP).__va_ndx - __va_size (TYPE)

     The results are endian-dependent because values smaller than one word
     are aligned differently.
  */

  emit_label (lab_over);

  addr_tree = build (PLUS_EXPR, ptr_type_node,
		     make_tree (ptr_type_node, array),
		     ndx);
  addr_tree = build (PLUS_EXPR, ptr_type_node,
		     addr_tree,
		     build_int_2 (BYTES_BIG_ENDIAN
				  && size < (PARM_BOUNDARY / BITS_PER_UNIT)
				  ? -size
				  : -va_size, -1));
  addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
  addr = copy_to_reg (addr);
  return addr;
}
2539
2540
2541enum reg_class
2542xtensa_secondary_reload_class (class, mode, x, isoutput)
2543 enum reg_class class;
2544 enum machine_mode mode ATTRIBUTE_UNUSED;
2545 rtx x;
2546 int isoutput;
2547{
2548 int regno;
2549
2550 if (GET_CODE (x) == SIGN_EXTEND)
2551 x = XEXP (x, 0);
2552 regno = xt_true_regnum (x);
2553
2554 if (!isoutput)
2555 {
2556 if (class == FP_REGS && constantpool_mem_p (x))
2557 return GR_REGS;
2558 }
2559
2560 if (ACC_REG_P (regno))
2561 return (class == GR_REGS ? NO_REGS : GR_REGS);
2562 if (class == ACC_REG)
2563 return (GP_REG_P (regno) ? NO_REGS : GR_REGS);
2564
2565 return NO_REGS;
2566}
2567
2568
2569void
2570order_regs_for_local_alloc ()
2571{
2572 if (!leaf_function_p ())
2573 {
2574 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2575 FIRST_PSEUDO_REGISTER * sizeof (int));
2576 }
2577 else
2578 {
2579 int i, num_arg_regs;
2580 int nxt = 0;
2581
2582 /* use the AR registers in increasing order (skipping a0 and a1)
2583 but save the incoming argument registers for a last resort */
2584 num_arg_regs = current_function_args_info.arg_words;
2585 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2586 num_arg_regs = MAX_ARGS_IN_REGISTERS;
2587 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2588 reg_alloc_order[nxt++] = i + num_arg_regs;
2589 for (i = 0; i < num_arg_regs; i++)
2590 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
2591
2592 /* list the FP registers in order for now */
2593 for (i = 0; i < 16; i++)
2594 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2595
2596 /* GCC requires that we list *all* the registers.... */
2597 reg_alloc_order[nxt++] = 0; /* a0 = return address */
2598 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
2599 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
2600 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
2601
2602 /* list the coprocessor registers in order */
2603 for (i = 0; i < BR_REG_NUM; i++)
2604 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2605
2606 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
2607 }
2608}
2609
2610
2611/* A customized version of reg_overlap_mentioned_p that only looks for
2612 references to a7 (as opposed to hard_frame_pointer_rtx). */
2613
2614int
2615a7_overlap_mentioned_p (x)
2616 rtx x;
2617{
2618 int i, j;
2619 unsigned int x_regno;
2620 const char *fmt;
2621
2622 if (GET_CODE (x) == REG)
2623 {
2624 x_regno = REGNO (x);
2625 return (x != hard_frame_pointer_rtx
2626 && x_regno < A7_REG + 1
2627 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2628 }
2629
2630 if (GET_CODE (x) == SUBREG
2631 && GET_CODE (SUBREG_REG (x)) == REG
2632 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
2633 {
2634 x_regno = subreg_regno (x);
2635 return (SUBREG_REG (x) != hard_frame_pointer_rtx
2636 && x_regno < A7_REG + 1
2637 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2638 }
2639
2640 /* X does not match, so try its subexpressions. */
2641 fmt = GET_RTX_FORMAT (GET_CODE (x));
2642 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2643 {
2644 if (fmt[i] == 'e')
2645 {
2646 if (a7_overlap_mentioned_p (XEXP (x, i)))
2647 return 1;
2648 }
2649 else if (fmt[i] == 'E')
2650 {
2651 for (j = XVECLEN (x, i) - 1; j >=0; j--)
2652 if (a7_overlap_mentioned_p (XVECEXP (x, i, j)))
2653 return 1;
2654 }
2655 }
2656
2657 return 0;
2658}
This page took 0.266481 seconds and 5 git commands to generate.