1/* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
2 Copyright (C) 2001 Free Software Foundation, Inc.
3 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING. If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA. */
21
22#include "config.h"
23#include "system.h"
24#include "rtl.h"
25#include "regs.h"
26#include "machmode.h"
27#include "hard-reg-set.h"
28#include "basic-block.h"
29#include "real.h"
30#include "insn-config.h"
31#include "conditions.h"
32#include "insn-flags.h"
33#include "insn-attr.h"
34#include "insn-codes.h"
35#include "recog.h"
36#include "output.h"
37#include "tree.h"
38#include "expr.h"
39#include "flags.h"
40#include "reload.h"
41#include "tm_p.h"
42#include "function.h"
43#include "toplev.h"
44#include "optabs.h"
45#include "libfuncs.h"
46#include "target.h"
47#include "target-def.h"
48
49/* Enumeration for all of the relational tests, so that we can build
50 arrays indexed by the test type, and not worry about the order
51 of EQ, NE, etc. */
52
53enum internal_test {
54 ITEST_EQ,
55 ITEST_NE,
56 ITEST_GT,
57 ITEST_GE,
58 ITEST_LT,
59 ITEST_LE,
60 ITEST_GTU,
61 ITEST_GEU,
62 ITEST_LTU,
63 ITEST_LEU,
64 ITEST_MAX
65 };
66
67/* Cached operands, and operator to compare for use in set/branch on
68 condition codes. */
69rtx branch_cmp[2];
70
71/* what type of branch to use */
72enum cmp_type branch_type;
73
74/* Array giving truth value on whether or not a given hard register
75 can support a given mode. */
76char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
77
78/* Current frame size calculated by compute_frame_size. */
79unsigned xtensa_current_frame_size;
80
81/* Tables of ld/st opcode names for block moves */
82const char *xtensa_ld_opcodes[(int) MAX_MACHINE_MODE];
83const char *xtensa_st_opcodes[(int) MAX_MACHINE_MODE];
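/* LARGEST_MOVE_RATIO bounds the number of load/store pieces that
   xtensa_expand_block_move will expand inline at higher optimization
   levels; xtensa_mem_offset also uses it below to compute the
   worst-case BLKmode offset.  */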
84#define LARGEST_MOVE_RATIO 15
85
86/* Define the structure for the machine field in struct function. */
87struct machine_function
88{
89 int accesses_prev_frame;
90};
91
92/* Vector, indexed by hard register number, which contains 1 for a
93 register that is allowable in a candidate for leaf function
94 treatment. */
95
96const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
97{
98 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
99 1, 1, 1,
100 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
101 1
102};
103
104/* Map hard register number to register class */
105const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
106{
107 GR_REGS, SP_REG, GR_REGS, GR_REGS,
108 GR_REGS, GR_REGS, GR_REGS, GR_REGS,
109 GR_REGS, GR_REGS, GR_REGS, GR_REGS,
110 GR_REGS, GR_REGS, GR_REGS, GR_REGS,
111 AR_REGS, AR_REGS, BR_REGS,
112 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
113 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
114 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
115 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
116 ACC_REG,
117};
118
119/* Map register constraint character to register class. */
120enum reg_class xtensa_char_to_class[256] =
121{
122 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
123 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
124 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
125 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
126 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
127 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
128 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
129 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
130 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
131 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
132 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
133 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
134 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
135 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
136 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
137 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
138 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
139 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
140 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
141 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
142 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
143 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
144 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
145 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
146 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
147 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
148 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
149 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
150 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
151 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
152 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
153 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
154 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
155 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
156 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
157 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
158 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
159 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
160 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
161 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
162 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
163 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
164 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
165 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
166 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
167 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
168 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
169 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
170 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
171 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
172 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
173 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
174 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
175 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
176 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
177 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
178 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
179 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
180 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
181 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
182 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
183 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
184 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
185 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
186};
187
188/* This macro generates the assembly code for function entry.
189 FILE is a stdio stream to output the code to.
190 SIZE is an int: how many units of temporary storage to allocate.
191 Refer to the array 'regs_ever_live' to determine which registers
192 to save; 'regs_ever_live[I]' is nonzero if register number I
193 is ever used in the function. This macro is responsible for
194 knowing which registers should not be saved even if used. */
195
196#undef TARGET_ASM_FUNCTION_PROLOGUE
197#define TARGET_ASM_FUNCTION_PROLOGUE xtensa_function_prologue
198
199/* This macro generates the assembly code for function exit,
200 on machines that need it. If FUNCTION_EPILOGUE is not defined
201 then individual return instructions are generated for each
202 return statement. Args are same as for FUNCTION_PROLOGUE. */
203
204#undef TARGET_ASM_FUNCTION_EPILOGUE
205#define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
206
207/* These hooks specify assembly directives for creating certain kinds
208 of integer object. */
209
210#undef TARGET_ASM_ALIGNED_SI_OP
211#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
212
213struct gcc_target targetm = TARGET_INITIALIZER;
214
215static int b4const_or_zero PARAMS ((int));
216static enum internal_test map_test_to_internal_test PARAMS ((enum rtx_code));
217static rtx gen_int_relational PARAMS ((enum rtx_code, rtx, rtx, int *));
218static rtx gen_float_relational PARAMS ((enum rtx_code, rtx, rtx));
219static rtx gen_conditional_move PARAMS ((rtx));
220static rtx fixup_subreg_mem PARAMS ((rtx x));
221static enum machine_mode xtensa_find_mode_for_size PARAMS ((unsigned));
222static void xtensa_init_machine_status PARAMS ((struct function *p));
223static void xtensa_free_machine_status PARAMS ((struct function *p));
224static void printx PARAMS ((FILE *, signed int));
225static rtx frame_size_const;
226static int current_function_arg_words;
227static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
228 REG_ALLOC_ORDER;
229
230
231/*
232 * Functions to test Xtensa immediate operand validity.
233 */
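/* A quick summary of the encodings checked below: simm8 accepts
   -128..127, simm12b accepts -2048..2047, uimm8x4 accepts multiples of
   4 in 0..1020, and b4const accepts the 16-entry branch-immediate table
   {-1, 1..8, 10, 12, 16, 32, 64, 128, 256} (b4constu replaces -1 and 1
   with 32768 and 65536).  */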
234
235int
236xtensa_b4constu (v)
237 int v;
238{
239 switch (v)
240 {
241 case 32768:
242 case 65536:
243 case 2:
244 case 3:
245 case 4:
246 case 5:
247 case 6:
248 case 7:
249 case 8:
250 case 10:
251 case 12:
252 case 16:
253 case 32:
254 case 64:
255 case 128:
256 case 256:
257 return 1;
258 }
259 return 0;
260}
261
262int
263xtensa_simm8x256 (v)
264 int v;
265{
266 return (v & 255) == 0 && (v >= -32768 && v <= 32512);
267}
268
269int
270xtensa_ai4const (v)
271 int v;
272{
273 return (v == -1 || (v >= 1 && v <= 15));
274}
275
276int
277xtensa_simm7 (v)
278 int v;
279{
280 return v >= -32 && v <= 95;
281}
282
283int
284xtensa_b4const (v)
285 int v;
286{
287 switch (v)
288 {
289 case -1:
290 case 1:
291 case 2:
292 case 3:
293 case 4:
294 case 5:
295 case 6:
296 case 7:
297 case 8:
298 case 10:
299 case 12:
300 case 16:
301 case 32:
302 case 64:
303 case 128:
304 case 256:
305 return 1;
306 }
307 return 0;
308}
309
310int
311xtensa_simm8 (v)
312 int v;
313{
314 return v >= -128 && v <= 127;
315}
316
317int
318xtensa_tp7 (v)
319 int v;
320{
321 return (v >= 7 && v <= 22);
322}
323
324int
325xtensa_lsi4x4 (v)
326 int v;
327{
328 return (v & 3) == 0 && (v >= 0 && v <= 60);
329}
330
331int
332xtensa_simm12b (v)
333 int v;
334{
335 return v >= -2048 && v <= 2047;
336}
337
338int
339xtensa_uimm8 (v)
340 int v;
341{
342 return v >= 0 && v <= 255;
343}
344
345int
346xtensa_uimm8x2 (v)
347 int v;
348{
349 return (v & 1) == 0 && (v >= 0 && v <= 510);
350}
351
352int
353xtensa_uimm8x4 (v)
354 int v;
355{
356 return (v & 3) == 0 && (v >= 0 && v <= 1020);
357}
358
359
360/* This is just like the standard true_regnum() function except that it
361 works even when reg_renumber is not initialized. */
362
363int
364xt_true_regnum (x)
365 rtx x;
366{
367 if (GET_CODE (x) == REG)
368 {
369 if (reg_renumber
370 && REGNO (x) >= FIRST_PSEUDO_REGISTER
371 && reg_renumber[REGNO (x)] >= 0)
372 return reg_renumber[REGNO (x)];
373 return REGNO (x);
374 }
375 if (GET_CODE (x) == SUBREG)
376 {
377 int base = xt_true_regnum (SUBREG_REG (x));
378 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
379 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
380 GET_MODE (SUBREG_REG (x)),
381 SUBREG_BYTE (x), GET_MODE (x));
382 }
383 return -1;
384}
385
386
387int
388add_operand (op, mode)
389 rtx op;
390 enum machine_mode mode;
391{
392 if (GET_CODE (op) == CONST_INT)
393 return (xtensa_simm8 (INTVAL (op)) ||
394 xtensa_simm8x256 (INTVAL (op)));
395
396 return register_operand (op, mode);
397}
398
399
400int
401arith_operand (op, mode)
402 rtx op;
403 enum machine_mode mode;
404{
405 if (GET_CODE (op) == CONST_INT)
406 return xtensa_simm8 (INTVAL (op));
407
408 return register_operand (op, mode);
409}
410
411
412int
413nonimmed_operand (op, mode)
414 rtx op;
415 enum machine_mode mode;
416{
417 /* We cannot use the standard nonimmediate_operand() predicate because
418 it includes constant pool memory operands. */
419
420 if (memory_operand (op, mode))
421 return !constantpool_address_p (XEXP (op, 0));
422
423 return register_operand (op, mode);
424}
425
426
427int
428mem_operand (op, mode)
429 rtx op;
430 enum machine_mode mode;
431{
432 /* We cannot use the standard memory_operand() predicate because
433 it includes constant pool memory operands. */
434
435 if (memory_operand (op, mode))
436 return !constantpool_address_p (XEXP (op, 0));
437
438 return FALSE;
439}
440
441
442int
 443xtensa_valid_move (mode, operands)
 444 enum machine_mode mode;
 445 rtx *operands;
 446{
447 /* Either the destination or source must be a register, and the
448 MAC16 accumulator doesn't count. */
449
450 if (register_operand (operands[0], mode))
451 {
452 int dst_regnum = xt_true_regnum (operands[0]);
453
454 /* The stack pointer can only be assigned with a MOVSP opcode. */
455 if (dst_regnum == STACK_POINTER_REGNUM)
456 return (mode == SImode
457 && register_operand (operands[1], mode)
458 && !ACC_REG_P (xt_true_regnum (operands[1])));
459
460 if (!ACC_REG_P (dst_regnum))
461 return true;
462 }
463 else if (register_operand (operands[1], mode))
464 {
465 int src_regnum = xt_true_regnum (operands[1]);
466 if (!ACC_REG_P (src_regnum))
467 return true;
468 }
469 return FALSE;
470}
471
472
473int
474mask_operand (op, mode)
475 rtx op;
476 enum machine_mode mode;
477{
478 if (GET_CODE (op) == CONST_INT)
479 return xtensa_mask_immediate (INTVAL (op));
480
481 return register_operand (op, mode);
482}
483
484
485int
486extui_fldsz_operand (op, mode)
487 rtx op;
488 enum machine_mode mode ATTRIBUTE_UNUSED;
489{
490 return ((GET_CODE (op) == CONST_INT)
491 && xtensa_mask_immediate ((1 << INTVAL (op)) - 1));
492}
493
494
495int
496sext_operand (op, mode)
497 rtx op;
498 enum machine_mode mode;
499{
500 if (TARGET_SEXT)
501 return nonimmed_operand (op, mode);
502 return mem_operand (op, mode);
503}
504
505
506int
507sext_fldsz_operand (op, mode)
508 rtx op;
509 enum machine_mode mode ATTRIBUTE_UNUSED;
510{
511 return ((GET_CODE (op) == CONST_INT) && xtensa_tp7 (INTVAL (op) - 1));
512}
513
514
515int
516lsbitnum_operand (op, mode)
517 rtx op;
518 enum machine_mode mode ATTRIBUTE_UNUSED;
519{
520 if (GET_CODE (op) == CONST_INT)
521 {
522 return (BITS_BIG_ENDIAN
523 ? (INTVAL (op) == BITS_PER_WORD-1)
524 : (INTVAL (op) == 0));
525 }
526 return FALSE;
527}
528
529
530static int
531b4const_or_zero (v)
532 int v;
533{
534 if (v == 0)
535 return TRUE;
536 return xtensa_b4const (v);
537}
538
539
540int
541branch_operand (op, mode)
542 rtx op;
543 enum machine_mode mode;
544{
545 if (GET_CODE (op) == CONST_INT)
546 return b4const_or_zero (INTVAL (op));
547
548 return register_operand (op, mode);
549}
550
551
552int
553ubranch_operand (op, mode)
554 rtx op;
555 enum machine_mode mode;
556{
557 if (GET_CODE (op) == CONST_INT)
558 return xtensa_b4constu (INTVAL (op));
559
560 return register_operand (op, mode);
561}
562
563
564int
565call_insn_operand (op, mode)
566 rtx op;
567 enum machine_mode mode ATTRIBUTE_UNUSED;
568{
569 if ((GET_CODE (op) == REG)
570 && (op != arg_pointer_rtx)
571 && ((REGNO (op) < FRAME_POINTER_REGNUM)
572 || (REGNO (op) > LAST_VIRTUAL_REGISTER)))
573 return TRUE;
574
575 if (CONSTANT_ADDRESS_P (op))
576 {
577 /* Direct calls only allowed to static functions with PIC. */
578 return (!flag_pic || (GET_CODE (op) == SYMBOL_REF
579 && SYMBOL_REF_FLAG (op)));
580 }
581
582 return FALSE;
583}
584
585
586int
587move_operand (op, mode)
588 rtx op;
589 enum machine_mode mode;
590{
591 if (register_operand (op, mode))
592 return TRUE;
593
594 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
595 result in 0/1. */
596 if (GET_CODE (op) == CONSTANT_P_RTX)
597 return TRUE;
598
599 if (GET_CODE (op) == CONST_INT)
600 return xtensa_simm12b (INTVAL (op));
601
602 if (GET_CODE (op) == MEM)
603 return memory_address_p (mode, XEXP (op, 0));
604
605 return FALSE;
606}
607
608
609int
610smalloffset_mem_p (op)
611 rtx op;
612{
613 if (GET_CODE (op) == MEM)
614 {
615 rtx addr = XEXP (op, 0);
616 if (GET_CODE (addr) == REG)
617 return REG_OK_FOR_BASE_P (addr);
618 if (GET_CODE (addr) == PLUS)
619 {
620 rtx offset = XEXP (addr, 0);
621 if (GET_CODE (offset) != CONST_INT)
622 offset = XEXP (addr, 1);
623 if (GET_CODE (offset) != CONST_INT)
624 return FALSE;
625 return xtensa_lsi4x4 (INTVAL (offset));
626 }
627 }
628 return FALSE;
629}
630
631
632int
633smalloffset_double_mem_p (op)
634 rtx op;
635{
636 if (!smalloffset_mem_p (op))
637 return FALSE;
638 return smalloffset_mem_p (adjust_address (op, GET_MODE (op), 4));
639}
640
641
642int
643constantpool_address_p (addr)
644 rtx addr;
645{
646 rtx sym = addr;
647
648 if (GET_CODE (addr) == CONST)
649 {
650 rtx offset;
651
652 /* only handle (PLUS (SYM, OFFSET)) form */
653 addr = XEXP (addr, 0);
654 if (GET_CODE (addr) != PLUS)
655 return FALSE;
656
657 /* make sure the address is word aligned */
658 offset = XEXP (addr, 1);
659 if ((GET_CODE (offset) != CONST_INT)
660 || ((INTVAL (offset) & 3) != 0))
661 return FALSE;
662
663 sym = XEXP (addr, 0);
664 }
665
666 if ((GET_CODE (sym) == SYMBOL_REF)
667 && CONSTANT_POOL_ADDRESS_P (sym))
668 return TRUE;
669 return FALSE;
670}
671
672
673int
674constantpool_mem_p (op)
675 rtx op;
676{
677 if (GET_CODE (op) == MEM)
678 return constantpool_address_p (XEXP (op, 0));
679 return FALSE;
680}
681
682
683int
684non_const_move_operand (op, mode)
685 rtx op;
686 enum machine_mode mode;
687{
688 if (register_operand (op, mode))
689 return 1;
690 if (GET_CODE (op) == SUBREG)
691 op = SUBREG_REG (op);
692 if (GET_CODE (op) == MEM)
693 return memory_address_p (mode, XEXP (op, 0));
694 return FALSE;
695}
696
697
698/* Accept the floating point constant 1 in the appropriate mode. */
699
700int
701const_float_1_operand (op, mode)
702 rtx op;
703 enum machine_mode mode;
704{
705 REAL_VALUE_TYPE d;
706 static REAL_VALUE_TYPE onedf;
707 static REAL_VALUE_TYPE onesf;
708 static int one_initialized;
709
710 if ((GET_CODE (op) != CONST_DOUBLE)
711 || (mode != GET_MODE (op))
712 || (mode != DFmode && mode != SFmode))
713 return FALSE;
714
715 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
716
717 if (! one_initialized)
718 {
719 onedf = REAL_VALUE_ATOF ("1.0", DFmode);
720 onesf = REAL_VALUE_ATOF ("1.0", SFmode);
721 one_initialized = TRUE;
722 }
723
724 if (mode == DFmode)
725 return REAL_VALUES_EQUAL (d, onedf);
726 else
727 return REAL_VALUES_EQUAL (d, onesf);
728}
729
730
731int
732fpmem_offset_operand (op, mode)
733 rtx op;
734 enum machine_mode mode ATTRIBUTE_UNUSED;
735{
736 if (GET_CODE (op) == CONST_INT)
737 return xtensa_mem_offset (INTVAL (op), SFmode);
738 return 0;
739}
740
741
742void
743xtensa_extend_reg (dst, src)
744 rtx dst;
745 rtx src;
746{
747 rtx temp = gen_reg_rtx (SImode);
748 rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
749
750 /* generate paradoxical subregs as needed so that the modes match */
751 src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
752 dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
753
754 emit_insn (gen_ashlsi3 (temp, src, shift));
755 emit_insn (gen_ashrsi3 (dst, temp, shift));
756}
757
758
759void
760xtensa_load_constant (dst, src)
761 rtx dst;
762 rtx src;
763{
764 enum machine_mode mode = GET_MODE (dst);
765 src = force_const_mem (SImode, src);
766
767 /* PC-relative loads are always SImode so we have to add a SUBREG if that
768 is not the desired mode */
769
770 if (mode != SImode)
771 {
772 if (register_operand (dst, mode))
773 dst = simplify_gen_subreg (SImode, dst, mode, 0);
774 else
775 {
776 src = force_reg (SImode, src);
777 src = gen_lowpart_SUBREG (mode, src);
778 }
779 }
780
781 emit_move_insn (dst, src);
782}
783
784
785int
786branch_operator (x, mode)
787 rtx x;
788 enum machine_mode mode;
789{
790 if (GET_MODE (x) != mode)
791 return FALSE;
792
793 switch (GET_CODE (x))
794 {
795 case EQ:
796 case NE:
797 case LT:
798 case GE:
799 return TRUE;
800 default:
801 break;
802 }
803 return FALSE;
804}
805
806
807int
808ubranch_operator (x, mode)
809 rtx x;
810 enum machine_mode mode;
811{
812 if (GET_MODE (x) != mode)
813 return FALSE;
814
815 switch (GET_CODE (x))
816 {
817 case LTU:
818 case GEU:
819 return TRUE;
820 default:
821 break;
822 }
823 return FALSE;
824}
825
826
827int
828boolean_operator (x, mode)
829 rtx x;
830 enum machine_mode mode;
831{
832 if (GET_MODE (x) != mode)
833 return FALSE;
834
835 switch (GET_CODE (x))
836 {
837 case EQ:
838 case NE:
839 return TRUE;
840 default:
841 break;
842 }
843 return FALSE;
844}
845
846
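/* Return nonzero if V is a mask of 1 to 16 consecutive low-order one
   bits (e.g. 0x1, 0xff, 0xffff), i.e. the field masks that EXTUI can
   extract; see extui_fldsz_operand above.  */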
847int
848xtensa_mask_immediate (v)
849 int v;
850{
851#define MAX_MASK_SIZE 16
852 int mask_size;
853
854 for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
855 {
856 if ((v & 1) == 0)
857 return FALSE;
858 v = v >> 1;
859 if (v == 0)
860 return TRUE;
861 }
862
863 return FALSE;
864}
865
866
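/* For example, an SImode reference at offset 1024 fails the check
   below (the uimm8x4 field only reaches 1020), so such an offset has
   to be folded into the base register instead.  */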
867int
868xtensa_mem_offset (v, mode)
869 unsigned v;
870 enum machine_mode mode;
871{
872 switch (mode)
873 {
874 case BLKmode:
875 /* Handle the worst case for block moves. See xtensa_expand_block_move
876 where we emit an optimized block move operation if the block can be
877 moved in < "move_ratio" pieces. The worst case is when the block is
878 aligned but has a size of (3 mod 4) (does this happen?) so that the
879 last piece requires a byte load/store. */
880 return (xtensa_uimm8 (v) &&
881 xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
882
883 case QImode:
884 return xtensa_uimm8 (v);
885
886 case HImode:
887 return xtensa_uimm8x2 (v);
888
889 case DFmode:
890 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
891
892 default:
893 break;
894 }
895
896 return xtensa_uimm8x4 (v);
897}
898
899
900/* Make normal rtx_code into something we can index from an array */
901
902static enum internal_test
903map_test_to_internal_test (test_code)
904 enum rtx_code test_code;
905{
906 enum internal_test test = ITEST_MAX;
907
908 switch (test_code)
909 {
910 default: break;
911 case EQ: test = ITEST_EQ; break;
912 case NE: test = ITEST_NE; break;
913 case GT: test = ITEST_GT; break;
914 case GE: test = ITEST_GE; break;
915 case LT: test = ITEST_LT; break;
916 case LE: test = ITEST_LE; break;
917 case GTU: test = ITEST_GTU; break;
918 case GEU: test = ITEST_GEU; break;
919 case LTU: test = ITEST_LTU; break;
920 case LEU: test = ITEST_LEU; break;
921 }
922
923 return test;
924}
925
926
927/* Generate the code to compare two integer values. The return value is
928 the comparison expression. */
929
930static rtx
931gen_int_relational (test_code, cmp0, cmp1, p_invert)
932 enum rtx_code test_code; /* relational test (EQ, etc) */
933 rtx cmp0; /* first operand to compare */
934 rtx cmp1; /* second operand to compare */
935 int *p_invert; /* whether branch needs to reverse its test */
936{
937 struct cmp_info {
938 enum rtx_code test_code; /* test code to use in insn */
939 int (*const_range_p) PARAMS ((int)); /* predicate function to check range */
940 int const_add; /* constant to add (convert LE -> LT) */
941 int reverse_regs; /* reverse registers in test */
942 int invert_const; /* != 0 if invert value if cmp1 is constant */
943 int invert_reg; /* != 0 if invert value if cmp1 is register */
944 int unsignedp; /* != 0 for unsigned comparisons. */
945 };
946
947 static struct cmp_info info[ (int)ITEST_MAX ] = {
948
949 { EQ, b4const_or_zero, 0, 0, 0, 0, 0 }, /* EQ */
950 { NE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* NE */
951
952 { LT, b4const_or_zero, 1, 1, 1, 0, 0 }, /* GT */
953 { GE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* GE */
954 { LT, b4const_or_zero, 0, 0, 0, 0, 0 }, /* LT */
955 { GE, b4const_or_zero, 1, 1, 1, 0, 0 }, /* LE */
956
957 { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* GTU */
958 { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* GEU */
959 { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* LTU */
960 { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* LEU */
961 };
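 /* For example, GT is implemented with LT: against a constant,
    (x > 5) becomes the inverted test !(x < 6) via const_add and
    invert_const, while against a register, (x > y) becomes (y < x)
    via reverse_regs.  */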
962
963 enum internal_test test;
964 enum machine_mode mode;
965 struct cmp_info *p_info;
966
967 test = map_test_to_internal_test (test_code);
968 if (test == ITEST_MAX)
969 abort ();
970
971 p_info = &info[ (int)test ];
972
973 mode = GET_MODE (cmp0);
974 if (mode == VOIDmode)
975 mode = GET_MODE (cmp1);
976
977 /* Make sure we can handle any constants given to us. */
978 if (GET_CODE (cmp1) == CONST_INT)
979 {
980 HOST_WIDE_INT value = INTVAL (cmp1);
981 unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
982
983 /* if the immediate overflows or does not fit in the immediate field,
984 spill it to a register */
985
986 if ((p_info->unsignedp ?
987 (uvalue + p_info->const_add > uvalue) :
988 (value + p_info->const_add > value)) != (p_info->const_add > 0))
989 {
990 cmp1 = force_reg (mode, cmp1);
991 }
992 else if (!(p_info->const_range_p) (value + p_info->const_add))
993 {
994 cmp1 = force_reg (mode, cmp1);
995 }
996 }
997 else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
998 {
999 cmp1 = force_reg (mode, cmp1);
1000 }
1001
1002 /* See if we need to invert the result. */
1003 *p_invert = ((GET_CODE (cmp1) == CONST_INT)
1004 ? p_info->invert_const
1005 : p_info->invert_reg);
1006
 1007 /* Comparison to a constant may involve adding 1 (e.g. converting an LE
 1008 test into LT). Comparison between two registers may involve switching operands. */
1009 if (GET_CODE (cmp1) == CONST_INT)
1010 {
1011 if (p_info->const_add != 0)
1012 cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);
1013
1014 }
1015 else if (p_info->reverse_regs)
1016 {
1017 rtx temp = cmp0;
1018 cmp0 = cmp1;
1019 cmp1 = temp;
1020 }
1021
1022 return gen_rtx (p_info->test_code, VOIDmode, cmp0, cmp1);
1023}
1024
1025
1026/* Generate the code to compare two float values. The return value is
1027 the comparison expression. */
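/* Only EQ, NE, LT, LE and their operand-swapped forms GT, GE are
   handled below: (a > b) is emitted through the slt_sf pattern as
   (b < a), and NE reuses the EQ compare with the branch sense
   inverted.  Any other code is a fatal error.  */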
1028
1029static rtx
1030gen_float_relational (test_code, cmp0, cmp1)
1031 enum rtx_code test_code; /* relational test (EQ, etc) */
1032 rtx cmp0; /* first operand to compare */
1033 rtx cmp1; /* second operand to compare */
1034{
1035 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx));
1036 rtx brtmp;
1037 int reverse_regs, invert;
1038
1039 switch (test_code)
1040 {
1041 case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
1042 case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
1043 case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
1044 case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
1045 case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
1046 case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
1047 default:
1048 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1049 reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
1050 }
1051
1052 if (reverse_regs)
1053 {
1054 rtx temp = cmp0;
1055 cmp0 = cmp1;
1056 cmp1 = temp;
1057 }
1058
1059 brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
1060 emit_insn (gen_fn (brtmp, cmp0, cmp1));
1061
1062 return gen_rtx (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
1063}
1064
1065
1066void
1067xtensa_expand_conditional_branch (operands, test_code)
1068 rtx *operands;
1069 enum rtx_code test_code;
1070{
1071 enum cmp_type type = branch_type;
1072 rtx cmp0 = branch_cmp[0];
1073 rtx cmp1 = branch_cmp[1];
1074 rtx cmp;
1075 int invert;
1076 rtx label1, label2;
1077
1078 switch (type)
1079 {
1080 case CMP_DF:
1081 default:
1082 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1083
1084 case CMP_SI:
1085 invert = FALSE;
1086 cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
1087 break;
1088
1089 case CMP_SF:
1090 if (!TARGET_HARD_FLOAT)
1091 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1092 invert = FALSE;
1093 cmp = gen_float_relational (test_code, cmp0, cmp1);
1094 break;
1095 }
1096
1097 /* Generate the branch. */
1098
1099 label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
1100 label2 = pc_rtx;
1101
1102 if (invert)
1103 {
1104 label2 = label1;
1105 label1 = pc_rtx;
1106 }
1107
1108 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
1109 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
1110 label1,
1111 label2)));
1112}
1113
1114
1115static rtx
1116gen_conditional_move (cmp)
1117 rtx cmp;
1118{
1119 enum rtx_code code = GET_CODE (cmp);
1120 rtx op0 = branch_cmp[0];
1121 rtx op1 = branch_cmp[1];
1122
1123 if (branch_type == CMP_SI)
1124 {
1125 /* Jump optimization calls get_condition() which canonicalizes
1126 comparisons like (GE x <const>) to (GT x <const-1>).
1127 Transform those comparisons back to GE, since that is the
1128 comparison supported in Xtensa. We shouldn't have to
1129 transform <LE x const> comparisons, because neither
1130 xtensa_expand_conditional_branch() nor get_condition() will
1131 produce them. */
1132
1133 if ((code == GT) && (op1 == constm1_rtx))
1134 {
1135 code = GE;
1136 op1 = const0_rtx;
1137 }
1138 cmp = gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
1139
1140 if (boolean_operator (cmp, VOIDmode))
1141 {
1142 /* swap the operands to make const0 second */
1143 if (op0 == const0_rtx)
1144 {
1145 op0 = op1;
1146 op1 = const0_rtx;
1147 }
1148
1149 /* if not comparing against zero, emit a comparison (subtract) */
1150 if (op1 != const0_rtx)
1151 {
1152 op0 = expand_binop (SImode, sub_optab, op0, op1,
1153 0, 0, OPTAB_LIB_WIDEN);
1154 op1 = const0_rtx;
1155 }
1156 }
1157 else if (branch_operator (cmp, VOIDmode))
1158 {
1159 /* swap the operands to make const0 second */
1160 if (op0 == const0_rtx)
1161 {
1162 op0 = op1;
1163 op1 = const0_rtx;
1164
1165 switch (code)
1166 {
1167 case LT: code = GE; break;
1168 case GE: code = LT; break;
1169 default: abort ();
1170 }
1171 }
1172
1173 if (op1 != const0_rtx)
1174 return 0;
1175 }
1176 else
1177 return 0;
1178
1179 return gen_rtx (code, VOIDmode, op0, op1);
1180 }
1181
1182 if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
1183 return gen_float_relational (code, op0, op1);
1184
1185 return 0;
1186}
1187
1188
1189int
1190xtensa_expand_conditional_move (operands, isflt)
1191 rtx *operands;
1192 int isflt;
1193{
1194 rtx cmp;
1195 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1196
1197 if (!(cmp = gen_conditional_move (operands[1])))
1198 return 0;
1199
1200 if (isflt)
1201 gen_fn = (branch_type == CMP_SI
1202 ? gen_movsfcc_internal0
1203 : gen_movsfcc_internal1);
1204 else
1205 gen_fn = (branch_type == CMP_SI
1206 ? gen_movsicc_internal0
1207 : gen_movsicc_internal1);
1208
1209 emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
1210 operands[2], operands[3], cmp));
1211 return 1;
1212}
1213
1214
1215int
1216xtensa_expand_scc (operands)
1217 rtx *operands;
1218{
1219 rtx dest = operands[0];
1220 rtx cmp = operands[1];
1221 rtx one_tmp, zero_tmp;
1222 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1223
1224 if (!(cmp = gen_conditional_move (cmp)))
1225 return 0;
1226
1227 one_tmp = gen_reg_rtx (SImode);
1228 zero_tmp = gen_reg_rtx (SImode);
1229 emit_insn (gen_movsi (one_tmp, const_true_rtx));
1230 emit_insn (gen_movsi (zero_tmp, const0_rtx));
1231
1232 gen_fn = (branch_type == CMP_SI
1233 ? gen_movsicc_internal0
1234 : gen_movsicc_internal1);
1235 emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
1236 return 1;
1237}
1238
1239
1240/* Emit insns to move operands[1] into operands[0].
1241
1242 Return 1 if we have written out everything that needs to be done to
1243 do the move. Otherwise, return 0 and the caller will emit the move
1244 normally. */
1245
1246int
1247xtensa_emit_move_sequence (operands, mode)
1248 rtx *operands;
1249 enum machine_mode mode;
1250{
1251 if (CONSTANT_P (operands[1])
1252 && GET_CODE (operands[1]) != CONSTANT_P_RTX
1253 && (GET_CODE (operands[1]) != CONST_INT
1254 || !xtensa_simm12b (INTVAL (operands[1]))))
1255 {
1256 xtensa_load_constant (operands[0], operands[1]);
1257 return 1;
1258 }
1259
1260 if (!(reload_in_progress | reload_completed))
1261 {
 1262 if (!xtensa_valid_move (mode, operands))
1263 operands[1] = force_reg (mode, operands[1]);
1264
1265 /* Check if this move is copying an incoming argument in a7. If
1266 so, emit the move, followed by the special "set_frame_ptr"
1267 unspec_volatile insn, at the very beginning of the function.
1268 This is necessary because the register allocator will ignore
1269 conflicts with a7 and may assign some other pseudo to a7. If
1270 that pseudo was assigned prior to this move, it would clobber
1271 the incoming argument in a7. By copying the argument out of
1272 a7 as the very first thing, and then immediately following
1273 that with an unspec_volatile to keep the scheduler away, we
1274 should avoid any problems. */
1275
1276 if (a7_overlap_mentioned_p (operands[1]))
1277 {
1278 rtx mov;
1279 switch (mode)
1280 {
1281 case SImode:
1282 mov = gen_movsi_internal (operands[0], operands[1]);
1283 break;
1284 case HImode:
1285 mov = gen_movhi_internal (operands[0], operands[1]);
1286 break;
1287 case QImode:
1288 mov = gen_movqi_internal (operands[0], operands[1]);
1289 break;
1290 default:
1291 abort ();
1292 }
1293
1294 /* Insert the instructions before any other argument copies.
1295 (The set_frame_ptr insn comes _after_ the move, so push it
1296 out first.) */
1297 push_topmost_sequence ();
1298 emit_insn_after (gen_set_frame_ptr (), get_insns ());
1299 emit_insn_after (mov, get_insns ());
1300 pop_topmost_sequence ();
1301
1302 return 1;
1303 }
1304 }
1305
1306 /* During reload we don't want to emit (subreg:X (mem:Y)) since that
1307 instruction won't be recognized after reload. So we remove the
1308 subreg and adjust mem accordingly. */
1309 if (reload_in_progress)
1310 {
1311 operands[0] = fixup_subreg_mem (operands[0]);
1312 operands[1] = fixup_subreg_mem (operands[1]);
1313 }
1314 return 0;
1315}
1316
1317static rtx
1318fixup_subreg_mem (x)
1319 rtx x;
1320{
1321 if (GET_CODE (x) == SUBREG
1322 && GET_CODE (SUBREG_REG (x)) == REG
1323 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1324 {
1325 rtx temp =
1326 gen_rtx_SUBREG (GET_MODE (x),
1327 reg_equiv_mem [REGNO (SUBREG_REG (x))],
1328 SUBREG_BYTE (x));
1329 x = alter_subreg (&temp);
1330 }
1331 return x;
1332}
1333
1334
1335/* Try to expand a block move operation to an RTL block move instruction.
1336 If not optimizing or if the block size is not a constant or if the
1337 block is small, the expansion fails and GCC falls back to calling
1338 memcpy().
1339
1340 operands[0] is the destination
1341 operands[1] is the source
1342 operands[2] is the length
1343 operands[3] is the alignment */
1344
1345int
1346xtensa_expand_block_move (operands)
1347 rtx *operands;
1348{
1349 rtx dest = operands[0];
1350 rtx src = operands[1];
1351 int bytes = INTVAL (operands[2]);
1352 int align = XINT (operands[3], 0);
1353 int num_pieces, move_ratio;
1354
1355 /* If this is not a fixed size move, just call memcpy */
1356 if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1357 return 0;
1358
1359 /* Anything to move? */
1360 if (bytes <= 0)
1361 return 1;
1362
1363 if (align > MOVE_MAX)
1364 align = MOVE_MAX;
1365
1366 /* decide whether to expand inline based on the optimization level */
1367 move_ratio = 4;
1368 if (optimize > 2)
1369 move_ratio = LARGEST_MOVE_RATIO;
1370 num_pieces = (bytes / align) + (bytes % align); /* close enough anyway */
1371 if (num_pieces >= move_ratio)
1372 return 0;
1373
1374 /* make sure the memory addresses are valid */
1375 operands[0] = validize_mem (dest);
1376 operands[1] = validize_mem (src);
1377
1378 emit_insn (gen_movstrsi_internal (operands[0], operands[1],
1379 operands[2], operands[3]));
1380 return 1;
1381}
1382
1383
1384/* Emit a sequence of instructions to implement a block move, trying
1385 to hide load delay slots as much as possible. Load N values into
1386 temporary registers, store those N values, and repeat until the
1387 complete block has been moved. N=delay_slots+1 */
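/* For example, if the load latency gives one delay slot (N = 2) and
   the operands are word-aligned, a 16-byte copy comes out as two
   rounds of l32i/l32i followed by s32i/s32i.  */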
1388
1389struct meminsnbuf {
1390 char template[30];
1391 rtx operands[2];
1392};
1393
1394void
1395xtensa_emit_block_move (operands, tmpregs, delay_slots)
1396 rtx *operands;
1397 rtx *tmpregs;
1398 int delay_slots;
1399{
1400 rtx dest = operands[0];
1401 rtx src = operands[1];
1402 int bytes = INTVAL (operands[2]);
1403 int align = XINT (operands[3], 0);
1404 rtx from_addr = XEXP (src, 0);
1405 rtx to_addr = XEXP (dest, 0);
1406 int from_struct = MEM_IN_STRUCT_P (src);
1407 int to_struct = MEM_IN_STRUCT_P (dest);
1408 int offset = 0;
1409 int chunk_size, item_size;
1410 struct meminsnbuf *ldinsns, *stinsns;
1411 const char *ldname, *stname;
1412 enum machine_mode mode;
1413
1414 if (align > MOVE_MAX)
1415 align = MOVE_MAX;
1416 item_size = align;
1417 chunk_size = delay_slots + 1;
1418
1419 ldinsns = (struct meminsnbuf *)
1420 alloca (chunk_size * sizeof (struct meminsnbuf));
1421 stinsns = (struct meminsnbuf *)
1422 alloca (chunk_size * sizeof (struct meminsnbuf));
1423
1424 mode = xtensa_find_mode_for_size (item_size);
1425 item_size = GET_MODE_SIZE (mode);
1426 ldname = xtensa_ld_opcodes[(int) mode];
1427 stname = xtensa_st_opcodes[(int) mode];
1428
1429 while (bytes > 0)
1430 {
1431 int n;
1432
1433 for (n = 0; n < chunk_size; n++)
1434 {
1435 rtx addr, mem;
1436
1437 if (bytes == 0)
1438 {
1439 chunk_size = n;
1440 break;
1441 }
1442
1443 if (bytes < item_size)
1444 {
1445 /* find a smaller item_size which we can load & store */
1446 item_size = bytes;
1447 mode = xtensa_find_mode_for_size (item_size);
1448 item_size = GET_MODE_SIZE (mode);
1449 ldname = xtensa_ld_opcodes[(int) mode];
1450 stname = xtensa_st_opcodes[(int) mode];
1451 }
1452
1453 /* record the load instruction opcode and operands */
1454 addr = plus_constant (from_addr, offset);
1455 mem = gen_rtx_MEM (mode, addr);
1456 if (! memory_address_p (mode, addr))
1457 abort ();
1458 MEM_IN_STRUCT_P (mem) = from_struct;
1459 ldinsns[n].operands[0] = tmpregs[n];
1460 ldinsns[n].operands[1] = mem;
1461 sprintf (ldinsns[n].template, "%s\t%%0, %%1", ldname);
1462
1463 /* record the store instruction opcode and operands */
1464 addr = plus_constant (to_addr, offset);
1465 mem = gen_rtx_MEM (mode, addr);
1466 if (! memory_address_p (mode, addr))
1467 abort ();
1468 MEM_IN_STRUCT_P (mem) = to_struct;
1469 stinsns[n].operands[0] = tmpregs[n];
1470 stinsns[n].operands[1] = mem;
1471 sprintf (stinsns[n].template, "%s\t%%0, %%1", stname);
1472
1473 offset += item_size;
1474 bytes -= item_size;
1475 }
1476
1477 /* now output the loads followed by the stores */
1478 for (n = 0; n < chunk_size; n++)
1479 output_asm_insn (ldinsns[n].template, ldinsns[n].operands);
1480 for (n = 0; n < chunk_size; n++)
1481 output_asm_insn (stinsns[n].template, stinsns[n].operands);
1482 }
1483}
1484
1485
1486static enum machine_mode
1487xtensa_find_mode_for_size (item_size)
1488 unsigned item_size;
1489{
1490 enum machine_mode mode, tmode;
1491
1492 while (1)
1493 {
1494 mode = VOIDmode;
1495
1496 /* find mode closest to but not bigger than item_size */
1497 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1498 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1499 if (GET_MODE_SIZE (tmode) <= item_size)
1500 mode = tmode;
1501 if (mode == VOIDmode)
1502 abort ();
1503
1504 item_size = GET_MODE_SIZE (mode);
1505
1506 if (xtensa_ld_opcodes[(int) mode]
1507 && xtensa_st_opcodes[(int) mode])
1508 break;
1509
1510 /* cannot load & store this mode; try something smaller */
1511 item_size -= 1;
1512 }
1513
1514 return mode;
1515}
1516
1517
1518void
1519xtensa_expand_nonlocal_goto (operands)
1520 rtx *operands;
1521{
1522 rtx goto_handler = operands[1];
1523 rtx containing_fp = operands[3];
1524
1525 /* generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1526 is too big to generate in-line */
1527
1528 if (GET_CODE (containing_fp) != REG)
1529 containing_fp = force_reg (Pmode, containing_fp);
1530
1531 goto_handler = replace_rtx (copy_rtx (goto_handler),
1532 virtual_stack_vars_rtx,
1533 containing_fp);
1534
1535 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1536 0, VOIDmode, 2,
1537 containing_fp, Pmode,
1538 goto_handler, Pmode);
1539}
1540
1541
1542static void
1543xtensa_init_machine_status (p)
1544 struct function *p;
1545{
1546 p->machine = (struct machine_function *)
1547 xcalloc (1, sizeof (struct machine_function));
1548}
1549
1550
1551static void
1552xtensa_free_machine_status (p)
1553 struct function *p;
1554{
1555 free (p->machine);
1556 p->machine = NULL;
1557}
1558
1559
1560void
1561xtensa_setup_frame_addresses ()
1562{
1563 /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
1564 cfun->machine->accesses_prev_frame = 1;
1565
1566 emit_library_call
1567 (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
1568 0, VOIDmode, 0);
1569}
1570
1571
1572/* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1573 a comment showing where the end of the loop is. However, if there is a
1574 label or a branch at the end of the loop then we need to place a nop
1575 there. If the loop ends with a label we need the nop so that branches
 1576 targeting that label will target the nop (and thus remain in the loop),
 1577 instead of targeting the instruction after the loop (and thus exiting
 1578 the loop). If the loop ends with a branch, we need the nop in case the
 1579 branch is targeting a location inside the loop. When the branch
1580 executes it will cause the loop count to be decremented even if it is
1581 taken (because it is the last instruction in the loop), so we need to
1582 nop after the branch to prevent the loop count from being decremented
1583 when the branch is taken. */
1584
1585void
1586xtensa_emit_loop_end (insn, operands)
1587 rtx insn;
1588 rtx *operands;
1589{
1590 char done = 0;
1591
1592 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1593 {
1594 switch (GET_CODE (insn))
1595 {
1596 case NOTE:
1597 case BARRIER:
1598 break;
1599
1600 case CODE_LABEL:
1601 output_asm_insn ("nop.n", operands);
1602 done = 1;
1603 break;
1604
1605 default:
1606 {
1607 rtx body = PATTERN (insn);
1608
1609 if (GET_CODE (body) == JUMP_INSN)
1610 {
1611 output_asm_insn ("nop.n", operands);
1612 done = 1;
1613 }
1614 else if ((GET_CODE (body) != USE)
1615 && (GET_CODE (body) != CLOBBER))
1616 done = 1;
1617 }
1618 break;
1619 }
1620 }
1621
1622 output_asm_insn ("# loop end for %0", operands);
1623}
1624
1625
1626char *
1627xtensa_emit_call (callop, operands)
1628 int callop;
1629 rtx *operands;
1630{
1631 char *result = (char *) malloc (64);
1632 rtx tgt = operands[callop];
1633
1634 if (GET_CODE (tgt) == CONST_INT)
1635 sprintf (result, "call8\t0x%x", INTVAL (tgt));
1636 else if (register_operand (tgt, VOIDmode))
1637 sprintf (result, "callx8\t%%%d", callop);
1638 else
1639 sprintf (result, "call8\t%%%d", callop);
1640
1641 return result;
1642}
1643
1644
1645/* Return the stabs register number to use for 'regno'. */
1646
1647int
1648xtensa_dbx_register_number (regno)
1649 int regno;
1650{
1651 int first = -1;
1652
1653 if (GP_REG_P (regno)) {
1654 regno -= GP_REG_FIRST;
1655 first = 0;
1656 }
1657 else if (BR_REG_P (regno)) {
1658 regno -= BR_REG_FIRST;
1659 first = 16;
1660 }
1661 else if (FP_REG_P (regno)) {
1662 regno -= FP_REG_FIRST;
1663 /* The current numbering convention is that TIE registers are
1664 numbered in libcc order beginning with 256. We can't guarantee
1665 that the FP registers will come first, so the following is just
1666 a guess. It seems like we should make a special case for FP
1667 registers and give them fixed numbers < 256. */
1668 first = 256;
1669 }
1670 else if (ACC_REG_P (regno))
1671 {
1672 first = 0;
1673 regno = -1;
1674 }
1675
1676 /* When optimizing, we sometimes get asked about pseudo-registers
1677 that don't represent hard registers. Return 0 for these. */
1678 if (first == -1)
1679 return 0;
1680
1681 return first + regno;
1682}
1683
1684
1685/* Argument support functions. */
1686
1687/* Initialize CUMULATIVE_ARGS for a function. */
1688
1689void
1690init_cumulative_args (cum, fntype, libname)
1691 CUMULATIVE_ARGS *cum; /* argument info to initialize */
1692 tree fntype ATTRIBUTE_UNUSED; /* tree ptr for function decl */
1693 rtx libname ATTRIBUTE_UNUSED; /* SYMBOL_REF of library name or 0 */
1694{
1695 cum->arg_words = 0;
1696}
1697
1698/* Advance the argument to the next argument position. */
1699
1700void
1701function_arg_advance (cum, mode, type)
1702 CUMULATIVE_ARGS *cum; /* current arg information */
1703 enum machine_mode mode; /* current arg mode */
1704 tree type; /* type of the argument or 0 if lib support */
1705{
1706 int words, max;
1707 int *arg_words;
1708
1709 arg_words = &cum->arg_words;
1710 max = MAX_ARGS_IN_REGISTERS;
1711
1712 words = (((mode != BLKmode)
1713 ? (int) GET_MODE_SIZE (mode)
1714 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1715
1716 if ((*arg_words + words > max) && (*arg_words < max))
1717 *arg_words = max;
1718
1719 *arg_words += words;
1720}
1721
1722
1723/* Return an RTL expression containing the register for the given mode,
1724 or 0 if the argument is to be passed on the stack. */
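/* Arguments are assigned to consecutive words starting at GP_ARG_FIRST
   (GP_OUTGOING_ARG_FIRST for outgoing calls); a type aligned to more
   than a word is first padded to an even word index, and an argument
   that does not fit entirely within MAX_ARGS_IN_REGISTERS words is
   passed on the stack (return 0 below).  */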
1725
1726rtx
1727function_arg (cum, mode, type, incoming_p)
1728 CUMULATIVE_ARGS *cum; /* current arg information */
1729 enum machine_mode mode; /* current arg mode */
1730 tree type; /* type of the argument or 0 if lib support */
1731 int incoming_p; /* computing the incoming registers? */
1732{
1733 int regbase, words, max;
1734 int *arg_words;
1735 int regno;
1736 enum machine_mode result_mode;
1737
1738 arg_words = &cum->arg_words;
1739 regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
1740 max = MAX_ARGS_IN_REGISTERS;
1741
1742 words = (((mode != BLKmode)
1743 ? (int) GET_MODE_SIZE (mode)
1744 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1745
1746 if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
1747 *arg_words += (*arg_words & 1);
1748
1749 if (*arg_words + words > max)
1750 return (rtx)0;
1751
1752 regno = regbase + *arg_words;
1753 result_mode = (mode == BLKmode ? TYPE_MODE (type) : mode);
1754
1755 /* We need to make sure that references to a7 are represented with
1756 rtx that is not equal to hard_frame_pointer_rtx. For BLKmode and
1757 modes bigger than 2 words (because we only have patterns for
1758 modes of 2 words or smaller), we can't control the expansion
1759 unless we explicitly list the individual registers in a PARALLEL. */
1760
1761 if ((mode == BLKmode || words > 2)
1762 && regno < A7_REG
1763 && regno + words > A7_REG)
1764 {
1765 rtx result;
1766 int n;
1767
1768 result = gen_rtx_PARALLEL (result_mode, rtvec_alloc (words));
1769 for (n = 0; n < words; n++)
1770 {
1771 XVECEXP (result, 0, n) =
1772 gen_rtx_EXPR_LIST (VOIDmode,
1773 gen_raw_REG (SImode, regno + n),
1774 GEN_INT (n * UNITS_PER_WORD));
1775 }
1776 return result;
1777 }
1778
1779 return gen_raw_REG (result_mode, regno);
1780}
1781
1782
1783void
1784override_options ()
1785{
1786 int regno;
1787 enum machine_mode mode;
1788
1789 if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
1790 error ("boolean registers required for the floating-point option");
1791
1792 /* set up the tables of ld/st opcode names for block moves */
1793 xtensa_ld_opcodes[(int) SImode] = "l32i";
1794 xtensa_ld_opcodes[(int) HImode] = "l16ui";
1795 xtensa_ld_opcodes[(int) QImode] = "l8ui";
1796 xtensa_st_opcodes[(int) SImode] = "s32i";
1797 xtensa_st_opcodes[(int) HImode] = "s16i";
1798 xtensa_st_opcodes[(int) QImode] = "s8i";
1799
1800 xtensa_char_to_class['q'] = SP_REG;
1801 xtensa_char_to_class['a'] = GR_REGS;
1802 xtensa_char_to_class['b'] = ((TARGET_BOOLEANS) ? BR_REGS : NO_REGS);
1803 xtensa_char_to_class['f'] = ((TARGET_HARD_FLOAT) ? FP_REGS : NO_REGS);
1804 xtensa_char_to_class['A'] = ((TARGET_MAC16) ? ACC_REG : NO_REGS);
1805 xtensa_char_to_class['B'] = ((TARGET_SEXT) ? GR_REGS : NO_REGS);
1806 xtensa_char_to_class['C'] = ((TARGET_MUL16) ? GR_REGS: NO_REGS);
1807 xtensa_char_to_class['D'] = ((TARGET_DENSITY) ? GR_REGS: NO_REGS);
1808 xtensa_char_to_class['d'] = ((TARGET_DENSITY) ? AR_REGS: NO_REGS);
1809
1810 /* Set up array giving whether a given register can hold a given mode. */
1811 for (mode = VOIDmode;
1812 mode != MAX_MACHINE_MODE;
1813 mode = (enum machine_mode) ((int) mode + 1))
1814 {
1815 int size = GET_MODE_SIZE (mode);
1816 enum mode_class class = GET_MODE_CLASS (mode);
1817
1818 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1819 {
1820 int temp;
1821
1822 if (ACC_REG_P (regno))
1823 temp = (TARGET_MAC16 &&
1824 (class == MODE_INT) && (size <= UNITS_PER_WORD));
1825 else if (GP_REG_P (regno))
1826 temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
1827 else if (FP_REG_P (regno))
1828 temp = (TARGET_HARD_FLOAT && (mode == SFmode));
1829 else if (BR_REG_P (regno))
1830 temp = (TARGET_BOOLEANS && (mode == CCmode));
1831 else
1832 temp = FALSE;
1833
1834 xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
1835 }
1836 }
1837
1838 init_machine_status = xtensa_init_machine_status;
1839 free_machine_status = xtensa_free_machine_status;
1840
1841 /* Check PIC settings. There's no need for -fPIC on Xtensa and
1842 some targets need to always use PIC. */
1843 if (XTENSA_ALWAYS_PIC)
1844 {
1845 if (flag_pic)
1846 warning ("-f%s ignored (all code is position independent)",
1847 (flag_pic > 1 ? "PIC" : "pic"));
1848 flag_pic = 1;
1849 }
1850 if (flag_pic > 1)
1851 flag_pic = 1;
1852}
1853
1854
1855/* A C compound statement to output to stdio stream STREAM the
1856 assembler syntax for an instruction operand X. X is an RTL
1857 expression.
1858
1859 CODE is a value that can be used to specify one of several ways
1860 of printing the operand. It is used when identical operands
1861 must be printed differently depending on the context. CODE
1862 comes from the '%' specification that was used to request
1863 printing of the operand. If the specification was just '%DIGIT'
1864 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1865 is the ASCII code for LTR.
1866
1867 If X is a register, this macro should print the register's name.
1868 The names can be found in an array 'reg_names' whose type is
1869 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
1870
1871 When the machine description has a specification '%PUNCT' (a '%'
1872 followed by a punctuation character), this macro is called with
1873 a null pointer for X and the punctuation character for CODE.
1874
1875 'a', 'c', 'l', and 'n' are reserved.
1876
1877 The Xtensa specific codes are:
1878
1879 'd' CONST_INT, print as signed decimal
1880 'x' CONST_INT, print as signed hexadecimal
1881 'K' CONST_INT, print number of bits in mask for EXTUI
1882 'R' CONST_INT, print (X & 0x1f)
1883 'L' CONST_INT, print ((32 - X) & 0x1f)
1884 'D' REG, print second register of double-word register operand
1885 'N' MEM, print address of next word following a memory operand
1886 'v' MEM, if memory reference is volatile, output a MEMW before it
1887*/
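/* Examples: '%K' prints 6 for the mask 0x3f (the EXTUI field width),
   '%L' prints 24 for the shift amount 8, and '%R' prints 8 for 40.  */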
1888
1889static void
1890printx (file, val)
1891 FILE *file;
1892 signed int val;
1893{
1894 /* print a hexadecimal value in a nice way */
1895 if ((val > -0xa) && (val < 0xa))
1896 fprintf (file, "%d", val);
1897 else if (val < 0)
1898 fprintf (file, "-0x%x", -val);
1899 else
1900 fprintf (file, "0x%x", val);
1901}
1902
1903
1904void
1905print_operand (file, op, letter)
1906 FILE *file; /* file to write to */
1907 rtx op; /* operand to print */
1908 int letter; /* %<letter> or 0 */
1909{
1910 enum rtx_code code;
1911
1912 if (! op)
1913 error ("PRINT_OPERAND null pointer");
1914
1915 code = GET_CODE (op);
1916 switch (code)
1917 {
1918 case REG:
1919 case SUBREG:
1920 {
1921 int regnum = xt_true_regnum (op);
1922 if (letter == 'D')
1923 regnum++;
1924 fprintf (file, "%s", reg_names[regnum]);
1925 break;
1926 }
1927
1928 case MEM:
1929 /* For a volatile memory reference, emit a MEMW before the
1930 load or store. */
1931 if (letter == 'v')
1932 {
1933 if (MEM_VOLATILE_P (op) && TARGET_SERIALIZE_VOLATILE)
1934 fprintf (file, "memw\n\t");
1935 break;
1936 }
1937 else if (letter == 'N')
1938 {
1939 enum machine_mode mode;
1940 switch (GET_MODE (op))
1941 {
1942 case DFmode: mode = SFmode; break;
1943 case DImode: mode = SImode; break;
1944 default: abort ();
1945 }
1946 op = adjust_address (op, mode, 4);
1947 }
1948
1949 output_address (XEXP (op, 0));
1950 break;
1951
1952 case CONST_INT:
1953 switch (letter)
1954 {
1955 case 'K':
1956 {
1957 int num_bits = 0;
1958 unsigned val = INTVAL (op);
1959 while (val & 1)
1960 {
1961 num_bits += 1;
1962 val = val >> 1;
1963 }
1964 if ((val != 0) || (num_bits == 0) || (num_bits > 16))
1965 fatal_insn ("invalid mask", op);
1966
1967 fprintf (file, "%d", num_bits);
1968 break;
1969 }
1970
1971 case 'L':
1972 fprintf (file, "%d", (32 - INTVAL (op)) & 0x1f);
1973 break;
1974
1975 case 'R':
1976 fprintf (file, "%d", INTVAL (op) & 0x1f);
1977 break;
1978
1979 case 'x':
1980 printx (file, INTVAL (op));
1981 break;
1982
1983 case 'd':
1984 default:
1985 fprintf (file, "%d", INTVAL (op));
1986 break;
1987
1988 }
1989 break;
1990
1991 default:
1992 output_addr_const (file, op);
1993 }
1994}
1995
1996
1997/* A C compound statement to output to stdio stream STREAM the
1998 assembler syntax for an instruction operand that is a memory
1999 reference whose address is ADDR. ADDR is an RTL expression.
2000
2001 On some machines, the syntax for a symbolic address depends on
2002 the section that the address refers to. On these machines,
2003 define the macro 'ENCODE_SECTION_INFO' to store the information
2004 into the 'symbol_ref', and then check for it here. */
2005
2006void
2007print_operand_address (file, addr)
2008 FILE *file;
2009 rtx addr;
2010{
2011 if (!addr)
2012 error ("PRINT_OPERAND_ADDRESS, null pointer");
2013
2014 switch (GET_CODE (addr))
2015 {
2016 default:
2017 fatal_insn ("invalid address", addr);
2018 break;
2019
2020 case REG:
2021 fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
2022 break;
2023
2024 case PLUS:
2025 {
2026 rtx reg = (rtx)0;
2027 rtx offset = (rtx)0;
2028 rtx arg0 = XEXP (addr, 0);
2029 rtx arg1 = XEXP (addr, 1);
2030
2031 if (GET_CODE (arg0) == REG)
2032 {
2033 reg = arg0;
2034 offset = arg1;
2035 }
2036 else if (GET_CODE (arg1) == REG)
2037 {
2038 reg = arg1;
2039 offset = arg0;
2040 }
2041 else
2042 fatal_insn ("no register in address", addr);
2043
2044 if (CONSTANT_P (offset))
2045 {
2046 fprintf (file, "%s, ", reg_names [REGNO (reg)]);
2047 output_addr_const (file, offset);
2048 }
2049 else
2050 fatal_insn ("address offset not a constant", addr);
2051 }
2052 break;
2053
2054 case LABEL_REF:
2055 case SYMBOL_REF:
2056 case CONST_INT:
2057 case CONST:
2058 output_addr_const (file, addr);
2059 break;
2060 }
2061}
2062
2063
2064/* Emit either a label, .comm, or .lcomm directive. */
2065
2066void
2067xtensa_declare_object (file, name, init_string, final_string, size)
2068 FILE *file;
2069 char *name;
2070 char *init_string;
2071 char *final_string;
2072 int size;
2073{
2074 fputs (init_string, file); /* "", "\t.comm\t", or "\t.lcomm\t" */
2075 assemble_name (file, name);
2076 fprintf (file, final_string, size); /* ":\n", ",%u\n", ",%u\n" */
2077}
2078
2079
2080void
2081xtensa_output_literal (file, x, mode, labelno)
2082 FILE *file;
2083 rtx x;
2084 enum machine_mode mode;
2085 int labelno;
2086{
2087 long value_long[2];
 2088 REAL_VALUE_TYPE r;
2089 int size;
2090
2091 fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);
2092
2093 switch (GET_MODE_CLASS (mode))
2094 {
2095 case MODE_FLOAT:
2096 if (GET_CODE (x) != CONST_DOUBLE)
2097 abort ();
2098
 2099 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2100 switch (mode)
2101 {
2102 case SFmode:
2103 REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
2104 fprintf (file, "0x%08lx\n", value_long[0]);
2105 break;
2106
2107 case DFmode:
2108 REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
2109 fprintf (file, "0x%08lx, 0x%08lx\n",
2110 value_long[0], value_long[1]);
2111 break;
2112
2113 default:
2114 abort ();
2115 }
2116
2117 break;
2118
2119 case MODE_INT:
2120 case MODE_PARTIAL_INT:
2121 size = GET_MODE_SIZE (mode);
2122 if (size == 4)
2123 {
2124 output_addr_const (file, x);
2125 fputs ("\n", file);
2126 }
2127 else if (size == 8)
2128 {
2129 output_addr_const (file, operand_subword (x, 0, 0, DImode));
2130 fputs (", ", file);
2131 output_addr_const (file, operand_subword (x, 1, 0, DImode));
2132 fputs ("\n", file);
2133 }
2134 else
2135 abort ();
2136 break;
2137
2138 default:
2139 abort ();
2140 }
2141}
2142
2143
2144/* Return the bytes needed to compute the frame pointer from the current
2145 stack pointer. */
2146
2147#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2148#define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
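/* XTENSA_STACK_ALIGN rounds a byte count up to the next multiple of
   STACK_BYTES (the stack boundary expressed in bytes); e.g. with a
   16-byte boundary, 20 bytes of locals round up to 32.  */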
2149
2150long
2151compute_frame_size (size)
2152 int size; /* # of var. bytes allocated */
2153{
2154 /* add space for the incoming static chain value */
2155 if (current_function_needs_context)
2156 size += (1 * UNITS_PER_WORD);
2157
2158 xtensa_current_frame_size =
2159 XTENSA_STACK_ALIGN (size
2160 + current_function_outgoing_args_size
2161 + (WINDOW_SIZE * UNITS_PER_WORD));
2162 return xtensa_current_frame_size;
2163}
2164
2165
2166int
2167xtensa_frame_pointer_required ()
2168{
2169 /* The code to expand builtin_frame_addr and builtin_return_addr
2170 currently uses the hard_frame_pointer instead of frame_pointer.
2171 This seems wrong but maybe it's necessary for other architectures.
2172 This function is derived from the i386 code. */
2173
2174 if (cfun->machine->accesses_prev_frame)
2175 return 1;
2176
2177 return 0;
2178}
2179
2180
2181void
2182xtensa_reorg (first)
2183 rtx first;
2184{
2185 rtx insn, set_frame_ptr_insn = 0;
2186
2187 unsigned long tsize = compute_frame_size (get_frame_size ());
2188 if (tsize < (1 << (12+3)))
2189 frame_size_const = 0;
2190 else
2191 {
2192 frame_size_const = force_const_mem (SImode, GEN_INT (tsize - 16));
2193
2194 /* make sure the constant is used so it doesn't get eliminated
2195 from the constant pool */
2196 emit_insn_before (gen_rtx_USE (SImode, frame_size_const), first);
2197 }
2198
2199 if (!frame_pointer_needed)
2200 return;
2201
2202 /* Search all instructions, looking for the insn that sets up the
2203 frame pointer. This search will fail if the function does not
2204 have an incoming argument in $a7, but in that case, we can just
2205 set up the frame pointer at the very beginning of the
2206 function. */
2207
2208 for (insn = first; insn; insn = NEXT_INSN (insn))
2209 {
2210 rtx pat;
2211
2212 if (!INSN_P (insn))
2213 continue;
2214
2215 pat = PATTERN (insn);
2216 if (GET_CODE (pat) == UNSPEC_VOLATILE
2217 && (XINT (pat, 1) == UNSPECV_SET_FP))
2218 {
2219 set_frame_ptr_insn = insn;
2220 break;
2221 }
2222 }
2223
2224 if (set_frame_ptr_insn)
2225 {
2226 /* for all instructions prior to set_frame_ptr_insn, replace
2227 hard_frame_pointer references with stack_pointer */
2228 for (insn = first; insn != set_frame_ptr_insn; insn = NEXT_INSN (insn))
2229 {
2230 if (INSN_P (insn))
2231 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2232 hard_frame_pointer_rtx,
2233 stack_pointer_rtx);
2234 }
2235 }
2236 else
2237 {
2238 /* emit the frame pointer move immediately after the NOTE that starts
2239 the function */
2240 emit_insn_after (gen_movsi (hard_frame_pointer_rtx,
2241 stack_pointer_rtx), first);
2242 }
2243}
2244
2245
2246/* Set up the stack and frame (if desired) for the function. */
2247
2248void
2249xtensa_function_prologue (file, size)
2250 FILE *file;
2251 int size ATTRIBUTE_UNUSED;
2252{
2253 unsigned long tsize = compute_frame_size (get_frame_size ());
2254
2255 if (frame_pointer_needed)
2256 fprintf (file, "\t.frame\ta7, %ld\n", tsize);
2257 else
2258 fprintf (file, "\t.frame\tsp, %ld\n", tsize);
2259
2260
2261 if (tsize < (1 << (12+3)))
2262 {
2263 fprintf (file, "\tentry\tsp, %ld\n", tsize);
2264 }
2265 else
2266 {
2267 fprintf (file, "\tentry\tsp, 16\n");
2268
2269 /* use a8 as a temporary since a0-a7 may be live */
2270 fprintf (file, "\tl32r\ta8, ");
2271 print_operand (file, frame_size_const, 0);
2272 fprintf (file, "\n\tsub\ta8, sp, a8\n");
2273 fprintf (file, "\tmovsp\tsp, a8\n");
2274 }
2275}
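/* Sketch of the emitted prologue (frame sizes assumed): for a 32-byte
   frame this is simply

     entry   sp, 32

   whereas a 64 KB frame exceeds the 32 KB limit checked above, so the
   sequence becomes

     entry   sp, 16
     l32r    a8, <literal holding tsize - 16>
     sub     a8, sp, a8
     movsp   sp, a8
*/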
2276
2277
2278/* Do any necessary cleanup after a function to restore
2279 stack, frame, and regs. */
2280
2281void
2282xtensa_function_epilogue (file, size)
2283 FILE *file;
2284 int size ATTRIBUTE_UNUSED;
2285{
2286 rtx insn = get_last_insn ();
2287 /* If the last insn was a BARRIER, we don't have to write anything. */
2288 if (GET_CODE (insn) == NOTE)
2289 insn = prev_nonnote_insn (insn);
2290 if (insn == 0 || GET_CODE (insn) != BARRIER)
2291 fprintf (file, TARGET_DENSITY ? "\tretw.n\n" : "\tretw\n");
2292
2293 xtensa_current_frame_size = 0;
2294}
2295
2296
2297/* Create the va_list data type.
2298 This structure is set up by __builtin_saveregs. The __va_reg
2299 field points to a stack-allocated region holding the contents of the
2300 incoming argument registers. The __va_ndx field is an index initialized
2301 to the position of the first unnamed (variable) argument. This same index
2302 is also used to address the arguments passed in memory. Thus, the
2303 __va_stk field is initialized to point to the position of the first
2304 argument in memory, offset to account for the arguments passed in
2305 registers. E.g., if there are 6 argument registers, and each register is
2306 4 bytes, then __va_stk is set to $sp - (6 * 4); then __va_reg[N*4]
2307 references argument word N for 0 <= N < 6, and __va_stk[N*4] references
2308 argument word N for N >= 6. */
2309
2310tree
2311xtensa_build_va_list (void)
2312{
2313 tree f_stk, f_reg, f_ndx, record;
2314
2315 record = make_node (RECORD_TYPE);
2316
2317 f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
2318 ptr_type_node);
2319 f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
2320 ptr_type_node);
2321 f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
2322 integer_type_node);
2323
2324 DECL_FIELD_CONTEXT (f_stk) = record;
2325 DECL_FIELD_CONTEXT (f_reg) = record;
2326 DECL_FIELD_CONTEXT (f_ndx) = record;
2327
2328 TYPE_FIELDS (record) = f_stk;
2329 TREE_CHAIN (f_stk) = f_reg;
2330 TREE_CHAIN (f_reg) = f_ndx;
2331
2332 layout_type (record);
2333 return record;
2334}
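/* The record built above corresponds roughly to the following C
   declaration (field names from the comment above; a sketch only,
   ignoring layout and padding details):

     typedef struct {
       void *__va_stk;
       void *__va_reg;
       int   __va_ndx;
     } __xtensa_va_list;
*/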
2335
2336
2337/* Save the incoming argument registers on the stack. Returns the
2338 address of the saved registers. */
2339
2340rtx
2341xtensa_builtin_saveregs ()
2342{
2343 rtx gp_regs, dest;
2344 int arg_words = current_function_arg_words;
2345 int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
2346 int i;
2347
2348 if (gp_left == 0)
2349 return const0_rtx;
2350
2351 /* allocate the general-purpose register space */
2352 gp_regs = assign_stack_local
2353 (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
2354 MEM_IN_STRUCT_P (gp_regs) = 1;
2355 RTX_UNCHANGING_P (gp_regs) = 1;
2356 RTX_UNCHANGING_P (XEXP (gp_regs, 0)) = 1;
2357
2358 /* Now store the incoming registers. */
2359 dest = change_address (gp_regs, SImode,
2360 plus_constant (XEXP (gp_regs, 0),
2361 arg_words * UNITS_PER_WORD));
2362
2363 /* Note: Don't use move_block_from_reg() here because the incoming
2364 argument in a7 cannot be represented by hard_frame_pointer_rtx.
2365 Instead, call gen_raw_REG() directly so that we get a distinct
2366 instance of (REG:SI 7). */
2367 for (i = 0; i < gp_left; i++)
2368 {
2369 emit_move_insn (operand_subword (dest, i, 1, BLKmode),
2370 gen_raw_REG (SImode, GP_ARG_FIRST + arg_words + i));
2371 }
2372
2373 return XEXP (gp_regs, 0);
2374}
2375
2376
2377/* Implement `va_start' for varargs and stdarg. We look at the
2378 current function to fill in an initial va_list. */
2379
2380void
2381xtensa_va_start (stdarg_p, valist, nextarg)
2382 int stdarg_p ATTRIBUTE_UNUSED;
2383 tree valist;
2384 rtx nextarg ATTRIBUTE_UNUSED;
2385{
2386 tree f_stk, stk;
2387 tree f_reg, reg;
2388 tree f_ndx, ndx;
2389 tree t, u;
2390 int arg_words;
2391
2392 arg_words = current_function_args_info.arg_words;
2393
2394 f_stk = TYPE_FIELDS (va_list_type_node);
2395 f_reg = TREE_CHAIN (f_stk);
2396 f_ndx = TREE_CHAIN (f_reg);
2397
2398 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2399 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2400 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2401
2402 /* Call __builtin_saveregs; save the result in __va_reg */
2403 current_function_arg_words = arg_words;
2404 u = make_tree (ptr_type_node, expand_builtin_saveregs ());
2405 t = build (MODIFY_EXPR, ptr_type_node, reg, u);
2406 TREE_SIDE_EFFECTS (t) = 1;
2407 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2408
2409 /* Set the __va_stk member to $arg_ptr - (size of __va_reg area) */
2410 u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
2411 u = fold (build (PLUS_EXPR, ptr_type_node, u,
2412 build_int_2 (-MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1)));
2413 t = build (MODIFY_EXPR, ptr_type_node, stk, u);
2414 TREE_SIDE_EFFECTS (t) = 1;
2415 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2416
2417 /* Set the __va_ndx member. */
2418 u = build_int_2 (arg_words * UNITS_PER_WORD, 0);
2419 t = build (MODIFY_EXPR, integer_type_node, ndx, u);
2420 TREE_SIDE_EFFECTS (t) = 1;
2421 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2422}
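/* Taken together, the three assignments above make va_start behave
   roughly like this C sketch, where ARG_WORDS is the number of named
   argument words and __arg_ptr stands for the incoming argument
   pointer:

     ap.__va_reg = __builtin_saveregs ();
     ap.__va_stk = (char *) __arg_ptr
                   - MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD;
     ap.__va_ndx = ARG_WORDS * UNITS_PER_WORD;
*/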
2423
2424
2425/* Implement `va_arg'. */
2426
2427rtx
2428xtensa_va_arg (valist, type)
2429 tree valist, type;
2430{
2431 tree f_stk, stk;
2432 tree f_reg, reg;
2433 tree f_ndx, ndx;
2434 tree tmp, addr_tree;
2435 rtx array, orig_ndx, r, addr;
2436 HOST_WIDE_INT size, va_size;
2437 rtx lab_false, lab_over, lab_false2;
2438
2439 size = int_size_in_bytes (type);
2440 va_size = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;
2441
2442 f_stk = TYPE_FIELDS (va_list_type_node);
2443 f_reg = TREE_CHAIN (f_stk);
2444 f_ndx = TREE_CHAIN (f_reg);
2445
2446 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2447 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2448 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2449
2450
2451 /* First align __va_ndx to a double word boundary if necessary for this arg:
2452
2453 if (__alignof__ (TYPE) > 4)
2454 (AP).__va_ndx = (((AP).__va_ndx + 7) & -8)
2455 */
2456
2457 if (TYPE_ALIGN (type) > BITS_PER_WORD)
2458 {
2459 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2460 build_int_2 ((2 * UNITS_PER_WORD) - 1, 0));
2461 tmp = build (BIT_AND_EXPR, integer_type_node, tmp,
2462 build_int_2 (-2 * UNITS_PER_WORD, -1));
2463 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2464 TREE_SIDE_EFFECTS (tmp) = 1;
2465 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2466 }
2467
2468
2469 /* Increment __va_ndx to point past the argument:
2470
2471 orig_ndx = (AP).__va_ndx;
2472 (AP).__va_ndx += __va_size (TYPE);
2473 */
2474
2475 orig_ndx = gen_reg_rtx (SImode);
2476 r = expand_expr (ndx, orig_ndx, SImode, EXPAND_NORMAL);
2477 if (r != orig_ndx)
2478 emit_move_insn (orig_ndx, r);
2479
2480 tmp = build (PLUS_EXPR, integer_type_node, ndx, build_int_2 (va_size, 0));
2481 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2482 TREE_SIDE_EFFECTS (tmp) = 1;
2483 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2484
2485
2486 /* Check if the argument is in registers:
2487
2488 if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4)
2489 __array = (AP).__va_reg;
2490 */
2491
2492 lab_false = gen_label_rtx ();
2493 lab_over = gen_label_rtx ();
2494 array = gen_reg_rtx (Pmode);
2495
2496 emit_cmp_and_jump_insns (expand_expr (ndx, NULL_RTX, SImode, EXPAND_NORMAL),
2497 GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
2498 GT, const1_rtx, SImode, 0, lab_false);
2499
2500 r = expand_expr (reg, array, Pmode, EXPAND_NORMAL);
2501 if (r != array)
2502 emit_move_insn (array, r);
2503
2504 emit_jump_insn (gen_jump (lab_over));
2505 emit_barrier ();
2506 emit_label (lab_false);
2507
2508
2509 /* ...otherwise, the argument is on the stack (never split between
2510 registers and the stack -- change __va_ndx if necessary):
2511
2512 else
2513 {
2514 if (orig_ndx < __MAX_ARGS_IN_REGISTERS * 4)
2515 (AP).__va_ndx = __MAX_ARGS_IN_REGISTERS * 4 + __va_size (TYPE);
2516 __array = (AP).__va_stk;
2517 }
2518 */
2519
2520 lab_false2 = gen_label_rtx ();
2521 emit_cmp_and_jump_insns (orig_ndx,
2522 GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
2523 GE, const1_rtx, SImode, 0, lab_false2);
2524
2525 tmp = build_int_2 ((MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD) + va_size, 0);
2526 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2527 TREE_SIDE_EFFECTS (tmp) = 1;
2528 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2529
2530 emit_label (lab_false2);
2531
2532 r = expand_expr (stk, array, Pmode, EXPAND_NORMAL);
2533 if (r != array)
2534 emit_move_insn (array, r);
2535
2536
2537 /* Given the base array pointer (__array) and index to the subsequent
2538 argument (__va_ndx), find the address:
2539
2540 Big-endian:
2541 __array + (AP).__va_ndx - sizeof (TYPE)
2542
2543 Little-endian:
2544 __array + (AP).__va_ndx - __va_size (TYPE)
2545
2546 The results are endian-dependent because values smaller than one word
2547 are aligned differently.
2548 */
2549
2550 emit_label (lab_over);
2551
2552 addr_tree = build (PLUS_EXPR, ptr_type_node,
2553 make_tree (ptr_type_node, array),
2554 ndx);
2555 addr_tree = build (PLUS_EXPR, ptr_type_node,
2556 addr_tree,
2557 build_int_2 (BYTES_BIG_ENDIAN
2558 && size < (PARM_BOUNDARY / BITS_PER_UNIT)
2559 ? -size
2560 : -va_size, -1));
2561 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2562 addr = copy_to_reg (addr);
2563 return addr;
2564}
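/* Combining the pseudocode fragments above, va_arg amounts roughly to
   the following C sketch (assuming 4-byte words and a 4-byte
   PARM_BOUNDARY; __va_size rounds sizeof (TYPE) up to whole words):

     if (__alignof__ (TYPE) > 4)
       ap.__va_ndx = (ap.__va_ndx + 7) & -8;
     orig_ndx = ap.__va_ndx;
     ap.__va_ndx += __va_size (TYPE);
     if (ap.__va_ndx <= MAX_ARGS_IN_REGISTERS * 4)
       array = ap.__va_reg;
     else
       {
         if (orig_ndx < MAX_ARGS_IN_REGISTERS * 4)
           ap.__va_ndx = MAX_ARGS_IN_REGISTERS * 4 + __va_size (TYPE);
         array = ap.__va_stk;
       }
     addr = array + ap.__va_ndx
            - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
               ? sizeof (TYPE) : __va_size (TYPE));
*/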
2565
2566
2567enum reg_class
2568xtensa_preferred_reload_class (x, class)
2569 rtx x;
2570 enum reg_class class;
2571{
2572 if (CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
2573 return NO_REGS;
2574
2575 /* Don't use sp for reloads! */
2576 if (class == AR_REGS)
2577 return GR_REGS;
2578
2579 return class;
2580}
2581
2582
2583enum reg_class
2584xtensa_secondary_reload_class (class, mode, x, isoutput)
2585 enum reg_class class;
2586 enum machine_mode mode ATTRIBUTE_UNUSED;
2587 rtx x;
2588 int isoutput;
2589{
2590 int regno;
2591
2592 if (GET_CODE (x) == SIGN_EXTEND)
2593 x = XEXP (x, 0);
2594 regno = xt_true_regnum (x);
2595
2596 if (!isoutput)
2597 {
2598 if (class == FP_REGS && constantpool_mem_p (x))
2599 return GR_REGS;
2600 }
2601
2602 if (ACC_REG_P (regno))
2603 return (class == GR_REGS ? NO_REGS : GR_REGS);
2604 if (class == ACC_REG)
2605 return (GP_REG_P (regno) ? NO_REGS : GR_REGS);
2606
2607 return NO_REGS;
2608}
2609
2610
2611void
2612order_regs_for_local_alloc ()
2613{
2614 if (!leaf_function_p ())
2615 {
2616 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2617 FIRST_PSEUDO_REGISTER * sizeof (int));
2618 }
2619 else
2620 {
2621 int i, num_arg_regs;
2622 int nxt = 0;
2623
2624 /* use the AR registers in increasing order (skipping a0 and a1)
2625 but save the incoming argument registers for a last resort */
2626 num_arg_regs = current_function_args_info.arg_words;
2627 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2628 num_arg_regs = MAX_ARGS_IN_REGISTERS;
2629 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2630 reg_alloc_order[nxt++] = i + num_arg_regs;
2631 for (i = 0; i < num_arg_regs; i++)
2632 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
2633
2634 /* list the FP registers in order for now */
2635 for (i = 0; i < 16; i++)
2636 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2637
2638 /* GCC requires that we list *all* the registers.... */
2639 reg_alloc_order[nxt++] = 0; /* a0 = return address */
2640 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
2641 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
2642 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
2643
2644 /* list the coprocessor registers in order */
2645 for (i = 0; i < BR_REG_NUM; i++)
2646 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2647
2648 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
2649 }
2650}
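/* Example ordering (an assumed case, with arguments starting at a2):
   a leaf function whose incoming arguments occupy two words gets
   a4..a15 first, then a2 and a3 as a last resort, followed by f0..f15,
   then a0, a1, the two pseudo pointer registers, the branch registers,
   and finally the MAC16 accumulator.  */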
2651
2652
2653/* A customized version of reg_overlap_mentioned_p that only looks for
2654 references to a7 (as opposed to hard_frame_pointer_rtx). */
2655
2656int
2657a7_overlap_mentioned_p (x)
2658 rtx x;
2659{
2660 int i, j;
2661 unsigned int x_regno;
2662 const char *fmt;
2663
2664 if (GET_CODE (x) == REG)
2665 {
2666 x_regno = REGNO (x);
2667 return (x != hard_frame_pointer_rtx
2668 && x_regno < A7_REG + 1
2669 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2670 }
2671
2672 if (GET_CODE (x) == SUBREG
2673 && GET_CODE (SUBREG_REG (x)) == REG
2674 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
2675 {
2676 x_regno = subreg_regno (x);
2677 return (SUBREG_REG (x) != hard_frame_pointer_rtx
2678 && x_regno < A7_REG + 1
2679 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2680 }
2681
2682 /* X does not match, so try its subexpressions. */
2683 fmt = GET_RTX_FORMAT (GET_CODE (x));
2684 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2685 {
2686 if (fmt[i] == 'e')
2687 {
2688 if (a7_overlap_mentioned_p (XEXP (x, i)))
2689 return 1;
2690 }
2691 else if (fmt[i] == 'E')
2692 {
2693 for (j = XVECLEN (x, i) - 1; j >=0; j--)
2694 if (a7_overlap_mentioned_p (XVECEXP (x, i, j)))
2695 return 1;
2696 }
2697 }
2698
2699 return 0;
2700}