1/* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
2 Copyright (C) 2001 Free Software Foundation, Inc.
3 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING. If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA. */
21
22#include "config.h"
23#include "system.h"
24#include "rtl.h"
25#include "regs.h"
26#include "machmode.h"
27#include "hard-reg-set.h"
28#include "basic-block.h"
29#include "real.h"
30#include "insn-config.h"
31#include "conditions.h"
32#include "insn-flags.h"
33#include "insn-attr.h"
34#include "insn-codes.h"
35#include "recog.h"
36#include "output.h"
37#include "tree.h"
38#include "expr.h"
39#include "flags.h"
40#include "reload.h"
41#include "tm_p.h"
42#include "function.h"
43#include "toplev.h"
44#include "optabs.h"
45#include "output.h"
46#include "libfuncs.h"
47#include "target.h"
48#include "target-def.h"
49
50/* Enumeration for all of the relational tests, so that we can build
51 arrays indexed by the test type, and not worry about the order
52 of EQ, NE, etc. */
53
54enum internal_test {
55 ITEST_EQ,
56 ITEST_NE,
57 ITEST_GT,
58 ITEST_GE,
59 ITEST_LT,
60 ITEST_LE,
61 ITEST_GTU,
62 ITEST_GEU,
63 ITEST_LTU,
64 ITEST_LEU,
65 ITEST_MAX
66 };
67
68/* Cached operands, and operator to compare for use in set/branch on
69 condition codes. */
70rtx branch_cmp[2];
71
72/* what type of branch to use */
73enum cmp_type branch_type;
74
75/* Array giving truth value on whether or not a given hard register
76 can support a given mode. */
77char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
78
79/* Current frame size calculated by compute_frame_size. */
80unsigned xtensa_current_frame_size;
81
82/* Tables of ld/st opcode names for block moves */
83const char *xtensa_ld_opcodes[(int) MAX_MACHINE_MODE];
84const char *xtensa_st_opcodes[(int) MAX_MACHINE_MODE];
85#define LARGEST_MOVE_RATIO 15
86
87/* Define the structure for the machine field in struct function. */
88struct machine_function
89{
90 int accesses_prev_frame;
91};
92
93/* Vector, indexed by hard register number, which contains 1 for a
94 register that is allowable in a candidate for leaf function
95 treatment. */
96
97const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
98{
99 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
100 1, 1, 1,
101 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
102 1
103};
104
105/* Map hard register number to register class */
106const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
107{
108 GR_REGS, SP_REG, GR_REGS, GR_REGS,
109 GR_REGS, GR_REGS, GR_REGS, GR_REGS,
110 GR_REGS, GR_REGS, GR_REGS, GR_REGS,
111 GR_REGS, GR_REGS, GR_REGS, GR_REGS,
112 AR_REGS, AR_REGS, BR_REGS,
113 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
114 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
115 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
116 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
117 ACC_REG,
118};
119
120/* Map register constraint character to register class. */
121enum reg_class xtensa_char_to_class[256] =
122{
123 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
124 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
125 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
126 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
127 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
128 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
129 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
130 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
131 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
132 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
133 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
134 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
135 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
136 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
137 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
138 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
139 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
140 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
141 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
142 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
143 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
144 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
145 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
146 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
147 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
148 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
149 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
150 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
151 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
152 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
153 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
154 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
155 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
156 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
157 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
158 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
159 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
160 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
161 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
162 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
163 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
164 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
165 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
166 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
167 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
168 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
169 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
170 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
171 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
172 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
173 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
174 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
175 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
176 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
177 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
178 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
179 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
180 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
181 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
182 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
183 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
184 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
185 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
186 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
187};
188
189static int b4const_or_zero PARAMS ((int));
190static enum internal_test map_test_to_internal_test PARAMS ((enum rtx_code));
191static rtx gen_int_relational PARAMS ((enum rtx_code, rtx, rtx, int *));
192static rtx gen_float_relational PARAMS ((enum rtx_code, rtx, rtx));
193static rtx gen_conditional_move PARAMS ((rtx));
194static rtx fixup_subreg_mem PARAMS ((rtx x));
195static enum machine_mode xtensa_find_mode_for_size PARAMS ((unsigned));
196static void xtensa_init_machine_status PARAMS ((struct function *p));
197static void xtensa_free_machine_status PARAMS ((struct function *p));
198static void printx PARAMS ((FILE *, signed int));
199static void xtensa_select_rtx_section PARAMS ((enum machine_mode, rtx,
200 unsigned HOST_WIDE_INT));
201
202static rtx frame_size_const;
203static int current_function_arg_words;
204static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
205 REG_ALLOC_ORDER;
206\f
207/* This macro generates the assembly code for function entry.
208 FILE is a stdio stream to output the code to.
209 SIZE is an int: how many units of temporary storage to allocate.
210 Refer to the array 'regs_ever_live' to determine which registers
211 to save; 'regs_ever_live[I]' is nonzero if register number I
212 is ever used in the function. This macro is responsible for
213 knowing which registers should not be saved even if used. */
214
215#undef TARGET_ASM_FUNCTION_PROLOGUE
216#define TARGET_ASM_FUNCTION_PROLOGUE xtensa_function_prologue
217
218/* This macro generates the assembly code for function exit,
219 on machines that need it. If FUNCTION_EPILOGUE is not defined
220 then individual return instructions are generated for each
221 return statement. Args are same as for FUNCTION_PROLOGUE. */
222
223#undef TARGET_ASM_FUNCTION_EPILOGUE
224#define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
225
226/* These hooks specify assembly directives for creating certain kinds
227 of integer object. */
228
229#undef TARGET_ASM_ALIGNED_SI_OP
230#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
231
232#undef TARGET_ASM_SELECT_RTX_SECTION
233#define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section
234
235struct gcc_target targetm = TARGET_INITIALIZER;
236\f
237
238/*
239 * Functions to test Xtensa immediate operand validity.
240 */
241
242int
243xtensa_b4constu (v)
244 int v;
245{
246 switch (v)
247 {
248 case 32768:
249 case 65536:
250 case 2:
251 case 3:
252 case 4:
253 case 5:
254 case 6:
255 case 7:
256 case 8:
257 case 10:
258 case 12:
259 case 16:
260 case 32:
261 case 64:
262 case 128:
263 case 256:
264 return 1;
265 }
266 return 0;
267}
268
269int
270xtensa_simm8x256 (v)
271 int v;
272{
273 return (v & 255) == 0 && (v >= -32768 && v <= 32512);
274}
275
276int
277xtensa_ai4const (v)
278 int v;
279{
280 return (v == -1 || (v >= 1 && v <= 15));
281}
282
283int
284xtensa_simm7 (v)
285 int v;
286{
287 return v >= -32 && v <= 95;
288}
289
290int
291xtensa_b4const (v)
292 int v;
293{
294 switch (v)
295 {
296 case -1:
297 case 1:
298 case 2:
299 case 3:
300 case 4:
301 case 5:
302 case 6:
303 case 7:
304 case 8:
305 case 10:
306 case 12:
307 case 16:
308 case 32:
309 case 64:
310 case 128:
311 case 256:
312 return 1;
313 }
314 return 0;
315}
316
317int
318xtensa_simm8 (v)
319 int v;
320{
321 return v >= -128 && v <= 127;
322}
323
324int
325xtensa_tp7 (v)
326 int v;
327{
328 return (v >= 7 && v <= 22);
329}
330
331int
332xtensa_lsi4x4 (v)
333 int v;
334{
335 return (v & 3) == 0 && (v >= 0 && v <= 60);
336}
337
338int
339xtensa_simm12b (v)
340 int v;
341{
342 return v >= -2048 && v <= 2047;
343}
344
345int
346xtensa_uimm8 (v)
347 int v;
348{
349 return v >= 0 && v <= 255;
350}
351
352int
353xtensa_uimm8x2 (v)
354 int v;
355{
356 return (v & 1) == 0 && (v >= 0 && v <= 510);
357}
358
359int
360xtensa_uimm8x4 (v)
361 int v;
362{
363 return (v & 3) == 0 && (v >= 0 && v <= 1020);
364}
365
366
367/* This is just like the standard true_regnum() function except that it
368 works even when reg_renumber is not initialized. */
369
370int
371xt_true_regnum (x)
372 rtx x;
373{
374 if (GET_CODE (x) == REG)
375 {
376 if (reg_renumber
377 && REGNO (x) >= FIRST_PSEUDO_REGISTER
378 && reg_renumber[REGNO (x)] >= 0)
379 return reg_renumber[REGNO (x)];
380 return REGNO (x);
381 }
382 if (GET_CODE (x) == SUBREG)
383 {
384 int base = xt_true_regnum (SUBREG_REG (x));
385 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
386 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
387 GET_MODE (SUBREG_REG (x)),
388 SUBREG_BYTE (x), GET_MODE (x));
389 }
390 return -1;
391}
392
393
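/* Accept either a register or a constant that fits the signed 8-bit
   ADDI immediate field or the ADDMI immediate field (a multiple of 256
   in the range -32768..32512). */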
394int
395add_operand (op, mode)
396 rtx op;
397 enum machine_mode mode;
398{
399 if (GET_CODE (op) == CONST_INT)
400 return (xtensa_simm8 (INTVAL (op)) ||
401 xtensa_simm8x256 (INTVAL (op)));
402
403 return register_operand (op, mode);
404}
405
406
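/* Accept either a register or a signed 8-bit immediate constant. */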
407int
408arith_operand (op, mode)
409 rtx op;
410 enum machine_mode mode;
411{
412 if (GET_CODE (op) == CONST_INT)
413 return xtensa_simm8 (INTVAL (op));
414
415 return register_operand (op, mode);
416}
417
418
419int
420nonimmed_operand (op, mode)
421 rtx op;
422 enum machine_mode mode;
423{
424 /* We cannot use the standard nonimmediate_operand() predicate because
425 it includes constant pool memory operands. */
426
427 if (memory_operand (op, mode))
428 return !constantpool_address_p (XEXP (op, 0));
429
430 return register_operand (op, mode);
431}
432
433
434int
435mem_operand (op, mode)
436 rtx op;
437 enum machine_mode mode;
438{
439 /* We cannot use the standard memory_operand() predicate because
440 it includes constant pool memory operands. */
441
442 if (memory_operand (op, mode))
443 return !constantpool_address_p (XEXP (op, 0));
444
445 return FALSE;
446}
447
448
449int
450xtensa_valid_move (mode, operands)
451 enum machine_mode mode;
452 rtx *operands;
453{
454 /* Either the destination or source must be a register, and the
455 MAC16 accumulator doesn't count. */
456
457 if (register_operand (operands[0], mode))
458 {
459 int dst_regnum = xt_true_regnum (operands[0]);
460
461 /* The stack pointer can only be assigned with a MOVSP opcode. */
462 if (dst_regnum == STACK_POINTER_REGNUM)
463 return (mode == SImode
464 && register_operand (operands[1], mode)
465 && !ACC_REG_P (xt_true_regnum (operands[1])));
466
467 if (!ACC_REG_P (dst_regnum))
468 return true;
469 }
470 if (register_operand (operands[1], mode))
471 {
472 int src_regnum = xt_true_regnum (operands[1]);
473 if (!ACC_REG_P (src_regnum))
474 return true;
475 }
476 return FALSE;
477}
478
479
480int
481mask_operand (op, mode)
482 rtx op;
483 enum machine_mode mode;
484{
485 if (GET_CODE (op) == CONST_INT)
486 return xtensa_mask_immediate (INTVAL (op));
487
488 return register_operand (op, mode);
489}
490
491
492int
493extui_fldsz_operand (op, mode)
494 rtx op;
495 enum machine_mode mode ATTRIBUTE_UNUSED;
496{
497 return ((GET_CODE (op) == CONST_INT)
498 && xtensa_mask_immediate ((1 << INTVAL (op)) - 1));
499}
500
501
502int
503sext_operand (op, mode)
504 rtx op;
505 enum machine_mode mode;
506{
507 if (TARGET_SEXT)
508 return nonimmed_operand (op, mode);
509 return mem_operand (op, mode);
510}
511
512
513int
514sext_fldsz_operand (op, mode)
515 rtx op;
516 enum machine_mode mode ATTRIBUTE_UNUSED;
517{
518 return ((GET_CODE (op) == CONST_INT) && xtensa_tp7 (INTVAL (op) - 1));
519}
520
521
522int
523lsbitnum_operand (op, mode)
524 rtx op;
525 enum machine_mode mode ATTRIBUTE_UNUSED;
526{
527 if (GET_CODE (op) == CONST_INT)
528 {
529 return (BITS_BIG_ENDIAN
530 ? (INTVAL (op) == BITS_PER_WORD-1)
531 : (INTVAL (op) == 0));
532 }
533 return FALSE;
534}
535
536
537static int
538b4const_or_zero (v)
539 int v;
540{
541 if (v == 0)
542 return TRUE;
543 return xtensa_b4const (v);
544}
545
546
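/* Accept a register or a constant that can be encoded directly in a
   signed conditional branch: zero or one of the B4CONST values. */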
547int
548branch_operand (op, mode)
549 rtx op;
550 enum machine_mode mode;
551{
552 if (GET_CODE (op) == CONST_INT)
553 return b4const_or_zero (INTVAL (op));
554
555 return register_operand (op, mode);
556}
557
558
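/* Accept a register or a B4CONSTU value for an unsigned conditional
   branch. */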
559int
560ubranch_operand (op, mode)
561 rtx op;
562 enum machine_mode mode;
563{
564 if (GET_CODE (op) == CONST_INT)
565 return xtensa_b4constu (INTVAL (op));
566
567 return register_operand (op, mode);
568}
569
570
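/* Accept a call target: either a register (excluding the argument
   pointer and the frame-pointer/virtual-register range) or a constant
   address.  When generating PIC, only direct calls to local (static)
   symbols are allowed. */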
571int
572call_insn_operand (op, mode)
573 rtx op;
574 enum machine_mode mode ATTRIBUTE_UNUSED;
575{
576 if ((GET_CODE (op) == REG)
577 && (op != arg_pointer_rtx)
578 && ((REGNO (op) < FRAME_POINTER_REGNUM)
579 || (REGNO (op) > LAST_VIRTUAL_REGISTER)))
580 return TRUE;
581
582 if (CONSTANT_ADDRESS_P (op))
583 {
584 /* Direct calls only allowed to static functions with PIC. */
585 return (!flag_pic || (GET_CODE (op) == SYMBOL_REF
586 && SYMBOL_REF_FLAG (op)));
587 }
588
589 return FALSE;
590}
591
592
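/* Accept any operand that a single move instruction can handle: a
   register, a CONSTANT_P_RTX, a constant in the signed 12-bit MOVI
   range, or a memory reference with a valid address. */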
593int
594move_operand (op, mode)
595 rtx op;
596 enum machine_mode mode;
597{
598 if (register_operand (op, mode))
599 return TRUE;
600
601 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
602 result in 0/1. */
603 if (GET_CODE (op) == CONSTANT_P_RTX)
604 return TRUE;
605
606 if (GET_CODE (op) == CONST_INT)
607 return xtensa_simm12b (INTVAL (op));
608
609 if (GET_CODE (op) == MEM)
610 return memory_address_p (mode, XEXP (op, 0));
611
612 return FALSE;
613}
614
615
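/* Return nonzero for a memory reference whose address is a base
   register alone or a base register plus a small offset (a multiple of
   4 in the range 0..60), as used by the narrow load/store encodings. */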
616int
617smalloffset_mem_p (op)
618 rtx op;
619{
620 if (GET_CODE (op) == MEM)
621 {
622 rtx addr = XEXP (op, 0);
623 if (GET_CODE (addr) == REG)
624 return REG_OK_FOR_BASE_P (addr);
625 if (GET_CODE (addr) == PLUS)
626 {
627 rtx offset = XEXP (addr, 0);
628 if (GET_CODE (offset) != CONST_INT)
629 offset = XEXP (addr, 1);
630 if (GET_CODE (offset) != CONST_INT)
631 return FALSE;
632 return xtensa_lsi4x4 (INTVAL (offset));
633 }
634 }
635 return FALSE;
636}
637
638
639int
640smalloffset_double_mem_p (op)
641 rtx op;
642{
643 if (!smalloffset_mem_p (op))
644 return FALSE;
645 return smalloffset_mem_p (adjust_address (op, GET_MODE (op), 4));
646}
647
648
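/* Return nonzero if ADDR refers to the constant pool, either as a bare
   SYMBOL_REF or as (const (plus SYMBOL_REF word-aligned-offset)). */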
649int
650constantpool_address_p (addr)
651 rtx addr;
652{
653 rtx sym = addr;
654
655 if (GET_CODE (addr) == CONST)
656 {
657 rtx offset;
658
659 /* only handle (PLUS (SYM, OFFSET)) form */
660 addr = XEXP (addr, 0);
661 if (GET_CODE (addr) != PLUS)
662 return FALSE;
663
664 /* make sure the address is word aligned */
665 offset = XEXP (addr, 1);
666 if ((GET_CODE (offset) != CONST_INT)
667 || ((INTVAL (offset) & 3) != 0))
668 return FALSE;
669
670 sym = XEXP (addr, 0);
671 }
672
673 if ((GET_CODE (sym) == SYMBOL_REF)
674 && CONSTANT_POOL_ADDRESS_P (sym))
675 return TRUE;
676 return FALSE;
677}
678
679
680int
681constantpool_mem_p (op)
682 rtx op;
683{
684 if (GET_CODE (op) == MEM)
685 return constantpool_address_p (XEXP (op, 0));
686 return FALSE;
687}
688
689
690int
691non_const_move_operand (op, mode)
692 rtx op;
693 enum machine_mode mode;
694{
695 if (register_operand (op, mode))
696 return 1;
697 if (GET_CODE (op) == SUBREG)
698 op = SUBREG_REG (op);
699 if (GET_CODE (op) == MEM)
700 return memory_address_p (mode, XEXP (op, 0));
701 return FALSE;
702}
703
704
705/* Accept the floating point constant 1 in the appropriate mode. */
706
707int
708const_float_1_operand (op, mode)
709 rtx op;
710 enum machine_mode mode;
711{
712 REAL_VALUE_TYPE d;
713 static REAL_VALUE_TYPE onedf;
714 static REAL_VALUE_TYPE onesf;
715 static int one_initialized;
716
717 if ((GET_CODE (op) != CONST_DOUBLE)
718 || (mode != GET_MODE (op))
719 || (mode != DFmode && mode != SFmode))
720 return FALSE;
721
722 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
723
724 if (! one_initialized)
725 {
726 onedf = REAL_VALUE_ATOF ("1.0", DFmode);
727 onesf = REAL_VALUE_ATOF ("1.0", SFmode);
728 one_initialized = TRUE;
729 }
730
731 if (mode == DFmode)
732 return REAL_VALUES_EQUAL (d, onedf);
733 else
734 return REAL_VALUES_EQUAL (d, onesf);
735}
736
737
738int
739fpmem_offset_operand (op, mode)
740 rtx op;
741 enum machine_mode mode ATTRIBUTE_UNUSED;
742{
743 if (GET_CODE (op) == CONST_INT)
744 return xtensa_mem_offset (INTVAL (op), SFmode);
745 return 0;
746}
747
748
749void
750xtensa_extend_reg (dst, src)
751 rtx dst;
752 rtx src;
753{
754 rtx temp = gen_reg_rtx (SImode);
755 rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
756
757 /* generate paradoxical subregs as needed so that the modes match */
758 src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
759 dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
760
761 emit_insn (gen_ashlsi3 (temp, src, shift));
762 emit_insn (gen_ashrsi3 (dst, temp, shift));
763}
764
765
766void
767xtensa_load_constant (dst, src)
768 rtx dst;
769 rtx src;
770{
771 enum machine_mode mode = GET_MODE (dst);
772 src = force_const_mem (SImode, src);
773
774 /* PC-relative loads are always SImode so we have to add a SUBREG if that
775 is not the desired mode */
776
777 if (mode != SImode)
778 {
779 if (register_operand (dst, mode))
780 dst = simplify_gen_subreg (SImode, dst, mode, 0);
781 else
782 {
783 src = force_reg (SImode, src);
784 src = gen_lowpart_SUBREG (mode, src);
785 }
786 }
787
788 emit_move_insn (dst, src);
789}
790
791
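/* Match the signed comparison operators that map directly onto
   conditional branch instructions: EQ, NE, LT and GE. */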
792int
793branch_operator (x, mode)
794 rtx x;
795 enum machine_mode mode;
796{
797 if (GET_MODE (x) != mode)
798 return FALSE;
799
800 switch (GET_CODE (x))
801 {
802 case EQ:
803 case NE:
804 case LT:
805 case GE:
806 return TRUE;
807 default:
808 break;
809 }
810 return FALSE;
811}
812
813
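/* Match the unsigned comparison operators handled by conditional
   branches: LTU and GEU. */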
814int
815ubranch_operator (x, mode)
816 rtx x;
817 enum machine_mode mode;
818{
819 if (GET_MODE (x) != mode)
820 return FALSE;
821
822 switch (GET_CODE (x))
823 {
824 case LTU:
825 case GEU:
826 return TRUE;
827 default:
828 break;
829 }
830 return FALSE;
831}
832
833
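/* Match the EQ and NE comparison operators. */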
834int
835boolean_operator (x, mode)
836 rtx x;
837 enum machine_mode mode;
838{
839 if (GET_MODE (x) != mode)
840 return FALSE;
841
842 switch (GET_CODE (x))
843 {
844 case EQ:
845 case NE:
846 return TRUE;
847 default:
848 break;
849 }
850 return FALSE;
851}
852
853
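/* Return nonzero if V is a mask consisting of 1 to 16 contiguous
   low-order set bits, i.e., a field that EXTUI can extract. */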
854int
855xtensa_mask_immediate (v)
856 int v;
857{
858#define MAX_MASK_SIZE 16
859 int mask_size;
860
861 for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
862 {
863 if ((v & 1) == 0)
864 return FALSE;
865 v = v >> 1;
866 if (v == 0)
867 return TRUE;
868 }
869
870 return FALSE;
871}
872
873
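/* Return nonzero if V is a valid load/store offset for MODE: an
   unsigned 8-bit offset scaled by the access size, with extra checks
   for BLKmode block moves and two-word DFmode accesses. */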
874int
875xtensa_mem_offset (v, mode)
876 unsigned v;
877 enum machine_mode mode;
878{
879 switch (mode)
880 {
881 case BLKmode:
882 /* Handle the worst case for block moves. See xtensa_expand_block_move
883 where we emit an optimized block move operation if the block can be
884 moved in < "move_ratio" pieces. The worst case is when the block is
885 aligned but has a size of (3 mod 4) (does this happen?) so that the
886 last piece requires a byte load/store. */
887 return (xtensa_uimm8 (v) &&
888 xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
889
890 case QImode:
891 return xtensa_uimm8 (v);
892
893 case HImode:
894 return xtensa_uimm8x2 (v);
895
896 case DFmode:
897 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
898
899 default:
900 break;
901 }
902
903 return xtensa_uimm8x4 (v);
904}
905
906
907/* Make normal rtx_code into something we can index from an array */
908
909static enum internal_test
910map_test_to_internal_test (test_code)
911 enum rtx_code test_code;
912{
913 enum internal_test test = ITEST_MAX;
914
915 switch (test_code)
916 {
917 default: break;
918 case EQ: test = ITEST_EQ; break;
919 case NE: test = ITEST_NE; break;
920 case GT: test = ITEST_GT; break;
921 case GE: test = ITEST_GE; break;
922 case LT: test = ITEST_LT; break;
923 case LE: test = ITEST_LE; break;
924 case GTU: test = ITEST_GTU; break;
925 case GEU: test = ITEST_GEU; break;
926 case LTU: test = ITEST_LTU; break;
927 case LEU: test = ITEST_LEU; break;
928 }
929
930 return test;
931}
932
933
934/* Generate the code to compare two integer values. The return value is
935 the comparison expression. */
936
937static rtx
938gen_int_relational (test_code, cmp0, cmp1, p_invert)
939 enum rtx_code test_code; /* relational test (EQ, etc) */
940 rtx cmp0; /* first operand to compare */
941 rtx cmp1; /* second operand to compare */
942 int *p_invert; /* whether branch needs to reverse its test */
943{
944 struct cmp_info {
945 enum rtx_code test_code; /* test code to use in insn */
946 int (*const_range_p) PARAMS ((int)); /* predicate function to check range */
947 int const_add; /* constant to add (convert LE -> LT) */
948 int reverse_regs; /* reverse registers in test */
949 int invert_const; /* != 0 if invert value if cmp1 is constant */
950 int invert_reg; /* != 0 if invert value if cmp1 is register */
951 int unsignedp; /* != 0 for unsigned comparisons. */
952 };
953
954 static struct cmp_info info[ (int)ITEST_MAX ] = {
955
956 { EQ, b4const_or_zero, 0, 0, 0, 0, 0 }, /* EQ */
957 { NE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* NE */
958
959 { LT, b4const_or_zero, 1, 1, 1, 0, 0 }, /* GT */
960 { GE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* GE */
961 { LT, b4const_or_zero, 0, 0, 0, 0, 0 }, /* LT */
962 { GE, b4const_or_zero, 1, 1, 1, 0, 0 }, /* LE */
963
964 { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* GTU */
965 { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* GEU */
966 { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* LTU */
967 { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* LEU */
968 };
969
970 enum internal_test test;
971 enum machine_mode mode;
972 struct cmp_info *p_info;
973
974 test = map_test_to_internal_test (test_code);
975 if (test == ITEST_MAX)
976 abort ();
977
978 p_info = &info[ (int)test ];
979
980 mode = GET_MODE (cmp0);
981 if (mode == VOIDmode)
982 mode = GET_MODE (cmp1);
983
984 /* Make sure we can handle any constants given to us. */
985 if (GET_CODE (cmp1) == CONST_INT)
986 {
987 HOST_WIDE_INT value = INTVAL (cmp1);
988 unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
989
990 /* if the immediate overflows or does not fit in the immediate field,
991 spill it to a register */
992
993 if ((p_info->unsignedp ?
994 (uvalue + p_info->const_add > uvalue) :
995 (value + p_info->const_add > value)) != (p_info->const_add > 0))
996 {
997 cmp1 = force_reg (mode, cmp1);
998 }
999 else if (!(p_info->const_range_p) (value + p_info->const_add))
1000 {
1001 cmp1 = force_reg (mode, cmp1);
1002 }
1003 }
1004 else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
1005 {
1006 cmp1 = force_reg (mode, cmp1);
1007 }
1008
1009 /* See if we need to invert the result. */
1010 *p_invert = ((GET_CODE (cmp1) == CONST_INT)
1011 ? p_info->invert_const
1012 : p_info->invert_reg);
1013
1014 /* Comparison to constants, may involve adding 1 to change a LT into LE.
1015 Comparison between two registers, may involve switching operands. */
1016 if (GET_CODE (cmp1) == CONST_INT)
1017 {
1018 if (p_info->const_add != 0)
1019 cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);
1020
1021 }
1022 else if (p_info->reverse_regs)
1023 {
1024 rtx temp = cmp0;
1025 cmp0 = cmp1;
1026 cmp1 = temp;
1027 }
1028
1029 return gen_rtx (p_info->test_code, VOIDmode, cmp0, cmp1);
1030}
1031
1032
1033/* Generate the code to compare two float values. The return value is
1034 the comparison expression. */
1035
1036static rtx
1037gen_float_relational (test_code, cmp0, cmp1)
1038 enum rtx_code test_code; /* relational test (EQ, etc) */
1039 rtx cmp0; /* first operand to compare */
1040 rtx cmp1; /* second operand to compare */
1041{
1042 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx));
1043 rtx brtmp;
1044 int reverse_regs, invert;
1045
1046 switch (test_code)
1047 {
1048 case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
1049 case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
1050 case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
1051 case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
1052 case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
1053 case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
1054 default:
1055 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1056 reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
1057 }
1058
1059 if (reverse_regs)
1060 {
1061 rtx temp = cmp0;
1062 cmp0 = cmp1;
1063 cmp1 = temp;
1064 }
1065
1066 brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
1067 emit_insn (gen_fn (brtmp, cmp0, cmp1));
1068
1069 return gen_rtx (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
1070}
1071
1072
1073void
1074xtensa_expand_conditional_branch (operands, test_code)
1075 rtx *operands;
1076 enum rtx_code test_code;
1077{
1078 enum cmp_type type = branch_type;
1079 rtx cmp0 = branch_cmp[0];
1080 rtx cmp1 = branch_cmp[1];
1081 rtx cmp;
1082 int invert;
1083 rtx label1, label2;
1084
1085 switch (type)
1086 {
1087 case CMP_DF:
1088 default:
1089 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1090
1091 case CMP_SI:
1092 invert = FALSE;
1093 cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
1094 break;
1095
1096 case CMP_SF:
1097 if (!TARGET_HARD_FLOAT)
1098 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1099 invert = FALSE;
1100 cmp = gen_float_relational (test_code, cmp0, cmp1);
1101 break;
1102 }
1103
1104 /* Generate the branch. */
1105
1106 label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
1107 label2 = pc_rtx;
1108
1109 if (invert)
1110 {
1111 label2 = label1;
1112 label1 = pc_rtx;
1113 }
1114
1115 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
1116 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
1117 label1,
1118 label2)));
1119}
1120
1121
1122static rtx
1123gen_conditional_move (cmp)
1124 rtx cmp;
1125{
1126 enum rtx_code code = GET_CODE (cmp);
1127 rtx op0 = branch_cmp[0];
1128 rtx op1 = branch_cmp[1];
1129
1130 if (branch_type == CMP_SI)
1131 {
1132 /* Jump optimization calls get_condition() which canonicalizes
1133 comparisons like (GE x <const>) to (GT x <const-1>).
1134 Transform those comparisons back to GE, since that is the
1135 comparison supported in Xtensa. We shouldn't have to
1136 transform <LE x const> comparisons, because neither
1137 xtensa_expand_conditional_branch() nor get_condition() will
1138 produce them. */
1139
1140 if ((code == GT) && (op1 == constm1_rtx))
1141 {
1142 code = GE;
1143 op1 = const0_rtx;
1144 }
1145 cmp = gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
1146
1147 if (boolean_operator (cmp, VOIDmode))
1148 {
1149 /* swap the operands to make const0 second */
1150 if (op0 == const0_rtx)
1151 {
1152 op0 = op1;
1153 op1 = const0_rtx;
1154 }
1155
1156 /* if not comparing against zero, emit a comparison (subtract) */
1157 if (op1 != const0_rtx)
1158 {
1159 op0 = expand_binop (SImode, sub_optab, op0, op1,
1160 0, 0, OPTAB_LIB_WIDEN);
1161 op1 = const0_rtx;
1162 }
1163 }
1164 else if (branch_operator (cmp, VOIDmode))
1165 {
1166 /* swap the operands to make const0 second */
1167 if (op0 == const0_rtx)
1168 {
1169 op0 = op1;
1170 op1 = const0_rtx;
1171
1172 switch (code)
1173 {
1174 case LT: code = GE; break;
1175 case GE: code = LT; break;
1176 default: abort ();
1177 }
1178 }
1179
1180 if (op1 != const0_rtx)
1181 return 0;
1182 }
1183 else
1184 return 0;
1185
1186 return gen_rtx (code, VOIDmode, op0, op1);
1187 }
1188
1189 if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
1190 return gen_float_relational (code, op0, op1);
1191
1192 return 0;
1193}
1194
1195
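/* Expand a conditional move using the comparison operands saved in
   branch_cmp/branch_type by the preceding compare pattern.  Return 1
   on success, or 0 if the comparison cannot be handled. */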
1196int
1197xtensa_expand_conditional_move (operands, isflt)
1198 rtx *operands;
1199 int isflt;
1200{
1201 rtx cmp;
1202 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1203
1204 if (!(cmp = gen_conditional_move (operands[1])))
1205 return 0;
1206
1207 if (isflt)
1208 gen_fn = (branch_type == CMP_SI
1209 ? gen_movsfcc_internal0
1210 : gen_movsfcc_internal1);
1211 else
1212 gen_fn = (branch_type == CMP_SI
1213 ? gen_movsicc_internal0
1214 : gen_movsicc_internal1);
1215
1216 emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
1217 operands[2], operands[3], cmp));
1218 return 1;
1219}
1220
1221
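/* Expand a "set on comparison" (scc) operation: set the destination to
   1 or 0 according to the saved comparison, using a conditional move.
   Return 1 on success, or 0 if the comparison cannot be handled. */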
1222int
1223xtensa_expand_scc (operands)
1224 rtx *operands;
1225{
1226 rtx dest = operands[0];
1227 rtx cmp = operands[1];
1228 rtx one_tmp, zero_tmp;
1229 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1230
1231 if (!(cmp = gen_conditional_move (cmp)))
1232 return 0;
1233
1234 one_tmp = gen_reg_rtx (SImode);
1235 zero_tmp = gen_reg_rtx (SImode);
1236 emit_insn (gen_movsi (one_tmp, const_true_rtx));
1237 emit_insn (gen_movsi (zero_tmp, const0_rtx));
1238
1239 gen_fn = (branch_type == CMP_SI
1240 ? gen_movsicc_internal0
1241 : gen_movsicc_internal1);
1242 emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
1243 return 1;
1244}
1245
1246
1247/* Emit insns to move operands[1] into operands[0].
1248
1249 Return 1 if we have written out everything that needs to be done to
1250 do the move. Otherwise, return 0 and the caller will emit the move
1251 normally. */
1252
1253int
1254xtensa_emit_move_sequence (operands, mode)
1255 rtx *operands;
1256 enum machine_mode mode;
1257{
1258 if (CONSTANT_P (operands[1])
1259 && GET_CODE (operands[1]) != CONSTANT_P_RTX
1260 && (GET_CODE (operands[1]) != CONST_INT
1261 || !xtensa_simm12b (INTVAL (operands[1]))))
1262 {
1263 xtensa_load_constant (operands[0], operands[1]);
1264 return 1;
1265 }
1266
1267 if (!(reload_in_progress | reload_completed))
1268 {
1269 if (!xtensa_valid_move (mode, operands))
1270 operands[1] = force_reg (mode, operands[1]);
1271
1272 /* Check if this move is copying an incoming argument in a7. If
1273 so, emit the move, followed by the special "set_frame_ptr"
1274 unspec_volatile insn, at the very beginning of the function.
1275 This is necessary because the register allocator will ignore
1276 conflicts with a7 and may assign some other pseudo to a7. If
1277 that pseudo was assigned prior to this move, it would clobber
1278 the incoming argument in a7. By copying the argument out of
1279 a7 as the very first thing, and then immediately following
1280 that with an unspec_volatile to keep the scheduler away, we
1281 should avoid any problems. */
1282
1283 if (a7_overlap_mentioned_p (operands[1]))
1284 {
1285 rtx mov;
1286 switch (mode)
1287 {
1288 case SImode:
1289 mov = gen_movsi_internal (operands[0], operands[1]);
1290 break;
1291 case HImode:
1292 mov = gen_movhi_internal (operands[0], operands[1]);
1293 break;
1294 case QImode:
1295 mov = gen_movqi_internal (operands[0], operands[1]);
1296 break;
1297 default:
1298 abort ();
1299 }
1300
1301 /* Insert the instructions before any other argument copies.
1302 (The set_frame_ptr insn comes _after_ the move, so push it
1303 out first.) */
1304 push_topmost_sequence ();
1305 emit_insn_after (gen_set_frame_ptr (), get_insns ());
1306 emit_insn_after (mov, get_insns ());
1307 pop_topmost_sequence ();
1308
1309 return 1;
1310 }
1311 }
1312
1313 /* During reload we don't want to emit (subreg:X (mem:Y)) since that
1314 instruction won't be recognized after reload. So we remove the
1315 subreg and adjust mem accordingly. */
1316 if (reload_in_progress)
1317 {
1318 operands[0] = fixup_subreg_mem (operands[0]);
1319 operands[1] = fixup_subreg_mem (operands[1]);
1320 }
1321 return 0;
1322}
1323
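/* Replace a SUBREG of a pseudo register with the equivalent stack slot
   MEM recorded by reload (reg_equiv_mem), so the move is recognizable. */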
1324static rtx
1325fixup_subreg_mem (x)
1326 rtx x;
1327{
1328 if (GET_CODE (x) == SUBREG
1329 && GET_CODE (SUBREG_REG (x)) == REG
1330 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1331 {
1332 rtx temp =
1333 gen_rtx_SUBREG (GET_MODE (x),
1334 reg_equiv_mem [REGNO (SUBREG_REG (x))],
1335 SUBREG_BYTE (x));
1336 x = alter_subreg (&temp);
1337 }
1338 return x;
1339}
1340
1341
1342/* Try to expand a block move operation to an RTL block move instruction.
1343 If not optimizing or if the block size is not a constant or if the
1344 block is small, the expansion fails and GCC falls back to calling
1345 memcpy().
1346
1347 operands[0] is the destination
1348 operands[1] is the source
1349 operands[2] is the length
1350 operands[3] is the alignment */
1351
1352int
1353xtensa_expand_block_move (operands)
1354 rtx *operands;
1355{
1356 rtx dest = operands[0];
1357 rtx src = operands[1];
1358 int bytes = INTVAL (operands[2]);
1359 int align = XINT (operands[3], 0);
1360 int num_pieces, move_ratio;
1361
1362 /* If this is not a fixed size move, just call memcpy */
1363 if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1364 return 0;
1365
1366 /* Anything to move? */
1367 if (bytes <= 0)
1368 return 1;
1369
1370 if (align > MOVE_MAX)
1371 align = MOVE_MAX;
1372
1373 /* decide whether to expand inline based on the optimization level */
1374 move_ratio = 4;
1375 if (optimize > 2)
1376 move_ratio = LARGEST_MOVE_RATIO;
1377 num_pieces = (bytes / align) + (bytes % align); /* close enough anyway */
1378 if (num_pieces >= move_ratio)
1379 return 0;
1380
1381 /* make sure the memory addresses are valid */
1382 operands[0] = validize_mem (dest);
1383 operands[1] = validize_mem (src);
1384
1385 emit_insn (gen_movstrsi_internal (operands[0], operands[1],
1386 operands[2], operands[3]));
1387 return 1;
1388}
1389
1390
1391/* Emit a sequence of instructions to implement a block move, trying
1392 to hide load delay slots as much as possible. Load N values into
1393 temporary registers, store those N values, and repeat until the
1394 complete block has been moved. N=delay_slots+1 */
1395
1396struct meminsnbuf {
1397 char template[30];
1398 rtx operands[2];
1399};
1400
1401void
1402xtensa_emit_block_move (operands, tmpregs, delay_slots)
1403 rtx *operands;
1404 rtx *tmpregs;
1405 int delay_slots;
1406{
1407 rtx dest = operands[0];
1408 rtx src = operands[1];
1409 int bytes = INTVAL (operands[2]);
1410 int align = XINT (operands[3], 0);
1411 rtx from_addr = XEXP (src, 0);
1412 rtx to_addr = XEXP (dest, 0);
1413 int from_struct = MEM_IN_STRUCT_P (src);
1414 int to_struct = MEM_IN_STRUCT_P (dest);
1415 int offset = 0;
1416 int chunk_size, item_size;
1417 struct meminsnbuf *ldinsns, *stinsns;
1418 const char *ldname, *stname;
1419 enum machine_mode mode;
1420
1421 if (align > MOVE_MAX)
1422 align = MOVE_MAX;
1423 item_size = align;
1424 chunk_size = delay_slots + 1;
1425
1426 ldinsns = (struct meminsnbuf *)
1427 alloca (chunk_size * sizeof (struct meminsnbuf));
1428 stinsns = (struct meminsnbuf *)
1429 alloca (chunk_size * sizeof (struct meminsnbuf));
1430
1431 mode = xtensa_find_mode_for_size (item_size);
1432 item_size = GET_MODE_SIZE (mode);
1433 ldname = xtensa_ld_opcodes[(int) mode];
1434 stname = xtensa_st_opcodes[(int) mode];
1435
1436 while (bytes > 0)
1437 {
1438 int n;
1439
1440 for (n = 0; n < chunk_size; n++)
1441 {
1442 rtx addr, mem;
1443
1444 if (bytes == 0)
1445 {
1446 chunk_size = n;
1447 break;
1448 }
1449
1450 if (bytes < item_size)
1451 {
1452 /* find a smaller item_size which we can load & store */
1453 item_size = bytes;
1454 mode = xtensa_find_mode_for_size (item_size);
1455 item_size = GET_MODE_SIZE (mode);
1456 ldname = xtensa_ld_opcodes[(int) mode];
1457 stname = xtensa_st_opcodes[(int) mode];
1458 }
1459
1460 /* record the load instruction opcode and operands */
1461 addr = plus_constant (from_addr, offset);
1462 mem = gen_rtx_MEM (mode, addr);
1463 if (! memory_address_p (mode, addr))
1464 abort ();
1465 MEM_IN_STRUCT_P (mem) = from_struct;
1466 ldinsns[n].operands[0] = tmpregs[n];
1467 ldinsns[n].operands[1] = mem;
1468 sprintf (ldinsns[n].template, "%s\t%%0, %%1", ldname);
1469
1470 /* record the store instruction opcode and operands */
1471 addr = plus_constant (to_addr, offset);
1472 mem = gen_rtx_MEM (mode, addr);
1473 if (! memory_address_p (mode, addr))
1474 abort ();
1475 MEM_IN_STRUCT_P (mem) = to_struct;
1476 stinsns[n].operands[0] = tmpregs[n];
1477 stinsns[n].operands[1] = mem;
1478 sprintf (stinsns[n].template, "%s\t%%0, %%1", stname);
1479
1480 offset += item_size;
1481 bytes -= item_size;
1482 }
1483
1484 /* now output the loads followed by the stores */
1485 for (n = 0; n < chunk_size; n++)
1486 output_asm_insn (ldinsns[n].template, ldinsns[n].operands);
1487 for (n = 0; n < chunk_size; n++)
1488 output_asm_insn (stinsns[n].template, stinsns[n].operands);
1489 }
1490}
1491
1492
1493static enum machine_mode
1494xtensa_find_mode_for_size (item_size)
1495 unsigned item_size;
1496{
1497 enum machine_mode mode, tmode;
1498
1499 while (1)
1500 {
1501 mode = VOIDmode;
1502
1503 /* find mode closest to but not bigger than item_size */
1504 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1505 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1506 if (GET_MODE_SIZE (tmode) <= item_size)
1507 mode = tmode;
1508 if (mode == VOIDmode)
1509 abort ();
1510
1511 item_size = GET_MODE_SIZE (mode);
1512
1513 if (xtensa_ld_opcodes[(int) mode]
1514 && xtensa_st_opcodes[(int) mode])
1515 break;
1516
1517 /* cannot load & store this mode; try something smaller */
1518 item_size -= 1;
1519 }
1520
1521 return mode;
1522}
1523
1524
1525void
1526xtensa_expand_nonlocal_goto (operands)
1527 rtx *operands;
1528{
1529 rtx goto_handler = operands[1];
1530 rtx containing_fp = operands[3];
1531
1532 /* generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1533 is too big to generate in-line */
1534
1535 if (GET_CODE (containing_fp) != REG)
1536 containing_fp = force_reg (Pmode, containing_fp);
1537
1538 goto_handler = replace_rtx (copy_rtx (goto_handler),
1539 virtual_stack_vars_rtx,
1540 containing_fp);
1541
1542 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1543 0, VOIDmode, 2,
1544 containing_fp, Pmode,
1545 goto_handler, Pmode);
1546}
1547
1548
1549static void
1550xtensa_init_machine_status (p)
1551 struct function *p;
1552{
1553 p->machine = (struct machine_function *)
1554 xcalloc (1, sizeof (struct machine_function));
1555}
1556
1557
1558static void
1559xtensa_free_machine_status (p)
1560 struct function *p;
1561{
1562 free (p->machine);
1563 p->machine = NULL;
1564}
1565
1566
1567void
1568xtensa_setup_frame_addresses ()
1569{
1570 /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
1571 cfun->machine->accesses_prev_frame = 1;
1572
1573 emit_library_call
1574 (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
1575 0, VOIDmode, 0);
1576}
1577
1578
1579/* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1580 a comment showing where the end of the loop is. However, if there is a
1581 label or a branch at the end of the loop then we need to place a nop
1582 there. If the loop ends with a label we need the nop so that branches
1583 targeting that label will target the nop (and thus remain in the loop),
1584 instead of targeting the instruction after the loop (and thus exiting
1585 the loop). If the loop ends with a branch, we need the nop in case the
1586 branch is targeting a location inside the loop. When the branch
1587 executes it will cause the loop count to be decremented even if it is
1588 taken (because it is the last instruction in the loop), so we need to
1589 nop after the branch to prevent the loop count from being decremented
1590 when the branch is taken. */
1591
1592void
1593xtensa_emit_loop_end (insn, operands)
1594 rtx insn;
1595 rtx *operands;
1596{
1597 char done = 0;
1598
1599 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1600 {
1601 switch (GET_CODE (insn))
1602 {
1603 case NOTE:
1604 case BARRIER:
1605 break;
1606
1607 case CODE_LABEL:
1608 output_asm_insn ("nop.n", operands);
1609 done = 1;
1610 break;
1611
1612 default:
1613 {
1614 rtx body = PATTERN (insn);
1615
1616 if (GET_CODE (body) == JUMP_INSN)
1617 {
1618 output_asm_insn ("nop.n", operands);
1619 done = 1;
1620 }
1621 else if ((GET_CODE (body) != USE)
1622 && (GET_CODE (body) != CLOBBER))
1623 done = 1;
1624 }
1625 break;
1626 }
1627 }
1628
1629 output_asm_insn ("# loop end for %0", operands);
1630}
1631
1632
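/* Return the assembly template for a call instruction.  CALLOP is the
   index of the call target in OPERANDS; emit "call8" for immediate or
   symbolic targets and "callx8" for register targets. */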
1633char *
1634xtensa_emit_call (callop, operands)
1635 int callop;
1636 rtx *operands;
1637{
1638 static char result[64];
1639 rtx tgt = operands[callop];
1640
1641 if (GET_CODE (tgt) == CONST_INT)
1642 sprintf (result, "call8\t0x%x", INTVAL (tgt));
1643 else if (register_operand (tgt, VOIDmode))
1644 sprintf (result, "callx8\t%%%d", callop);
1645 else
1646 sprintf (result, "call8\t%%%d", callop);
1647
1648 return result;
1649}
1650
1651
1652/* Return the stabs register number to use for 'regno'. */
1653
1654int
1655xtensa_dbx_register_number (regno)
1656 int regno;
1657{
1658 int first = -1;
1659
1660 if (GP_REG_P (regno)) {
1661 regno -= GP_REG_FIRST;
1662 first = 0;
1663 }
1664 else if (BR_REG_P (regno)) {
1665 regno -= BR_REG_FIRST;
1666 first = 16;
1667 }
1668 else if (FP_REG_P (regno)) {
1669 regno -= FP_REG_FIRST;
1670 /* The current numbering convention is that TIE registers are
1671 numbered in libcc order beginning with 256. We can't guarantee
1672 that the FP registers will come first, so the following is just
1673 a guess. It seems like we should make a special case for FP
1674 registers and give them fixed numbers < 256. */
1675 first = 256;
1676 }
1677 else if (ACC_REG_P (regno))
1678 {
1679 first = 0;
1680 regno = -1;
1681 }
1682
1683 /* When optimizing, we sometimes get asked about pseudo-registers
1684 that don't represent hard registers. Return 0 for these. */
1685 if (first == -1)
1686 return 0;
1687
1688 return first + regno;
1689}
1690
1691
1692/* Argument support functions. */
1693
1694/* Initialize CUMULATIVE_ARGS for a function. */
1695
1696void
1697init_cumulative_args (cum, fntype, libname)
1698 CUMULATIVE_ARGS *cum; /* argument info to initialize */
1699 tree fntype ATTRIBUTE_UNUSED; /* tree ptr for function decl */
1700 rtx libname ATTRIBUTE_UNUSED; /* SYMBOL_REF of library name or 0 */
1701{
1702 cum->arg_words = 0;
1703}
1704
1705/* Advance the argument to the next argument position. */
1706
1707void
1708function_arg_advance (cum, mode, type)
1709 CUMULATIVE_ARGS *cum; /* current arg information */
1710 enum machine_mode mode; /* current arg mode */
1711 tree type; /* type of the argument or 0 if lib support */
1712{
1713 int words, max;
1714 int *arg_words;
1715
1716 arg_words = &cum->arg_words;
1717 max = MAX_ARGS_IN_REGISTERS;
1718
1719 words = (((mode != BLKmode)
1720 ? (int) GET_MODE_SIZE (mode)
1721 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1722
1723 if ((*arg_words + words > max) && (*arg_words < max))
1724 *arg_words = max;
1725
1726 *arg_words += words;
1727}
1728
1729
1730/* Return an RTL expression containing the register for the given mode,
1731 or 0 if the argument is to be passed on the stack. */
1732
1733rtx
1734function_arg (cum, mode, type, incoming_p)
1735 CUMULATIVE_ARGS *cum; /* current arg information */
1736 enum machine_mode mode; /* current arg mode */
1737 tree type; /* type of the argument or 0 if lib support */
1738 int incoming_p; /* computing the incoming registers? */
1739{
1740 int regbase, words, max;
1741 int *arg_words;
1742 int regno;
1743 enum machine_mode result_mode;
1744
1745 arg_words = &cum->arg_words;
1746 regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
1747 max = MAX_ARGS_IN_REGISTERS;
1748
1749 words = (((mode != BLKmode)
1750 ? (int) GET_MODE_SIZE (mode)
1751 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1752
1753 if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
1754 *arg_words += (*arg_words & 1);
1755
1756 if (*arg_words + words > max)
1757 return (rtx)0;
1758
1759 regno = regbase + *arg_words;
1760 result_mode = (mode == BLKmode ? TYPE_MODE (type) : mode);
1761
1762 /* We need to make sure that references to a7 are represented with
1763 rtx that is not equal to hard_frame_pointer_rtx. For BLKmode and
1764 modes bigger than 2 words (because we only have patterns for
1765 modes of 2 words or smaller), we can't control the expansion
1766 unless we explicitly list the individual registers in a PARALLEL. */
1767
1768 if ((mode == BLKmode || words > 2)
1769 && regno < A7_REG
1770 && regno + words > A7_REG)
1771 {
1772 rtx result;
1773 int n;
1774
1775 result = gen_rtx_PARALLEL (result_mode, rtvec_alloc (words));
1776 for (n = 0; n < words; n++)
1777 {
1778 XVECEXP (result, 0, n) =
1779 gen_rtx_EXPR_LIST (VOIDmode,
1780 gen_raw_REG (SImode, regno + n),
1781 GEN_INT (n * UNITS_PER_WORD));
1782 }
1783 return result;
1784 }
1785
1786 return gen_raw_REG (result_mode, regno);
1787}
1788
1789
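/* Implement OVERRIDE_OPTIONS: check option consistency and initialize
   the tables used by the rest of the backend (load/store opcode names,
   constraint-letter classes, and the hard-regno/mode table). */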
1790void
1791override_options ()
1792{
1793 int regno;
1794 enum machine_mode mode;
1795
1796 if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
1797 error ("boolean registers required for the floating-point option");
1798
1799 /* set up the tables of ld/st opcode names for block moves */
1800 xtensa_ld_opcodes[(int) SImode] = "l32i";
1801 xtensa_ld_opcodes[(int) HImode] = "l16ui";
1802 xtensa_ld_opcodes[(int) QImode] = "l8ui";
1803 xtensa_st_opcodes[(int) SImode] = "s32i";
1804 xtensa_st_opcodes[(int) HImode] = "s16i";
1805 xtensa_st_opcodes[(int) QImode] = "s8i";
1806
1807 xtensa_char_to_class['q'] = SP_REG;
1808 xtensa_char_to_class['a'] = GR_REGS;
1809 xtensa_char_to_class['b'] = ((TARGET_BOOLEANS) ? BR_REGS : NO_REGS);
1810 xtensa_char_to_class['f'] = ((TARGET_HARD_FLOAT) ? FP_REGS : NO_REGS);
1811 xtensa_char_to_class['A'] = ((TARGET_MAC16) ? ACC_REG : NO_REGS);
1812 xtensa_char_to_class['B'] = ((TARGET_SEXT) ? GR_REGS : NO_REGS);
1813 xtensa_char_to_class['C'] = ((TARGET_MUL16) ? GR_REGS: NO_REGS);
1814 xtensa_char_to_class['D'] = ((TARGET_DENSITY) ? GR_REGS: NO_REGS);
1815 xtensa_char_to_class['d'] = ((TARGET_DENSITY) ? AR_REGS: NO_REGS);
1816
1817 /* Set up array giving whether a given register can hold a given mode. */
1818 for (mode = VOIDmode;
1819 mode != MAX_MACHINE_MODE;
1820 mode = (enum machine_mode) ((int) mode + 1))
1821 {
1822 int size = GET_MODE_SIZE (mode);
1823 enum mode_class class = GET_MODE_CLASS (mode);
1824
1825 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1826 {
1827 int temp;
1828
1829 if (ACC_REG_P (regno))
1830 temp = (TARGET_MAC16 &&
1831 (class == MODE_INT) && (size <= UNITS_PER_WORD));
1832 else if (GP_REG_P (regno))
1833 temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
1834 else if (FP_REG_P (regno))
1835 temp = (TARGET_HARD_FLOAT && (mode == SFmode));
1836 else if (BR_REG_P (regno))
1837 temp = (TARGET_BOOLEANS && (mode == CCmode));
1838 else
1839 temp = FALSE;
1840
1841 xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
1842 }
1843 }
1844
1845 init_machine_status = xtensa_init_machine_status;
1846 free_machine_status = xtensa_free_machine_status;
1847
1848 /* Check PIC settings. There's no need for -fPIC on Xtensa and
1849 some targets need to always use PIC. */
1850 if (XTENSA_ALWAYS_PIC)
1851 {
1852 if (flag_pic)
1853 warning ("-f%s ignored (all code is position independent)",
1854 (flag_pic > 1 ? "PIC" : "pic"));
1855 flag_pic = 1;
1856 }
1857 if (flag_pic > 1)
1858 flag_pic = 1;
1859}
1860
1861
1862/* A C compound statement to output to stdio stream STREAM the
1863 assembler syntax for an instruction operand X. X is an RTL
1864 expression.
1865
1866 CODE is a value that can be used to specify one of several ways
1867 of printing the operand. It is used when identical operands
1868 must be printed differently depending on the context. CODE
1869 comes from the '%' specification that was used to request
1870 printing of the operand. If the specification was just '%DIGIT'
1871 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1872 is the ASCII code for LTR.
1873
1874 If X is a register, this macro should print the register's name.
1875 The names can be found in an array 'reg_names' whose type is
1876 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
1877
1878 When the machine description has a specification '%PUNCT' (a '%'
1879 followed by a punctuation character), this macro is called with
1880 a null pointer for X and the punctuation character for CODE.
1881
1882 'a', 'c', 'l', and 'n' are reserved.
1883
1884 The Xtensa specific codes are:
1885
1886 'd' CONST_INT, print as signed decimal
1887 'x' CONST_INT, print as signed hexadecimal
1888 'K' CONST_INT, print number of bits in mask for EXTUI
1889 'R' CONST_INT, print (X & 0x1f)
1890 'L' CONST_INT, print ((32 - X) & 0x1f)
1891 'D' REG, print second register of double-word register operand
1892 'N' MEM, print address of next word following a memory operand
1893 'v' MEM, if memory reference is volatile, output a MEMW before it
1894*/
1895
1896static void
1897printx (file, val)
1898 FILE *file;
1899 signed int val;
1900{
1901 /* print a hexadecimal value in a nice way */
1902 if ((val > -0xa) && (val < 0xa))
1903 fprintf (file, "%d", val);
1904 else if (val < 0)
1905 fprintf (file, "-0x%x", -val);
1906 else
1907 fprintf (file, "0x%x", val);
1908}
1909
1910
1911void
1912print_operand (file, op, letter)
1913 FILE *file; /* file to write to */
1914 rtx op; /* operand to print */
1915 int letter; /* %<letter> or 0 */
1916{
1917 enum rtx_code code;
1918
1919 if (! op)
1920 error ("PRINT_OPERAND null pointer");
1921
1922 code = GET_CODE (op);
1923 switch (code)
1924 {
1925 case REG:
1926 case SUBREG:
1927 {
1928 int regnum = xt_true_regnum (op);
1929 if (letter == 'D')
1930 regnum++;
1931 fprintf (file, "%s", reg_names[regnum]);
1932 break;
1933 }
1934
1935 case MEM:
1936 /* For a volatile memory reference, emit a MEMW before the
1937 load or store. */
1938 if (letter == 'v')
1939 {
1940 if (MEM_VOLATILE_P (op) && TARGET_SERIALIZE_VOLATILE)
1941 fprintf (file, "memw\n\t");
1942 break;
1943 }
1944 else if (letter == 'N')
1945 {
1946 enum machine_mode mode;
1947 switch (GET_MODE (op))
1948 {
1949 case DFmode: mode = SFmode; break;
1950 case DImode: mode = SImode; break;
1951 default: abort ();
1952 }
1953 op = adjust_address (op, mode, 4);
1954 }
1955
1956 output_address (XEXP (op, 0));
1957 break;
1958
1959 case CONST_INT:
1960 switch (letter)
1961 {
1962 case 'K':
1963 {
1964 int num_bits = 0;
1965 unsigned val = INTVAL (op);
1966 while (val & 1)
1967 {
1968 num_bits += 1;
1969 val = val >> 1;
1970 }
1971 if ((val != 0) || (num_bits == 0) || (num_bits > 16))
1972 fatal_insn ("invalid mask", op);
1973
1974 fprintf (file, "%d", num_bits);
1975 break;
1976 }
1977
1978 case 'L':
1979 fprintf (file, "%d", (32 - INTVAL (op)) & 0x1f);
1980 break;
1981
1982 case 'R':
1983 fprintf (file, "%d", INTVAL (op) & 0x1f);
1984 break;
1985
1986 case 'x':
1987 printx (file, INTVAL (op));
1988 break;
1989
1990 case 'd':
1991 default:
1992 fprintf (file, "%d", INTVAL (op));
1993 break;
1994
1995 }
1996 break;
1997
1998 default:
1999 output_addr_const (file, op);
2000 }
2001}
2002
2003
2004/* A C compound statement to output to stdio stream STREAM the
2005 assembler syntax for an instruction operand that is a memory
2006 reference whose address is ADDR. ADDR is an RTL expression.
2007
2008 On some machines, the syntax for a symbolic address depends on
2009 the section that the address refers to. On these machines,
2010 define the macro 'ENCODE_SECTION_INFO' to store the information
2011 into the 'symbol_ref', and then check for it here. */
2012
2013void
2014print_operand_address (file, addr)
2015 FILE *file;
2016 rtx addr;
2017{
2018 if (!addr)
2019 error ("PRINT_OPERAND_ADDRESS, null pointer");
2020
2021 switch (GET_CODE (addr))
2022 {
2023 default:
2024 fatal_insn ("invalid address", addr);
2025 break;
2026
2027 case REG:
2028 fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
2029 break;
2030
2031 case PLUS:
2032 {
2033 rtx reg = (rtx)0;
2034 rtx offset = (rtx)0;
2035 rtx arg0 = XEXP (addr, 0);
2036 rtx arg1 = XEXP (addr, 1);
2037
2038 if (GET_CODE (arg0) == REG)
2039 {
2040 reg = arg0;
2041 offset = arg1;
2042 }
2043 else if (GET_CODE (arg1) == REG)
2044 {
2045 reg = arg1;
2046 offset = arg0;
2047 }
2048 else
2049 fatal_insn ("no register in address", addr);
2050
2051 if (CONSTANT_P (offset))
2052 {
2053 fprintf (file, "%s, ", reg_names [REGNO (reg)]);
2054 output_addr_const (file, offset);
2055 }
2056 else
2057 fatal_insn ("address offset not a constant", addr);
2058 }
2059 break;
2060
2061 case LABEL_REF:
2062 case SYMBOL_REF:
2063 case CONST_INT:
2064 case CONST:
2065 output_addr_const (file, addr);
2066 break;
2067 }
2068}
2069
2070
2071/* Emit either a label, .comm, or .lcomm directive. */
2072
2073void
2074xtensa_declare_object (file, name, init_string, final_string, size)
2075 FILE *file;
2076 char *name;
2077 char *init_string;
2078 char *final_string;
2079 int size;
2080{
2081 fputs (init_string, file); /* "", "\t.comm\t", or "\t.lcomm\t" */
2082 assemble_name (file, name);
2083 fprintf (file, final_string, size); /* ":\n", ",%u\n", ",%u\n" */
2084}
2085
2086
2087void
2088xtensa_output_literal (file, x, mode, labelno)
2089 FILE *file;
2090 rtx x;
2091 enum machine_mode mode;
2092 int labelno;
2093{
2094 long value_long[2];
2095 REAL_VALUE_TYPE r;
2096 int size;
2097
2098 fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);
2099
2100 switch (GET_MODE_CLASS (mode))
2101 {
2102 case MODE_FLOAT:
2103 if (GET_CODE (x) != CONST_DOUBLE)
2104 abort ();
2105
2106 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2107 switch (mode)
2108 {
2109 case SFmode:
2110 REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
2111 fprintf (file, "0x%08lx\n", value_long[0]);
2112 break;
2113
2114 case DFmode:
2115 REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
2116 fprintf (file, "0x%08lx, 0x%08lx\n",
2117 value_long[0], value_long[1]);
2118 break;
2119
2120 default:
2121 abort ();
2122 }
2123
2124 break;
2125
2126 case MODE_INT:
2127 case MODE_PARTIAL_INT:
2128 size = GET_MODE_SIZE (mode);
2129 if (size == 4)
2130 {
2131 output_addr_const (file, x);
2132 fputs ("\n", file);
2133 }
2134 else if (size == 8)
2135 {
2136 output_addr_const (file, operand_subword (x, 0, 0, DImode));
2137 fputs (", ", file);
2138 output_addr_const (file, operand_subword (x, 1, 0, DImode));
2139 fputs ("\n", file);
2140 }
2141 else
2142 abort ();
2143 break;
2144
2145 default:
2146 abort ();
2147 }
2148}
2149
2150
2151/* Return the bytes needed to compute the frame pointer from the current
2152 stack pointer. */
2153
2154#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2155#define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
2156
2157long
2158compute_frame_size (size)
2159 int size; /* # of var. bytes allocated */
2160{
2161 /* add space for the incoming static chain value */
2162 if (current_function_needs_context)
2163 size += (1 * UNITS_PER_WORD);
2164
2165 xtensa_current_frame_size =
2166 XTENSA_STACK_ALIGN (size
2167 + current_function_outgoing_args_size
2168 + (WINDOW_SIZE * UNITS_PER_WORD));
2169 return xtensa_current_frame_size;
2170}
2171
2172
2173int
2174xtensa_frame_pointer_required ()
2175{
2176 /* The code to expand builtin_frame_addr and builtin_return_addr
2177 currently uses the hard_frame_pointer instead of frame_pointer.
2178 This seems wrong but maybe it's necessary for other architectures.
2179 This function is derived from the i386 code. */
2180
2181 if (cfun->machine->accesses_prev_frame)
2182 return 1;
2183
2184 return 0;
2185}
2186
2187
2188void
2189xtensa_reorg (first)
2190 rtx first;
2191{
2192 rtx insn, set_frame_ptr_insn = 0;
2193
2194 unsigned long tsize = compute_frame_size (get_frame_size ());
2195 if (tsize < (1 << (12+3)))
2196 frame_size_const = 0;
2197 else
2198 {
2199 frame_size_const = force_const_mem (SImode, GEN_INT (tsize - 16));
2200
2201 /* make sure the constant is used so it doesn't get eliminated
2202 from the constant pool */
2203 emit_insn_before (gen_rtx_USE (SImode, frame_size_const), first);
2204 }
2205
2206 if (!frame_pointer_needed)
2207 return;
2208
2209 /* Search all instructions, looking for the insn that sets up the
2210 frame pointer. This search will fail if the function does not
2211 have an incoming argument in $a7, but in that case, we can just
2212 set up the frame pointer at the very beginning of the
2213 function. */
2214
2215 for (insn = first; insn; insn = NEXT_INSN (insn))
2216 {
2217 rtx pat;
2218
2219 if (!INSN_P (insn))
2220 continue;
2221
2222 pat = PATTERN (insn);
2223 if (GET_CODE (pat) == UNSPEC_VOLATILE
2224 && (XINT (pat, 1) == UNSPECV_SET_FP))
2225 {
2226 set_frame_ptr_insn = insn;
2227 break;
2228 }
2229 }
2230
2231 if (set_frame_ptr_insn)
2232 {
2233 /* for all instructions prior to set_frame_ptr_insn, replace
2234 hard_frame_pointer references with stack_pointer */
2235 for (insn = first; insn != set_frame_ptr_insn; insn = NEXT_INSN (insn))
2236 {
2237 if (INSN_P (insn))
2238 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2239 hard_frame_pointer_rtx,
2240 stack_pointer_rtx);
2241 }
2242 }
2243 else
2244 {
2245 /* emit the frame pointer move immediately after the NOTE that starts
2246 the function */
2247 emit_insn_after (gen_movsi (hard_frame_pointer_rtx,
2248 stack_pointer_rtx), first);
2249 }
2250}
2251
2252
2253/* Set up the stack and frame (if desired) for the function. */
2254
2255void
2256xtensa_function_prologue (file, size)
2257 FILE *file;
2258 int size ATTRIBUTE_UNUSED;
2259{
2260 unsigned long tsize = compute_frame_size (get_frame_size ());
2261
2262 if (frame_pointer_needed)
2263 fprintf (file, "\t.frame\ta7, %ld\n", tsize);
2264 else
2265 fprintf (file, "\t.frame\tsp, %ld\n", tsize);
2266
2267
2268 if (tsize < (1 << (12+3)))
2269 {
2270 fprintf (file, "\tentry\tsp, %ld\n", tsize);
2271 }
2272 else
2273 {
2274 fprintf (file, "\tentry\tsp, 16\n");
2275
2276 /* use a8 as a temporary since a0-a7 may be live */
2277 fprintf (file, "\tl32r\ta8, ");
2278 print_operand (file, frame_size_const, 0);
2279 fprintf (file, "\n\tsub\ta8, sp, a8\n");
2280 fprintf (file, "\tmovsp\tsp, a8\n");
2281 }
2282}
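/* For illustration (the literal-pool label is an assumption): a
   64-byte frame without a frame pointer produces

     .frame  sp, 64
     entry   sp, 64

   whereas a frame of 1 << 15 bytes or more does not fit in the entry
   immediate, so only 16 bytes are allocated by entry and the rest is
   subtracted via a constant loaded from the literal pool:

     entry   sp, 16
     l32r    a8, .LC0
     sub     a8, sp, a8
     movsp   sp, a8  */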
2283
2284
2285/* Do any necessary cleanup after a function to restore
2286 stack, frame, and regs. */
2287
2288void
2289xtensa_function_epilogue (file, size)
2290 FILE *file;
2291 int size ATTRIBUTE_UNUSED;
2292{
2293 rtx insn = get_last_insn ();
2294 /* If the last insn was a BARRIER, we don't have to write anything. */
2295 if (GET_CODE (insn) == NOTE)
2296 insn = prev_nonnote_insn (insn);
2297 if (insn == 0 || GET_CODE (insn) != BARRIER)
2298 fprintf (file, TARGET_DENSITY ? "\tretw.n\n" : "\tretw\n");
2299
2300 xtensa_current_frame_size = 0;
2301}
2302
2303
2304/* Create the va_list data type.
2305 This structure is set up by __builtin_saveregs. The __va_reg
2306 field points to a stack-allocated region holding the contents of the
2307 incoming argument registers. The __va_ndx field is an index initialized
2308 to the position of the first unnamed (variable) argument. This same index
2309 is also used to address the arguments passed in memory. Thus, the
2310 __va_stk field is initialized to point to the position of the first
2311 argument in memory, offset to account for the arguments passed in
2312 registers. E.g., if there are 6 argument registers, and each register is
2313 4 bytes, then __va_stk is set to $sp - (6 * 4); then __va_reg[N*4]
2314 references argument word N for 0 <= N < 6, and __va_stk[N*4] references
2315 argument word N for N >= 6. */
2316
2317tree
2318xtensa_build_va_list (void)
2319{
2320 tree f_stk, f_reg, f_ndx, record;
2321
2322 record = make_node (RECORD_TYPE);
2323
2324 f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
2325 ptr_type_node);
2326 f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
2327 ptr_type_node);
2328 f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
2329 integer_type_node);
2330
2331 DECL_FIELD_CONTEXT (f_stk) = record;
2332 DECL_FIELD_CONTEXT (f_reg) = record;
2333 DECL_FIELD_CONTEXT (f_ndx) = record;
2334
2335 TYPE_FIELDS (record) = f_stk;
2336 TREE_CHAIN (f_stk) = f_reg;
2337 TREE_CHAIN (f_reg) = f_ndx;
2338
2339 layout_type (record);
2340 return record;
2341}
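/* Written out as C, the record built above corresponds roughly to the
   following (an illustrative sketch; the real type is constructed
   directly as trees and the typedef name is an assumption):

     typedef struct
     {
       void *__va_stk;
       void *__va_reg;
       int __va_ndx;
     } __xtensa_va_list;

   __va_reg points at the spilled register arguments, __va_stk at the
   arguments passed in memory (biased back by the size of the register
   save area), and __va_ndx is the byte index used with both.  */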
2342
2343
2344/* Save the incoming argument registers on the stack. Returns the
2345 address of the saved registers. */
2346
2347rtx
2348xtensa_builtin_saveregs ()
2349{
2350 rtx gp_regs, dest;
2351 int arg_words = current_function_arg_words;
2352 int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
2353 int i;
2354
2355 if (gp_left == 0)
2356 return const0_rtx;
2357
2358 /* allocate the general-purpose register space */
2359 gp_regs = assign_stack_local
2360 (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
2361 MEM_IN_STRUCT_P (gp_regs) = 1;
2362 RTX_UNCHANGING_P (gp_regs) = 1;
2363 RTX_UNCHANGING_P (XEXP (gp_regs, 0)) = 1;
2364
2365 /* Now store the incoming registers. */
2366 dest = change_address (gp_regs, SImode,
2367 plus_constant (XEXP (gp_regs, 0),
2368 arg_words * UNITS_PER_WORD));
2369
2370 /* Note: Don't use move_block_from_reg() here because the incoming
2371 argument in a7 cannot be represented by hard_frame_pointer_rtx.
2372 Instead, call gen_raw_REG() directly so that we get a distinct
2373 instance of (REG:SI 7). */
2374 for (i = 0; i < gp_left; i++)
2375 {
2376 emit_move_insn (operand_subword (dest, i, 1, BLKmode),
2377 gen_raw_REG (SImode, GP_ARG_FIRST + arg_words + i));
2378 }
2379
2380 return XEXP (gp_regs, 0);
2381}
2382
2383
2384/* Implement `va_start' for varargs and stdarg. We look at the
2385 current function to fill in an initial va_list. */
2386
2387void
2388xtensa_va_start (stdarg_p, valist, nextarg)
2389 int stdarg_p ATTRIBUTE_UNUSED;
2390 tree valist;
2391 rtx nextarg ATTRIBUTE_UNUSED;
2392{
2393 tree f_stk, stk;
2394 tree f_reg, reg;
2395 tree f_ndx, ndx;
2396 tree t, u;
2397 int arg_words;
2398
2399 arg_words = current_function_args_info.arg_words;
2400
2401 f_stk = TYPE_FIELDS (va_list_type_node);
2402 f_reg = TREE_CHAIN (f_stk);
2403 f_ndx = TREE_CHAIN (f_reg);
2404
2405 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2406 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2407 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2408
2409 /* Call __builtin_saveregs; save the result in __va_reg */
2410 current_function_arg_words = arg_words;
2411 u = make_tree (ptr_type_node, expand_builtin_saveregs ());
2412 t = build (MODIFY_EXPR, ptr_type_node, reg, u);
2413 TREE_SIDE_EFFECTS (t) = 1;
2414 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2415
2416 /* Set the __va_stk member to $arg_ptr - (size of __va_reg area) */
2417 u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
2418 u = fold (build (PLUS_EXPR, ptr_type_node, u,
2419 build_int_2 (-MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1)));
2420 t = build (MODIFY_EXPR, ptr_type_node, stk, u);
2421 TREE_SIDE_EFFECTS (t) = 1;
2422 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2423
2424 /* Set the __va_ndx member. */
2425 u = build_int_2 (arg_words * UNITS_PER_WORD, 0);
2426 t = build (MODIFY_EXPR, integer_type_node, ndx, u);
2427 TREE_SIDE_EFFECTS (t) = 1;
2428 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2429}
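/* The net effect of the expansions above, as C pseudo-code
   (illustrative only; ARG_WORDS stands for the number of named
   argument words and __arg_ptr for virtual_incoming_args_rtx):

     ap.__va_reg = __builtin_saveregs ();
     ap.__va_stk = __arg_ptr - MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD;
     ap.__va_ndx = ARG_WORDS * UNITS_PER_WORD;  */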
2430
2431
2432/* Implement `va_arg'. */
2433
2434rtx
2435xtensa_va_arg (valist, type)
2436 tree valist, type;
2437{
2438 tree f_stk, stk;
2439 tree f_reg, reg;
2440 tree f_ndx, ndx;
2441 tree tmp, addr_tree, type_size;
2442 rtx array, orig_ndx, r, addr, size, va_size;
2443 rtx lab_false, lab_over, lab_false2;
2444
2445 f_stk = TYPE_FIELDS (va_list_type_node);
2446 f_reg = TREE_CHAIN (f_stk);
2447 f_ndx = TREE_CHAIN (f_reg);
2448
2449 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2450 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2451 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2452
2453 type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
2454
2455 va_size = gen_reg_rtx (SImode);
2456 tmp = fold (build (MULT_EXPR, sizetype,
2457 fold (build (TRUNC_DIV_EXPR, sizetype,
2458 fold (build (PLUS_EXPR, sizetype,
2459 type_size,
2460 size_int (UNITS_PER_WORD - 1))),
2461 size_int (UNITS_PER_WORD))),
2462 size_int (UNITS_PER_WORD)));
2463 r = expand_expr (tmp, va_size, SImode, EXPAND_NORMAL);
2464 if (r != va_size)
2465 emit_move_insn (va_size, r);
2466
2467
2468 /* First align __va_ndx to a double word boundary if necessary for this arg:
2469
2470 if (__alignof__ (TYPE) > 4)
2471 (AP).__va_ndx = (((AP).__va_ndx + 7) & -8)
2472 */
2473
2474 if (TYPE_ALIGN (type) > BITS_PER_WORD)
2475 {
2476 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2477 build_int_2 ((2 * UNITS_PER_WORD) - 1, 0));
2478 tmp = build (BIT_AND_EXPR, integer_type_node, tmp,
2479 build_int_2 (-2 * UNITS_PER_WORD, -1));
2480 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2481 TREE_SIDE_EFFECTS (tmp) = 1;
2482 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2483 }
2484
2485
2486 /* Increment __va_ndx to point past the argument:
2487
2488 orig_ndx = (AP).__va_ndx;
2489 (AP).__va_ndx += __va_size (TYPE);
2490 */
2491
2492 orig_ndx = gen_reg_rtx (SImode);
2493 r = expand_expr (ndx, orig_ndx, SImode, EXPAND_NORMAL);
2494 if (r != orig_ndx)
2495 emit_move_insn (orig_ndx, r);
2496
2497 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2498 make_tree (intSI_type_node, va_size));
2499 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2500 TREE_SIDE_EFFECTS (tmp) = 1;
2501 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2502
2503
2504 /* Check if the argument is in registers:
2505
2506 if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
2507 && !MUST_PASS_IN_STACK (type))
2508 __array = (AP).__va_reg;
2509 */
2510
2511 array = gen_reg_rtx (Pmode);
2512
2513 lab_over = NULL_RTX;
2514 if (!MUST_PASS_IN_STACK (VOIDmode, type))
2515 {
2516 lab_false = gen_label_rtx ();
2517 lab_over = gen_label_rtx ();
2518
2519 emit_cmp_and_jump_insns (expand_expr (ndx, NULL_RTX, SImode,
2520 EXPAND_NORMAL),
2521 GEN_INT (MAX_ARGS_IN_REGISTERS
2522 * UNITS_PER_WORD),
2523 GT, const1_rtx, SImode, 0, lab_false);
2524
2525 r = expand_expr (reg, array, Pmode, EXPAND_NORMAL);
2526 if (r != array)
2527 emit_move_insn (array, r);
2528
2529 emit_jump_insn (gen_jump (lab_over));
2530 emit_barrier ();
2531 emit_label (lab_false);
2532 }
2533
2534 /* ...otherwise, the argument is on the stack (never split between
2535 registers and the stack -- change __va_ndx if necessary):
2536
2537 else
2538 {
2539 if (orig_ndx < __MAX_ARGS_IN_REGISTERS * 4)
2540 (AP).__va_ndx = __MAX_ARGS_IN_REGISTERS * 4 + __va_size (TYPE);
2541 __array = (AP).__va_stk;
2542 }
2543 */
2544
2545 lab_false2 = gen_label_rtx ();
2546 emit_cmp_and_jump_insns (orig_ndx,
2547 GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
2548 GE, const1_rtx, SImode, 0, lab_false2);
2549
2550 tmp = build (PLUS_EXPR, sizetype, make_tree (intSI_type_node, va_size),
2551 build_int_2 (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, 0));
2552 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2553 TREE_SIDE_EFFECTS (tmp) = 1;
2554 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2555
2556 emit_label (lab_false2);
2557
2558 r = expand_expr (stk, array, Pmode, EXPAND_NORMAL);
2559 if (r != array)
2560 emit_move_insn (array, r);
2561
2562 if (lab_over != NULL_RTX)
2563 emit_label (lab_over);
2564
2565
2566 /* Given the base array pointer (__array) and index to the subsequent
2567 argument (__va_ndx), find the address:
2568
2569 __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
2570 ? sizeof (TYPE)
2571 : __va_size (TYPE))
2572
2573 The results are endian-dependent because values smaller than one word
2574 are aligned differently.
2575 */
2576
2577 size = gen_reg_rtx (SImode);
2578 emit_move_insn (size, va_size);
2579
2580 if (BYTES_BIG_ENDIAN)
2581 {
2582 rtx lab_use_va_size = gen_label_rtx ();
2583
2584 emit_cmp_and_jump_insns (expand_expr (type_size, NULL_RTX, SImode,
2585 EXPAND_NORMAL),
2586 GEN_INT (PARM_BOUNDARY / BITS_PER_UNIT),
2587 GE, const1_rtx, SImode, 0, lab_use_va_size);
2588
2589 r = expand_expr (type_size, size, SImode, EXPAND_NORMAL);
2590 if (r != size)
2591 emit_move_insn (size, r);
2592
2593 emit_label (lab_use_va_size);
2594 }
2595
2596 addr_tree = build (PLUS_EXPR, ptr_type_node,
2597 make_tree (ptr_type_node, array),
2598 ndx);
2599 addr_tree = build (MINUS_EXPR, ptr_type_node, addr_tree,
2600 make_tree (intSI_type_node, size));
2601 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2602 addr = copy_to_reg (addr);
2603 return addr;
2604}
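/* Stitching the comment fragments above together, the expansion
   behaves like this C pseudo-code (illustrative; __va_size (TYPE) is
   sizeof (TYPE) rounded up to a whole number of words):

     if (__alignof__ (TYPE) > 4)
       ap.__va_ndx = (ap.__va_ndx + 7) & -8;
     orig_ndx = ap.__va_ndx;
     ap.__va_ndx += __va_size (TYPE);
     if (ap.__va_ndx <= MAX_ARGS_IN_REGISTERS * 4
         && !MUST_PASS_IN_STACK (TYPE))
       array = ap.__va_reg;
     else
       {
         if (orig_ndx < MAX_ARGS_IN_REGISTERS * 4)
           ap.__va_ndx = MAX_ARGS_IN_REGISTERS * 4 + __va_size (TYPE);
         array = ap.__va_stk;
       }
     return array + ap.__va_ndx
            - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
               ? sizeof (TYPE) : __va_size (TYPE));  */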
2605
2606
2607enum reg_class
2608xtensa_preferred_reload_class (x, class)
2609 rtx x;
2610 enum reg_class class;
2611{
2612 if (CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
2613 return NO_REGS;
2614
2615 /* Don't use sp for reloads! */
2616 if (class == AR_REGS)
2617 return GR_REGS;
2618
2619 return class;
2620}
2621
2622
2623enum reg_class
2624xtensa_secondary_reload_class (class, mode, x, isoutput)
2625 enum reg_class class;
2626 enum machine_mode mode ATTRIBUTE_UNUSED;
2627 rtx x;
2628 int isoutput;
2629{
2630 int regno;
2631
2632 if (GET_CODE (x) == SIGN_EXTEND)
2633 x = XEXP (x, 0);
2634 regno = xt_true_regnum (x);
2635
2636 if (!isoutput)
2637 {
2638 if (class == FP_REGS && constantpool_mem_p (x))
2639 return GR_REGS;
2640 }
2641
2642 if (ACC_REG_P (regno))
2643 return (class == GR_REGS ? NO_REGS : GR_REGS);
2644 if (class == ACC_REG)
2645 return (GP_REG_P (regno) ? NO_REGS : GR_REGS);
2646
2647 return NO_REGS;
2648}
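/* Illustrative consequences of the checks above: an FP register loaded
   directly from a constant-pool reference needs a GR_REGS intermediate,
   and a move between the MAC16 accumulator and anything other than a
   general AR register is likewise staged through GR_REGS.  */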
2649
2650
2651void
2652order_regs_for_local_alloc ()
2653{
2654 if (!leaf_function_p ())
2655 {
2656 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2657 FIRST_PSEUDO_REGISTER * sizeof (int));
2658 }
2659 else
2660 {
2661 int i, num_arg_regs;
2662 int nxt = 0;
2663
2664 /* use the AR registers in increasing order (skipping a0 and a1)
2665 but save the incoming argument registers for a last resort */
2666 num_arg_regs = current_function_args_info.arg_words;
2667 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2668 num_arg_regs = MAX_ARGS_IN_REGISTERS;
2669 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2670 reg_alloc_order[nxt++] = i + num_arg_regs;
2671 for (i = 0; i < num_arg_regs; i++)
2672 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
2673
2674 /* list the FP registers in order for now */
2675 for (i = 0; i < 16; i++)
2676 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2677
2678 /* GCC requires that we list *all* the registers.... */
2679 reg_alloc_order[nxt++] = 0; /* a0 = return address */
2680 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
2681 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
2682 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
2683
2684 /* list the coprocessor registers in order */
2685 for (i = 0; i < BR_REG_NUM; i++)
2686 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2687
2688 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
2689 }
2690}
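/* A worked example (assuming GP_ARG_FIRST == 2 and
   MAX_ARGS_IN_REGISTERS == 6, which are illustrative values): for a
   leaf function whose arguments occupy two words in a2-a3, the order
   becomes a4 through a15, then a2 and a3 as a last resort, then the
   floating-point registers, a0, a1, the two pseudo registers, the
   branch registers, and finally the MAC16 accumulator.  */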
2691
2692
2693/* A customized version of reg_overlap_mentioned_p that only looks for
2694 references to a7 (as opposed to hard_frame_pointer_rtx). */
2695
2696int
2697a7_overlap_mentioned_p (x)
2698 rtx x;
2699{
2700 int i, j;
2701 unsigned int x_regno;
2702 const char *fmt;
2703
2704 if (GET_CODE (x) == REG)
2705 {
2706 x_regno = REGNO (x);
2707 return (x != hard_frame_pointer_rtx
2708 && x_regno < A7_REG + 1
2709 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2710 }
2711
2712 if (GET_CODE (x) == SUBREG
2713 && GET_CODE (SUBREG_REG (x)) == REG
2714 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
2715 {
2716 x_regno = subreg_regno (x);
2717 return (SUBREG_REG (x) != hard_frame_pointer_rtx
2718 && x_regno < A7_REG + 1
2719 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2720 }
2721
2722 /* X does not match, so try its subexpressions. */
2723 fmt = GET_RTX_FORMAT (GET_CODE (x));
2724 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2725 {
2726 if (fmt[i] == 'e')
2727 {
2728 if (a7_overlap_mentioned_p (XEXP (x, i)))
2729 return 1;
2730 }
2731 else if (fmt[i] == 'E')
2732 {
2733 for (j = XVECLEN (x, i) - 1; j >=0; j--)
2734 if (a7_overlap_mentioned_p (XVECEXP (x, i, j)))
2735 return 1;
2736 }
2737 }
2738
2739 return 0;
2740}
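/* Illustrative cases (derived from the checks above): a distinct
   (reg:SI 7) created with gen_raw_REG matches, and so does a DImode
   value in a6 that spills over into a7, but hard_frame_pointer_rtx
   itself is deliberately excluded even though it is also a7.  */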
2741
2742/* The literal pool stays with the function. */
2743
2744static void
2745xtensa_select_rtx_section (mode, x, align)
2746 enum machine_mode mode ATTRIBUTE_UNUSED;
2747 rtx x ATTRIBUTE_UNUSED;
2748 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
2749{
2750 function_section (current_function_decl);
2751}