/* Imported from the gcc.gnu.org Git mirror: gcc/config/stormy16/stormy16.c.  */
1 /* Xstormy16 target functions.
2 Copyright (C) 1997-2015 Free Software Foundation, Inc.
3 Contributed by Red Hat, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "regs.h"
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "conditions.h"
30 #include "insn-flags.h"
31 #include "output.h"
32 #include "insn-attr.h"
33 #include "flags.h"
34 #include "recog.h"
35 #include "diagnostic-core.h"
36 #include "obstack.h"
37 #include "tree.h"
38 #include "stringpool.h"
39 #include "stor-layout.h"
40 #include "varasm.h"
41 #include "calls.h"
42 #include "expr.h"
43 #include "insn-codes.h"
44 #include "optabs.h"
45 #include "except.h"
46 #include "hashtab.h"
47 #include "hash-set.h"
48 #include "vec.h"
49 #include "machmode.h"
50 #include "input.h"
51 #include "function.h"
52 #include "target.h"
53 #include "target-def.h"
54 #include "tm_p.h"
55 #include "langhooks.h"
56 #include "hash-table.h"
57 #include "ggc.h"
58 #include "predict.h"
59 #include "dominance.h"
60 #include "cfg.h"
61 #include "cfgrtl.h"
62 #include "cfganal.h"
63 #include "lcm.h"
64 #include "cfgbuild.h"
65 #include "cfgcleanup.h"
66 #include "basic-block.h"
67 #include "tree-ssa-alias.h"
68 #include "internal-fn.h"
69 #include "gimple-fold.h"
70 #include "tree-eh.h"
71 #include "gimple-expr.h"
72 #include "is-a.h"
73 #include "gimple.h"
74 #include "gimplify.h"
75 #include "df.h"
76 #include "reload.h"
77 #include "builtins.h"
78
/* Forward declarations of local functions defined later in this file.  */
static rtx emit_addhi3_postreload (rtx, rtx, rtx);
static void xstormy16_asm_out_constructor (rtx, int);
static void xstormy16_asm_out_destructor (rtx, int);
static void xstormy16_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
					   HOST_WIDE_INT, tree);

static void xstormy16_init_builtins (void);
static rtx xstormy16_expand_builtin (tree, rtx, rtx, machine_mode, int);
static bool xstormy16_rtx_costs (rtx, int, int, int, int *, bool);
static int xstormy16_address_cost (rtx, machine_mode, addr_space_t, bool);
static bool xstormy16_return_in_memory (const_tree, const_tree);

/* Section used for data placed in the special "below 100" address
   range; GC-rooted because sections are GC-allocated.  */
static GTY(()) section *bss100_section;
92
93 /* Compute a (partial) cost for rtx X. Return true if the complete
94 cost has been computed, and false if subexpressions should be
95 scanned. In either case, *TOTAL contains the cost result. */
96
97 static bool
98 xstormy16_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
99 int opno ATTRIBUTE_UNUSED, int *total,
100 bool speed ATTRIBUTE_UNUSED)
101 {
102 switch (code)
103 {
104 case CONST_INT:
105 if (INTVAL (x) < 16 && INTVAL (x) >= 0)
106 *total = COSTS_N_INSNS (1) / 2;
107 else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
108 *total = COSTS_N_INSNS (1);
109 else
110 *total = COSTS_N_INSNS (2);
111 return true;
112
113 case CONST_DOUBLE:
114 case CONST:
115 case SYMBOL_REF:
116 case LABEL_REF:
117 *total = COSTS_N_INSNS (2);
118 return true;
119
120 case MULT:
121 *total = COSTS_N_INSNS (35 + 6);
122 return true;
123 case DIV:
124 *total = COSTS_N_INSNS (51 - 6);
125 return true;
126
127 default:
128 return false;
129 }
130 }
131
132 static int
133 xstormy16_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
134 addr_space_t as ATTRIBUTE_UNUSED,
135 bool speed ATTRIBUTE_UNUSED)
136 {
137 return (CONST_INT_P (x) ? 2
138 : GET_CODE (x) == PLUS ? 7
139 : 5);
140 }
141
142 /* Worker function for TARGET_MEMORY_MOVE_COST. */
143
144 static int
145 xstormy16_memory_move_cost (machine_mode mode, reg_class_t rclass,
146 bool in)
147 {
148 return (5 + memory_move_secondary_cost (mode, rclass, in));
149 }
150
/* Branches are handled as follows:

   1. HImode compare-and-branches.  The machine supports these
      natively, so the appropriate pattern is emitted directly.

   2. SImode EQ and NE.  These are emitted as pairs of HImode
      compare-and-branches.

   3. SImode LT, GE, LTU and GEU.  These are emitted as a sequence
      of a SImode subtract followed by a branch (not a compare-and-branch),
      like this:
      sub
      sbc
      blt

   4. SImode GT, LE, GTU, LEU.  These are emitted as a sequence like:
      sub
      sbc
      blt
      or
      bne.  */

/* Emit a branch of kind CODE, comparing OP0 against OP1, to location
   LOC.  OP0 must be HImode or SImode; SImode comparisons are
   decomposed into HImode pieces as described above, recursing into
   this function for the simpler branches.  */

void
xstormy16_emit_cbranch (enum rtx_code code, rtx op0, rtx op1, rtx loc)
{
  rtx condition_rtx, loc_ref, branch, cy_clobber;
  rtvec vec;
  machine_mode mode;

  mode = GET_MODE (op0);
  gcc_assert (mode == HImode || mode == SImode);

  /* Case 4 above: express GT/LE/GTU/LEU via LT/LTU plus an EQ/NE
     test.  E.g. "branch if OP0 > OP1" becomes "skip ahead if
     OP0 < OP1, then branch if OP0 != OP1".  */
  if (mode == SImode
      && (code == GT || code == LE || code == GTU || code == LEU))
    {
      int unsigned_p = (code == GTU || code == LEU);
      int gt_p = (code == GT || code == GTU);
      rtx lab = NULL_RTX;

      if (gt_p)
	lab = gen_label_rtx ();
      xstormy16_emit_cbranch (unsigned_p ? LTU : LT, op0, op1, gt_p ? lab : loc);
      /* This should be generated as a comparison against the temporary
	 created by the previous insn, but reload can't handle that.  */
      xstormy16_emit_cbranch (gt_p ? NE : EQ, op0, op1, loc);
      if (gt_p)
	emit_label (lab);
      return;
    }
  /* Case 2 above, for a nonzero OP1: compare word by word.  For EQ,
     any differing word jumps past the branch; for NE, any differing
     word jumps straight to LOC.  */
  else if (mode == SImode
	   && (code == NE || code == EQ)
	   && op1 != const0_rtx)
    {
      rtx op0_word, op1_word;
      rtx lab = NULL_RTX;
      int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
      int i;

      if (code == EQ)
	lab = gen_label_rtx ();

      for (i = 0; i < num_words - 1; i++)
	{
	  op0_word = simplify_gen_subreg (word_mode, op0, mode,
					  i * UNITS_PER_WORD);
	  op1_word = simplify_gen_subreg (word_mode, op1, mode,
					  i * UNITS_PER_WORD);
	  xstormy16_emit_cbranch (NE, op0_word, op1_word, code == EQ ? lab : loc);
	}
      /* The last word is tested with the original condition.  */
      op0_word = simplify_gen_subreg (word_mode, op0, mode,
				      i * UNITS_PER_WORD);
      op1_word = simplify_gen_subreg (word_mode, op1, mode,
				      i * UNITS_PER_WORD);
      xstormy16_emit_cbranch (code, op0_word, op1_word, loc);

      if (code == EQ)
	emit_label (lab);
      return;
    }

  /* We can't allow reload to try to generate any reload after a branch,
     so when some register must match we must make the temporary ourselves.  */
  if (mode != HImode)
    {
      rtx tmp;
      tmp = gen_reg_rtx (mode);
      emit_move_insn (tmp, op0);
      op0 = tmp;
    }

  /* Build (set pc (if_then_else (cond) (label_ref loc) pc)).  */
  condition_rtx = gen_rtx_fmt_ee (code, mode, op0, op1);
  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
  branch = gen_rtx_SET (VOIDmode, pc_rtx,
			gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
					      loc_ref, pc_rtx));

  /* Carry clobber, attached to the HImode and subtract-based SImode
     forms below (the SImode EQ/NE form clobbers OP0 instead).  */
  cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));

  if (mode == HImode)
    vec = gen_rtvec (2, branch, cy_clobber);
  else if (code == NE || code == EQ)
    vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
  else
    {
      rtx sub;
#if 0
      sub = gen_rtx_SET (VOIDmode, op0, gen_rtx_MINUS (SImode, op0, op1));
#else
      sub = gen_rtx_CLOBBER (SImode, op0);
#endif
      vec = gen_rtvec (3, branch, sub, cy_clobber);
    }

  emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
}
268
/* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
   the arithmetic operation.  Most of the work is done by
   xstormy16_expand_arith.

   MODE is the mode of the comparison operands, LABEL the branch
   target, COMPARISON the comparison rtx, and DEST the scratch
   destination used for the subtraction.  */

void
xstormy16_split_cbranch (machine_mode mode, rtx label, rtx comparison,
			 rtx dest)
{
  rtx op0 = XEXP (comparison, 0);
  rtx op1 = XEXP (comparison, 1);
  rtx_insn *seq, *last_insn;
  rtx compare;

  /* Generate the subtract sequence into a pending sequence so the
     final insn can be edited before anything is emitted.  */
  start_sequence ();
  xstormy16_expand_arith (mode, COMPARE, dest, op0, op1);
  seq = get_insns ();
  end_sequence ();

  gcc_assert (INSN_P (seq));

  /* Walk to the last insn of the generated sequence.  */
  last_insn = seq;
  while (NEXT_INSN (last_insn) != NULL_RTX)
    last_insn = NEXT_INSN (last_insn);

  /* Rewrite that last insn in place: give its condition the original
     comparison code and point its target at LABEL.  */
  compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
  PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
  XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
  emit_insn (seq);
}
298
299
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label.

   OP is the conditional expression, or NULL for branch-always.

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.  */

char *
xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed,
			     rtx_insn *insn)
{
  /* Static buffer returned to the caller; overwritten on each call.  */
  static char string[64];
  /* The long form is needed when the computed length attribute for
     this insn is 8 (conditional) or 4 (unconditional).  */
  int need_longbranch = (op != NULL_RTX
			 ? get_attr_length (insn) == 8
			 : get_attr_length (insn) == 4);
  /* A long branch is emitted as an inverted short branch around a
     "jmpf", so the condition must additionally be reversed.  */
  int really_reversed = reversed ^ need_longbranch;
  const char *ccode;
  const char *templ;
  const char *operands;
  enum rtx_code code;

  /* Branch-always: a short "br" or a far "jmpf".  */
  if (! op)
    {
      if (need_longbranch)
	ccode = "jmpf";
      else
	ccode = "br";
      sprintf (string, "%s %s", ccode, label);
      return string;
    }

  code = GET_CODE (op);

  /* If the first operand is not a register, swap the operand order
     in the template and the comparison to match.  */
  if (! REG_P (XEXP (op, 0)))
    {
      code = swap_condition (code);
      operands = "%3,%2";
    }
  else
    operands = "%2,%3";

  /* Work out which way this really branches.  */
  if (really_reversed)
    code = reverse_condition (code);

  /* Map the rtx comparison code onto the machine condition suffix.  */
  switch (code)
    {
    case EQ: ccode = "z"; break;
    case NE: ccode = "nz"; break;
    case GE: ccode = "ge"; break;
    case LT: ccode = "lt"; break;
    case GT: ccode = "gt"; break;
    case LE: ccode = "le"; break;
    case GEU: ccode = "nc"; break;
    case LTU: ccode = "c"; break;
    case GTU: ccode = "hi"; break;
    case LEU: ccode = "ls"; break;

    default:
      gcc_unreachable ();
    }

  /* Long form: inverted branch over a "jmpf" to the real target.  */
  if (need_longbranch)
    templ = "b%s %s,.+8 | jmpf %s";
  else
    templ = "b%s %s,%s";
  sprintf (string, templ, ccode, operands, label);

  return string;
}
372
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label, but suitable for the tail of a
   SImode branch.

   OP is the conditional expression (OP is never NULL_RTX).

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.  */

char *
xstormy16_output_cbranch_si (rtx op, const char *label, int reversed,
			     rtx_insn *insn)
{
  /* Static buffer returned to the caller; overwritten on each call.  */
  static char string[64];
  /* Long form needed when the length attribute is 8 bytes or more.  */
  int need_longbranch = get_attr_length (insn) >= 8;
  /* Long branches invert the test and jump around a "jmpf".  */
  int really_reversed = reversed ^ need_longbranch;
  const char *ccode;
  const char *templ;
  char prevop[16];
  enum rtx_code code;

  code = GET_CODE (op);

  /* Work out which way this really branches.  */
  if (really_reversed)
    code = reverse_condition (code);

  switch (code)
    {
    case EQ: ccode = "z"; break;
    case NE: ccode = "nz"; break;
    case GE: ccode = "ge"; break;
    case LT: ccode = "lt"; break;
    case GEU: ccode = "nc"; break;
    case LTU: ccode = "c"; break;

    /* The missing codes above should never be generated.  */
    default:
      gcc_unreachable ();
    }

  /* Build the instruction that completes the SImode comparison just
     before the branch.  */
  switch (code)
    {
    case EQ: case NE:
      {
	int regnum;

	gcc_assert (REG_P (XEXP (op, 0)));

	/* OR the two halves together so the z/nz test covers the full
	   value (presumably OP1 is zero here -- the nonzero case is
	   split earlier; confirm against xstormy16_emit_cbranch).  */
	regnum = REGNO (XEXP (op, 0));
	sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
      }
      break;

    case GE: case LT: case GEU: case LTU:
      /* Ordered compares finish with a subtract-with-carry of the
	 high words.  */
      strcpy (prevop, "sbc %2,%3");
      break;

    default:
      gcc_unreachable ();
    }

  /* Long form: inverted branch over a "jmpf" to the real target.  */
  if (need_longbranch)
    templ = "%s | b%s .+6 | jmpf %s";
  else
    templ = "%s | b%s %s";
  sprintf (string, templ, prevop, ccode, label);

  return string;
}
444 \f
445 /* Many machines have some registers that cannot be copied directly to or from
446 memory or even from other types of registers. An example is the `MQ'
447 register, which on most machines, can only be copied to or from general
448 registers, but not memory. Some machines allow copying all registers to and
449 from memory, but require a scratch register for stores to some memory
450 locations (e.g., those with symbolic address on the RT, and those with
451 certain symbolic address on the SPARC when compiling PIC). In some cases,
452 both an intermediate and a scratch register are required.
453
454 You should define these macros to indicate to the reload phase that it may
455 need to allocate at least one register for a reload in addition to the
456 register to contain the data. Specifically, if copying X to a register
457 RCLASS in MODE requires an intermediate register, you should define
458 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
459 whose registers can be used as intermediate registers or scratch registers.
460
461 If copying a register RCLASS in MODE to X requires an intermediate or scratch
462 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
463 largest register class required. If the requirements for input and output
464 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
465 instead of defining both macros identically.
466
467 The values returned by these macros are often `GENERAL_REGS'. Return
468 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
469 to or from a register of RCLASS in MODE without requiring a scratch register.
470 Do not define this macro if it would always return `NO_REGS'.
471
472 If a scratch register is required (either with or without an intermediate
473 register), you should define patterns for `reload_inM' or `reload_outM', as
474 required.. These patterns, which will normally be implemented with a
475 `define_expand', should be similar to the `movM' patterns, except that
476 operand 2 is the scratch register.
477
478 Define constraints for the reload register and scratch register that contain
479 a single register class. If the original reload register (whose class is
480 RCLASS) can meet the constraint given in the pattern, the value returned by
481 these macros is used for the class of the scratch register. Otherwise, two
482 additional reload registers are required. Their classes are obtained from
483 the constraints in the insn pattern.
484
485 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
486 either be in a hard register or in memory. Use `true_regnum' to find out;
487 it will return -1 if the pseudo is in memory and the hard register number if
488 it is in a register.
489
490 These macros should not be used in the case where a particular class of
491 registers can only be copied to memory and not to another class of
492 registers. In that case, secondary reload registers are not needed and
493 would not be helpful. Instead, a stack location must be used to perform the
494 copy and the `movM' pattern should use memory as an intermediate storage.
495 This case often occurs between floating-point and general registers. */
496
497 enum reg_class
498 xstormy16_secondary_reload_class (enum reg_class rclass,
499 machine_mode mode ATTRIBUTE_UNUSED,
500 rtx x)
501 {
502 /* This chip has the interesting property that only the first eight
503 registers can be moved to/from memory. */
504 if ((MEM_P (x)
505 || ((GET_CODE (x) == SUBREG || REG_P (x))
506 && (true_regnum (x) == -1
507 || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
508 && ! reg_class_subset_p (rclass, EIGHT_REGS))
509 return EIGHT_REGS;
510
511 return NO_REGS;
512 }
513
514 /* Worker function for TARGET_PREFERRED_RELOAD_CLASS
515 and TARGET_PREFERRED_OUTPUT_RELOAD_CLASS. */
516
517 static reg_class_t
518 xstormy16_preferred_reload_class (rtx x, reg_class_t rclass)
519 {
520 if (rclass == GENERAL_REGS && MEM_P (x))
521 return EIGHT_REGS;
522
523 return rclass;
524 }
525
526 /* Predicate for symbols and addresses that reflect special 8-bit
527 addressing. */
528
529 int
530 xstormy16_below100_symbol (rtx x,
531 machine_mode mode ATTRIBUTE_UNUSED)
532 {
533 if (GET_CODE (x) == CONST)
534 x = XEXP (x, 0);
535 if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
536 x = XEXP (x, 0);
537
538 if (GET_CODE (x) == SYMBOL_REF)
539 return (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_XSTORMY16_BELOW100) != 0;
540
541 if (CONST_INT_P (x))
542 {
543 HOST_WIDE_INT i = INTVAL (x);
544
545 if ((i >= 0x0000 && i <= 0x00ff)
546 || (i >= 0x7f00 && i <= 0x7fff))
547 return 1;
548 }
549 return 0;
550 }
551
552 /* Likewise, but only for non-volatile MEMs, for patterns where the
553 MEM will get split into smaller sized accesses. */
554
555 int
556 xstormy16_splittable_below100_operand (rtx x, machine_mode mode)
557 {
558 if (MEM_P (x) && MEM_VOLATILE_P (x))
559 return 0;
560 return xstormy16_below100_operand (x, mode);
561 }
562
563 /* Expand an 8-bit IOR. This either detects the one case we can
564 actually do, or uses a 16-bit IOR. */
565
566 void
567 xstormy16_expand_iorqi3 (rtx *operands)
568 {
569 rtx in, out, outsub, val;
570
571 out = operands[0];
572 in = operands[1];
573 val = operands[2];
574
575 if (xstormy16_onebit_set_operand (val, QImode))
576 {
577 if (!xstormy16_below100_or_register (in, QImode))
578 in = copy_to_mode_reg (QImode, in);
579 if (!xstormy16_below100_or_register (out, QImode))
580 out = gen_reg_rtx (QImode);
581 emit_insn (gen_iorqi3_internal (out, in, val));
582 if (out != operands[0])
583 emit_move_insn (operands[0], out);
584 return;
585 }
586
587 if (! REG_P (in))
588 in = copy_to_mode_reg (QImode, in);
589
590 if (! REG_P (val) && ! CONST_INT_P (val))
591 val = copy_to_mode_reg (QImode, val);
592
593 if (! REG_P (out))
594 out = gen_reg_rtx (QImode);
595
596 in = simplify_gen_subreg (HImode, in, QImode, 0);
597 outsub = simplify_gen_subreg (HImode, out, QImode, 0);
598
599 if (! CONST_INT_P (val))
600 val = simplify_gen_subreg (HImode, val, QImode, 0);
601
602 emit_insn (gen_iorhi3 (outsub, in, val));
603
604 if (out != operands[0])
605 emit_move_insn (operands[0], out);
606 }
607
608 /* Expand an 8-bit AND. This either detects the one case we can
609 actually do, or uses a 16-bit AND. */
610
611 void
612 xstormy16_expand_andqi3 (rtx *operands)
613 {
614 rtx in, out, outsub, val;
615
616 out = operands[0];
617 in = operands[1];
618 val = operands[2];
619
620 if (xstormy16_onebit_clr_operand (val, QImode))
621 {
622 if (!xstormy16_below100_or_register (in, QImode))
623 in = copy_to_mode_reg (QImode, in);
624 if (!xstormy16_below100_or_register (out, QImode))
625 out = gen_reg_rtx (QImode);
626 emit_insn (gen_andqi3_internal (out, in, val));
627 if (out != operands[0])
628 emit_move_insn (operands[0], out);
629 return;
630 }
631
632 if (! REG_P (in))
633 in = copy_to_mode_reg (QImode, in);
634
635 if (! REG_P (val) && ! CONST_INT_P (val))
636 val = copy_to_mode_reg (QImode, val);
637
638 if (! REG_P (out))
639 out = gen_reg_rtx (QImode);
640
641 in = simplify_gen_subreg (HImode, in, QImode, 0);
642 outsub = simplify_gen_subreg (HImode, out, QImode, 0);
643
644 if (! CONST_INT_P (val))
645 val = simplify_gen_subreg (HImode, val, QImode, 0);
646
647 emit_insn (gen_andhi3 (outsub, in, val));
648
649 if (out != operands[0])
650 emit_move_insn (operands[0], out);
651 }
652
/* True iff X is a CONST_INT whose value plus OFFSET fits in the
   signed 12-bit range [-2048, 2047] used for reg+offset addressing.  */
#define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET)				\
  (CONST_INT_P (X)							\
   && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)

/* True iff X is a CONST_INT that, plus OFFSET, is a valid absolute
   address: non-negative, below 0x8000, and within either the low
   0x100 bytes or the 0x7F00..0x7FFF region.  */
#define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET)			\
  (CONST_INT_P (X)							\
   && INTVAL (X) + (OFFSET) >= 0					\
   && INTVAL (X) + (OFFSET) < 0x8000					\
   && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
662
663 bool
664 xstormy16_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
665 rtx x, bool strict)
666 {
667 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
668 return true;
669
670 if (GET_CODE (x) == PLUS
671 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
672 {
673 x = XEXP (x, 0);
674 /* PR 31232: Do not allow INT+INT as an address. */
675 if (CONST_INT_P (x))
676 return false;
677 }
678
679 if ((GET_CODE (x) == PRE_MODIFY && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
680 || GET_CODE (x) == POST_INC
681 || GET_CODE (x) == PRE_DEC)
682 x = XEXP (x, 0);
683
684 if (REG_P (x)
685 && REGNO_OK_FOR_BASE_P (REGNO (x))
686 && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
687 return true;
688
689 if (xstormy16_below100_symbol (x, mode))
690 return true;
691
692 return false;
693 }
694
695 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P.
696
697 On this chip, this is true if the address is valid with an offset
698 of 0 but not of 6, because in that case it cannot be used as an
699 address for DImode or DFmode, or if the address is a post-increment
700 or pre-decrement address. */
701
702 static bool
703 xstormy16_mode_dependent_address_p (const_rtx x,
704 addr_space_t as ATTRIBUTE_UNUSED)
705 {
706 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
707 && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
708 return true;
709
710 if (GET_CODE (x) == PLUS
711 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
712 && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
713 return true;
714
715 /* Auto-increment addresses are now treated generically in recog.c. */
716 return false;
717 }
718
719 int
720 short_memory_operand (rtx x, machine_mode mode)
721 {
722 if (! memory_operand (x, mode))
723 return 0;
724 return (GET_CODE (XEXP (x, 0)) != PLUS);
725 }
726
/* Splitter for the 'move' patterns, for modes not directly implemented
   by hardware.  Emit insns to copy a value of mode MODE from SRC to
   DEST.

   This function is only called when reload_completed.  */

void
xstormy16_split_move (machine_mode mode, rtx dest, rtx src)
{
  int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
  int direction, end, i;
  int src_modifies = 0;		/* SRC address has side effects.  */
  int dest_modifies = 0;	/* DEST address has side effects.  */
  int src_volatile = 0;
  int dest_volatile = 0;
  rtx mem_operand;		/* Address of the (single) MEM operand.  */
  rtx auto_inc_reg_rtx = NULL_RTX;

  /* Check initial conditions.  */
  gcc_assert (reload_completed
	      && mode != QImode && mode != HImode
	      && nonimmediate_operand (dest, mode)
	      && general_operand (src, mode));

  /* This case is not supported below, and shouldn't be generated.  */
  gcc_assert (! MEM_P (dest) || ! MEM_P (src));

  /* This case is very very bad after reload, so trap it now.  */
  gcc_assert (GET_CODE (dest) != SUBREG && GET_CODE (src) != SUBREG);

  /* The general idea is to copy by words, offsetting the source and
     destination.  Normally the least-significant word will be copied
     first, but for pre-dec operations it's better to copy the
     most-significant word first.  Only one operand can be a pre-dec
     or post-inc operand.

     It's also possible that the copy overlaps so that the direction
     must be reversed.  */
  direction = 1;

  if (MEM_P (dest))
    {
      mem_operand = XEXP (dest, 0);
      dest_modifies = side_effects_p (mem_operand);
      if (auto_inc_p (mem_operand))
	auto_inc_reg_rtx = XEXP (mem_operand, 0);
      dest_volatile = MEM_VOLATILE_P (dest);
      /* Clear the volatile flag on a local copy so the per-word MEMs
	 built below can carry it individually instead.  */
      if (dest_volatile)
	{
	  dest = copy_rtx (dest);
	  MEM_VOLATILE_P (dest) = 0;
	}
    }
  else if (MEM_P (src))
    {
      mem_operand = XEXP (src, 0);
      src_modifies = side_effects_p (mem_operand);
      if (auto_inc_p (mem_operand))
	auto_inc_reg_rtx = XEXP (mem_operand, 0);
      src_volatile = MEM_VOLATILE_P (src);
      if (src_volatile)
	{
	  src = copy_rtx (src);
	  MEM_VOLATILE_P (src) = 0;
	}
    }
  else
    mem_operand = NULL_RTX;

  if (mem_operand == NULL_RTX)
    {
      /* Register-to-register copy: if the ranges overlap and the
	 destination starts above the source, copy high-to-low so no
	 source word is clobbered before it is read.  */
      if (REG_P (src)
	  && REG_P (dest)
	  && reg_overlap_mentioned_p (dest, src)
	  && REGNO (dest) > REGNO (src))
	direction = -1;
    }
  else if (GET_CODE (mem_operand) == PRE_DEC
	   || (GET_CODE (mem_operand) == PLUS
	       && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
    /* Pre-decrement addressing walks downward through memory.  */
    direction = -1;
  else if (MEM_P (src) && reg_overlap_mentioned_p (dest, src))
    {
      /* Loading into registers that appear in the source address:
	 choose the direction that overwrites the address register on
	 the final word move.  */
      int regno;

      gcc_assert (REG_P (dest));
      regno = REGNO (dest);

      gcc_assert (refers_to_regno_p (regno, regno + num_words,
				     mem_operand, 0));

      if (refers_to_regno_p (regno, regno + 1, mem_operand, 0))
	direction = -1;
      else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
				  mem_operand, 0))
	direction = 1;
      else
	/* This means something like
	   (set (reg:DI r0) (mem:DI (reg:HI r1)))
	   which we'd need to support by doing the set of the second word
	   last.  */
	gcc_unreachable ();
    }

  /* Emit one word-sized move per word, walking in DIRECTION.  */
  end = direction < 0 ? -1 : num_words;
  for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
    {
      rtx w_src, w_dest, insn;

      /* For a side-effecting address, re-use the auto-modify MEM for
	 every word; otherwise take the I'th word subreg.  */
      if (src_modifies)
	w_src = gen_rtx_MEM (word_mode, mem_operand);
      else
	w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
      if (src_volatile)
	MEM_VOLATILE_P (w_src) = 1;
      if (dest_modifies)
	w_dest = gen_rtx_MEM (word_mode, mem_operand);
      else
	w_dest = simplify_gen_subreg (word_mode, dest, mode,
				      i * UNITS_PER_WORD);
      if (dest_volatile)
	MEM_VOLATILE_P (w_dest) = 1;

      /* The simplify_subreg calls must always be able to simplify.  */
      gcc_assert (GET_CODE (w_src) != SUBREG
		  && GET_CODE (w_dest) != SUBREG);

      insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
      /* Note the auto-increment register on each emitted insn.  */
      if (auto_inc_reg_rtx)
	REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
					    auto_inc_reg_rtx,
					    REG_NOTES (insn));
    }
}
861
/* Expander for the 'move' patterns.  Emit insns to copy a value of
   mode MODE from SRC to DEST.  */

void
xstormy16_expand_move (machine_mode mode, rtx dest, rtx src)
{
  /* Rewrite a PRE_MODIFY destination address into an explicit address
     update (with carry clobber) followed by a plain MEM access.  */
  if (MEM_P (dest) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
    {
      rtx pmv = XEXP (dest, 0);
      rtx dest_reg = XEXP (pmv, 0);
      rtx dest_mod = XEXP (pmv, 1);
      rtx set = gen_rtx_SET (Pmode, dest_reg, dest_mod);
      rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));

      dest = gen_rtx_MEM (mode, dest_reg);
      emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
    }
  /* Likewise for a PRE_MODIFY source address.  */
  else if (MEM_P (src) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
    {
      rtx pmv = XEXP (src, 0);
      rtx src_reg = XEXP (pmv, 0);
      rtx src_mod = XEXP (pmv, 1);
      rtx set = gen_rtx_SET (Pmode, src_reg, src_mod);
      rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));

      src = gen_rtx_MEM (mode, src_reg);
      emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
    }

  /* There are only limited immediate-to-memory move instructions.  */
  if (! reload_in_progress
      && ! reload_completed
      && MEM_P (dest)
      && (! CONST_INT_P (XEXP (dest, 0))
	  || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
      && ! xstormy16_below100_operand (dest, mode)
      && ! REG_P (src)
      && GET_CODE (src) != SUBREG)
    src = copy_to_mode_reg (mode, src);

  /* Don't emit something we would immediately split.  */
  if (reload_completed
      && mode != HImode && mode != QImode)
    {
      xstormy16_split_move (mode, dest, src);
      return;
    }

  emit_insn (gen_rtx_SET (VOIDmode, dest, src));
}
912 \f
/* Stack Layout:

   The stack is laid out as follows:

SP->
FP->	Local variables
	Register save area (up to 4 words)
	Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)

AP->	Return address (two words)
	9th procedure parameter word
	10th procedure parameter word
	...
	last procedure parameter word

   The frame pointer location is tuned to make it most likely that all
   parameters and local variables can be accessed using a load-indexed
   instruction.  */

/* A structure to describe the layout.  */
struct xstormy16_stack_layout
{
  /* Size of the topmost three items on the stack.  */
  int locals_size;		/* Local variables.  */
  int register_save_size;	/* Callee-saved registers.  */
  int stdarg_save_size;		/* Argument registers (stdarg only).  */
  /* Sum of the above items.  */
  int frame_size;
  /* Various offsets.  */
  int first_local_minus_ap;	/* First local relative to AP.  */
  int sp_minus_fp;		/* SP relative to FP.  */
  int fp_minus_ap;		/* FP relative to AP.  */
};
946
/* Does REGNO need to be saved?  A register needs saving when it is
   call-saved and ever live, or -- in an interrupt function (IFUN) --
   when it is any non-fixed call-used register other than the carry
   flag that is live or may be clobbered by a callee (non-leaf).  */
#define REG_NEEDS_SAVE(REGNUM, IFUN)					\
  ((df_regs_ever_live_p (REGNUM) && ! call_used_regs[REGNUM])		\
   || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM]		\
       && (REGNUM != CARRY_REGNUM)					\
       && (df_regs_ever_live_p (REGNUM) || ! crtl->is_leaf)))
953
/* Compute the stack layout (see the "Stack Layout" diagram above).
   Fills in and returns an xstormy16_stack_layout describing the
   current function's frame.  */

struct xstormy16_stack_layout
xstormy16_compute_stack_layout (void)
{
  struct xstormy16_stack_layout layout;
  int regno;
  const int ifun = xstormy16_interrupt_function_p ();

  layout.locals_size = get_frame_size ();

  /* One word for every register the prologue will push.  */
  layout.register_save_size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (REG_NEEDS_SAVE (regno, ifun))
      layout.register_save_size += UNITS_PER_WORD;

  /* stdarg functions additionally dump all argument registers.  */
  if (cfun->stdarg)
    layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
  else
    layout.stdarg_save_size = 0;

  layout.frame_size = (layout.locals_size
		       + layout.register_save_size
		       + layout.stdarg_save_size);

  /* Place the frame pointer so that frame and incoming arguments stay
     within a 2048-byte reach of it when possible.  (NOTE(review): the
     args.size != -1 test presumably guards an "unknown size" marker
     -- confirm against crtl->args documentation.)  */
  if (crtl->args.size <= 2048 && crtl->args.size != -1)
    {
      if (layout.frame_size - INCOMING_FRAME_SP_OFFSET
	  + crtl->args.size <= 2048)
	layout.fp_minus_ap = layout.frame_size - INCOMING_FRAME_SP_OFFSET;
      else
	layout.fp_minus_ap = 2048 - crtl->args.size;
    }
  else
    layout.fp_minus_ap = (layout.stdarg_save_size
			  + layout.register_save_size
			  - INCOMING_FRAME_SP_OFFSET);
  /* Derive the remaining offsets from fp_minus_ap.  */
  layout.sp_minus_fp = (layout.frame_size - INCOMING_FRAME_SP_OFFSET
			- layout.fp_minus_ap);
  layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
  return layout;
}
996
997 /* Worker function for TARGET_CAN_ELIMINATE. */
998
999 static bool
1000 xstormy16_can_eliminate (const int from, const int to)
1001 {
1002 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
1003 ? ! frame_pointer_needed
1004 : true);
1005 }
1006
1007 /* Determine how all the special registers get eliminated. */
1008
1009 int
1010 xstormy16_initial_elimination_offset (int from, int to)
1011 {
1012 struct xstormy16_stack_layout layout;
1013 int result;
1014
1015 layout = xstormy16_compute_stack_layout ();
1016
1017 if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1018 result = layout.sp_minus_fp - layout.locals_size;
1019 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1020 result = - layout.locals_size;
1021 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
1022 result = - layout.fp_minus_ap;
1023 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1024 result = - (layout.sp_minus_fp + layout.fp_minus_ap);
1025 else
1026 gcc_unreachable ();
1027
1028 return result;
1029 }
1030
1031 static rtx
1032 emit_addhi3_postreload (rtx dest, rtx src0, rtx src1)
1033 {
1034 rtx set, clobber, insn;
1035
1036 set = gen_rtx_SET (VOIDmode, dest, gen_rtx_PLUS (HImode, src0, src1));
1037 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
1038 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
1039 return insn;
1040 }
1041
1042 /* Called after register allocation to add any instructions needed for
1043 the prologue. Using a prologue insn is favored compared to putting
1044 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1045 since it allows the scheduler to intermix instructions with the
1046 saves of the caller saved registers. In some cases, it might be
1047 necessary to emit a barrier instruction as the last insn to prevent
1048 such scheduling.
1049
1050 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1051 so that the debug info generation code can handle them properly. */
1052
1053 void
1054 xstormy16_expand_prologue (void)
1055 {
1056 struct xstormy16_stack_layout layout;
1057 int regno;
1058 rtx insn;
1059 rtx mem_push_rtx;
1060 const int ifun = xstormy16_interrupt_function_p ();
1061
1062 mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
1063 mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);
1064
1065 layout = xstormy16_compute_stack_layout ();
1066
1067 if (layout.locals_size >= 32768)
1068 error ("local variable memory requirements exceed capacity");
1069
1070 if (flag_stack_usage_info)
1071 current_function_static_stack_size = layout.frame_size;
1072
1073 /* Save the argument registers if necessary. */
1074 if (layout.stdarg_save_size)
1075 for (regno = FIRST_ARGUMENT_REGISTER;
1076 regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
1077 regno++)
1078 {
1079 rtx dwarf;
1080 rtx reg = gen_rtx_REG (HImode, regno);
1081
1082 insn = emit_move_insn (mem_push_rtx, reg);
1083 RTX_FRAME_RELATED_P (insn) = 1;
1084
1085 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1086
1087 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1088 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1089 reg);
1090 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1091 plus_constant (Pmode,
1092 stack_pointer_rtx,
1093 GET_MODE_SIZE (Pmode)));
1094 add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
1095 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1096 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1097 }
1098
1099 /* Push each of the registers to save. */
1100 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1101 if (REG_NEEDS_SAVE (regno, ifun))
1102 {
1103 rtx dwarf;
1104 rtx reg = gen_rtx_REG (HImode, regno);
1105
1106 insn = emit_move_insn (mem_push_rtx, reg);
1107 RTX_FRAME_RELATED_P (insn) = 1;
1108
1109 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1110
1111 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1112 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1113 reg);
1114 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1115 plus_constant (Pmode,
1116 stack_pointer_rtx,
1117 GET_MODE_SIZE (Pmode)));
1118 add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
1119 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1120 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1121 }
1122
1123 /* It's just possible that the SP here might be what we need for
1124 the new FP... */
1125 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1126 {
1127 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1128 RTX_FRAME_RELATED_P (insn) = 1;
1129 }
1130
1131 /* Allocate space for local variables. */
1132 if (layout.locals_size)
1133 {
1134 insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1135 GEN_INT (layout.locals_size));
1136 RTX_FRAME_RELATED_P (insn) = 1;
1137 }
1138
1139 /* Set up the frame pointer, if required. */
1140 if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
1141 {
1142 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1143 RTX_FRAME_RELATED_P (insn) = 1;
1144
1145 if (layout.sp_minus_fp)
1146 {
1147 insn = emit_addhi3_postreload (hard_frame_pointer_rtx,
1148 hard_frame_pointer_rtx,
1149 GEN_INT (- layout.sp_minus_fp));
1150 RTX_FRAME_RELATED_P (insn) = 1;
1151 }
1152 }
1153 }
1154
1155 /* Do we need an epilogue at all? */
1156
1157 int
1158 direct_return (void)
1159 {
1160 return (reload_completed
1161 && xstormy16_compute_stack_layout ().frame_size == 0
1162 && ! xstormy16_interrupt_function_p ());
1163 }
1164
/* Called after register allocation to add any instructions needed for
   the epilogue.  Using an epilogue insn is favored compared to putting
   all of the instructions in the TARGET_ASM_FUNCTION_EPILOGUE macro,
   since it allows the scheduler to intermix instructions with the
   restores of the caller saved registers.  In some cases, it might be
   necessary to emit a barrier instruction as the last insn to prevent
   such scheduling.  */

void
xstormy16_expand_epilogue (void)
{
  struct xstormy16_stack_layout layout;
  rtx mem_pop_rtx;
  int regno;
  const int ifun = xstormy16_interrupt_function_p ();

  /* (mem:HI (pre_dec:HI sp)) -- each load through this pops one word.  */
  mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
  mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);

  layout = xstormy16_compute_stack_layout ();

  /* Pop the stack for the locals.  */
  if (layout.locals_size)
    {
      /* If the frame pointer still points just past the register save
	 area, restoring SP from it is cheaper than an add.  */
      if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
	emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
      else
	emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
				GEN_INT (- layout.locals_size));
    }

  /* Restore any call-saved registers, in the reverse order of the
     pushes done by the prologue.  */
  for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
    if (REG_NEEDS_SAVE (regno, ifun))
      emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);

  /* Pop the stack for the stdarg save area.  */
  if (layout.stdarg_save_size)
    emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
			    GEN_INT (- layout.stdarg_save_size));

  /* Return.  Interrupt handlers need their own return insn.  */
  if (ifun)
    emit_jump_insn (gen_return_internal_interrupt ());
  else
    emit_jump_insn (gen_return_internal ());
}
1212
1213 int
1214 xstormy16_epilogue_uses (int regno)
1215 {
1216 if (reload_completed && call_used_regs[regno])
1217 {
1218 const int ifun = xstormy16_interrupt_function_p ();
1219 return REG_NEEDS_SAVE (regno, ifun);
1220 }
1221 return 0;
1222 }
1223
/* Worker for FUNCTION_PROFILER.  Profiling (-p/-pg) is not
   implemented for this target; issue a "sorry" diagnostic instead of
   emitting mcount code.  */

void
xstormy16_function_profiler (void)
{
  sorry ("function_profiler support");
}
1229 \f
1230 /* Update CUM to advance past an argument in the argument list. The
1231 values MODE, TYPE and NAMED describe that argument. Once this is
1232 done, the variable CUM is suitable for analyzing the *following*
1233 argument with `TARGET_FUNCTION_ARG', etc.
1234
1235 This function need not do anything if the argument in question was
1236 passed on the stack. The compiler knows how to track the amount of
1237 stack space used for arguments without any special help. However,
1238 it makes life easier for xstormy16_build_va_list if it does update
1239 the word count. */
1240
1241 static void
1242 xstormy16_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
1243 const_tree type, bool named ATTRIBUTE_UNUSED)
1244 {
1245 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1246
1247 /* If an argument would otherwise be passed partially in registers,
1248 and partially on the stack, the whole of it is passed on the
1249 stack. */
1250 if (*cum < NUM_ARGUMENT_REGISTERS
1251 && *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1252 *cum = NUM_ARGUMENT_REGISTERS;
1253
1254 *cum += XSTORMY16_WORD_SIZE (type, mode);
1255 }
1256
1257 static rtx
1258 xstormy16_function_arg (cumulative_args_t cum_v, machine_mode mode,
1259 const_tree type, bool named ATTRIBUTE_UNUSED)
1260 {
1261 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1262
1263 if (mode == VOIDmode)
1264 return const0_rtx;
1265 if (targetm.calls.must_pass_in_stack (mode, type)
1266 || *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1267 return NULL_RTX;
1268 return gen_rtx_REG (mode, *cum + FIRST_ARGUMENT_REGISTER);
1269 }
1270
1271 /* Build the va_list type.
1272
1273 For this chip, va_list is a record containing a counter and a pointer.
1274 The counter is of type 'int' and indicates how many bytes
1275 have been used to date. The pointer indicates the stack position
1276 for arguments that have not been passed in registers.
1277 To keep the layout nice, the pointer is first in the structure. */
1278
1279 static tree
1280 xstormy16_build_builtin_va_list (void)
1281 {
1282 tree f_1, f_2, record, type_decl;
1283
1284 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
1285 type_decl = build_decl (BUILTINS_LOCATION,
1286 TYPE_DECL, get_identifier ("__va_list_tag"), record);
1287
1288 f_1 = build_decl (BUILTINS_LOCATION,
1289 FIELD_DECL, get_identifier ("base"),
1290 ptr_type_node);
1291 f_2 = build_decl (BUILTINS_LOCATION,
1292 FIELD_DECL, get_identifier ("count"),
1293 unsigned_type_node);
1294
1295 DECL_FIELD_CONTEXT (f_1) = record;
1296 DECL_FIELD_CONTEXT (f_2) = record;
1297
1298 TYPE_STUB_DECL (record) = type_decl;
1299 TYPE_NAME (record) = type_decl;
1300 TYPE_FIELDS (record) = f_1;
1301 DECL_CHAIN (f_1) = f_2;
1302
1303 layout_type (record);
1304
1305 return record;
1306 }
1307
/* Implement the stdarg/varargs va_start macro.  STDARG_P is nonzero if this
   is stdarg.h instead of varargs.h.  VALIST is the tree of the va_list
   variable to initialize.  NEXTARG is the machine independent notion of the
   'next' argument after the variable arguments.  */

static void
xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree t, u;

  if (xstormy16_interrupt_function_p ())
    error ("cannot use va_start in interrupt function");

  /* The two va_list fields: the overflow-area pointer and the byte
     counter (see xstormy16_build_builtin_va_list).  */
  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = DECL_CHAIN (f_base);

  base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
		  NULL_TREE);

  /* base = incoming argument pointer, biased by -INCOMING_FRAME_SP_OFFSET.  */
  t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
  u = build_int_cst (NULL_TREE, - INCOMING_FRAME_SP_OFFSET);
  u = fold_convert (TREE_TYPE (count), u);
  t = fold_build_pointer_plus (t, u);
  t = build2 (MODIFY_EXPR, TREE_TYPE (base), base, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* count = bytes already consumed by the named parameters that were
     passed in registers.  */
  t = build2 (MODIFY_EXPR, TREE_TYPE (count), count,
	      build_int_cst (NULL_TREE,
			     crtl->args.info * UNITS_PER_WORD));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
1344
/* Implement the stdarg/varargs va_arg macro.  VALIST is the variable
   of type va_list as a tree, TYPE is the type passed to va_arg.
   Note:  This algorithm is documented in stormy-abi.  */

static tree
xstormy16_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
				gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree count_tmp, addr, t;
  tree lab_gotaddr, lab_fromstack;
  int size, size_of_reg_args, must_stack;
  tree size_tree;

  /* The two va_list fields: the overflow-area pointer and the byte
     counter (see xstormy16_build_builtin_va_list).  */
  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = DECL_CHAIN (f_base);

  base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
		  NULL_TREE);

  must_stack = targetm.calls.must_pass_in_stack (TYPE_MODE (type), type);
  /* Every argument occupies a whole number of words.  */
  size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
  gimplify_expr (&size_tree, pre_p, NULL, is_gimple_val, fb_rvalue);

  size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;

  count_tmp = get_initialized_tmp_var (count, pre_p, NULL);
  lab_gotaddr = create_artificial_label (UNKNOWN_LOCATION);
  lab_fromstack = create_artificial_label (UNKNOWN_LOCATION);
  addr = create_tmp_var (ptr_type_node);

  if (!must_stack)
    {
      tree r;

      /* If count + size > size_of_reg_args the argument overflowed the
	 registers and lives on the stack: branch to lab_fromstack.
	 Otherwise it is in the register save area at base + count.  */
      t = fold_convert (TREE_TYPE (count), size_tree);
      t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
      r = fold_convert (TREE_TYPE (count), size_int (size_of_reg_args));
      t = build2 (GT_EXPR, boolean_type_node, t, r);
      t = build3 (COND_EXPR, void_type_node, t,
		  build1 (GOTO_EXPR, void_type_node, lab_fromstack),
		  NULL_TREE);
      gimplify_and_add (t, pre_p);

      t = fold_build_pointer_plus (base, count_tmp);
      gimplify_assign (addr, t, pre_p);

      t = build1 (GOTO_EXPR, void_type_node, lab_gotaddr);
      gimplify_and_add (t, pre_p);

      t = build1 (LABEL_EXPR, void_type_node, lab_fromstack);
      gimplify_and_add (t, pre_p);
    }

  /* Arguments larger than a word might need to skip over some
     registers, since arguments are either passed entirely in
     registers or entirely on the stack.  */
  size = PUSH_ROUNDING (int_size_in_bytes (type));
  if (size > 2 || size < 0 || must_stack)
    {
      tree r, u;

      /* If count_tmp < size_of_reg_args, raise it to size_of_reg_args
	 so the stack-address computation below skips the register
	 argument area entirely.  */
      r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
      u = build2 (MODIFY_EXPR, TREE_TYPE (count_tmp), count_tmp, r);

      t = fold_convert (TREE_TYPE (count), r);
      t = build2 (GE_EXPR, boolean_type_node, count_tmp, t);
      t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, u);
      gimplify_and_add (t, pre_p);
    }

  /* Compute the stack address of the argument:
     addr = base - (count_tmp - (size_of_reg_args
				 + INCOMING_FRAME_SP_OFFSET) + size),
     i.e. stack arguments are found at negative offsets from BASE
     (see stormy-abi for the layout).  */
  t = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
		+ INCOMING_FRAME_SP_OFFSET);
  t = fold_convert (TREE_TYPE (count), t);
  t = build2 (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
  t = build2 (PLUS_EXPR, TREE_TYPE (count), t,
	      fold_convert (TREE_TYPE (count), size_tree));
  t = fold_convert (TREE_TYPE (t), fold (t));
  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  t = fold_build_pointer_plus (base, t);
  gimplify_assign (addr, t, pre_p);

  t = build1 (LABEL_EXPR, void_type_node, lab_gotaddr);
  gimplify_and_add (t, pre_p);

  /* Advance the counter past this argument.  */
  t = fold_convert (TREE_TYPE (count), size_tree);
  t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
  gimplify_assign (count, t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);
  return build_va_arg_indirect_ref (addr);
}
1439
/* Worker function for TARGET_TRAMPOLINE_INIT.

   Fill in the trampoline at M_TRAMP (four HImode words) so that at
   run time it loads STATIC_CHAIN into the static chain register and
   jumps to FNDECL.  NOTE(review): the magic constants below are
   xstormy16 instruction encodings -- 0x3130 | STATIC_CHAIN_REGNUM
   appears to encode the load of the chain register and 0x0200 part of
   the far jump; confirm against the xstormy16 ISA manual.  */

static void
xstormy16_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
{
  rtx temp = gen_reg_rtx (HImode);
  rtx reg_fnaddr = gen_reg_rtx (HImode);
  rtx reg_addr, reg_addr_mem;

  /* REG_ADDR walks through the trampoline one HImode word at a time.  */
  reg_addr = copy_to_reg (XEXP (m_tramp, 0));
  reg_addr_mem = adjust_automodify_address (m_tramp, HImode, reg_addr, 0);

  /* Word 0: the instruction that loads the static chain register.  */
  emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  /* Word 1: the static chain value itself.  */
  emit_move_insn (temp, static_chain);
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  /* Word 2: jump opcode combined with the low byte of the target
     function's address.  */
  emit_move_insn (reg_fnaddr, XEXP (DECL_RTL (fndecl), 0));
  emit_move_insn (temp, reg_fnaddr);
  emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
  emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  /* Word 3: the remaining high bits of the target address.  */
  emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
  emit_move_insn (reg_addr_mem, reg_fnaddr);
}
1473
1474 /* Worker function for TARGET_FUNCTION_VALUE. */
1475
1476 static rtx
1477 xstormy16_function_value (const_tree valtype,
1478 const_tree func ATTRIBUTE_UNUSED,
1479 bool outgoing ATTRIBUTE_UNUSED)
1480 {
1481 machine_mode mode;
1482 mode = TYPE_MODE (valtype);
1483 PROMOTE_MODE (mode, 0, valtype);
1484 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1485 }
1486
/* Worker function for TARGET_LIBCALL_VALUE.

   Library call results use the same register as ordinary function
   values.  */

static rtx
xstormy16_libcall_value (machine_mode mode,
			 const_rtx fun ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}
1495
/* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.

   Only the single return-value register ever holds a function
   result.  */

static bool
xstormy16_function_value_regno_p (const unsigned int regno)
{
  return (regno == RETURN_VALUE_REGNUM);
}
1503
/* A C compound statement that outputs the assembler code for a thunk function,
   used to implement C++ virtual function calls with multiple inheritance.  The
   thunk acts as a wrapper around a virtual function, adjusting the implicit
   object parameter before handing control off to the real function.

   First, emit code to add the integer DELTA to the location that contains the
   incoming first argument.  Assume that this argument contains a pointer, and
   is the one used to pass the `this' pointer in C++.  This is the incoming
   argument *before* the function prologue, e.g. `%o0' on a sparc.  The
   addition must preserve the values of all other incoming arguments.

   After the addition, emit code to jump to FUNCTION, which is a
   `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does not touch
   the return address.  Hence returning from FUNCTION will return to whoever
   called the current `thunk'.

   The effect must be as if @var{function} had been called directly
   with the adjusted first argument.  This macro is responsible for
   emitting all of the code for a thunk function;
   TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
   not invoked.

   The THUNK_FNDECL is redundant.  (DELTA and FUNCTION have already been
   extracted from it.)  It might possibly be useful on some targets, but
   probably not.  */

static void
xstormy16_asm_output_mi_thunk (FILE *file,
			       tree thunk_fndecl ATTRIBUTE_UNUSED,
			       HOST_WIDE_INT delta,
			       HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
			       tree function)
{
  int regnum = FIRST_ARGUMENT_REGISTER;

  /* There might be a hidden first argument for a returned structure.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    regnum += 1;

  /* Adjust `this' in place; DELTA is masked to 16 bits since that is
     the register width.  */
  fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
  /* Tail-jump to the real function.  */
  fputs ("\tjmpf ", file);
  assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
  putc ('\n', file);
}
1548
1549 /* The purpose of this function is to override the default behavior of
1550 BSS objects. Normally, they go into .bss or .sbss via ".common"
1551 directives, but we need to override that and put them in
1552 .bss_below100. We can't just use a section override (like we do
1553 for .data_below100), because that makes them initialized rather
1554 than uninitialized. */
1555
1556 void
1557 xstormy16_asm_output_aligned_common (FILE *stream,
1558 tree decl,
1559 const char *name,
1560 int size,
1561 int align,
1562 int global)
1563 {
1564 rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
1565 rtx symbol;
1566
1567 if (mem != NULL_RTX
1568 && MEM_P (mem)
1569 && GET_CODE (symbol = XEXP (mem, 0)) == SYMBOL_REF
1570 && SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_XSTORMY16_BELOW100)
1571 {
1572 const char *name2;
1573 int p2align = 0;
1574
1575 switch_to_section (bss100_section);
1576
1577 while (align > 8)
1578 {
1579 align /= 2;
1580 p2align ++;
1581 }
1582
1583 name2 = default_strip_name_encoding (name);
1584 if (global)
1585 fprintf (stream, "\t.globl\t%s\n", name2);
1586 if (p2align)
1587 fprintf (stream, "\t.p2align %d\n", p2align);
1588 fprintf (stream, "\t.type\t%s, @object\n", name2);
1589 fprintf (stream, "\t.size\t%s, %d\n", name2, size);
1590 fprintf (stream, "%s:\n\t.space\t%d\n", name2, size);
1591 return;
1592 }
1593
1594 if (!global)
1595 {
1596 fprintf (stream, "\t.local\t");
1597 assemble_name (stream, name);
1598 fprintf (stream, "\n");
1599 }
1600 fprintf (stream, "\t.comm\t");
1601 assemble_name (stream, name);
1602 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
1603 }
1604
/* Implement TARGET_ASM_INIT_SECTIONS.

   Create the section used for uninitialized below100 data (see
   xstormy16_asm_output_aligned_common); @nobits keeps it out of the
   image, like .bss.  */

static void
xstormy16_asm_init_sections (void)
{
  bss100_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
			   output_section_asm_op,
			   "\t.section \".bss_below100\",\"aw\",@nobits");
}
1615
1616 /* Mark symbols with the "below100" attribute so that we can use the
1617 special addressing modes for them. */
1618
1619 static void
1620 xstormy16_encode_section_info (tree decl, rtx r, int first)
1621 {
1622 default_encode_section_info (decl, r, first);
1623
1624 if (TREE_CODE (decl) == VAR_DECL
1625 && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl))
1626 || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl))))
1627 {
1628 rtx symbol = XEXP (r, 0);
1629
1630 gcc_assert (GET_CODE (symbol) == SYMBOL_REF);
1631 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_XSTORMY16_BELOW100;
1632 }
1633 }
1634
1635 #undef TARGET_ASM_CONSTRUCTOR
1636 #define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
1637 #undef TARGET_ASM_DESTRUCTOR
1638 #define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
1639
1640 /* Output constructors and destructors. Just like
1641 default_named_section_asm_out_* but don't set the sections writable. */
1642
1643 static void
1644 xstormy16_asm_out_destructor (rtx symbol, int priority)
1645 {
1646 const char *section = ".dtors";
1647 char buf[16];
1648
1649 /* ??? This only works reliably with the GNU linker. */
1650 if (priority != DEFAULT_INIT_PRIORITY)
1651 {
1652 sprintf (buf, ".dtors.%.5u",
1653 /* Invert the numbering so the linker puts us in the proper
1654 order; constructors are run from right to left, and the
1655 linker sorts in increasing order. */
1656 MAX_INIT_PRIORITY - priority);
1657 section = buf;
1658 }
1659
1660 switch_to_section (get_section (section, 0, NULL));
1661 assemble_align (POINTER_SIZE);
1662 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1663 }
1664
1665 static void
1666 xstormy16_asm_out_constructor (rtx symbol, int priority)
1667 {
1668 const char *section = ".ctors";
1669 char buf[16];
1670
1671 /* ??? This only works reliably with the GNU linker. */
1672 if (priority != DEFAULT_INIT_PRIORITY)
1673 {
1674 sprintf (buf, ".ctors.%.5u",
1675 /* Invert the numbering so the linker puts us in the proper
1676 order; constructors are run from right to left, and the
1677 linker sorts in increasing order. */
1678 MAX_INIT_PRIORITY - priority);
1679 section = buf;
1680 }
1681
1682 switch_to_section (get_section (section, 0, NULL));
1683 assemble_align (POINTER_SIZE);
1684 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1685 }
1686 \f
1687 /* Worker function for TARGET_PRINT_OPERAND_ADDRESS.
1688
1689 Print a memory address as an operand to reference that memory location. */
1690
1691 static void
1692 xstormy16_print_operand_address (FILE *file, rtx address)
1693 {
1694 HOST_WIDE_INT offset;
1695 int pre_dec, post_inc;
1696
1697 /* There are a few easy cases. */
1698 if (CONST_INT_P (address))
1699 {
1700 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
1701 return;
1702 }
1703
1704 if (CONSTANT_P (address) || LABEL_P (address))
1705 {
1706 output_addr_const (file, address);
1707 return;
1708 }
1709
1710 /* Otherwise, it's hopefully something of the form
1711 (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...)). */
1712 if (GET_CODE (address) == PLUS)
1713 {
1714 gcc_assert (CONST_INT_P (XEXP (address, 1)));
1715 offset = INTVAL (XEXP (address, 1));
1716 address = XEXP (address, 0);
1717 }
1718 else
1719 offset = 0;
1720
1721 pre_dec = (GET_CODE (address) == PRE_DEC);
1722 post_inc = (GET_CODE (address) == POST_INC);
1723 if (pre_dec || post_inc)
1724 address = XEXP (address, 0);
1725
1726 gcc_assert (REG_P (address));
1727
1728 fputc ('(', file);
1729 if (pre_dec)
1730 fputs ("--", file);
1731 fputs (reg_names [REGNO (address)], file);
1732 if (post_inc)
1733 fputs ("++", file);
1734 if (offset != 0)
1735 fprintf (file, "," HOST_WIDE_INT_PRINT_DEC, offset);
1736 fputc (')', file);
1737 }
1738
1739 /* Worker function for TARGET_PRINT_OPERAND.
1740
1741 Print an operand to an assembler instruction. */
1742
1743 static void
1744 xstormy16_print_operand (FILE *file, rtx x, int code)
1745 {
1746 switch (code)
1747 {
1748 case 'B':
1749 /* There is either one bit set, or one bit clear, in X.
1750 Print it preceded by '#'. */
1751 {
1752 static int bits_set[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
1753 HOST_WIDE_INT xx = 1;
1754 HOST_WIDE_INT l;
1755
1756 if (CONST_INT_P (x))
1757 xx = INTVAL (x);
1758 else
1759 output_operand_lossage ("'B' operand is not constant");
1760
1761 /* GCC sign-extends masks with the MSB set, so we have to
1762 detect all the cases that differ only in sign extension
1763 beyond the bits we care about. Normally, the predicates
1764 and constraints ensure that we have the right values. This
1765 works correctly for valid masks. */
1766 if (bits_set[xx & 7] <= 1)
1767 {
1768 /* Remove sign extension bits. */
1769 if ((~xx & ~(HOST_WIDE_INT)0xff) == 0)
1770 xx &= 0xff;
1771 else if ((~xx & ~(HOST_WIDE_INT)0xffff) == 0)
1772 xx &= 0xffff;
1773 l = exact_log2 (xx);
1774 }
1775 else
1776 {
1777 /* Add sign extension bits. */
1778 if ((xx & ~(HOST_WIDE_INT)0xff) == 0)
1779 xx |= ~(HOST_WIDE_INT)0xff;
1780 else if ((xx & ~(HOST_WIDE_INT)0xffff) == 0)
1781 xx |= ~(HOST_WIDE_INT)0xffff;
1782 l = exact_log2 (~xx);
1783 }
1784
1785 if (l == -1)
1786 output_operand_lossage ("'B' operand has multiple bits set");
1787
1788 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
1789 return;
1790 }
1791
1792 case 'C':
1793 /* Print the symbol without a surrounding @fptr(). */
1794 if (GET_CODE (x) == SYMBOL_REF)
1795 assemble_name (file, XSTR (x, 0));
1796 else if (LABEL_P (x))
1797 output_asm_label (x);
1798 else
1799 xstormy16_print_operand_address (file, x);
1800 return;
1801
1802 case 'o':
1803 case 'O':
1804 /* Print the immediate operand less one, preceded by '#'.
1805 For 'O', negate it first. */
1806 {
1807 HOST_WIDE_INT xx = 0;
1808
1809 if (CONST_INT_P (x))
1810 xx = INTVAL (x);
1811 else
1812 output_operand_lossage ("'o' operand is not constant");
1813
1814 if (code == 'O')
1815 xx = -xx;
1816
1817 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
1818 return;
1819 }
1820
1821 case 'b':
1822 /* Print the shift mask for bp/bn. */
1823 {
1824 HOST_WIDE_INT xx = 1;
1825 HOST_WIDE_INT l;
1826
1827 if (CONST_INT_P (x))
1828 xx = INTVAL (x);
1829 else
1830 output_operand_lossage ("'B' operand is not constant");
1831
1832 l = 7 - xx;
1833
1834 fputs (IMMEDIATE_PREFIX, file);
1835 fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
1836 return;
1837 }
1838
1839 case 0:
1840 /* Handled below. */
1841 break;
1842
1843 default:
1844 output_operand_lossage ("xstormy16_print_operand: unknown code");
1845 return;
1846 }
1847
1848 switch (GET_CODE (x))
1849 {
1850 case REG:
1851 fputs (reg_names [REGNO (x)], file);
1852 break;
1853
1854 case MEM:
1855 xstormy16_print_operand_address (file, XEXP (x, 0));
1856 break;
1857
1858 default:
1859 /* Some kind of constant or label; an immediate operand,
1860 so prefix it with '#' for the assembler. */
1861 fputs (IMMEDIATE_PREFIX, file);
1862 output_addr_const (file, x);
1863 break;
1864 }
1865
1866 return;
1867 }
1868 \f
/* Expander for the `casesi' pattern.
   INDEX is the index of the switch statement.
   LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
     to the first table entry.
   RANGE is the number of table entries.
   TABLE is an ADDR_VEC that is the jump table.
   DEFAULT_LABEL is the address to branch to if INDEX is outside the
     range LOWER_BOUND to LOWER_BOUND + RANGE - 1.  */

void
xstormy16_expand_casesi (rtx index, rtx lower_bound, rtx range,
			 rtx table, rtx default_label)
{
  HOST_WIDE_INT range_i = INTVAL (range);
  rtx int_index;

  /* This code uses 'br', so it can deal only with tables of size up to
     8192 entries.  */
  if (range_i >= 8192)
    sorry ("switch statement of size %lu entries too large",
	   (unsigned long) range_i);

  /* Bias the index so the table starts at zero, then jump to the
     default label if it is out of range (the unsigned comparison also
     catches values below the lower bound).  */
  index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
			OPTAB_LIB_WIDEN);
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
			   default_label);
  int_index = gen_lowpart_common (HImode, index);
  /* Shift left by 2 to scale the index to the size of the 'jmpf'
     entries emitted by xstormy16_output_addr_vec.  */
  emit_insn (gen_ashlhi3 (int_index, int_index, const2_rtx));
  emit_jump_insn (gen_tablejump_pcrel (int_index, table));
}
1899
1900 /* Output an ADDR_VEC. It is output as a sequence of 'jmpf'
1901 instructions, without label or alignment or any other special
1902 constructs. We know that the previous instruction will be the
1903 `tablejump_pcrel' output above.
1904
1905 TODO: it might be nice to output 'br' instructions if they could
1906 all reach. */
1907
1908 void
1909 xstormy16_output_addr_vec (FILE *file, rtx label ATTRIBUTE_UNUSED, rtx table)
1910 {
1911 int vlen, idx;
1912
1913 switch_to_section (current_function_section ());
1914
1915 vlen = XVECLEN (table, 0);
1916 for (idx = 0; idx < vlen; idx++)
1917 {
1918 fputs ("\tjmpf ", file);
1919 output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
1920 fputc ('\n', file);
1921 }
1922 }
1923 \f
/* Expander for the `call' patterns.
   RETVAL is the RTL for the return register or NULL for void functions.
   DEST is the function to call, expressed as a MEM.
   COUNTER is ignored.  */

void
xstormy16_expand_call (rtx retval, rtx dest, rtx counter)
{
  rtx call, temp;
  machine_mode mode;

  gcc_assert (MEM_P (dest));
  dest = XEXP (dest, 0);

  /* Indirect call targets must live in a register.  */
  if (! CONSTANT_P (dest) && ! REG_P (dest))
    dest = force_reg (Pmode, dest);

  if (retval == NULL)
    mode = VOIDmode;
  else
    mode = GET_MODE (retval);

  call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
		       counter);
  if (retval)
    call = gen_rtx_SET (VOIDmode, retval, call);

  /* For an indirect call, pair the call with a (use ...) of a zeroed
     scratch register; NOTE(review): presumably this matches the
     operand required by the indirect call patterns in stormy16.md --
     confirm there.  */
  if (! CONSTANT_P (dest))
    {
      temp = gen_reg_rtx (HImode);
      emit_move_insn (temp, const0_rtx);
    }
  else
    temp = const0_rtx;

  call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
						gen_rtx_USE (VOIDmode, temp)));
  emit_call_insn (call);
}
1963 \f
/* Expanders for multiword computational operations.  */

/* Expander for arithmetic operations; emit insns to compute

  (set DEST (CODE:MODE SRC0 SRC1))

  When CODE is COMPARE, a branch template is generated
  (this saves duplicating code in xstormy16_split_cbranch).  */

void
xstormy16_expand_arith (machine_mode mode, enum rtx_code code,
			rtx dest, rtx src0, rtx src1)
{
  int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
  int i;
  int firstloop = 1;

  /* Negation is performed as 0 - SRC1, sharing the MINUS path below.  */
  if (code == NEG)
    emit_move_insn (src0, const0_rtx);

  /* Process one HImode word per iteration, from least significant to
     most significant, chaining the carry between words.  */
  for (i = 0; i < num_words; i++)
    {
      rtx w_src0, w_src1, w_dest;
      rtx insn;

      w_src0 = simplify_gen_subreg (word_mode, src0, mode,
				    i * UNITS_PER_WORD);
      w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
      w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);

      switch (code)
	{
	case PLUS:
	  /* Adding zero in the first word is a no-op (no carry needs
	     to be started), so skip it.  */
	  if (firstloop
	      && CONST_INT_P (w_src1)
	      && INTVAL (w_src1) == 0)
	    continue;

	  /* The first word's add sets the carry; later words use and
	     propagate it.  */
	  if (firstloop)
	    insn = gen_addchi4 (w_dest, w_src0, w_src1);
	  else
	    insn = gen_addchi5 (w_dest, w_src0, w_src1);
	  break;

	case NEG:
	case MINUS:
	case COMPARE:
	  /* For a comparison, the final word becomes a borrow-aware
	     subtract fused with a conditional-branch template, which
	     xstormy16_split_cbranch fills in later.  */
	  if (code == COMPARE && i == num_words - 1)
	    {
	      rtx branch, sub, clobber, sub_1;

	      sub_1 = gen_rtx_MINUS (HImode, w_src0,
				     gen_rtx_ZERO_EXTEND (HImode, gen_rtx_REG (BImode, CARRY_REGNUM)));
	      sub = gen_rtx_SET (VOIDmode, w_dest,
				 gen_rtx_MINUS (HImode, sub_1, w_src1));
	      clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
	      branch = gen_rtx_SET (VOIDmode, pc_rtx,
				    gen_rtx_IF_THEN_ELSE (VOIDmode,
							  gen_rtx_EQ (HImode,
								      sub_1,
								      w_src1),
							  pc_rtx,
							  pc_rtx));
	      insn = gen_rtx_PARALLEL (VOIDmode,
				       gen_rtvec (3, branch, sub, clobber));
	    }
	  else if (firstloop
		   && code != COMPARE
		   && CONST_INT_P (w_src1)
		   && INTVAL (w_src1) == 0)
	    continue;
	  else if (firstloop)
	    insn = gen_subchi4 (w_dest, w_src0, w_src1);
	  else
	    insn = gen_subchi5 (w_dest, w_src0, w_src1);
	  break;

	case IOR:
	case XOR:
	case AND:
	  /* Skip identity words: OR/XOR with 0, AND with -1.  */
	  if (CONST_INT_P (w_src1)
	      && INTVAL (w_src1) == -(code == AND))
	    continue;

	  insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_fmt_ee (code, mode,
								w_src0, w_src1));
	  break;

	case NOT:
	  insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0));
	  break;

	default:
	  gcc_unreachable ();
	}

      firstloop = 0;
      emit (insn);
    }

  /* If we emit nothing, try_split() will think we failed.  So emit
     something that does nothing and can be optimized away.  */
  if (firstloop)
    emit (gen_nop ());
}
2069
2070 /* The shift operations are split at output time for constant values;
2071 variable-width shifts get handed off to a library routine.
2072
2073 Generate an output string to do (set X (CODE:MODE X SIZE_R))
2074 SIZE_R will be a CONST_INT, X will be a hard register. */
2075
2076 const char *
2077 xstormy16_output_shift (machine_mode mode, enum rtx_code code,
2078 rtx x, rtx size_r, rtx temp)
2079 {
2080 HOST_WIDE_INT size;
2081 const char *r0, *r1, *rt;
2082 static char r[64];
2083
2084 gcc_assert (CONST_INT_P (size_r)
2085 && REG_P (x)
2086 && mode == SImode);
2087
2088 size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);
2089
2090 if (size == 0)
2091 return "";
2092
2093 r0 = reg_names [REGNO (x)];
2094 r1 = reg_names [REGNO (x) + 1];
2095
2096 /* For shifts of size 1, we can use the rotate instructions. */
2097 if (size == 1)
2098 {
2099 switch (code)
2100 {
2101 case ASHIFT:
2102 sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
2103 break;
2104 case ASHIFTRT:
2105 sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
2106 break;
2107 case LSHIFTRT:
2108 sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
2109 break;
2110 default:
2111 gcc_unreachable ();
2112 }
2113 return r;
2114 }
2115
2116 /* For large shifts, there are easy special cases. */
2117 if (size == 16)
2118 {
2119 switch (code)
2120 {
2121 case ASHIFT:
2122 sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
2123 break;
2124 case ASHIFTRT:
2125 sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
2126 break;
2127 case LSHIFTRT:
2128 sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
2129 break;
2130 default:
2131 gcc_unreachable ();
2132 }
2133 return r;
2134 }
2135 if (size > 16)
2136 {
2137 switch (code)
2138 {
2139 case ASHIFT:
2140 sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
2141 r1, r0, r0, r1, (int) size - 16);
2142 break;
2143 case ASHIFTRT:
2144 sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
2145 r0, r1, r1, r0, (int) size - 16);
2146 break;
2147 case LSHIFTRT:
2148 sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
2149 r0, r1, r1, r0, (int) size - 16);
2150 break;
2151 default:
2152 gcc_unreachable ();
2153 }
2154 return r;
2155 }
2156
2157 /* For the rest, we have to do more work. In particular, we
2158 need a temporary. */
2159 rt = reg_names [REGNO (temp)];
2160 switch (code)
2161 {
2162 case ASHIFT:
2163 sprintf (r,
2164 "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
2165 rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16 - size),
2166 r1, rt);
2167 break;
2168 case ASHIFTRT:
2169 sprintf (r,
2170 "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2171 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
2172 r0, rt);
2173 break;
2174 case LSHIFTRT:
2175 sprintf (r,
2176 "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2177 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
2178 r0, rt);
2179 break;
2180 default:
2181 gcc_unreachable ();
2182 }
2183 return r;
2184 }
2185 \f
2186 /* Attribute handling. */
2187
2188 /* Return nonzero if the function is an interrupt function. */
2189
2190 int
2191 xstormy16_interrupt_function_p (void)
2192 {
2193 tree attributes;
2194
2195 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2196 any functions are declared, which is demonstrably wrong, but
2197 it is worked around here. FIXME. */
2198 if (!cfun)
2199 return 0;
2200
2201 attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
2202 return lookup_attribute ("interrupt", attributes) != NULL_TREE;
2203 }
2204
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table

/* Forward declarations for the attribute handlers defined below.  */
static tree xstormy16_handle_interrupt_attribute
  (tree *, tree, tree, int, bool *);
static tree xstormy16_handle_below100_attribute
  (tree *, tree, tree, int, bool *);

/* Machine-specific attributes: "interrupt" applies to function types;
   "below100"/"BELOW100" presumably marks objects reachable by the
   below-100 addressing used by the bn/bp optimizations later in this
   file -- see combine_bnp and xstormy16_below100_operand.  */
static const struct attribute_spec xstormy16_attribute_table[] =
{
  /* name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
     affects_type_identity.  */
  { "interrupt", 0, 0, false, true,  true,
    xstormy16_handle_interrupt_attribute , false },
  { "BELOW100",  0, 0, false, false, false,
    xstormy16_handle_below100_attribute, false },
  { "below100",  0, 0, false, false, false,
    xstormy16_handle_below100_attribute, false },
  /* Sentinel terminating the table.  */
  { NULL,        0, 0, false, false, false, NULL, false }
};
2225
2226 /* Handle an "interrupt" attribute;
2227 arguments as in struct attribute_spec.handler. */
2228
2229 static tree
2230 xstormy16_handle_interrupt_attribute (tree *node, tree name,
2231 tree args ATTRIBUTE_UNUSED,
2232 int flags ATTRIBUTE_UNUSED,
2233 bool *no_add_attrs)
2234 {
2235 if (TREE_CODE (*node) != FUNCTION_TYPE)
2236 {
2237 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2238 name);
2239 *no_add_attrs = true;
2240 }
2241
2242 return NULL_TREE;
2243 }
2244
2245 /* Handle an "below" attribute;
2246 arguments as in struct attribute_spec.handler. */
2247
2248 static tree
2249 xstormy16_handle_below100_attribute (tree *node,
2250 tree name ATTRIBUTE_UNUSED,
2251 tree args ATTRIBUTE_UNUSED,
2252 int flags ATTRIBUTE_UNUSED,
2253 bool *no_add_attrs)
2254 {
2255 if (TREE_CODE (*node) != VAR_DECL
2256 && TREE_CODE (*node) != POINTER_TYPE
2257 && TREE_CODE (*node) != TYPE_DECL)
2258 {
2259 warning (OPT_Wattributes,
2260 "%<__BELOW100__%> attribute only applies to variables");
2261 *no_add_attrs = true;
2262 }
2263 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
2264 {
2265 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
2266 {
2267 warning (OPT_Wattributes, "__BELOW100__ attribute not allowed "
2268 "with auto storage class");
2269 *no_add_attrs = true;
2270 }
2271 }
2272
2273 return NULL_TREE;
2274 }
2275 \f
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS xstormy16_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin

/* Table of machine-specific builtins.  Each entry's index in this
   table is used as its DECL_FUNCTION_CODE (see xstormy16_init_builtins
   and xstormy16_expand_builtin below).  Note that the div and mod
   variants of each signedness share one md insn, differing only in
   which operand is returned.  */
static struct
{
  const char * name;		/* User-visible builtin name.  */
  int md_code;			/* CODE_FOR_* insn implementing it.  */
  const char * arg_ops;		/* 0..9, t for temp register, r for return value.  */
  const char * arg_types;	/* s=short,l=long, upper case for unsigned;
				   first char is the return type.  */
}
s16builtins[] =
{
  { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
  { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
  { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
  { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
  { NULL, 0, NULL, NULL }	/* Sentinel.  */
};
2296
2297 static void
2298 xstormy16_init_builtins (void)
2299 {
2300 tree args[2], ret_type, arg = NULL_TREE, ftype;
2301 int i, a, n_args;
2302
2303 ret_type = void_type_node;
2304
2305 for (i = 0; s16builtins[i].name; i++)
2306 {
2307 n_args = strlen (s16builtins[i].arg_types) - 1;
2308
2309 gcc_assert (n_args <= (int) ARRAY_SIZE (args));
2310
2311 for (a = n_args - 1; a >= 0; a--)
2312 args[a] = NULL_TREE;
2313
2314 for (a = n_args; a >= 0; a--)
2315 {
2316 switch (s16builtins[i].arg_types[a])
2317 {
2318 case 's': arg = short_integer_type_node; break;
2319 case 'S': arg = short_unsigned_type_node; break;
2320 case 'l': arg = long_integer_type_node; break;
2321 case 'L': arg = long_unsigned_type_node; break;
2322 default: gcc_unreachable ();
2323 }
2324 if (a == 0)
2325 ret_type = arg;
2326 else
2327 args[a-1] = arg;
2328 }
2329 ftype = build_function_type_list (ret_type, args[0], args[1], NULL_TREE);
2330 add_builtin_function (s16builtins[i].name, ftype,
2331 i, BUILT_IN_MD, NULL, NULL_TREE);
2332 }
2333 }
2334
/* Expand a call to one of the builtins registered above.  EXP is the
   CALL_EXPR; TARGET, if non-null, is a suggested place for the result.
   The builtin's arg_ops string drives operand setup: digits select
   evaluated call arguments, 't' a fresh temporary, 'r' the return
   value.  Returns the rtx holding the result.  */

static rtx
xstormy16_expand_builtin (tree exp, rtx target,
			  rtx subtarget ATTRIBUTE_UNUSED,
			  machine_mode mode ATTRIBUTE_UNUSED,
			  int ignore ATTRIBUTE_UNUSED)
{
  rtx op[10], args[10], pat, copyto[10], retval = 0;
  tree fndecl, argtree;
  int i, a, o, code;

  fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  argtree = TREE_OPERAND (exp, 1);
  /* DECL_FUNCTION_CODE is the index into s16builtins (assigned in
     xstormy16_init_builtins).  */
  i = DECL_FUNCTION_CODE (fndecl);
  code = s16builtins[i].md_code;

  /* Evaluate the actual call arguments, in order.  */
  for (a = 0; a < 10 && argtree; a++)
    {
      args[a] = expand_normal (TREE_VALUE (argtree));
      argtree = TREE_CHAIN (argtree);
    }

  for (o = 0; s16builtins[i].arg_ops[o]; o++)
    {
      char ao = s16builtins[i].arg_ops[o];
      char c = insn_data[code].operand[o].constraint[0];
      machine_mode omode;

      copyto[o] = 0;

      omode = (machine_mode) insn_data[code].operand[o].mode;
      if (ao == 'r')
	op[o] = target ? target : gen_reg_rtx (omode);
      else if (ao == 't')
	op[o] = gen_reg_rtx (omode);
      else
	op[o] = args[(int) hex_value (ao)];

      /* If the operand doesn't satisfy the insn's predicate, force it
	 into a register.  For output operands ('+'/'=' constraint) the
	 insn writes a fresh register and we copy back afterwards.  */
      if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
	{
	  if (c == '+' || c == '=')
	    {
	      copyto[o] = op[o];
	      op[o] = gen_reg_rtx (omode);
	    }
	  else
	    op[o] = copy_to_mode_reg (omode, op[o]);
	}

      if (ao == 'r')
	retval = op[o];
    }

  /* Emit the md insn with all ten operand slots (unused ones are
     simply ignored by the generator).  */
  pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
			op[5], op[6], op[7], op[8], op[9]);
  emit_insn (pat);

  /* Copy substituted output operands back to their original homes,
     updating RETVAL if the return value was among them.  */
  for (o = 0; s16builtins[i].arg_ops[o]; o++)
    if (copyto[o])
      {
	emit_move_insn (copyto[o], op[o]);
	if (op[o] == retval)
	  retval = copyto[o];
      }

  return retval;
}
2401 \f
/* Look for combinations of insns that can be converted to BN or BP
   opcodes.  This is, unfortunately, too complex to do with MD
   patterns.  */

/* Given a conditional branch INSN, walk backwards looking for the
   sign-extend/AND (and possibly shift) plus below-100 load that feed
   its condition register.  If the whole combination is found, rewrite
   the branch condition to test the memory bit directly and delete the
   now-redundant feeding insns.  */

static void
combine_bnp (rtx_insn *insn)
{
  int insn_code, regno, need_extend;
  unsigned int mask;
  rtx cond, reg, qireg, mem;
  rtx_insn *and_insn, *load;
  machine_mode load_mode = QImode;
  machine_mode and_mode = QImode;
  rtx_insn *shift = NULL;

  /* Only the two cbranch patterns are candidates.  */
  insn_code = recog_memoized (insn);
  if (insn_code != CODE_FOR_cbranchhi
      && insn_code != CODE_FOR_cbranchhi_neg)
    return;

  /* Dig the comparison out of the branch pattern.  */
  cond = XVECEXP (PATTERN (insn), 0, 0); /* set */
  cond = XEXP (cond, 1); /* if */
  cond = XEXP (cond, 0); /* cond */
  switch (GET_CODE (cond))
    {
    case NE:
    case EQ:
      /* Bit tested via an AND mask.  */
      need_extend = 0;
      break;
    case LT:
    case GE:
      /* Sign test: the top bit, reached through a sign extend.  */
      need_extend = 1;
      break;
    default:
      return;
    }

  /* The comparison must be REG against zero, and the register must die
     here (so deleting its feeder insns is safe).  */
  reg = XEXP (cond, 0);
  if (! REG_P (reg))
    return;
  regno = REGNO (reg);
  if (XEXP (cond, 1) != const0_rtx)
    return;
  if (! find_regno_note (insn, REG_DEAD, regno))
    return;
  qireg = gen_rtx_REG (QImode, regno);

  if (need_extend)
    {
      /* LT and GE conditionals should have a sign extend before
	 them.  */
      for (and_insn = prev_real_insn (insn);
	   and_insn != NULL_RTX;
	   and_insn = prev_real_insn (and_insn))
	{
	  int and_code = recog_memoized (and_insn);

	  if (and_code == CODE_FOR_extendqihi2
	      && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
	      && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), qireg))
	    break;

	  if (and_code == CODE_FOR_movhi_internal
	      && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg))
	    {
	      /* This is for testing bit 15.  */
	      and_insn = insn;
	      break;
	    }

	  /* Any other use of REG in between kills the optimization.  */
	  if (reg_mentioned_p (reg, and_insn))
	    return;

	  /* Give up on anything other than notes and simple insns.  */
	  if (! NOTE_P (and_insn) && ! NONJUMP_INSN_P (and_insn))
	    return;
	}
    }
  else
    {
      /* EQ and NE conditionals have an AND before them.  */
      for (and_insn = prev_real_insn (insn);
	   and_insn != NULL_RTX;
	   and_insn = prev_real_insn (and_insn))
	{
	  if (recog_memoized (and_insn) == CODE_FOR_andhi3
	      && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
	      && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), reg))
	    break;

	  if (reg_mentioned_p (reg, and_insn))
	    return;

	  if (! NOTE_P (and_insn) && ! NONJUMP_INSN_P (and_insn))
	    return;
	}

      if (and_insn)
	{
	  /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
	     followed by an AND like this:

	     (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
		        (clobber (reg:BI carry))]

	     (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))

	     Attempt to detect this here.  */
	  for (shift = prev_real_insn (and_insn); shift;
	       shift = prev_real_insn (shift))
	    {
	      if (recog_memoized (shift) == CODE_FOR_lshrhi3
		  && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift), 0, 0)), reg)
		  && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 0), reg))
		break;

	      /* No matching shift found; proceed without one.  */
	      if (reg_mentioned_p (reg, shift)
		  || (! NOTE_P (shift) && ! NONJUMP_INSN_P (shift)))
		{
		  shift = NULL;
		  break;
		}
	    }
	}
    }

  if (and_insn == NULL_RTX)
    return;

  /* Now look even further back for the load of the tested value; it
     must come from a below-100 address.  */
  for (load = shift ? prev_real_insn (shift) : prev_real_insn (and_insn);
       load;
       load = prev_real_insn (load))
    {
      int load_code = recog_memoized (load);

      if (load_code == CODE_FOR_movhi_internal
	  && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
	  && xstormy16_below100_operand (SET_SRC (PATTERN (load)), HImode)
	  && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load))))
	{
	  load_mode = HImode;
	  break;
	}

      if (load_code == CODE_FOR_movqi_internal
	  && rtx_equal_p (SET_DEST (PATTERN (load)), qireg)
	  && xstormy16_below100_operand (SET_SRC (PATTERN (load)), QImode))
	{
	  load_mode = QImode;
	  break;
	}

      /* A zero-extending QI load: the AND will be rewritten in HImode.  */
      if (load_code == CODE_FOR_zero_extendqihi2
	  && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
	  && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load)), 0), QImode))
	{
	  load_mode = QImode;
	  and_mode = HImode;
	  break;
	}

      if (reg_mentioned_p (reg, load))
	return;

      if (! NOTE_P (load) && ! NONJUMP_INSN_P (load))
	return;
    }
  if (!load)
    return;

  mem = SET_SRC (PATTERN (load));

  if (need_extend)
    {
      /* Sign test: the mask is the sign bit of the loaded mode.  */
      mask = (load_mode == HImode) ? 0x8000 : 0x80;

      /* If the mem includes a zero-extend operation and we are
	 going to generate a sign-extend operation then move the
	 mem inside the zero-extend.  */
      if (GET_CODE (mem) == ZERO_EXTEND)
	mem = XEXP (mem, 0);
    }
  else
    {
      /* The AND mask must have exactly one bit set for a BN/BP test.  */
      if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and_insn)), 1),
					 load_mode))
	return;

      mask = (int) INTVAL (XEXP (SET_SRC (PATTERN (and_insn)), 1));

      /* Account for the bit position shifted away before the AND.  */
      if (shift)
	mask <<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 1));
    }

  /* Narrow an HImode access down to the QImode byte containing the
     tested bit, bumping the address for the high byte.  */
  if (load_mode == HImode)
    {
      rtx addr = XEXP (mem, 0);

      if (! (mask & 0xff))
	{
	  addr = plus_constant (Pmode, addr, 1);
	  mask >>= 8;
	}
      mem = gen_rtx_MEM (QImode, addr);
    }

  /* Rewrite the branch condition to test memory directly.  */
  if (need_extend)
    XEXP (cond, 0) = gen_rtx_SIGN_EXTEND (HImode, mem);
  else
    XEXP (cond, 0) = gen_rtx_AND (and_mode, mem, GEN_INT (mask));

  /* Force re-recognition of the modified branch, then delete the
     insns it no longer needs.  */
  INSN_CODE (insn) = -1;
  delete_insn (load);

  if (and_insn != insn)
    delete_insn (and_insn);

  if (shift != NULL_RTX)
    delete_insn (shift);
}
2621
2622 static void
2623 xstormy16_reorg (void)
2624 {
2625 rtx_insn *insn;
2626
2627 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2628 {
2629 if (! JUMP_P (insn))
2630 continue;
2631 combine_bnp (insn);
2632 }
2633 }
2634 \f
2635 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2636
2637 static bool
2638 xstormy16_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2639 {
2640 const HOST_WIDE_INT size = int_size_in_bytes (type);
2641 return (size == -1 || size > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS);
2642 }
2643 \f
/* Initialize the GCC target structure: per-hook overrides of the
   defaults in target-def.h, then the targetm definition itself.  */

#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info

/* Select_section doesn't handle .bss_below100.  */
#undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND xstormy16_print_operand
#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS xstormy16_print_operand_address

#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST xstormy16_memory_move_cost
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS xstormy16_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST xstormy16_address_cost

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START xstormy16_expand_builtin_va_start
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_gimplify_va_arg_expr

#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG xstormy16_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE xstormy16_function_arg_advance

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory
#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE xstormy16_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE xstormy16_libcall_value
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P xstormy16_function_value_regno_p

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg

#undef TARGET_PREFERRED_RELOAD_CLASS
#define TARGET_PREFERRED_RELOAD_CLASS xstormy16_preferred_reload_class
#undef TARGET_PREFERRED_OUTPUT_RELOAD_CLASS
#define TARGET_PREFERRED_OUTPUT_RELOAD_CLASS xstormy16_preferred_reload_class

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P	xstormy16_legitimate_address_p
#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P xstormy16_mode_dependent_address_p

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE xstormy16_can_eliminate

#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT xstormy16_trampoline_init

struct gcc_target targetm = TARGET_INITIALIZER;

/* Garbage-collector root tables generated by gengtype.  */
#include "gt-stormy16.h"
This page took 0.164753 seconds and 5 git commands to generate.