1 /* Subroutines used for code generation on the Lattice Mico32 architecture.
2 Contributed by Jon Beniston <jon@beniston.com>
3
4 Copyright (C) 2009-2014 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 3, or (at your
11 option) any later version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
33 #include "insn-attr.h"
34 #include "insn-codes.h"
35 #include "recog.h"
36 #include "output.h"
37 #include "tree.h"
38 #include "calls.h"
39 #include "expr.h"
40 #include "flags.h"
41 #include "reload.h"
42 #include "tm_p.h"
43 #include "function.h"
44 #include "diagnostic-core.h"
45 #include "optabs.h"
46 #include "libfuncs.h"
47 #include "ggc.h"
48 #include "target.h"
49 #include "target-def.h"
50 #include "langhooks.h"
51 #include "tm-constrs.h"
52 #include "df.h"
53
54 struct lm32_frame_info
55 {
56 HOST_WIDE_INT total_size; /* number of bytes of entire frame. */
57 HOST_WIDE_INT callee_size; /* number of bytes to save callee saves. */
58 HOST_WIDE_INT pretend_size; /* number of bytes we pretend caller did. */
59 HOST_WIDE_INT args_size; /* number of bytes for outgoing arguments. */
60 HOST_WIDE_INT locals_size; /* number of bytes for local variables. */
61 unsigned int reg_save_mask; /* mask of saved registers. */
62 };
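/* A sketch of the stack frame these fields describe, as laid out by
   lm32_compute_frame_size and lm32_expand_prologue below
   (higher addresses towards the top):

       incoming (pretend) arguments     pretend_size bytes
       local variables                  locals_size bytes
       callee-saved registers           callee_size bytes
       outgoing arguments               args_size bytes   <- stack pointer

   total_size is the sum of the above rounded up to a 4-byte boundary;
   when a frame pointer is needed it is set to the stack pointer plus
   args_size + callee_size + locals_size.  */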
63
64 /* Prototypes for static functions. */
65 static rtx emit_add (rtx dest, rtx src0, rtx src1);
66 static void expand_save_restore (struct lm32_frame_info *info, int op);
67 static void stack_adjust (HOST_WIDE_INT amount);
68 static bool lm32_in_small_data_p (const_tree);
69 static void lm32_setup_incoming_varargs (cumulative_args_t cum,
70 enum machine_mode mode, tree type,
71 int *pretend_size, int no_rtl);
72 static bool lm32_rtx_costs (rtx x, int code, int outer_code, int opno,
73 int *total, bool speed);
74 static bool lm32_can_eliminate (const int, const int);
75 static bool
76 lm32_legitimate_address_p (enum machine_mode mode, rtx x, bool strict);
77 static HOST_WIDE_INT lm32_compute_frame_size (int size);
78 static void lm32_option_override (void);
79 static rtx lm32_function_arg (cumulative_args_t cum,
80 enum machine_mode mode, const_tree type,
81 bool named);
82 static void lm32_function_arg_advance (cumulative_args_t cum,
83 enum machine_mode mode,
84 const_tree type, bool named);
85
86 #undef TARGET_OPTION_OVERRIDE
87 #define TARGET_OPTION_OVERRIDE lm32_option_override
88 #undef TARGET_ADDRESS_COST
89 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
90 #undef TARGET_RTX_COSTS
91 #define TARGET_RTX_COSTS lm32_rtx_costs
92 #undef TARGET_IN_SMALL_DATA_P
93 #define TARGET_IN_SMALL_DATA_P lm32_in_small_data_p
94 #undef TARGET_PROMOTE_FUNCTION_MODE
95 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
96 #undef TARGET_SETUP_INCOMING_VARARGS
97 #define TARGET_SETUP_INCOMING_VARARGS lm32_setup_incoming_varargs
98 #undef TARGET_FUNCTION_ARG
99 #define TARGET_FUNCTION_ARG lm32_function_arg
100 #undef TARGET_FUNCTION_ARG_ADVANCE
101 #define TARGET_FUNCTION_ARG_ADVANCE lm32_function_arg_advance
102 #undef TARGET_PROMOTE_PROTOTYPES
103 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
104 #undef TARGET_MIN_ANCHOR_OFFSET
105 #define TARGET_MIN_ANCHOR_OFFSET -0x8000
106 #undef TARGET_MAX_ANCHOR_OFFSET
107 #define TARGET_MAX_ANCHOR_OFFSET 0x7fff
108 #undef TARGET_CAN_ELIMINATE
109 #define TARGET_CAN_ELIMINATE lm32_can_eliminate
110 #undef TARGET_LEGITIMATE_ADDRESS_P
111 #define TARGET_LEGITIMATE_ADDRESS_P lm32_legitimate_address_p
112
113 struct gcc_target targetm = TARGET_INITIALIZER;
114
115 /* Current frame information calculated by lm32_compute_frame_size. */
116 static struct lm32_frame_info current_frame_info;
117
118 /* Return non-zero if the given return type should be returned in memory. */
119
120 int
121 lm32_return_in_memory (tree type)
122 {
123 HOST_WIDE_INT size;
124
125 if (!AGGREGATE_TYPE_P (type))
126 {
127 /* All simple types are returned in registers. */
128 return 0;
129 }
130
131 size = int_size_in_bytes (type);
132 if (size >= 0 && size <= UNITS_PER_WORD)
133 {
134 /* If it can fit in one register. */
135 return 0;
136 }
137
138 return 1;
139 }
140
141 /* Generate and emit a word-sized add instruction. */
142
143 static rtx
144 emit_add (rtx dest, rtx src0, rtx src1)
145 {
146 rtx insn;
147 insn = emit_insn (gen_addsi3 (dest, src0, src1));
148 return insn;
149 }
150
151 /* Generate the code to compare (and possibly branch) two integer values.
152 TEST_CODE is the comparison code we are trying to emulate
153 (or implement directly).
154 RESULT is where to store the result of the comparison,
155 or null to emit a branch.
156 CMP0 and CMP1 are the two comparison operands.
157 DESTINATION is the destination of the branch, or null to only compare.
158 */
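/* For illustration (a sketch based on the callers below): lm32_expand_scc
   calls this with a RESULT register and a null DESTINATION to materialise
   the comparison result, while lm32_expand_conditional_branch passes a null
   RESULT and a DESTINATION label to emit a conditional branch instead.  */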
159
160 static void
161 gen_int_relational (enum rtx_code code,
162 rtx result,
163 rtx cmp0,
164 rtx cmp1,
165 rtx destination)
166 {
167 enum machine_mode mode;
168 int branch_p;
169
170 mode = GET_MODE (cmp0);
171 if (mode == VOIDmode)
172 mode = GET_MODE (cmp1);
173
174 /* Is this a branch or a compare?  */
175 branch_p = (destination != 0);
176
177 /* The instruction set doesn't support LE or LT, so swap the operands
178 and use GE, GT instead. */
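/* For example, (lt:SI a b) is rewritten below as (gt:SI b a): swap_condition
   supplies the reversed comparison code and the two operands are exchanged
   by hand.  */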
179 switch (code)
180 {
181 case LE:
182 case LT:
183 case LEU:
184 case LTU:
185 {
186 rtx temp;
187
188 code = swap_condition (code);
189 temp = cmp0;
190 cmp0 = cmp1;
191 cmp1 = temp;
192 break;
193 }
194 default:
195 break;
196 }
197
198 if (branch_p)
199 {
200 rtx insn, cond, label;
201
202 /* Operands must be in registers. */
203 if (!register_operand (cmp0, mode))
204 cmp0 = force_reg (mode, cmp0);
205 if (!register_operand (cmp1, mode))
206 cmp1 = force_reg (mode, cmp1);
207
208 /* Generate conditional branch instruction. */
209 cond = gen_rtx_fmt_ee (code, mode, cmp0, cmp1);
210 label = gen_rtx_LABEL_REF (VOIDmode, destination);
211 insn = gen_rtx_SET (VOIDmode, pc_rtx,
212 gen_rtx_IF_THEN_ELSE (VOIDmode,
213 cond, label, pc_rtx));
214 emit_jump_insn (insn);
215 }
216 else
217 {
218 /* We can't have const_ints in cmp0, other than 0. */
219 if ((GET_CODE (cmp0) == CONST_INT) && (INTVAL (cmp0) != 0))
220 cmp0 = force_reg (mode, cmp0);
221
222 /* If the comparison is against an integer that is not in the legal
223 range, move it into a register. */
224 if (GET_CODE (cmp1) == CONST_INT)
225 {
226 switch (code)
227 {
228 case EQ:
229 case NE:
230 case LE:
231 case LT:
232 case GE:
233 case GT:
234 if (!satisfies_constraint_K (cmp1))
235 cmp1 = force_reg (mode, cmp1);
236 break;
237 case LEU:
238 case LTU:
239 case GEU:
240 case GTU:
241 if (!satisfies_constraint_L (cmp1))
242 cmp1 = force_reg (mode, cmp1);
243 break;
244 default:
245 gcc_unreachable ();
246 }
247 }
248
249 /* Generate compare instruction. */
250 emit_move_insn (result, gen_rtx_fmt_ee (code, mode, cmp0, cmp1));
251 }
252 }
253
254 /* Try performing the comparison in OPERANDS[1], whose arms are OPERANDS[2]
255 and OPERANDS[3]. Store the result in OPERANDS[0]. */
256
257 void
258 lm32_expand_scc (rtx operands[])
259 {
260 rtx target = operands[0];
261 enum rtx_code code = GET_CODE (operands[1]);
262 rtx op0 = operands[2];
263 rtx op1 = operands[3];
264
265 gen_int_relational (code, target, op0, op1, NULL_RTX);
266 }
267
268 /* Compare OPERANDS[1] with OPERANDS[2] using comparison code
269 CODE and jump to OPERANDS[3] if the condition holds. */
270
271 void
272 lm32_expand_conditional_branch (rtx operands[])
273 {
274 enum rtx_code code = GET_CODE (operands[0]);
275 rtx op0 = operands[1];
276 rtx op1 = operands[2];
277 rtx destination = operands[3];
278
279 gen_int_relational (code, NULL_RTX, op0, op1, destination);
280 }
281
282 /* Generate and emit RTL to save or restore callee save registers. */
283 static void
284 expand_save_restore (struct lm32_frame_info *info, int op)
285 {
286 unsigned int reg_save_mask = info->reg_save_mask;
287 int regno;
288 HOST_WIDE_INT offset;
289 rtx insn;
290
291 /* Callee saves are below locals and above outgoing arguments. */
292 offset = info->args_size + info->callee_size;
293 for (regno = 0; regno <= 31; regno++)
294 {
295 if ((reg_save_mask & (1 << regno)) != 0)
296 {
297 rtx offset_rtx;
298 rtx mem;
299
300 offset_rtx = GEN_INT (offset);
301 if (satisfies_constraint_K (offset_rtx))
302 {
303 mem = gen_rtx_MEM (word_mode,
304 gen_rtx_PLUS (Pmode,
305 stack_pointer_rtx,
306 offset_rtx));
307 }
308 else
309 {
310 /* r10 is caller saved so it can be used as a temp reg. */
311 rtx r10;
312
313 r10 = gen_rtx_REG (word_mode, 10);
314 insn = emit_move_insn (r10, offset_rtx);
315 if (op == 0)
316 RTX_FRAME_RELATED_P (insn) = 1;
317 insn = emit_add (r10, r10, stack_pointer_rtx);
318 if (op == 0)
319 RTX_FRAME_RELATED_P (insn) = 1;
320 mem = gen_rtx_MEM (word_mode, r10);
321 }
322
323 if (op == 0)
324 insn = emit_move_insn (mem, gen_rtx_REG (word_mode, regno));
325 else
326 insn = emit_move_insn (gen_rtx_REG (word_mode, regno), mem);
327
328 /* Only prologue instructions which set the sp, fp, or save a
329 register should be marked as frame related. */
330 if (op == 0)
331 RTX_FRAME_RELATED_P (insn) = 1;
332 offset -= UNITS_PER_WORD;
333 }
334 }
335 }
336
337 static void
338 stack_adjust (HOST_WIDE_INT amount)
339 {
340 rtx insn;
341
342 if (!IN_RANGE (amount, -32768, 32767))
343 {
344 /* r10 is caller saved so it can be used as a temp reg. */
345 rtx r10;
346 r10 = gen_rtx_REG (word_mode, 10);
347 insn = emit_move_insn (r10, GEN_INT (amount));
348 if (amount < 0)
349 RTX_FRAME_RELATED_P (insn) = 1;
350 insn = emit_add (stack_pointer_rtx, stack_pointer_rtx, r10);
351 if (amount < 0)
352 RTX_FRAME_RELATED_P (insn) = 1;
353 }
354 else
355 {
356 insn = emit_add (stack_pointer_rtx,
357 stack_pointer_rtx, GEN_INT (amount));
358 if (amount < 0)
359 RTX_FRAME_RELATED_P (insn) = 1;
360 }
361 }
362
363
364 /* Create and emit instructions for a function's prologue. */
365 void
366 lm32_expand_prologue (void)
367 {
368 rtx insn;
369
370 lm32_compute_frame_size (get_frame_size ());
371
372 if (current_frame_info.total_size > 0)
373 {
374 /* Allocate space on the stack for the new frame. */
375 stack_adjust (-current_frame_info.total_size);
376
377 /* Save callee save registers. */
378 if (current_frame_info.reg_save_mask != 0)
379 expand_save_restore (&current_frame_info, 0);
380
381 /* Set up the frame pointer if it is needed. */
382 if (frame_pointer_needed == 1)
383 {
384 /* Move sp to fp. */
385 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
386 RTX_FRAME_RELATED_P (insn) = 1;
387
388 /* Add the offset. Don't use total_size, as that includes pretend_size,
389 which isn't part of this frame. */
390 insn = emit_add (frame_pointer_rtx,
391 frame_pointer_rtx,
392 GEN_INT (current_frame_info.args_size +
393 current_frame_info.callee_size +
394 current_frame_info.locals_size));
395 RTX_FRAME_RELATED_P (insn) = 1;
396 }
397
398 /* Prevent prologue from being scheduled into function body. */
399 emit_insn (gen_blockage ());
400 }
401 }
402
403 /* Create and emit instructions for a function's epilogue. */
404 void
405 lm32_expand_epilogue (void)
406 {
407 rtx ra_rtx = gen_rtx_REG (Pmode, RA_REGNUM);
408
409 lm32_compute_frame_size (get_frame_size ());
410
411 if (current_frame_info.total_size > 0)
412 {
413 /* Prevent stack code from being reordered. */
414 emit_insn (gen_blockage ());
415
416 /* Restore callee save registers. */
417 if (current_frame_info.reg_save_mask != 0)
418 expand_save_restore (&current_frame_info, 1);
419
420 /* Deallocate stack. */
421 stack_adjust (current_frame_info.total_size);
422
423 /* Return to calling function. */
424 emit_jump_insn (gen_return_internal (ra_rtx));
425 }
426 else
427 {
428 /* Return to calling function. */
429 emit_jump_insn (gen_return_internal (ra_rtx));
430 }
431 }
432
433 /* Compute the size of the current stack frame, record the breakdown in
434 current_frame_info, and return the total frame size in bytes. */
435 static HOST_WIDE_INT
436 lm32_compute_frame_size (int size)
437 {
438 int regno;
439 HOST_WIDE_INT total_size, locals_size, args_size, pretend_size, callee_size;
440 unsigned int reg_save_mask;
441
442 locals_size = size;
443 args_size = crtl->outgoing_args_size;
444 pretend_size = crtl->args.pretend_args_size;
445 callee_size = 0;
446 reg_save_mask = 0;
447
448 /* Build the mask that determines which registers we save,
449 and calculate the size required to store them on the stack. */
450 for (regno = 1; regno < SP_REGNUM; regno++)
451 {
452 if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
453 {
454 reg_save_mask |= 1 << regno;
455 callee_size += UNITS_PER_WORD;
456 }
457 }
458 if (df_regs_ever_live_p (RA_REGNUM) || ! crtl->is_leaf
459 || !optimize)
460 {
461 reg_save_mask |= 1 << RA_REGNUM;
462 callee_size += UNITS_PER_WORD;
463 }
464 if (!(reg_save_mask & (1 << FP_REGNUM)) && frame_pointer_needed)
465 {
466 reg_save_mask |= 1 << FP_REGNUM;
467 callee_size += UNITS_PER_WORD;
468 }
469
470 /* Compute total frame size. */
471 total_size = pretend_size + args_size + locals_size + callee_size;
472
473 /* Align the frame to a 4-byte boundary. */
474 total_size = (total_size + 3) & ~3;
475
476 /* Save computed information. */
477 current_frame_info.total_size = total_size;
478 current_frame_info.callee_size = callee_size;
479 current_frame_info.pretend_size = pretend_size;
480 current_frame_info.locals_size = locals_size;
481 current_frame_info.args_size = args_size;
482 current_frame_info.reg_save_mask = reg_save_mask;
483
484 return total_size;
485 }
486
487 void
488 lm32_print_operand (FILE * file, rtx op, int letter)
489 {
490 enum rtx_code code;
491
492 code = GET_CODE (op);
493
494 if (code == SIGN_EXTEND)
495 op = XEXP (op, 0), code = GET_CODE (op);
496 else if (code == REG || code == SUBREG)
497 {
498 int regnum;
499
500 if (code == REG)
501 regnum = REGNO (op);
502 else
503 regnum = true_regnum (op);
504
505 fprintf (file, "%s", reg_names[regnum]);
506 }
507 else if (code == HIGH)
508 output_addr_const (file, XEXP (op, 0));
509 else if (code == MEM)
510 output_address (XEXP (op, 0));
511 else if (letter == 'z' && GET_CODE (op) == CONST_INT && INTVAL (op) == 0)
512 fprintf (file, "%s", reg_names[0]);
513 else if (GET_CODE (op) == CONST_DOUBLE)
514 {
515 if ((CONST_DOUBLE_LOW (op) != 0) || (CONST_DOUBLE_HIGH (op) != 0))
516 output_operand_lossage ("only 0.0 can be loaded as an immediate");
517 else
518 fprintf (file, "0");
519 }
520 else if (code == EQ)
521 fprintf (file, "e ");
522 else if (code == NE)
523 fprintf (file, "ne ");
524 else if (code == GT)
525 fprintf (file, "g ");
526 else if (code == GTU)
527 fprintf (file, "gu ");
528 else if (code == LT)
529 fprintf (file, "l ");
530 else if (code == LTU)
531 fprintf (file, "lu ");
532 else if (code == GE)
533 fprintf (file, "ge ");
534 else if (code == GEU)
535 fprintf (file, "geu");
536 else if (code == LE)
537 fprintf (file, "le ");
538 else if (code == LEU)
539 fprintf (file, "leu");
540 else
541 output_addr_const (file, op);
542 }
543
544 /* A C compound statement to output to stdio stream STREAM the
545 assembler syntax for an instruction operand that is a memory
546 reference whose address is ADDR. ADDR is an RTL expression.
547
548 On some machines, the syntax for a symbolic address depends on
549 the section that the address refers to. On these machines,
550 define the macro `ENCODE_SECTION_INFO' to store the information
551 into the `symbol_ref', and then check for it here. */
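/* For illustration, the cases handled below print addresses such as
   "(r4+0)" for a plain register, "(r4+8)" for register plus constant,
   and "gp(foo)" for a small-data symbol (the register and symbol names
   here are just examples).  */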
552
553 void
554 lm32_print_operand_address (FILE * file, rtx addr)
555 {
556 switch (GET_CODE (addr))
557 {
558 case REG:
559 fprintf (file, "(%s+0)", reg_names[REGNO (addr)]);
560 break;
561
562 case MEM:
563 output_address (XEXP (addr, 0));
564 break;
565
566 case PLUS:
567 {
568 rtx arg0 = XEXP (addr, 0);
569 rtx arg1 = XEXP (addr, 1);
570
571 if (GET_CODE (arg0) == REG && CONSTANT_P (arg1))
572 {
573 if (GET_CODE (arg1) == CONST_INT)
574 fprintf (file, "(%s+%ld)", reg_names[REGNO (arg0)],
575 INTVAL (arg1));
576 else
577 {
578 fprintf (file, "(%s+", reg_names[REGNO (arg0)]);
579 output_addr_const (file, arg1);
580 fprintf (file, ")");
581 }
582 }
583 else if (CONSTANT_P (arg0) && CONSTANT_P (arg1))
584 output_addr_const (file, addr);
585 else
586 fatal_insn ("bad operand", addr);
587 }
588 break;
589
590 case SYMBOL_REF:
591 if (SYMBOL_REF_SMALL_P (addr))
592 {
593 fprintf (file, "gp(");
594 output_addr_const (file, addr);
595 fprintf (file, ")");
596 }
597 else
598 fatal_insn ("can't use non gp relative absolute address", addr);
599 break;
600
601 default:
602 fatal_insn ("invalid addressing mode", addr);
603 break;
604 }
605 }
606
607 /* Determine where to put an argument to a function.
608 Value is zero to push the argument on the stack,
609 or a hard register in which to store the argument.
610
611 MODE is the argument's machine mode.
612 TYPE is the data type of the argument (as a tree).
613 This is null for libcalls where that information may
614 not be available.
615 CUM is a variable of type CUMULATIVE_ARGS which gives info about
616 the preceding args and about the function being called.
617 NAMED is nonzero if this argument is a named parameter
618 (otherwise it is an extra parameter matching an ellipsis). */
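/* A sketch of the convention implemented below: named arguments are
   passed in up to LM32_NUM_ARG_REGS registers starting at
   LM32_FIRST_ARG_REG (r1-r8, assuming the usual definitions in lm32.h);
   unnamed arguments, arguments the target says must be passed in memory,
   and arguments that do not fit entirely in the remaining registers are
   passed on the stack (NULL_RTX is returned).  */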
619
620 static rtx
621 lm32_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
622 const_tree type, bool named)
623 {
624 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
625
626 if (mode == VOIDmode)
627 /* Compute operand 2 of the call insn. */
628 return GEN_INT (0);
629
630 if (targetm.calls.must_pass_in_stack (mode, type))
631 return NULL_RTX;
632
633 if (!named || (*cum + LM32_NUM_REGS2 (mode, type) > LM32_NUM_ARG_REGS))
634 return NULL_RTX;
635
636 return gen_rtx_REG (mode, *cum + LM32_FIRST_ARG_REG);
637 }
638
639 static void
640 lm32_function_arg_advance (cumulative_args_t cum, enum machine_mode mode,
641 const_tree type, bool named ATTRIBUTE_UNUSED)
642 {
643 *get_cumulative_args (cum) += LM32_NUM_REGS2 (mode, type);
644 }
645
646 HOST_WIDE_INT
647 lm32_compute_initial_elimination_offset (int from, int to)
648 {
649 HOST_WIDE_INT offset = 0;
650
651 switch (from)
652 {
653 case ARG_POINTER_REGNUM:
654 switch (to)
655 {
656 case FRAME_POINTER_REGNUM:
657 offset = 0;
658 break;
659 case STACK_POINTER_REGNUM:
660 offset =
661 lm32_compute_frame_size (get_frame_size ()) -
662 current_frame_info.pretend_size;
663 break;
664 default:
665 gcc_unreachable ();
666 }
667 break;
668 default:
669 gcc_unreachable ();
670 }
671
672 return offset;
673 }
674
675 static void
676 lm32_setup_incoming_varargs (cumulative_args_t cum_v, enum machine_mode mode,
677 tree type, int *pretend_size, int no_rtl)
678 {
679 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
680 int first_anon_arg;
681 tree fntype;
682
683 fntype = TREE_TYPE (current_function_decl);
684
685 if (stdarg_p (fntype))
686 first_anon_arg = *cum + LM32_FIRST_ARG_REG;
687 else
688 {
689 /* This is the common case: we have been passed details set up
690 for the last named argument, and we want to skip over any
691 registers used in passing that named parameter in order
692 to determine the first register used to pass the
693 anonymous arguments. */
694 int size;
695
696 if (mode == BLKmode)
697 size = int_size_in_bytes (type);
698 else
699 size = GET_MODE_SIZE (mode);
700
701 first_anon_arg =
702 *cum + LM32_FIRST_ARG_REG +
703 ((size + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
704 }
705
706 if ((first_anon_arg < (LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS)) && !no_rtl)
707 {
708 int first_reg_offset = first_anon_arg;
709 int size = LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS - first_anon_arg;
710 rtx regblock;
711
712 regblock = gen_rtx_MEM (BLKmode,
713 plus_constant (Pmode, arg_pointer_rtx,
714 FIRST_PARM_OFFSET (0)));
715 move_block_from_reg (first_reg_offset, regblock, size);
716
717 *pretend_size = size * UNITS_PER_WORD;
718 }
719 }
720
721 /* Override command line options. */
722 static void
723 lm32_option_override (void)
724 {
725 /* We must have sign-extend enabled if barrel-shift isn't. */
726 if (!TARGET_BARREL_SHIFT_ENABLED && !TARGET_SIGN_EXTEND_ENABLED)
727 target_flags |= MASK_SIGN_EXTEND_ENABLED;
728 }
729
730 /* Return nonzero if this function is known to have a null epilogue.
731 This allows the optimizer to omit jumps to jumps if no stack
732 was created. */
733 int
734 lm32_can_use_return (void)
735 {
736 if (!reload_completed)
737 return 0;
738
739 if (df_regs_ever_live_p (RA_REGNUM) || crtl->profile)
740 return 0;
741
742 if (lm32_compute_frame_size (get_frame_size ()) != 0)
743 return 0;
744
745 return 1;
746 }
747
748 /* Support function to determine the return address of the function
749 'count' frames back up the stack. */
750 rtx
751 lm32_return_addr_rtx (int count, rtx frame)
752 {
753 rtx r;
754 if (count == 0)
755 {
756 if (!df_regs_ever_live_p (RA_REGNUM))
757 r = gen_rtx_REG (Pmode, RA_REGNUM);
758 else
759 {
760 r = gen_rtx_MEM (Pmode,
761 gen_rtx_PLUS (Pmode, frame,
762 GEN_INT (-2 * UNITS_PER_WORD)));
763 set_mem_alias_set (r, get_frame_alias_set ());
764 }
765 }
766 else if (flag_omit_frame_pointer)
767 r = NULL_RTX;
768 else
769 {
770 r = gen_rtx_MEM (Pmode,
771 gen_rtx_PLUS (Pmode, frame,
772 GEN_INT (-2 * UNITS_PER_WORD)));
773 set_mem_alias_set (r, get_frame_alias_set ());
774 }
775 return r;
776 }
777
778 /* Return true if EXP should be placed in the small data section. */
779
780 static bool
781 lm32_in_small_data_p (const_tree exp)
782 {
783 /* We want to merge strings, so we never consider them small data. */
784 if (TREE_CODE (exp) == STRING_CST)
785 return false;
786
787 /* Functions are never in the small data area. Duh. */
788 if (TREE_CODE (exp) == FUNCTION_DECL)
789 return false;
790
791 if (TREE_CODE (exp) == VAR_DECL && DECL_SECTION_NAME (exp))
792 {
793 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (exp));
794 if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0)
795 return true;
796 }
797 else
798 {
799 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
800
801 /* If this is an incomplete type with size 0, then we can't put it
802 in sdata because it might be too big when completed. */
803 if (size > 0 && size <= g_switch_value)
804 return true;
805 }
806
807 return false;
808 }
809
810 /* Emit straight-line code to move LENGTH bytes from SRC to DEST.
811 Assume that the areas do not overlap. */
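/* For example, with LENGTH == 10 and ALIGNMENT == 4, the code below
   moves two 32-bit (SImode) chunks covering bytes 0-7 through temporary
   registers and then uses move_by_pieces for the remaining 2 bytes.  */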
812
813 static void
814 lm32_block_move_inline (rtx dest, rtx src, HOST_WIDE_INT length,
815 HOST_WIDE_INT alignment)
816 {
817 HOST_WIDE_INT offset, delta;
818 unsigned HOST_WIDE_INT bits;
819 int i;
820 enum machine_mode mode;
821 rtx *regs;
822
823 /* Work out how many bits to move at a time. */
824 switch (alignment)
825 {
826 case 1:
827 bits = 8;
828 break;
829 case 2:
830 bits = 16;
831 break;
832 default:
833 bits = 32;
834 break;
835 }
836
837 mode = mode_for_size (bits, MODE_INT, 0);
838 delta = bits / BITS_PER_UNIT;
839
840 /* Allocate a buffer for the temporary registers. */
841 regs = XALLOCAVEC (rtx, length / delta);
842
843 /* Load as many BITS-sized chunks as possible. */
844 for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
845 {
846 regs[i] = gen_reg_rtx (mode);
847 emit_move_insn (regs[i], adjust_address (src, mode, offset));
848 }
849
850 /* Copy the chunks to the destination. */
851 for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
852 emit_move_insn (adjust_address (dest, mode, offset), regs[i]);
853
854 /* Mop up any left-over bytes. */
855 if (offset < length)
856 {
857 src = adjust_address (src, BLKmode, offset);
858 dest = adjust_address (dest, BLKmode, offset);
859 move_by_pieces (dest, src, length - offset,
860 MIN (MEM_ALIGN (src), MEM_ALIGN (dest)), 0);
861 }
862 }
863
864 /* Expand string/block move operations.
865
866 operands[0] is the pointer to the destination.
867 operands[1] is the pointer to the source.
868 operands[2] is the number of bytes to move.
869 operands[3] is the alignment. */
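/* A note on the design below: only constant lengths of at most 32 bytes
   are expanded inline; returning 0 makes the caller fall back to the
   generic block-move handling (typically a library call).  */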
870
871 int
872 lm32_expand_block_move (rtx * operands)
873 {
874 if ((GET_CODE (operands[2]) == CONST_INT) && (INTVAL (operands[2]) <= 32))
875 {
876 lm32_block_move_inline (operands[0], operands[1], INTVAL (operands[2]),
877 INTVAL (operands[3]));
878 return 1;
879 }
880 return 0;
881 }
882
883 /* Return TRUE if X references a SYMBOL_REF or LABEL_REF whose symbol
884 isn't protected by a PIC unspec. */
885 int
886 nonpic_symbol_mentioned_p (rtx x)
887 {
888 const char *fmt;
889 int i;
890
891 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF
892 || GET_CODE (x) == PC)
893 return 1;
894
895 /* We don't want to look into the possible MEM location of a
896 CONST_DOUBLE, since we're not going to use it, in general. */
897 if (GET_CODE (x) == CONST_DOUBLE)
898 return 0;
899
900 if (GET_CODE (x) == UNSPEC)
901 return 0;
902
903 fmt = GET_RTX_FORMAT (GET_CODE (x));
904 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
905 {
906 if (fmt[i] == 'E')
907 {
908 int j;
909
910 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
911 if (nonpic_symbol_mentioned_p (XVECEXP (x, i, j)))
912 return 1;
913 }
914 else if (fmt[i] == 'e' && nonpic_symbol_mentioned_p (XEXP (x, i)))
915 return 1;
916 }
917
918 return 0;
919 }
920
921 /* Compute a (partial) cost for rtx X. Return true if the complete
922 cost has been computed, and false if subexpressions should be
923 scanned. In either case, *TOTAL contains the cost result. */
924
925 static bool
926 lm32_rtx_costs (rtx x, int code, int outer_code, int opno ATTRIBUTE_UNUSED,
927 int *total, bool speed)
928 {
929 enum machine_mode mode = GET_MODE (x);
930 bool small_mode;
931
932 const int arithmetic_latency = 1;
933 const int shift_latency = 1;
934 const int compare_latency = 2;
935 const int multiply_latency = 3;
936 const int load_latency = 3;
937 const int libcall_size_cost = 5;
938
939 /* Determine if we can handle the given mode size in a single instruction. */
940 small_mode = (mode == QImode) || (mode == HImode) || (mode == SImode);
941
942 switch (code)
943 {
944
945 case PLUS:
946 case MINUS:
947 case AND:
948 case IOR:
949 case XOR:
950 case NOT:
951 case NEG:
952 if (!speed)
953 *total = COSTS_N_INSNS (LM32_NUM_REGS (mode));
954 else
955 *total =
956 COSTS_N_INSNS (arithmetic_latency + (LM32_NUM_REGS (mode) - 1));
957 break;
958
959 case COMPARE:
960 if (small_mode)
961 {
962 if (!speed)
963 *total = COSTS_N_INSNS (1);
964 else
965 *total = COSTS_N_INSNS (compare_latency);
966 }
967 else
968 {
969 /* FIXME. Guessing here. */
970 *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * (2 + 3) / 2);
971 }
972 break;
973
974 case ASHIFT:
975 case ASHIFTRT:
976 case LSHIFTRT:
977 if (TARGET_BARREL_SHIFT_ENABLED && small_mode)
978 {
979 if (!speed)
980 *total = COSTS_N_INSNS (1);
981 else
982 *total = COSTS_N_INSNS (shift_latency);
983 }
984 else if (TARGET_BARREL_SHIFT_ENABLED)
985 {
986 /* FIXME: Guessing here. */
987 *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * 4);
988 }
989 else if (small_mode && GET_CODE (XEXP (x, 1)) == CONST_INT)
990 {
991 *total = COSTS_N_INSNS (INTVAL (XEXP (x, 1)));
992 }
993 else
994 {
995 /* Libcall. */
996 if (!speed)
997 *total = COSTS_N_INSNS (libcall_size_cost);
998 else
999 *total = COSTS_N_INSNS (100);
1000 }
1001 break;
1002
1003 case MULT:
1004 if (TARGET_MULTIPLY_ENABLED && small_mode)
1005 {
1006 if (!speed)
1007 *total = COSTS_N_INSNS (1);
1008 else
1009 *total = COSTS_N_INSNS (multiply_latency);
1010 }
1011 else
1012 {
1013 /* Libcall. */
1014 if (!speed)
1015 *total = COSTS_N_INSNS (libcall_size_cost);
1016 else
1017 *total = COSTS_N_INSNS (100);
1018 }
1019 break;
1020
1021 case DIV:
1022 case MOD:
1023 case UDIV:
1024 case UMOD:
1025 if (TARGET_DIVIDE_ENABLED && small_mode)
1026 {
1027 if (!speed)
1028 *total = COSTS_N_INSNS (1);
1029 else
1030 {
1031 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
1032 {
1033 int cycles = 0;
1034 unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
1035
1036 while (i)
1037 {
1038 i >>= 2;
1039 cycles++;
1040 }
1041 if (IN_RANGE (i, 0, 65536))
1042 *total = COSTS_N_INSNS (1 + 1 + cycles);
1043 else
1044 *total = COSTS_N_INSNS (2 + 1 + cycles);
1045 return true;
1046 }
1047 else if (GET_CODE (XEXP (x, 1)) == REG)
1048 {
1049 *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
1050 return true;
1051 }
1052 else
1053 {
1054 *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
1055 return false;
1056 }
1057 }
1058 }
1059 else
1060 {
1061 /* Libcall. */
1062 if (!speed)
1063 *total = COSTS_N_INSNS (libcall_size_cost);
1064 else
1065 *total = COSTS_N_INSNS (100);
1066 }
1067 break;
1068
1069 case HIGH:
1070 case LO_SUM:
1071 if (!speed)
1072 *total = COSTS_N_INSNS (1);
1073 else
1074 *total = COSTS_N_INSNS (arithmetic_latency);
1075 break;
1076
1077 case ZERO_EXTEND:
1078 if (MEM_P (XEXP (x, 0)))
1079 *total = COSTS_N_INSNS (0);
1080 else if (small_mode)
1081 {
1082 if (!speed)
1083 *total = COSTS_N_INSNS (1);
1084 else
1085 *total = COSTS_N_INSNS (arithmetic_latency);
1086 }
1087 else
1088 *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) / 2);
1089 break;
1090
1091 case CONST_INT:
1092 {
1093 switch (outer_code)
1094 {
1095 case HIGH:
1096 case LO_SUM:
1097 *total = COSTS_N_INSNS (0);
1098 return true;
1099
1100 case AND:
1101 case XOR:
1102 case IOR:
1103 case ASHIFT:
1104 case ASHIFTRT:
1105 case LSHIFTRT:
1106 case ROTATE:
1107 case ROTATERT:
1108 if (satisfies_constraint_L (x))
1109 *total = COSTS_N_INSNS (0);
1110 else
1111 *total = COSTS_N_INSNS (2);
1112 return true;
1113
1114 case SET:
1115 case PLUS:
1116 case MINUS:
1117 case COMPARE:
1118 if (satisfies_constraint_K (x))
1119 *total = COSTS_N_INSNS (0);
1120 else
1121 *total = COSTS_N_INSNS (2);
1122 return true;
1123
1124 case MULT:
1125 if (TARGET_MULTIPLY_ENABLED)
1126 {
1127 if (satisfies_constraint_K (x))
1128 *total = COSTS_N_INSNS (0);
1129 else
1130 *total = COSTS_N_INSNS (2);
1131 return true;
1132 }
1133 /* Fall through. */
1134
1135 default:
1136 if (satisfies_constraint_K (x))
1137 *total = COSTS_N_INSNS (1);
1138 else
1139 *total = COSTS_N_INSNS (2);
1140 return true;
1141 }
1142 }
1143
1144 case SYMBOL_REF:
1145 case CONST:
1146 switch (outer_code)
1147 {
1148 case HIGH:
1149 case LO_SUM:
1150 *total = COSTS_N_INSNS (0);
1151 return true;
1152
1153 case MEM:
1154 case SET:
1155 if (g_switch_value)
1156 {
1157 *total = COSTS_N_INSNS (0);
1158 return true;
1159 }
1160 break;
1161 }
1162 /* Fall through. */
1163
1164 case LABEL_REF:
1165 case CONST_DOUBLE:
1166 *total = COSTS_N_INSNS (2);
1167 return true;
1168
1169 case SET:
1170 *total = COSTS_N_INSNS (1);
1171 break;
1172
1173 case MEM:
1174 if (!speed)
1175 *total = COSTS_N_INSNS (1);
1176 else
1177 *total = COSTS_N_INSNS (load_latency);
1178 break;
1179
1180 }
1181
1182 return false;
1183 }
1184
1185 /* Implement TARGET_CAN_ELIMINATE. */
1186
1187 bool
1188 lm32_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
1189 {
1190 return (to == STACK_POINTER_REGNUM && frame_pointer_needed) ? false : true;
1191 }
1192
1193 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
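/* The address forms accepted below are a base register, a base register
   plus a signed 16-bit constant offset (constraint K), and a gp-relative
   small-data symbol; for example (r3), (r3+8) and gp(foo), where r3 and
   foo are illustrative names.  */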
1194
1195 static bool
1196 lm32_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x, bool strict)
1197 {
1198 /* (rM) */
1199 if (strict && REG_P (x) && STRICT_REG_OK_FOR_BASE_P (x))
1200 return true;
1201 if (!strict && REG_P (x) && NONSTRICT_REG_OK_FOR_BASE_P (x))
1202 return true;
1203
1204 /* (rM+literal) */
1205 if (GET_CODE (x) == PLUS
1206 && REG_P (XEXP (x, 0))
1207 && ((strict && STRICT_REG_OK_FOR_BASE_P (XEXP (x, 0)))
1208 || (!strict && NONSTRICT_REG_OK_FOR_BASE_P (XEXP (x, 0))))
1209 && GET_CODE (XEXP (x, 1)) == CONST_INT
1210 && satisfies_constraint_K (XEXP ((x), 1)))
1211 return true;
1212
1213 /* gp(sym) */
1214 if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_SMALL_P (x))
1215 return true;
1216
1217 return false;
1218 }
1219
1220 /* Check that a move is not memory to memory. */
1221
1222 bool
1223 lm32_move_ok (enum machine_mode mode, rtx operands[2]) {
1224 if (memory_operand (operands[0], mode))
1225 return register_or_zero_operand (operands[1], mode);
1226 return true;
1227 }