1 /* Subroutines used for code generation on the Lattice Mico32 architecture.
2 Contributed by Jon Beniston <jon@beniston.com>
4 Copyright (C) 2009-2014 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 3, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
33 #include "insn-attr.h"
34 #include "insn-codes.h"
44 #include "diagnostic-core.h"
49 #include "target-def.h"
50 #include "langhooks.h"
51 #include "tm-constrs.h"
54 struct lm32_frame_info
56 HOST_WIDE_INT total_size
; /* number of bytes of entire frame. */
57 HOST_WIDE_INT callee_size
; /* number of bytes to save callee saves. */
58 HOST_WIDE_INT pretend_size
; /* number of bytes we pretend caller did. */
59 HOST_WIDE_INT args_size
; /* number of bytes for outgoing arguments. */
60 HOST_WIDE_INT locals_size
; /* number of bytes for local variables. */
61 unsigned int reg_save_mask
; /* mask of saved registers. */
64 /* Prototypes for static functions. */
65 static rtx
emit_add (rtx dest
, rtx src0
, rtx src1
);
66 static void expand_save_restore (struct lm32_frame_info
*info
, int op
);
67 static void stack_adjust (HOST_WIDE_INT amount
);
68 static bool lm32_in_small_data_p (const_tree
);
69 static void lm32_setup_incoming_varargs (cumulative_args_t cum
,
70 enum machine_mode mode
, tree type
,
71 int *pretend_size
, int no_rtl
);
72 static bool lm32_rtx_costs (rtx x
, int code
, int outer_code
, int opno
,
73 int *total
, bool speed
);
74 static bool lm32_can_eliminate (const int, const int);
76 lm32_legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
);
77 static HOST_WIDE_INT
lm32_compute_frame_size (int size
);
78 static void lm32_option_override (void);
79 static rtx
lm32_function_arg (cumulative_args_t cum
,
80 enum machine_mode mode
, const_tree type
,
82 static void lm32_function_arg_advance (cumulative_args_t cum
,
83 enum machine_mode mode
,
84 const_tree type
, bool named
);
86 #undef TARGET_OPTION_OVERRIDE
87 #define TARGET_OPTION_OVERRIDE lm32_option_override
88 #undef TARGET_ADDRESS_COST
89 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
90 #undef TARGET_RTX_COSTS
91 #define TARGET_RTX_COSTS lm32_rtx_costs
92 #undef TARGET_IN_SMALL_DATA_P
93 #define TARGET_IN_SMALL_DATA_P lm32_in_small_data_p
94 #undef TARGET_PROMOTE_FUNCTION_MODE
95 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
96 #undef TARGET_SETUP_INCOMING_VARARGS
97 #define TARGET_SETUP_INCOMING_VARARGS lm32_setup_incoming_varargs
98 #undef TARGET_FUNCTION_ARG
99 #define TARGET_FUNCTION_ARG lm32_function_arg
100 #undef TARGET_FUNCTION_ARG_ADVANCE
101 #define TARGET_FUNCTION_ARG_ADVANCE lm32_function_arg_advance
102 #undef TARGET_PROMOTE_PROTOTYPES
103 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
104 #undef TARGET_MIN_ANCHOR_OFFSET
105 #define TARGET_MIN_ANCHOR_OFFSET -0x8000
106 #undef TARGET_MAX_ANCHOR_OFFSET
107 #define TARGET_MAX_ANCHOR_OFFSET 0x7fff
108 #undef TARGET_CAN_ELIMINATE
109 #define TARGET_CAN_ELIMINATE lm32_can_eliminate
110 #undef TARGET_LEGITIMATE_ADDRESS_P
111 #define TARGET_LEGITIMATE_ADDRESS_P lm32_legitimate_address_p
113 struct gcc_target targetm
= TARGET_INITIALIZER
;
115 /* Current frame information calculated by lm32_compute_frame_size. */
116 static struct lm32_frame_info current_frame_info
;
118 /* Return non-zero if the given return type should be returned in memory. */
121 lm32_return_in_memory (tree type
)
125 if (!AGGREGATE_TYPE_P (type
))
127 /* All simple types are returned in registers. */
131 size
= int_size_in_bytes (type
);
132 if (size
>= 0 && size
<= UNITS_PER_WORD
)
134 /* If it can fit in one register. */
141 /* Generate an emit a word sized add instruction. */
144 emit_add (rtx dest
, rtx src0
, rtx src1
)
147 insn
= emit_insn (gen_addsi3 (dest
, src0
, src1
));
151 /* Generate the code to compare (and possibly branch) two integer values
152 TEST_CODE is the comparison code we are trying to emulate
153 (or implement directly)
154 RESULT is where to store the result of the comparison,
155 or null to emit a branch
156 CMP0 CMP1 are the two comparison operands
157 DESTINATION is the destination of the branch, or null to only compare
161 gen_int_relational (enum rtx_code code
,
167 enum machine_mode mode
;
170 mode
= GET_MODE (cmp0
);
171 if (mode
== VOIDmode
)
172 mode
= GET_MODE (cmp1
);
174 /* Is this a branch or compare. */
175 branch_p
= (destination
!= 0);
177 /* Instruction set doesn't support LE or LT, so swap operands and use
188 code
= swap_condition (code
);
200 rtx insn
, cond
, label
;
202 /* Operands must be in registers. */
203 if (!register_operand (cmp0
, mode
))
204 cmp0
= force_reg (mode
, cmp0
);
205 if (!register_operand (cmp1
, mode
))
206 cmp1
= force_reg (mode
, cmp1
);
208 /* Generate conditional branch instruction. */
209 cond
= gen_rtx_fmt_ee (code
, mode
, cmp0
, cmp1
);
210 label
= gen_rtx_LABEL_REF (VOIDmode
, destination
);
211 insn
= gen_rtx_SET (VOIDmode
, pc_rtx
,
212 gen_rtx_IF_THEN_ELSE (VOIDmode
,
213 cond
, label
, pc_rtx
));
214 emit_jump_insn (insn
);
218 /* We can't have const_ints in cmp0, other than 0. */
219 if ((GET_CODE (cmp0
) == CONST_INT
) && (INTVAL (cmp0
) != 0))
220 cmp0
= force_reg (mode
, cmp0
);
222 /* If the comparison is against an int not in legal range
223 move it into a register. */
224 if (GET_CODE (cmp1
) == CONST_INT
)
234 if (!satisfies_constraint_K (cmp1
))
235 cmp1
= force_reg (mode
, cmp1
);
241 if (!satisfies_constraint_L (cmp1
))
242 cmp1
= force_reg (mode
, cmp1
);
249 /* Generate compare instruction. */
250 emit_move_insn (result
, gen_rtx_fmt_ee (code
, mode
, cmp0
, cmp1
));
254 /* Try performing the comparison in OPERANDS[1], whose arms are OPERANDS[2]
255 and OPERAND[3]. Store the result in OPERANDS[0]. */
258 lm32_expand_scc (rtx operands
[])
260 rtx target
= operands
[0];
261 enum rtx_code code
= GET_CODE (operands
[1]);
262 rtx op0
= operands
[2];
263 rtx op1
= operands
[3];
265 gen_int_relational (code
, target
, op0
, op1
, NULL_RTX
);
268 /* Compare OPERANDS[1] with OPERANDS[2] using comparison code
269 CODE and jump to OPERANDS[3] if the condition holds. */
272 lm32_expand_conditional_branch (rtx operands
[])
274 enum rtx_code code
= GET_CODE (operands
[0]);
275 rtx op0
= operands
[1];
276 rtx op1
= operands
[2];
277 rtx destination
= operands
[3];
279 gen_int_relational (code
, NULL_RTX
, op0
, op1
, destination
);
282 /* Generate and emit RTL to save or restore callee save registers. */
284 expand_save_restore (struct lm32_frame_info
*info
, int op
)
286 unsigned int reg_save_mask
= info
->reg_save_mask
;
288 HOST_WIDE_INT offset
;
291 /* Callee saves are below locals and above outgoing arguments. */
292 offset
= info
->args_size
+ info
->callee_size
;
293 for (regno
= 0; regno
<= 31; regno
++)
295 if ((reg_save_mask
& (1 << regno
)) != 0)
300 offset_rtx
= GEN_INT (offset
);
301 if (satisfies_constraint_K (offset_rtx
))
303 mem
= gen_rtx_MEM (word_mode
,
310 /* r10 is caller saved so it can be used as a temp reg. */
313 r10
= gen_rtx_REG (word_mode
, 10);
314 insn
= emit_move_insn (r10
, offset_rtx
);
316 RTX_FRAME_RELATED_P (insn
) = 1;
317 insn
= emit_add (r10
, r10
, stack_pointer_rtx
);
319 RTX_FRAME_RELATED_P (insn
) = 1;
320 mem
= gen_rtx_MEM (word_mode
, r10
);
324 insn
= emit_move_insn (mem
, gen_rtx_REG (word_mode
, regno
));
326 insn
= emit_move_insn (gen_rtx_REG (word_mode
, regno
), mem
);
328 /* only prologue instructions which set the sp fp or save a
329 register should be marked as frame related. */
331 RTX_FRAME_RELATED_P (insn
) = 1;
332 offset
-= UNITS_PER_WORD
;
338 stack_adjust (HOST_WIDE_INT amount
)
342 if (!IN_RANGE (amount
, -32776, 32768))
344 /* r10 is caller saved so it can be used as a temp reg. */
346 r10
= gen_rtx_REG (word_mode
, 10);
347 insn
= emit_move_insn (r10
, GEN_INT (amount
));
349 RTX_FRAME_RELATED_P (insn
) = 1;
350 insn
= emit_add (stack_pointer_rtx
, stack_pointer_rtx
, r10
);
352 RTX_FRAME_RELATED_P (insn
) = 1;
356 insn
= emit_add (stack_pointer_rtx
,
357 stack_pointer_rtx
, GEN_INT (amount
));
359 RTX_FRAME_RELATED_P (insn
) = 1;
364 /* Create and emit instructions for a functions prologue. */
366 lm32_expand_prologue (void)
370 lm32_compute_frame_size (get_frame_size ());
372 if (current_frame_info
.total_size
> 0)
374 /* Add space on stack new frame. */
375 stack_adjust (-current_frame_info
.total_size
);
377 /* Save callee save registers. */
378 if (current_frame_info
.reg_save_mask
!= 0)
379 expand_save_restore (¤t_frame_info
, 0);
381 /* Setup frame pointer if it's needed. */
382 if (frame_pointer_needed
== 1)
385 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
386 RTX_FRAME_RELATED_P (insn
) = 1;
388 /* Add offset - Don't use total_size, as that includes pretend_size,
389 which isn't part of this frame? */
390 insn
= emit_add (frame_pointer_rtx
,
392 GEN_INT (current_frame_info
.args_size
+
393 current_frame_info
.callee_size
+
394 current_frame_info
.locals_size
));
395 RTX_FRAME_RELATED_P (insn
) = 1;
398 /* Prevent prologue from being scheduled into function body. */
399 emit_insn (gen_blockage ());
403 /* Create an emit instructions for a functions epilogue. */
405 lm32_expand_epilogue (void)
407 rtx ra_rtx
= gen_rtx_REG (Pmode
, RA_REGNUM
);
409 lm32_compute_frame_size (get_frame_size ());
411 if (current_frame_info
.total_size
> 0)
413 /* Prevent stack code from being reordered. */
414 emit_insn (gen_blockage ());
416 /* Restore callee save registers. */
417 if (current_frame_info
.reg_save_mask
!= 0)
418 expand_save_restore (¤t_frame_info
, 1);
420 /* Deallocate stack. */
421 stack_adjust (current_frame_info
.total_size
);
423 /* Return to calling function. */
424 emit_jump_insn (gen_return_internal (ra_rtx
));
428 /* Return to calling function. */
429 emit_jump_insn (gen_return_internal (ra_rtx
));
433 /* Return the bytes needed to compute the frame pointer from the current
436 lm32_compute_frame_size (int size
)
439 HOST_WIDE_INT total_size
, locals_size
, args_size
, pretend_size
, callee_size
;
440 unsigned int reg_save_mask
;
443 args_size
= crtl
->outgoing_args_size
;
444 pretend_size
= crtl
->args
.pretend_args_size
;
448 /* Build mask that actually determines which regsiters we save
449 and calculate size required to store them in the stack. */
450 for (regno
= 1; regno
< SP_REGNUM
; regno
++)
452 if (df_regs_ever_live_p (regno
) && !call_used_regs
[regno
])
454 reg_save_mask
|= 1 << regno
;
455 callee_size
+= UNITS_PER_WORD
;
458 if (df_regs_ever_live_p (RA_REGNUM
) || ! crtl
->is_leaf
461 reg_save_mask
|= 1 << RA_REGNUM
;
462 callee_size
+= UNITS_PER_WORD
;
464 if (!(reg_save_mask
& (1 << FP_REGNUM
)) && frame_pointer_needed
)
466 reg_save_mask
|= 1 << FP_REGNUM
;
467 callee_size
+= UNITS_PER_WORD
;
470 /* Compute total frame size. */
471 total_size
= pretend_size
+ args_size
+ locals_size
+ callee_size
;
473 /* Align frame to appropriate boundary. */
474 total_size
= (total_size
+ 3) & ~3;
476 /* Save computed information. */
477 current_frame_info
.total_size
= total_size
;
478 current_frame_info
.callee_size
= callee_size
;
479 current_frame_info
.pretend_size
= pretend_size
;
480 current_frame_info
.locals_size
= locals_size
;
481 current_frame_info
.args_size
= args_size
;
482 current_frame_info
.reg_save_mask
= reg_save_mask
;
488 lm32_print_operand (FILE * file
, rtx op
, int letter
)
492 code
= GET_CODE (op
);
494 if (code
== SIGN_EXTEND
)
495 op
= XEXP (op
, 0), code
= GET_CODE (op
);
496 else if (code
== REG
|| code
== SUBREG
)
503 regnum
= true_regnum (op
);
505 fprintf (file
, "%s", reg_names
[regnum
]);
507 else if (code
== HIGH
)
508 output_addr_const (file
, XEXP (op
, 0));
509 else if (code
== MEM
)
510 output_address (XEXP (op
, 0));
511 else if (letter
== 'z' && GET_CODE (op
) == CONST_INT
&& INTVAL (op
) == 0)
512 fprintf (file
, "%s", reg_names
[0]);
513 else if (GET_CODE (op
) == CONST_DOUBLE
)
515 if ((CONST_DOUBLE_LOW (op
) != 0) || (CONST_DOUBLE_HIGH (op
) != 0))
516 output_operand_lossage ("only 0.0 can be loaded as an immediate");
521 fprintf (file
, "e ");
523 fprintf (file
, "ne ");
525 fprintf (file
, "g ");
526 else if (code
== GTU
)
527 fprintf (file
, "gu ");
529 fprintf (file
, "l ");
530 else if (code
== LTU
)
531 fprintf (file
, "lu ");
533 fprintf (file
, "ge ");
534 else if (code
== GEU
)
535 fprintf (file
, "geu");
537 fprintf (file
, "le ");
538 else if (code
== LEU
)
539 fprintf (file
, "leu");
541 output_addr_const (file
, op
);
544 /* A C compound statement to output to stdio stream STREAM the
545 assembler syntax for an instruction operand that is a memory
546 reference whose address is ADDR. ADDR is an RTL expression.
548 On some machines, the syntax for a symbolic address depends on
549 the section that the address refers to. On these machines,
550 define the macro `ENCODE_SECTION_INFO' to store the information
551 into the `symbol_ref', and then check for it here. */
554 lm32_print_operand_address (FILE * file
, rtx addr
)
556 switch (GET_CODE (addr
))
559 fprintf (file
, "(%s+0)", reg_names
[REGNO (addr
)]);
563 output_address (XEXP (addr
, 0));
568 rtx arg0
= XEXP (addr
, 0);
569 rtx arg1
= XEXP (addr
, 1);
571 if (GET_CODE (arg0
) == REG
&& CONSTANT_P (arg1
))
573 if (GET_CODE (arg1
) == CONST_INT
)
574 fprintf (file
, "(%s+%ld)", reg_names
[REGNO (arg0
)],
578 fprintf (file
, "(%s+", reg_names
[REGNO (arg0
)]);
579 output_addr_const (file
, arg1
);
583 else if (CONSTANT_P (arg0
) && CONSTANT_P (arg1
))
584 output_addr_const (file
, addr
);
586 fatal_insn ("bad operand", addr
);
591 if (SYMBOL_REF_SMALL_P (addr
))
593 fprintf (file
, "gp(");
594 output_addr_const (file
, addr
);
598 fatal_insn ("can't use non gp relative absolute address", addr
);
602 fatal_insn ("invalid addressing mode", addr
);
607 /* Determine where to put an argument to a function.
608 Value is zero to push the argument on the stack,
609 or a hard register in which to store the argument.
611 MODE is the argument's machine mode.
612 TYPE is the data type of the argument (as a tree).
613 This is null for libcalls where that information may
615 CUM is a variable of type CUMULATIVE_ARGS which gives info about
616 the preceding args and about the function being called.
617 NAMED is nonzero if this argument is a named parameter
618 (otherwise it is an extra parameter matching an ellipsis). */
621 lm32_function_arg (cumulative_args_t cum_v
, enum machine_mode mode
,
622 const_tree type
, bool named
)
624 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
626 if (mode
== VOIDmode
)
627 /* Compute operand 2 of the call insn. */
630 if (targetm
.calls
.must_pass_in_stack (mode
, type
))
633 if (!named
|| (*cum
+ LM32_NUM_REGS2 (mode
, type
) > LM32_NUM_ARG_REGS
))
636 return gen_rtx_REG (mode
, *cum
+ LM32_FIRST_ARG_REG
);
640 lm32_function_arg_advance (cumulative_args_t cum
, enum machine_mode mode
,
641 const_tree type
, bool named ATTRIBUTE_UNUSED
)
643 *get_cumulative_args (cum
) += LM32_NUM_REGS2 (mode
, type
);
647 lm32_compute_initial_elimination_offset (int from
, int to
)
649 HOST_WIDE_INT offset
= 0;
653 case ARG_POINTER_REGNUM
:
656 case FRAME_POINTER_REGNUM
:
659 case STACK_POINTER_REGNUM
:
661 lm32_compute_frame_size (get_frame_size ()) -
662 current_frame_info
.pretend_size
;
676 lm32_setup_incoming_varargs (cumulative_args_t cum_v
, enum machine_mode mode
,
677 tree type
, int *pretend_size
, int no_rtl
)
679 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
683 fntype
= TREE_TYPE (current_function_decl
);
685 if (stdarg_p (fntype
))
686 first_anon_arg
= *cum
+ LM32_FIRST_ARG_REG
;
689 /* this is the common case, we have been passed details setup
690 for the last named argument, we want to skip over the
691 registers, if any used in passing this named paramter in
692 order to determine which is the first registers used to pass
693 anonymous arguments. */
697 size
= int_size_in_bytes (type
);
699 size
= GET_MODE_SIZE (mode
);
702 *cum
+ LM32_FIRST_ARG_REG
+
703 ((size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
);
706 if ((first_anon_arg
< (LM32_FIRST_ARG_REG
+ LM32_NUM_ARG_REGS
)) && !no_rtl
)
708 int first_reg_offset
= first_anon_arg
;
709 int size
= LM32_FIRST_ARG_REG
+ LM32_NUM_ARG_REGS
- first_anon_arg
;
712 regblock
= gen_rtx_MEM (BLKmode
,
713 plus_constant (Pmode
, arg_pointer_rtx
,
714 FIRST_PARM_OFFSET (0)));
715 move_block_from_reg (first_reg_offset
, regblock
, size
);
717 *pretend_size
= size
* UNITS_PER_WORD
;
721 /* Override command line options. */
723 lm32_option_override (void)
725 /* We must have sign-extend enabled if barrel-shift isn't. */
726 if (!TARGET_BARREL_SHIFT_ENABLED
&& !TARGET_SIGN_EXTEND_ENABLED
)
727 target_flags
|= MASK_SIGN_EXTEND_ENABLED
;
730 /* Return nonzero if this function is known to have a null epilogue.
731 This allows the optimizer to omit jumps to jumps if no stack
734 lm32_can_use_return (void)
736 if (!reload_completed
)
739 if (df_regs_ever_live_p (RA_REGNUM
) || crtl
->profile
)
742 if (lm32_compute_frame_size (get_frame_size ()) != 0)
748 /* Support function to determine the return address of the function
749 'count' frames back up the stack. */
751 lm32_return_addr_rtx (int count
, rtx frame
)
756 if (!df_regs_ever_live_p (RA_REGNUM
))
757 r
= gen_rtx_REG (Pmode
, RA_REGNUM
);
760 r
= gen_rtx_MEM (Pmode
,
761 gen_rtx_PLUS (Pmode
, frame
,
762 GEN_INT (-2 * UNITS_PER_WORD
)));
763 set_mem_alias_set (r
, get_frame_alias_set ());
766 else if (flag_omit_frame_pointer
)
770 r
= gen_rtx_MEM (Pmode
,
771 gen_rtx_PLUS (Pmode
, frame
,
772 GEN_INT (-2 * UNITS_PER_WORD
)));
773 set_mem_alias_set (r
, get_frame_alias_set ());
778 /* Return true if EXP should be placed in the small data section. */
781 lm32_in_small_data_p (const_tree exp
)
783 /* We want to merge strings, so we never consider them small data. */
784 if (TREE_CODE (exp
) == STRING_CST
)
787 /* Functions are never in the small data area. Duh. */
788 if (TREE_CODE (exp
) == FUNCTION_DECL
)
791 if (TREE_CODE (exp
) == VAR_DECL
&& DECL_SECTION_NAME (exp
))
793 const char *section
= TREE_STRING_POINTER (DECL_SECTION_NAME (exp
));
794 if (strcmp (section
, ".sdata") == 0 || strcmp (section
, ".sbss") == 0)
799 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (exp
));
801 /* If this is an incomplete type with size 0, then we can't put it
802 in sdata because it might be too big when completed. */
803 if (size
> 0 && size
<= g_switch_value
)
810 /* Emit straight-line code to move LENGTH bytes from SRC to DEST.
811 Assume that the areas do not overlap. */
814 lm32_block_move_inline (rtx dest
, rtx src
, HOST_WIDE_INT length
,
815 HOST_WIDE_INT alignment
)
817 HOST_WIDE_INT offset
, delta
;
818 unsigned HOST_WIDE_INT bits
;
820 enum machine_mode mode
;
823 /* Work out how many bits to move at a time. */
837 mode
= mode_for_size (bits
, MODE_INT
, 0);
838 delta
= bits
/ BITS_PER_UNIT
;
840 /* Allocate a buffer for the temporary registers. */
841 regs
= XALLOCAVEC (rtx
, length
/ delta
);
843 /* Load as many BITS-sized chunks as possible. */
844 for (offset
= 0, i
= 0; offset
+ delta
<= length
; offset
+= delta
, i
++)
846 regs
[i
] = gen_reg_rtx (mode
);
847 emit_move_insn (regs
[i
], adjust_address (src
, mode
, offset
));
850 /* Copy the chunks to the destination. */
851 for (offset
= 0, i
= 0; offset
+ delta
<= length
; offset
+= delta
, i
++)
852 emit_move_insn (adjust_address (dest
, mode
, offset
), regs
[i
]);
854 /* Mop up any left-over bytes. */
857 src
= adjust_address (src
, BLKmode
, offset
);
858 dest
= adjust_address (dest
, BLKmode
, offset
);
859 move_by_pieces (dest
, src
, length
- offset
,
860 MIN (MEM_ALIGN (src
), MEM_ALIGN (dest
)), 0);
864 /* Expand string/block move operations.
866 operands[0] is the pointer to the destination.
867 operands[1] is the pointer to the source.
868 operands[2] is the number of bytes to move.
869 operands[3] is the alignment. */
872 lm32_expand_block_move (rtx
* operands
)
874 if ((GET_CODE (operands
[2]) == CONST_INT
) && (INTVAL (operands
[2]) <= 32))
876 lm32_block_move_inline (operands
[0], operands
[1], INTVAL (operands
[2]),
877 INTVAL (operands
[3]));
883 /* Return TRUE if X references a SYMBOL_REF or LABEL_REF whose symbol
884 isn't protected by a PIC unspec. */
886 nonpic_symbol_mentioned_p (rtx x
)
891 if (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == LABEL_REF
892 || GET_CODE (x
) == PC
)
895 /* We don't want to look into the possible MEM location of a
896 CONST_DOUBLE, since we're not going to use it, in general. */
897 if (GET_CODE (x
) == CONST_DOUBLE
)
900 if (GET_CODE (x
) == UNSPEC
)
903 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
904 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
910 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
911 if (nonpic_symbol_mentioned_p (XVECEXP (x
, i
, j
)))
914 else if (fmt
[i
] == 'e' && nonpic_symbol_mentioned_p (XEXP (x
, i
)))
921 /* Compute a (partial) cost for rtx X. Return true if the complete
922 cost has been computed, and false if subexpressions should be
923 scanned. In either case, *TOTAL contains the cost result. */
/* NOTE(review): this paste has dropped every `case' label of the cost
   switch, the enclosing braces, and the divide-cycle computation (original
   lines ~1035-1040); restore from upstream lm32.c before building.  */
926 lm32_rtx_costs (rtx x
, int code
, int outer_code
, int opno ATTRIBUTE_UNUSED
,
927 int *total
, bool speed
)
929 enum machine_mode mode
= GET_MODE (x
);
/* Per-operation latencies used for speed costs; size costs use insn counts.  */
932 const int arithmetic_latency
= 1;
933 const int shift_latency
= 1;
934 const int compare_latency
= 2;
935 const int multiply_latency
= 3;
936 const int load_latency
= 3;
937 const int libcall_size_cost
= 5;
939 /* Determine if we can handle the given mode size in a single instruction. */
940 small_mode
= (mode
== QImode
) || (mode
== HImode
) || (mode
== SImode
);
/* Simple arithmetic/logic: one insn per word of the mode.  */
953 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
));
956 COSTS_N_INSNS (arithmetic_latency
+ (LM32_NUM_REGS (mode
) - 1));
963 *total
= COSTS_N_INSNS (1);
965 *total
= COSTS_N_INSNS (compare_latency
);
969 /* FIXME. Guessing here. */
970 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
) * (2 + 3) / 2);
/* Shifts: cheap with the barrel shifter, linear in the count otherwise.  */
977 if (TARGET_BARREL_SHIFT_ENABLED
&& small_mode
)
980 *total
= COSTS_N_INSNS (1);
982 *total
= COSTS_N_INSNS (shift_latency
);
984 else if (TARGET_BARREL_SHIFT_ENABLED
)
986 /* FIXME: Guessing here. */
987 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
) * 4);
989 else if (small_mode
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
)
991 *total
= COSTS_N_INSNS (INTVAL (XEXP (x
, 1)));
997 *total
= COSTS_N_INSNS (libcall_size_cost
);
999 *total
= COSTS_N_INSNS (100);
/* Multiply: hardware when enabled, otherwise a libcall.  */
1004 if (TARGET_MULTIPLY_ENABLED
&& small_mode
)
1007 *total
= COSTS_N_INSNS (1);
1009 *total
= COSTS_N_INSNS (multiply_latency
);
1015 *total
= COSTS_N_INSNS (libcall_size_cost
);
1017 *total
= COSTS_N_INSNS (100);
/* Divide/modulo: data-dependent cycle count when the divisor is constant
   (`cycles' is computed on elided lines).  */
1025 if (TARGET_DIVIDE_ENABLED
&& small_mode
)
1028 *total
= COSTS_N_INSNS (1);
1031 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
1034 unsigned HOST_WIDE_INT i
= INTVAL (XEXP (x
, 1));
1041 if (IN_RANGE (i
, 0, 65536))
1042 *total
= COSTS_N_INSNS (1 + 1 + cycles
);
1044 *total
= COSTS_N_INSNS (2 + 1 + cycles
);
1047 else if (GET_CODE (XEXP (x
, 1)) == REG
)
1049 *total
= COSTS_N_INSNS (1 + GET_MODE_SIZE (mode
) / 2);
1054 *total
= COSTS_N_INSNS (1 + GET_MODE_SIZE (mode
) / 2);
1063 *total
= COSTS_N_INSNS (libcall_size_cost
);
1065 *total
= COSTS_N_INSNS (100);
1072 *total
= COSTS_N_INSNS (1);
1074 *total
= COSTS_N_INSNS (arithmetic_latency
);
1078 if (MEM_P (XEXP (x
, 0)))
1079 *total
= COSTS_N_INSNS (0);
1080 else if (small_mode
)
1083 *total
= COSTS_N_INSNS (1);
1085 *total
= COSTS_N_INSNS (arithmetic_latency
);
1088 *total
= COSTS_N_INSNS (LM32_NUM_REGS (mode
) / 2);
1097 *total
= COSTS_N_INSNS (0);
/* Constants: free when they fit the immediate constraint of the outer
   operation (L = unsigned 16-bit, K = signed 16-bit), two insns otherwise.  */
1108 if (satisfies_constraint_L (x
))
1109 *total
= COSTS_N_INSNS (0);
1111 *total
= COSTS_N_INSNS (2);
1118 if (satisfies_constraint_K (x
))
1119 *total
= COSTS_N_INSNS (0);
1121 *total
= COSTS_N_INSNS (2);
1125 if (TARGET_MULTIPLY_ENABLED
)
1127 if (satisfies_constraint_K (x
))
1128 *total
= COSTS_N_INSNS (0);
1130 *total
= COSTS_N_INSNS (2);
1136 if (satisfies_constraint_K (x
))
1137 *total
= COSTS_N_INSNS (1);
1139 *total
= COSTS_N_INSNS (2);
1150 *total
= COSTS_N_INSNS (0);
1157 *total
= COSTS_N_INSNS (0);
1166 *total
= COSTS_N_INSNS (2);
1170 *total
= COSTS_N_INSNS (1);
1175 *total
= COSTS_N_INSNS (1);
1177 *total
= COSTS_N_INSNS (load_latency
);
1185 /* Implemenent TARGET_CAN_ELIMINATE. */
1188 lm32_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
1190 return (to
== STACK_POINTER_REGNUM
&& frame_pointer_needed
) ? false : true;
1193 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
1196 lm32_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED
, rtx x
, bool strict
)
1199 if (strict
&& REG_P (x
) && STRICT_REG_OK_FOR_BASE_P (x
))
1201 if (!strict
&& REG_P (x
) && NONSTRICT_REG_OK_FOR_BASE_P (x
))
1205 if (GET_CODE (x
) == PLUS
1206 && REG_P (XEXP (x
, 0))
1207 && ((strict
&& STRICT_REG_OK_FOR_BASE_P (XEXP (x
, 0)))
1208 || (!strict
&& NONSTRICT_REG_OK_FOR_BASE_P (XEXP (x
, 0))))
1209 && GET_CODE (XEXP (x
, 1)) == CONST_INT
1210 && satisfies_constraint_K (XEXP ((x
), 1)))
1214 if (GET_CODE (x
) == SYMBOL_REF
&& SYMBOL_REF_SMALL_P (x
))
1220 /* Check a move is not memory to memory. */
1223 lm32_move_ok (enum machine_mode mode
, rtx operands
[2]) {
1224 if (memory_operand (operands
[0], mode
))
1225 return register_or_zero_operand (operands
[1], mode
);