1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 #include "cfglayout.h"
55 #ifndef TARGET_NO_PROTOTYPE
56 #define TARGET_NO_PROTOTYPE 0
/* Predicates for "easy" AltiVec vector constants: splat value N must lie in
   the given range and easy_vector_same must hold for the operands.
   NOTE(review): the embedded decimal prefixes and the numbering jump 62->64
   indicate a dropped continuation line (original line 63, likely an evenness
   test for the ADD_SELF variant) -- verify against upstream rs6000.c.  */
59 #define EASY_VECTOR_15(n, x, y) ((n) >= -16 && (n) <= 15 \
60 && easy_vector_same (x, y))
62 #define EASY_VECTOR_15_ADD_SELF(n, x, y) ((n) >= 0x10 && (n) <= 0x1e \
64 && easy_vector_same (x, y))
/* Plain min/max helpers.  NOTE(review): function-like macros evaluate each
   argument twice -- do not pass expressions with side effects.  */
66 #define min(A,B) ((A) < (B) ? (A) : (B))
67 #define max(A,B) ((A) > (B) ? (A) : (B))
71 enum processor_type rs6000_cpu
;
72 struct rs6000_cpu_select rs6000_select
[3] =
74 /* switch name, tune arch */
75 { (const char *)0, "--with-cpu=", 1, 1 },
76 { (const char *)0, "-mcpu=", 1, 1 },
77 { (const char *)0, "-mtune=", 1, 0 },
80 /* Size of long double */
81 const char *rs6000_long_double_size_string
;
82 int rs6000_long_double_type_size
;
84 /* Whether -mabi=altivec has appeared */
85 int rs6000_altivec_abi
;
87 /* Whether VRSAVE instructions should be generated. */
88 int rs6000_altivec_vrsave
;
90 /* String from -mvrsave= option. */
91 const char *rs6000_altivec_vrsave_string
;
93 /* Nonzero if we want SPE ABI extensions. */
96 /* Whether isel instructions should be generated. */
99 /* Whether SPE simd instructions should be generated. */
102 /* Nonzero if floating point operations are done in the GPRs. */
103 int rs6000_float_gprs
= 0;
105 /* String from -mfloat-gprs=. */
106 const char *rs6000_float_gprs_string
;
108 /* String from -misel=. */
109 const char *rs6000_isel_string
;
111 /* String from -mspe=. */
112 const char *rs6000_spe_string
;
114 /* Set to nonzero once AIX common-mode calls have been defined. */
115 static GTY(()) int common_mode_defined
;
117 /* Save information from a "cmpxx" operation until the branch or scc is
119 rtx rs6000_compare_op0
, rs6000_compare_op1
;
120 int rs6000_compare_fp_p
;
122 /* Label number of label created for -mrelocatable, to call to so we can
123 get the address of the GOT section */
124 int rs6000_pic_labelno
;
127 /* Which abi to adhere to */
128 const char *rs6000_abi_name
;
130 /* Semantics of the small data area */
131 enum rs6000_sdata_type rs6000_sdata
= SDATA_DATA
;
133 /* Which small data model to use */
134 const char *rs6000_sdata_name
= (char *)0;
136 /* Counter for labels which are to be placed in .fixup. */
137 int fixuplabelno
= 0;
140 /* Bit size of immediate TLS offsets and string from which it is decoded. */
141 int rs6000_tls_size
= 32;
142 const char *rs6000_tls_size_string
;
144 /* ABI enumeration available for subtarget to use. */
145 enum rs6000_abi rs6000_current_abi
;
147 /* ABI string from -mabi= option. */
148 const char *rs6000_abi_string
;
151 const char *rs6000_debug_name
;
152 int rs6000_debug_stack
; /* debug stack applications */
153 int rs6000_debug_arg
; /* debug argument handling */
156 static GTY(()) tree opaque_V2SI_type_node
;
157 static GTY(()) tree opaque_V2SF_type_node
;
158 static GTY(()) tree opaque_p_V2SI_type_node
;
160 const char *rs6000_traceback_name
;
162 traceback_default
= 0,
168 /* Flag to say the TOC is initialized */
170 char toc_label_name
[10];
172 /* Alias set for saves and restores from the rs6000 stack. */
173 static int rs6000_sr_alias_set
;
175 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
176 The only place that looks at this is rs6000_set_default_type_attributes;
177 everywhere else should rely on the presence or absence of a longcall
178 attribute on the function declaration. */
179 int rs6000_default_long_calls
;
180 const char *rs6000_longcall_switch
;
182 /* Control alignment for fields within structures. */
183 /* String from -malign-XXXXX. */
184 const char *rs6000_alignment_string
;
185 int rs6000_alignment_flags
;
/* Descriptor for one target builtin: the insn pattern implementing it, its
   user-visible name, and its rs6000_builtins enum code.
   NOTE(review): the opening brace (original line 188) and the `mask' field
   (original line 192) that the comment below refers to are missing from this
   extraction -- confirm against upstream before editing this struct.  */
187 struct builtin_description
189 /* mask is not const because we're going to alter it below. This
190 nonsense will go away when we rewrite the -march infrastructure
191 to give us more target flag bits. */
193 const enum insn_code icode
;
194 const char *const name
;
195 const enum rs6000_builtins code
;
198 static bool rs6000_function_ok_for_sibcall
PARAMS ((tree
, tree
));
199 static int num_insns_constant_wide
PARAMS ((HOST_WIDE_INT
));
200 static void validate_condition_mode
201 PARAMS ((enum rtx_code
, enum machine_mode
));
202 static rtx rs6000_generate_compare
PARAMS ((enum rtx_code
));
203 static void rs6000_maybe_dead
PARAMS ((rtx
));
204 static void rs6000_emit_stack_tie
PARAMS ((void));
205 static void rs6000_frame_related
PARAMS ((rtx
, rtx
, HOST_WIDE_INT
, rtx
, rtx
));
206 static rtx spe_synthesize_frame_save
PARAMS ((rtx
));
207 static bool spe_func_has_64bit_regs_p
PARAMS ((void));
208 static void emit_frame_save
PARAMS ((rtx
, rtx
, enum machine_mode
,
209 unsigned int, int, int));
210 static rtx gen_frame_mem_offset
PARAMS ((enum machine_mode
, rtx
, int));
211 static void rs6000_emit_allocate_stack
PARAMS ((HOST_WIDE_INT
, int));
212 static unsigned rs6000_hash_constant
PARAMS ((rtx
));
213 static unsigned toc_hash_function
PARAMS ((const void *));
214 static int toc_hash_eq
PARAMS ((const void *, const void *));
215 static int constant_pool_expr_1
PARAMS ((rtx
, int *, int *));
216 static bool constant_pool_expr_p
PARAMS ((rtx
));
217 static bool toc_relative_expr_p
PARAMS ((rtx
));
218 static bool legitimate_small_data_p
PARAMS ((enum machine_mode
, rtx
));
219 static bool legitimate_offset_address_p
PARAMS ((enum machine_mode
, rtx
, int));
220 static bool legitimate_indexed_address_p
PARAMS ((rtx
, int));
221 static bool legitimate_indirect_address_p
PARAMS ((rtx
, int));
222 static bool legitimate_lo_sum_address_p
PARAMS ((enum machine_mode
, rtx
, int));
223 static struct machine_function
* rs6000_init_machine_status
PARAMS ((void));
224 static bool rs6000_assemble_integer
PARAMS ((rtx
, unsigned int, int));
225 #ifdef HAVE_GAS_HIDDEN
226 static void rs6000_assemble_visibility
PARAMS ((tree
, int));
228 static int rs6000_ra_ever_killed
PARAMS ((void));
229 static tree rs6000_handle_longcall_attribute
PARAMS ((tree
*, tree
, tree
, int, bool *));
230 extern const struct attribute_spec rs6000_attribute_table
[];
231 static void rs6000_set_default_type_attributes
PARAMS ((tree
));
232 static void rs6000_output_function_prologue
PARAMS ((FILE *, HOST_WIDE_INT
));
233 static void rs6000_output_function_epilogue
PARAMS ((FILE *, HOST_WIDE_INT
));
234 static void rs6000_output_mi_thunk
PARAMS ((FILE *, tree
, HOST_WIDE_INT
,
235 HOST_WIDE_INT
, tree
));
236 static rtx rs6000_emit_set_long_const
PARAMS ((rtx
,
237 HOST_WIDE_INT
, HOST_WIDE_INT
));
239 static unsigned int rs6000_elf_section_type_flags
PARAMS ((tree
, const char *,
241 static void rs6000_elf_asm_out_constructor
PARAMS ((rtx
, int));
242 static void rs6000_elf_asm_out_destructor
PARAMS ((rtx
, int));
243 static void rs6000_elf_select_section
PARAMS ((tree
, int,
244 unsigned HOST_WIDE_INT
));
245 static void rs6000_elf_unique_section
PARAMS ((tree
, int));
246 static void rs6000_elf_select_rtx_section
PARAMS ((enum machine_mode
, rtx
,
247 unsigned HOST_WIDE_INT
));
248 static void rs6000_elf_encode_section_info
PARAMS ((tree
, rtx
, int))
250 static bool rs6000_elf_in_small_data_p
PARAMS ((tree
));
253 static void rs6000_xcoff_asm_globalize_label
PARAMS ((FILE *, const char *));
254 static void rs6000_xcoff_asm_named_section
PARAMS ((const char *, unsigned int));
255 static void rs6000_xcoff_select_section
PARAMS ((tree
, int,
256 unsigned HOST_WIDE_INT
));
257 static void rs6000_xcoff_unique_section
PARAMS ((tree
, int));
258 static void rs6000_xcoff_select_rtx_section
PARAMS ((enum machine_mode
, rtx
,
259 unsigned HOST_WIDE_INT
));
260 static const char * rs6000_xcoff_strip_name_encoding
PARAMS ((const char *));
261 static unsigned int rs6000_xcoff_section_type_flags
PARAMS ((tree
, const char *, int));
262 static void rs6000_xcoff_file_end
PARAMS ((void));
265 static bool rs6000_binds_local_p
PARAMS ((tree
));
267 static int rs6000_use_dfa_pipeline_interface
PARAMS ((void));
268 static int rs6000_variable_issue
PARAMS ((FILE *, int, rtx
, int));
269 static bool rs6000_rtx_costs
PARAMS ((rtx
, int, int, int *));
270 static int rs6000_adjust_cost
PARAMS ((rtx
, rtx
, rtx
, int));
271 static int rs6000_adjust_priority
PARAMS ((rtx
, int));
272 static int rs6000_issue_rate
PARAMS ((void));
273 static int rs6000_use_sched_lookahead
PARAMS ((void));
275 static void rs6000_init_builtins
PARAMS ((void));
276 static rtx rs6000_expand_unop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
277 static rtx rs6000_expand_binop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
278 static rtx rs6000_expand_ternop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
279 static rtx rs6000_expand_builtin
PARAMS ((tree
, rtx
, rtx
, enum machine_mode
, int));
280 static void altivec_init_builtins
PARAMS ((void));
281 static void rs6000_common_init_builtins
PARAMS ((void));
283 static void enable_mask_for_builtins
PARAMS ((struct builtin_description
*,
284 int, enum rs6000_builtins
,
285 enum rs6000_builtins
));
286 static void spe_init_builtins
PARAMS ((void));
287 static rtx spe_expand_builtin
PARAMS ((tree
, rtx
, bool *));
288 static rtx spe_expand_predicate_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
289 static rtx spe_expand_evsel_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
290 static int rs6000_emit_int_cmove
PARAMS ((rtx
, rtx
, rtx
, rtx
));
292 static rtx altivec_expand_builtin
PARAMS ((tree
, rtx
, bool *));
293 static rtx altivec_expand_ld_builtin
PARAMS ((tree
, rtx
, bool *));
294 static rtx altivec_expand_st_builtin
PARAMS ((tree
, rtx
, bool *));
295 static rtx altivec_expand_dst_builtin
PARAMS ((tree
, rtx
, bool *));
296 static rtx altivec_expand_abs_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
297 static rtx altivec_expand_predicate_builtin
PARAMS ((enum insn_code
, const char *, tree
, rtx
));
298 static rtx altivec_expand_stv_builtin
PARAMS ((enum insn_code
, tree
));
299 static void rs6000_parse_abi_options
PARAMS ((void));
300 static void rs6000_parse_alignment_option
PARAMS ((void));
301 static void rs6000_parse_tls_size_option
PARAMS ((void));
302 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
303 static int first_altivec_reg_to_save
PARAMS ((void));
304 static unsigned int compute_vrsave_mask
PARAMS ((void));
305 static void is_altivec_return_reg
PARAMS ((rtx
, void *));
306 static rtx generate_set_vrsave
PARAMS ((rtx
, rs6000_stack_t
*, int));
307 int easy_vector_constant
PARAMS ((rtx
, enum machine_mode
));
308 static int easy_vector_same
PARAMS ((rtx
, enum machine_mode
));
309 static bool is_ev64_opaque_type
PARAMS ((tree
));
310 static rtx rs6000_dwarf_register_span
PARAMS ((rtx
));
311 static rtx rs6000_legitimize_tls_address
PARAMS ((rtx
, enum tls_model
));
312 static rtx rs6000_tls_get_addr
PARAMS ((void));
313 static rtx rs6000_got_sym
PARAMS ((void));
314 static inline int rs6000_tls_symbol_ref_1
PARAMS ((rtx
*, void *));
315 static const char *rs6000_get_some_local_dynamic_name
PARAMS ((void));
316 static int rs6000_get_some_local_dynamic_name_1
PARAMS ((rtx
*, void *));
317 static rtx
rs6000_complex_function_value (enum machine_mode
);
318 static rtx
rs6000_spe_function_arg (CUMULATIVE_ARGS
*, enum machine_mode
, tree
);
320 /* Hash table stuff for keeping track of TOC entries. */
322 struct toc_hash_struct
GTY(())
324 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
325 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
327 enum machine_mode key_mode
;
331 static GTY ((param_is (struct toc_hash_struct
))) htab_t toc_hash_table
;
333 /* Default register names. */
/* Writable because rs6000_override_options memcpy's alt_reg_names over it
   when TARGET_REGNAMES is in effect (see L1261-L1264).
   Layout: 32 GPRs, 32 FPRs, mq/lr/ctr/ap, 8 CR fields, then AltiVec regs.
   NOTE(review): the initializer's opening brace (original line 335), the
   row at original line 346, and the closing `};' are missing from this
   extraction -- restore from upstream before compiling.  */
334 char rs6000_reg_names
[][8] =
336 "0", "1", "2", "3", "4", "5", "6", "7",
337 "8", "9", "10", "11", "12", "13", "14", "15",
338 "16", "17", "18", "19", "20", "21", "22", "23",
339 "24", "25", "26", "27", "28", "29", "30", "31",
340 "0", "1", "2", "3", "4", "5", "6", "7",
341 "8", "9", "10", "11", "12", "13", "14", "15",
342 "16", "17", "18", "19", "20", "21", "22", "23",
343 "24", "25", "26", "27", "28", "29", "30", "31",
344 "mq", "lr", "ctr","ap",
345 "0", "1", "2", "3", "4", "5", "6", "7",
347 /* AltiVec registers. */
348 "0", "1", "2", "3", "4", "5", "6", "7",
349 "8", "9", "10", "11", "12", "13", "14", "15",
350 "16", "17", "18", "19", "20", "21", "22", "23",
351 "24", "25", "26", "27", "28", "29", "30", "31",
/* Alternate ('%'-prefixed) register names, copied over rs6000_reg_names at
   option-override time when the user asks for them.  Must stay parallel to
   rs6000_reg_names above, entry for entry.
   NOTE(review): opening brace (original line 359), the CR/AltiVec boundary
   row at original line 370, the closing `};', and the matching `#endif'
   are missing from this extraction.  */
357 #ifdef TARGET_REGNAMES
358 static const char alt_reg_names
[][8] =
360 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
361 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
362 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
363 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
364 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
365 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
366 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
367 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
368 "mq", "lr", "ctr", "ap",
369 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
371 /* AltiVec registers. */
372 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
373 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
374 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
375 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
/* Default these target masks to 0 for subtargets that do not define them.
   NOTE(review): the matching `#endif' lines (original 384 and 387) are
   missing from this extraction.  */
382 #ifndef MASK_STRICT_ALIGN
383 #define MASK_STRICT_ALIGN 0
385 #ifndef TARGET_PROFILE_KERNEL
386 #define TARGET_PROFILE_KERNEL 0
389 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
390 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
392 /* Return 1 for a symbol ref for a thread-local storage symbol. */
393 #define RS6000_SYMBOL_REF_TLS_P(RTX) \
394 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
396 /* Initialize the GCC target structure. */
397 #undef TARGET_ATTRIBUTE_TABLE
398 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
399 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
400 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
402 #undef TARGET_ASM_ALIGNED_DI_OP
403 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
405 /* Default unaligned ops are only provided for ELF. Find the ops needed
406 for non-ELF systems. */
407 #ifndef OBJECT_FORMAT_ELF
409 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
411 #undef TARGET_ASM_UNALIGNED_HI_OP
412 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
413 #undef TARGET_ASM_UNALIGNED_SI_OP
414 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
415 #undef TARGET_ASM_UNALIGNED_DI_OP
416 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
419 #undef TARGET_ASM_UNALIGNED_HI_OP
420 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
421 #undef TARGET_ASM_UNALIGNED_SI_OP
422 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
426 /* This hook deals with fixups for relocatable code and DI-mode objects
428 #undef TARGET_ASM_INTEGER
429 #define TARGET_ASM_INTEGER rs6000_assemble_integer
431 #ifdef HAVE_GAS_HIDDEN
432 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
433 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
436 #undef TARGET_HAVE_TLS
437 #define TARGET_HAVE_TLS HAVE_AS_TLS
439 #undef TARGET_CANNOT_FORCE_CONST_MEM
440 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
442 #undef TARGET_ASM_FUNCTION_PROLOGUE
443 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
444 #undef TARGET_ASM_FUNCTION_EPILOGUE
445 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
447 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
448 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
449 #undef TARGET_SCHED_VARIABLE_ISSUE
450 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
452 #undef TARGET_SCHED_ISSUE_RATE
453 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
454 #undef TARGET_SCHED_ADJUST_COST
455 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
456 #undef TARGET_SCHED_ADJUST_PRIORITY
457 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
459 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
460 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
462 #undef TARGET_INIT_BUILTINS
463 #define TARGET_INIT_BUILTINS rs6000_init_builtins
465 #undef TARGET_EXPAND_BUILTIN
466 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
469 #undef TARGET_BINDS_LOCAL_P
470 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
473 #undef TARGET_ASM_OUTPUT_MI_THUNK
474 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
476 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
477 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
479 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
480 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
482 #undef TARGET_RTX_COSTS
483 #define TARGET_RTX_COSTS rs6000_rtx_costs
484 #undef TARGET_ADDRESS_COST
485 #define TARGET_ADDRESS_COST hook_int_rtx_0
487 #undef TARGET_VECTOR_OPAQUE_P
488 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
490 #undef TARGET_DWARF_REGISTER_SPAN
491 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
493 struct gcc_target targetm
= TARGET_INITIALIZER
;
495 /* Override command line options. Mostly we process the processor
496 type and sometimes adjust other TARGET_ options. */
499 rs6000_override_options (default_cpu
)
500 const char *default_cpu
;
503 struct rs6000_cpu_select
*ptr
;
505 /* Simplify the entries below by making a mask for any POWER
506 variant and any PowerPC variant. */
508 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
509 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
510 | MASK_PPC_GFXOPT | MASK_POWERPC64)
511 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
515 const char *const name
; /* Canonical processor name. */
516 const enum processor_type processor
; /* Processor type enum value. */
517 const int target_enable
; /* Target flags to enable. */
518 const int target_disable
; /* Target flags to disable. */
519 } const processor_target_table
[]
520 = {{"common", PROCESSOR_COMMON
, MASK_NEW_MNEMONICS
,
521 POWER_MASKS
| POWERPC_MASKS
},
522 {"power", PROCESSOR_POWER
,
523 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
524 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
525 {"power2", PROCESSOR_POWER
,
526 MASK_POWER
| MASK_POWER2
| MASK_MULTIPLE
| MASK_STRING
,
527 POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
528 {"power3", PROCESSOR_PPC630
,
529 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
531 {"power4", PROCESSOR_POWER4
,
532 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
534 {"powerpc", PROCESSOR_POWERPC
,
535 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
536 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
537 {"powerpc64", PROCESSOR_POWERPC64
,
538 MASK_POWERPC
| MASK_POWERPC64
| MASK_NEW_MNEMONICS
,
539 POWER_MASKS
| POWERPC_OPT_MASKS
},
540 {"rios", PROCESSOR_RIOS1
,
541 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
542 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
543 {"rios1", PROCESSOR_RIOS1
,
544 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
545 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
546 {"rsc", PROCESSOR_PPC601
,
547 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
548 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
549 {"rsc1", PROCESSOR_PPC601
,
550 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
551 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
552 {"rios2", PROCESSOR_RIOS2
,
553 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
| MASK_POWER2
,
554 POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
555 {"rs64a", PROCESSOR_RS64A
,
556 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
557 POWER_MASKS
| POWERPC_OPT_MASKS
},
558 {"401", PROCESSOR_PPC403
,
559 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
560 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
561 {"403", PROCESSOR_PPC403
,
562 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
| MASK_STRICT_ALIGN
,
563 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
564 {"405", PROCESSOR_PPC405
,
565 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
566 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
567 {"405fp", PROCESSOR_PPC405
,
568 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
569 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
570 {"440", PROCESSOR_PPC440
,
571 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
572 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
573 {"440fp", PROCESSOR_PPC440
,
574 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
575 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
576 {"505", PROCESSOR_MPCCORE
,
577 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
578 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
579 {"601", PROCESSOR_PPC601
,
580 MASK_POWER
| MASK_POWERPC
| MASK_NEW_MNEMONICS
| MASK_MULTIPLE
| MASK_STRING
,
581 MASK_POWER2
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
582 {"602", PROCESSOR_PPC603
,
583 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
584 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
585 {"603", PROCESSOR_PPC603
,
586 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
587 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
588 {"603e", PROCESSOR_PPC603
,
589 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
590 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
591 {"ec603e", PROCESSOR_PPC603
,
592 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
593 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
594 {"604", PROCESSOR_PPC604
,
595 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
596 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
597 {"604e", PROCESSOR_PPC604e
,
598 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
599 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
600 {"620", PROCESSOR_PPC620
,
601 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
603 {"630", PROCESSOR_PPC630
,
604 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
606 {"740", PROCESSOR_PPC750
,
607 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
608 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
609 {"750", PROCESSOR_PPC750
,
610 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
611 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
612 {"7400", PROCESSOR_PPC7400
,
613 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
614 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
615 {"7450", PROCESSOR_PPC7450
,
616 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
617 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
618 {"8540", PROCESSOR_PPC8540
,
619 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
620 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
621 {"801", PROCESSOR_MPCCORE
,
622 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
623 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
624 {"821", PROCESSOR_MPCCORE
,
625 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
626 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
627 {"823", PROCESSOR_MPCCORE
,
628 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
629 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
630 {"860", PROCESSOR_MPCCORE
,
631 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
632 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
}};
634 const size_t ptt_size
= ARRAY_SIZE (processor_target_table
);
636 /* Save current -mmultiple/-mno-multiple status. */
637 int multiple
= TARGET_MULTIPLE
;
638 /* Save current -mstring/-mno-string status. */
639 int string
= TARGET_STRING
;
641 /* Identify the processor type. */
642 rs6000_select
[0].string
= default_cpu
;
643 rs6000_cpu
= TARGET_POWERPC64
? PROCESSOR_DEFAULT64
: PROCESSOR_DEFAULT
;
645 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
647 ptr
= &rs6000_select
[i
];
648 if (ptr
->string
!= (char *)0 && ptr
->string
[0] != '\0')
650 for (j
= 0; j
< ptt_size
; j
++)
651 if (! strcmp (ptr
->string
, processor_target_table
[j
].name
))
654 rs6000_cpu
= processor_target_table
[j
].processor
;
658 target_flags
|= processor_target_table
[j
].target_enable
;
659 target_flags
&= ~processor_target_table
[j
].target_disable
;
665 error ("bad value (%s) for %s switch", ptr
->string
, ptr
->name
);
672 /* If we are optimizing big endian systems for space, use the load/store
673 multiple and string instructions. */
674 if (BYTES_BIG_ENDIAN
&& optimize_size
)
675 target_flags
|= MASK_MULTIPLE
| MASK_STRING
;
677 /* If -mmultiple or -mno-multiple was explicitly used, don't
678 override with the processor default */
679 if ((target_flags_explicit
& MASK_MULTIPLE
) != 0)
680 target_flags
= (target_flags
& ~MASK_MULTIPLE
) | multiple
;
682 /* If -mstring or -mno-string was explicitly used, don't override
683 with the processor default. */
684 if ((target_flags_explicit
& MASK_STRING
) != 0)
685 target_flags
= (target_flags
& ~MASK_STRING
) | string
;
687 /* Don't allow -mmultiple or -mstring on little endian systems
688 unless the cpu is a 750, because the hardware doesn't support the
689 instructions used in little endian mode, and causes an alignment
690 trap. The 750 does not cause an alignment trap (except when the
691 target is unaligned). */
693 if (!BYTES_BIG_ENDIAN
&& rs6000_cpu
!= PROCESSOR_PPC750
)
697 target_flags
&= ~MASK_MULTIPLE
;
698 if ((target_flags_explicit
& MASK_MULTIPLE
) != 0)
699 warning ("-mmultiple is not supported on little endian systems");
704 target_flags
&= ~MASK_STRING
;
705 if ((target_flags_explicit
& MASK_STRING
) != 0)
706 warning ("-mstring is not supported on little endian systems");
710 /* Set debug flags */
711 if (rs6000_debug_name
)
713 if (! strcmp (rs6000_debug_name
, "all"))
714 rs6000_debug_stack
= rs6000_debug_arg
= 1;
715 else if (! strcmp (rs6000_debug_name
, "stack"))
716 rs6000_debug_stack
= 1;
717 else if (! strcmp (rs6000_debug_name
, "arg"))
718 rs6000_debug_arg
= 1;
720 error ("unknown -mdebug-%s switch", rs6000_debug_name
);
723 if (rs6000_traceback_name
)
725 if (! strncmp (rs6000_traceback_name
, "full", 4))
726 rs6000_traceback
= traceback_full
;
727 else if (! strncmp (rs6000_traceback_name
, "part", 4))
728 rs6000_traceback
= traceback_part
;
729 else if (! strncmp (rs6000_traceback_name
, "no", 2))
730 rs6000_traceback
= traceback_none
;
732 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
733 rs6000_traceback_name
);
736 /* Set size of long double */
737 rs6000_long_double_type_size
= 64;
738 if (rs6000_long_double_size_string
)
741 int size
= strtol (rs6000_long_double_size_string
, &tail
, 10);
742 if (*tail
!= '\0' || (size
!= 64 && size
!= 128))
743 error ("Unknown switch -mlong-double-%s",
744 rs6000_long_double_size_string
);
746 rs6000_long_double_type_size
= size
;
749 /* Handle -mabi= options. */
750 rs6000_parse_abi_options ();
752 /* Handle -malign-XXXXX option. */
753 rs6000_parse_alignment_option ();
755 /* Handle generic -mFOO=YES/NO options. */
756 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string
,
757 &rs6000_altivec_vrsave
);
758 rs6000_parse_yes_no_option ("isel", rs6000_isel_string
,
760 rs6000_parse_yes_no_option ("spe", rs6000_spe_string
, &rs6000_spe
);
761 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string
,
764 /* Handle -mtls-size option. */
765 rs6000_parse_tls_size_option ();
767 #ifdef SUBTARGET_OVERRIDE_OPTIONS
768 SUBTARGET_OVERRIDE_OPTIONS
;
770 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
771 SUBSUBTARGET_OVERRIDE_OPTIONS
;
776 /* The e500 does not have string instructions, and we set
777 MASK_STRING above when optimizing for size. */
778 if ((target_flags
& MASK_STRING
) != 0)
779 target_flags
= target_flags
& ~MASK_STRING
;
781 /* No SPE means 64-bit long doubles, even if an E500. */
782 if (rs6000_spe_string
!= 0
783 && !strcmp (rs6000_spe_string
, "no"))
784 rs6000_long_double_type_size
= 64;
786 else if (rs6000_select
[1].string
!= NULL
)
788 /* For the powerpc-eabispe configuration, we set all these by
789 default, so let's unset them if we manually set another
790 CPU that is not the E500. */
791 if (rs6000_abi_string
== 0)
793 if (rs6000_spe_string
== 0)
795 if (rs6000_float_gprs_string
== 0)
796 rs6000_float_gprs
= 0;
797 if (rs6000_isel_string
== 0)
799 if (rs6000_long_double_size_string
== 0)
800 rs6000_long_double_type_size
= 64;
803 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
804 using TARGET_OPTIONS to handle a toggle switch, but we're out of
805 bits in target_flags so TARGET_SWITCHES cannot be used.
806 Assumption here is that rs6000_longcall_switch points into the
807 text of the complete option, rather than being a copy, so we can
808 scan back for the presence or absence of the no- modifier. */
809 if (rs6000_longcall_switch
)
811 const char *base
= rs6000_longcall_switch
;
812 while (base
[-1] != 'm') base
--;
814 if (*rs6000_longcall_switch
!= '\0')
815 error ("invalid option `%s'", base
);
816 rs6000_default_long_calls
= (base
[0] != 'n');
819 #ifdef TARGET_REGNAMES
820 /* If the user desires alternate register names, copy in the
821 alternate names now. */
823 memcpy (rs6000_reg_names
, alt_reg_names
, sizeof (rs6000_reg_names
));
826 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
827 If -maix-struct-return or -msvr4-struct-return was explicitly
828 used, don't override with the ABI default. */
829 if ((target_flags_explicit
& MASK_AIX_STRUCT_RET
) == 0)
831 if (DEFAULT_ABI
== ABI_V4
&& !DRAFT_V4_STRUCT_RET
)
832 target_flags
= (target_flags
& ~MASK_AIX_STRUCT_RET
);
834 target_flags
|= MASK_AIX_STRUCT_RET
;
837 if (TARGET_LONG_DOUBLE_128
838 && (DEFAULT_ABI
== ABI_AIX
|| DEFAULT_ABI
== ABI_DARWIN
))
839 real_format_for_mode
[TFmode
- QFmode
] = &ibm_extended_format
;
841 /* Allocate an alias set for register saves & restores from stack. */
842 rs6000_sr_alias_set
= new_alias_set ();
845 ASM_GENERATE_INTERNAL_LABEL (toc_label_name
, "LCTOC", 1);
847 /* We can only guarantee the availability of DI pseudo-ops when
848 assembling for 64-bit targets. */
851 targetm
.asm_out
.aligned_op
.di
= NULL
;
852 targetm
.asm_out
.unaligned_op
.di
= NULL
;
855 /* Set maximum branch target alignment at two instructions, eight bytes. */
856 align_jumps_max_skip
= 8;
857 align_loops_max_skip
= 8;
859 /* Arrange to save and restore machine status around nested functions. */
860 init_machine_status
= rs6000_init_machine_status
;
863 /* Handle generic options of the form -mfoo=yes/no.
864 NAME is the option name.
865 VALUE is the option value.
866 FLAG is the pointer to the flag where to store a 1 or 0, depending on
867 whether the option value is 'yes' or 'no' respectively. */
/* NOTE(review): this extraction is missing the return type, body braces,
   the VALUE==NULL early return, and the *flag assignments (original lines
   868, 870-872, 874, 876-877, 879) -- only the comparison skeleton and the
   diagnostic survive.  Restore from upstream before editing.  */
869 rs6000_parse_yes_no_option (const char *name
, const char *value
, int *flag
)
873 else if (!strcmp (value
, "yes"))
875 else if (!strcmp (value
, "no"))
878 error ("unknown -m%s= option specified: '%s'", name
, value
);
881 /* Handle -mabi= options. */
/* Decode rs6000_abi_string: "altivec"/"no-altivec" toggle
   rs6000_altivec_abi; "spe"/"no-spe" presumably toggle the SPE ABI flag,
   but those assignments (original lines 892-894, 896-900) are missing from
   this extraction, as are the return type, braces, and the final `else'
   before the unknown-ABI diagnostic.  TODO(review): restore from upstream.  */
883 rs6000_parse_abi_options ()
885 if (rs6000_abi_string
== 0)
887 else if (! strcmp (rs6000_abi_string
, "altivec"))
888 rs6000_altivec_abi
= 1;
889 else if (! strcmp (rs6000_abi_string
, "no-altivec"))
890 rs6000_altivec_abi
= 0;
891 else if (! strcmp (rs6000_abi_string
, "spe"))
895 error ("not configured for ABI: '%s'", rs6000_abi_string
);
898 else if (! strcmp (rs6000_abi_string
, "no-spe"))
901 error ("unknown ABI specified: '%s'", rs6000_abi_string
);
904 /* Handle -malign-XXXXXX options. */
906 rs6000_parse_alignment_option ()
908 if (rs6000_alignment_string
== 0
909 || ! strcmp (rs6000_alignment_string
, "power"))
910 rs6000_alignment_flags
= MASK_ALIGN_POWER
;
911 else if (! strcmp (rs6000_alignment_string
, "natural"))
912 rs6000_alignment_flags
= MASK_ALIGN_NATURAL
;
914 error ("unknown -malign-XXXXX option specified: '%s'",
915 rs6000_alignment_string
);
918 /* Validate and record the size specified with the -mtls-size option. */
921 rs6000_parse_tls_size_option ()
923 if (rs6000_tls_size_string
== 0)
925 else if (strcmp (rs6000_tls_size_string
, "16") == 0)
926 rs6000_tls_size
= 16;
927 else if (strcmp (rs6000_tls_size_string
, "32") == 0)
928 rs6000_tls_size
= 32;
929 else if (strcmp (rs6000_tls_size_string
, "64") == 0)
930 rs6000_tls_size
= 64;
932 error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string
);
936 optimization_options (level
, size
)
937 int level ATTRIBUTE_UNUSED
;
938 int size ATTRIBUTE_UNUSED
;
942 /* Do anything needed at the start of the asm file. */
945 rs6000_file_start (file
, default_cpu
)
947 const char *default_cpu
;
951 const char *start
= buffer
;
952 struct rs6000_cpu_select
*ptr
;
954 if (flag_verbose_asm
)
956 sprintf (buffer
, "\n%s rs6000/powerpc options:", ASM_COMMENT_START
);
957 rs6000_select
[0].string
= default_cpu
;
959 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
961 ptr
= &rs6000_select
[i
];
962 if (ptr
->string
!= (char *)0 && ptr
->string
[0] != '\0')
964 fprintf (file
, "%s %s%s", start
, ptr
->name
, ptr
->string
);
970 switch (rs6000_sdata
)
972 case SDATA_NONE
: fprintf (file
, "%s -msdata=none", start
); start
= ""; break;
973 case SDATA_DATA
: fprintf (file
, "%s -msdata=data", start
); start
= ""; break;
974 case SDATA_SYSV
: fprintf (file
, "%s -msdata=sysv", start
); start
= ""; break;
975 case SDATA_EABI
: fprintf (file
, "%s -msdata=eabi", start
); start
= ""; break;
978 if (rs6000_sdata
&& g_switch_value
)
980 fprintf (file
, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED
, start
,
991 /* Return nonzero if this function is known to have a null epilogue. */
996 if (reload_completed
)
998 rs6000_stack_t
*info
= rs6000_stack_info ();
1000 if (info
->first_gp_reg_save
== 32
1001 && info
->first_fp_reg_save
== 64
1002 && info
->first_altivec_reg_save
== LAST_ALTIVEC_REGNO
+ 1
1003 && ! info
->lr_save_p
1004 && ! info
->cr_save_p
1005 && info
->vrsave_mask
== 0
1013 /* Returns 1 always. */
1016 any_operand (op
, mode
)
1017 rtx op ATTRIBUTE_UNUSED
;
1018 enum machine_mode mode ATTRIBUTE_UNUSED
;
1023 /* Returns 1 if op is the count register. */
1025 count_register_operand (op
, mode
)
1027 enum machine_mode mode ATTRIBUTE_UNUSED
;
1029 if (GET_CODE (op
) != REG
)
1032 if (REGNO (op
) == COUNT_REGISTER_REGNUM
)
1035 if (REGNO (op
) > FIRST_PSEUDO_REGISTER
)
1041 /* Returns 1 if op is an altivec register. */
1043 altivec_register_operand (op
, mode
)
1045 enum machine_mode mode ATTRIBUTE_UNUSED
;
1048 return (register_operand (op
, mode
)
1049 && (GET_CODE (op
) != REG
1050 || REGNO (op
) > FIRST_PSEUDO_REGISTER
1051 || ALTIVEC_REGNO_P (REGNO (op
))));
1055 xer_operand (op
, mode
)
1057 enum machine_mode mode ATTRIBUTE_UNUSED
;
1059 if (GET_CODE (op
) != REG
)
1062 if (XER_REGNO_P (REGNO (op
)))
1068 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1069 by such constants completes more quickly. */
1072 s8bit_cint_operand (op
, mode
)
1074 enum machine_mode mode ATTRIBUTE_UNUSED
;
1076 return ( GET_CODE (op
) == CONST_INT
1077 && (INTVAL (op
) >= -128 && INTVAL (op
) <= 127));
1080 /* Return 1 if OP is a constant that can fit in a D field. */
1083 short_cint_operand (op
, mode
)
1085 enum machine_mode mode ATTRIBUTE_UNUSED
;
1087 return (GET_CODE (op
) == CONST_INT
1088 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I'));
1091 /* Similar for an unsigned D field. */
1094 u_short_cint_operand (op
, mode
)
1096 enum machine_mode mode ATTRIBUTE_UNUSED
;
1098 return (GET_CODE (op
) == CONST_INT
1099 && CONST_OK_FOR_LETTER_P (INTVAL (op
) & GET_MODE_MASK (mode
), 'K'));
1102 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1105 non_short_cint_operand (op
, mode
)
1107 enum machine_mode mode ATTRIBUTE_UNUSED
;
1109 return (GET_CODE (op
) == CONST_INT
1110 && (unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x8000) >= 0x10000);
1113 /* Returns 1 if OP is a CONST_INT that is a positive value
1114 and an exact power of 2. */
1117 exact_log2_cint_operand (op
, mode
)
1119 enum machine_mode mode ATTRIBUTE_UNUSED
;
1121 return (GET_CODE (op
) == CONST_INT
1123 && exact_log2 (INTVAL (op
)) >= 0);
1126 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1130 gpc_reg_operand (op
, mode
)
1132 enum machine_mode mode
;
1134 return (register_operand (op
, mode
)
1135 && (GET_CODE (op
) != REG
1136 || (REGNO (op
) >= ARG_POINTER_REGNUM
1137 && !XER_REGNO_P (REGNO (op
)))
1138 || REGNO (op
) < MQ_REGNO
));
1141 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1145 cc_reg_operand (op
, mode
)
1147 enum machine_mode mode
;
1149 return (register_operand (op
, mode
)
1150 && (GET_CODE (op
) != REG
1151 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
1152 || CR_REGNO_P (REGNO (op
))));
1155 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1156 CR field that isn't CR0. */
1159 cc_reg_not_cr0_operand (op
, mode
)
1161 enum machine_mode mode
;
1163 return (register_operand (op
, mode
)
1164 && (GET_CODE (op
) != REG
1165 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
1166 || CR_REGNO_NOT_CR0_P (REGNO (op
))));
1169 /* Returns 1 if OP is either a constant integer valid for a D-field or
1170 a non-special register. If a register, it must be in the proper
1171 mode unless MODE is VOIDmode. */
1174 reg_or_short_operand (op
, mode
)
1176 enum machine_mode mode
;
1178 return short_cint_operand (op
, mode
) || gpc_reg_operand (op
, mode
);
1181 /* Similar, except check if the negation of the constant would be
1182 valid for a D-field. */
1185 reg_or_neg_short_operand (op
, mode
)
1187 enum machine_mode mode
;
1189 if (GET_CODE (op
) == CONST_INT
)
1190 return CONST_OK_FOR_LETTER_P (INTVAL (op
), 'P');
1192 return gpc_reg_operand (op
, mode
);
1195 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1196 a non-special register. If a register, it must be in the proper
1197 mode unless MODE is VOIDmode. */
1200 reg_or_aligned_short_operand (op
, mode
)
1202 enum machine_mode mode
;
1204 if (gpc_reg_operand (op
, mode
))
1206 else if (short_cint_operand (op
, mode
) && !(INTVAL (op
) & 3))
1213 /* Return 1 if the operand is either a register or an integer whose
1214 high-order 16 bits are zero. */
1217 reg_or_u_short_operand (op
, mode
)
1219 enum machine_mode mode
;
1221 return u_short_cint_operand (op
, mode
) || gpc_reg_operand (op
, mode
);
1224 /* Return 1 is the operand is either a non-special register or ANY
1225 constant integer. */
1228 reg_or_cint_operand (op
, mode
)
1230 enum machine_mode mode
;
1232 return (GET_CODE (op
) == CONST_INT
|| gpc_reg_operand (op
, mode
));
1235 /* Return 1 is the operand is either a non-special register or ANY
1236 32-bit signed constant integer. */
1239 reg_or_arith_cint_operand (op
, mode
)
1241 enum machine_mode mode
;
1243 return (gpc_reg_operand (op
, mode
)
1244 || (GET_CODE (op
) == CONST_INT
1245 #if HOST_BITS_PER_WIDE_INT != 32
1246 && ((unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x80000000)
1247 < (unsigned HOST_WIDE_INT
) 0x100000000ll
)
1252 /* Return 1 is the operand is either a non-special register or a 32-bit
1253 signed constant integer valid for 64-bit addition. */
1256 reg_or_add_cint64_operand (op
, mode
)
1258 enum machine_mode mode
;
1260 return (gpc_reg_operand (op
, mode
)
1261 || (GET_CODE (op
) == CONST_INT
1262 #if HOST_BITS_PER_WIDE_INT == 32
1263 && INTVAL (op
) < 0x7fff8000
1265 && ((unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x80008000)
1271 /* Return 1 is the operand is either a non-special register or a 32-bit
1272 signed constant integer valid for 64-bit subtraction. */
1275 reg_or_sub_cint64_operand (op
, mode
)
1277 enum machine_mode mode
;
1279 return (gpc_reg_operand (op
, mode
)
1280 || (GET_CODE (op
) == CONST_INT
1281 #if HOST_BITS_PER_WIDE_INT == 32
1282 && (- INTVAL (op
)) < 0x7fff8000
1284 && ((unsigned HOST_WIDE_INT
) ((- INTVAL (op
)) + 0x80008000)
1290 /* Return 1 is the operand is either a non-special register or ANY
1291 32-bit unsigned constant integer. */
1294 reg_or_logical_cint_operand (op
, mode
)
1296 enum machine_mode mode
;
1298 if (GET_CODE (op
) == CONST_INT
)
1300 if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
)
1302 if (GET_MODE_BITSIZE (mode
) <= 32)
1305 if (INTVAL (op
) < 0)
1309 return ((INTVAL (op
) & GET_MODE_MASK (mode
)
1310 & (~ (unsigned HOST_WIDE_INT
) 0xffffffff)) == 0);
1312 else if (GET_CODE (op
) == CONST_DOUBLE
)
1314 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
1318 return CONST_DOUBLE_HIGH (op
) == 0;
1321 return gpc_reg_operand (op
, mode
);
1324 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1327 got_operand (op
, mode
)
1329 enum machine_mode mode ATTRIBUTE_UNUSED
;
1331 return (GET_CODE (op
) == SYMBOL_REF
1332 || GET_CODE (op
) == CONST
1333 || GET_CODE (op
) == LABEL_REF
);
1336 /* Return 1 if the operand is a simple references that can be loaded via
1337 the GOT (labels involving addition aren't allowed). */
1340 got_no_const_operand (op
, mode
)
1342 enum machine_mode mode ATTRIBUTE_UNUSED
;
1344 return (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
);
1347 /* Return the number of instructions it takes to form a constant in an
1348 integer register. */
1351 num_insns_constant_wide (value
)
1352 HOST_WIDE_INT value
;
1354 /* signed constant loadable with {cal|addi} */
1355 if (CONST_OK_FOR_LETTER_P (value
, 'I'))
1358 /* constant loadable with {cau|addis} */
1359 else if (CONST_OK_FOR_LETTER_P (value
, 'L'))
1362 #if HOST_BITS_PER_WIDE_INT == 64
1363 else if (TARGET_POWERPC64
)
1365 HOST_WIDE_INT low
= ((value
& 0xffffffff) ^ 0x80000000) - 0x80000000;
1366 HOST_WIDE_INT high
= value
>> 31;
1368 if (high
== 0 || high
== -1)
1374 return num_insns_constant_wide (high
) + 1;
1376 return (num_insns_constant_wide (high
)
1377 + num_insns_constant_wide (low
) + 1);
1386 num_insns_constant (op
, mode
)
1388 enum machine_mode mode
;
1390 if (GET_CODE (op
) == CONST_INT
)
1392 #if HOST_BITS_PER_WIDE_INT == 64
1393 if ((INTVAL (op
) >> 31) != 0 && (INTVAL (op
) >> 31) != -1
1394 && mask64_operand (op
, mode
))
1398 return num_insns_constant_wide (INTVAL (op
));
1401 else if (GET_CODE (op
) == CONST_DOUBLE
&& mode
== SFmode
)
1406 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1407 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
1408 return num_insns_constant_wide ((HOST_WIDE_INT
) l
);
1411 else if (GET_CODE (op
) == CONST_DOUBLE
)
1417 int endian
= (WORDS_BIG_ENDIAN
== 0);
1419 if (mode
== VOIDmode
|| mode
== DImode
)
1421 high
= CONST_DOUBLE_HIGH (op
);
1422 low
= CONST_DOUBLE_LOW (op
);
1426 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1427 REAL_VALUE_TO_TARGET_DOUBLE (rv
, l
);
1429 low
= l
[1 - endian
];
1433 return (num_insns_constant_wide (low
)
1434 + num_insns_constant_wide (high
));
1438 if (high
== 0 && low
>= 0)
1439 return num_insns_constant_wide (low
);
1441 else if (high
== -1 && low
< 0)
1442 return num_insns_constant_wide (low
);
1444 else if (mask64_operand (op
, mode
))
1448 return num_insns_constant_wide (high
) + 1;
1451 return (num_insns_constant_wide (high
)
1452 + num_insns_constant_wide (low
) + 1);
1460 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1461 register with one instruction per word. We only do this if we can
1462 safely read CONST_DOUBLE_{LOW,HIGH}. */
1465 easy_fp_constant (op
, mode
)
1467 enum machine_mode mode
;
1469 if (GET_CODE (op
) != CONST_DOUBLE
1470 || GET_MODE (op
) != mode
1471 || (GET_MODE_CLASS (mode
) != MODE_FLOAT
&& mode
!= DImode
))
1474 /* Consider all constants with -msoft-float to be easy. */
1475 if ((TARGET_SOFT_FLOAT
|| !TARGET_FPRS
)
1479 /* If we are using V.4 style PIC, consider all constants to be hard. */
1480 if (flag_pic
&& DEFAULT_ABI
== ABI_V4
)
1483 #ifdef TARGET_RELOCATABLE
1484 /* Similarly if we are using -mrelocatable, consider all constants
1486 if (TARGET_RELOCATABLE
)
1495 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1496 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv
, k
);
1498 return (num_insns_constant_wide ((HOST_WIDE_INT
) k
[0]) == 1
1499 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[1]) == 1
1500 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[2]) == 1
1501 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[3]) == 1);
1504 else if (mode
== DFmode
)
1509 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1510 REAL_VALUE_TO_TARGET_DOUBLE (rv
, k
);
1512 return (num_insns_constant_wide ((HOST_WIDE_INT
) k
[0]) == 1
1513 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[1]) == 1);
1516 else if (mode
== SFmode
)
1521 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1522 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
1524 return num_insns_constant_wide (l
) == 1;
1527 else if (mode
== DImode
)
1528 return ((TARGET_POWERPC64
1529 && GET_CODE (op
) == CONST_DOUBLE
&& CONST_DOUBLE_LOW (op
) == 0)
1530 || (num_insns_constant (op
, DImode
) <= 2));
1532 else if (mode
== SImode
)
1538 /* Return nonzero if all elements of a vector have the same value. */
1541 easy_vector_same (op
, mode
)
1543 enum machine_mode mode ATTRIBUTE_UNUSED
;
1547 units
= CONST_VECTOR_NUNITS (op
);
1549 cst
= INTVAL (CONST_VECTOR_ELT (op
, 0));
1550 for (i
= 1; i
< units
; ++i
)
1551 if (INTVAL (CONST_VECTOR_ELT (op
, i
)) != cst
)
1558 /* Return 1 if the operand is a CONST_INT and can be put into a
1559 register without using memory. */
1562 easy_vector_constant (op
, mode
)
1564 enum machine_mode mode
;
1568 if (GET_CODE (op
) != CONST_VECTOR
1573 if (zero_constant (op
, mode
)
1574 && ((TARGET_ALTIVEC
&& ALTIVEC_VECTOR_MODE (mode
))
1575 || (TARGET_SPE
&& SPE_VECTOR_MODE (mode
))))
1578 if (GET_MODE_CLASS (mode
) != MODE_VECTOR_INT
)
1581 if (TARGET_SPE
&& mode
== V1DImode
)
1584 cst
= INTVAL (CONST_VECTOR_ELT (op
, 0));
1585 cst2
= INTVAL (CONST_VECTOR_ELT (op
, 1));
1587 /* Limit SPE vectors to 15 bits signed. These we can generate with:
1589 evmergelo r0, r0, r0
1592 I don't know how efficient it would be to allow bigger constants,
1593 considering we'll have an extra 'ori' for every 'li'. I doubt 5
1594 instructions is better than a 64-bit memory load, but I don't
1595 have the e500 timing specs. */
1596 if (TARGET_SPE
&& mode
== V2SImode
1597 && cst
>= -0x7fff && cst
<= 0x7fff
1598 && cst2
>= -0x7fff && cst2
<= 0x7fff)
1601 if (TARGET_ALTIVEC
&& EASY_VECTOR_15 (cst
, op
, mode
))
1604 if (TARGET_ALTIVEC
&& EASY_VECTOR_15_ADD_SELF (cst
, op
, mode
))
1610 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
1613 easy_vector_constant_add_self (op
, mode
)
1615 enum machine_mode mode
;
1619 if (!easy_vector_constant (op
, mode
))
1622 cst
= INTVAL (CONST_VECTOR_ELT (op
, 0));
1624 return TARGET_ALTIVEC
&& EASY_VECTOR_15_ADD_SELF (cst
, op
, mode
);
1628 output_vec_const_move (operands
)
1632 enum machine_mode mode
;
1638 cst
= INTVAL (CONST_VECTOR_ELT (vec
, 0));
1639 cst2
= INTVAL (CONST_VECTOR_ELT (vec
, 1));
1640 mode
= GET_MODE (dest
);
1644 if (zero_constant (vec
, mode
))
1645 return "vxor %0,%0,%0";
1646 else if (EASY_VECTOR_15 (cst
, vec
, mode
))
1648 operands
[1] = GEN_INT (cst
);
1652 return "vspltisw %0,%1";
1654 return "vspltish %0,%1";
1656 return "vspltisb %0,%1";
1661 else if (EASY_VECTOR_15_ADD_SELF (cst
, vec
, mode
))
1669 /* Vector constant 0 is handled as a splitter of V2SI, and in the
1670 pattern of V1DI, V4HI, and V2SF.
1672 FIXME: We should probabl return # and add post reload
1673 splitters for these, but this way is so easy ;-).
1675 operands
[1] = GEN_INT (cst
);
1676 operands
[2] = GEN_INT (cst2
);
1678 return "li %0,%1\n\tevmergelo %0,%0,%0";
1680 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
1686 /* Return 1 if the operand is the constant 0. This works for scalars
1687 as well as vectors. */
1689 zero_constant (op
, mode
)
1691 enum machine_mode mode
;
1693 return op
== CONST0_RTX (mode
);
1696 /* Return 1 if the operand is 0.0. */
1698 zero_fp_constant (op
, mode
)
1700 enum machine_mode mode
;
1702 return GET_MODE_CLASS (mode
) == MODE_FLOAT
&& op
== CONST0_RTX (mode
);
1705 /* Return 1 if the operand is in volatile memory. Note that during
1706 the RTL generation phase, memory_operand does not return TRUE for
1707 volatile memory references. So this function allows us to
1708 recognize volatile references where its safe. */
1711 volatile_mem_operand (op
, mode
)
1713 enum machine_mode mode
;
1715 if (GET_CODE (op
) != MEM
)
1718 if (!MEM_VOLATILE_P (op
))
1721 if (mode
!= GET_MODE (op
))
1724 if (reload_completed
)
1725 return memory_operand (op
, mode
);
1727 if (reload_in_progress
)
1728 return strict_memory_address_p (mode
, XEXP (op
, 0));
1730 return memory_address_p (mode
, XEXP (op
, 0));
1733 /* Return 1 if the operand is an offsettable memory operand. */
1736 offsettable_mem_operand (op
, mode
)
1738 enum machine_mode mode
;
1740 return ((GET_CODE (op
) == MEM
)
1741 && offsettable_address_p (reload_completed
|| reload_in_progress
,
1742 mode
, XEXP (op
, 0)));
1745 /* Return 1 if the operand is either an easy FP constant (see above) or
1749 mem_or_easy_const_operand (op
, mode
)
1751 enum machine_mode mode
;
1753 return memory_operand (op
, mode
) || easy_fp_constant (op
, mode
);
1756 /* Return 1 if the operand is either a non-special register or an item
1757 that can be used as the operand of a `mode' add insn. */
1760 add_operand (op
, mode
)
1762 enum machine_mode mode
;
1764 if (GET_CODE (op
) == CONST_INT
)
1765 return (CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I')
1766 || CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'));
1768 return gpc_reg_operand (op
, mode
);
1771 /* Return 1 if OP is a constant but not a valid add_operand. */
1774 non_add_cint_operand (op
, mode
)
1776 enum machine_mode mode ATTRIBUTE_UNUSED
;
1778 return (GET_CODE (op
) == CONST_INT
1779 && !CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I')
1780 && !CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'));
1783 /* Return 1 if the operand is a non-special register or a constant that
1784 can be used as the operand of an OR or XOR insn on the RS/6000. */
1787 logical_operand (op
, mode
)
1789 enum machine_mode mode
;
1791 HOST_WIDE_INT opl
, oph
;
1793 if (gpc_reg_operand (op
, mode
))
1796 if (GET_CODE (op
) == CONST_INT
)
1798 opl
= INTVAL (op
) & GET_MODE_MASK (mode
);
1800 #if HOST_BITS_PER_WIDE_INT <= 32
1801 if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
&& opl
< 0)
1805 else if (GET_CODE (op
) == CONST_DOUBLE
)
1807 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1810 opl
= CONST_DOUBLE_LOW (op
);
1811 oph
= CONST_DOUBLE_HIGH (op
);
1818 return ((opl
& ~ (unsigned HOST_WIDE_INT
) 0xffff) == 0
1819 || (opl
& ~ (unsigned HOST_WIDE_INT
) 0xffff0000) == 0);
1822 /* Return 1 if C is a constant that is not a logical operand (as
1823 above), but could be split into one. */
1826 non_logical_cint_operand (op
, mode
)
1828 enum machine_mode mode
;
1830 return ((GET_CODE (op
) == CONST_INT
|| GET_CODE (op
) == CONST_DOUBLE
)
1831 && ! logical_operand (op
, mode
)
1832 && reg_or_logical_cint_operand (op
, mode
));
1835 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1836 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1837 Reject all ones and all zeros, since these should have been optimized
1838 away and confuse the making of MB and ME. */
1841 mask_operand (op
, mode
)
1843 enum machine_mode mode ATTRIBUTE_UNUSED
;
1845 HOST_WIDE_INT c
, lsb
;
1847 if (GET_CODE (op
) != CONST_INT
)
1852 /* Fail in 64-bit mode if the mask wraps around because the upper
1853 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1854 if (TARGET_POWERPC64
&& (c
& 0x80000001) == 0x80000001)
1857 /* We don't change the number of transitions by inverting,
1858 so make sure we start with the LS bit zero. */
1862 /* Reject all zeros or all ones. */
1866 /* Find the first transition. */
1869 /* Invert to look for a second transition. */
1872 /* Erase first transition. */
1875 /* Find the second transition (if any). */
1878 /* Match if all the bits above are 1's (or c is zero). */
1882 /* Return 1 for the PowerPC64 rlwinm corner case. */
1885 mask_operand_wrap (op
, mode
)
1887 enum machine_mode mode ATTRIBUTE_UNUSED
;
1889 HOST_WIDE_INT c
, lsb
;
1891 if (GET_CODE (op
) != CONST_INT
)
1896 if ((c
& 0x80000001) != 0x80000001)
1910 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1911 It is if there are no more than one 1->0 or 0->1 transitions.
1912 Reject all zeros, since zero should have been optimized away and
1913 confuses the making of MB and ME. */
1916 mask64_operand (op
, mode
)
1918 enum machine_mode mode ATTRIBUTE_UNUSED
;
1920 if (GET_CODE (op
) == CONST_INT
)
1922 HOST_WIDE_INT c
, lsb
;
1926 /* Reject all zeros. */
1930 /* We don't change the number of transitions by inverting,
1931 so make sure we start with the LS bit zero. */
1935 /* Find the transition, and check that all bits above are 1's. */
1938 /* Match if all the bits above are 1's (or c is zero). */
1944 /* Like mask64_operand, but allow up to three transitions. This
1945 predicate is used by insn patterns that generate two rldicl or
1946 rldicr machine insns. */
1949 mask64_2_operand (op
, mode
)
1951 enum machine_mode mode ATTRIBUTE_UNUSED
;
1953 if (GET_CODE (op
) == CONST_INT
)
1955 HOST_WIDE_INT c
, lsb
;
1959 /* Disallow all zeros. */
1963 /* We don't change the number of transitions by inverting,
1964 so make sure we start with the LS bit zero. */
1968 /* Find the first transition. */
1971 /* Invert to look for a second transition. */
1974 /* Erase first transition. */
1977 /* Find the second transition. */
1980 /* Invert to look for a third transition. */
1983 /* Erase second transition. */
1986 /* Find the third transition (if any). */
1989 /* Match if all the bits above are 1's (or c is zero). */
1995 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
1996 implement ANDing by the mask IN. */
1998 build_mask64_2_operands (in
, out
)
2002 #if HOST_BITS_PER_WIDE_INT >= 64
2003 unsigned HOST_WIDE_INT c
, lsb
, m1
, m2
;
2006 if (GET_CODE (in
) != CONST_INT
)
2012 /* Assume c initially something like 0x00fff000000fffff. The idea
2013 is to rotate the word so that the middle ^^^^^^ group of zeros
2014 is at the MS end and can be cleared with an rldicl mask. We then
2015 rotate back and clear off the MS ^^ group of zeros with a
2017 c
= ~c
; /* c == 0xff000ffffff00000 */
2018 lsb
= c
& -c
; /* lsb == 0x0000000000100000 */
2019 m1
= -lsb
; /* m1 == 0xfffffffffff00000 */
2020 c
= ~c
; /* c == 0x00fff000000fffff */
2021 c
&= -lsb
; /* c == 0x00fff00000000000 */
2022 lsb
= c
& -c
; /* lsb == 0x0000100000000000 */
2023 c
= ~c
; /* c == 0xff000fffffffffff */
2024 c
&= -lsb
; /* c == 0xff00000000000000 */
2026 while ((lsb
>>= 1) != 0)
2027 shift
++; /* shift == 44 on exit from loop */
2028 m1
<<= 64 - shift
; /* m1 == 0xffffff0000000000 */
2029 m1
= ~m1
; /* m1 == 0x000000ffffffffff */
2030 m2
= ~c
; /* m2 == 0x00ffffffffffffff */
2034 /* Assume c initially something like 0xff000f0000000000. The idea
2035 is to rotate the word so that the ^^^ middle group of zeros
2036 is at the LS end and can be cleared with an rldicr mask. We then
2037 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2039 lsb
= c
& -c
; /* lsb == 0x0000010000000000 */
2040 m2
= -lsb
; /* m2 == 0xffffff0000000000 */
2041 c
= ~c
; /* c == 0x00fff0ffffffffff */
2042 c
&= -lsb
; /* c == 0x00fff00000000000 */
2043 lsb
= c
& -c
; /* lsb == 0x0000100000000000 */
2044 c
= ~c
; /* c == 0xff000fffffffffff */
2045 c
&= -lsb
; /* c == 0xff00000000000000 */
2047 while ((lsb
>>= 1) != 0)
2048 shift
++; /* shift == 44 on exit from loop */
2049 m1
= ~c
; /* m1 == 0x00ffffffffffffff */
2050 m1
>>= shift
; /* m1 == 0x0000000000000fff */
2051 m1
= ~m1
; /* m1 == 0xfffffffffffff000 */
2054 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2055 masks will be all 1's. We are guaranteed more than one transition. */
2056 out
[0] = GEN_INT (64 - shift
);
2057 out
[1] = GEN_INT (m1
);
2058 out
[2] = GEN_INT (shift
);
2059 out
[3] = GEN_INT (m2
);
2067 /* Return 1 if the operand is either a non-special register or a constant
2068 that can be used as the operand of a PowerPC64 logical AND insn. */
2071 and64_operand (op
, mode
)
2073 enum machine_mode mode
;
2075 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
2076 return (gpc_reg_operand (op
, mode
) || mask64_operand (op
, mode
));
2078 return (logical_operand (op
, mode
) || mask64_operand (op
, mode
));
2081 /* Like the above, but also match constants that can be implemented
2082 with two rldicl or rldicr insns. */
2085 and64_2_operand (op
, mode
)
2087 enum machine_mode mode
;
2089 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
2090 return gpc_reg_operand (op
, mode
) || mask64_2_operand (op
, mode
);
2092 return logical_operand (op
, mode
) || mask64_2_operand (op
, mode
);
2095 /* Return 1 if the operand is either a non-special register or a
2096 constant that can be used as the operand of an RS/6000 logical AND insn. */
2099 and_operand (op
, mode
)
2101 enum machine_mode mode
;
2103 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
2104 return (gpc_reg_operand (op
, mode
) || mask_operand (op
, mode
));
2106 return (logical_operand (op
, mode
) || mask_operand (op
, mode
));
2109 /* Return 1 if the operand is a general register or memory operand. */
2112 reg_or_mem_operand (op
, mode
)
2114 enum machine_mode mode
;
2116 return (gpc_reg_operand (op
, mode
)
2117 || memory_operand (op
, mode
)
2118 || volatile_mem_operand (op
, mode
));
2121 /* Return 1 if the operand is a general register or memory operand without
2122 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2126 lwa_operand (op
, mode
)
2128 enum machine_mode mode
;
2132 if (reload_completed
&& GET_CODE (inner
) == SUBREG
)
2133 inner
= SUBREG_REG (inner
);
2135 return gpc_reg_operand (inner
, mode
)
2136 || (memory_operand (inner
, mode
)
2137 && GET_CODE (XEXP (inner
, 0)) != PRE_INC
2138 && GET_CODE (XEXP (inner
, 0)) != PRE_DEC
2139 && (GET_CODE (XEXP (inner
, 0)) != PLUS
2140 || GET_CODE (XEXP (XEXP (inner
, 0), 1)) != CONST_INT
2141 || INTVAL (XEXP (XEXP (inner
, 0), 1)) % 4 == 0));
2144 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2147 symbol_ref_operand (op
, mode
)
2149 enum machine_mode mode
;
2151 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
2154 return (GET_CODE (op
) == SYMBOL_REF
2155 && (DEFAULT_ABI
!= ABI_AIX
|| SYMBOL_REF_FUNCTION_P (op
)));
2158 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2159 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2162 call_operand (op
, mode
)
2164 enum machine_mode mode
;
2166 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
2169 return (GET_CODE (op
) == SYMBOL_REF
2170 || (GET_CODE (op
) == REG
2171 && (REGNO (op
) == LINK_REGISTER_REGNUM
2172 || REGNO (op
) == COUNT_REGISTER_REGNUM
2173 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
)));
2176 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2180 current_file_function_operand (op
, mode
)
2182 enum machine_mode mode ATTRIBUTE_UNUSED
;
2184 return (GET_CODE (op
) == SYMBOL_REF
2185 && (DEFAULT_ABI
!= ABI_AIX
|| SYMBOL_REF_FUNCTION_P (op
))
2186 && (SYMBOL_REF_LOCAL_P (op
)
2187 || (op
== XEXP (DECL_RTL (current_function_decl
), 0))));
2190 /* Return 1 if this operand is a valid input for a move insn. */
2193 input_operand (op
, mode
)
2195 enum machine_mode mode
;
2197 /* Memory is always valid. */
2198 if (memory_operand (op
, mode
))
2201 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
2202 if (GET_CODE (op
) == CONSTANT_P_RTX
)
2205 /* For floating-point, easy constants are valid. */
2206 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
2208 && easy_fp_constant (op
, mode
))
2211 /* Allow any integer constant. */
2212 if (GET_MODE_CLASS (mode
) == MODE_INT
2213 && (GET_CODE (op
) == CONST_INT
2214 || GET_CODE (op
) == CONST_DOUBLE
))
2217 /* Allow easy vector constants. */
2218 if (GET_CODE (op
) == CONST_VECTOR
2219 && easy_vector_constant (op
, mode
))
2222 /* For floating-point or multi-word mode, the only remaining valid type
2224 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
2225 || GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2226 return register_operand (op
, mode
);
2228 /* The only cases left are integral modes one word or smaller (we
2229 do not get called for MODE_CC values). These can be in any
2231 if (register_operand (op
, mode
))
2234 /* A SYMBOL_REF referring to the TOC is valid. */
2235 if (legitimate_constant_pool_address_p (op
))
2238 /* A constant pool expression (relative to the TOC) is valid */
2239 if (toc_relative_expr_p (op
))
2242 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2244 if (DEFAULT_ABI
== ABI_V4
2245 && (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == CONST
)
2246 && small_data_operand (op
, Pmode
))
2252 /* Return 1 for an operand in small memory on V.4/eabi. */
2255 small_data_operand (op
, mode
)
2256 rtx op ATTRIBUTE_UNUSED
;
2257 enum machine_mode mode ATTRIBUTE_UNUSED
;
2262 if (rs6000_sdata
== SDATA_NONE
|| rs6000_sdata
== SDATA_DATA
)
2265 if (DEFAULT_ABI
!= ABI_V4
)
2268 if (GET_CODE (op
) == SYMBOL_REF
)
2271 else if (GET_CODE (op
) != CONST
2272 || GET_CODE (XEXP (op
, 0)) != PLUS
2273 || GET_CODE (XEXP (XEXP (op
, 0), 0)) != SYMBOL_REF
2274 || GET_CODE (XEXP (XEXP (op
, 0), 1)) != CONST_INT
)
2279 rtx sum
= XEXP (op
, 0);
2280 HOST_WIDE_INT summand
;
2282 /* We have to be careful here, because it is the referenced address
2283 that must be 32k from _SDA_BASE_, not just the symbol. */
2284 summand
= INTVAL (XEXP (sum
, 1));
2285 if (summand
< 0 || (unsigned HOST_WIDE_INT
) summand
> g_switch_value
)
2288 sym_ref
= XEXP (sum
, 0);
2291 return SYMBOL_REF_SMALL_P (sym_ref
);
2297 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2300 constant_pool_expr_1 (op
, have_sym
, have_toc
)
2305 switch (GET_CODE(op
))
2308 if (RS6000_SYMBOL_REF_TLS_P (op
))
2310 else if (CONSTANT_POOL_ADDRESS_P (op
))
2312 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op
), Pmode
))
2320 else if (! strcmp (XSTR (op
, 0), toc_label_name
))
2329 return (constant_pool_expr_1 (XEXP (op
, 0), have_sym
, have_toc
)
2330 && constant_pool_expr_1 (XEXP (op
, 1), have_sym
, have_toc
));
2332 return constant_pool_expr_1 (XEXP (op
, 0), have_sym
, have_toc
);
2341 constant_pool_expr_p (op
)
2346 return constant_pool_expr_1 (op
, &have_sym
, &have_toc
) && have_sym
;
2350 toc_relative_expr_p (op
)
2355 return constant_pool_expr_1 (op
, &have_sym
, &have_toc
) && have_toc
;
2358 /* SPE offset addressing is limited to 5-bits worth of double words. */
2359 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2362 legitimate_constant_pool_address_p (x
)
2366 && GET_CODE (x
) == PLUS
2367 && GET_CODE (XEXP (x
, 0)) == REG
2368 && (TARGET_MINIMAL_TOC
|| REGNO (XEXP (x
, 0)) == TOC_REGISTER
)
2369 && constant_pool_expr_p (XEXP (x
, 1)));
2373 legitimate_small_data_p (mode
, x
)
2374 enum machine_mode mode
;
2377 return (DEFAULT_ABI
== ABI_V4
2378 && !flag_pic
&& !TARGET_TOC
2379 && (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == CONST
)
2380 && small_data_operand (x
, mode
));
2384 legitimate_offset_address_p (mode
, x
, strict
)
2385 enum machine_mode mode
;
2389 unsigned HOST_WIDE_INT offset
, extra
;
2391 if (GET_CODE (x
) != PLUS
)
2393 if (GET_CODE (XEXP (x
, 0)) != REG
)
2395 if (!INT_REG_OK_FOR_BASE_P (XEXP (x
, 0), strict
))
2397 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
2400 offset
= INTVAL (XEXP (x
, 1));
2408 /* AltiVec vector modes. Only reg+reg addressing is valid here,
2409 which leaves the only valid constant offset of zero, which by
2410 canonicalization rules is also invalid. */
2417 /* SPE vector modes. */
2418 return SPE_CONST_OFFSET_OK (offset
);
2424 else if (offset
& 3)
2432 else if (offset
& 3)
2442 return (offset
+ extra
>= offset
) && (offset
+ extra
+ 0x8000 < 0x10000);
2446 legitimate_indexed_address_p (x
, strict
)
2452 if (GET_CODE (x
) != PLUS
)
2457 if (!REG_P (op0
) || !REG_P (op1
))
2460 return ((INT_REG_OK_FOR_BASE_P (op0
, strict
)
2461 && INT_REG_OK_FOR_INDEX_P (op1
, strict
))
2462 || (INT_REG_OK_FOR_BASE_P (op1
, strict
)
2463 && INT_REG_OK_FOR_INDEX_P (op0
, strict
)));
2467 legitimate_indirect_address_p (x
, strict
)
2471 return GET_CODE (x
) == REG
&& INT_REG_OK_FOR_BASE_P (x
, strict
);
2475 legitimate_lo_sum_address_p (mode
, x
, strict
)
2476 enum machine_mode mode
;
2480 if (GET_CODE (x
) != LO_SUM
)
2482 if (GET_CODE (XEXP (x
, 0)) != REG
)
2484 if (!INT_REG_OK_FOR_BASE_P (XEXP (x
, 0), strict
))
2490 if (DEFAULT_ABI
!= ABI_AIX
&& flag_pic
)
2494 if (GET_MODE_NUNITS (mode
) != 1)
2496 if (GET_MODE_BITSIZE (mode
) > 32
2497 && !(TARGET_HARD_FLOAT
&& TARGET_FPRS
&& mode
== DFmode
))
2500 return CONSTANT_P (x
);
2507 /* Try machine-dependent ways of modifying an illegitimate address
2508 to be legitimate. If we find one, return the new, valid address.
2509 This is used from only one place: `memory_address' in explow.c.
2511 OLDX is the address as it was before break_out_memory_refs was
2512 called. In some cases it is useful to look at this to decide what
2515 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2517 It is always safe for this function to do nothing. It exists to
2518 recognize opportunities to optimize the output.
2520 On RS/6000, first check for the sum of a register with a constant
2521 integer that is out of range. If so, generate code to add the
2522 constant with the low-order 16 bits masked to the register and force
2523 this result into another register (this can be done with `cau').
2524 Then generate an address of REG+(CONST&0xffff), allowing for the
2525 possibility of bit 16 being a one.
2527 Then check for the sum of a register and something not constant, try to
2528 load the other things into a register and return the sum. */
2531 rs6000_legitimize_address (x
, oldx
, mode
)
2533 rtx oldx ATTRIBUTE_UNUSED
;
2534 enum machine_mode mode
;
2536 if (GET_CODE (x
) == SYMBOL_REF
)
2538 enum tls_model model
= SYMBOL_REF_TLS_MODEL (x
);
2540 return rs6000_legitimize_tls_address (x
, model
);
2543 if (GET_CODE (x
) == PLUS
2544 && GET_CODE (XEXP (x
, 0)) == REG
2545 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2546 && (unsigned HOST_WIDE_INT
) (INTVAL (XEXP (x
, 1)) + 0x8000) >= 0x10000)
2548 HOST_WIDE_INT high_int
, low_int
;
2550 low_int
= ((INTVAL (XEXP (x
, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2551 high_int
= INTVAL (XEXP (x
, 1)) - low_int
;
2552 sum
= force_operand (gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
2553 GEN_INT (high_int
)), 0);
2554 return gen_rtx_PLUS (Pmode
, sum
, GEN_INT (low_int
));
2556 else if (GET_CODE (x
) == PLUS
2557 && GET_CODE (XEXP (x
, 0)) == REG
2558 && GET_CODE (XEXP (x
, 1)) != CONST_INT
2559 && GET_MODE_NUNITS (mode
) == 1
2560 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
)
2562 || (mode
!= DFmode
&& mode
!= TFmode
))
2563 && (TARGET_POWERPC64
|| mode
!= DImode
)
2566 return gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
2567 force_reg (Pmode
, force_operand (XEXP (x
, 1), 0)));
2569 else if (ALTIVEC_VECTOR_MODE (mode
))
2573 /* Make sure both operands are registers. */
2574 if (GET_CODE (x
) == PLUS
)
2575 return gen_rtx_PLUS (Pmode
, force_reg (Pmode
, XEXP (x
, 0)),
2576 force_reg (Pmode
, XEXP (x
, 1)));
2578 reg
= force_reg (Pmode
, x
);
2581 else if (SPE_VECTOR_MODE (mode
))
2583 /* We accept [reg + reg] and [reg + OFFSET]. */
2585 if (GET_CODE (x
) == PLUS
)
2587 rtx op1
= XEXP (x
, 0);
2588 rtx op2
= XEXP (x
, 1);
2590 op1
= force_reg (Pmode
, op1
);
2592 if (GET_CODE (op2
) != REG
2593 && (GET_CODE (op2
) != CONST_INT
2594 || !SPE_CONST_OFFSET_OK (INTVAL (op2
))))
2595 op2
= force_reg (Pmode
, op2
);
2597 return gen_rtx_PLUS (Pmode
, op1
, op2
);
2600 return force_reg (Pmode
, x
);
2606 && GET_CODE (x
) != CONST_INT
2607 && GET_CODE (x
) != CONST_DOUBLE
2609 && GET_MODE_NUNITS (mode
) == 1
2610 && (GET_MODE_BITSIZE (mode
) <= 32
2611 || ((TARGET_HARD_FLOAT
&& TARGET_FPRS
) && mode
== DFmode
)))
2613 rtx reg
= gen_reg_rtx (Pmode
);
2614 emit_insn (gen_elf_high (reg
, (x
)));
2615 return gen_rtx_LO_SUM (Pmode
, reg
, (x
));
2617 else if (TARGET_MACHO
&& TARGET_32BIT
&& TARGET_NO_TOC
2620 && ! MACHO_DYNAMIC_NO_PIC_P
2622 && GET_CODE (x
) != CONST_INT
2623 && GET_CODE (x
) != CONST_DOUBLE
2625 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
) || mode
!= DFmode
)
2629 rtx reg
= gen_reg_rtx (Pmode
);
2630 emit_insn (gen_macho_high (reg
, (x
)));
2631 return gen_rtx_LO_SUM (Pmode
, reg
, (x
));
2634 && constant_pool_expr_p (x
)
2635 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x
), Pmode
))
2637 return create_TOC_reference (x
);
2643 /* Construct the SYMBOL_REF for the tls_get_addr function. */
2645 static GTY(()) rtx rs6000_tls_symbol
;
2647 rs6000_tls_get_addr ()
2649 if (!rs6000_tls_symbol
)
2650 rs6000_tls_symbol
= init_one_libfunc ("__tls_get_addr");
2652 return rs6000_tls_symbol
;
2655 /* Construct the SYMBOL_REF for TLS GOT references. */
2657 static GTY(()) rtx rs6000_got_symbol
;
2661 if (!rs6000_got_symbol
)
2663 rs6000_got_symbol
= gen_rtx_SYMBOL_REF (Pmode
, "_GLOBAL_OFFSET_TABLE_");
2664 SYMBOL_REF_FLAGS (rs6000_got_symbol
) |= SYMBOL_FLAG_LOCAL
;
2665 SYMBOL_REF_FLAGS (rs6000_got_symbol
) |= SYMBOL_FLAG_EXTERNAL
;
2668 return rs6000_got_symbol
;
2671 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2672 this (thread-local) address. */
2675 rs6000_legitimize_tls_address (addr
, model
)
2677 enum tls_model model
;
2681 dest
= gen_reg_rtx (Pmode
);
2682 if (model
== TLS_MODEL_LOCAL_EXEC
&& rs6000_tls_size
== 16)
2688 tlsreg
= gen_rtx_REG (Pmode
, 13);
2689 insn
= gen_tls_tprel_64 (dest
, tlsreg
, addr
);
2693 tlsreg
= gen_rtx_REG (Pmode
, 2);
2694 insn
= gen_tls_tprel_32 (dest
, tlsreg
, addr
);
2698 else if (model
== TLS_MODEL_LOCAL_EXEC
&& rs6000_tls_size
== 32)
2702 tmp
= gen_reg_rtx (Pmode
);
2705 tlsreg
= gen_rtx_REG (Pmode
, 13);
2706 insn
= gen_tls_tprel_ha_64 (tmp
, tlsreg
, addr
);
2710 tlsreg
= gen_rtx_REG (Pmode
, 2);
2711 insn
= gen_tls_tprel_ha_32 (tmp
, tlsreg
, addr
);
2715 insn
= gen_tls_tprel_lo_64 (dest
, tmp
, addr
);
2717 insn
= gen_tls_tprel_lo_32 (dest
, tmp
, addr
);
2722 rtx r3
, got
, tga
, tmp1
, tmp2
, eqv
;
2725 got
= gen_rtx_REG (Pmode
, TOC_REGISTER
);
2729 got
= gen_rtx_REG (Pmode
, RS6000_PIC_OFFSET_TABLE_REGNUM
);
2732 rtx gsym
= rs6000_got_sym ();
2733 got
= gen_reg_rtx (Pmode
);
2735 rs6000_emit_move (got
, gsym
, Pmode
);
2739 static int tls_got_labelno
= 0;
2740 rtx tempLR
, lab
, tmp3
, mem
;
2743 ASM_GENERATE_INTERNAL_LABEL (buf
, "LTLS", tls_got_labelno
++);
2744 lab
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
2745 tempLR
= gen_reg_rtx (Pmode
);
2746 tmp1
= gen_reg_rtx (Pmode
);
2747 tmp2
= gen_reg_rtx (Pmode
);
2748 tmp3
= gen_reg_rtx (Pmode
);
2749 mem
= gen_rtx_MEM (Pmode
, tmp1
);
2750 RTX_UNCHANGING_P (mem
) = 1;
2752 first
= emit_insn (gen_load_toc_v4_PIC_1b (tempLR
, lab
,
2754 emit_move_insn (tmp1
, tempLR
);
2755 emit_move_insn (tmp2
, mem
);
2756 emit_insn (gen_addsi3 (tmp3
, tmp1
, tmp2
));
2757 last
= emit_move_insn (got
, tmp3
);
2758 REG_NOTES (last
) = gen_rtx_EXPR_LIST (REG_EQUAL
, gsym
,
2760 REG_NOTES (first
) = gen_rtx_INSN_LIST (REG_LIBCALL
, last
,
2762 REG_NOTES (last
) = gen_rtx_INSN_LIST (REG_RETVAL
, first
,
2768 if (model
== TLS_MODEL_GLOBAL_DYNAMIC
)
2770 r3
= gen_rtx_REG (Pmode
, 3);
2772 insn
= gen_tls_gd_64 (r3
, got
, addr
);
2774 insn
= gen_tls_gd_32 (r3
, got
, addr
);
2777 tga
= gen_rtx_MEM (Pmode
, rs6000_tls_get_addr ());
2778 insn
= gen_call_value (r3
, tga
, const0_rtx
, const0_rtx
);
2779 insn
= emit_call_insn (insn
);
2780 CONST_OR_PURE_CALL_P (insn
) = 1;
2781 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), r3
);
2782 insn
= get_insns ();
2784 emit_libcall_block (insn
, dest
, r3
, addr
);
2786 else if (model
== TLS_MODEL_LOCAL_DYNAMIC
)
2788 r3
= gen_rtx_REG (Pmode
, 3);
2790 insn
= gen_tls_ld_64 (r3
, got
);
2792 insn
= gen_tls_ld_32 (r3
, got
);
2795 tga
= gen_rtx_MEM (Pmode
, rs6000_tls_get_addr ());
2796 insn
= gen_call_value (r3
, tga
, const0_rtx
, const0_rtx
);
2797 insn
= emit_call_insn (insn
);
2798 CONST_OR_PURE_CALL_P (insn
) = 1;
2799 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), r3
);
2800 insn
= get_insns ();
2802 tmp1
= gen_reg_rtx (Pmode
);
2803 eqv
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, const0_rtx
),
2805 emit_libcall_block (insn
, tmp1
, r3
, eqv
);
2806 if (rs6000_tls_size
== 16)
2809 insn
= gen_tls_dtprel_64 (dest
, tmp1
, addr
);
2811 insn
= gen_tls_dtprel_32 (dest
, tmp1
, addr
);
2813 else if (rs6000_tls_size
== 32)
2815 tmp2
= gen_reg_rtx (Pmode
);
2817 insn
= gen_tls_dtprel_ha_64 (tmp2
, tmp1
, addr
);
2819 insn
= gen_tls_dtprel_ha_32 (tmp2
, tmp1
, addr
);
2822 insn
= gen_tls_dtprel_lo_64 (dest
, tmp2
, addr
);
2824 insn
= gen_tls_dtprel_lo_32 (dest
, tmp2
, addr
);
2828 tmp2
= gen_reg_rtx (Pmode
);
2830 insn
= gen_tls_got_dtprel_64 (tmp2
, got
, addr
);
2832 insn
= gen_tls_got_dtprel_32 (tmp2
, got
, addr
);
2834 insn
= gen_rtx_SET (Pmode
, dest
,
2835 gen_rtx_PLUS (Pmode
, tmp2
, tmp1
));
2841 /* IE, or 64 bit offset LE. */
2842 tmp2
= gen_reg_rtx (Pmode
);
2844 insn
= gen_tls_got_tprel_64 (tmp2
, got
, addr
);
2846 insn
= gen_tls_got_tprel_32 (tmp2
, got
, addr
);
2849 insn
= gen_tls_tls_64 (dest
, tmp2
, addr
);
2851 insn
= gen_tls_tls_32 (dest
, tmp2
, addr
);
2859 /* Return 1 if X is a SYMBOL_REF for a TLS symbol. This is used in
2860 instruction definitions. */
2863 rs6000_tls_symbol_ref (x
, mode
)
2865 enum machine_mode mode ATTRIBUTE_UNUSED
;
2867 return RS6000_SYMBOL_REF_TLS_P (x
);
2870 /* Return 1 if X contains a thread-local symbol. */
2873 rs6000_tls_referenced_p (x
)
2876 return for_each_rtx (&x
, &rs6000_tls_symbol_ref_1
, 0);
2879 /* Return 1 if *X is a thread-local symbol. This is the same as
2880 rs6000_tls_symbol_ref except for the type of the unused argument. */
2883 rs6000_tls_symbol_ref_1 (x
, data
)
2885 void *data ATTRIBUTE_UNUSED
;
2887 return RS6000_SYMBOL_REF_TLS_P (*x
);
2890 /* The convention appears to be to define this wherever it is used.
2891 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2892 is now used here. */
2893 #ifndef REG_MODE_OK_FOR_BASE_P
2894 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2897 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2898 replace the input X, or the original X if no replacement is called for.
2899 The output parameter *WIN is 1 if the calling macro should goto WIN,
2902 For RS/6000, we wish to handle large displacements off a base
2903 register by splitting the addend across an addiu/addis and the mem insn.
2904 This cuts number of extra insns needed from 3 to 1.
2906 On Darwin, we use this to generate code for floating point constants.
2907 A movsf_low is generated so we wind up with 2 instructions rather than 3.
2908 The Darwin code is inside #if TARGET_MACHO because only then is
2909 machopic_function_base_name() defined. */
2911 rs6000_legitimize_reload_address (x
, mode
, opnum
, type
, ind_levels
, win
)
2913 enum machine_mode mode
;
2916 int ind_levels ATTRIBUTE_UNUSED
;
2919 /* We must recognize output that we have already generated ourselves. */
2920 if (GET_CODE (x
) == PLUS
2921 && GET_CODE (XEXP (x
, 0)) == PLUS
2922 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == REG
2923 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
2924 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2926 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2927 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
2928 opnum
, (enum reload_type
)type
);
2934 if (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
2935 && GET_CODE (x
) == LO_SUM
2936 && GET_CODE (XEXP (x
, 0)) == PLUS
2937 && XEXP (XEXP (x
, 0), 0) == pic_offset_table_rtx
2938 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == HIGH
2939 && GET_CODE (XEXP (XEXP (XEXP (x
, 0), 1), 0)) == CONST
2940 && XEXP (XEXP (XEXP (x
, 0), 1), 0) == XEXP (x
, 1)
2941 && GET_CODE (XEXP (XEXP (x
, 1), 0)) == MINUS
2942 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 0)) == SYMBOL_REF
2943 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 1)) == SYMBOL_REF
)
2945 /* Result of previous invocation of this function on Darwin
2946 floating point constant. */
2947 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2948 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
2949 opnum
, (enum reload_type
)type
);
2954 if (GET_CODE (x
) == PLUS
2955 && GET_CODE (XEXP (x
, 0)) == REG
2956 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
2957 && REG_MODE_OK_FOR_BASE_P (XEXP (x
, 0), mode
)
2958 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2959 && !SPE_VECTOR_MODE (mode
)
2960 && !ALTIVEC_VECTOR_MODE (mode
))
2962 HOST_WIDE_INT val
= INTVAL (XEXP (x
, 1));
2963 HOST_WIDE_INT low
= ((val
& 0xffff) ^ 0x8000) - 0x8000;
2965 = (((val
- low
) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2967 /* Check for 32-bit overflow. */
2968 if (high
+ low
!= val
)
2974 /* Reload the high part into a base reg; leave the low part
2975 in the mem directly. */
2977 x
= gen_rtx_PLUS (GET_MODE (x
),
2978 gen_rtx_PLUS (GET_MODE (x
), XEXP (x
, 0),
2982 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2983 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
2984 opnum
, (enum reload_type
)type
);
2989 if (GET_CODE (x
) == SYMBOL_REF
2990 && DEFAULT_ABI
== ABI_DARWIN
2991 && !ALTIVEC_VECTOR_MODE (mode
)
2994 /* Darwin load of floating point constant. */
2995 rtx offset
= gen_rtx (CONST
, Pmode
,
2996 gen_rtx (MINUS
, Pmode
, x
,
2997 gen_rtx (SYMBOL_REF
, Pmode
,
2998 machopic_function_base_name ())));
2999 x
= gen_rtx (LO_SUM
, GET_MODE (x
),
3000 gen_rtx (PLUS
, Pmode
, pic_offset_table_rtx
,
3001 gen_rtx (HIGH
, Pmode
, offset
)), offset
);
3002 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
3003 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
3004 opnum
, (enum reload_type
)type
);
3008 if (GET_CODE (x
) == SYMBOL_REF
3009 && DEFAULT_ABI
== ABI_DARWIN
3010 && !ALTIVEC_VECTOR_MODE (mode
)
3011 && MACHO_DYNAMIC_NO_PIC_P
)
3013 /* Darwin load of floating point constant. */
3014 x
= gen_rtx (LO_SUM
, GET_MODE (x
),
3015 gen_rtx (HIGH
, Pmode
, x
), x
);
3016 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
3017 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
3018 opnum
, (enum reload_type
)type
);
3024 && constant_pool_expr_p (x
)
3025 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x
), mode
))
3027 (x
) = create_TOC_reference (x
);
3035 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3036 that is a valid memory address for an instruction.
3037 The MODE argument is the machine mode for the MEM expression
3038 that wants to use this address.
3040 On the RS/6000, there are four valid address: a SYMBOL_REF that
3041 refers to a constant pool entry of an address (or the sum of it
3042 plus a constant), a short (16-bit signed) constant plus a register,
3043 the sum of two registers, or a register indirect, possibly with an
3044 auto-increment. For DFmode and DImode with a constant plus register,
3045 we must ensure that both words are addressable or PowerPC64 with offset
3048 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3049 32-bit DImode, TImode), indexed addressing cannot be used because
3050 adjacent memory cells are accessed by adding word-sized offsets
3051 during assembly output. */
3053 rs6000_legitimate_address (mode
, x
, reg_ok_strict
)
3054 enum machine_mode mode
;
3058 if (RS6000_SYMBOL_REF_TLS_P (x
))
3060 if (legitimate_indirect_address_p (x
, reg_ok_strict
))
3062 if ((GET_CODE (x
) == PRE_INC
|| GET_CODE (x
) == PRE_DEC
)
3063 && !ALTIVEC_VECTOR_MODE (mode
)
3064 && !SPE_VECTOR_MODE (mode
)
3066 && legitimate_indirect_address_p (XEXP (x
, 0), reg_ok_strict
))
3068 if (legitimate_small_data_p (mode
, x
))
3070 if (legitimate_constant_pool_address_p (x
))
3072 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3074 && GET_CODE (x
) == PLUS
3075 && GET_CODE (XEXP (x
, 0)) == REG
3076 && XEXP (x
, 0) == virtual_stack_vars_rtx
3077 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
3079 if (legitimate_offset_address_p (mode
, x
, reg_ok_strict
))
3082 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3084 || (mode
!= DFmode
&& mode
!= TFmode
))
3085 && (TARGET_POWERPC64
|| mode
!= DImode
)
3086 && legitimate_indexed_address_p (x
, reg_ok_strict
))
3088 if (legitimate_lo_sum_address_p (mode
, x
, reg_ok_strict
))
3093 /* Go to LABEL if ADDR (a legitimate address expression)
3094 has an effect that depends on the machine mode it is used for.
3096 On the RS/6000 this is true of all integral offsets (since AltiVec
3097 modes don't allow them) or is a pre-increment or decrement.
3099 ??? Except that due to conceptual problems in offsettable_address_p
3100 we can't really report the problems of integral offsets. So leave
3101 this assuming that the adjustable offset must be valid for the
3102 sub-words of a TFmode operand, which is what we had before. */
3105 rs6000_mode_dependent_address (addr
)
3108 switch (GET_CODE (addr
))
3111 if (GET_CODE (XEXP (addr
, 1)) == CONST_INT
)
3113 unsigned HOST_WIDE_INT val
= INTVAL (XEXP (addr
, 1));
3114 return val
+ 12 + 0x8000 >= 0x10000;
3123 return TARGET_UPDATE
;
3132 /* Try to output insns to set TARGET equal to the constant C if it can
3133 be done in less than N insns. Do all computations in MODE.
3134 Returns the place where the output has been placed if it can be
3135 done and the insns have been emitted. If it would take more than N
3136 insns, zero is returned and no insns and emitted. */
3139 rs6000_emit_set_const (dest
, mode
, source
, n
)
3141 enum machine_mode mode
;
3142 int n ATTRIBUTE_UNUSED
;
3144 rtx result
, insn
, set
;
3145 HOST_WIDE_INT c0
, c1
;
3147 if (mode
== QImode
|| mode
== HImode
)
3150 dest
= gen_reg_rtx (mode
);
3151 emit_insn (gen_rtx_SET (VOIDmode
, dest
, source
));
3154 else if (mode
== SImode
)
3156 result
= no_new_pseudos
? dest
: gen_reg_rtx (SImode
);
3158 emit_insn (gen_rtx_SET (VOIDmode
, result
,
3159 GEN_INT (INTVAL (source
)
3160 & (~ (HOST_WIDE_INT
) 0xffff))));
3161 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
3162 gen_rtx_IOR (SImode
, result
,
3163 GEN_INT (INTVAL (source
) & 0xffff))));
3166 else if (mode
== DImode
)
3168 if (GET_CODE (source
) == CONST_INT
)
3170 c0
= INTVAL (source
);
3173 else if (GET_CODE (source
) == CONST_DOUBLE
)
3175 #if HOST_BITS_PER_WIDE_INT >= 64
3176 c0
= CONST_DOUBLE_LOW (source
);
3179 c0
= CONST_DOUBLE_LOW (source
);
3180 c1
= CONST_DOUBLE_HIGH (source
);
3186 result
= rs6000_emit_set_long_const (dest
, c0
, c1
);
3191 insn
= get_last_insn ();
3192 set
= single_set (insn
);
3193 if (! CONSTANT_P (SET_SRC (set
)))
3194 set_unique_reg_note (insn
, REG_EQUAL
, source
);
3199 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3200 fall back to a straight forward decomposition. We do this to avoid
3201 exponential run times encountered when looking for longer sequences
3202 with rs6000_emit_set_const. */
3204 rs6000_emit_set_long_const (dest
, c1
, c2
)
3206 HOST_WIDE_INT c1
, c2
;
3208 if (!TARGET_POWERPC64
)
3210 rtx operand1
, operand2
;
3212 operand1
= operand_subword_force (dest
, WORDS_BIG_ENDIAN
== 0,
3214 operand2
= operand_subword_force (dest
, WORDS_BIG_ENDIAN
!= 0,
3216 emit_move_insn (operand1
, GEN_INT (c1
));
3217 emit_move_insn (operand2
, GEN_INT (c2
));
3221 HOST_WIDE_INT ud1
, ud2
, ud3
, ud4
;
3224 ud2
= (c1
& 0xffff0000) >> 16;
3225 #if HOST_BITS_PER_WIDE_INT >= 64
3229 ud4
= (c2
& 0xffff0000) >> 16;
3231 if ((ud4
== 0xffff && ud3
== 0xffff && ud2
== 0xffff && (ud1
& 0x8000))
3232 || (ud4
== 0 && ud3
== 0 && ud2
== 0 && ! (ud1
& 0x8000)))
3235 emit_move_insn (dest
, GEN_INT (((ud1
^ 0x8000) - 0x8000)));
3237 emit_move_insn (dest
, GEN_INT (ud1
));
3240 else if ((ud4
== 0xffff && ud3
== 0xffff && (ud2
& 0x8000))
3241 || (ud4
== 0 && ud3
== 0 && ! (ud2
& 0x8000)))
3244 emit_move_insn (dest
, GEN_INT (((ud2
<< 16) ^ 0x80000000)
3247 emit_move_insn (dest
, GEN_INT (ud2
<< 16));
3249 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
3251 else if ((ud4
== 0xffff && (ud3
& 0x8000))
3252 || (ud4
== 0 && ! (ud3
& 0x8000)))
3255 emit_move_insn (dest
, GEN_INT (((ud3
<< 16) ^ 0x80000000)
3258 emit_move_insn (dest
, GEN_INT (ud3
<< 16));
3261 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud2
)));
3262 emit_move_insn (dest
, gen_rtx_ASHIFT (DImode
, dest
, GEN_INT (16)));
3264 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
3269 emit_move_insn (dest
, GEN_INT (((ud4
<< 16) ^ 0x80000000)
3272 emit_move_insn (dest
, GEN_INT (ud4
<< 16));
3275 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud3
)));
3277 emit_move_insn (dest
, gen_rtx_ASHIFT (DImode
, dest
, GEN_INT (32)));
3279 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
,
3280 GEN_INT (ud2
<< 16)));
3282 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
3288 /* Emit a move from SOURCE to DEST in mode MODE. */
3290 rs6000_emit_move (dest
, source
, mode
)
3293 enum machine_mode mode
;
3297 operands
[1] = source
;
3299 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3300 if (GET_CODE (operands
[1]) == CONST_DOUBLE
3301 && ! FLOAT_MODE_P (mode
)
3302 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
3304 /* FIXME. This should never happen. */
3305 /* Since it seems that it does, do the safe thing and convert
3307 operands
[1] = gen_int_mode (CONST_DOUBLE_LOW (operands
[1]), mode
);
3309 if (GET_CODE (operands
[1]) == CONST_DOUBLE
3310 && ! FLOAT_MODE_P (mode
)
3311 && ((CONST_DOUBLE_HIGH (operands
[1]) == 0
3312 && CONST_DOUBLE_LOW (operands
[1]) >= 0)
3313 || (CONST_DOUBLE_HIGH (operands
[1]) == -1
3314 && CONST_DOUBLE_LOW (operands
[1]) < 0)))
3317 /* Check if GCC is setting up a block move that will end up using FP
3318 registers as temporaries. We must make sure this is acceptable. */
3319 if (GET_CODE (operands
[0]) == MEM
3320 && GET_CODE (operands
[1]) == MEM
3322 && (SLOW_UNALIGNED_ACCESS (DImode
, MEM_ALIGN (operands
[0]))
3323 || SLOW_UNALIGNED_ACCESS (DImode
, MEM_ALIGN (operands
[1])))
3324 && ! (SLOW_UNALIGNED_ACCESS (SImode
, (MEM_ALIGN (operands
[0]) > 32
3325 ? 32 : MEM_ALIGN (operands
[0])))
3326 || SLOW_UNALIGNED_ACCESS (SImode
, (MEM_ALIGN (operands
[1]) > 32
3328 : MEM_ALIGN (operands
[1]))))
3329 && ! MEM_VOLATILE_P (operands
[0])
3330 && ! MEM_VOLATILE_P (operands
[1]))
3332 emit_move_insn (adjust_address (operands
[0], SImode
, 0),
3333 adjust_address (operands
[1], SImode
, 0));
3334 emit_move_insn (adjust_address (operands
[0], SImode
, 4),
3335 adjust_address (operands
[1], SImode
, 4));
3339 if (!no_new_pseudos
)
3341 if (GET_CODE (operands
[1]) == MEM
&& optimize
> 0
3342 && (mode
== QImode
|| mode
== HImode
|| mode
== SImode
)
3343 && GET_MODE_SIZE (mode
) < GET_MODE_SIZE (word_mode
))
3345 rtx reg
= gen_reg_rtx (word_mode
);
3347 emit_insn (gen_rtx_SET (word_mode
, reg
,
3348 gen_rtx_ZERO_EXTEND (word_mode
,
3350 operands
[1] = gen_lowpart (mode
, reg
);
3352 if (GET_CODE (operands
[0]) != REG
)
3353 operands
[1] = force_reg (mode
, operands
[1]);
3356 if (mode
== SFmode
&& ! TARGET_POWERPC
3357 && TARGET_HARD_FLOAT
&& TARGET_FPRS
3358 && GET_CODE (operands
[0]) == MEM
)
3362 if (reload_in_progress
|| reload_completed
)
3363 regnum
= true_regnum (operands
[1]);
3364 else if (GET_CODE (operands
[1]) == REG
)
3365 regnum
= REGNO (operands
[1]);
3369 /* If operands[1] is a register, on POWER it may have
3370 double-precision data in it, so truncate it to single
3372 if (FP_REGNO_P (regnum
) || regnum
>= FIRST_PSEUDO_REGISTER
)
3375 newreg
= (no_new_pseudos
? operands
[1] : gen_reg_rtx (mode
));
3376 emit_insn (gen_aux_truncdfsf2 (newreg
, operands
[1]));
3377 operands
[1] = newreg
;
3381 /* Recognize the case where operand[1] is a reference to thread-local
3382 data and load its address to a register. */
3383 if (GET_CODE (operands
[1]) == SYMBOL_REF
)
3385 enum tls_model model
= SYMBOL_REF_TLS_MODEL (operands
[1]);
3387 operands
[1] = rs6000_legitimize_tls_address (operands
[1], model
);
3390 /* Handle the case where reload calls us with an invalid address. */
3391 if (reload_in_progress
&& mode
== Pmode
3392 && (! general_operand (operands
[1], mode
)
3393 || ! nonimmediate_operand (operands
[0], mode
)))
3396 /* Handle the case of CONSTANT_P_RTX. */
3397 if (GET_CODE (operands
[1]) == CONSTANT_P_RTX
)
3400 /* FIXME: In the long term, this switch statement should go away
3401 and be replaced by a sequence of tests based on things like
3407 if (CONSTANT_P (operands
[1])
3408 && GET_CODE (operands
[1]) != CONST_INT
)
3409 operands
[1] = force_const_mem (mode
, operands
[1]);
3415 if (CONSTANT_P (operands
[1])
3416 && ! easy_fp_constant (operands
[1], mode
))
3417 operands
[1] = force_const_mem (mode
, operands
[1]);
3428 if (CONSTANT_P (operands
[1])
3429 && !easy_vector_constant (operands
[1], mode
))
3430 operands
[1] = force_const_mem (mode
, operands
[1]);
3435 /* Use default pattern for address of ELF small data */
3438 && DEFAULT_ABI
== ABI_V4
3439 && (GET_CODE (operands
[1]) == SYMBOL_REF
3440 || GET_CODE (operands
[1]) == CONST
)
3441 && small_data_operand (operands
[1], mode
))
3443 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
3447 if (DEFAULT_ABI
== ABI_V4
3448 && mode
== Pmode
&& mode
== SImode
3449 && flag_pic
== 1 && got_operand (operands
[1], mode
))
3451 emit_insn (gen_movsi_got (operands
[0], operands
[1]));
3455 if ((TARGET_ELF
|| DEFAULT_ABI
== ABI_DARWIN
)
3459 && CONSTANT_P (operands
[1])
3460 && GET_CODE (operands
[1]) != HIGH
3461 && GET_CODE (operands
[1]) != CONST_INT
)
3463 rtx target
= (no_new_pseudos
? operands
[0] : gen_reg_rtx (mode
));
3465 /* If this is a function address on -mcall-aixdesc,
3466 convert it to the address of the descriptor. */
3467 if (DEFAULT_ABI
== ABI_AIX
3468 && GET_CODE (operands
[1]) == SYMBOL_REF
3469 && XSTR (operands
[1], 0)[0] == '.')
3471 const char *name
= XSTR (operands
[1], 0);
3473 while (*name
== '.')
3475 new_ref
= gen_rtx_SYMBOL_REF (Pmode
, name
);
3476 CONSTANT_POOL_ADDRESS_P (new_ref
)
3477 = CONSTANT_POOL_ADDRESS_P (operands
[1]);
3478 SYMBOL_REF_FLAGS (new_ref
) = SYMBOL_REF_FLAGS (operands
[1]);
3479 SYMBOL_REF_USED (new_ref
) = SYMBOL_REF_USED (operands
[1]);
3480 SYMBOL_REF_DECL (new_ref
) = SYMBOL_REF_DECL (operands
[1]);
3481 operands
[1] = new_ref
;
3484 if (DEFAULT_ABI
== ABI_DARWIN
)
3487 if (MACHO_DYNAMIC_NO_PIC_P
)
3489 /* Take care of any required data indirection. */
3490 operands
[1] = rs6000_machopic_legitimize_pic_address (
3491 operands
[1], mode
, operands
[0]);
3492 if (operands
[0] != operands
[1])
3493 emit_insn (gen_rtx_SET (VOIDmode
,
3494 operands
[0], operands
[1]));
3498 emit_insn (gen_macho_high (target
, operands
[1]));
3499 emit_insn (gen_macho_low (operands
[0], target
, operands
[1]));
3503 emit_insn (gen_elf_high (target
, operands
[1]));
3504 emit_insn (gen_elf_low (operands
[0], target
, operands
[1]));
3508 /* If this is a SYMBOL_REF that refers to a constant pool entry,
3509 and we have put it in the TOC, we just need to make a TOC-relative
3512 && GET_CODE (operands
[1]) == SYMBOL_REF
3513 && constant_pool_expr_p (operands
[1])
3514 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands
[1]),
3515 get_pool_mode (operands
[1])))
3517 operands
[1] = create_TOC_reference (operands
[1]);
3519 else if (mode
== Pmode
3520 && CONSTANT_P (operands
[1])
3521 && ((GET_CODE (operands
[1]) != CONST_INT
3522 && ! easy_fp_constant (operands
[1], mode
))
3523 || (GET_CODE (operands
[1]) == CONST_INT
3524 && num_insns_constant (operands
[1], mode
) > 2)
3525 || (GET_CODE (operands
[0]) == REG
3526 && FP_REGNO_P (REGNO (operands
[0]))))
3527 && GET_CODE (operands
[1]) != HIGH
3528 && ! legitimate_constant_pool_address_p (operands
[1])
3529 && ! toc_relative_expr_p (operands
[1]))
3531 /* Emit a USE operation so that the constant isn't deleted if
3532 expensive optimizations are turned on because nobody
3533 references it. This should only be done for operands that
3534 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
3535 This should not be done for operands that contain LABEL_REFs.
3536 For now, we just handle the obvious case. */
3537 if (GET_CODE (operands
[1]) != LABEL_REF
)
3538 emit_insn (gen_rtx_USE (VOIDmode
, operands
[1]));
3541 /* Darwin uses a special PIC legitimizer. */
3542 if (DEFAULT_ABI
== ABI_DARWIN
&& MACHOPIC_INDIRECT
)
3545 rs6000_machopic_legitimize_pic_address (operands
[1], mode
,
3547 if (operands
[0] != operands
[1])
3548 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
3553 /* If we are to limit the number of things we put in the TOC and
3554 this is a symbol plus a constant we can add in one insn,
3555 just put the symbol in the TOC and add the constant. Don't do
3556 this if reload is in progress. */
3557 if (GET_CODE (operands
[1]) == CONST
3558 && TARGET_NO_SUM_IN_TOC
&& ! reload_in_progress
3559 && GET_CODE (XEXP (operands
[1], 0)) == PLUS
3560 && add_operand (XEXP (XEXP (operands
[1], 0), 1), mode
)
3561 && (GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == LABEL_REF
3562 || GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == SYMBOL_REF
)
3563 && ! side_effects_p (operands
[0]))
3566 force_const_mem (mode
, XEXP (XEXP (operands
[1], 0), 0));
3567 rtx other
= XEXP (XEXP (operands
[1], 0), 1);
3569 sym
= force_reg (mode
, sym
);
3571 emit_insn (gen_addsi3 (operands
[0], sym
, other
));
3573 emit_insn (gen_adddi3 (operands
[0], sym
, other
));
3577 operands
[1] = force_const_mem (mode
, operands
[1]);
3580 && constant_pool_expr_p (XEXP (operands
[1], 0))
3581 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3582 get_pool_constant (XEXP (operands
[1], 0)),
3583 get_pool_mode (XEXP (operands
[1], 0))))
3586 = gen_rtx_MEM (mode
,
3587 create_TOC_reference (XEXP (operands
[1], 0)));
3588 set_mem_alias_set (operands
[1], get_TOC_alias_set ());
3589 RTX_UNCHANGING_P (operands
[1]) = 1;
3595 if (GET_CODE (operands
[0]) == MEM
3596 && GET_CODE (XEXP (operands
[0], 0)) != REG
3597 && ! reload_in_progress
)
3599 = replace_equiv_address (operands
[0],
3600 copy_addr_to_reg (XEXP (operands
[0], 0)));
3602 if (GET_CODE (operands
[1]) == MEM
3603 && GET_CODE (XEXP (operands
[1], 0)) != REG
3604 && ! reload_in_progress
)
3606 = replace_equiv_address (operands
[1],
3607 copy_addr_to_reg (XEXP (operands
[1], 0)));
3610 emit_insn (gen_rtx_PARALLEL (VOIDmode
,
3612 gen_rtx_SET (VOIDmode
,
3613 operands
[0], operands
[1]),
3614 gen_rtx_CLOBBER (VOIDmode
,
3615 gen_rtx_SCRATCH (SImode
)))));
3624 /* Above, we may have called force_const_mem which may have returned
3625 an invalid address. If we can, fix this up; otherwise, reload will
3626 have to deal with it. */
3627 if (GET_CODE (operands
[1]) == MEM
&& ! reload_in_progress
)
3628 operands
[1] = validize_mem (operands
[1]);
3631 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
3634 /* Initialize a variable CUM of type CUMULATIVE_ARGS
3635 for a call to a function whose data type is FNTYPE.
3636 For a library call, FNTYPE is 0.
3638 For incoming args we set the number of arguments in the prototype large
3639 so we never return a PARALLEL. */
/* Initialize *CUM for a call to a function of type FNTYPE (0 for a
   library call).  Zeroes the whole structure, then seeds the FP,
   AltiVec and SysV GP register cursors, the prototype flag, the call
   cookie and the stdarg flag.  For INCOMING args nargs_prototype is
   set very large so a PARALLEL is never returned (see comment above).
   NOTE(review): this extraction is missing several original lines
   (return type, braces, parts of conditionals) -- verify against the
   original rs6000.c before relying on the exact control flow.  */
3642 init_cumulative_args (cum
, fntype
, libname
, incoming
)
3643 CUMULATIVE_ARGS
*cum
;
3645 rtx libname ATTRIBUTE_UNUSED
;
/* Static so it is implicitly zeroed; used to clear *CUM wholesale.  */
3648 static CUMULATIVE_ARGS zero_cumulative
;
3650 *cum
= zero_cumulative
;
/* First candidate registers for FP, AltiVec and SysV GP arguments.  */
3652 cum
->fregno
= FP_ARG_MIN_REG
;
3653 cum
->vregno
= ALTIVEC_ARG_MIN_REG
;
3654 cum
->prototype
= (fntype
&& TYPE_ARG_TYPES (fntype
));
3655 cum
->call_cookie
= CALL_NORMAL
;
3656 cum
->sysv_gregno
= GP_ARG_MIN_REG
;
/* stdarg when the last declared parameter type is not void.  */
3657 cum
->stdarg
= fntype
3658 && (TYPE_ARG_TYPES (fntype
) != 0
3659 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
3660 != void_type_node
))
;
/* Incoming args: huge count so we never build a PARALLEL.  */
3663 cum
->nargs_prototype
= 1000; /* don't return a PARALLEL */
3665 else if (cum
->prototype
)
/* Prototyped call: count the declared args, plus one if the value
   is returned in memory (an extra hidden pointer argument).  */
3666 cum
->nargs_prototype
= (list_length (TYPE_ARG_TYPES (fntype
)) - 1
3667 + (TYPE_MODE (TREE_TYPE (fntype
)) == BLKmode
3668 || RETURN_IN_MEMORY (TREE_TYPE (fntype
))));
3671 cum
->nargs_prototype
= 0;
3673 cum
->orig_nargs
= cum
->nargs_prototype
;
3675 /* Check for a longcall attribute. */
/* longcall forces the long-call sequence unless shortcall overrides.  */
3677 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype
))
3678 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype
)))
3679 cum
->call_cookie
= CALL_LONG
;
/* Debug dump of the initialized state under -mdebug=arg.  */
3681 if (TARGET_DEBUG_ARG
)
3683 fprintf (stderr
, "\ninit_cumulative_args:");
3686 tree ret_type
= TREE_TYPE (fntype
);
3687 fprintf (stderr
, " ret code = %s,",
3688 tree_code_name
[ (int)TREE_CODE (ret_type
) ]);
3691 if (cum
->call_cookie
& CALL_LONG
)
3692 fprintf (stderr
, " longcall,");
3694 fprintf (stderr
, " proto = %d, nargs = %d\n",
3695 cum
->prototype
, cum
->nargs_prototype
);
3699 /* If defined, a C expression which determines whether, and in which
3700 direction, to pad out an argument with extra space. The value
3701 should be of type `enum direction': either `upward' to pad above
3702 the argument, `downward' to pad below, or `none' to inhibit
padding.

3705 For the AIX ABI structs are always stored left shifted in their
argument slot.  */
/* Decide padding direction for an argument of MODE/TYPE.
   NOTE(review): the body of the aggregate branch is not visible in
   this extraction -- presumably it returns `upward'; confirm against
   the original source.  */
3709 function_arg_padding (mode
, type
)
3710 enum machine_mode mode
;
/* Aggregates get special (left-shifted) treatment -- see comment above.  */
3713 if (type
!= 0 && AGGREGATE_TYPE_P (type
))
3716 /* This is the default definition. */
/* On little-endian: pad small variable-sized/short aggregates downward;
   on big-endian: pad downward when the mode is narrower than a parameter
   slot.  Otherwise pad upward.  */
3717 return (! BYTES_BIG_ENDIAN
3720 ? (type
&& TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
3721 && int_size_in_bytes (type
) < (PARM_BOUNDARY
/ BITS_PER_UNIT
))
3722 : GET_MODE_BITSIZE (mode
) < PARM_BOUNDARY
)
3723 ? downward
: upward
));
3726 /* If defined, a C expression that gives the alignment boundary, in bits,
3727 of an argument with the specified mode and type. If it is not defined,
3728 PARM_BOUNDARY is used for all arguments.
3730 V.4 wants long longs to be double word aligned. */
/* Alignment boundary, in bits, for an argument of MODE/TYPE.
   V.4 double-word aligns DImode/DFmode; SPE and AltiVec vector modes
   get their own alignment.  NOTE(review): the `return' statements of
   the first three branches are missing from this extraction
   (presumably 64, 64 and 128 respectively) -- confirm against the
   original source.  */
3733 function_arg_boundary (mode
, type
)
3734 enum machine_mode mode
;
3735 tree type ATTRIBUTE_UNUSED
;
3737 if (DEFAULT_ABI
== ABI_V4
&& (mode
== DImode
|| mode
== DFmode
))
3739 else if (SPE_VECTOR_MODE (mode
))
3741 else if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
/* Everything else uses the default parameter boundary.  */
3744 return PARM_BOUNDARY
;
3747 /* Update the data in CUM to advance over an argument
3748 of mode MODE and data type TYPE.
3749 (TYPE is null for libcalls where that information may not be available.) */
/* Advance *CUM past an argument of MODE and TYPE (TYPE may be null
   for libcalls).  Handles, in order: AltiVec vector args, SPE vector
   args, the V.4 (SysV) ABI, and the default (AIX/Darwin) ABI.
   NOTE(review): several original lines (branch bodies, braces,
   parts of conditions) are missing from this extraction; the
   control flow below is only partially visible.  */
3752 function_arg_advance (cum
, mode
, type
, named
)
3753 CUMULATIVE_ARGS
*cum
;
3754 enum machine_mode mode
;
/* One fewer prototyped argument remains.  */
3758 cum
->nargs_prototype
--;
/* AltiVec vectors consume a vector register while any remain.  */
3760 if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
3762 if (cum
->vregno
<= ALTIVEC_ARG_MAX_REG
&& cum
->nargs_prototype
>= 0)
3765 cum
->words
+= RS6000_ARG_SIZE (mode
, type
);
3767 else if (TARGET_SPE_ABI
&& TARGET_SPE
&& SPE_VECTOR_MODE (mode
)
3769 && cum
->sysv_gregno
<= GP_ARG_MAX_REG
)
3771 else if (DEFAULT_ABI
== ABI_V4
)
/* V.4: SFmode/DFmode go in FP registers when hard float is on.  */
3773 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
3774 && (mode
== SFmode
|| mode
== DFmode
))
3776 if (cum
->fregno
<= FP_ARG_V4_MAX_REG
)
/* Out of FP registers: align the stack slot to a doubleword.  */
3781 cum
->words
+= cum
->words
& 1;
3782 cum
->words
+= RS6000_ARG_SIZE (mode
, type
);
3788 int gregno
= cum
->sysv_gregno
;
3790 /* Aggregates and IEEE quad get passed by reference. */
3791 if ((type
&& AGGREGATE_TYPE_P (type
))
3795 n_words
= RS6000_ARG_SIZE (mode
, type
);
3797 /* Long long and SPE vectors are put in odd registers. */
3798 if (n_words
== 2 && (gregno
& 1) == 0)
3801 /* Long long and SPE vectors are not split between registers
3803 if (gregno
+ n_words
- 1 > GP_ARG_MAX_REG
)
3805 /* Long long is aligned on the stack. */
3807 cum
->words
+= cum
->words
& 1;
3808 cum
->words
+= n_words
;
3811 /* Note: continuing to accumulate gregno past when we've started
3812 spilling to the stack indicates the fact that we've started
3813 spilling to the stack to expand_builtin_saveregs. */
3814 cum
->sysv_gregno
= gregno
+ n_words
;
3817 if (TARGET_DEBUG_ARG
)
3819 fprintf (stderr
, "function_adv: words = %2d, fregno = %2d, ",
3820 cum
->words
, cum
->fregno
);
3821 fprintf (stderr
, "gregno = %2d, nargs = %4d, proto = %d, ",
3822 cum
->sysv_gregno
, cum
->nargs_prototype
, cum
->prototype
);
3823 fprintf (stderr
, "mode = %4s, named = %d\n",
3824 GET_MODE_NAME (mode
), named
);
/* Default ABI: 32-bit doubleword-aligned args may need a pad word.  */
3829 int align
= (TARGET_32BIT
&& (cum
->words
& 1) != 0
3830 && function_arg_boundary (mode
, type
) == 64) ? 1 : 0;
3832 cum
->words
+= align
+ RS6000_ARG_SIZE (mode
, type
);
/* Floats also consume an FP register (two for TFmode long double).  */
3834 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
3835 && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3836 cum
->fregno
+= (mode
== TFmode
? 2 : 1);
3838 if (TARGET_DEBUG_ARG
)
3840 fprintf (stderr
, "function_adv: words = %2d, fregno = %2d, ",
3841 cum
->words
, cum
->fregno
);
3842 fprintf (stderr
, "nargs = %4d, proto = %d, mode = %4s, ",
3843 cum
->nargs_prototype
, cum
->prototype
, GET_MODE_NAME (mode
));
3844 fprintf (stderr
, "named = %d, align = %d\n", named
, align
);
3849 /* Determine where to put a SIMD argument on the SPE. */
/* Determine where to put an SPE SIMD argument: a doubleword vector is
   described as a PARALLEL of two SImode GP-register pieces (offsets 0
   and 4), starting at an odd register; otherwise a single REG.
   NOTE(review): some original lines (gregno adjustment body, fallback
   return) are missing from this extraction.  */
3851 rs6000_spe_function_arg (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
, tree type
)
3855 int gregno
= cum
->sysv_gregno
;
3856 int n_words
= RS6000_ARG_SIZE (mode
, type
);
3858 /* SPE vectors are put in odd registers. */
3859 if (n_words
== 2 && (gregno
& 1) == 0)
/* Fits entirely in registers: build the two-piece PARALLEL.  */
3862 if (gregno
+ n_words
- 1 <= GP_ARG_MAX_REG
)
3865 enum machine_mode m
= SImode
;
/* Low word at offset 0, high word at offset 4.  */
3867 r1
= gen_rtx_REG (m
, gregno
);
3868 r1
= gen_rtx_EXPR_LIST (m
, r1
, const0_rtx
);
3869 r2
= gen_rtx_REG (m
, gregno
+ 1);
3870 r2
= gen_rtx_EXPR_LIST (m
, r2
, GEN_INT (4));
3871 return gen_rtx_PARALLEL (mode
, gen_rtvec (2, r1
, r2
));
/* Non-doubleword case: a single GP register if one remains.  */
3878 if (cum
->sysv_gregno
<= GP_ARG_MAX_REG
)
3879 return gen_rtx_REG (mode
, cum
->sysv_gregno
);
3885 /* Determine where to put an argument to a function.
3886 Value is zero to push the argument on the stack,
3887 or a hard register in which to store the argument.
3889 MODE is the argument's machine mode.
3890 TYPE is the data type of the argument (as a tree).
3891 This is null for libcalls where that information may
3893 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3894 the preceding args and about the function being called.
3895 NAMED is nonzero if this argument is a named parameter
3896 (otherwise it is an extra parameter matching an ellipsis).
3898 On RS/6000 the first eight words of non-FP are normally in registers
3899 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3900 Under V.4, the first 8 FP args are in registers.
3902 If this is floating-point and no prototype is specified, we use
3903 both an FP and integer register (or possibly FP reg and stack). Library
3904 functions (when TYPE is zero) always have the proper types for args,
3905 so we can pass the FP value just in one register. emit_library_function
3906 doesn't support PARALLEL anyway. */
/* Determine where to put an argument of MODE/TYPE given *CUM: returns
   a REG, a PARALLEL (partly register / partly stack, or FP+GP when
   unprototyped), a VOIDmode cookie marker, or (in missing branches)
   NULL_RTX meaning "push on the stack".  See the long block comment
   above for the per-ABI rules.  NOTE(review): this extraction is
   missing many lines (branch bodies, braces, NULL returns); the
   visible fragments do not show the complete control flow.  */
3909 function_arg (cum
, mode
, type
, named
)
3910 CUMULATIVE_ARGS
*cum
;
3911 enum machine_mode mode
;
3915 enum rs6000_abi abi
= DEFAULT_ABI
;
3917 /* Return a marker to indicate whether CR1 needs to set or clear the
3918 bit that V.4 uses to say fp args were passed in registers.
3919 Assume that we don't need the marker for software floating point,
3920 or compiler generated library calls. */
3921 if (mode
== VOIDmode
)
3924 && cum
->nargs_prototype
< 0
3925 && type
&& (cum
->prototype
|| TARGET_NO_PROTOTYPE
))
3927 /* For the SPE, we need to crxor CR6 always. */
3929 return GEN_INT (cum
->call_cookie
| CALL_V4_SET_FP_ARGS
);
3930 else if (TARGET_HARD_FLOAT
&& TARGET_FPRS
)
/* Cookie says whether any FP args actually went in registers.  */
3931 return GEN_INT (cum
->call_cookie
3932 | ((cum
->fregno
== FP_ARG_MIN_REG
)
3933 ? CALL_V4_SET_FP_ARGS
3934 : CALL_V4_CLEAR_FP_ARGS
));
3937 return GEN_INT (cum
->call_cookie
);
/* Named AltiVec vector args go in vector registers.  */
3940 if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
3942 if (named
&& cum
->vregno
<= ALTIVEC_ARG_MAX_REG
)
3943 return gen_rtx_REG (mode
, cum
->vregno
);
3947 else if (TARGET_SPE_ABI
&& TARGET_SPE
&& SPE_VECTOR_MODE (mode
))
3948 return rs6000_spe_function_arg (cum
, mode
, type
);
3949 else if (abi
== ABI_V4
)
3951 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
3952 && (mode
== SFmode
|| mode
== DFmode
))
3954 if (cum
->fregno
<= FP_ARG_V4_MAX_REG
)
3955 return gen_rtx_REG (mode
, cum
->fregno
);
3962 int gregno
= cum
->sysv_gregno
;
3964 /* Aggregates and IEEE quad get passed by reference. */
3965 if ((type
&& AGGREGATE_TYPE_P (type
))
3969 n_words
= RS6000_ARG_SIZE (mode
, type
);
3971 /* Long long and SPE vectors are put in odd registers. */
3972 if (n_words
== 2 && (gregno
& 1) == 0)
3975 /* Long long do not split between registers and stack. */
3976 if (gregno
+ n_words
- 1 <= GP_ARG_MAX_REG
)
3977 return gen_rtx_REG (mode
, gregno
);
/* Default ABI: compute the word index after any doubleword pad.  */
3984 int align
= (TARGET_32BIT
&& (cum
->words
& 1) != 0
3985 && function_arg_boundary (mode
, type
) == 64) ? 1 : 0;
3986 int align_words
= cum
->words
+ align
;
3988 if (type
&& TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
3991 if (USE_FP_FOR_ARG_P (*cum
, mode
, type
))
3994 || ((cum
->nargs_prototype
> 0)
3995 /* IBM AIX extended its linkage convention definition always
3996 to require FP args after register save area hole on the
3998 && (DEFAULT_ABI
!= ABI_AIX
4000 || (align_words
< GP_ARG_NUM_REG
))))
4001 return gen_rtx_REG (mode
, cum
->fregno
);
/* Unprototyped FP arg: describe it in both GP and FP registers so the
   caller can satisfy either kind of callee.  */
4003 return gen_rtx_PARALLEL (mode
,
4005 gen_rtx_EXPR_LIST (VOIDmode
,
4006 ((align_words
>= GP_ARG_NUM_REG
)
4009 + RS6000_ARG_SIZE (mode
, type
)
4011 /* If this is partially on the stack, then
4012 we only include the portion actually
4013 in registers here. */
4014 ? gen_rtx_REG (SImode
,
4015 GP_ARG_MIN_REG
+ align_words
)
4016 : gen_rtx_REG (mode
,
4017 GP_ARG_MIN_REG
+ align_words
))),
4019 gen_rtx_EXPR_LIST (VOIDmode
,
4020 gen_rtx_REG (mode
, cum
->fregno
),
/* Plain integer case: a GP register if one remains.  */
4023 else if (align_words
< GP_ARG_NUM_REG
)
4024 return gen_rtx_REG (mode
, GP_ARG_MIN_REG
+ align_words
);
4030 /* For an arg passed partly in registers and partly in memory,
4031 this is the number of registers used.
4032 For args passed entirely in registers or entirely in memory, zero. */
/* Number of registers used for an argument passed partly in registers
   and partly in memory; zero when entirely in one or the other.
   NOTE(review): several early-return bodies are missing from this
   extraction (V.4, FP/AltiVec and prototyped cases presumably
   return 0); confirm against the original source.  */
4035 function_arg_partial_nregs (cum
, mode
, type
, named
)
4036 CUMULATIVE_ARGS
*cum
;
4037 enum machine_mode mode
;
4039 int named ATTRIBUTE_UNUSED
;
/* V.4 never splits an argument between registers and memory.  */
4041 if (DEFAULT_ABI
== ABI_V4
)
4044 if (USE_FP_FOR_ARG_P (*cum
, mode
, type
)
4045 || USE_ALTIVEC_FOR_ARG_P (*cum
, mode
, type
))
4047 if (cum
->nargs_prototype
>= 0)
/* Split case: starts inside the GP register area but runs past it.  */
4051 if (cum
->words
< GP_ARG_NUM_REG
4052 && GP_ARG_NUM_REG
< (cum
->words
+ RS6000_ARG_SIZE (mode
, type
)))
4054 int ret
= GP_ARG_NUM_REG
- cum
->words
;
4055 if (ret
&& TARGET_DEBUG_ARG
)
4056 fprintf (stderr
, "function_arg_partial_nregs: %d\n", ret
);
4064 /* A C expression that indicates when an argument must be passed by
4065 reference. If nonzero for an argument, a copy of that argument is
4066 made in memory and a pointer to the argument is passed instead of
4067 the argument itself. The pointer is passed in whatever way is
4068 appropriate for passing a pointer to that type.
4070 Under V.4, structures and unions are passed by reference.
4072 As an extension to all ABIs, variable sized types are passed by
/* Nonzero when an argument of MODE/TYPE must be passed by reference:
   under V.4, aggregates (and, per the comment above, IEEE quad);
   under all ABIs, variable-sized types.  NOTE(review): part of the
   V.4 condition and its return are missing from this extraction.  */
4076 function_arg_pass_by_reference (cum
, mode
, type
, named
)
4077 CUMULATIVE_ARGS
*cum ATTRIBUTE_UNUSED
;
4078 enum machine_mode mode ATTRIBUTE_UNUSED
;
4080 int named ATTRIBUTE_UNUSED
;
4082 if (DEFAULT_ABI
== ABI_V4
4083 && ((type
&& AGGREGATE_TYPE_P (type
))
4086 if (TARGET_DEBUG_ARG
)
4087 fprintf (stderr
, "function_arg_pass_by_reference: aggregate\n");
/* Variable-sized types (size <= 0 means non-constant) by reference.  */
4091 return type
&& int_size_in_bytes (type
) <= 0;
4094 /* Perform any needed actions needed for a function that is receiving a
4095 variable number of arguments.
4099 MODE and TYPE are the mode and type of the current parameter.
4101 PRETEND_SIZE is a variable that should be set to the amount of stack
4102 that must be pushed by the prolog to pretend that our caller pushed
4105 Normally, this macro will push all remaining incoming registers on the
4106 stack and set PRETEND_SIZE to the length of the registers pushed. */
/* Spill the remaining incoming argument registers to the varargs save
   area so va_arg can find them.  V.4 uses a dedicated save area below
   the virtual stack vars; other ABIs dump into the incoming args area.
   FP registers are saved too on V.4 hard-float, guarded at run time by
   the CR1 "fp args passed" bit.  NOTE(review): a number of original
   lines (braces, the next_cum initialization, jump-target details) are
   missing from this extraction.  */
4109 setup_incoming_varargs (cum
, mode
, type
, pretend_size
, no_rtl
)
4110 CUMULATIVE_ARGS
*cum
;
4111 enum machine_mode mode
;
4113 int *pretend_size ATTRIBUTE_UNUSED
;
4117 CUMULATIVE_ARGS next_cum
;
4118 int reg_size
= TARGET_32BIT
? 4 : 8;
4119 rtx save_area
= NULL_RTX
, mem
;
4120 int first_reg_offset
, set
;
4124 fntype
= TREE_TYPE (current_function_decl
);
4125 stdarg_p
= (TYPE_ARG_TYPES (fntype
) != 0
4126 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
4127 != void_type_node
));
4129 /* For varargs, we do not want to skip the dummy va_dcl argument.
4130 For stdargs, we do want to skip the last named argument. */
4133 function_arg_advance (&next_cum
, mode
, type
, 1);
4135 if (DEFAULT_ABI
== ABI_V4
)
4137 /* Indicate to allocate space on the stack for varargs save area. */
4138 cfun
->machine
->sysv_varargs_p
= 1;
4140 save_area
= plus_constant (virtual_stack_vars_rtx
,
4141 - RS6000_VARARGS_SIZE
);
4143 first_reg_offset
= next_cum
.sysv_gregno
- GP_ARG_MIN_REG
;
/* Non-V.4: save into the incoming argument area itself.  */
4147 first_reg_offset
= next_cum
.words
;
4148 save_area
= virtual_incoming_args_rtx
;
4149 cfun
->machine
->sysv_varargs_p
= 0;
4151 if (MUST_PASS_IN_STACK (mode
, type
))
4152 first_reg_offset
+= RS6000_ARG_SIZE (TYPE_MODE (type
), type
);
4155 set
= get_varargs_alias_set ();
/* Spill the unconsumed GP argument registers as one block.  */
4156 if (! no_rtl
&& first_reg_offset
< GP_ARG_NUM_REG
)
4158 mem
= gen_rtx_MEM (BLKmode
,
4159 plus_constant (save_area
,
4160 first_reg_offset
* reg_size
)),
4161 set_mem_alias_set (mem
, set
);
4162 set_mem_align (mem
, BITS_PER_WORD
);
4164 move_block_from_reg (GP_ARG_MIN_REG
+ first_reg_offset
, mem
,
4165 GP_ARG_NUM_REG
- first_reg_offset
);
4168 /* Save FP registers if needed. */
4169 if (DEFAULT_ABI
== ABI_V4
4170 && TARGET_HARD_FLOAT
&& TARGET_FPRS
4172 && next_cum
.fregno
<= FP_ARG_V4_MAX_REG
)
4174 int fregno
= next_cum
.fregno
;
4175 rtx cr1
= gen_rtx_REG (CCmode
, CR1_REGNO
);
4176 rtx lab
= gen_label_rtx ();
/* FP save slots follow the GP save block in the varargs area.  */
4177 int off
= (GP_ARG_NUM_REG
* reg_size
) + ((fregno
- FP_ARG_MIN_REG
) * 8);
/* Skip the FP saves entirely when CR1 says no FP args were passed.  */
4179 emit_jump_insn (gen_rtx_SET (VOIDmode
,
4181 gen_rtx_IF_THEN_ELSE (VOIDmode
,
4182 gen_rtx_NE (VOIDmode
, cr1
,
4184 gen_rtx_LABEL_REF (VOIDmode
, lab
),
4187 while (fregno
<= FP_ARG_V4_MAX_REG
)
4189 mem
= gen_rtx_MEM (DFmode
, plus_constant (save_area
, off
));
4190 set_mem_alias_set (mem
, set
);
4191 emit_move_insn (mem
, gen_rtx_REG (DFmode
, fregno
));
4200 /* Create the va_list data type. */
/* Build the va_list type.  AIX-style ABIs use plain `char *'; V.4
   builds the SysV record { gpr, fpr, reserved, overflow_arg_area,
   reg_save_area } and returns a one-element array of it.  */
4203 rs6000_build_va_list ()
4205 tree f_gpr
, f_fpr
, f_res
, f_ovf
, f_sav
, record
, type_decl
;
4207 /* For AIX, prefer 'char *' because that's what the system
4208 header files like. */
4209 if (DEFAULT_ABI
!= ABI_V4
)
4210 return build_pointer_type (char_type_node
);
4212 record
= (*lang_hooks
.types
.make_type
) (RECORD_TYPE
);
4213 type_decl
= build_decl (TYPE_DECL
, get_identifier ("__va_list_tag"), record
);
/* gpr/fpr: counts of GP/FP registers already consumed (one byte each).  */
4215 f_gpr
= build_decl (FIELD_DECL
, get_identifier ("gpr"),
4216 unsigned_char_type_node
);
4217 f_fpr
= build_decl (FIELD_DECL
, get_identifier ("fpr"),
4218 unsigned_char_type_node
);
4219 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
4221 f_res
= build_decl (FIELD_DECL
, get_identifier ("reserved"),
4222 short_unsigned_type_node
);
4223 f_ovf
= build_decl (FIELD_DECL
, get_identifier ("overflow_arg_area"),
4225 f_sav
= build_decl (FIELD_DECL
, get_identifier ("reg_save_area"),
/* Chain every field into the record and lay it out.  */
4228 DECL_FIELD_CONTEXT (f_gpr
) = record
;
4229 DECL_FIELD_CONTEXT (f_fpr
) = record
;
4230 DECL_FIELD_CONTEXT (f_res
) = record
;
4231 DECL_FIELD_CONTEXT (f_ovf
) = record
;
4232 DECL_FIELD_CONTEXT (f_sav
) = record
;
4234 TREE_CHAIN (record
) = type_decl
;
4235 TYPE_NAME (record
) = type_decl
;
4236 TYPE_FIELDS (record
) = f_gpr
;
4237 TREE_CHAIN (f_gpr
) = f_fpr
;
4238 TREE_CHAIN (f_fpr
) = f_res
;
4239 TREE_CHAIN (f_res
) = f_ovf
;
4240 TREE_CHAIN (f_ovf
) = f_sav
;
4242 layout_type (record
);
4244 /* The correct type is an array type of one element. */
4245 return build_array_type (record
, build_index_type (size_zero_node
));
4248 /* Implement va_start. */
/* Expand va_start.  Non-V.4 ABIs defer to the standard expander;
   V.4 fills in the __va_list_tag fields: gpr/fpr register counts,
   the overflow area pointer and the register save area pointer.  */
4251 rs6000_va_start (valist
, nextarg
)
4255 HOST_WIDE_INT words
, n_gpr
, n_fpr
;
4256 tree f_gpr
, f_fpr
, f_res
, f_ovf
, f_sav
;
4257 tree gpr
, fpr
, ovf
, sav
, t
;
4259 /* Only SVR4 needs something special. */
4260 if (DEFAULT_ABI
!= ABI_V4
)
4262 std_expand_builtin_va_start (valist
, nextarg
);
/* Walk the field chain laid down by rs6000_build_va_list.  */
4266 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
4267 f_fpr
= TREE_CHAIN (f_gpr
);
4268 f_res
= TREE_CHAIN (f_fpr
);
4269 f_ovf
= TREE_CHAIN (f_res
);
4270 f_sav
= TREE_CHAIN (f_ovf
);
4272 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
4273 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
);
4274 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
);
4275 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
);
4276 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
);
4278 /* Count number of gp and fp argument registers used. */
4279 words
= current_function_args_info
.words
;
4280 n_gpr
= current_function_args_info
.sysv_gregno
- GP_ARG_MIN_REG
;
4281 n_fpr
= current_function_args_info
.fregno
- FP_ARG_MIN_REG
;
4283 if (TARGET_DEBUG_ARG
)
4284 fprintf (stderr
, "va_start: words = "HOST_WIDE_INT_PRINT_DEC
", n_gpr = "
4285 HOST_WIDE_INT_PRINT_DEC
", n_fpr = "HOST_WIDE_INT_PRINT_DEC
"\n",
4286 words
, n_gpr
, n_fpr
);
/* Store the GP and FP register counts into the va_list.  */
4288 t
= build (MODIFY_EXPR
, TREE_TYPE (gpr
), gpr
, build_int_2 (n_gpr
, 0));
4289 TREE_SIDE_EFFECTS (t
) = 1;
4290 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4292 t
= build (MODIFY_EXPR
, TREE_TYPE (fpr
), fpr
, build_int_2 (n_fpr
, 0));
4293 TREE_SIDE_EFFECTS (t
) = 1;
4294 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4296 /* Find the overflow area. */
4297 t
= make_tree (TREE_TYPE (ovf
), virtual_incoming_args_rtx
);
4299 t
= build (PLUS_EXPR
, TREE_TYPE (ovf
), t
,
4300 build_int_2 (words
* UNITS_PER_WORD
, 0));
4301 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
4302 TREE_SIDE_EFFECTS (t
) = 1;
4303 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4305 /* Find the register save area. */
4306 t
= make_tree (TREE_TYPE (sav
), virtual_stack_vars_rtx
);
4307 t
= build (PLUS_EXPR
, TREE_TYPE (sav
), t
,
4308 build_int_2 (-RS6000_VARARGS_SIZE
, -1));
4309 t
= build (MODIFY_EXPR
, TREE_TYPE (sav
), sav
, t
);
4310 TREE_SIDE_EFFECTS (t
) = 1;
4311 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4314 /* Implement va_arg. */
/* Expand va_arg for TYPE.  Non-V.4: handle by-reference for
   variable-sized types, otherwise defer to the standard expander.
   V.4: read from the register save area while registers remain,
   otherwise from the overflow area, branching at run time on the
   gpr/fpr counters in the va_list.  NOTE(review): many original
   lines (indirect_p/n_reg/sav_ofs/sav_scale setup, several branch
   bodies) are missing from this extraction; the control flow below
   is only partially visible.  */
4317 rs6000_va_arg (valist
, type
)
4320 tree f_gpr
, f_fpr
, f_res
, f_ovf
, f_sav
;
4321 tree gpr
, fpr
, ovf
, sav
, reg
, t
, u
;
4322 int indirect_p
, size
, rsize
, n_reg
, sav_ofs
, sav_scale
;
4323 rtx lab_false
, lab_over
, addr_rtx
, r
;
4325 if (DEFAULT_ABI
!= ABI_V4
)
4327 /* Variable sized types are passed by reference. */
4328 if (int_size_in_bytes (type
) <= 0)
4330 u
= build_pointer_type (type
);
4332 /* Args grow upward. */
/* Fetch the pointer slot, post-incrementing past it, then
   dereference to get the actual argument address.  */
4333 t
= build (POSTINCREMENT_EXPR
, TREE_TYPE (valist
), valist
,
4334 build_int_2 (POINTER_SIZE
/ BITS_PER_UNIT
, 0));
4335 TREE_SIDE_EFFECTS (t
) = 1;
4337 t
= build1 (NOP_EXPR
, build_pointer_type (u
), t
);
4338 TREE_SIDE_EFFECTS (t
) = 1;
4340 t
= build1 (INDIRECT_REF
, u
, t
);
4341 TREE_SIDE_EFFECTS (t
) = 1;
4343 return expand_expr (t
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
4346 return std_expand_builtin_va_arg (valist
, type
);
/* V.4 path: pick apart the __va_list_tag record.  */
4349 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
4350 f_fpr
= TREE_CHAIN (f_gpr
);
4351 f_res
= TREE_CHAIN (f_fpr
);
4352 f_ovf
= TREE_CHAIN (f_res
);
4353 f_sav
= TREE_CHAIN (f_ovf
);
4355 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
4356 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
);
4357 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
);
4358 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
);
4359 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
);
4361 size
= int_size_in_bytes (type
);
4362 rsize
= (size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
4364 if (AGGREGATE_TYPE_P (type
) || TYPE_MODE (type
) == TFmode
)
4366 /* Aggregates and long doubles are passed by reference. */
4372 size
= UNITS_PER_WORD
;
4375 else if (FLOAT_TYPE_P (type
) && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
4377 /* FP args go in FP registers, if present. */
4386 /* Otherwise into GP registers. */
4394 /* Pull the value out of the saved registers ... */
4396 lab_false
= gen_label_rtx ();
4397 lab_over
= gen_label_rtx ();
4398 addr_rtx
= gen_reg_rtx (Pmode
);
4400 /* AltiVec vectors never go in registers. */
4401 if (!TARGET_ALTIVEC
|| TREE_CODE (type
) != VECTOR_TYPE
)
/* Runtime test: jump to lab_false (overflow path) when the counter
   says the registers of this kind are exhausted.  */
4403 TREE_THIS_VOLATILE (reg
) = 1;
4404 emit_cmp_and_jump_insns
4405 (expand_expr (reg
, NULL_RTX
, QImode
, EXPAND_NORMAL
),
4406 GEN_INT (8 - n_reg
+ 1), GE
, const1_rtx
, QImode
, 1,
4409 /* Long long is aligned in the registers. */
/* Round the register counter up to an n_reg boundary.  */
4412 u
= build (BIT_AND_EXPR
, TREE_TYPE (reg
), reg
,
4413 build_int_2 (n_reg
- 1, 0));
4414 u
= build (PLUS_EXPR
, TREE_TYPE (reg
), reg
, u
);
4415 u
= build (MODIFY_EXPR
, TREE_TYPE (reg
), reg
, u
);
4416 TREE_SIDE_EFFECTS (u
) = 1;
4417 expand_expr (u
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
/* addr = sav + sav_ofs + (reg++ * sav_scale).  */
4421 t
= build (PLUS_EXPR
, ptr_type_node
, sav
, build_int_2 (sav_ofs
, 0));
4425 u
= build (POSTINCREMENT_EXPR
, TREE_TYPE (reg
), reg
,
4426 build_int_2 (n_reg
, 0));
4427 TREE_SIDE_EFFECTS (u
) = 1;
4429 u
= build1 (CONVERT_EXPR
, integer_type_node
, u
);
4430 TREE_SIDE_EFFECTS (u
) = 1;
4432 u
= build (MULT_EXPR
, integer_type_node
, u
, build_int_2 (sav_scale
, 0));
4433 TREE_SIDE_EFFECTS (u
) = 1;
4435 t
= build (PLUS_EXPR
, ptr_type_node
, t
, u
);
4436 TREE_SIDE_EFFECTS (t
) = 1;
4438 r
= expand_expr (t
, addr_rtx
, Pmode
, EXPAND_NORMAL
);
4440 emit_move_insn (addr_rtx
, r
);
4442 emit_jump_insn (gen_jump (lab_over
));
4446 emit_label (lab_false
);
4448 /* ... otherwise out of the overflow area. */
4450 /* Make sure we don't find reg 7 for the next int arg.
4452 All AltiVec vectors go in the overflow area. So in the AltiVec
4453 case we need to get the vectors from the overflow area, but
4454 remember where the GPRs and FPRs are. */
4455 if (n_reg
> 1 && (TREE_CODE (type
) != VECTOR_TYPE
4456 || !TARGET_ALTIVEC
))
/* Mark the register kind as exhausted for subsequent va_arg calls.  */
4458 t
= build (MODIFY_EXPR
, TREE_TYPE (reg
), reg
, build_int_2 (8, 0));
4459 TREE_SIDE_EFFECTS (t
) = 1;
4460 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4463 /* Care for on-stack alignment if needed. */
4470 /* AltiVec vectors are 16 byte aligned. */
4471 if (TARGET_ALTIVEC
&& TREE_CODE (type
) == VECTOR_TYPE
)
/* Round ovf up: (ovf + align) & ~align (two's-complement mask).  */
4476 t
= build (PLUS_EXPR
, TREE_TYPE (ovf
), ovf
, build_int_2 (align
, 0));
4477 t
= build (BIT_AND_EXPR
, TREE_TYPE (t
), t
, build_int_2 (-align
-1, -1));
4481 r
= expand_expr (t
, addr_rtx
, Pmode
, EXPAND_NORMAL
);
4483 emit_move_insn (addr_rtx
, r
);
/* Advance the overflow pointer past this argument.  */
4485 t
= build (PLUS_EXPR
, TREE_TYPE (t
), t
, build_int_2 (size
, 0));
4486 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
4487 TREE_SIDE_EFFECTS (t
) = 1;
4488 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4490 emit_label (lab_over
);
/* By-reference case: load the real address through the pointer slot.  */
4494 r
= gen_rtx_MEM (Pmode
, addr_rtx
);
4495 set_mem_alias_set (r
, get_varargs_alias_set ());
4496 emit_move_insn (addr_rtx
, r
);
/* Register builtin NAME with the front end only when the target flags
   in MASK are enabled.  NOTE(review): the macro's continuation lines
   are missing from this extraction -- see the original source.  */
4504 #define def_builtin(MASK, NAME, TYPE, CODE) \
4506 if ((MASK) & target_flags) \
4507 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
4511 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
/* AltiVec ternary builtins: each entry maps a target-flag mask and an
   insn code to the builtin's name and enum value.  */
4513 static const struct builtin_description bdesc_3arg
[] =
4515 { MASK_ALTIVEC
, CODE_FOR_altivec_vmaddfp
, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP
},
4516 { MASK_ALTIVEC
, CODE_FOR_altivec_vmhaddshs
, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS
},
4517 { MASK_ALTIVEC
, CODE_FOR_altivec_vmhraddshs
, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS
},
4518 { MASK_ALTIVEC
, CODE_FOR_altivec_vmladduhm
, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM
},
4519 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumubm
, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM
},
4520 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsummbm
, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM
},
4521 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumuhm
, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM
},
4522 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumshm
, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM
},
4523 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumuhs
, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS
},
4524 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumshs
, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS
},
4525 { MASK_ALTIVEC
, CODE_FOR_altivec_vnmsubfp
, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP
},
4526 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_4sf
, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF
},
4527 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_4si
, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI
},
4528 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_8hi
, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI
},
4529 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_16qi
, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI
},
4530 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_4sf
, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF
},
4531 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_4si
, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI
},
4532 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_8hi
, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI
},
4533 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_16qi
, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI
},
4534 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_16qi
, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI
},
4535 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_8hi
, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI
},
4536 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_4si
, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI
},
4537 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_4sf
, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF
},
4540 /* DST operations: void foo (void *, const int, const char). */
/* AltiVec data-stream (cache hint) builtins: dst/dstt/dstst/dststt.  */
4542 static const struct builtin_description bdesc_dst
[] =
4544 { MASK_ALTIVEC
, CODE_FOR_altivec_dst
, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST
},
4545 { MASK_ALTIVEC
, CODE_FOR_altivec_dstt
, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT
},
4546 { MASK_ALTIVEC
, CODE_FOR_altivec_dstst
, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST
},
4547 { MASK_ALTIVEC
, CODE_FOR_altivec_dststt
, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT
}
4550 /* Simple binary operations: VECc = foo (VECa, VECb). */
4552 static struct builtin_description bdesc_2arg
[] =
4554 { MASK_ALTIVEC
, CODE_FOR_addv16qi3
, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM
},
4555 { MASK_ALTIVEC
, CODE_FOR_addv8hi3
, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM
},
4556 { MASK_ALTIVEC
, CODE_FOR_addv4si3
, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM
},
4557 { MASK_ALTIVEC
, CODE_FOR_addv4sf3
, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP
},
4558 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddcuw
, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW
},
4559 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddubs
, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS
},
4560 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddsbs
, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS
},
4561 { MASK_ALTIVEC
, CODE_FOR_altivec_vadduhs
, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS
},
4562 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddshs
, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS
},
4563 { MASK_ALTIVEC
, CODE_FOR_altivec_vadduws
, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS
},
4564 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddsws
, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS
},
4565 { MASK_ALTIVEC
, CODE_FOR_andv4si3
, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND
},
4566 { MASK_ALTIVEC
, CODE_FOR_altivec_vandc
, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC
},
4567 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgub
, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB
},
4568 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsb
, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB
},
4569 { MASK_ALTIVEC
, CODE_FOR_altivec_vavguh
, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH
},
4570 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsh
, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH
},
4571 { MASK_ALTIVEC
, CODE_FOR_altivec_vavguw
, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW
},
4572 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsw
, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW
},
4573 { MASK_ALTIVEC
, CODE_FOR_altivec_vcfux
, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX
},
4574 { MASK_ALTIVEC
, CODE_FOR_altivec_vcfsx
, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX
},
4575 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpbfp
, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP
},
4576 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequb
, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB
},
4577 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequh
, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH
},
4578 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequw
, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW
},
4579 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpeqfp
, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP
},
4580 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgefp
, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP
},
4581 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtub
, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB
},
4582 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsb
, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB
},
4583 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtuh
, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH
},
4584 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsh
, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH
},
4585 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtuw
, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW
},
4586 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsw
, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW
},
4587 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtfp
, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP
},
4588 { MASK_ALTIVEC
, CODE_FOR_altivec_vctsxs
, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS
},
4589 { MASK_ALTIVEC
, CODE_FOR_altivec_vctuxs
, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS
},
4590 { MASK_ALTIVEC
, CODE_FOR_umaxv16qi3
, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB
},
4591 { MASK_ALTIVEC
, CODE_FOR_smaxv16qi3
, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB
},
4592 { MASK_ALTIVEC
, CODE_FOR_umaxv8hi3
, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH
},
4593 { MASK_ALTIVEC
, CODE_FOR_smaxv8hi3
, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH
},
4594 { MASK_ALTIVEC
, CODE_FOR_umaxv4si3
, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW
},
4595 { MASK_ALTIVEC
, CODE_FOR_smaxv4si3
, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW
},
4596 { MASK_ALTIVEC
, CODE_FOR_smaxv4sf3
, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP
},
4597 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghb
, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB
},
4598 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghh
, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH
},
4599 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghw
, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW
},
4600 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglb
, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB
},
4601 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglh
, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH
},
4602 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglw
, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW
},
4603 { MASK_ALTIVEC
, CODE_FOR_uminv16qi3
, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB
},
4604 { MASK_ALTIVEC
, CODE_FOR_sminv16qi3
, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB
},
4605 { MASK_ALTIVEC
, CODE_FOR_uminv8hi3
, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH
},
4606 { MASK_ALTIVEC
, CODE_FOR_sminv8hi3
, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH
},
4607 { MASK_ALTIVEC
, CODE_FOR_uminv4si3
, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW
},
4608 { MASK_ALTIVEC
, CODE_FOR_sminv4si3
, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW
},
4609 { MASK_ALTIVEC
, CODE_FOR_sminv4sf3
, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP
},
4610 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuleub
, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB
},
4611 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulesb
, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB
},
4612 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuleuh
, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH
},
4613 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulesh
, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH
},
4614 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuloub
, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB
},
4615 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulosb
, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB
},
4616 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulouh
, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH
},
4617 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulosh
, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH
},
4618 { MASK_ALTIVEC
, CODE_FOR_altivec_vnor
, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR
},
4619 { MASK_ALTIVEC
, CODE_FOR_iorv4si3
, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR
},
4620 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhum
, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM
},
4621 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwum
, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM
},
4622 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkpx
, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX
},
4623 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhss
, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS
},
4624 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkshss
, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS
},
4625 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwss
, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS
},
4626 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkswss
, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS
},
4627 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhus
, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS
},
4628 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkshus
, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS
},
4629 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwus
, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS
},
4630 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkswus
, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS
},
4631 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlb
, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB
},
4632 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlh
, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH
},
4633 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlw
, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW
},
4634 { MASK_ALTIVEC
, CODE_FOR_altivec_vslb
, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB
},
4635 { MASK_ALTIVEC
, CODE_FOR_altivec_vslh
, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH
},
4636 { MASK_ALTIVEC
, CODE_FOR_altivec_vslw
, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW
},
4637 { MASK_ALTIVEC
, CODE_FOR_altivec_vsl
, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL
},
4638 { MASK_ALTIVEC
, CODE_FOR_altivec_vslo
, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO
},
4639 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltb
, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB
},
4640 { MASK_ALTIVEC
, CODE_FOR_altivec_vsplth
, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH
},
4641 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltw
, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW
},
4642 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrb
, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB
},
4643 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrh
, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH
},
4644 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrw
, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW
},
4645 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrab
, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB
},
4646 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrah
, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH
},
4647 { MASK_ALTIVEC
, CODE_FOR_altivec_vsraw
, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW
},
4648 { MASK_ALTIVEC
, CODE_FOR_altivec_vsr
, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR
},
4649 { MASK_ALTIVEC
, CODE_FOR_altivec_vsro
, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO
},
4650 { MASK_ALTIVEC
, CODE_FOR_subv16qi3
, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM
},
4651 { MASK_ALTIVEC
, CODE_FOR_subv8hi3
, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM
},
4652 { MASK_ALTIVEC
, CODE_FOR_subv4si3
, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM
},
4653 { MASK_ALTIVEC
, CODE_FOR_subv4sf3
, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP
},
4654 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubcuw
, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW
},
4655 { MASK_ALTIVEC
, CODE_FOR_altivec_vsububs
, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS
},
4656 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubsbs
, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS
},
4657 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubuhs
, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS
},
4658 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubshs
, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS
},
4659 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubuws
, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS
},
4660 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubsws
, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS
},
4661 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4ubs
, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS
},
4662 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4sbs
, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS
},
4663 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4shs
, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS
},
4664 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum2sws
, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS
},
4665 { MASK_ALTIVEC
, CODE_FOR_altivec_vsumsws
, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS
},
4666 { MASK_ALTIVEC
, CODE_FOR_xorv4si3
, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR
},
4668 /* Place holder, leave as first spe builtin. */
4669 { 0, CODE_FOR_spe_evaddw
, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW
},
4670 { 0, CODE_FOR_spe_evand
, "__builtin_spe_evand", SPE_BUILTIN_EVAND
},
4671 { 0, CODE_FOR_spe_evandc
, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC
},
4672 { 0, CODE_FOR_spe_evdivws
, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS
},
4673 { 0, CODE_FOR_spe_evdivwu
, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU
},
4674 { 0, CODE_FOR_spe_eveqv
, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV
},
4675 { 0, CODE_FOR_spe_evfsadd
, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD
},
4676 { 0, CODE_FOR_spe_evfsdiv
, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV
},
4677 { 0, CODE_FOR_spe_evfsmul
, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL
},
4678 { 0, CODE_FOR_spe_evfssub
, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB
},
4679 { 0, CODE_FOR_spe_evmergehi
, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI
},
4680 { 0, CODE_FOR_spe_evmergehilo
, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO
},
4681 { 0, CODE_FOR_spe_evmergelo
, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO
},
4682 { 0, CODE_FOR_spe_evmergelohi
, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI
},
4683 { 0, CODE_FOR_spe_evmhegsmfaa
, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA
},
4684 { 0, CODE_FOR_spe_evmhegsmfan
, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN
},
4685 { 0, CODE_FOR_spe_evmhegsmiaa
, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA
},
4686 { 0, CODE_FOR_spe_evmhegsmian
, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN
},
4687 { 0, CODE_FOR_spe_evmhegumiaa
, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA
},
4688 { 0, CODE_FOR_spe_evmhegumian
, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN
},
4689 { 0, CODE_FOR_spe_evmhesmf
, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF
},
4690 { 0, CODE_FOR_spe_evmhesmfa
, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA
},
4691 { 0, CODE_FOR_spe_evmhesmfaaw
, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW
},
4692 { 0, CODE_FOR_spe_evmhesmfanw
, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW
},
4693 { 0, CODE_FOR_spe_evmhesmi
, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI
},
4694 { 0, CODE_FOR_spe_evmhesmia
, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA
},
4695 { 0, CODE_FOR_spe_evmhesmiaaw
, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW
},
4696 { 0, CODE_FOR_spe_evmhesmianw
, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW
},
4697 { 0, CODE_FOR_spe_evmhessf
, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF
},
4698 { 0, CODE_FOR_spe_evmhessfa
, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA
},
4699 { 0, CODE_FOR_spe_evmhessfaaw
, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW
},
4700 { 0, CODE_FOR_spe_evmhessfanw
, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW
},
4701 { 0, CODE_FOR_spe_evmhessiaaw
, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW
},
4702 { 0, CODE_FOR_spe_evmhessianw
, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW
},
4703 { 0, CODE_FOR_spe_evmheumi
, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI
},
4704 { 0, CODE_FOR_spe_evmheumia
, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA
},
4705 { 0, CODE_FOR_spe_evmheumiaaw
, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW
},
4706 { 0, CODE_FOR_spe_evmheumianw
, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW
},
4707 { 0, CODE_FOR_spe_evmheusiaaw
, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW
},
4708 { 0, CODE_FOR_spe_evmheusianw
, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW
},
4709 { 0, CODE_FOR_spe_evmhogsmfaa
, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA
},
4710 { 0, CODE_FOR_spe_evmhogsmfan
, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN
},
4711 { 0, CODE_FOR_spe_evmhogsmiaa
, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA
},
4712 { 0, CODE_FOR_spe_evmhogsmian
, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN
},
4713 { 0, CODE_FOR_spe_evmhogumiaa
, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA
},
4714 { 0, CODE_FOR_spe_evmhogumian
, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN
},
4715 { 0, CODE_FOR_spe_evmhosmf
, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF
},
4716 { 0, CODE_FOR_spe_evmhosmfa
, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA
},
4717 { 0, CODE_FOR_spe_evmhosmfaaw
, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW
},
4718 { 0, CODE_FOR_spe_evmhosmfanw
, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW
},
4719 { 0, CODE_FOR_spe_evmhosmi
, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI
},
4720 { 0, CODE_FOR_spe_evmhosmia
, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA
},
4721 { 0, CODE_FOR_spe_evmhosmiaaw
, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW
},
4722 { 0, CODE_FOR_spe_evmhosmianw
, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW
},
4723 { 0, CODE_FOR_spe_evmhossf
, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF
},
4724 { 0, CODE_FOR_spe_evmhossfa
, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA
},
4725 { 0, CODE_FOR_spe_evmhossfaaw
, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW
},
4726 { 0, CODE_FOR_spe_evmhossfanw
, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW
},
4727 { 0, CODE_FOR_spe_evmhossiaaw
, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW
},
4728 { 0, CODE_FOR_spe_evmhossianw
, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW
},
4729 { 0, CODE_FOR_spe_evmhoumi
, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI
},
4730 { 0, CODE_FOR_spe_evmhoumia
, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA
},
4731 { 0, CODE_FOR_spe_evmhoumiaaw
, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW
},
4732 { 0, CODE_FOR_spe_evmhoumianw
, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW
},
4733 { 0, CODE_FOR_spe_evmhousiaaw
, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW
},
4734 { 0, CODE_FOR_spe_evmhousianw
, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW
},
4735 { 0, CODE_FOR_spe_evmwhsmf
, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF
},
4736 { 0, CODE_FOR_spe_evmwhsmfa
, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA
},
4737 { 0, CODE_FOR_spe_evmwhsmi
, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI
},
4738 { 0, CODE_FOR_spe_evmwhsmia
, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA
},
4739 { 0, CODE_FOR_spe_evmwhssf
, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF
},
4740 { 0, CODE_FOR_spe_evmwhssfa
, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA
},
4741 { 0, CODE_FOR_spe_evmwhumi
, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI
},
4742 { 0, CODE_FOR_spe_evmwhumia
, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA
},
4743 { 0, CODE_FOR_spe_evmwlsmiaaw
, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW
},
4744 { 0, CODE_FOR_spe_evmwlsmianw
, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW
},
4745 { 0, CODE_FOR_spe_evmwlssiaaw
, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW
},
4746 { 0, CODE_FOR_spe_evmwlssianw
, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW
},
4747 { 0, CODE_FOR_spe_evmwlumi
, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI
},
4748 { 0, CODE_FOR_spe_evmwlumia
, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA
},
4749 { 0, CODE_FOR_spe_evmwlumiaaw
, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW
},
4750 { 0, CODE_FOR_spe_evmwlumianw
, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW
},
4751 { 0, CODE_FOR_spe_evmwlusiaaw
, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW
},
4752 { 0, CODE_FOR_spe_evmwlusianw
, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW
},
4753 { 0, CODE_FOR_spe_evmwsmf
, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF
},
4754 { 0, CODE_FOR_spe_evmwsmfa
, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA
},
4755 { 0, CODE_FOR_spe_evmwsmfaa
, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA
},
4756 { 0, CODE_FOR_spe_evmwsmfan
, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN
},
4757 { 0, CODE_FOR_spe_evmwsmi
, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI
},
4758 { 0, CODE_FOR_spe_evmwsmia
, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA
},
4759 { 0, CODE_FOR_spe_evmwsmiaa
, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA
},
4760 { 0, CODE_FOR_spe_evmwsmian
, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN
},
4761 { 0, CODE_FOR_spe_evmwssf
, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF
},
4762 { 0, CODE_FOR_spe_evmwssfa
, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA
},
4763 { 0, CODE_FOR_spe_evmwssfaa
, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA
},
4764 { 0, CODE_FOR_spe_evmwssfan
, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN
},
4765 { 0, CODE_FOR_spe_evmwumi
, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI
},
4766 { 0, CODE_FOR_spe_evmwumia
, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA
},
4767 { 0, CODE_FOR_spe_evmwumiaa
, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA
},
4768 { 0, CODE_FOR_spe_evmwumian
, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN
},
4769 { 0, CODE_FOR_spe_evnand
, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND
},
4770 { 0, CODE_FOR_spe_evnor
, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR
},
4771 { 0, CODE_FOR_spe_evor
, "__builtin_spe_evor", SPE_BUILTIN_EVOR
},
4772 { 0, CODE_FOR_spe_evorc
, "__builtin_spe_evorc", SPE_BUILTIN_EVORC
},
4773 { 0, CODE_FOR_spe_evrlw
, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW
},
4774 { 0, CODE_FOR_spe_evslw
, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW
},
4775 { 0, CODE_FOR_spe_evsrws
, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS
},
4776 { 0, CODE_FOR_spe_evsrwu
, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU
},
4777 { 0, CODE_FOR_spe_evsubfw
, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW
},
4779 /* SPE binary operations expecting a 5-bit unsigned literal. */
4780 { 0, CODE_FOR_spe_evaddiw
, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW
},
4782 { 0, CODE_FOR_spe_evrlwi
, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI
},
4783 { 0, CODE_FOR_spe_evslwi
, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI
},
4784 { 0, CODE_FOR_spe_evsrwis
, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS
},
4785 { 0, CODE_FOR_spe_evsrwiu
, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU
},
4786 { 0, CODE_FOR_spe_evsubifw
, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW
},
4787 { 0, CODE_FOR_spe_evmwhssfaa
, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA
},
4788 { 0, CODE_FOR_spe_evmwhssmaa
, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA
},
4789 { 0, CODE_FOR_spe_evmwhsmfaa
, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA
},
4790 { 0, CODE_FOR_spe_evmwhsmiaa
, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA
},
4791 { 0, CODE_FOR_spe_evmwhusiaa
, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA
},
4792 { 0, CODE_FOR_spe_evmwhumiaa
, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA
},
4793 { 0, CODE_FOR_spe_evmwhssfan
, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN
},
4794 { 0, CODE_FOR_spe_evmwhssian
, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN
},
4795 { 0, CODE_FOR_spe_evmwhsmfan
, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN
},
4796 { 0, CODE_FOR_spe_evmwhsmian
, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN
},
4797 { 0, CODE_FOR_spe_evmwhusian
, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN
},
4798 { 0, CODE_FOR_spe_evmwhumian
, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN
},
4799 { 0, CODE_FOR_spe_evmwhgssfaa
, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA
},
4800 { 0, CODE_FOR_spe_evmwhgsmfaa
, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA
},
4801 { 0, CODE_FOR_spe_evmwhgsmiaa
, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA
},
4802 { 0, CODE_FOR_spe_evmwhgumiaa
, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA
},
4803 { 0, CODE_FOR_spe_evmwhgssfan
, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN
},
4804 { 0, CODE_FOR_spe_evmwhgsmfan
, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN
},
4805 { 0, CODE_FOR_spe_evmwhgsmian
, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN
},
4806 { 0, CODE_FOR_spe_evmwhgumian
, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN
},
4807 { 0, CODE_FOR_spe_brinc
, "__builtin_spe_brinc", SPE_BUILTIN_BRINC
},
4809 /* Place-holder. Leave as last binary SPE builtin. */
4810 { 0, CODE_FOR_xorv2si3
, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR
},
4813 /* AltiVec predicates. */
4815 struct builtin_description_predicates
4817 const unsigned int mask
;
4818 const enum insn_code icode
;
4820 const char *const name
;
4821 const enum rs6000_builtins code
;
4824 static const struct builtin_description_predicates bdesc_altivec_preds
[] =
4826 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P
},
4827 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P
},
4828 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P
},
4829 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P
},
4830 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P
},
4831 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P
},
4832 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P
},
4833 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P
},
4834 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P
},
4835 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P
},
4836 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P
},
4837 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P
},
4838 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P
}
4841 /* SPE predicates. */
4842 static struct builtin_description bdesc_spe_predicates
[] =
4844 /* Place-holder. Leave as first. */
4845 { 0, CODE_FOR_spe_evcmpeq
, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ
},
4846 { 0, CODE_FOR_spe_evcmpgts
, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS
},
4847 { 0, CODE_FOR_spe_evcmpgtu
, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU
},
4848 { 0, CODE_FOR_spe_evcmplts
, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS
},
4849 { 0, CODE_FOR_spe_evcmpltu
, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU
},
4850 { 0, CODE_FOR_spe_evfscmpeq
, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ
},
4851 { 0, CODE_FOR_spe_evfscmpgt
, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT
},
4852 { 0, CODE_FOR_spe_evfscmplt
, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT
},
4853 { 0, CODE_FOR_spe_evfststeq
, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ
},
4854 { 0, CODE_FOR_spe_evfststgt
, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT
},
4855 /* Place-holder. Leave as last. */
4856 { 0, CODE_FOR_spe_evfststlt
, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT
},
4859 /* SPE evsel predicates. */
4860 static struct builtin_description bdesc_spe_evsel
[] =
4862 /* Place-holder. Leave as first. */
4863 { 0, CODE_FOR_spe_evcmpgts
, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS
},
4864 { 0, CODE_FOR_spe_evcmpgtu
, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU
},
4865 { 0, CODE_FOR_spe_evcmplts
, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS
},
4866 { 0, CODE_FOR_spe_evcmpltu
, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU
},
4867 { 0, CODE_FOR_spe_evcmpeq
, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ
},
4868 { 0, CODE_FOR_spe_evfscmpgt
, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT
},
4869 { 0, CODE_FOR_spe_evfscmplt
, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT
},
4870 { 0, CODE_FOR_spe_evfscmpeq
, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ
},
4871 { 0, CODE_FOR_spe_evfststgt
, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT
},
4872 { 0, CODE_FOR_spe_evfststlt
, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT
},
4873 /* Place-holder. Leave as last. */
4874 { 0, CODE_FOR_spe_evfststeq
, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ
},
4877 /* ABS* operations. */
4879 static const struct builtin_description bdesc_abs
[] =
4881 { MASK_ALTIVEC
, CODE_FOR_absv4si2
, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI
},
4882 { MASK_ALTIVEC
, CODE_FOR_absv8hi2
, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI
},
4883 { MASK_ALTIVEC
, CODE_FOR_absv4sf2
, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF
},
4884 { MASK_ALTIVEC
, CODE_FOR_absv16qi2
, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI
},
4885 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v4si
, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI
},
4886 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v8hi
, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI
},
4887 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v16qi
, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI
}
4890 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
4893 static struct builtin_description bdesc_1arg
[] =
4895 { MASK_ALTIVEC
, CODE_FOR_altivec_vexptefp
, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP
},
4896 { MASK_ALTIVEC
, CODE_FOR_altivec_vlogefp
, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP
},
4897 { MASK_ALTIVEC
, CODE_FOR_altivec_vrefp
, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP
},
4898 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfim
, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM
},
4899 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfin
, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN
},
4900 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfip
, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP
},
4901 { MASK_ALTIVEC
, CODE_FOR_ftruncv4sf2
, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ
},
4902 { MASK_ALTIVEC
, CODE_FOR_altivec_vrsqrtefp
, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP
},
4903 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltisb
, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB
},
4904 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltish
, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH
},
4905 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltisw
, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW
},
4906 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhsb
, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB
},
4907 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhpx
, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX
},
4908 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhsh
, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH
},
4909 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklsb
, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB
},
4910 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklpx
, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX
},
4911 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklsh
, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH
},
4913 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
4914 end with SPE_BUILTIN_EVSUBFUSIAAW. */
4915 { 0, CODE_FOR_spe_evabs
, "__builtin_spe_evabs", SPE_BUILTIN_EVABS
},
4916 { 0, CODE_FOR_spe_evaddsmiaaw
, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW
},
4917 { 0, CODE_FOR_spe_evaddssiaaw
, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW
},
4918 { 0, CODE_FOR_spe_evaddumiaaw
, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW
},
4919 { 0, CODE_FOR_spe_evaddusiaaw
, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW
},
4920 { 0, CODE_FOR_spe_evcntlsw
, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW
},
4921 { 0, CODE_FOR_spe_evcntlzw
, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW
},
4922 { 0, CODE_FOR_spe_evextsb
, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB
},
4923 { 0, CODE_FOR_spe_evextsh
, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH
},
4924 { 0, CODE_FOR_spe_evfsabs
, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS
},
4925 { 0, CODE_FOR_spe_evfscfsf
, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF
},
4926 { 0, CODE_FOR_spe_evfscfsi
, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI
},
4927 { 0, CODE_FOR_spe_evfscfuf
, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF
},
4928 { 0, CODE_FOR_spe_evfscfui
, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI
},
4929 { 0, CODE_FOR_spe_evfsctsf
, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF
},
4930 { 0, CODE_FOR_spe_evfsctsi
, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI
},
4931 { 0, CODE_FOR_spe_evfsctsiz
, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ
},
4932 { 0, CODE_FOR_spe_evfsctuf
, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF
},
4933 { 0, CODE_FOR_spe_evfsctui
, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI
},
4934 { 0, CODE_FOR_spe_evfsctuiz
, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ
},
4935 { 0, CODE_FOR_spe_evfsnabs
, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS
},
4936 { 0, CODE_FOR_spe_evfsneg
, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG
},
4937 { 0, CODE_FOR_spe_evmra
, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA
},
4938 { 0, CODE_FOR_spe_evneg
, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG
},
4939 { 0, CODE_FOR_spe_evrndw
, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW
},
4940 { 0, CODE_FOR_spe_evsubfsmiaaw
, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW
},
4941 { 0, CODE_FOR_spe_evsubfssiaaw
, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW
},
4942 { 0, CODE_FOR_spe_evsubfumiaaw
, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW
},
4943 { 0, CODE_FOR_spe_evsplatfi
, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI
},
4944 { 0, CODE_FOR_spe_evsplati
, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI
},
4946 /* Place-holder. Leave as last unary SPE builtin. */
4947 { 0, CODE_FOR_spe_evsubfusiaaw
, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW
},
4951 rs6000_expand_unop_builtin (icode
, arglist
, target
)
4952 enum insn_code icode
;
4957 tree arg0
= TREE_VALUE (arglist
);
4958 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4959 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4960 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4962 if (icode
== CODE_FOR_nothing
)
4963 /* Builtin not supported on this processor. */
4966 /* If we got invalid arguments bail out before generating bad rtl. */
4967 if (arg0
== error_mark_node
)
4970 if (icode
== CODE_FOR_altivec_vspltisb
4971 || icode
== CODE_FOR_altivec_vspltish
4972 || icode
== CODE_FOR_altivec_vspltisw
4973 || icode
== CODE_FOR_spe_evsplatfi
4974 || icode
== CODE_FOR_spe_evsplati
)
4976 /* Only allow 5-bit *signed* literals. */
4977 if (GET_CODE (op0
) != CONST_INT
4978 || INTVAL (op0
) > 0x1f
4979 || INTVAL (op0
) < -0x1f)
4981 error ("argument 1 must be a 5-bit signed literal");
4987 || GET_MODE (target
) != tmode
4988 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4989 target
= gen_reg_rtx (tmode
);
4991 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4992 op0
= copy_to_mode_reg (mode0
, op0
);
4994 pat
= GEN_FCN (icode
) (target
, op0
);
5003 altivec_expand_abs_builtin (icode
, arglist
, target
)
5004 enum insn_code icode
;
5008 rtx pat
, scratch1
, scratch2
;
5009 tree arg0
= TREE_VALUE (arglist
);
5010 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5011 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
5012 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5014 /* If we have invalid arguments, bail out before generating bad rtl. */
5015 if (arg0
== error_mark_node
)
5019 || GET_MODE (target
) != tmode
5020 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5021 target
= gen_reg_rtx (tmode
);
5023 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5024 op0
= copy_to_mode_reg (mode0
, op0
);
5026 scratch1
= gen_reg_rtx (mode0
);
5027 scratch2
= gen_reg_rtx (mode0
);
5029 pat
= GEN_FCN (icode
) (target
, op0
, scratch1
, scratch2
);
5038 rs6000_expand_binop_builtin (icode
, arglist
, target
)
5039 enum insn_code icode
;
5044 tree arg0
= TREE_VALUE (arglist
);
5045 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
5046 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5047 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5048 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
5049 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5050 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
5052 if (icode
== CODE_FOR_nothing
)
5053 /* Builtin not supported on this processor. */
5056 /* If we got invalid arguments bail out before generating bad rtl. */
5057 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
5060 if (icode
== CODE_FOR_altivec_vcfux
5061 || icode
== CODE_FOR_altivec_vcfsx
5062 || icode
== CODE_FOR_altivec_vctsxs
5063 || icode
== CODE_FOR_altivec_vctuxs
5064 || icode
== CODE_FOR_altivec_vspltb
5065 || icode
== CODE_FOR_altivec_vsplth
5066 || icode
== CODE_FOR_altivec_vspltw
5067 || icode
== CODE_FOR_spe_evaddiw
5068 || icode
== CODE_FOR_spe_evldd
5069 || icode
== CODE_FOR_spe_evldh
5070 || icode
== CODE_FOR_spe_evldw
5071 || icode
== CODE_FOR_spe_evlhhesplat
5072 || icode
== CODE_FOR_spe_evlhhossplat
5073 || icode
== CODE_FOR_spe_evlhhousplat
5074 || icode
== CODE_FOR_spe_evlwhe
5075 || icode
== CODE_FOR_spe_evlwhos
5076 || icode
== CODE_FOR_spe_evlwhou
5077 || icode
== CODE_FOR_spe_evlwhsplat
5078 || icode
== CODE_FOR_spe_evlwwsplat
5079 || icode
== CODE_FOR_spe_evrlwi
5080 || icode
== CODE_FOR_spe_evslwi
5081 || icode
== CODE_FOR_spe_evsrwis
5082 || icode
== CODE_FOR_spe_evsubifw
5083 || icode
== CODE_FOR_spe_evsrwiu
)
5085 /* Only allow 5-bit unsigned literals. */
5086 if (TREE_CODE (arg1
) != INTEGER_CST
5087 || TREE_INT_CST_LOW (arg1
) & ~0x1f)
5089 error ("argument 2 must be a 5-bit unsigned literal");
5095 || GET_MODE (target
) != tmode
5096 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5097 target
= gen_reg_rtx (tmode
);
5099 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5100 op0
= copy_to_mode_reg (mode0
, op0
);
5101 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
5102 op1
= copy_to_mode_reg (mode1
, op1
);
5104 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
5113 altivec_expand_predicate_builtin (icode
, opcode
, arglist
, target
)
5114 enum insn_code icode
;
5120 tree cr6_form
= TREE_VALUE (arglist
);
5121 tree arg0
= TREE_VALUE (TREE_CHAIN (arglist
));
5122 tree arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5123 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5124 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5125 enum machine_mode tmode
= SImode
;
5126 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5127 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
5130 if (TREE_CODE (cr6_form
) != INTEGER_CST
)
5132 error ("argument 1 of __builtin_altivec_predicate must be a constant");
5136 cr6_form_int
= TREE_INT_CST_LOW (cr6_form
);
5141 /* If we have invalid arguments, bail out before generating bad rtl. */
5142 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
5146 || GET_MODE (target
) != tmode
5147 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5148 target
= gen_reg_rtx (tmode
);
5150 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5151 op0
= copy_to_mode_reg (mode0
, op0
);
5152 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
5153 op1
= copy_to_mode_reg (mode1
, op1
);
5155 scratch
= gen_reg_rtx (mode0
);
5157 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
,
5158 gen_rtx (SYMBOL_REF
, Pmode
, opcode
));
5163 /* The vec_any* and vec_all* predicates use the same opcodes for two
5164 different operations, but the bits in CR6 will be different
5165 depending on what information we want. So we have to play tricks
5166 with CR6 to get the right bits out.
5168 If you think this is disgusting, look at the specs for the
5169 AltiVec predicates. */
5171 switch (cr6_form_int
)
5174 emit_insn (gen_cr6_test_for_zero (target
));
5177 emit_insn (gen_cr6_test_for_zero_reverse (target
));
5180 emit_insn (gen_cr6_test_for_lt (target
));
5183 emit_insn (gen_cr6_test_for_lt_reverse (target
));
5186 error ("argument 1 of __builtin_altivec_predicate is out of range");
5194 altivec_expand_stv_builtin (icode
, arglist
)
5195 enum insn_code icode
;
5198 tree arg0
= TREE_VALUE (arglist
);
5199 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
5200 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5201 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5202 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5203 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
5205 enum machine_mode mode0
= insn_data
[icode
].operand
[0].mode
;
5206 enum machine_mode mode1
= insn_data
[icode
].operand
[1].mode
;
5207 enum machine_mode mode2
= insn_data
[icode
].operand
[2].mode
;
5209 /* Invalid arguments. Bail before doing anything stoopid! */
5210 if (arg0
== error_mark_node
5211 || arg1
== error_mark_node
5212 || arg2
== error_mark_node
)
5215 if (! (*insn_data
[icode
].operand
[2].predicate
) (op0
, mode2
))
5216 op0
= copy_to_mode_reg (mode2
, op0
);
5217 if (! (*insn_data
[icode
].operand
[0].predicate
) (op1
, mode0
))
5218 op1
= copy_to_mode_reg (mode0
, op1
);
5219 if (! (*insn_data
[icode
].operand
[1].predicate
) (op2
, mode1
))
5220 op2
= copy_to_mode_reg (mode1
, op2
);
5222 pat
= GEN_FCN (icode
) (op1
, op2
, op0
);
5229 rs6000_expand_ternop_builtin (icode
, arglist
, target
)
5230 enum insn_code icode
;
5235 tree arg0
= TREE_VALUE (arglist
);
5236 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
5237 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5238 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5239 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5240 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
5241 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
5242 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5243 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
5244 enum machine_mode mode2
= insn_data
[icode
].operand
[3].mode
;
5246 if (icode
== CODE_FOR_nothing
)
5247 /* Builtin not supported on this processor. */
5250 /* If we got invalid arguments bail out before generating bad rtl. */
5251 if (arg0
== error_mark_node
5252 || arg1
== error_mark_node
5253 || arg2
== error_mark_node
)
5256 if (icode
== CODE_FOR_altivec_vsldoi_4sf
5257 || icode
== CODE_FOR_altivec_vsldoi_4si
5258 || icode
== CODE_FOR_altivec_vsldoi_8hi
5259 || icode
== CODE_FOR_altivec_vsldoi_16qi
)
5261 /* Only allow 4-bit unsigned literals. */
5262 if (TREE_CODE (arg2
) != INTEGER_CST
5263 || TREE_INT_CST_LOW (arg2
) & ~0xf)
5265 error ("argument 3 must be a 4-bit unsigned literal");
5271 || GET_MODE (target
) != tmode
5272 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5273 target
= gen_reg_rtx (tmode
);
5275 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5276 op0
= copy_to_mode_reg (mode0
, op0
);
5277 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
5278 op1
= copy_to_mode_reg (mode1
, op1
);
5279 if (! (*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
))
5280 op2
= copy_to_mode_reg (mode2
, op2
);
5282 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
);
5290 /* Expand the lvx builtins. */
5292 altivec_expand_ld_builtin (exp
, target
, expandedp
)
5297 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5298 tree arglist
= TREE_OPERAND (exp
, 1);
5299 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
5301 enum machine_mode tmode
, mode0
;
5303 enum insn_code icode
;
5307 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi
:
5308 icode
= CODE_FOR_altivec_lvx_16qi
;
5310 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi
:
5311 icode
= CODE_FOR_altivec_lvx_8hi
;
5313 case ALTIVEC_BUILTIN_LD_INTERNAL_4si
:
5314 icode
= CODE_FOR_altivec_lvx_4si
;
5316 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf
:
5317 icode
= CODE_FOR_altivec_lvx_4sf
;
5326 arg0
= TREE_VALUE (arglist
);
5327 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5328 tmode
= insn_data
[icode
].operand
[0].mode
;
5329 mode0
= insn_data
[icode
].operand
[1].mode
;
5332 || GET_MODE (target
) != tmode
5333 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5334 target
= gen_reg_rtx (tmode
);
5336 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5337 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
5339 pat
= GEN_FCN (icode
) (target
, op0
);
5346 /* Expand the stvx builtins. */
5348 altivec_expand_st_builtin (exp
, target
, expandedp
)
5350 rtx target ATTRIBUTE_UNUSED
;
5353 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5354 tree arglist
= TREE_OPERAND (exp
, 1);
5355 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
5357 enum machine_mode mode0
, mode1
;
5359 enum insn_code icode
;
5363 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi
:
5364 icode
= CODE_FOR_altivec_stvx_16qi
;
5366 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi
:
5367 icode
= CODE_FOR_altivec_stvx_8hi
;
5369 case ALTIVEC_BUILTIN_ST_INTERNAL_4si
:
5370 icode
= CODE_FOR_altivec_stvx_4si
;
5372 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf
:
5373 icode
= CODE_FOR_altivec_stvx_4sf
;
5380 arg0
= TREE_VALUE (arglist
);
5381 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
5382 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5383 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5384 mode0
= insn_data
[icode
].operand
[0].mode
;
5385 mode1
= insn_data
[icode
].operand
[1].mode
;
5387 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
5388 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
5389 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
5390 op1
= copy_to_mode_reg (mode1
, op1
);
5392 pat
= GEN_FCN (icode
) (op0
, op1
);
5400 /* Expand the dst builtins. */
5402 altivec_expand_dst_builtin (exp
, target
, expandedp
)
5404 rtx target ATTRIBUTE_UNUSED
;
5407 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5408 tree arglist
= TREE_OPERAND (exp
, 1);
5409 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
5410 tree arg0
, arg1
, arg2
;
5411 enum machine_mode mode0
, mode1
, mode2
;
5412 rtx pat
, op0
, op1
, op2
;
5413 struct builtin_description
*d
;
5418 /* Handle DST variants. */
5419 d
= (struct builtin_description
*) bdesc_dst
;
5420 for (i
= 0; i
< ARRAY_SIZE (bdesc_dst
); i
++, d
++)
5421 if (d
->code
== fcode
)
5423 arg0
= TREE_VALUE (arglist
);
5424 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
5425 arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5426 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5427 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5428 op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
5429 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
5430 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
5431 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
5433 /* Invalid arguments, bail out before generating bad rtl. */
5434 if (arg0
== error_mark_node
5435 || arg1
== error_mark_node
5436 || arg2
== error_mark_node
)
5439 if (TREE_CODE (arg2
) != INTEGER_CST
5440 || TREE_INT_CST_LOW (arg2
) & ~0x3)
5442 error ("argument to `%s' must be a 2-bit unsigned literal", d
->name
);
5446 if (! (*insn_data
[d
->icode
].operand
[0].predicate
) (op0
, mode0
))
5447 op0
= copy_to_mode_reg (mode0
, op0
);
5448 if (! (*insn_data
[d
->icode
].operand
[1].predicate
) (op1
, mode1
))
5449 op1
= copy_to_mode_reg (mode1
, op1
);
5451 pat
= GEN_FCN (d
->icode
) (op0
, op1
, op2
);
5462 /* Expand the builtin in EXP and store the result in TARGET. Store
5463 true in *EXPANDEDP if we found a builtin to expand. */
5465 altivec_expand_builtin (exp
, target
, expandedp
)
5470 struct builtin_description
*d
;
5471 struct builtin_description_predicates
*dp
;
5473 enum insn_code icode
;
5474 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5475 tree arglist
= TREE_OPERAND (exp
, 1);
5478 enum machine_mode tmode
, mode0
;
5479 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
5481 target
= altivec_expand_ld_builtin (exp
, target
, expandedp
);
5485 target
= altivec_expand_st_builtin (exp
, target
, expandedp
);
5489 target
= altivec_expand_dst_builtin (exp
, target
, expandedp
);
5497 case ALTIVEC_BUILTIN_STVX
:
5498 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx
, arglist
);
5499 case ALTIVEC_BUILTIN_STVEBX
:
5500 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx
, arglist
);
5501 case ALTIVEC_BUILTIN_STVEHX
:
5502 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx
, arglist
);
5503 case ALTIVEC_BUILTIN_STVEWX
:
5504 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx
, arglist
);
5505 case ALTIVEC_BUILTIN_STVXL
:
5506 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl
, arglist
);
5508 case ALTIVEC_BUILTIN_MFVSCR
:
5509 icode
= CODE_FOR_altivec_mfvscr
;
5510 tmode
= insn_data
[icode
].operand
[0].mode
;
5513 || GET_MODE (target
) != tmode
5514 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5515 target
= gen_reg_rtx (tmode
);
5517 pat
= GEN_FCN (icode
) (target
);
5523 case ALTIVEC_BUILTIN_MTVSCR
:
5524 icode
= CODE_FOR_altivec_mtvscr
;
5525 arg0
= TREE_VALUE (arglist
);
5526 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5527 mode0
= insn_data
[icode
].operand
[0].mode
;
5529 /* If we got invalid arguments bail out before generating bad rtl. */
5530 if (arg0
== error_mark_node
)
5533 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
5534 op0
= copy_to_mode_reg (mode0
, op0
);
5536 pat
= GEN_FCN (icode
) (op0
);
5541 case ALTIVEC_BUILTIN_DSSALL
:
5542 emit_insn (gen_altivec_dssall ());
5545 case ALTIVEC_BUILTIN_DSS
:
5546 icode
= CODE_FOR_altivec_dss
;
5547 arg0
= TREE_VALUE (arglist
);
5548 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5549 mode0
= insn_data
[icode
].operand
[0].mode
;
5551 /* If we got invalid arguments bail out before generating bad rtl. */
5552 if (arg0
== error_mark_node
)
5555 if (TREE_CODE (arg0
) != INTEGER_CST
5556 || TREE_INT_CST_LOW (arg0
) & ~0x3)
5558 error ("argument to dss must be a 2-bit unsigned literal");
5562 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
5563 op0
= copy_to_mode_reg (mode0
, op0
);
5565 emit_insn (gen_altivec_dss (op0
));
5569 /* Expand abs* operations. */
5570 d
= (struct builtin_description
*) bdesc_abs
;
5571 for (i
= 0; i
< ARRAY_SIZE (bdesc_abs
); i
++, d
++)
5572 if (d
->code
== fcode
)
5573 return altivec_expand_abs_builtin (d
->icode
, arglist
, target
);
5575 /* Expand the AltiVec predicates. */
5576 dp
= (struct builtin_description_predicates
*) bdesc_altivec_preds
;
5577 for (i
= 0; i
< ARRAY_SIZE (bdesc_altivec_preds
); i
++, dp
++)
5578 if (dp
->code
== fcode
)
5579 return altivec_expand_predicate_builtin (dp
->icode
, dp
->opcode
, arglist
, target
);
5581 /* LV* are funky. We initialized them differently. */
5584 case ALTIVEC_BUILTIN_LVSL
:
5585 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl
,
5587 case ALTIVEC_BUILTIN_LVSR
:
5588 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr
,
5590 case ALTIVEC_BUILTIN_LVEBX
:
5591 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx
,
5593 case ALTIVEC_BUILTIN_LVEHX
:
5594 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx
,
5596 case ALTIVEC_BUILTIN_LVEWX
:
5597 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx
,
5599 case ALTIVEC_BUILTIN_LVXL
:
5600 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl
,
5602 case ALTIVEC_BUILTIN_LVX
:
5603 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx
,
5614 /* Binops that need to be initialized manually, but can be expanded
5615 automagically by rs6000_expand_binop_builtin. */
5616 static struct builtin_description bdesc_2arg_spe
[] =
5618 { 0, CODE_FOR_spe_evlddx
, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX
},
5619 { 0, CODE_FOR_spe_evldwx
, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX
},
5620 { 0, CODE_FOR_spe_evldhx
, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX
},
5621 { 0, CODE_FOR_spe_evlwhex
, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX
},
5622 { 0, CODE_FOR_spe_evlwhoux
, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX
},
5623 { 0, CODE_FOR_spe_evlwhosx
, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX
},
5624 { 0, CODE_FOR_spe_evlwwsplatx
, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX
},
5625 { 0, CODE_FOR_spe_evlwhsplatx
, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX
},
5626 { 0, CODE_FOR_spe_evlhhesplatx
, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX
},
5627 { 0, CODE_FOR_spe_evlhhousplatx
, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX
},
5628 { 0, CODE_FOR_spe_evlhhossplatx
, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX
},
5629 { 0, CODE_FOR_spe_evldd
, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD
},
5630 { 0, CODE_FOR_spe_evldw
, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW
},
5631 { 0, CODE_FOR_spe_evldh
, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH
},
5632 { 0, CODE_FOR_spe_evlwhe
, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE
},
5633 { 0, CODE_FOR_spe_evlwhou
, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU
},
5634 { 0, CODE_FOR_spe_evlwhos
, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS
},
5635 { 0, CODE_FOR_spe_evlwwsplat
, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT
},
5636 { 0, CODE_FOR_spe_evlwhsplat
, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT
},
5637 { 0, CODE_FOR_spe_evlhhesplat
, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT
},
5638 { 0, CODE_FOR_spe_evlhhousplat
, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT
},
5639 { 0, CODE_FOR_spe_evlhhossplat
, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT
}
5642 /* Expand the builtin in EXP and store the result in TARGET. Store
5643 true in *EXPANDEDP if we found a builtin to expand.
5645 This expands the SPE builtins that are not simple unary and binary
5648 spe_expand_builtin (exp
, target
, expandedp
)
5653 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5654 tree arglist
= TREE_OPERAND (exp
, 1);
5656 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
5657 enum insn_code icode
;
5658 enum machine_mode tmode
, mode0
;
5660 struct builtin_description
*d
;
5665 /* Syntax check for a 5-bit unsigned immediate. */
5668 case SPE_BUILTIN_EVSTDD
:
5669 case SPE_BUILTIN_EVSTDH
:
5670 case SPE_BUILTIN_EVSTDW
:
5671 case SPE_BUILTIN_EVSTWHE
:
5672 case SPE_BUILTIN_EVSTWHO
:
5673 case SPE_BUILTIN_EVSTWWE
:
5674 case SPE_BUILTIN_EVSTWWO
:
5675 arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5676 if (TREE_CODE (arg1
) != INTEGER_CST
5677 || TREE_INT_CST_LOW (arg1
) & ~0x1f)
5679 error ("argument 2 must be a 5-bit unsigned literal");
5687 d
= (struct builtin_description
*) bdesc_2arg_spe
;
5688 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg_spe
); ++i
, ++d
)
5689 if (d
->code
== fcode
)
5690 return rs6000_expand_binop_builtin (d
->icode
, arglist
, target
);
5692 d
= (struct builtin_description
*) bdesc_spe_predicates
;
5693 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_predicates
); ++i
, ++d
)
5694 if (d
->code
== fcode
)
5695 return spe_expand_predicate_builtin (d
->icode
, arglist
, target
);
5697 d
= (struct builtin_description
*) bdesc_spe_evsel
;
5698 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_evsel
); ++i
, ++d
)
5699 if (d
->code
== fcode
)
5700 return spe_expand_evsel_builtin (d
->icode
, arglist
, target
);
5704 case SPE_BUILTIN_EVSTDDX
:
5705 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx
, arglist
);
5706 case SPE_BUILTIN_EVSTDHX
:
5707 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx
, arglist
);
5708 case SPE_BUILTIN_EVSTDWX
:
5709 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx
, arglist
);
5710 case SPE_BUILTIN_EVSTWHEX
:
5711 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex
, arglist
);
5712 case SPE_BUILTIN_EVSTWHOX
:
5713 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox
, arglist
);
5714 case SPE_BUILTIN_EVSTWWEX
:
5715 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex
, arglist
);
5716 case SPE_BUILTIN_EVSTWWOX
:
5717 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox
, arglist
);
5718 case SPE_BUILTIN_EVSTDD
:
5719 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd
, arglist
);
5720 case SPE_BUILTIN_EVSTDH
:
5721 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh
, arglist
);
5722 case SPE_BUILTIN_EVSTDW
:
5723 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw
, arglist
);
5724 case SPE_BUILTIN_EVSTWHE
:
5725 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe
, arglist
);
5726 case SPE_BUILTIN_EVSTWHO
:
5727 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho
, arglist
);
5728 case SPE_BUILTIN_EVSTWWE
:
5729 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe
, arglist
);
5730 case SPE_BUILTIN_EVSTWWO
:
5731 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo
, arglist
);
5732 case SPE_BUILTIN_MFSPEFSCR
:
5733 icode
= CODE_FOR_spe_mfspefscr
;
5734 tmode
= insn_data
[icode
].operand
[0].mode
;
5737 || GET_MODE (target
) != tmode
5738 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5739 target
= gen_reg_rtx (tmode
);
5741 pat
= GEN_FCN (icode
) (target
);
5746 case SPE_BUILTIN_MTSPEFSCR
:
5747 icode
= CODE_FOR_spe_mtspefscr
;
5748 arg0
= TREE_VALUE (arglist
);
5749 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5750 mode0
= insn_data
[icode
].operand
[0].mode
;
5752 if (arg0
== error_mark_node
)
5755 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
5756 op0
= copy_to_mode_reg (mode0
, op0
);
5758 pat
= GEN_FCN (icode
) (op0
);
5771 spe_expand_predicate_builtin (icode
, arglist
, target
)
5772 enum insn_code icode
;
5776 rtx pat
, scratch
, tmp
;
5777 tree form
= TREE_VALUE (arglist
);
5778 tree arg0
= TREE_VALUE (TREE_CHAIN (arglist
));
5779 tree arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5780 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5781 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5782 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5783 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
5787 if (TREE_CODE (form
) != INTEGER_CST
)
5789 error ("argument 1 of __builtin_spe_predicate must be a constant");
5793 form_int
= TREE_INT_CST_LOW (form
);
5798 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
5802 || GET_MODE (target
) != SImode
5803 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, SImode
))
5804 target
= gen_reg_rtx (SImode
);
5806 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5807 op0
= copy_to_mode_reg (mode0
, op0
);
5808 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
5809 op1
= copy_to_mode_reg (mode1
, op1
);
5811 scratch
= gen_reg_rtx (CCmode
);
5813 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
);
5818 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
5819 _lower_. We use one compare, but look in different bits of the
5820 CR for each variant.
5822 There are 2 elements in each SPE simd type (upper/lower). The CR
5823 bits are set as follows:
5825 BIT0 | BIT 1 | BIT 2 | BIT 3
5826 U | L | (U | L) | (U & L)
5828 So, for an "all" relationship, BIT 3 would be set.
5829 For an "any" relationship, BIT 2 would be set. Etc.
5831 Following traditional nomenclature, these bits map to:
5833 BIT0 | BIT 1 | BIT 2 | BIT 3
5836 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
5841 /* All variant. OV bit. */
5843 /* We need to get to the OV bit, which is the ORDERED bit. We
5844 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
5845 that's ugly and will trigger a validate_condition_mode abort.
5846 So let's just use another pattern. */
5847 emit_insn (gen_move_from_CR_ov_bit (target
, scratch
));
5849 /* Any variant. EQ bit. */
5853 /* Upper variant. LT bit. */
5857 /* Lower variant. GT bit. */
5862 error ("argument 1 of __builtin_spe_predicate is out of range");
5866 tmp
= gen_rtx_fmt_ee (code
, SImode
, scratch
, const0_rtx
);
5867 emit_move_insn (target
, tmp
);
5872 /* The evsel builtins look like this:
5874 e = __builtin_spe_evsel_OP (a, b, c, d);
5878 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
5879 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
5883 spe_expand_evsel_builtin (icode
, arglist
, target
)
5884 enum insn_code icode
;
5889 tree arg0
= TREE_VALUE (arglist
);
5890 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
5891 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5892 tree arg3
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist
))));
5893 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5894 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5895 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
5896 rtx op3
= expand_expr (arg3
, NULL_RTX
, VOIDmode
, 0);
5897 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5898 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
5903 if (arg0
== error_mark_node
|| arg1
== error_mark_node
5904 || arg2
== error_mark_node
|| arg3
== error_mark_node
)
5908 || GET_MODE (target
) != mode0
5909 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, mode0
))
5910 target
= gen_reg_rtx (mode0
);
5912 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5913 op0
= copy_to_mode_reg (mode0
, op0
);
5914 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
5915 op1
= copy_to_mode_reg (mode0
, op1
);
5916 if (! (*insn_data
[icode
].operand
[1].predicate
) (op2
, mode1
))
5917 op2
= copy_to_mode_reg (mode0
, op2
);
5918 if (! (*insn_data
[icode
].operand
[1].predicate
) (op3
, mode1
))
5919 op3
= copy_to_mode_reg (mode0
, op3
);
5921 /* Generate the compare. */
5922 scratch
= gen_reg_rtx (CCmode
);
5923 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
);
5928 if (mode0
== V2SImode
)
5929 emit_insn (gen_spe_evsel (target
, op2
, op3
, scratch
));
5931 emit_insn (gen_spe_evsel_fs (target
, op2
, op3
, scratch
));
5936 /* Expand an expression EXP that calls a built-in function,
5937 with result going to TARGET if that's convenient
5938 (and in mode MODE if that's convenient).
5939 SUBTARGET may be used as the target for computing one of EXP's operands.
5940 IGNORE is nonzero if the value is to be ignored. */
5943 rs6000_expand_builtin (exp
, target
, subtarget
, mode
, ignore
)
5946 rtx subtarget ATTRIBUTE_UNUSED
;
5947 enum machine_mode mode ATTRIBUTE_UNUSED
;
5948 int ignore ATTRIBUTE_UNUSED
;
5950 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5951 tree arglist
= TREE_OPERAND (exp
, 1);
5952 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
5953 struct builtin_description
*d
;
5960 ret
= altivec_expand_builtin (exp
, target
, &success
);
5967 ret
= spe_expand_builtin (exp
, target
, &success
);
5973 if (TARGET_ALTIVEC
|| TARGET_SPE
)
5975 /* Handle simple unary operations. */
5976 d
= (struct builtin_description
*) bdesc_1arg
;
5977 for (i
= 0; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
5978 if (d
->code
== fcode
)
5979 return rs6000_expand_unop_builtin (d
->icode
, arglist
, target
);
5981 /* Handle simple binary operations. */
5982 d
= (struct builtin_description
*) bdesc_2arg
;
5983 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
5984 if (d
->code
== fcode
)
5985 return rs6000_expand_binop_builtin (d
->icode
, arglist
, target
);
5987 /* Handle simple ternary operations. */
5988 d
= (struct builtin_description
*) bdesc_3arg
;
5989 for (i
= 0; i
< ARRAY_SIZE (bdesc_3arg
); i
++, d
++)
5990 if (d
->code
== fcode
)
5991 return rs6000_expand_ternop_builtin (d
->icode
, arglist
, target
);
5999 rs6000_init_builtins ()
6001 opaque_V2SI_type_node
= copy_node (V2SI_type_node
);
6002 opaque_V2SF_type_node
= copy_node (V2SF_type_node
);
6003 opaque_p_V2SI_type_node
= build_pointer_type (opaque_V2SI_type_node
);
6006 spe_init_builtins ();
6008 altivec_init_builtins ();
6009 if (TARGET_ALTIVEC
|| TARGET_SPE
)
6010 rs6000_common_init_builtins ();
6013 /* Search through a set of builtins and enable the mask bits.
6014 DESC is an array of builtins.
6015 SIZE is the total number of builtins.
6016 START is the builtin enum at which to start.
6017 END is the builtin enum at which to end. */
6019 enable_mask_for_builtins (desc
, size
, start
, end
)
6020 struct builtin_description
*desc
;
6022 enum rs6000_builtins start
, end
;
6026 for (i
= 0; i
< size
; ++i
)
6027 if (desc
[i
].code
== start
)
6033 for (; i
< size
; ++i
)
6035 /* Flip all the bits on. */
6036 desc
[i
].mask
= target_flags
;
6037 if (desc
[i
].code
== end
)
6043 spe_init_builtins ()
6045 tree endlink
= void_list_node
;
6046 tree puint_type_node
= build_pointer_type (unsigned_type_node
);
6047 tree pushort_type_node
= build_pointer_type (short_unsigned_type_node
);
6048 struct builtin_description
*d
;
6051 tree v2si_ftype_4_v2si
6052 = build_function_type
6053 (opaque_V2SI_type_node
,
6054 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
6055 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
6056 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
6057 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
6060 tree v2sf_ftype_4_v2sf
6061 = build_function_type
6062 (opaque_V2SF_type_node
,
6063 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
6064 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
6065 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
6066 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
6069 tree int_ftype_int_v2si_v2si
6070 = build_function_type
6072 tree_cons (NULL_TREE
, integer_type_node
,
6073 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
6074 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
6077 tree int_ftype_int_v2sf_v2sf
6078 = build_function_type
6080 tree_cons (NULL_TREE
, integer_type_node
,
6081 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
6082 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
6085 tree void_ftype_v2si_puint_int
6086 = build_function_type (void_type_node
,
6087 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
6088 tree_cons (NULL_TREE
, puint_type_node
,
6089 tree_cons (NULL_TREE
,
6093 tree void_ftype_v2si_puint_char
6094 = build_function_type (void_type_node
,
6095 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
6096 tree_cons (NULL_TREE
, puint_type_node
,
6097 tree_cons (NULL_TREE
,
6101 tree void_ftype_v2si_pv2si_int
6102 = build_function_type (void_type_node
,
6103 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
6104 tree_cons (NULL_TREE
, opaque_p_V2SI_type_node
,
6105 tree_cons (NULL_TREE
,
6109 tree void_ftype_v2si_pv2si_char
6110 = build_function_type (void_type_node
,
6111 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
6112 tree_cons (NULL_TREE
, opaque_p_V2SI_type_node
,
6113 tree_cons (NULL_TREE
,
6118 = build_function_type (void_type_node
,
6119 tree_cons (NULL_TREE
, integer_type_node
, endlink
));
6122 = build_function_type (integer_type_node
, endlink
);
6124 tree v2si_ftype_pv2si_int
6125 = build_function_type (opaque_V2SI_type_node
,
6126 tree_cons (NULL_TREE
, opaque_p_V2SI_type_node
,
6127 tree_cons (NULL_TREE
, integer_type_node
,
6130 tree v2si_ftype_puint_int
6131 = build_function_type (opaque_V2SI_type_node
,
6132 tree_cons (NULL_TREE
, puint_type_node
,
6133 tree_cons (NULL_TREE
, integer_type_node
,
6136 tree v2si_ftype_pushort_int
6137 = build_function_type (opaque_V2SI_type_node
,
6138 tree_cons (NULL_TREE
, pushort_type_node
,
6139 tree_cons (NULL_TREE
, integer_type_node
,
6142 /* The initialization of the simple binary and unary builtins is
6143 done in rs6000_common_init_builtins, but we have to enable the
6144 mask bits here manually because we have run out of `target_flags'
6145 bits. We really need to redesign this mask business. */
6147 enable_mask_for_builtins ((struct builtin_description
*) bdesc_2arg
,
6148 ARRAY_SIZE (bdesc_2arg
),
6151 enable_mask_for_builtins ((struct builtin_description
*) bdesc_1arg
,
6152 ARRAY_SIZE (bdesc_1arg
),
6154 SPE_BUILTIN_EVSUBFUSIAAW
);
6155 enable_mask_for_builtins ((struct builtin_description
*) bdesc_spe_predicates
,
6156 ARRAY_SIZE (bdesc_spe_predicates
),
6157 SPE_BUILTIN_EVCMPEQ
,
6158 SPE_BUILTIN_EVFSTSTLT
);
6159 enable_mask_for_builtins ((struct builtin_description
*) bdesc_spe_evsel
,
6160 ARRAY_SIZE (bdesc_spe_evsel
),
6161 SPE_BUILTIN_EVSEL_CMPGTS
,
6162 SPE_BUILTIN_EVSEL_FSTSTEQ
);
6164 /* Initialize irregular SPE builtins. */
6166 def_builtin (target_flags
, "__builtin_spe_mtspefscr", void_ftype_int
, SPE_BUILTIN_MTSPEFSCR
);
6167 def_builtin (target_flags
, "__builtin_spe_mfspefscr", int_ftype_void
, SPE_BUILTIN_MFSPEFSCR
);
6168 def_builtin (target_flags
, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDDX
);
6169 def_builtin (target_flags
, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDHX
);
6170 def_builtin (target_flags
, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDWX
);
6171 def_builtin (target_flags
, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWHEX
);
6172 def_builtin (target_flags
, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWHOX
);
6173 def_builtin (target_flags
, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWWEX
);
6174 def_builtin (target_flags
, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWWOX
);
6175 def_builtin (target_flags
, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDD
);
6176 def_builtin (target_flags
, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDH
);
6177 def_builtin (target_flags
, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDW
);
6178 def_builtin (target_flags
, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWHE
);
6179 def_builtin (target_flags
, "__builtin_spe_evstwho", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWHO
);
6180 def_builtin (target_flags
, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWWE
);
6181 def_builtin (target_flags
, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWWO
);
6184 def_builtin (target_flags
, "__builtin_spe_evlddx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDDX
);
6185 def_builtin (target_flags
, "__builtin_spe_evldwx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDWX
);
6186 def_builtin (target_flags
, "__builtin_spe_evldhx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDHX
);
6187 def_builtin (target_flags
, "__builtin_spe_evlwhex", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHEX
);
6188 def_builtin (target_flags
, "__builtin_spe_evlwhoux", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOUX
);
6189 def_builtin (target_flags
, "__builtin_spe_evlwhosx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOSX
);
6190 def_builtin (target_flags
, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWWSPLATX
);
6191 def_builtin (target_flags
, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHSPLATX
);
6192 def_builtin (target_flags
, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHESPLATX
);
6193 def_builtin (target_flags
, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOUSPLATX
);
6194 def_builtin (target_flags
, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOSSPLATX
);
6195 def_builtin (target_flags
, "__builtin_spe_evldd", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDD
);
6196 def_builtin (target_flags
, "__builtin_spe_evldw", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDW
);
6197 def_builtin (target_flags
, "__builtin_spe_evldh", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDH
);
6198 def_builtin (target_flags
, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHESPLAT
);
6199 def_builtin (target_flags
, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOSSPLAT
);
6200 def_builtin (target_flags
, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOUSPLAT
);
6201 def_builtin (target_flags
, "__builtin_spe_evlwhe", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHE
);
6202 def_builtin (target_flags
, "__builtin_spe_evlwhos", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOS
);
6203 def_builtin (target_flags
, "__builtin_spe_evlwhou", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOU
);
6204 def_builtin (target_flags
, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHSPLAT
);
6205 def_builtin (target_flags
, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWWSPLAT
);
6208 d
= (struct builtin_description
*) bdesc_spe_predicates
;
6209 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_predicates
); ++i
, d
++)
6213 switch (insn_data
[d
->icode
].operand
[1].mode
)
6216 type
= int_ftype_int_v2si_v2si
;
6219 type
= int_ftype_int_v2sf_v2sf
;
6225 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
6228 /* Evsel predicates. */
6229 d
= (struct builtin_description
*) bdesc_spe_evsel
;
6230 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_evsel
); ++i
, d
++)
6234 switch (insn_data
[d
->icode
].operand
[1].mode
)
6237 type
= v2si_ftype_4_v2si
;
6240 type
= v2sf_ftype_4_v2sf
;
6246 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
6251 altivec_init_builtins ()
6253 struct builtin_description
*d
;
6254 struct builtin_description_predicates
*dp
;
6256 tree pfloat_type_node
= build_pointer_type (float_type_node
);
6257 tree pint_type_node
= build_pointer_type (integer_type_node
);
6258 tree pshort_type_node
= build_pointer_type (short_integer_type_node
);
6259 tree pchar_type_node
= build_pointer_type (char_type_node
);
6261 tree pvoid_type_node
= build_pointer_type (void_type_node
);
6263 tree pcfloat_type_node
= build_pointer_type (build_qualified_type (float_type_node
, TYPE_QUAL_CONST
));
6264 tree pcint_type_node
= build_pointer_type (build_qualified_type (integer_type_node
, TYPE_QUAL_CONST
));
6265 tree pcshort_type_node
= build_pointer_type (build_qualified_type (short_integer_type_node
, TYPE_QUAL_CONST
));
6266 tree pcchar_type_node
= build_pointer_type (build_qualified_type (char_type_node
, TYPE_QUAL_CONST
));
6268 tree pcvoid_type_node
= build_pointer_type (build_qualified_type (void_type_node
, TYPE_QUAL_CONST
));
6270 tree int_ftype_int_v4si_v4si
6271 = build_function_type_list (integer_type_node
,
6272 integer_type_node
, V4SI_type_node
,
6273 V4SI_type_node
, NULL_TREE
);
6274 tree v4sf_ftype_pcfloat
6275 = build_function_type_list (V4SF_type_node
, pcfloat_type_node
, NULL_TREE
);
6276 tree void_ftype_pfloat_v4sf
6277 = build_function_type_list (void_type_node
,
6278 pfloat_type_node
, V4SF_type_node
, NULL_TREE
);
6279 tree v4si_ftype_pcint
6280 = build_function_type_list (V4SI_type_node
, pcint_type_node
, NULL_TREE
);
6281 tree void_ftype_pint_v4si
6282 = build_function_type_list (void_type_node
,
6283 pint_type_node
, V4SI_type_node
, NULL_TREE
);
6284 tree v8hi_ftype_pcshort
6285 = build_function_type_list (V8HI_type_node
, pcshort_type_node
, NULL_TREE
);
6286 tree void_ftype_pshort_v8hi
6287 = build_function_type_list (void_type_node
,
6288 pshort_type_node
, V8HI_type_node
, NULL_TREE
);
6289 tree v16qi_ftype_pcchar
6290 = build_function_type_list (V16QI_type_node
, pcchar_type_node
, NULL_TREE
);
6291 tree void_ftype_pchar_v16qi
6292 = build_function_type_list (void_type_node
,
6293 pchar_type_node
, V16QI_type_node
, NULL_TREE
);
6294 tree void_ftype_v4si
6295 = build_function_type_list (void_type_node
, V4SI_type_node
, NULL_TREE
);
6296 tree v8hi_ftype_void
6297 = build_function_type (V8HI_type_node
, void_list_node
);
6298 tree void_ftype_void
6299 = build_function_type (void_type_node
, void_list_node
);
6301 = build_function_type_list (void_type_node
, char_type_node
, NULL_TREE
);
6303 tree v16qi_ftype_int_pcvoid
6304 = build_function_type_list (V16QI_type_node
,
6305 integer_type_node
, pcvoid_type_node
, NULL_TREE
);
6306 tree v8hi_ftype_int_pcvoid
6307 = build_function_type_list (V8HI_type_node
,
6308 integer_type_node
, pcvoid_type_node
, NULL_TREE
);
6309 tree v4si_ftype_int_pcvoid
6310 = build_function_type_list (V4SI_type_node
,
6311 integer_type_node
, pcvoid_type_node
, NULL_TREE
);
6313 tree void_ftype_v4si_int_pvoid
6314 = build_function_type_list (void_type_node
,
6315 V4SI_type_node
, integer_type_node
,
6316 pvoid_type_node
, NULL_TREE
);
6317 tree void_ftype_v16qi_int_pvoid
6318 = build_function_type_list (void_type_node
,
6319 V16QI_type_node
, integer_type_node
,
6320 pvoid_type_node
, NULL_TREE
);
6321 tree void_ftype_v8hi_int_pvoid
6322 = build_function_type_list (void_type_node
,
6323 V8HI_type_node
, integer_type_node
,
6324 pvoid_type_node
, NULL_TREE
);
6325 tree int_ftype_int_v8hi_v8hi
6326 = build_function_type_list (integer_type_node
,
6327 integer_type_node
, V8HI_type_node
,
6328 V8HI_type_node
, NULL_TREE
);
6329 tree int_ftype_int_v16qi_v16qi
6330 = build_function_type_list (integer_type_node
,
6331 integer_type_node
, V16QI_type_node
,
6332 V16QI_type_node
, NULL_TREE
);
6333 tree int_ftype_int_v4sf_v4sf
6334 = build_function_type_list (integer_type_node
,
6335 integer_type_node
, V4SF_type_node
,
6336 V4SF_type_node
, NULL_TREE
);
6337 tree v4si_ftype_v4si
6338 = build_function_type_list (V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
6339 tree v8hi_ftype_v8hi
6340 = build_function_type_list (V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
6341 tree v16qi_ftype_v16qi
6342 = build_function_type_list (V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
6343 tree v4sf_ftype_v4sf
6344 = build_function_type_list (V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
6345 tree void_ftype_pcvoid_int_char
6346 = build_function_type_list (void_type_node
,
6347 pcvoid_type_node
, integer_type_node
,
6348 char_type_node
, NULL_TREE
);
6350 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat
,
6351 ALTIVEC_BUILTIN_LD_INTERNAL_4sf
);
6352 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf
,
6353 ALTIVEC_BUILTIN_ST_INTERNAL_4sf
);
6354 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint
,
6355 ALTIVEC_BUILTIN_LD_INTERNAL_4si
);
6356 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si
,
6357 ALTIVEC_BUILTIN_ST_INTERNAL_4si
);
6358 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort
,
6359 ALTIVEC_BUILTIN_LD_INTERNAL_8hi
);
6360 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi
,
6361 ALTIVEC_BUILTIN_ST_INTERNAL_8hi
);
6362 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar
,
6363 ALTIVEC_BUILTIN_LD_INTERNAL_16qi
);
6364 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi
,
6365 ALTIVEC_BUILTIN_ST_INTERNAL_16qi
);
6366 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_mtvscr", void_ftype_v4si
, ALTIVEC_BUILTIN_MTVSCR
);
6367 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_mfvscr", v8hi_ftype_void
, ALTIVEC_BUILTIN_MFVSCR
);
6368 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_dssall", void_ftype_void
, ALTIVEC_BUILTIN_DSSALL
);
6369 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_dss", void_ftype_qi
, ALTIVEC_BUILTIN_DSS
);
6370 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvsl", v16qi_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVSL
);
6371 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvsr", v16qi_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVSR
);
6372 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvebx", v16qi_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVEBX
);
6373 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvehx", v8hi_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVEHX
);
6374 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvewx", v4si_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVEWX
);
6375 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvxl", v4si_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVXL
);
6376 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvx", v4si_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVX
);
6377 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVX
);
6378 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVEWX
);
6379 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVXL
);
6380 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid
, ALTIVEC_BUILTIN_STVEBX
);
6381 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid
, ALTIVEC_BUILTIN_STVEHX
);
6383 /* Add the DST variants. */
6384 d
= (struct builtin_description
*) bdesc_dst
;
6385 for (i
= 0; i
< ARRAY_SIZE (bdesc_dst
); i
++, d
++)
6386 def_builtin (d
->mask
, d
->name
, void_ftype_pcvoid_int_char
, d
->code
);
6388 /* Initialize the predicates. */
6389 dp
= (struct builtin_description_predicates
*) bdesc_altivec_preds
;
6390 for (i
= 0; i
< ARRAY_SIZE (bdesc_altivec_preds
); i
++, dp
++)
6392 enum machine_mode mode1
;
6395 mode1
= insn_data
[dp
->icode
].operand
[1].mode
;
6400 type
= int_ftype_int_v4si_v4si
;
6403 type
= int_ftype_int_v8hi_v8hi
;
6406 type
= int_ftype_int_v16qi_v16qi
;
6409 type
= int_ftype_int_v4sf_v4sf
;
6415 def_builtin (dp
->mask
, dp
->name
, type
, dp
->code
);
6418 /* Initialize the abs* operators. */
6419 d
= (struct builtin_description
*) bdesc_abs
;
6420 for (i
= 0; i
< ARRAY_SIZE (bdesc_abs
); i
++, d
++)
6422 enum machine_mode mode0
;
6425 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
6430 type
= v4si_ftype_v4si
;
6433 type
= v8hi_ftype_v8hi
;
6436 type
= v16qi_ftype_v16qi
;
6439 type
= v4sf_ftype_v4sf
;
6445 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
6450 rs6000_common_init_builtins ()
6452 struct builtin_description
*d
;
6455 tree v4sf_ftype_v4sf_v4sf_v16qi
6456 = build_function_type_list (V4SF_type_node
,
6457 V4SF_type_node
, V4SF_type_node
,
6458 V16QI_type_node
, NULL_TREE
);
6459 tree v4si_ftype_v4si_v4si_v16qi
6460 = build_function_type_list (V4SI_type_node
,
6461 V4SI_type_node
, V4SI_type_node
,
6462 V16QI_type_node
, NULL_TREE
);
6463 tree v8hi_ftype_v8hi_v8hi_v16qi
6464 = build_function_type_list (V8HI_type_node
,
6465 V8HI_type_node
, V8HI_type_node
,
6466 V16QI_type_node
, NULL_TREE
);
6467 tree v16qi_ftype_v16qi_v16qi_v16qi
6468 = build_function_type_list (V16QI_type_node
,
6469 V16QI_type_node
, V16QI_type_node
,
6470 V16QI_type_node
, NULL_TREE
);
6471 tree v4si_ftype_char
6472 = build_function_type_list (V4SI_type_node
, char_type_node
, NULL_TREE
);
6473 tree v8hi_ftype_char
6474 = build_function_type_list (V8HI_type_node
, char_type_node
, NULL_TREE
);
6475 tree v16qi_ftype_char
6476 = build_function_type_list (V16QI_type_node
, char_type_node
, NULL_TREE
);
6477 tree v8hi_ftype_v16qi
6478 = build_function_type_list (V8HI_type_node
, V16QI_type_node
, NULL_TREE
);
6479 tree v4sf_ftype_v4sf
6480 = build_function_type_list (V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
6482 tree v2si_ftype_v2si_v2si
6483 = build_function_type_list (opaque_V2SI_type_node
,
6484 opaque_V2SI_type_node
,
6485 opaque_V2SI_type_node
, NULL_TREE
);
6487 tree v2sf_ftype_v2sf_v2sf
6488 = build_function_type_list (opaque_V2SF_type_node
,
6489 opaque_V2SF_type_node
,
6490 opaque_V2SF_type_node
, NULL_TREE
);
6492 tree v2si_ftype_int_int
6493 = build_function_type_list (opaque_V2SI_type_node
,
6494 integer_type_node
, integer_type_node
,
6497 tree v2si_ftype_v2si
6498 = build_function_type_list (opaque_V2SI_type_node
,
6499 opaque_V2SI_type_node
, NULL_TREE
);
6501 tree v2sf_ftype_v2sf
6502 = build_function_type_list (opaque_V2SF_type_node
,
6503 opaque_V2SF_type_node
, NULL_TREE
);
6505 tree v2sf_ftype_v2si
6506 = build_function_type_list (opaque_V2SF_type_node
,
6507 opaque_V2SI_type_node
, NULL_TREE
);
6509 tree v2si_ftype_v2sf
6510 = build_function_type_list (opaque_V2SI_type_node
,
6511 opaque_V2SF_type_node
, NULL_TREE
);
6513 tree v2si_ftype_v2si_char
6514 = build_function_type_list (opaque_V2SI_type_node
,
6515 opaque_V2SI_type_node
,
6516 char_type_node
, NULL_TREE
);
6518 tree v2si_ftype_int_char
6519 = build_function_type_list (opaque_V2SI_type_node
,
6520 integer_type_node
, char_type_node
, NULL_TREE
);
6522 tree v2si_ftype_char
6523 = build_function_type_list (opaque_V2SI_type_node
,
6524 char_type_node
, NULL_TREE
);
6526 tree int_ftype_int_int
6527 = build_function_type_list (integer_type_node
,
6528 integer_type_node
, integer_type_node
,
6531 tree v4si_ftype_v4si_v4si
6532 = build_function_type_list (V4SI_type_node
,
6533 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
6534 tree v4sf_ftype_v4si_char
6535 = build_function_type_list (V4SF_type_node
,
6536 V4SI_type_node
, char_type_node
, NULL_TREE
);
6537 tree v4si_ftype_v4sf_char
6538 = build_function_type_list (V4SI_type_node
,
6539 V4SF_type_node
, char_type_node
, NULL_TREE
);
6540 tree v4si_ftype_v4si_char
6541 = build_function_type_list (V4SI_type_node
,
6542 V4SI_type_node
, char_type_node
, NULL_TREE
);
6543 tree v8hi_ftype_v8hi_char
6544 = build_function_type_list (V8HI_type_node
,
6545 V8HI_type_node
, char_type_node
, NULL_TREE
);
6546 tree v16qi_ftype_v16qi_char
6547 = build_function_type_list (V16QI_type_node
,
6548 V16QI_type_node
, char_type_node
, NULL_TREE
);
6549 tree v16qi_ftype_v16qi_v16qi_char
6550 = build_function_type_list (V16QI_type_node
,
6551 V16QI_type_node
, V16QI_type_node
,
6552 char_type_node
, NULL_TREE
);
6553 tree v8hi_ftype_v8hi_v8hi_char
6554 = build_function_type_list (V8HI_type_node
,
6555 V8HI_type_node
, V8HI_type_node
,
6556 char_type_node
, NULL_TREE
);
6557 tree v4si_ftype_v4si_v4si_char
6558 = build_function_type_list (V4SI_type_node
,
6559 V4SI_type_node
, V4SI_type_node
,
6560 char_type_node
, NULL_TREE
);
6561 tree v4sf_ftype_v4sf_v4sf_char
6562 = build_function_type_list (V4SF_type_node
,
6563 V4SF_type_node
, V4SF_type_node
,
6564 char_type_node
, NULL_TREE
);
6565 tree v4sf_ftype_v4sf_v4sf
6566 = build_function_type_list (V4SF_type_node
,
6567 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
6568 tree v4sf_ftype_v4sf_v4sf_v4si
6569 = build_function_type_list (V4SF_type_node
,
6570 V4SF_type_node
, V4SF_type_node
,
6571 V4SI_type_node
, NULL_TREE
);
6572 tree v4sf_ftype_v4sf_v4sf_v4sf
6573 = build_function_type_list (V4SF_type_node
,
6574 V4SF_type_node
, V4SF_type_node
,
6575 V4SF_type_node
, NULL_TREE
);
6576 tree v4si_ftype_v4si_v4si_v4si
6577 = build_function_type_list (V4SI_type_node
,
6578 V4SI_type_node
, V4SI_type_node
,
6579 V4SI_type_node
, NULL_TREE
);
6580 tree v8hi_ftype_v8hi_v8hi
6581 = build_function_type_list (V8HI_type_node
,
6582 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
6583 tree v8hi_ftype_v8hi_v8hi_v8hi
6584 = build_function_type_list (V8HI_type_node
,
6585 V8HI_type_node
, V8HI_type_node
,
6586 V8HI_type_node
, NULL_TREE
);
6587 tree v4si_ftype_v8hi_v8hi_v4si
6588 = build_function_type_list (V4SI_type_node
,
6589 V8HI_type_node
, V8HI_type_node
,
6590 V4SI_type_node
, NULL_TREE
);
6591 tree v4si_ftype_v16qi_v16qi_v4si
6592 = build_function_type_list (V4SI_type_node
,
6593 V16QI_type_node
, V16QI_type_node
,
6594 V4SI_type_node
, NULL_TREE
);
6595 tree v16qi_ftype_v16qi_v16qi
6596 = build_function_type_list (V16QI_type_node
,
6597 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
6598 tree v4si_ftype_v4sf_v4sf
6599 = build_function_type_list (V4SI_type_node
,
6600 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
6601 tree v8hi_ftype_v16qi_v16qi
6602 = build_function_type_list (V8HI_type_node
,
6603 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
6604 tree v4si_ftype_v8hi_v8hi
6605 = build_function_type_list (V4SI_type_node
,
6606 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
6607 tree v8hi_ftype_v4si_v4si
6608 = build_function_type_list (V8HI_type_node
,
6609 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
6610 tree v16qi_ftype_v8hi_v8hi
6611 = build_function_type_list (V16QI_type_node
,
6612 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
6613 tree v4si_ftype_v16qi_v4si
6614 = build_function_type_list (V4SI_type_node
,
6615 V16QI_type_node
, V4SI_type_node
, NULL_TREE
);
6616 tree v4si_ftype_v16qi_v16qi
6617 = build_function_type_list (V4SI_type_node
,
6618 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
6619 tree v4si_ftype_v8hi_v4si
6620 = build_function_type_list (V4SI_type_node
,
6621 V8HI_type_node
, V4SI_type_node
, NULL_TREE
);
6622 tree v4si_ftype_v8hi
6623 = build_function_type_list (V4SI_type_node
, V8HI_type_node
, NULL_TREE
);
6624 tree int_ftype_v4si_v4si
6625 = build_function_type_list (integer_type_node
,
6626 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
6627 tree int_ftype_v4sf_v4sf
6628 = build_function_type_list (integer_type_node
,
6629 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
6630 tree int_ftype_v16qi_v16qi
6631 = build_function_type_list (integer_type_node
,
6632 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
6633 tree int_ftype_v8hi_v8hi
6634 = build_function_type_list (integer_type_node
,
6635 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
6637 /* Add the simple ternary operators. */
6638 d
= (struct builtin_description
*) bdesc_3arg
;
6639 for (i
= 0; i
< ARRAY_SIZE (bdesc_3arg
); i
++, d
++)
6642 enum machine_mode mode0
, mode1
, mode2
, mode3
;
6645 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
6648 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
6649 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
6650 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
6651 mode3
= insn_data
[d
->icode
].operand
[3].mode
;
6653 /* When all four are of the same mode. */
6654 if (mode0
== mode1
&& mode1
== mode2
&& mode2
== mode3
)
6659 type
= v4si_ftype_v4si_v4si_v4si
;
6662 type
= v4sf_ftype_v4sf_v4sf_v4sf
;
6665 type
= v8hi_ftype_v8hi_v8hi_v8hi
;
6668 type
= v16qi_ftype_v16qi_v16qi_v16qi
;
6674 else if (mode0
== mode1
&& mode1
== mode2
&& mode3
== V16QImode
)
6679 type
= v4si_ftype_v4si_v4si_v16qi
;
6682 type
= v4sf_ftype_v4sf_v4sf_v16qi
;
6685 type
= v8hi_ftype_v8hi_v8hi_v16qi
;
6688 type
= v16qi_ftype_v16qi_v16qi_v16qi
;
6694 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V16QImode
6695 && mode3
== V4SImode
)
6696 type
= v4si_ftype_v16qi_v16qi_v4si
;
6697 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V8HImode
6698 && mode3
== V4SImode
)
6699 type
= v4si_ftype_v8hi_v8hi_v4si
;
6700 else if (mode0
== V4SFmode
&& mode1
== V4SFmode
&& mode2
== V4SFmode
6701 && mode3
== V4SImode
)
6702 type
= v4sf_ftype_v4sf_v4sf_v4si
;
6704 /* vchar, vchar, vchar, 4 bit literal. */
6705 else if (mode0
== V16QImode
&& mode1
== mode0
&& mode2
== mode0
6707 type
= v16qi_ftype_v16qi_v16qi_char
;
6709 /* vshort, vshort, vshort, 4 bit literal. */
6710 else if (mode0
== V8HImode
&& mode1
== mode0
&& mode2
== mode0
6712 type
= v8hi_ftype_v8hi_v8hi_char
;
6714 /* vint, vint, vint, 4 bit literal. */
6715 else if (mode0
== V4SImode
&& mode1
== mode0
&& mode2
== mode0
6717 type
= v4si_ftype_v4si_v4si_char
;
6719 /* vfloat, vfloat, vfloat, 4 bit literal. */
6720 else if (mode0
== V4SFmode
&& mode1
== mode0
&& mode2
== mode0
6722 type
= v4sf_ftype_v4sf_v4sf_char
;
6727 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
6730 /* Add the simple binary operators. */
6731 d
= (struct builtin_description
*) bdesc_2arg
;
6732 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
6734 enum machine_mode mode0
, mode1
, mode2
;
6737 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
6740 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
6741 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
6742 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
6744 /* When all three operands are of the same mode. */
6745 if (mode0
== mode1
&& mode1
== mode2
)
6750 type
= v4sf_ftype_v4sf_v4sf
;
6753 type
= v4si_ftype_v4si_v4si
;
6756 type
= v16qi_ftype_v16qi_v16qi
;
6759 type
= v8hi_ftype_v8hi_v8hi
;
6762 type
= v2si_ftype_v2si_v2si
;
6765 type
= v2sf_ftype_v2sf_v2sf
;
6768 type
= int_ftype_int_int
;
6775 /* A few other combos we really don't want to do manually. */
6777 /* vint, vfloat, vfloat. */
6778 else if (mode0
== V4SImode
&& mode1
== V4SFmode
&& mode2
== V4SFmode
)
6779 type
= v4si_ftype_v4sf_v4sf
;
6781 /* vshort, vchar, vchar. */
6782 else if (mode0
== V8HImode
&& mode1
== V16QImode
&& mode2
== V16QImode
)
6783 type
= v8hi_ftype_v16qi_v16qi
;
6785 /* vint, vshort, vshort. */
6786 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V8HImode
)
6787 type
= v4si_ftype_v8hi_v8hi
;
6789 /* vshort, vint, vint. */
6790 else if (mode0
== V8HImode
&& mode1
== V4SImode
&& mode2
== V4SImode
)
6791 type
= v8hi_ftype_v4si_v4si
;
6793 /* vchar, vshort, vshort. */
6794 else if (mode0
== V16QImode
&& mode1
== V8HImode
&& mode2
== V8HImode
)
6795 type
= v16qi_ftype_v8hi_v8hi
;
6797 /* vint, vchar, vint. */
6798 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V4SImode
)
6799 type
= v4si_ftype_v16qi_v4si
;
6801 /* vint, vchar, vchar. */
6802 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V16QImode
)
6803 type
= v4si_ftype_v16qi_v16qi
;
6805 /* vint, vshort, vint. */
6806 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V4SImode
)
6807 type
= v4si_ftype_v8hi_v4si
;
6809 /* vint, vint, 5 bit literal. */
6810 else if (mode0
== V4SImode
&& mode1
== V4SImode
&& mode2
== QImode
)
6811 type
= v4si_ftype_v4si_char
;
6813 /* vshort, vshort, 5 bit literal. */
6814 else if (mode0
== V8HImode
&& mode1
== V8HImode
&& mode2
== QImode
)
6815 type
= v8hi_ftype_v8hi_char
;
6817 /* vchar, vchar, 5 bit literal. */
6818 else if (mode0
== V16QImode
&& mode1
== V16QImode
&& mode2
== QImode
)
6819 type
= v16qi_ftype_v16qi_char
;
6821 /* vfloat, vint, 5 bit literal. */
6822 else if (mode0
== V4SFmode
&& mode1
== V4SImode
&& mode2
== QImode
)
6823 type
= v4sf_ftype_v4si_char
;
6825 /* vint, vfloat, 5 bit literal. */
6826 else if (mode0
== V4SImode
&& mode1
== V4SFmode
&& mode2
== QImode
)
6827 type
= v4si_ftype_v4sf_char
;
6829 else if (mode0
== V2SImode
&& mode1
== SImode
&& mode2
== SImode
)
6830 type
= v2si_ftype_int_int
;
6832 else if (mode0
== V2SImode
&& mode1
== V2SImode
&& mode2
== QImode
)
6833 type
= v2si_ftype_v2si_char
;
6835 else if (mode0
== V2SImode
&& mode1
== SImode
&& mode2
== QImode
)
6836 type
= v2si_ftype_int_char
;
6839 else if (mode0
== SImode
)
6844 type
= int_ftype_v4si_v4si
;
6847 type
= int_ftype_v4sf_v4sf
;
6850 type
= int_ftype_v16qi_v16qi
;
6853 type
= int_ftype_v8hi_v8hi
;
6863 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
6866 /* Add the simple unary operators. */
6867 d
= (struct builtin_description
*) bdesc_1arg
;
6868 for (i
= 0; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
6870 enum machine_mode mode0
, mode1
;
6873 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
6876 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
6877 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
6879 if (mode0
== V4SImode
&& mode1
== QImode
)
6880 type
= v4si_ftype_char
;
6881 else if (mode0
== V8HImode
&& mode1
== QImode
)
6882 type
= v8hi_ftype_char
;
6883 else if (mode0
== V16QImode
&& mode1
== QImode
)
6884 type
= v16qi_ftype_char
;
6885 else if (mode0
== V4SFmode
&& mode1
== V4SFmode
)
6886 type
= v4sf_ftype_v4sf
;
6887 else if (mode0
== V8HImode
&& mode1
== V16QImode
)
6888 type
= v8hi_ftype_v16qi
;
6889 else if (mode0
== V4SImode
&& mode1
== V8HImode
)
6890 type
= v4si_ftype_v8hi
;
6891 else if (mode0
== V2SImode
&& mode1
== V2SImode
)
6892 type
= v2si_ftype_v2si
;
6893 else if (mode0
== V2SFmode
&& mode1
== V2SFmode
)
6894 type
= v2sf_ftype_v2sf
;
6895 else if (mode0
== V2SFmode
&& mode1
== V2SImode
)
6896 type
= v2sf_ftype_v2si
;
6897 else if (mode0
== V2SImode
&& mode1
== V2SFmode
)
6898 type
= v2si_ftype_v2sf
;
6899 else if (mode0
== V2SImode
&& mode1
== QImode
)
6900 type
= v2si_ftype_char
;
6904 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
6909 /* Expand a block move operation, and return 1 if successful. Return 0
6910 if we should let the compiler generate normal code.
6912 operands[0] is the destination
6913 operands[1] is the source
6914 operands[2] is the length
6915 operands[3] is the alignment */
6917 #define MAX_MOVE_REG 4
6920 expand_block_move (operands
)
6923 rtx orig_dest
= operands
[0];
6924 rtx orig_src
= operands
[1];
6925 rtx bytes_rtx
= operands
[2];
6926 rtx align_rtx
= operands
[3];
6927 int constp
= (GET_CODE (bytes_rtx
) == CONST_INT
);
6932 rtx stores
[MAX_MOVE_REG
];
6935 /* If this is not a fixed size move, just call memcpy */
6939 /* If this is not a fixed size alignment, abort */
6940 if (GET_CODE (align_rtx
) != CONST_INT
)
6942 align
= INTVAL (align_rtx
);
6944 /* Anything to move? */
6945 bytes
= INTVAL (bytes_rtx
);
6949 /* store_one_arg depends on expand_block_move to handle at least the size of
6950 reg_parm_stack_space. */
6951 if (bytes
> (TARGET_POWERPC64
? 64 : 32))
6954 for (offset
= 0; bytes
> 0; offset
+= move_bytes
, bytes
-= move_bytes
)
6957 rtx (*movstrsi
) PARAMS ((rtx
, rtx
, rtx
, rtx
));
6958 rtx (*mov
) PARAMS ((rtx
, rtx
));
6960 enum machine_mode mode
= BLKmode
;
6964 && bytes
> 24 /* move up to 32 bytes at a time */
6972 && ! fixed_regs
[12])
6974 move_bytes
= (bytes
> 32) ? 32 : bytes
;
6975 gen_func
.movstrsi
= gen_movstrsi_8reg
;
6977 else if (TARGET_STRING
6978 && bytes
> 16 /* move up to 24 bytes at a time */
6984 && ! fixed_regs
[10])
6986 move_bytes
= (bytes
> 24) ? 24 : bytes
;
6987 gen_func
.movstrsi
= gen_movstrsi_6reg
;
6989 else if (TARGET_STRING
6990 && bytes
> 8 /* move up to 16 bytes at a time */
6996 move_bytes
= (bytes
> 16) ? 16 : bytes
;
6997 gen_func
.movstrsi
= gen_movstrsi_4reg
;
6999 else if (bytes
>= 8 && TARGET_POWERPC64
7000 /* 64-bit loads and stores require word-aligned
7002 && (align
>= 8 || (! STRICT_ALIGNMENT
&& align
>= 4)))
7006 gen_func
.mov
= gen_movdi
;
7008 else if (TARGET_STRING
&& bytes
> 4 && !TARGET_POWERPC64
)
7009 { /* move up to 8 bytes at a time */
7010 move_bytes
= (bytes
> 8) ? 8 : bytes
;
7011 gen_func
.movstrsi
= gen_movstrsi_2reg
;
7013 else if (bytes
>= 4 && (align
>= 4 || ! STRICT_ALIGNMENT
))
7014 { /* move 4 bytes */
7017 gen_func
.mov
= gen_movsi
;
7019 else if (bytes
== 2 && (align
>= 2 || ! STRICT_ALIGNMENT
))
7020 { /* move 2 bytes */
7023 gen_func
.mov
= gen_movhi
;
7025 else if (TARGET_STRING
&& bytes
> 1)
7026 { /* move up to 4 bytes at a time */
7027 move_bytes
= (bytes
> 4) ? 4 : bytes
;
7028 gen_func
.movstrsi
= gen_movstrsi_1reg
;
7030 else /* move 1 byte at a time */
7034 gen_func
.mov
= gen_movqi
;
7037 src
= adjust_address (orig_src
, mode
, offset
);
7038 dest
= adjust_address (orig_dest
, mode
, offset
);
7040 if (mode
!= BLKmode
)
7042 rtx tmp_reg
= gen_reg_rtx (mode
);
7044 emit_insn ((*gen_func
.mov
) (tmp_reg
, src
));
7045 stores
[num_reg
++] = (*gen_func
.mov
) (dest
, tmp_reg
);
7048 if (mode
== BLKmode
|| num_reg
>= MAX_MOVE_REG
|| bytes
== move_bytes
)
7051 for (i
= 0; i
< num_reg
; i
++)
7052 emit_insn (stores
[i
]);
7056 if (mode
== BLKmode
)
7058 /* Move the address into scratch registers. The movstrsi
7059 patterns require zero offset. */
7060 if (!REG_P (XEXP (src
, 0)))
7062 rtx src_reg
= copy_addr_to_reg (XEXP (src
, 0));
7063 src
= replace_equiv_address (src
, src_reg
);
7065 set_mem_size (src
, GEN_INT (move_bytes
));
7067 if (!REG_P (XEXP (dest
, 0)))
7069 rtx dest_reg
= copy_addr_to_reg (XEXP (dest
, 0));
7070 dest
= replace_equiv_address (dest
, dest_reg
);
7072 set_mem_size (dest
, GEN_INT (move_bytes
));
7074 emit_insn ((*gen_func
.movstrsi
) (dest
, src
,
7075 GEN_INT (move_bytes
& 31),
7084 /* Return 1 if OP is a load multiple operation. It is known to be a
7085 PARALLEL and the first section will be tested. */
7088 load_multiple_operation (op
, mode
)
7090 enum machine_mode mode ATTRIBUTE_UNUSED
;
7092 int count
= XVECLEN (op
, 0);
7093 unsigned int dest_regno
;
7097 /* Perform a quick check so we don't blow up below. */
7099 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
7100 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
7101 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != MEM
)
7104 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
7105 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0);
7107 for (i
= 1; i
< count
; i
++)
7109 rtx elt
= XVECEXP (op
, 0, i
);
7111 if (GET_CODE (elt
) != SET
7112 || GET_CODE (SET_DEST (elt
)) != REG
7113 || GET_MODE (SET_DEST (elt
)) != SImode
7114 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
7115 || GET_CODE (SET_SRC (elt
)) != MEM
7116 || GET_MODE (SET_SRC (elt
)) != SImode
7117 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != PLUS
7118 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt
), 0), 0), src_addr
)
7119 || GET_CODE (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != CONST_INT
7120 || INTVAL (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != i
* 4)
7127 /* Similar, but tests for store multiple. Here, the second vector element
7128 is a CLOBBER. It will be tested later. */
7131 store_multiple_operation (op
, mode
)
7133 enum machine_mode mode ATTRIBUTE_UNUSED
;
7135 int count
= XVECLEN (op
, 0) - 1;
7136 unsigned int src_regno
;
7140 /* Perform a quick check so we don't blow up below. */
7142 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
7143 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != MEM
7144 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != REG
)
7147 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
7148 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, 0)), 0);
7150 for (i
= 1; i
< count
; i
++)
7152 rtx elt
= XVECEXP (op
, 0, i
+ 1);
7154 if (GET_CODE (elt
) != SET
7155 || GET_CODE (SET_SRC (elt
)) != REG
7156 || GET_MODE (SET_SRC (elt
)) != SImode
7157 || REGNO (SET_SRC (elt
)) != src_regno
+ i
7158 || GET_CODE (SET_DEST (elt
)) != MEM
7159 || GET_MODE (SET_DEST (elt
)) != SImode
7160 || GET_CODE (XEXP (SET_DEST (elt
), 0)) != PLUS
7161 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt
), 0), 0), dest_addr
)
7162 || GET_CODE (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != CONST_INT
7163 || INTVAL (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != i
* 4)
7170 /* Return a string to perform a load_multiple operation.
7171 operands[0] is the vector.
7172 operands[1] is the source address.
7173 operands[2] is the first destination register. */
7176 rs6000_output_load_multiple (operands
)
7179 /* We have to handle the case where the pseudo used to contain the address
7180 is assigned to one of the output registers. */
7182 int words
= XVECLEN (operands
[0], 0);
7185 if (XVECLEN (operands
[0], 0) == 1)
7186 return "{l|lwz} %2,0(%1)";
7188 for (i
= 0; i
< words
; i
++)
7189 if (refers_to_regno_p (REGNO (operands
[2]) + i
,
7190 REGNO (operands
[2]) + i
+ 1, operands
[1], 0))
7194 xop
[0] = GEN_INT (4 * (words
-1));
7195 xop
[1] = operands
[1];
7196 xop
[2] = operands
[2];
7197 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop
);
7202 xop
[0] = GEN_INT (4 * (words
-1));
7203 xop
[1] = operands
[1];
7204 xop
[2] = gen_rtx_REG (SImode
, REGNO (operands
[2]) + 1);
7205 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop
);
7210 for (j
= 0; j
< words
; j
++)
7213 xop
[0] = GEN_INT (j
* 4);
7214 xop
[1] = operands
[1];
7215 xop
[2] = gen_rtx_REG (SImode
, REGNO (operands
[2]) + j
);
7216 output_asm_insn ("{l|lwz} %2,%0(%1)", xop
);
7218 xop
[0] = GEN_INT (i
* 4);
7219 xop
[1] = operands
[1];
7220 output_asm_insn ("{l|lwz} %1,%0(%1)", xop
);
7225 return "{lsi|lswi} %2,%1,%N0";
7228 /* Return 1 for a parallel vrsave operation. */
7231 vrsave_operation (op
, mode
)
7233 enum machine_mode mode ATTRIBUTE_UNUSED
;
7235 int count
= XVECLEN (op
, 0);
7236 unsigned int dest_regno
, src_regno
;
7240 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
7241 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
7242 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != UNSPEC_VOLATILE
)
7245 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
7246 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
7248 if (dest_regno
!= VRSAVE_REGNO
7249 && src_regno
!= VRSAVE_REGNO
)
7252 for (i
= 1; i
< count
; i
++)
7254 rtx elt
= XVECEXP (op
, 0, i
);
7256 if (GET_CODE (elt
) != CLOBBER
7257 && GET_CODE (elt
) != SET
)
7264 /* Return 1 for an PARALLEL suitable for mtcrf. */
7267 mtcrf_operation (op
, mode
)
7269 enum machine_mode mode ATTRIBUTE_UNUSED
;
7271 int count
= XVECLEN (op
, 0);
7275 /* Perform a quick check so we don't blow up below. */
7277 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
7278 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != UNSPEC
7279 || XVECLEN (SET_SRC (XVECEXP (op
, 0, 0)), 0) != 2)
7281 src_reg
= XVECEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0, 0);
7283 if (GET_CODE (src_reg
) != REG
7284 || GET_MODE (src_reg
) != SImode
7285 || ! INT_REGNO_P (REGNO (src_reg
)))
7288 for (i
= 0; i
< count
; i
++)
7290 rtx exp
= XVECEXP (op
, 0, i
);
7294 if (GET_CODE (exp
) != SET
7295 || GET_CODE (SET_DEST (exp
)) != REG
7296 || GET_MODE (SET_DEST (exp
)) != CCmode
7297 || ! CR_REGNO_P (REGNO (SET_DEST (exp
))))
7299 unspec
= SET_SRC (exp
);
7300 maskval
= 1 << (MAX_CR_REGNO
- REGNO (SET_DEST (exp
)));
7302 if (GET_CODE (unspec
) != UNSPEC
7303 || XINT (unspec
, 1) != UNSPEC_MOVESI_TO_CR
7304 || XVECLEN (unspec
, 0) != 2
7305 || XVECEXP (unspec
, 0, 0) != src_reg
7306 || GET_CODE (XVECEXP (unspec
, 0, 1)) != CONST_INT
7307 || INTVAL (XVECEXP (unspec
, 0, 1)) != maskval
)
7313 /* Return 1 for an PARALLEL suitable for lmw. */
7316 lmw_operation (op
, mode
)
7318 enum machine_mode mode ATTRIBUTE_UNUSED
;
7320 int count
= XVECLEN (op
, 0);
7321 unsigned int dest_regno
;
7323 unsigned int base_regno
;
7324 HOST_WIDE_INT offset
;
7327 /* Perform a quick check so we don't blow up below. */
7329 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
7330 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
7331 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != MEM
)
7334 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
7335 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0);
7338 || count
!= 32 - (int) dest_regno
)
7341 if (legitimate_indirect_address_p (src_addr
, 0))
7344 base_regno
= REGNO (src_addr
);
7345 if (base_regno
== 0)
7348 else if (legitimate_offset_address_p (SImode
, src_addr
, 0))
7350 offset
= INTVAL (XEXP (src_addr
, 1));
7351 base_regno
= REGNO (XEXP (src_addr
, 0));
7356 for (i
= 0; i
< count
; i
++)
7358 rtx elt
= XVECEXP (op
, 0, i
);
7361 HOST_WIDE_INT newoffset
;
7363 if (GET_CODE (elt
) != SET
7364 || GET_CODE (SET_DEST (elt
)) != REG
7365 || GET_MODE (SET_DEST (elt
)) != SImode
7366 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
7367 || GET_CODE (SET_SRC (elt
)) != MEM
7368 || GET_MODE (SET_SRC (elt
)) != SImode
)
7370 newaddr
= XEXP (SET_SRC (elt
), 0);
7371 if (legitimate_indirect_address_p (newaddr
, 0))
7376 else if (legitimate_offset_address_p (SImode
, newaddr
, 0))
7378 addr_reg
= XEXP (newaddr
, 0);
7379 newoffset
= INTVAL (XEXP (newaddr
, 1));
7383 if (REGNO (addr_reg
) != base_regno
7384 || newoffset
!= offset
+ 4 * i
)
7391 /* Return 1 for an PARALLEL suitable for stmw. */
7394 stmw_operation (op
, mode
)
7396 enum machine_mode mode ATTRIBUTE_UNUSED
;
7398 int count
= XVECLEN (op
, 0);
7399 unsigned int src_regno
;
7401 unsigned int base_regno
;
7402 HOST_WIDE_INT offset
;
7405 /* Perform a quick check so we don't blow up below. */
7407 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
7408 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != MEM
7409 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != REG
)
7412 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
7413 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, 0)), 0);
7416 || count
!= 32 - (int) src_regno
)
7419 if (legitimate_indirect_address_p (dest_addr
, 0))
7422 base_regno
= REGNO (dest_addr
);
7423 if (base_regno
== 0)
7426 else if (legitimate_offset_address_p (SImode
, dest_addr
, 0))
7428 offset
= INTVAL (XEXP (dest_addr
, 1));
7429 base_regno
= REGNO (XEXP (dest_addr
, 0));
7434 for (i
= 0; i
< count
; i
++)
7436 rtx elt
= XVECEXP (op
, 0, i
);
7439 HOST_WIDE_INT newoffset
;
7441 if (GET_CODE (elt
) != SET
7442 || GET_CODE (SET_SRC (elt
)) != REG
7443 || GET_MODE (SET_SRC (elt
)) != SImode
7444 || REGNO (SET_SRC (elt
)) != src_regno
+ i
7445 || GET_CODE (SET_DEST (elt
)) != MEM
7446 || GET_MODE (SET_DEST (elt
)) != SImode
)
7448 newaddr
= XEXP (SET_DEST (elt
), 0);
7449 if (legitimate_indirect_address_p (newaddr
, 0))
7454 else if (legitimate_offset_address_p (SImode
, newaddr
, 0))
7456 addr_reg
= XEXP (newaddr
, 0);
7457 newoffset
= INTVAL (XEXP (newaddr
, 1));
7461 if (REGNO (addr_reg
) != base_regno
7462 || newoffset
!= offset
+ 4 * i
)
7469 /* A validation routine: say whether CODE, a condition code, and MODE
7470 match. The other alternatives either don't make sense or should
7471 never be generated. */
7474 validate_condition_mode (code
, mode
)
7476 enum machine_mode mode
;
7478 if (GET_RTX_CLASS (code
) != '<'
7479 || GET_MODE_CLASS (mode
) != MODE_CC
)
7482 /* These don't make sense. */
7483 if ((code
== GT
|| code
== LT
|| code
== GE
|| code
== LE
)
7484 && mode
== CCUNSmode
)
7487 if ((code
== GTU
|| code
== LTU
|| code
== GEU
|| code
== LEU
)
7488 && mode
!= CCUNSmode
)
7491 if (mode
!= CCFPmode
7492 && (code
== ORDERED
|| code
== UNORDERED
7493 || code
== UNEQ
|| code
== LTGT
7494 || code
== UNGT
|| code
== UNLT
7495 || code
== UNGE
|| code
== UNLE
))
7498 /* These should never be generated except for
7499 flag_finite_math_only. */
7500 if (mode
== CCFPmode
7501 && ! flag_finite_math_only
7502 && (code
== LE
|| code
== GE
7503 || code
== UNEQ
|| code
== LTGT
7504 || code
== UNGT
|| code
== UNLT
))
7507 /* These are invalid; the information is not there. */
7508 if (mode
== CCEQmode
7509 && code
!= EQ
&& code
!= NE
)
7513 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
7514 We only check the opcode against the mode of the CC value here. */
7517 branch_comparison_operator (op
, mode
)
7519 enum machine_mode mode ATTRIBUTE_UNUSED
;
7521 enum rtx_code code
= GET_CODE (op
);
7522 enum machine_mode cc_mode
;
7524 if (GET_RTX_CLASS (code
) != '<')
7527 cc_mode
= GET_MODE (XEXP (op
, 0));
7528 if (GET_MODE_CLASS (cc_mode
) != MODE_CC
)
7531 validate_condition_mode (code
, cc_mode
);
7536 /* Return 1 if OP is a comparison operation that is valid for a branch
7537 insn and which is true if the corresponding bit in the CC register
7541 branch_positive_comparison_operator (op
, mode
)
7543 enum machine_mode mode
;
7547 if (! branch_comparison_operator (op
, mode
))
7550 code
= GET_CODE (op
);
7551 return (code
== EQ
|| code
== LT
|| code
== GT
7552 || (TARGET_E500
&& TARGET_HARD_FLOAT
&& !TARGET_FPRS
&& code
== NE
)
7553 || code
== LTU
|| code
== GTU
7554 || code
== UNORDERED
);
7557 /* Return 1 if OP is a comparison operation that is valid for an scc
7558 insn: it must be a positive comparison. */
7561 scc_comparison_operator (op
, mode
)
7563 enum machine_mode mode
;
7565 return branch_positive_comparison_operator (op
, mode
);
7569 trap_comparison_operator (op
, mode
)
7571 enum machine_mode mode
;
7573 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
7575 return GET_RTX_CLASS (GET_CODE (op
)) == '<';
7579 boolean_operator (op
, mode
)
7581 enum machine_mode mode ATTRIBUTE_UNUSED
;
7583 enum rtx_code code
= GET_CODE (op
);
7584 return (code
== AND
|| code
== IOR
|| code
== XOR
);
7588 boolean_or_operator (op
, mode
)
7590 enum machine_mode mode ATTRIBUTE_UNUSED
;
7592 enum rtx_code code
= GET_CODE (op
);
7593 return (code
== IOR
|| code
== XOR
);
7597 min_max_operator (op
, mode
)
7599 enum machine_mode mode ATTRIBUTE_UNUSED
;
7601 enum rtx_code code
= GET_CODE (op
);
7602 return (code
== SMIN
|| code
== SMAX
|| code
== UMIN
|| code
== UMAX
);
7605 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
7606 mask required to convert the result of a rotate insn into a shift
7607 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
7610 includes_lshift_p (shiftop
, andop
)
7614 unsigned HOST_WIDE_INT shift_mask
= ~(unsigned HOST_WIDE_INT
) 0;
7616 shift_mask
<<= INTVAL (shiftop
);
7618 return (INTVAL (andop
) & 0xffffffff & ~shift_mask
) == 0;
7621 /* Similar, but for right shift. */
7624 includes_rshift_p (shiftop
, andop
)
7628 unsigned HOST_WIDE_INT shift_mask
= ~(unsigned HOST_WIDE_INT
) 0;
7630 shift_mask
>>= INTVAL (shiftop
);
7632 return (INTVAL (andop
) & 0xffffffff & ~shift_mask
) == 0;
7635 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
7636 to perform a left shift. It must have exactly SHIFTOP least
7637 significant 0's, then one or more 1's, then zero or more 0's. */
7640 includes_rldic_lshift_p (shiftop
, andop
)
7644 if (GET_CODE (andop
) == CONST_INT
)
7646 HOST_WIDE_INT c
, lsb
, shift_mask
;
7649 if (c
== 0 || c
== ~0)
7653 shift_mask
<<= INTVAL (shiftop
);
7655 /* Find the least significant one bit. */
7658 /* It must coincide with the LSB of the shift mask. */
7659 if (-lsb
!= shift_mask
)
7662 /* Invert to look for the next transition (if any). */
7665 /* Remove the low group of ones (originally low group of zeros). */
7668 /* Again find the lsb, and check we have all 1's above. */
7672 else if (GET_CODE (andop
) == CONST_DOUBLE
7673 && (GET_MODE (andop
) == VOIDmode
|| GET_MODE (andop
) == DImode
))
7675 HOST_WIDE_INT low
, high
, lsb
;
7676 HOST_WIDE_INT shift_mask_low
, shift_mask_high
;
7678 low
= CONST_DOUBLE_LOW (andop
);
7679 if (HOST_BITS_PER_WIDE_INT
< 64)
7680 high
= CONST_DOUBLE_HIGH (andop
);
7682 if ((low
== 0 && (HOST_BITS_PER_WIDE_INT
>= 64 || high
== 0))
7683 || (low
== ~0 && (HOST_BITS_PER_WIDE_INT
>= 64 || high
== ~0)))
7686 if (HOST_BITS_PER_WIDE_INT
< 64 && low
== 0)
7688 shift_mask_high
= ~0;
7689 if (INTVAL (shiftop
) > 32)
7690 shift_mask_high
<<= INTVAL (shiftop
) - 32;
7694 if (-lsb
!= shift_mask_high
|| INTVAL (shiftop
) < 32)
7701 return high
== -lsb
;
7704 shift_mask_low
= ~0;
7705 shift_mask_low
<<= INTVAL (shiftop
);
7709 if (-lsb
!= shift_mask_low
)
7712 if (HOST_BITS_PER_WIDE_INT
< 64)
7717 if (HOST_BITS_PER_WIDE_INT
< 64 && low
== 0)
7720 return high
== -lsb
;
7724 return low
== -lsb
&& (HOST_BITS_PER_WIDE_INT
>= 64 || high
== ~0);
7730 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
7731 to perform a left shift. It must have SHIFTOP or more least
7732 signifigant 0's, with the remainder of the word 1's. */
7735 includes_rldicr_lshift_p (shiftop
, andop
)
7739 if (GET_CODE (andop
) == CONST_INT
)
7741 HOST_WIDE_INT c
, lsb
, shift_mask
;
7744 shift_mask
<<= INTVAL (shiftop
);
7747 /* Find the least signifigant one bit. */
7750 /* It must be covered by the shift mask.
7751 This test also rejects c == 0. */
7752 if ((lsb
& shift_mask
) == 0)
7755 /* Check we have all 1's above the transition, and reject all 1's. */
7756 return c
== -lsb
&& lsb
!= 1;
7758 else if (GET_CODE (andop
) == CONST_DOUBLE
7759 && (GET_MODE (andop
) == VOIDmode
|| GET_MODE (andop
) == DImode
))
7761 HOST_WIDE_INT low
, lsb
, shift_mask_low
;
7763 low
= CONST_DOUBLE_LOW (andop
);
7765 if (HOST_BITS_PER_WIDE_INT
< 64)
7767 HOST_WIDE_INT high
, shift_mask_high
;
7769 high
= CONST_DOUBLE_HIGH (andop
);
7773 shift_mask_high
= ~0;
7774 if (INTVAL (shiftop
) > 32)
7775 shift_mask_high
<<= INTVAL (shiftop
) - 32;
7779 if ((lsb
& shift_mask_high
) == 0)
7782 return high
== -lsb
;
7788 shift_mask_low
= ~0;
7789 shift_mask_low
<<= INTVAL (shiftop
);
7793 if ((lsb
& shift_mask_low
) == 0)
7796 return low
== -lsb
&& lsb
!= 1;
7802 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
7803 for lfq and stfq insns.
7805 Note reg1 and reg2 *must* be hard registers. To be sure we will
7806 abort if we are passed pseudo registers. */
7809 registers_ok_for_quad_peep (reg1
, reg2
)
7812 /* We might have been passed a SUBREG. */
7813 if (GET_CODE (reg1
) != REG
|| GET_CODE (reg2
) != REG
)
7816 return (REGNO (reg1
) == REGNO (reg2
) - 1);
7819 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
7820 addr1 and addr2 must be in consecutive memory locations
7821 (addr2 == addr1 + 8). */
7824 addrs_ok_for_quad_peep (addr1
, addr2
)
7831 /* Extract an offset (if used) from the first addr. */
7832 if (GET_CODE (addr1
) == PLUS
)
7834 /* If not a REG, return zero. */
7835 if (GET_CODE (XEXP (addr1
, 0)) != REG
)
7839 reg1
= REGNO (XEXP (addr1
, 0));
7840 /* The offset must be constant! */
7841 if (GET_CODE (XEXP (addr1
, 1)) != CONST_INT
)
7843 offset1
= INTVAL (XEXP (addr1
, 1));
7846 else if (GET_CODE (addr1
) != REG
)
7850 reg1
= REGNO (addr1
);
7851 /* This was a simple (mem (reg)) expression. Offset is 0. */
7855 /* Make sure the second address is a (mem (plus (reg) (const_int)))
7856 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
7857 register as addr1. */
7858 if (offset1
== -8 && GET_CODE (addr2
) == REG
&& reg1
== REGNO (addr2
))
7860 if (GET_CODE (addr2
) != PLUS
)
7863 if (GET_CODE (XEXP (addr2
, 0)) != REG
7864 || GET_CODE (XEXP (addr2
, 1)) != CONST_INT
)
7867 if (reg1
!= REGNO (XEXP (addr2
, 0)))
7870 /* The offset for the second addr must be 8 more than the first addr. */
7871 if (INTVAL (XEXP (addr2
, 1)) != offset1
+ 8)
7874 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7879 /* Return the register class of a scratch register needed to copy IN into
7880 or out of a register in CLASS in MODE. If it can be done directly,
7881 NO_REGS is returned. */
7884 secondary_reload_class (class, mode
, in
)
7885 enum reg_class
class;
7886 enum machine_mode mode ATTRIBUTE_UNUSED
;
7891 if (TARGET_ELF
|| (DEFAULT_ABI
== ABI_DARWIN
7893 && MACHOPIC_INDIRECT
7897 /* We cannot copy a symbolic operand directly into anything
7898 other than BASE_REGS for TARGET_ELF. So indicate that a
7899 register from BASE_REGS is needed as an intermediate
7902 On Darwin, pic addresses require a load from memory, which
7903 needs a base register. */
7904 if (class != BASE_REGS
7905 && (GET_CODE (in
) == SYMBOL_REF
7906 || GET_CODE (in
) == HIGH
7907 || GET_CODE (in
) == LABEL_REF
7908 || GET_CODE (in
) == CONST
))
7912 if (GET_CODE (in
) == REG
)
7915 if (regno
>= FIRST_PSEUDO_REGISTER
)
7917 regno
= true_regnum (in
);
7918 if (regno
>= FIRST_PSEUDO_REGISTER
)
7922 else if (GET_CODE (in
) == SUBREG
)
7924 regno
= true_regnum (in
);
7925 if (regno
>= FIRST_PSEUDO_REGISTER
)
7931 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
7933 if (class == GENERAL_REGS
|| class == BASE_REGS
7934 || (regno
>= 0 && INT_REGNO_P (regno
)))
7937 /* Constants, memory, and FP registers can go into FP registers. */
7938 if ((regno
== -1 || FP_REGNO_P (regno
))
7939 && (class == FLOAT_REGS
|| class == NON_SPECIAL_REGS
))
7942 /* Memory, and AltiVec registers can go into AltiVec registers. */
7943 if ((regno
== -1 || ALTIVEC_REGNO_P (regno
))
7944 && class == ALTIVEC_REGS
)
7947 /* We can copy among the CR registers. */
7948 if ((class == CR_REGS
|| class == CR0_REGS
)
7949 && regno
>= 0 && CR_REGNO_P (regno
))
7952 /* Otherwise, we need GENERAL_REGS. */
7953 return GENERAL_REGS
;
7956 /* Given a comparison operation, return the bit number in CCR to test. We
7957 know this is a valid comparison.
7959 SCC_P is 1 if this is for an scc. That means that %D will have been
7960 used instead of %C, so the bits will be in different places.
7962 Return -1 if OP isn't a valid comparison for some reason. */
7969 enum rtx_code code
= GET_CODE (op
);
7970 enum machine_mode cc_mode
;
7975 if (GET_RTX_CLASS (code
) != '<')
7980 if (GET_CODE (reg
) != REG
7981 || ! CR_REGNO_P (REGNO (reg
)))
7984 cc_mode
= GET_MODE (reg
);
7985 cc_regnum
= REGNO (reg
);
7986 base_bit
= 4 * (cc_regnum
- CR0_REGNO
);
7988 validate_condition_mode (code
, cc_mode
);
7990 /* When generating a sCOND operation, only positive conditions are
7992 if (scc_p
&& code
!= EQ
&& code
!= GT
&& code
!= LT
&& code
!= UNORDERED
7993 && code
!= GTU
&& code
!= LTU
)
7999 if (TARGET_E500
&& !TARGET_FPRS
8000 && TARGET_HARD_FLOAT
&& cc_mode
== CCFPmode
)
8001 return base_bit
+ 1;
8002 return scc_p
? base_bit
+ 3 : base_bit
+ 2;
8004 if (TARGET_E500
&& !TARGET_FPRS
8005 && TARGET_HARD_FLOAT
&& cc_mode
== CCFPmode
)
8006 return base_bit
+ 1;
8007 return base_bit
+ 2;
8008 case GT
: case GTU
: case UNLE
:
8009 return base_bit
+ 1;
8010 case LT
: case LTU
: case UNGE
:
8012 case ORDERED
: case UNORDERED
:
8013 return base_bit
+ 3;
8016 /* If scc, we will have done a cror to put the bit in the
8017 unordered position. So test that bit. For integer, this is ! LT
8018 unless this is an scc insn. */
8019 return scc_p
? base_bit
+ 3 : base_bit
;
8022 return scc_p
? base_bit
+ 3 : base_bit
+ 1;
8029 /* Return the GOT register. */
8032 rs6000_got_register (value
)
8033 rtx value ATTRIBUTE_UNUSED
;
8035 /* The second flow pass currently (June 1999) can't update
8036 regs_ever_live without disturbing other parts of the compiler, so
8037 update it here to make the prolog/epilogue code happy. */
8038 if (no_new_pseudos
&& ! regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
])
8039 regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
] = 1;
8041 current_function_uses_pic_offset_table
= 1;
8043 return pic_offset_table_rtx
;
8046 /* Function to init struct machine_function.
8047 This will be called, via a pointer variable,
8048 from push_function_context. */
8050 static struct machine_function
*
8051 rs6000_init_machine_status ()
8053 return ggc_alloc_cleared (sizeof (machine_function
));
8056 /* These macros test for integers and extract the low-order bits. */
8058 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
8059 && GET_MODE (X) == VOIDmode)
8061 #define INT_LOWPART(X) \
8062 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
8069 unsigned long val
= INT_LOWPART (op
);
8071 /* If the high bit is zero, the value is the first 1 bit we find
8073 if ((val
& 0x80000000) == 0)
8075 if ((val
& 0xffffffff) == 0)
8079 while (((val
<<= 1) & 0x80000000) == 0)
8084 /* If the high bit is set and the low bit is not, or the mask is all
8085 1's, the value is zero. */
8086 if ((val
& 1) == 0 || (val
& 0xffffffff) == 0xffffffff)
8089 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8092 while (((val
>>= 1) & 1) != 0)
8103 unsigned long val
= INT_LOWPART (op
);
8105 /* If the low bit is zero, the value is the first 1 bit we find from
8109 if ((val
& 0xffffffff) == 0)
8113 while (((val
>>= 1) & 1) == 0)
8119 /* If the low bit is set and the high bit is not, or the mask is all
8120 1's, the value is 31. */
8121 if ((val
& 0x80000000) == 0 || (val
& 0xffffffff) == 0xffffffff)
8124 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8127 while (((val
<<= 1) & 0x80000000) != 0)
8133 /* Locate some local-dynamic symbol still in use by this function
8134 so that we can print its name in some tls_ld pattern. */
8137 rs6000_get_some_local_dynamic_name ()
8141 if (cfun
->machine
->some_ld_name
)
8142 return cfun
->machine
->some_ld_name
;
8144 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
8146 && for_each_rtx (&PATTERN (insn
),
8147 rs6000_get_some_local_dynamic_name_1
, 0))
8148 return cfun
->machine
->some_ld_name
;
8153 /* Helper function for rs6000_get_some_local_dynamic_name. */
8156 rs6000_get_some_local_dynamic_name_1 (px
, data
)
8158 void *data ATTRIBUTE_UNUSED
;
8162 if (GET_CODE (x
) == SYMBOL_REF
)
8164 const char *str
= XSTR (x
, 0);
8165 if (SYMBOL_REF_TLS_MODEL (x
) == TLS_MODEL_LOCAL_DYNAMIC
)
8167 cfun
->machine
->some_ld_name
= str
;
8175 /* Print an operand. Recognize special options, documented below. */
8178 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8179 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
8181 #define SMALL_DATA_RELOC "sda21"
8182 #define SMALL_DATA_REG 0
8186 print_operand (file
, x
, code
)
8193 unsigned HOST_WIDE_INT uval
;
8198 /* Write out an instruction after the call which may be replaced
8199 with glue code by the loader. This depends on the AIX version. */
8200 asm_fprintf (file
, RS6000_CALL_GLUE
);
8203 /* %a is output_address. */
8206 /* If X is a constant integer whose low-order 5 bits are zero,
8207 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
8208 in the AIX assembler where "sri" with a zero shift count
8209 writes a trash instruction. */
8210 if (GET_CODE (x
) == CONST_INT
&& (INTVAL (x
) & 31) == 0)
8217 /* If constant, low-order 16 bits of constant, unsigned.
8218 Otherwise, write normally. */
8220 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 0xffff);
8222 print_operand (file
, x
, 0);
8226 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
8227 for 64-bit mask direction. */
8228 putc (((INT_LOWPART(x
) & 1) == 0 ? 'r' : 'l'), file
);
8231 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
8235 /* X is a CR register. Print the number of the EQ bit of the CR */
8236 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
8237 output_operand_lossage ("invalid %%E value");
8239 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
) + 2);
8243 /* X is a CR register. Print the shift count needed to move it
8244 to the high-order four bits. */
8245 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
8246 output_operand_lossage ("invalid %%f value");
8248 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
));
8252 /* Similar, but print the count for the rotate in the opposite
8254 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
8255 output_operand_lossage ("invalid %%F value");
8257 fprintf (file
, "%d", 32 - 4 * (REGNO (x
) - CR0_REGNO
));
8261 /* X is a constant integer. If it is negative, print "m",
8262 otherwise print "z". This is to make an aze or ame insn. */
8263 if (GET_CODE (x
) != CONST_INT
)
8264 output_operand_lossage ("invalid %%G value");
8265 else if (INTVAL (x
) >= 0)
8272 /* If constant, output low-order five bits. Otherwise, write
8275 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 31);
8277 print_operand (file
, x
, 0);
8281 /* If constant, output low-order six bits. Otherwise, write
8284 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 63);
8286 print_operand (file
, x
, 0);
8290 /* Print `i' if this is a constant, else nothing. */
8296 /* Write the bit number in CCR for jump. */
8299 output_operand_lossage ("invalid %%j code");
8301 fprintf (file
, "%d", i
);
8305 /* Similar, but add one for shift count in rlinm for scc and pass
8306 scc flag to `ccr_bit'. */
8309 output_operand_lossage ("invalid %%J code");
8311 /* If we want bit 31, write a shift count of zero, not 32. */
8312 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
8316 /* X must be a constant. Write the 1's complement of the
8319 output_operand_lossage ("invalid %%k value");
8321 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ~ INT_LOWPART (x
));
8325 /* X must be a symbolic constant on ELF. Write an
8326 expression suitable for an 'addi' that adds in the low 16
8328 if (GET_CODE (x
) != CONST
)
8330 print_operand_address (file
, x
);
8335 if (GET_CODE (XEXP (x
, 0)) != PLUS
8336 || (GET_CODE (XEXP (XEXP (x
, 0), 0)) != SYMBOL_REF
8337 && GET_CODE (XEXP (XEXP (x
, 0), 0)) != LABEL_REF
)
8338 || GET_CODE (XEXP (XEXP (x
, 0), 1)) != CONST_INT
)
8339 output_operand_lossage ("invalid %%K value");
8340 print_operand_address (file
, XEXP (XEXP (x
, 0), 0));
8342 /* For GNU as, there must be a non-alphanumeric character
8343 between 'l' and the number. The '-' is added by
8344 print_operand() already. */
8345 if (INTVAL (XEXP (XEXP (x
, 0), 1)) >= 0)
8347 print_operand (file
, XEXP (XEXP (x
, 0), 1), 0);
8351 /* %l is output_asm_label. */
8354 /* Write second word of DImode or DFmode reference. Works on register
8355 or non-indexed memory only. */
8356 if (GET_CODE (x
) == REG
)
8357 fprintf (file
, "%s", reg_names
[REGNO (x
) + 1]);
8358 else if (GET_CODE (x
) == MEM
)
8360 /* Handle possible auto-increment. Since it is pre-increment and
8361 we have already done it, we can just use an offset of word. */
8362 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
8363 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
8364 output_address (plus_constant (XEXP (XEXP (x
, 0), 0),
8367 output_address (XEXP (adjust_address_nv (x
, SImode
,
8371 if (small_data_operand (x
, GET_MODE (x
)))
8372 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
8373 reg_names
[SMALL_DATA_REG
]);
8378 /* MB value for a mask operand. */
8379 if (! mask_operand (x
, SImode
))
8380 output_operand_lossage ("invalid %%m value");
8382 fprintf (file
, "%d", extract_MB (x
));
8386 /* ME value for a mask operand. */
8387 if (! mask_operand (x
, SImode
))
8388 output_operand_lossage ("invalid %%M value");
8390 fprintf (file
, "%d", extract_ME (x
));
8393 /* %n outputs the negative of its operand. */
8396 /* Write the number of elements in the vector times 4. */
8397 if (GET_CODE (x
) != PARALLEL
)
8398 output_operand_lossage ("invalid %%N value");
8400 fprintf (file
, "%d", XVECLEN (x
, 0) * 4);
8404 /* Similar, but subtract 1 first. */
8405 if (GET_CODE (x
) != PARALLEL
)
8406 output_operand_lossage ("invalid %%O value");
8408 fprintf (file
, "%d", (XVECLEN (x
, 0) - 1) * 4);
8412 /* X is a CONST_INT that is a power of two. Output the logarithm. */
8414 || INT_LOWPART (x
) < 0
8415 || (i
= exact_log2 (INT_LOWPART (x
))) < 0)
8416 output_operand_lossage ("invalid %%p value");
8418 fprintf (file
, "%d", i
);
8422 /* The operand must be an indirect memory reference. The result
8423 is the register number. */
8424 if (GET_CODE (x
) != MEM
|| GET_CODE (XEXP (x
, 0)) != REG
8425 || REGNO (XEXP (x
, 0)) >= 32)
8426 output_operand_lossage ("invalid %%P value");
8428 fprintf (file
, "%d", REGNO (XEXP (x
, 0)));
8432 /* This outputs the logical code corresponding to a boolean
8433 expression. The expression may have one or both operands
8434 negated (if one, only the first one). For condition register
8435 logical operations, it will also treat the negated
8436 CR codes as NOTs, but not handle NOTs of them. */
8438 const char *const *t
= 0;
8440 enum rtx_code code
= GET_CODE (x
);
8441 static const char * const tbl
[3][3] = {
8442 { "and", "andc", "nor" },
8443 { "or", "orc", "nand" },
8444 { "xor", "eqv", "xor" } };
8448 else if (code
== IOR
)
8450 else if (code
== XOR
)
8453 output_operand_lossage ("invalid %%q value");
8455 if (GET_CODE (XEXP (x
, 0)) != NOT
)
8459 if (GET_CODE (XEXP (x
, 1)) == NOT
)
8470 /* X is a CR register. Print the mask for `mtcrf'. */
8471 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
8472 output_operand_lossage ("invalid %%R value");
8474 fprintf (file
, "%d", 128 >> (REGNO (x
) - CR0_REGNO
));
8478 /* Low 5 bits of 32 - value */
8480 output_operand_lossage ("invalid %%s value");
8482 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, (32 - INT_LOWPART (x
)) & 31);
8486 /* PowerPC64 mask position. All 0's is excluded.
8487 CONST_INT 32-bit mask is considered sign-extended so any
8488 transition must occur within the CONST_INT, not on the boundary. */
8489 if (! mask64_operand (x
, DImode
))
8490 output_operand_lossage ("invalid %%S value");
8492 uval
= INT_LOWPART (x
);
8494 if (uval
& 1) /* Clear Left */
8496 #if HOST_BITS_PER_WIDE_INT > 64
8497 uval
&= ((unsigned HOST_WIDE_INT
) 1 << 64) - 1;
8501 else /* Clear Right */
8504 #if HOST_BITS_PER_WIDE_INT > 64
8505 uval
&= ((unsigned HOST_WIDE_INT
) 1 << 64) - 1;
8513 fprintf (file
, "%d", i
);
8517 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
8518 if (GET_CODE (x
) != REG
|| GET_MODE (x
) != CCmode
)
8521 /* Bit 3 is OV bit. */
8522 i
= 4 * (REGNO (x
) - CR0_REGNO
) + 3;
8524 /* If we want bit 31, write a shift count of zero, not 32. */
8525 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
8529 /* Print the symbolic name of a branch target register. */
8530 if (GET_CODE (x
) != REG
|| (REGNO (x
) != LINK_REGISTER_REGNUM
8531 && REGNO (x
) != COUNT_REGISTER_REGNUM
))
8532 output_operand_lossage ("invalid %%T value");
8533 else if (REGNO (x
) == LINK_REGISTER_REGNUM
)
8534 fputs (TARGET_NEW_MNEMONICS
? "lr" : "r", file
);
8536 fputs ("ctr", file
);
8540 /* High-order 16 bits of constant for use in unsigned operand. */
8542 output_operand_lossage ("invalid %%u value");
8544 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
8545 (INT_LOWPART (x
) >> 16) & 0xffff);
8549 /* High-order 16 bits of constant for use in signed operand. */
8551 output_operand_lossage ("invalid %%v value");
8553 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
8554 (INT_LOWPART (x
) >> 16) & 0xffff);
8558 /* Print `u' if this has an auto-increment or auto-decrement. */
8559 if (GET_CODE (x
) == MEM
8560 && (GET_CODE (XEXP (x
, 0)) == PRE_INC
8561 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
))
8566 /* Print the trap code for this operand. */
8567 switch (GET_CODE (x
))
8570 fputs ("eq", file
); /* 4 */
8573 fputs ("ne", file
); /* 24 */
8576 fputs ("lt", file
); /* 16 */
8579 fputs ("le", file
); /* 20 */
8582 fputs ("gt", file
); /* 8 */
8585 fputs ("ge", file
); /* 12 */
8588 fputs ("llt", file
); /* 2 */
8591 fputs ("lle", file
); /* 6 */
8594 fputs ("lgt", file
); /* 1 */
8597 fputs ("lge", file
); /* 5 */
8605 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
8608 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
,
8609 ((INT_LOWPART (x
) & 0xffff) ^ 0x8000) - 0x8000);
8611 print_operand (file
, x
, 0);
8615 /* MB value for a PowerPC64 rldic operand. */
8616 val
= (GET_CODE (x
) == CONST_INT
8617 ? INTVAL (x
) : CONST_DOUBLE_HIGH (x
));
8622 for (i
= 0; i
< HOST_BITS_PER_WIDE_INT
; i
++)
8623 if ((val
<<= 1) < 0)
8626 #if HOST_BITS_PER_WIDE_INT == 32
8627 if (GET_CODE (x
) == CONST_INT
&& i
>= 0)
8628 i
+= 32; /* zero-extend high-part was all 0's */
8629 else if (GET_CODE (x
) == CONST_DOUBLE
&& i
== 32)
8631 val
= CONST_DOUBLE_LOW (x
);
8638 for ( ; i
< 64; i
++)
8639 if ((val
<<= 1) < 0)
8644 fprintf (file
, "%d", i
+ 1);
8648 if (GET_CODE (x
) == MEM
8649 && legitimate_indexed_address_p (XEXP (x
, 0), 0))
8654 /* Like 'L', for third word of TImode */
8655 if (GET_CODE (x
) == REG
)
8656 fprintf (file
, "%s", reg_names
[REGNO (x
) + 2]);
8657 else if (GET_CODE (x
) == MEM
)
8659 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
8660 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
8661 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 8));
8663 output_address (XEXP (adjust_address_nv (x
, SImode
, 8), 0));
8664 if (small_data_operand (x
, GET_MODE (x
)))
8665 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
8666 reg_names
[SMALL_DATA_REG
]);
8671 /* X is a SYMBOL_REF. Write out the name preceded by a
8672 period and without any trailing data in brackets. Used for function
8673 names. If we are configured for System V (or the embedded ABI) on
8674 the PowerPC, do not emit the period, since those systems do not use
8675 TOCs and the like. */
8676 if (GET_CODE (x
) != SYMBOL_REF
)
8679 if (XSTR (x
, 0)[0] != '.')
8681 switch (DEFAULT_ABI
)
8696 RS6000_OUTPUT_BASENAME (file
, XSTR (x
, 0));
8698 assemble_name (file
, XSTR (x
, 0));
8702 /* Like 'L', for last word of TImode. */
8703 if (GET_CODE (x
) == REG
)
8704 fprintf (file
, "%s", reg_names
[REGNO (x
) + 3]);
8705 else if (GET_CODE (x
) == MEM
)
8707 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
8708 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
8709 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 12));
8711 output_address (XEXP (adjust_address_nv (x
, SImode
, 12), 0));
8712 if (small_data_operand (x
, GET_MODE (x
)))
8713 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
8714 reg_names
[SMALL_DATA_REG
]);
8718 /* Print AltiVec or SPE memory operand. */
8723 if (GET_CODE (x
) != MEM
)
8731 if (GET_CODE (tmp
) == REG
)
8733 fprintf (file
, "0(%s)", reg_names
[REGNO (tmp
)]);
8736 /* Handle [reg+UIMM]. */
8737 else if (GET_CODE (tmp
) == PLUS
&&
8738 GET_CODE (XEXP (tmp
, 1)) == CONST_INT
)
8742 if (GET_CODE (XEXP (tmp
, 0)) != REG
)
8745 x
= INTVAL (XEXP (tmp
, 1));
8746 fprintf (file
, "%d(%s)", x
, reg_names
[REGNO (XEXP (tmp
, 0))]);
8750 /* Fall through. Must be [reg+reg]. */
8752 if (GET_CODE (tmp
) == REG
)
8753 fprintf (file
, "0,%s", reg_names
[REGNO (tmp
)]);
8754 else if (GET_CODE (tmp
) == PLUS
&& GET_CODE (XEXP (tmp
, 1)) == REG
)
8756 if (REGNO (XEXP (tmp
, 0)) == 0)
8757 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (tmp
, 1)) ],
8758 reg_names
[ REGNO (XEXP (tmp
, 0)) ]);
8760 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (tmp
, 0)) ],
8761 reg_names
[ REGNO (XEXP (tmp
, 1)) ]);
8769 if (GET_CODE (x
) == REG
)
8770 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
8771 else if (GET_CODE (x
) == MEM
)
8773 /* We need to handle PRE_INC and PRE_DEC here, since we need to
8774 know the width from the mode. */
8775 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
)
8776 fprintf (file
, "%d(%s)", GET_MODE_SIZE (GET_MODE (x
)),
8777 reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))]);
8778 else if (GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
8779 fprintf (file
, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x
)),
8780 reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))]);
8782 output_address (XEXP (x
, 0));
8785 output_addr_const (file
, x
);
8789 assemble_name (file
, rs6000_get_some_local_dynamic_name ());
8793 output_operand_lossage ("invalid %%xn code");
8797 /* Print the address of an operand. */
8800 print_operand_address (file
, x
)
8804 if (GET_CODE (x
) == REG
)
8805 fprintf (file
, "0(%s)", reg_names
[ REGNO (x
) ]);
8806 else if (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == CONST
8807 || GET_CODE (x
) == LABEL_REF
)
8809 output_addr_const (file
, x
);
8810 if (small_data_operand (x
, GET_MODE (x
)))
8811 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
8812 reg_names
[SMALL_DATA_REG
]);
8813 else if (TARGET_TOC
)
8816 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == REG
)
8818 if (REGNO (XEXP (x
, 0)) == 0)
8819 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (x
, 1)) ],
8820 reg_names
[ REGNO (XEXP (x
, 0)) ]);
8822 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (x
, 0)) ],
8823 reg_names
[ REGNO (XEXP (x
, 1)) ]);
8825 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
)
8826 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
"(%s)",
8827 INTVAL (XEXP (x
, 1)), reg_names
[ REGNO (XEXP (x
, 0)) ]);
8829 else if (GET_CODE (x
) == LO_SUM
&& GET_CODE (XEXP (x
, 0)) == REG
8830 && CONSTANT_P (XEXP (x
, 1)))
8832 output_addr_const (file
, XEXP (x
, 1));
8833 fprintf (file
, "@l(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
8837 else if (GET_CODE (x
) == LO_SUM
&& GET_CODE (XEXP (x
, 0)) == REG
8838 && CONSTANT_P (XEXP (x
, 1)))
8840 fprintf (file
, "lo16(");
8841 output_addr_const (file
, XEXP (x
, 1));
8842 fprintf (file
, ")(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
8845 else if (legitimate_constant_pool_address_p (x
))
8847 if (TARGET_AIX
&& (!TARGET_ELF
|| !TARGET_MINIMAL_TOC
))
8849 rtx contains_minus
= XEXP (x
, 1);
8853 /* Find the (minus (sym) (toc)) buried in X, and temporarily
8854 turn it into (sym) for output_addr_const. */
8855 while (GET_CODE (XEXP (contains_minus
, 0)) != MINUS
)
8856 contains_minus
= XEXP (contains_minus
, 0);
8858 minus
= XEXP (contains_minus
, 0);
8859 symref
= XEXP (minus
, 0);
8860 XEXP (contains_minus
, 0) = symref
;
8865 name
= XSTR (symref
, 0);
8866 newname
= alloca (strlen (name
) + sizeof ("@toc"));
8867 strcpy (newname
, name
);
8868 strcat (newname
, "@toc");
8869 XSTR (symref
, 0) = newname
;
8871 output_addr_const (file
, XEXP (x
, 1));
8873 XSTR (symref
, 0) = name
;
8874 XEXP (contains_minus
, 0) = minus
;
8877 output_addr_const (file
, XEXP (x
, 1));
8879 fprintf (file
, "(%s)", reg_names
[REGNO (XEXP (x
, 0))]);
8885 /* Target hook for assembling integer objects. The PowerPC version has
8886 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
8887 is defined. It also needs to handle DI-mode objects on 64-bit
8891 rs6000_assemble_integer (x
, size
, aligned_p
)
8896 #ifdef RELOCATABLE_NEEDS_FIXUP
8897 /* Special handling for SI values. */
8898 if (size
== 4 && aligned_p
)
8900 extern int in_toc_section
PARAMS ((void));
8901 static int recurse
= 0;
8903 /* For -mrelocatable, we mark all addresses that need to be fixed up
8904 in the .fixup section. */
8905 if (TARGET_RELOCATABLE
8906 && !in_toc_section ()
8907 && !in_text_section ()
8909 && GET_CODE (x
) != CONST_INT
8910 && GET_CODE (x
) != CONST_DOUBLE
8916 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCP", fixuplabelno
);
8918 ASM_OUTPUT_LABEL (asm_out_file
, buf
);
8919 fprintf (asm_out_file
, "\t.long\t(");
8920 output_addr_const (asm_out_file
, x
);
8921 fprintf (asm_out_file
, ")@fixup\n");
8922 fprintf (asm_out_file
, "\t.section\t\".fixup\",\"aw\"\n");
8923 ASM_OUTPUT_ALIGN (asm_out_file
, 2);
8924 fprintf (asm_out_file
, "\t.long\t");
8925 assemble_name (asm_out_file
, buf
);
8926 fprintf (asm_out_file
, "\n\t.previous\n");
8930 /* Remove initial .'s to turn a -mcall-aixdesc function
8931 address into the address of the descriptor, not the function
8933 else if (GET_CODE (x
) == SYMBOL_REF
8934 && XSTR (x
, 0)[0] == '.'
8935 && DEFAULT_ABI
== ABI_AIX
)
8937 const char *name
= XSTR (x
, 0);
8938 while (*name
== '.')
8941 fprintf (asm_out_file
, "\t.long\t%s\n", name
);
8945 #endif /* RELOCATABLE_NEEDS_FIXUP */
8946 return default_assemble_integer (x
, size
, aligned_p
);
#ifdef HAVE_GAS_HIDDEN
/* Emit an assembler directive to set symbol visibility for DECL to
   VISIBILITY_TYPE (VIS indexes the visibility_types table).  */

static void
rs6000_assemble_visibility (decl, vis)
     tree decl;
     int vis;
{
  /* Functions need to have their entry point symbol visibility set as
     well as their descriptor symbol visibility.  */
  if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
    {
      static const char * const visibility_types[] = {
	NULL, "internal", "hidden", "protected"
      };

      const char *name, *type;

      name = ((* targetm.strip_name_encoding)
	      (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
      type = visibility_types[vis];

      /* Descriptor symbol, then the ".name" entry-point symbol.  */
      fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
      fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
    }
  else
    default_assemble_visibility (decl, vis);
}
#endif
8981 rs6000_reverse_condition (mode
, code
)
8982 enum machine_mode mode
;
8985 /* Reversal of FP compares takes care -- an ordered compare
8986 becomes an unordered compare and vice versa. */
8987 if (mode
== CCFPmode
8988 && (!flag_finite_math_only
8989 || code
== UNLT
|| code
== UNLE
|| code
== UNGT
|| code
== UNGE
8990 || code
== UNEQ
|| code
== LTGT
))
8991 return reverse_condition_maybe_unordered (code
);
8993 return reverse_condition (code
);
8996 /* Generate a compare for CODE. Return a brand-new rtx that
8997 represents the result of the compare. */
9000 rs6000_generate_compare (code
)
9003 enum machine_mode comp_mode
;
9006 if (rs6000_compare_fp_p
)
9007 comp_mode
= CCFPmode
;
9008 else if (code
== GTU
|| code
== LTU
9009 || code
== GEU
|| code
== LEU
)
9010 comp_mode
= CCUNSmode
;
9014 /* First, the compare. */
9015 compare_result
= gen_reg_rtx (comp_mode
);
9017 /* SPE FP compare instructions on the GPRs. Yuck! */
9018 if ((TARGET_E500
&& !TARGET_FPRS
&& TARGET_HARD_FLOAT
)
9019 && rs6000_compare_fp_p
)
9021 rtx cmp
, or1
, or2
, or_result
, compare_result2
;
9029 cmp
= flag_finite_math_only
9030 ? gen_tstsfeq_gpr (compare_result
, rs6000_compare_op0
,
9032 : gen_cmpsfeq_gpr (compare_result
, rs6000_compare_op0
,
9033 rs6000_compare_op1
);
9041 cmp
= flag_finite_math_only
9042 ? gen_tstsfgt_gpr (compare_result
, rs6000_compare_op0
,
9044 : gen_cmpsfgt_gpr (compare_result
, rs6000_compare_op0
,
9045 rs6000_compare_op1
);
9053 cmp
= flag_finite_math_only
9054 ? gen_tstsflt_gpr (compare_result
, rs6000_compare_op0
,
9056 : gen_cmpsflt_gpr (compare_result
, rs6000_compare_op0
,
9057 rs6000_compare_op1
);
9063 /* Synthesize LE and GE from LT/GT || EQ. */
9064 if (code
== LE
|| code
== GE
|| code
== LEU
|| code
== GEU
)
9066 /* Synthesize GE/LE frome GT/LT || EQ. */
9072 case LE
: code
= LT
; break;
9073 case GE
: code
= GT
; break;
9074 case LEU
: code
= LT
; break;
9075 case GEU
: code
= GT
; break;
9079 or1
= gen_reg_rtx (SImode
);
9080 or2
= gen_reg_rtx (SImode
);
9081 or_result
= gen_reg_rtx (CCEQmode
);
9082 compare_result2
= gen_reg_rtx (CCFPmode
);
9085 cmp
= flag_finite_math_only
9086 ? gen_tstsfeq_gpr (compare_result2
, rs6000_compare_op0
,
9088 : gen_cmpsfeq_gpr (compare_result2
, rs6000_compare_op0
,
9089 rs6000_compare_op1
);
9092 /* The MC8540 FP compare instructions set the CR bits
9093 differently than other PPC compare instructions. For
9094 that matter, there is no generic test instruction, but a
9095 testgt, testlt, and testeq. For a true condition, bit 2
9096 is set (x1xx) in the CR. Following the traditional CR
9102 ... bit 2 would be a GT CR alias, so later on we
9103 look in the GT bits for the branch instructions.
9104 However, we must be careful to emit correct RTL in
9105 the meantime, so optimizations don't get confused. */
9107 or1
= gen_rtx (NE
, SImode
, compare_result
, const0_rtx
);
9108 or2
= gen_rtx (NE
, SImode
, compare_result2
, const0_rtx
);
9110 /* OR them together. */
9111 cmp
= gen_rtx_SET (VOIDmode
, or_result
,
9112 gen_rtx_COMPARE (CCEQmode
,
9113 gen_rtx_IOR (SImode
, or1
, or2
),
9115 compare_result
= or_result
;
9120 /* We only care about 1 bit (x1xx), so map everything to NE to
9121 maintain rtl sanity. We'll get to the right bit (x1xx) at
9122 code output time. */
9123 if (code
== NE
|| code
== LTGT
)
9124 /* Do the inverse here because we have no cmpne
9125 instruction. We use the cmpeq instruction and expect
9126 to get a 0 instead. */
9135 emit_insn (gen_rtx_SET (VOIDmode
, compare_result
,
9136 gen_rtx_COMPARE (comp_mode
,
9138 rs6000_compare_op1
)));
9140 /* Some kinds of FP comparisons need an OR operation;
9141 under flag_finite_math_only we don't bother. */
9142 if (rs6000_compare_fp_p
9143 && ! flag_finite_math_only
9144 && ! (TARGET_HARD_FLOAT
&& TARGET_E500
&& !TARGET_FPRS
)
9145 && (code
== LE
|| code
== GE
9146 || code
== UNEQ
|| code
== LTGT
9147 || code
== UNGT
|| code
== UNLT
))
9149 enum rtx_code or1
, or2
;
9150 rtx or1_rtx
, or2_rtx
, compare2_rtx
;
9151 rtx or_result
= gen_reg_rtx (CCEQmode
);
9155 case LE
: or1
= LT
; or2
= EQ
; break;
9156 case GE
: or1
= GT
; or2
= EQ
; break;
9157 case UNEQ
: or1
= UNORDERED
; or2
= EQ
; break;
9158 case LTGT
: or1
= LT
; or2
= GT
; break;
9159 case UNGT
: or1
= UNORDERED
; or2
= GT
; break;
9160 case UNLT
: or1
= UNORDERED
; or2
= LT
; break;
9163 validate_condition_mode (or1
, comp_mode
);
9164 validate_condition_mode (or2
, comp_mode
);
9165 or1_rtx
= gen_rtx (or1
, SImode
, compare_result
, const0_rtx
);
9166 or2_rtx
= gen_rtx (or2
, SImode
, compare_result
, const0_rtx
);
9167 compare2_rtx
= gen_rtx_COMPARE (CCEQmode
,
9168 gen_rtx_IOR (SImode
, or1_rtx
, or2_rtx
),
9170 emit_insn (gen_rtx_SET (VOIDmode
, or_result
, compare2_rtx
));
9172 compare_result
= or_result
;
9176 validate_condition_mode (code
, GET_MODE (compare_result
));
9178 return gen_rtx (code
, VOIDmode
, compare_result
, const0_rtx
);
9182 /* Emit the RTL for an sCOND pattern. */
9185 rs6000_emit_sCOND (code
, result
)
9190 enum machine_mode op_mode
;
9191 enum rtx_code cond_code
;
9193 condition_rtx
= rs6000_generate_compare (code
);
9194 cond_code
= GET_CODE (condition_rtx
);
9197 || cond_code
== GE
|| cond_code
== LE
9198 || cond_code
== GEU
|| cond_code
== LEU
9199 || cond_code
== ORDERED
|| cond_code
== UNGE
|| cond_code
== UNLE
)
9201 rtx not_result
= gen_reg_rtx (CCEQmode
);
9202 rtx not_op
, rev_cond_rtx
;
9203 enum machine_mode cc_mode
;
9205 cc_mode
= GET_MODE (XEXP (condition_rtx
, 0));
9207 rev_cond_rtx
= gen_rtx (rs6000_reverse_condition (cc_mode
, cond_code
),
9208 SImode
, XEXP (condition_rtx
, 0), const0_rtx
);
9209 not_op
= gen_rtx_COMPARE (CCEQmode
, rev_cond_rtx
, const0_rtx
);
9210 emit_insn (gen_rtx_SET (VOIDmode
, not_result
, not_op
));
9211 condition_rtx
= gen_rtx_EQ (VOIDmode
, not_result
, const0_rtx
);
9214 op_mode
= GET_MODE (rs6000_compare_op0
);
9215 if (op_mode
== VOIDmode
)
9216 op_mode
= GET_MODE (rs6000_compare_op1
);
9218 if (TARGET_POWERPC64
&& (op_mode
== DImode
|| rs6000_compare_fp_p
))
9220 PUT_MODE (condition_rtx
, DImode
);
9221 convert_move (result
, condition_rtx
, 0);
9225 PUT_MODE (condition_rtx
, SImode
);
9226 emit_insn (gen_rtx_SET (VOIDmode
, result
, condition_rtx
));
9230 /* Emit a branch of kind CODE to location LOC. */
9233 rs6000_emit_cbranch (code
, loc
)
9237 rtx condition_rtx
, loc_ref
;
9239 condition_rtx
= rs6000_generate_compare (code
);
9240 loc_ref
= gen_rtx_LABEL_REF (VOIDmode
, loc
);
9241 emit_jump_insn (gen_rtx_SET (VOIDmode
, pc_rtx
,
9242 gen_rtx_IF_THEN_ELSE (VOIDmode
, condition_rtx
,
9246 /* Return the string to output a conditional branch to LABEL, which is
9247 the operand number of the label, or -1 if the branch is really a
9250 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
9251 condition code register and its mode specifies what kind of
9254 REVERSED is nonzero if we should reverse the sense of the comparison.
9256 INSN is the insn. */
9259 output_cbranch (op
, label
, reversed
, insn
)
9265 static char string
[64];
9266 enum rtx_code code
= GET_CODE (op
);
9267 rtx cc_reg
= XEXP (op
, 0);
9268 enum machine_mode mode
= GET_MODE (cc_reg
);
9269 int cc_regno
= REGNO (cc_reg
) - CR0_REGNO
;
9270 int need_longbranch
= label
!= NULL
&& get_attr_length (insn
) == 8;
9271 int really_reversed
= reversed
^ need_longbranch
;
9277 validate_condition_mode (code
, mode
);
9279 /* Work out which way this really branches. We could use
9280 reverse_condition_maybe_unordered here always but this
9281 makes the resulting assembler clearer. */
9282 if (really_reversed
)
9284 /* Reversal of FP compares takes care -- an ordered compare
9285 becomes an unordered compare and vice versa. */
9286 if (mode
== CCFPmode
)
9287 code
= reverse_condition_maybe_unordered (code
);
9289 code
= reverse_condition (code
);
9292 if ((TARGET_E500
&& !TARGET_FPRS
&& TARGET_HARD_FLOAT
) && mode
== CCFPmode
)
9294 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
9297 /* Opposite of GT. */
9299 else if (code
== NE
)
9307 /* Not all of these are actually distinct opcodes, but
9308 we distinguish them for clarity of the resulting assembler. */
9310 ccode
= "ne"; break;
9312 ccode
= "eq"; break;
9314 ccode
= "ge"; break;
9315 case GT
: case GTU
: case UNGT
:
9316 ccode
= "gt"; break;
9318 ccode
= "le"; break;
9319 case LT
: case LTU
: case UNLT
:
9320 ccode
= "lt"; break;
9321 case UNORDERED
: ccode
= "un"; break;
9322 case ORDERED
: ccode
= "nu"; break;
9323 case UNGE
: ccode
= "nl"; break;
9324 case UNLE
: ccode
= "ng"; break;
9329 /* Maybe we have a guess as to how likely the branch is.
9330 The old mnemonics don't have a way to specify this information. */
9332 note
= find_reg_note (insn
, REG_BR_PROB
, NULL_RTX
);
9333 if (note
!= NULL_RTX
)
9335 /* PROB is the difference from 50%. */
9336 int prob
= INTVAL (XEXP (note
, 0)) - REG_BR_PROB_BASE
/ 2;
9337 bool always_hint
= rs6000_cpu
!= PROCESSOR_POWER4
;
9339 /* Only hint for highly probable/improbable branches on newer
9340 cpus as static prediction overrides processor dynamic
9341 prediction. For older cpus we may as well always hint, but
9342 assume not taken for branches that are very close to 50% as a
9343 mispredicted taken branch is more expensive than a
9344 mispredicted not-taken branch. */
9346 || abs (prob
) > REG_BR_PROB_BASE
/ 100 * 48)
9348 if (abs (prob
) > REG_BR_PROB_BASE
/ 20
9349 && ((prob
> 0) ^ need_longbranch
))
9357 s
+= sprintf (s
, "{b%sr|b%slr%s} ", ccode
, ccode
, pred
);
9359 s
+= sprintf (s
, "{b%s|b%s%s} ", ccode
, ccode
, pred
);
9361 /* We need to escape any '%' characters in the reg_names string.
9362 Assume they'd only be the first character... */
9363 if (reg_names
[cc_regno
+ CR0_REGNO
][0] == '%')
9365 s
+= sprintf (s
, "%s", reg_names
[cc_regno
+ CR0_REGNO
]);
9369 /* If the branch distance was too far, we may have to use an
9370 unconditional branch to go the distance. */
9371 if (need_longbranch
)
9372 s
+= sprintf (s
, ",$+8\n\tb %s", label
);
9374 s
+= sprintf (s
, ",%s", label
);
9380 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
9381 operands of the last comparison is nonzero/true, FALSE_COND if it
9382 is zero/false. Return 0 if the hardware has no such operation. */
9385 rs6000_emit_cmove (dest
, op
, true_cond
, false_cond
)
9391 enum rtx_code code
= GET_CODE (op
);
9392 rtx op0
= rs6000_compare_op0
;
9393 rtx op1
= rs6000_compare_op1
;
9395 enum machine_mode compare_mode
= GET_MODE (op0
);
9396 enum machine_mode result_mode
= GET_MODE (dest
);
9399 /* These modes should always match. */
9400 if (GET_MODE (op1
) != compare_mode
9401 /* In the isel case however, we can use a compare immediate, so
9402 op1 may be a small constant. */
9403 && (!TARGET_ISEL
|| !short_cint_operand (op1
, VOIDmode
)))
9405 if (GET_MODE (true_cond
) != result_mode
)
9407 if (GET_MODE (false_cond
) != result_mode
)
9410 /* First, work out if the hardware can do this at all, or
9411 if it's too slow... */
9412 if (! rs6000_compare_fp_p
)
9415 return rs6000_emit_int_cmove (dest
, op
, true_cond
, false_cond
);
9419 /* Eliminate half of the comparisons by switching operands, this
9420 makes the remaining code simpler. */
9421 if (code
== UNLT
|| code
== UNGT
|| code
== UNORDERED
|| code
== NE
9422 || code
== LTGT
|| code
== LT
|| code
== UNLE
)
9424 code
= reverse_condition_maybe_unordered (code
);
9426 true_cond
= false_cond
;
9430 /* UNEQ and LTGT take four instructions for a comparison with zero,
9431 it'll probably be faster to use a branch here too. */
9432 if (code
== UNEQ
&& HONOR_NANS (compare_mode
))
9435 if (GET_CODE (op1
) == CONST_DOUBLE
)
9436 REAL_VALUE_FROM_CONST_DOUBLE (c1
, op1
);
9438 /* We're going to try to implement comparisons by performing
9439 a subtract, then comparing against zero. Unfortunately,
9440 Inf - Inf is NaN which is not zero, and so if we don't
9441 know that the operand is finite and the comparison
9442 would treat EQ different to UNORDERED, we can't do it. */
9443 if (HONOR_INFINITIES (compare_mode
)
9444 && code
!= GT
&& code
!= UNGE
9445 && (GET_CODE (op1
) != CONST_DOUBLE
|| real_isinf (&c1
))
9446 /* Constructs of the form (a OP b ? a : b) are safe. */
9447 && ((! rtx_equal_p (op0
, false_cond
) && ! rtx_equal_p (op1
, false_cond
))
9448 || (! rtx_equal_p (op0
, true_cond
)
9449 && ! rtx_equal_p (op1
, true_cond
))))
9451 /* At this point we know we can use fsel. */
9453 /* Reduce the comparison to a comparison against zero. */
9454 temp
= gen_reg_rtx (compare_mode
);
9455 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
9456 gen_rtx_MINUS (compare_mode
, op0
, op1
)));
9458 op1
= CONST0_RTX (compare_mode
);
9460 /* If we don't care about NaNs we can reduce some of the comparisons
9461 down to faster ones. */
9462 if (! HONOR_NANS (compare_mode
))
9468 true_cond
= false_cond
;
9481 /* Now, reduce everything down to a GE. */
9488 temp
= gen_reg_rtx (compare_mode
);
9489 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
9494 temp
= gen_reg_rtx (compare_mode
);
9495 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_ABS (compare_mode
, op0
)));
9500 temp
= gen_reg_rtx (compare_mode
);
9501 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
9502 gen_rtx_NEG (compare_mode
,
9503 gen_rtx_ABS (compare_mode
, op0
))));
9508 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
9509 temp
= gen_reg_rtx (result_mode
);
9510 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
9511 gen_rtx_IF_THEN_ELSE (result_mode
,
9512 gen_rtx_GE (VOIDmode
,
9514 true_cond
, false_cond
)));
9515 false_cond
= true_cond
;
9518 temp
= gen_reg_rtx (compare_mode
);
9519 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
9524 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
9525 temp
= gen_reg_rtx (result_mode
);
9526 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
9527 gen_rtx_IF_THEN_ELSE (result_mode
,
9528 gen_rtx_GE (VOIDmode
,
9530 true_cond
, false_cond
)));
9531 true_cond
= false_cond
;
9534 temp
= gen_reg_rtx (compare_mode
);
9535 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
9543 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
9544 gen_rtx_IF_THEN_ELSE (result_mode
,
9545 gen_rtx_GE (VOIDmode
,
9547 true_cond
, false_cond
)));
9551 /* Same as above, but for ints (isel). */
9554 rs6000_emit_int_cmove (dest
, op
, true_cond
, false_cond
)
9560 rtx condition_rtx
, cr
;
9562 /* All isel implementations thus far are 32-bits. */
9563 if (GET_MODE (rs6000_compare_op0
) != SImode
)
9566 /* We still have to do the compare, because isel doesn't do a
9567 compare, it just looks at the CRx bits set by a previous compare
9569 condition_rtx
= rs6000_generate_compare (GET_CODE (op
));
9570 cr
= XEXP (condition_rtx
, 0);
9572 if (GET_MODE (cr
) == CCmode
)
9573 emit_insn (gen_isel_signed (dest
, condition_rtx
,
9574 true_cond
, false_cond
, cr
));
9576 emit_insn (gen_isel_unsigned (dest
, condition_rtx
,
9577 true_cond
, false_cond
, cr
));
9583 output_isel (operands
)
9588 code
= GET_CODE (operands
[1]);
9589 if (code
== GE
|| code
== GEU
|| code
== LE
|| code
== LEU
|| code
== NE
)
9591 PUT_CODE (operands
[1], reverse_condition (code
));
9592 return "isel %0,%3,%2,%j1";
9595 return "isel %0,%2,%3,%j1";
9599 rs6000_emit_minmax (dest
, code
, op0
, op1
)
9605 enum machine_mode mode
= GET_MODE (op0
);
9609 if (code
== SMAX
|| code
== SMIN
)
9614 if (code
== SMAX
|| code
== UMAX
)
9615 target
= emit_conditional_move (dest
, c
, op0
, op1
, mode
,
9618 target
= emit_conditional_move (dest
, c
, op0
, op1
, mode
,
9620 if (target
== NULL_RTX
)
9623 emit_move_insn (dest
, target
);
9626 /* This page contains routines that are used to determine what the
9627 function prologue and epilogue code will do and write them out. */
9629 /* Return the first fixed-point register that is required to be
9630 saved. 32 if none. */
9633 first_reg_to_save ()
9637 /* Find lowest numbered live register. */
9638 for (first_reg
= 13; first_reg
<= 31; first_reg
++)
9639 if (regs_ever_live
[first_reg
]
9640 && (! call_used_regs
[first_reg
]
9641 || (first_reg
== RS6000_PIC_OFFSET_TABLE_REGNUM
9642 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
!= 0)
9643 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
)))))
9648 && current_function_uses_pic_offset_table
9649 && first_reg
> RS6000_PIC_OFFSET_TABLE_REGNUM
)
9650 return RS6000_PIC_OFFSET_TABLE_REGNUM
;
9656 /* Similar, for FP regs. */
9659 first_fp_reg_to_save ()
9663 /* Find lowest numbered live register. */
9664 for (first_reg
= 14 + 32; first_reg
<= 63; first_reg
++)
9665 if (regs_ever_live
[first_reg
])
9671 /* Similar, for AltiVec regs. */
9674 first_altivec_reg_to_save ()
9678 /* Stack frame remains as is unless we are in AltiVec ABI. */
9679 if (! TARGET_ALTIVEC_ABI
)
9680 return LAST_ALTIVEC_REGNO
+ 1;
9682 /* Find lowest numbered live register. */
9683 for (i
= FIRST_ALTIVEC_REGNO
+ 20; i
<= LAST_ALTIVEC_REGNO
; ++i
)
9684 if (regs_ever_live
[i
])
9690 /* Return a 32-bit mask of the AltiVec registers we need to set in
9691 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
9692 the 32-bit word is 0. */
9695 compute_vrsave_mask ()
9697 unsigned int i
, mask
= 0;
9699 /* First, find out if we use _any_ altivec registers. */
9700 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
9701 if (regs_ever_live
[i
])
9702 mask
|= ALTIVEC_REG_BIT (i
);
9707 /* Next, remove the argument registers from the set. These must
9708 be in the VRSAVE mask set by the caller, so we don't need to add
9709 them in again. More importantly, the mask we compute here is
9710 used to generate CLOBBERs in the set_vrsave insn, and we do not
9711 wish the argument registers to die. */
9712 for (i
= cfun
->args_info
.vregno
; i
>= ALTIVEC_ARG_MIN_REG
; --i
)
9713 mask
&= ~ALTIVEC_REG_BIT (i
);
9715 /* Similarly, remove the return value from the set. */
9718 diddle_return_value (is_altivec_return_reg
, &yes
);
9720 mask
&= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN
);
9727 is_altivec_return_reg (reg
, xyes
)
9731 bool *yes
= (bool *) xyes
;
9732 if (REGNO (reg
) == ALTIVEC_ARG_RETURN
)
9737 /* Calculate the stack information for the current function. This is
9738 complicated by having two separate calling sequences, the AIX calling
9739 sequence and the V.4 calling sequence.
9741 AIX (and Darwin/Mac OS X) stack frames look like:
9743 SP----> +---------------------------------------+
9744 | back chain to caller | 0 0
9745 +---------------------------------------+
9746 | saved CR | 4 8 (8-11)
9747 +---------------------------------------+
9749 +---------------------------------------+
9750 | reserved for compilers | 12 24
9751 +---------------------------------------+
9752 | reserved for binders | 16 32
9753 +---------------------------------------+
9754 | saved TOC pointer | 20 40
9755 +---------------------------------------+
9756 | Parameter save area (P) | 24 48
9757 +---------------------------------------+
9758 | Alloca space (A) | 24+P etc.
9759 +---------------------------------------+
9760 | Local variable space (L) | 24+P+A
9761 +---------------------------------------+
9762 | Float/int conversion temporary (X) | 24+P+A+L
9763 +---------------------------------------+
9764 | Save area for AltiVec registers (W) | 24+P+A+L+X
9765 +---------------------------------------+
9766 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
9767 +---------------------------------------+
9768 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
9769 +---------------------------------------+
9770 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
9771 +---------------------------------------+
9772 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
9773 +---------------------------------------+
9774 old SP->| back chain to caller's caller |
9775 +---------------------------------------+
9777 The required alignment for AIX configurations is two words (i.e., 8
9781 V.4 stack frames look like:
9783 SP----> +---------------------------------------+
9784 | back chain to caller | 0
9785 +---------------------------------------+
9786 | caller's saved LR | 4
9787 +---------------------------------------+
9788 | Parameter save area (P) | 8
9789 +---------------------------------------+
9790 | Alloca space (A) | 8+P
9791 +---------------------------------------+
9792 | Varargs save area (V) | 8+P+A
9793 +---------------------------------------+
9794 | Local variable space (L) | 8+P+A+V
9795 +---------------------------------------+
9796 | Float/int conversion temporary (X) | 8+P+A+V+L
9797 +---------------------------------------+
9798 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
9799 +---------------------------------------+
9800 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
9801 +---------------------------------------+
9802 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
9803 +---------------------------------------+
9804 | SPE: area for 64-bit GP registers |
9805 +---------------------------------------+
9806 | SPE alignment padding |
9807 +---------------------------------------+
9808 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
9809 +---------------------------------------+
9810 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
9811 +---------------------------------------+
9812 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
9813 +---------------------------------------+
9814 old SP->| back chain to caller's caller |
9815 +---------------------------------------+
9817 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
9818 given. (But note below and in sysv4.h that we require only 8 and
9819 may round up the size of our stack frame anyways. The historical
9820 reason is early versions of powerpc-linux which didn't properly
9821 align the stack at program startup. A happy side-effect is that
9822 -mno-eabi libraries can be used with -meabi programs.)
9824 The EABI configuration defaults to the V.4 layout. However,
9825 the stack alignment requirements may differ. If -mno-eabi is not
9826 given, the required stack alignment is 8 bytes; if -mno-eabi is
9827 given, the required alignment is 16 bytes. (But see V.4 comment
9830 #ifndef ABI_STACK_BOUNDARY
9831 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
9835 rs6000_stack_info ()
9837 static rs6000_stack_t info
, zero_info
;
9838 rs6000_stack_t
*info_ptr
= &info
;
9839 int reg_size
= TARGET_POWERPC64
? 8 : 4;
9843 /* Zero all fields portably. */
9848 /* Cache value so we don't rescan instruction chain over and over. */
9849 if (cfun
->machine
->insn_chain_scanned_p
== 0)
9851 cfun
->machine
->insn_chain_scanned_p
= 1;
9852 info_ptr
->spe_64bit_regs_used
= (int) spe_func_has_64bit_regs_p ();
9856 /* Select which calling sequence. */
9857 info_ptr
->abi
= DEFAULT_ABI
;
9859 /* Calculate which registers need to be saved & save area size. */
9860 info_ptr
->first_gp_reg_save
= first_reg_to_save ();
9861 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
9862 even if it currently looks like we won't. */
9863 if (((TARGET_TOC
&& TARGET_MINIMAL_TOC
)
9864 || (flag_pic
== 1 && DEFAULT_ABI
== ABI_V4
)
9865 || (flag_pic
&& DEFAULT_ABI
== ABI_DARWIN
))
9866 && info_ptr
->first_gp_reg_save
> RS6000_PIC_OFFSET_TABLE_REGNUM
)
9867 info_ptr
->gp_size
= reg_size
* (32 - RS6000_PIC_OFFSET_TABLE_REGNUM
);
9869 info_ptr
->gp_size
= reg_size
* (32 - info_ptr
->first_gp_reg_save
);
9871 /* For the SPE, we have an additional upper 32-bits on each GPR.
9872 Ideally we should save the entire 64-bits only when the upper
9873 half is used in SIMD instructions. Since we only record
9874 registers live (not the size they are used in), this proves
9875 difficult because we'd have to traverse the instruction chain at
9876 the right time, taking reload into account. This is a real pain,
9877 so we opt to save the GPRs in 64-bits always if but one register
9878 gets used in 64-bits. Otherwise, all the registers in the frame
9879 get saved in 32-bits.
9881 So... since when we save all GPRs (except the SP) in 64-bits, the
9882 traditional GP save area will be empty. */
9883 if (TARGET_SPE_ABI
&& info_ptr
->spe_64bit_regs_used
!= 0)
9884 info_ptr
->gp_size
= 0;
9886 info_ptr
->first_fp_reg_save
= first_fp_reg_to_save ();
9887 info_ptr
->fp_size
= 8 * (64 - info_ptr
->first_fp_reg_save
);
9889 info_ptr
->first_altivec_reg_save
= first_altivec_reg_to_save ();
9890 info_ptr
->altivec_size
= 16 * (LAST_ALTIVEC_REGNO
+ 1
9891 - info_ptr
->first_altivec_reg_save
);
9893 /* Does this function call anything? */
9894 info_ptr
->calls_p
= (! current_function_is_leaf
9895 || cfun
->machine
->ra_needs_full_frame
);
9897 /* Determine if we need to save the link register. */
9898 if (rs6000_ra_ever_killed ()
9899 || (DEFAULT_ABI
== ABI_AIX
9900 && current_function_profile
9901 && !TARGET_PROFILE_KERNEL
)
9902 #ifdef TARGET_RELOCATABLE
9903 || (TARGET_RELOCATABLE
&& (get_pool_size () != 0))
9905 || (info_ptr
->first_fp_reg_save
!= 64
9906 && !FP_SAVE_INLINE (info_ptr
->first_fp_reg_save
))
9907 || info_ptr
->first_altivec_reg_save
<= LAST_ALTIVEC_REGNO
9908 || (DEFAULT_ABI
== ABI_V4
&& current_function_calls_alloca
)
9909 || (DEFAULT_ABI
== ABI_DARWIN
9911 && current_function_uses_pic_offset_table
)
9912 || info_ptr
->calls_p
)
9914 info_ptr
->lr_save_p
= 1;
9915 regs_ever_live
[LINK_REGISTER_REGNUM
] = 1;
9918 /* Determine if we need to save the condition code registers. */
9919 if (regs_ever_live
[CR2_REGNO
]
9920 || regs_ever_live
[CR3_REGNO
]
9921 || regs_ever_live
[CR4_REGNO
])
9923 info_ptr
->cr_save_p
= 1;
9924 if (DEFAULT_ABI
== ABI_V4
)
9925 info_ptr
->cr_size
= reg_size
;
9928 /* If the current function calls __builtin_eh_return, then we need
9929 to allocate stack space for registers that will hold data for
9930 the exception handler. */
9931 if (current_function_calls_eh_return
)
9934 for (i
= 0; EH_RETURN_DATA_REGNO (i
) != INVALID_REGNUM
; ++i
)
9937 /* SPE saves EH registers in 64-bits. */
9938 ehrd_size
= i
* (TARGET_SPE_ABI
9939 && info_ptr
->spe_64bit_regs_used
!= 0
9940 ? UNITS_PER_SPE_WORD
: UNITS_PER_WORD
);
9945 /* Determine various sizes. */
9946 info_ptr
->reg_size
= reg_size
;
9947 info_ptr
->fixed_size
= RS6000_SAVE_AREA
;
9948 info_ptr
->varargs_size
= RS6000_VARARGS_AREA
;
9949 info_ptr
->vars_size
= RS6000_ALIGN (get_frame_size (), 8);
9950 info_ptr
->parm_size
= RS6000_ALIGN (current_function_outgoing_args_size
,
9953 if (TARGET_SPE_ABI
&& info_ptr
->spe_64bit_regs_used
!= 0)
9954 info_ptr
->spe_gp_size
= 8 * (32 - info_ptr
->first_gp_reg_save
);
9956 info_ptr
->spe_gp_size
= 0;
9958 if (TARGET_ALTIVEC_ABI
&& TARGET_ALTIVEC_VRSAVE
)
9960 info_ptr
->vrsave_mask
= compute_vrsave_mask ();
9961 info_ptr
->vrsave_size
= info_ptr
->vrsave_mask
? 4 : 0;
9965 info_ptr
->vrsave_mask
= 0;
9966 info_ptr
->vrsave_size
= 0;
9969 /* Calculate the offsets. */
9970 switch (DEFAULT_ABI
)
9978 info_ptr
->fp_save_offset
= - info_ptr
->fp_size
;
9979 info_ptr
->gp_save_offset
= info_ptr
->fp_save_offset
- info_ptr
->gp_size
;
9981 if (TARGET_ALTIVEC_ABI
)
9983 info_ptr
->vrsave_save_offset
9984 = info_ptr
->gp_save_offset
- info_ptr
->vrsave_size
;
9986 /* Align stack so vector save area is on a quadword boundary. */
9987 if (info_ptr
->altivec_size
!= 0)
9988 info_ptr
->altivec_padding_size
9989 = 16 - (-info_ptr
->vrsave_save_offset
% 16);
9991 info_ptr
->altivec_padding_size
= 0;
9993 info_ptr
->altivec_save_offset
9994 = info_ptr
->vrsave_save_offset
9995 - info_ptr
->altivec_padding_size
9996 - info_ptr
->altivec_size
;
9998 /* Adjust for AltiVec case. */
9999 info_ptr
->ehrd_offset
= info_ptr
->altivec_save_offset
- ehrd_size
;
10002 info_ptr
->ehrd_offset
= info_ptr
->gp_save_offset
- ehrd_size
;
10003 info_ptr
->cr_save_offset
= reg_size
; /* first word when 64-bit. */
10004 info_ptr
->lr_save_offset
= 2*reg_size
;
10008 info_ptr
->fp_save_offset
= - info_ptr
->fp_size
;
10009 info_ptr
->gp_save_offset
= info_ptr
->fp_save_offset
- info_ptr
->gp_size
;
10010 info_ptr
->cr_save_offset
= info_ptr
->gp_save_offset
- info_ptr
->cr_size
;
10012 if (TARGET_SPE_ABI
&& info_ptr
->spe_64bit_regs_used
!= 0)
10014 /* Align stack so SPE GPR save area is aligned on a
10015 double-word boundary. */
10016 if (info_ptr
->spe_gp_size
!= 0)
10017 info_ptr
->spe_padding_size
10018 = 8 - (-info_ptr
->cr_save_offset
% 8);
10020 info_ptr
->spe_padding_size
= 0;
10022 info_ptr
->spe_gp_save_offset
10023 = info_ptr
->cr_save_offset
10024 - info_ptr
->spe_padding_size
10025 - info_ptr
->spe_gp_size
;
10027 /* Adjust for SPE case. */
10028 info_ptr
->toc_save_offset
10029 = info_ptr
->spe_gp_save_offset
- info_ptr
->toc_size
;
10031 else if (TARGET_ALTIVEC_ABI
)
10033 info_ptr
->vrsave_save_offset
10034 = info_ptr
->cr_save_offset
- info_ptr
->vrsave_size
;
10036 /* Align stack so vector save area is on a quadword boundary. */
10037 if (info_ptr
->altivec_size
!= 0)
10038 info_ptr
->altivec_padding_size
10039 = 16 - (-info_ptr
->vrsave_save_offset
% 16);
10041 info_ptr
->altivec_padding_size
= 0;
10043 info_ptr
->altivec_save_offset
10044 = info_ptr
->vrsave_save_offset
10045 - info_ptr
->altivec_padding_size
10046 - info_ptr
->altivec_size
;
10048 /* Adjust for AltiVec case. */
10049 info_ptr
->toc_save_offset
10050 = info_ptr
->altivec_save_offset
- info_ptr
->toc_size
;
10053 info_ptr
->toc_save_offset
= info_ptr
->cr_save_offset
- info_ptr
->toc_size
;
10054 info_ptr
->ehrd_offset
= info_ptr
->toc_save_offset
- ehrd_size
;
10055 info_ptr
->lr_save_offset
= reg_size
;
10059 info_ptr
->save_size
= RS6000_ALIGN (info_ptr
->fp_size
10060 + info_ptr
->gp_size
10061 + info_ptr
->altivec_size
10062 + info_ptr
->altivec_padding_size
10063 + info_ptr
->spe_gp_size
10064 + info_ptr
->spe_padding_size
10066 + info_ptr
->cr_size
10067 + info_ptr
->lr_size
10068 + info_ptr
->vrsave_size
10069 + info_ptr
->toc_size
,
10070 (TARGET_ALTIVEC_ABI
|| ABI_DARWIN
)
10073 total_raw_size
= (info_ptr
->vars_size
10074 + info_ptr
->parm_size
10075 + info_ptr
->save_size
10076 + info_ptr
->varargs_size
10077 + info_ptr
->fixed_size
);
10079 info_ptr
->total_size
=
10080 RS6000_ALIGN (total_raw_size
, ABI_STACK_BOUNDARY
/ BITS_PER_UNIT
);
10082 /* Determine if we need to allocate any stack frame:
10084 For AIX we need to push the stack if a frame pointer is needed
10085 (because the stack might be dynamically adjusted), if we are
10086 debugging, if we make calls, or if the sum of fp_save, gp_save,
10087 and local variables are more than the space needed to save all
10088 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
10089 + 18*8 = 288 (GPR13 reserved).
10091 For V.4 we don't have the stack cushion that AIX uses, but assume
10092 that the debugger can handle stackless frames. */
10094 if (info_ptr
->calls_p
)
10095 info_ptr
->push_p
= 1;
10097 else if (DEFAULT_ABI
== ABI_V4
)
10098 info_ptr
->push_p
= total_raw_size
> info_ptr
->fixed_size
;
10100 else if (frame_pointer_needed
)
10101 info_ptr
->push_p
= 1;
10103 else if (TARGET_XCOFF
&& write_symbols
!= NO_DEBUG
)
10104 info_ptr
->push_p
= 1;
10108 = total_raw_size
- info_ptr
->fixed_size
> (TARGET_32BIT
? 220 : 288);
10110 /* Zero offsets if we're not saving those registers. */
10111 if (info_ptr
->fp_size
== 0)
10112 info_ptr
->fp_save_offset
= 0;
10114 if (info_ptr
->gp_size
== 0)
10115 info_ptr
->gp_save_offset
= 0;
10117 if (! TARGET_ALTIVEC_ABI
|| info_ptr
->altivec_size
== 0)
10118 info_ptr
->altivec_save_offset
= 0;
10120 if (! TARGET_ALTIVEC_ABI
|| info_ptr
->vrsave_mask
== 0)
10121 info_ptr
->vrsave_save_offset
= 0;
10123 if (! TARGET_SPE_ABI
10124 || info_ptr
->spe_64bit_regs_used
== 0
10125 || info_ptr
->spe_gp_size
== 0)
10126 info_ptr
->spe_gp_save_offset
= 0;
10128 if (! info_ptr
->lr_save_p
)
10129 info_ptr
->lr_save_offset
= 0;
10131 if (! info_ptr
->cr_save_p
)
10132 info_ptr
->cr_save_offset
= 0;
10134 if (! info_ptr
->toc_save_p
)
10135 info_ptr
->toc_save_offset
= 0;
10140 /* Return true if the current function uses any GPRs in 64-bit SIMD
10144 spe_func_has_64bit_regs_p ()
10148 /* Functions that save and restore all the call-saved registers will
10149 need to save/restore the registers in 64-bits. */
10150 if (current_function_calls_eh_return
10151 || current_function_calls_setjmp
10152 || current_function_has_nonlocal_goto
)
10155 insns
= get_insns ();
10157 for (insn
= NEXT_INSN (insns
); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
10163 i
= PATTERN (insn
);
10164 if (GET_CODE (i
) == SET
10165 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i
))))
10174 debug_stack_info (info
)
10175 rs6000_stack_t
*info
;
10177 const char *abi_string
;
10180 info
= rs6000_stack_info ();
10182 fprintf (stderr
, "\nStack information for function %s:\n",
10183 ((current_function_decl
&& DECL_NAME (current_function_decl
))
10184 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl
))
10189 default: abi_string
= "Unknown"; break;
10190 case ABI_NONE
: abi_string
= "NONE"; break;
10191 case ABI_AIX
: abi_string
= "AIX"; break;
10192 case ABI_DARWIN
: abi_string
= "Darwin"; break;
10193 case ABI_V4
: abi_string
= "V.4"; break;
10196 fprintf (stderr
, "\tABI = %5s\n", abi_string
);
10198 if (TARGET_ALTIVEC_ABI
)
10199 fprintf (stderr
, "\tALTIVEC ABI extensions enabled.\n");
10201 if (TARGET_SPE_ABI
)
10202 fprintf (stderr
, "\tSPE ABI extensions enabled.\n");
10204 if (info
->first_gp_reg_save
!= 32)
10205 fprintf (stderr
, "\tfirst_gp_reg_save = %5d\n", info
->first_gp_reg_save
);
10207 if (info
->first_fp_reg_save
!= 64)
10208 fprintf (stderr
, "\tfirst_fp_reg_save = %5d\n", info
->first_fp_reg_save
);
10210 if (info
->first_altivec_reg_save
<= LAST_ALTIVEC_REGNO
)
10211 fprintf (stderr
, "\tfirst_altivec_reg_save = %5d\n",
10212 info
->first_altivec_reg_save
);
10214 if (info
->lr_save_p
)
10215 fprintf (stderr
, "\tlr_save_p = %5d\n", info
->lr_save_p
);
10217 if (info
->cr_save_p
)
10218 fprintf (stderr
, "\tcr_save_p = %5d\n", info
->cr_save_p
);
10220 if (info
->toc_save_p
)
10221 fprintf (stderr
, "\ttoc_save_p = %5d\n", info
->toc_save_p
);
10223 if (info
->vrsave_mask
)
10224 fprintf (stderr
, "\tvrsave_mask = 0x%x\n", info
->vrsave_mask
);
10227 fprintf (stderr
, "\tpush_p = %5d\n", info
->push_p
);
10230 fprintf (stderr
, "\tcalls_p = %5d\n", info
->calls_p
);
10232 if (info
->gp_save_offset
)
10233 fprintf (stderr
, "\tgp_save_offset = %5d\n", info
->gp_save_offset
);
10235 if (info
->fp_save_offset
)
10236 fprintf (stderr
, "\tfp_save_offset = %5d\n", info
->fp_save_offset
);
10238 if (info
->altivec_save_offset
)
10239 fprintf (stderr
, "\taltivec_save_offset = %5d\n",
10240 info
->altivec_save_offset
);
10242 if (info
->spe_gp_save_offset
)
10243 fprintf (stderr
, "\tspe_gp_save_offset = %5d\n",
10244 info
->spe_gp_save_offset
);
10246 if (info
->vrsave_save_offset
)
10247 fprintf (stderr
, "\tvrsave_save_offset = %5d\n",
10248 info
->vrsave_save_offset
);
10250 if (info
->lr_save_offset
)
10251 fprintf (stderr
, "\tlr_save_offset = %5d\n", info
->lr_save_offset
);
10253 if (info
->cr_save_offset
)
10254 fprintf (stderr
, "\tcr_save_offset = %5d\n", info
->cr_save_offset
);
10256 if (info
->toc_save_offset
)
10257 fprintf (stderr
, "\ttoc_save_offset = %5d\n", info
->toc_save_offset
);
10259 if (info
->varargs_save_offset
)
10260 fprintf (stderr
, "\tvarargs_save_offset = %5d\n", info
->varargs_save_offset
);
10262 if (info
->total_size
)
10263 fprintf (stderr
, "\ttotal_size = %5d\n", info
->total_size
);
10265 if (info
->varargs_size
)
10266 fprintf (stderr
, "\tvarargs_size = %5d\n", info
->varargs_size
);
10268 if (info
->vars_size
)
10269 fprintf (stderr
, "\tvars_size = %5d\n", info
->vars_size
);
10271 if (info
->parm_size
)
10272 fprintf (stderr
, "\tparm_size = %5d\n", info
->parm_size
);
10274 if (info
->fixed_size
)
10275 fprintf (stderr
, "\tfixed_size = %5d\n", info
->fixed_size
);
10278 fprintf (stderr
, "\tgp_size = %5d\n", info
->gp_size
);
10280 if (info
->spe_gp_size
)
10281 fprintf (stderr
, "\tspe_gp_size = %5d\n", info
->spe_gp_size
);
10284 fprintf (stderr
, "\tfp_size = %5d\n", info
->fp_size
);
10286 if (info
->altivec_size
)
10287 fprintf (stderr
, "\taltivec_size = %5d\n", info
->altivec_size
);
10289 if (info
->vrsave_size
)
10290 fprintf (stderr
, "\tvrsave_size = %5d\n", info
->vrsave_size
);
10292 if (info
->altivec_padding_size
)
10293 fprintf (stderr
, "\taltivec_padding_size= %5d\n",
10294 info
->altivec_padding_size
);
10296 if (info
->spe_padding_size
)
10297 fprintf (stderr
, "\tspe_padding_size = %5d\n",
10298 info
->spe_padding_size
);
10301 fprintf (stderr
, "\tlr_size = %5d\n", info
->lr_size
);
10304 fprintf (stderr
, "\tcr_size = %5d\n", info
->cr_size
);
10306 if (info
->toc_size
)
10307 fprintf (stderr
, "\ttoc_size = %5d\n", info
->toc_size
);
10309 if (info
->save_size
)
10310 fprintf (stderr
, "\tsave_size = %5d\n", info
->save_size
);
10312 if (info
->reg_size
!= 4)
10313 fprintf (stderr
, "\treg_size = %5d\n", info
->reg_size
);
10315 fprintf (stderr
, "\n");
10319 rs6000_return_addr (count
, frame
)
10323 /* Currently we don't optimize very well between prolog and body
10324 code and for PIC code the code can be actually quite bad, so
10325 don't try to be too clever here. */
10326 if (count
!= 0 || (DEFAULT_ABI
!= ABI_AIX
&& flag_pic
))
10328 cfun
->machine
->ra_needs_full_frame
= 1;
10335 plus_constant (copy_to_reg
10336 (gen_rtx_MEM (Pmode
,
10337 memory_address (Pmode
, frame
))),
10338 RETURN_ADDRESS_OFFSET
)));
10341 return get_hard_reg_initial_val (Pmode
, LINK_REGISTER_REGNUM
);
10344 /* Say whether a function is a candidate for sibcall handling or not.
10345 We do not allow indirect calls to be optimized into sibling calls.
10346 Also, we can't do it if there are any vector parameters; there's
10347 nowhere to put the VRsave code so it works; note that functions with
10348 vector parameters are required to have a prototype, so the argument
10349 type info must be available here. (The tail recursion case can work
10350 with vector parameters, but there's no way to distinguish here.) */
10352 rs6000_function_ok_for_sibcall (decl
, exp
)
10354 tree exp ATTRIBUTE_UNUSED
;
10359 if (TARGET_ALTIVEC_VRSAVE
)
10361 for (type
= TYPE_ARG_TYPES (TREE_TYPE (decl
));
10362 type
; type
= TREE_CHAIN (type
))
10364 if (TREE_CODE (TREE_VALUE (type
)) == VECTOR_TYPE
)
10368 if (DEFAULT_ABI
== ABI_DARWIN
10369 || (*targetm
.binds_local_p
) (decl
))
10371 tree attr_list
= TYPE_ATTRIBUTES (TREE_TYPE (decl
));
10373 if (!lookup_attribute ("longcall", attr_list
)
10374 || lookup_attribute ("shortcall", attr_list
))
10382 rs6000_ra_ever_killed ()
10388 /* Irritatingly, there are two kinds of thunks -- those created with
10389 TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
10390 through the regular part of the compiler. This is a very hacky
10391 way to tell them apart. */
10392 if (current_function_is_thunk
&& !no_new_pseudos
)
10395 /* regs_ever_live has LR marked as used if any sibcalls are present,
10396 but this should not force saving and restoring in the
10397 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
10398 clobbers LR, so that is inappropriate. */
10400 /* Also, the prologue can generate a store into LR that
10401 doesn't really count, like this:
10404 bcl to set PIC register
10408 When we're called from the epilogue, we need to avoid counting
10409 this as a store. */
10411 push_topmost_sequence ();
10412 top
= get_insns ();
10413 pop_topmost_sequence ();
10414 reg
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
10416 for (insn
= NEXT_INSN (top
); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
10420 if (FIND_REG_INC_NOTE (insn
, reg
))
10422 else if (GET_CODE (insn
) == CALL_INSN
10423 && !SIBLING_CALL_P (insn
))
10425 else if (set_of (reg
, insn
) != NULL_RTX
10426 && !prologue_epilogue_contains (insn
))
10433 /* Add a REG_MAYBE_DEAD note to the insn. */
10435 rs6000_maybe_dead (insn
)
10438 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
,
10443 /* Emit instructions needed to load the TOC register.
10444 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
10445 a constant pool; or for SVR4 -fpic. */
10448 rs6000_emit_load_toc_table (fromprolog
)
10452 dest
= gen_rtx_REG (Pmode
, RS6000_PIC_OFFSET_TABLE_REGNUM
);
10454 if (TARGET_ELF
&& DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1)
10456 rtx temp
= (fromprolog
10457 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
10458 : gen_reg_rtx (Pmode
));
10459 insn
= emit_insn (gen_load_toc_v4_pic_si (temp
));
10461 rs6000_maybe_dead (insn
);
10462 insn
= emit_move_insn (dest
, temp
);
10464 rs6000_maybe_dead (insn
);
10466 else if (TARGET_ELF
&& DEFAULT_ABI
!= ABI_AIX
&& flag_pic
== 2)
10469 rtx tempLR
= (fromprolog
10470 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
10471 : gen_reg_rtx (Pmode
));
10472 rtx temp0
= (fromprolog
10473 ? gen_rtx_REG (Pmode
, 0)
10474 : gen_reg_rtx (Pmode
));
10477 /* possibly create the toc section */
10478 if (! toc_initialized
)
10481 function_section (current_function_decl
);
10488 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCF", rs6000_pic_labelno
);
10489 symF
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
10491 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCL", rs6000_pic_labelno
);
10492 symL
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
10494 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR
,
10496 rs6000_maybe_dead (emit_move_insn (dest
, tempLR
));
10497 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0
, dest
,
10504 static int reload_toc_labelno
= 0;
10506 tocsym
= gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
);
10508 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCG", reload_toc_labelno
++);
10509 symF
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
10511 emit_insn (gen_load_toc_v4_PIC_1b (tempLR
, symF
, tocsym
));
10512 emit_move_insn (dest
, tempLR
);
10513 emit_move_insn (temp0
, gen_rtx_MEM (Pmode
, dest
));
10515 insn
= emit_insn (gen_addsi3 (dest
, temp0
, dest
));
10517 rs6000_maybe_dead (insn
);
10519 else if (TARGET_ELF
&& !TARGET_AIX
&& flag_pic
== 0 && TARGET_MINIMAL_TOC
)
10521 /* This is for AIX code running in non-PIC ELF32. */
10524 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCTOC", 1);
10525 realsym
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
10527 insn
= emit_insn (gen_elf_high (dest
, realsym
));
10529 rs6000_maybe_dead (insn
);
10530 insn
= emit_insn (gen_elf_low (dest
, dest
, realsym
));
10532 rs6000_maybe_dead (insn
);
10534 else if (DEFAULT_ABI
== ABI_AIX
)
10537 insn
= emit_insn (gen_load_toc_aix_si (dest
));
10539 insn
= emit_insn (gen_load_toc_aix_di (dest
));
10541 rs6000_maybe_dead (insn
);
/* Return (allocating on first use) the alias set for TOC references.
   NOTE(review): reconstructed from a garbled extraction; the lazy-init
   `if`/`return` lines were restored from the canonical sources.  */

int
get_TOC_alias_set ()
{
  static int set = -1;
  if (set == -1)
    set = new_alias_set ();
  return set;
}
10556 /* This retuns nonzero if the current function uses the TOC. This is
10557 determined by the presence of (unspec ... UNSPEC_TOC) or
10558 use (unspec ... UNSPEC_TOC), which are generated by the various
10559 load_toc_* patterns. */
10566 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
10569 rtx pat
= PATTERN (insn
);
10572 if (GET_CODE (pat
) == PARALLEL
)
10573 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
10575 rtx sub
= XVECEXP (pat
, 0, i
);
10576 if (GET_CODE (sub
) == USE
)
10578 sub
= XEXP (sub
, 0);
10579 if (GET_CODE (sub
) == UNSPEC
10580 && XINT (sub
, 1) == UNSPEC_TOC
)
10589 create_TOC_reference (symbol
)
10592 return gen_rtx_PLUS (Pmode
,
10593 gen_rtx_REG (Pmode
, TOC_REGISTER
),
10594 gen_rtx_CONST (Pmode
,
10595 gen_rtx_MINUS (Pmode
, symbol
,
10596 gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
))));
10599 /* __throw will restore its own return address to be the same as the
10600 return address of the function that the throw is being made to.
10601 This is unfortunate, because we want to check the original
10602 return address to see if we need to restore the TOC.
10603 So we have to squirrel it away here.
10604 This is used only in compiling __throw and __rethrow.
10606 Most of this code should be removed by CSE. */
10607 static rtx insn_after_throw
;
10609 /* This does the saving... */
10611 rs6000_aix_emit_builtin_unwind_init ()
10614 rtx stack_top
= gen_reg_rtx (Pmode
);
10615 rtx opcode_addr
= gen_reg_rtx (Pmode
);
10617 insn_after_throw
= gen_reg_rtx (SImode
);
10619 mem
= gen_rtx_MEM (Pmode
, hard_frame_pointer_rtx
);
10620 emit_move_insn (stack_top
, mem
);
10622 mem
= gen_rtx_MEM (Pmode
,
10623 gen_rtx_PLUS (Pmode
, stack_top
,
10624 GEN_INT (2 * GET_MODE_SIZE (Pmode
))));
10625 emit_move_insn (opcode_addr
, mem
);
10626 emit_move_insn (insn_after_throw
, gen_rtx_MEM (SImode
, opcode_addr
));
10629 /* Emit insns to _restore_ the TOC register, at runtime (specifically
10630 in _eh.o). Only used on AIX.
10632 The idea is that on AIX, function calls look like this:
10633 bl somefunction-trampoline
10637 somefunction-trampoline:
10639 ... load function address in the count register ...
10641 or like this, if the linker determines that this is not a cross-module call
10642 and so the TOC need not be restored:
10645 or like this, if the compiler could determine that this is not a
10648 now, the tricky bit here is that register 2 is saved and restored
10649 by the _linker_, so we can't readily generate debugging information
10650 for it. So we need to go back up the call chain looking at the
10651 insns at return addresses to see which calls saved the TOC register
10652 and so see where it gets restored from.
10654 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
10655 just before the actual epilogue.
10657 On the bright side, this incurs no space or time overhead unless an
10658 exception is thrown, except for the extra code in libgcc.a.
10660 The parameter STACKSIZE is a register containing (at runtime)
10661 the amount to be popped off the stack in addition to the stack frame
10662 of this routine (which will be __throw or __rethrow, and so is
10663 guaranteed to have a stack frame). */
10666 rs6000_emit_eh_toc_restore (stacksize
)
10670 rtx bottom_of_stack
= gen_reg_rtx (Pmode
);
10671 rtx tocompare
= gen_reg_rtx (SImode
);
10672 rtx opcode
= gen_reg_rtx (SImode
);
10673 rtx opcode_addr
= gen_reg_rtx (Pmode
);
10675 rtx loop_start
= gen_label_rtx ();
10676 rtx no_toc_restore_needed
= gen_label_rtx ();
10677 rtx loop_exit
= gen_label_rtx ();
10679 mem
= gen_rtx_MEM (Pmode
, hard_frame_pointer_rtx
);
10680 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10681 emit_move_insn (bottom_of_stack
, mem
);
10683 top_of_stack
= expand_binop (Pmode
, add_optab
,
10684 bottom_of_stack
, stacksize
,
10685 NULL_RTX
, 1, OPTAB_WIDEN
);
10687 emit_move_insn (tocompare
, gen_int_mode (TARGET_32BIT
? 0x80410014
10688 : 0xE8410028, SImode
));
10690 if (insn_after_throw
== NULL_RTX
)
10692 emit_move_insn (opcode
, insn_after_throw
);
10694 emit_note (NULL
, NOTE_INSN_LOOP_BEG
);
10695 emit_label (loop_start
);
10697 do_compare_rtx_and_jump (opcode
, tocompare
, NE
, 1,
10698 SImode
, NULL_RTX
, NULL_RTX
,
10699 no_toc_restore_needed
);
10701 mem
= gen_rtx_MEM (Pmode
,
10702 gen_rtx_PLUS (Pmode
, bottom_of_stack
,
10703 GEN_INT (5 * GET_MODE_SIZE (Pmode
))));
10704 emit_move_insn (gen_rtx_REG (Pmode
, 2), mem
);
10706 emit_label (no_toc_restore_needed
);
10707 do_compare_rtx_and_jump (top_of_stack
, bottom_of_stack
, EQ
, 1,
10708 Pmode
, NULL_RTX
, NULL_RTX
,
10711 mem
= gen_rtx_MEM (Pmode
, bottom_of_stack
);
10712 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10713 emit_move_insn (bottom_of_stack
, mem
);
10715 mem
= gen_rtx_MEM (Pmode
,
10716 gen_rtx_PLUS (Pmode
, bottom_of_stack
,
10717 GEN_INT (2 * GET_MODE_SIZE (Pmode
))));
10718 emit_move_insn (opcode_addr
, mem
);
10719 emit_move_insn (opcode
, gen_rtx_MEM (SImode
, opcode_addr
));
10721 emit_note (NULL
, NOTE_INSN_LOOP_CONT
);
10722 emit_jump (loop_start
);
10723 emit_note (NULL
, NOTE_INSN_LOOP_END
);
10724 emit_label (loop_exit
);
10727 /* This ties together stack memory (MEM with an alias set of
10728 rs6000_sr_alias_set) and the change to the stack pointer. */
10731 rs6000_emit_stack_tie ()
10733 rtx mem
= gen_rtx_MEM (BLKmode
, gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
));
10735 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10736 emit_insn (gen_stack_tie (mem
));
10739 /* Emit the correct code for allocating stack space, as insns.
10740 If COPY_R12, make sure a copy of the old frame is left in r12.
10741 The generated code may use hard register 0 as a temporary. */
10744 rs6000_emit_allocate_stack (size
, copy_r12
)
10745 HOST_WIDE_INT size
;
10749 rtx stack_reg
= gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
10750 rtx tmp_reg
= gen_rtx_REG (Pmode
, 0);
10751 rtx todec
= GEN_INT (-size
);
10753 if (current_function_limit_stack
)
10755 if (REG_P (stack_limit_rtx
)
10756 && REGNO (stack_limit_rtx
) > 1
10757 && REGNO (stack_limit_rtx
) <= 31)
10759 emit_insn (TARGET_32BIT
10760 ? gen_addsi3 (tmp_reg
,
10763 : gen_adddi3 (tmp_reg
,
10767 emit_insn (gen_cond_trap (LTU
, stack_reg
, tmp_reg
,
10770 else if (GET_CODE (stack_limit_rtx
) == SYMBOL_REF
10772 && DEFAULT_ABI
== ABI_V4
)
10774 rtx toload
= gen_rtx_CONST (VOIDmode
,
10775 gen_rtx_PLUS (Pmode
,
10779 emit_insn (gen_elf_high (tmp_reg
, toload
));
10780 emit_insn (gen_elf_low (tmp_reg
, tmp_reg
, toload
));
10781 emit_insn (gen_cond_trap (LTU
, stack_reg
, tmp_reg
,
10785 warning ("stack limit expression is not supported");
10788 if (copy_r12
|| ! TARGET_UPDATE
)
10789 emit_move_insn (gen_rtx_REG (Pmode
, 12), stack_reg
);
10795 /* Need a note here so that try_split doesn't get confused. */
10796 if (get_last_insn() == NULL_RTX
)
10797 emit_note (0, NOTE_INSN_DELETED
);
10798 insn
= emit_move_insn (tmp_reg
, todec
);
10799 try_split (PATTERN (insn
), insn
, 0);
10803 insn
= emit_insn (TARGET_32BIT
10804 ? gen_movsi_update (stack_reg
, stack_reg
,
10806 : gen_movdi_update (stack_reg
, stack_reg
,
10807 todec
, stack_reg
));
10811 insn
= emit_insn (TARGET_32BIT
10812 ? gen_addsi3 (stack_reg
, stack_reg
, todec
)
10813 : gen_adddi3 (stack_reg
, stack_reg
, todec
));
10814 emit_move_insn (gen_rtx_MEM (Pmode
, stack_reg
),
10815 gen_rtx_REG (Pmode
, 12));
10818 RTX_FRAME_RELATED_P (insn
) = 1;
10820 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
10821 gen_rtx_SET (VOIDmode
, stack_reg
,
10822 gen_rtx_PLUS (Pmode
, stack_reg
,
10827 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
10828 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
10829 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
10830 deduce these equivalences by itself so it wasn't necessary to hold
10831 its hand so much. */
10834 rs6000_frame_related (insn
, reg
, val
, reg2
, rreg
)
10843 /* copy_rtx will not make unique copies of registers, so we need to
10844 ensure we don't have unwanted sharing here. */
10846 reg
= gen_raw_REG (GET_MODE (reg
), REGNO (reg
));
10849 reg
= gen_raw_REG (GET_MODE (reg
), REGNO (reg
));
10851 real
= copy_rtx (PATTERN (insn
));
10853 if (reg2
!= NULL_RTX
)
10854 real
= replace_rtx (real
, reg2
, rreg
);
10856 real
= replace_rtx (real
, reg
,
10857 gen_rtx_PLUS (Pmode
, gen_rtx_REG (Pmode
,
10858 STACK_POINTER_REGNUM
),
10861 /* We expect that 'real' is either a SET or a PARALLEL containing
10862 SETs (and possibly other stuff). In a PARALLEL, all the SETs
10863 are important so they all have to be marked RTX_FRAME_RELATED_P. */
10865 if (GET_CODE (real
) == SET
)
10869 temp
= simplify_rtx (SET_SRC (set
));
10871 SET_SRC (set
) = temp
;
10872 temp
= simplify_rtx (SET_DEST (set
));
10874 SET_DEST (set
) = temp
;
10875 if (GET_CODE (SET_DEST (set
)) == MEM
)
10877 temp
= simplify_rtx (XEXP (SET_DEST (set
), 0));
10879 XEXP (SET_DEST (set
), 0) = temp
;
10882 else if (GET_CODE (real
) == PARALLEL
)
10885 for (i
= 0; i
< XVECLEN (real
, 0); i
++)
10886 if (GET_CODE (XVECEXP (real
, 0, i
)) == SET
)
10888 rtx set
= XVECEXP (real
, 0, i
);
10890 temp
= simplify_rtx (SET_SRC (set
));
10892 SET_SRC (set
) = temp
;
10893 temp
= simplify_rtx (SET_DEST (set
));
10895 SET_DEST (set
) = temp
;
10896 if (GET_CODE (SET_DEST (set
)) == MEM
)
10898 temp
= simplify_rtx (XEXP (SET_DEST (set
), 0));
10900 XEXP (SET_DEST (set
), 0) = temp
;
10902 RTX_FRAME_RELATED_P (set
) = 1;
10909 real
= spe_synthesize_frame_save (real
);
10911 RTX_FRAME_RELATED_P (insn
) = 1;
10912 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
10917 /* Given an SPE frame note, return a PARALLEL of SETs with the
10918 original note, plus a synthetic register save. */
10921 spe_synthesize_frame_save (real
)
10924 rtx synth
, offset
, reg
, real2
;
10926 if (GET_CODE (real
) != SET
10927 || GET_MODE (SET_SRC (real
)) != V2SImode
)
10930 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
10931 frame related note. The parallel contains a set of the register
10932 being saved, and another set to a synthetic register (n+1200).
10933 This is so we can differentiate between 64-bit and 32-bit saves.
10934 Words cannot describe this nastiness. */
10936 if (GET_CODE (SET_DEST (real
)) != MEM
10937 || GET_CODE (XEXP (SET_DEST (real
), 0)) != PLUS
10938 || GET_CODE (SET_SRC (real
)) != REG
)
10942 (set (mem (plus (reg x) (const y)))
10945 (set (mem (plus (reg x) (const y+4)))
10949 real2
= copy_rtx (real
);
10950 PUT_MODE (SET_DEST (real2
), SImode
);
10951 reg
= SET_SRC (real2
);
10952 real2
= replace_rtx (real2
, reg
, gen_rtx_REG (SImode
, REGNO (reg
)));
10953 synth
= copy_rtx (real2
);
10955 if (BYTES_BIG_ENDIAN
)
10957 offset
= XEXP (XEXP (SET_DEST (real2
), 0), 1);
10958 real2
= replace_rtx (real2
, offset
, GEN_INT (INTVAL (offset
) + 4));
10961 reg
= SET_SRC (synth
);
10963 synth
= replace_rtx (synth
, reg
,
10964 gen_rtx_REG (SImode
, REGNO (reg
) + 1200));
10966 offset
= XEXP (XEXP (SET_DEST (synth
), 0), 1);
10967 synth
= replace_rtx (synth
, offset
,
10968 GEN_INT (INTVAL (offset
)
10969 + (BYTES_BIG_ENDIAN
? 0 : 4)));
10971 RTX_FRAME_RELATED_P (synth
) = 1;
10972 RTX_FRAME_RELATED_P (real2
) = 1;
10973 if (BYTES_BIG_ENDIAN
)
10974 real
= gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, synth
, real2
));
10976 real
= gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, real2
, synth
));
10981 /* Returns an insn that has a vrsave set operation with the
10982 appropriate CLOBBERs. */
10985 generate_set_vrsave (reg
, info
, epiloguep
)
10987 rs6000_stack_t
*info
;
10991 rtx insn
, clobs
[TOTAL_ALTIVEC_REGS
+ 1];
10992 rtx vrsave
= gen_rtx_REG (SImode
, VRSAVE_REGNO
);
10995 = gen_rtx_SET (VOIDmode
,
10997 gen_rtx_UNSPEC_VOLATILE (SImode
,
10998 gen_rtvec (2, reg
, vrsave
),
11003 /* We need to clobber the registers in the mask so the scheduler
11004 does not move sets to VRSAVE before sets of AltiVec registers.
11006 However, if the function receives nonlocal gotos, reload will set
11007 all call saved registers live. We will end up with:
11009 (set (reg 999) (mem))
11010 (parallel [ (set (reg vrsave) (unspec blah))
11011 (clobber (reg 999))])
11013 The clobber will cause the store into reg 999 to be dead, and
11014 flow will attempt to delete an epilogue insn. In this case, we
11015 need an unspec use/set of the register. */
11017 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
11018 if (info
->vrsave_mask
!= 0 && ALTIVEC_REG_BIT (i
) != 0)
11020 if (!epiloguep
|| call_used_regs
[i
])
11021 clobs
[nclobs
++] = gen_rtx_CLOBBER (VOIDmode
,
11022 gen_rtx_REG (V4SImode
, i
));
11025 rtx reg
= gen_rtx_REG (V4SImode
, i
);
11028 = gen_rtx_SET (VOIDmode
,
11030 gen_rtx_UNSPEC (V4SImode
,
11031 gen_rtvec (1, reg
), 27));
11035 insn
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (nclobs
));
11037 for (i
= 0; i
< nclobs
; ++i
)
11038 XVECEXP (insn
, 0, i
) = clobs
[i
];
11043 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
11044 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
11047 emit_frame_save (frame_reg
, frame_ptr
, mode
, regno
, offset
, total_size
)
11050 enum machine_mode mode
;
11051 unsigned int regno
;
11055 rtx reg
, offset_rtx
, insn
, mem
, addr
, int_rtx
;
11056 rtx replacea
, replaceb
;
11058 int_rtx
= GEN_INT (offset
);
11060 /* Some cases that need register indexed addressing. */
11061 if ((TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
11063 && SPE_VECTOR_MODE (mode
)
11064 && !SPE_CONST_OFFSET_OK (offset
)))
11066 /* Whomever calls us must make sure r11 is available in the
11067 flow path of instructions in the prologue. */
11068 offset_rtx
= gen_rtx_REG (Pmode
, 11);
11069 emit_move_insn (offset_rtx
, int_rtx
);
11071 replacea
= offset_rtx
;
11072 replaceb
= int_rtx
;
11076 offset_rtx
= int_rtx
;
11077 replacea
= NULL_RTX
;
11078 replaceb
= NULL_RTX
;
11081 reg
= gen_rtx_REG (mode
, regno
);
11082 addr
= gen_rtx_PLUS (Pmode
, frame_reg
, offset_rtx
);
11083 mem
= gen_rtx_MEM (mode
, addr
);
11084 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11086 insn
= emit_move_insn (mem
, reg
);
11088 rs6000_frame_related (insn
, frame_ptr
, total_size
, replacea
, replaceb
);
11091 /* Emit an offset memory reference suitable for a frame store, while
11092 converting to a valid addressing mode. */
11095 gen_frame_mem_offset (mode
, reg
, offset
)
11096 enum machine_mode mode
;
11100 rtx int_rtx
, offset_rtx
;
11102 int_rtx
= GEN_INT (offset
);
11104 if (TARGET_SPE_ABI
&& SPE_VECTOR_MODE (mode
))
11106 offset_rtx
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
11107 emit_move_insn (offset_rtx
, int_rtx
);
11110 offset_rtx
= int_rtx
;
11112 return gen_rtx_MEM (mode
, gen_rtx_PLUS (Pmode
, reg
, offset_rtx
));
11115 /* Emit function prologue as insns. */
11118 rs6000_emit_prologue ()
11120 rs6000_stack_t
*info
= rs6000_stack_info ();
11121 enum machine_mode reg_mode
= TARGET_POWERPC64
? DImode
: SImode
;
11122 int reg_size
= TARGET_POWERPC64
? 8 : 4;
11123 rtx sp_reg_rtx
= gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
11124 rtx frame_ptr_rtx
= gen_rtx_REG (Pmode
, 12);
11125 rtx frame_reg_rtx
= sp_reg_rtx
;
11126 rtx cr_save_rtx
= NULL
;
11128 int saving_FPRs_inline
;
11129 int using_store_multiple
;
11130 HOST_WIDE_INT sp_offset
= 0;
11132 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
11134 reg_mode
= V2SImode
;
11138 using_store_multiple
= (TARGET_MULTIPLE
&& ! TARGET_POWERPC64
11139 && (!TARGET_SPE_ABI
11140 || info
->spe_64bit_regs_used
== 0)
11141 && info
->first_gp_reg_save
< 31);
11142 saving_FPRs_inline
= (info
->first_fp_reg_save
== 64
11143 || FP_SAVE_INLINE (info
->first_fp_reg_save
));
11145 /* For V.4, update stack before we do any saving and set back pointer. */
11146 if (info
->push_p
&& DEFAULT_ABI
== ABI_V4
)
11148 if (info
->total_size
< 32767)
11149 sp_offset
= info
->total_size
;
11151 frame_reg_rtx
= frame_ptr_rtx
;
11152 rs6000_emit_allocate_stack (info
->total_size
,
11153 (frame_reg_rtx
!= sp_reg_rtx
11154 && (info
->cr_save_p
11156 || info
->first_fp_reg_save
< 64
11157 || info
->first_gp_reg_save
< 32
11159 if (frame_reg_rtx
!= sp_reg_rtx
)
11160 rs6000_emit_stack_tie ();
11163 /* Save AltiVec registers if needed. */
11164 if (TARGET_ALTIVEC_ABI
&& info
->altivec_size
!= 0)
11168 /* There should be a non inline version of this, for when we
11169 are saving lots of vector registers. */
11170 for (i
= info
->first_altivec_reg_save
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
11171 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
11173 rtx areg
, savereg
, mem
;
11176 offset
= info
->altivec_save_offset
+ sp_offset
11177 + 16 * (i
- info
->first_altivec_reg_save
);
11179 savereg
= gen_rtx_REG (V4SImode
, i
);
11181 areg
= gen_rtx_REG (Pmode
, 0);
11182 emit_move_insn (areg
, GEN_INT (offset
));
11184 /* AltiVec addressing mode is [reg+reg]. */
11185 mem
= gen_rtx_MEM (V4SImode
,
11186 gen_rtx_PLUS (Pmode
, frame_reg_rtx
, areg
));
11188 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11190 insn
= emit_move_insn (mem
, savereg
);
11192 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
11193 areg
, GEN_INT (offset
));
11197 /* VRSAVE is a bit vector representing which AltiVec registers
11198 are used. The OS uses this to determine which vector
11199 registers to save on a context switch. We need to save
11200 VRSAVE on the stack frame, add whatever AltiVec registers we
11201 used in this function, and do the corresponding magic in the
11204 if (TARGET_ALTIVEC
&& info
->vrsave_mask
!= 0)
11206 rtx reg
, mem
, vrsave
;
11209 /* Get VRSAVE onto a GPR. */
11210 reg
= gen_rtx_REG (SImode
, 12);
11211 vrsave
= gen_rtx_REG (SImode
, VRSAVE_REGNO
);
11213 emit_insn (gen_get_vrsave_internal (reg
));
11215 emit_insn (gen_rtx_SET (VOIDmode
, reg
, vrsave
));
11218 offset
= info
->vrsave_save_offset
+ sp_offset
;
11220 = gen_rtx_MEM (SImode
,
11221 gen_rtx_PLUS (Pmode
, frame_reg_rtx
, GEN_INT (offset
)));
11222 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11223 insn
= emit_move_insn (mem
, reg
);
11225 /* Include the registers in the mask. */
11226 emit_insn (gen_iorsi3 (reg
, reg
, GEN_INT ((int) info
->vrsave_mask
)));
11228 insn
= emit_insn (generate_set_vrsave (reg
, info
, 0));
11231 /* If we use the link register, get it into r0. */
11232 if (info
->lr_save_p
)
11233 emit_move_insn (gen_rtx_REG (Pmode
, 0),
11234 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
11236 /* If we need to save CR, put it into r12. */
11237 if (info
->cr_save_p
&& frame_reg_rtx
!= frame_ptr_rtx
)
11239 cr_save_rtx
= gen_rtx_REG (SImode
, 12);
11240 emit_insn (gen_movesi_from_cr (cr_save_rtx
));
11243 /* Do any required saving of fpr's. If only one or two to save, do
11244 it ourselves. Otherwise, call function. */
11245 if (saving_FPRs_inline
)
11248 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
11249 if ((regs_ever_live
[info
->first_fp_reg_save
+i
]
11250 && ! call_used_regs
[info
->first_fp_reg_save
+i
]))
11251 emit_frame_save (frame_reg_rtx
, frame_ptr_rtx
, DFmode
,
11252 info
->first_fp_reg_save
+ i
,
11253 info
->fp_save_offset
+ sp_offset
+ 8 * i
,
11256 else if (info
->first_fp_reg_save
!= 64)
11260 const char *alloc_rname
;
11262 p
= rtvec_alloc (2 + 64 - info
->first_fp_reg_save
);
11264 RTVEC_ELT (p
, 0) = gen_rtx_CLOBBER (VOIDmode
,
11265 gen_rtx_REG (Pmode
,
11266 LINK_REGISTER_REGNUM
));
11267 sprintf (rname
, "%s%d%s", SAVE_FP_PREFIX
,
11268 info
->first_fp_reg_save
- 32, SAVE_FP_SUFFIX
);
11269 alloc_rname
= ggc_strdup (rname
);
11270 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
,
11271 gen_rtx_SYMBOL_REF (Pmode
,
11273 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
11275 rtx addr
, reg
, mem
;
11276 reg
= gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
);
11277 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11278 GEN_INT (info
->fp_save_offset
11279 + sp_offset
+ 8*i
));
11280 mem
= gen_rtx_MEM (DFmode
, addr
);
11281 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11283 RTVEC_ELT (p
, i
+ 2) = gen_rtx_SET (VOIDmode
, mem
, reg
);
11285 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
11286 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
11287 NULL_RTX
, NULL_RTX
);
11290 /* Save GPRs. This is done as a PARALLEL if we are using
11291 the store-multiple instructions. */
11292 if (using_store_multiple
)
11296 p
= rtvec_alloc (32 - info
->first_gp_reg_save
);
11297 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
11299 rtx addr
, reg
, mem
;
11300 reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
11301 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11302 GEN_INT (info
->gp_save_offset
11305 mem
= gen_rtx_MEM (reg_mode
, addr
);
11306 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11308 RTVEC_ELT (p
, i
) = gen_rtx_SET (VOIDmode
, mem
, reg
);
11310 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
11311 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
11312 NULL_RTX
, NULL_RTX
);
11317 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
11318 if ((regs_ever_live
[info
->first_gp_reg_save
+i
]
11319 && ! call_used_regs
[info
->first_gp_reg_save
+i
])
11320 || (i
+info
->first_gp_reg_save
== RS6000_PIC_OFFSET_TABLE_REGNUM
11321 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
!= 0)
11322 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))))
11324 rtx addr
, reg
, mem
;
11325 reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
11327 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
11329 int offset
= info
->spe_gp_save_offset
+ sp_offset
+ 8 * i
;
11332 if (!SPE_CONST_OFFSET_OK (offset
))
11334 b
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
11335 emit_move_insn (b
, GEN_INT (offset
));
11338 b
= GEN_INT (offset
);
11340 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, b
);
11341 mem
= gen_rtx_MEM (V2SImode
, addr
);
11342 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11343 insn
= emit_move_insn (mem
, reg
);
11345 if (GET_CODE (b
) == CONST_INT
)
11346 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
11347 NULL_RTX
, NULL_RTX
);
11349 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
11350 b
, GEN_INT (offset
));
11354 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11355 GEN_INT (info
->gp_save_offset
11358 mem
= gen_rtx_MEM (reg_mode
, addr
);
11359 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11361 insn
= emit_move_insn (mem
, reg
);
11362 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
11363 NULL_RTX
, NULL_RTX
);
11368 /* ??? There's no need to emit actual instructions here, but it's the
11369 easiest way to get the frame unwind information emitted. */
11370 if (current_function_calls_eh_return
)
11372 unsigned int i
, regno
;
11376 regno
= EH_RETURN_DATA_REGNO (i
);
11377 if (regno
== INVALID_REGNUM
)
11380 emit_frame_save (frame_reg_rtx
, frame_ptr_rtx
, reg_mode
, regno
,
11381 info
->ehrd_offset
+ sp_offset
11382 + reg_size
* (int) i
,
11387 /* Save lr if we used it. */
11388 if (info
->lr_save_p
)
11390 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11391 GEN_INT (info
->lr_save_offset
+ sp_offset
));
11392 rtx reg
= gen_rtx_REG (Pmode
, 0);
11393 rtx mem
= gen_rtx_MEM (Pmode
, addr
);
11394 /* This should not be of rs6000_sr_alias_set, because of
11395 __builtin_return_address. */
11397 insn
= emit_move_insn (mem
, reg
);
11398 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
11399 reg
, gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
11402 /* Save CR if we use any that must be preserved. */
11403 if (info
->cr_save_p
)
11405 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11406 GEN_INT (info
->cr_save_offset
+ sp_offset
));
11407 rtx mem
= gen_rtx_MEM (SImode
, addr
);
11409 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11411 /* If r12 was used to hold the original sp, copy cr into r0 now
11413 if (REGNO (frame_reg_rtx
) == 12)
11415 cr_save_rtx
= gen_rtx_REG (SImode
, 0);
11416 emit_insn (gen_movesi_from_cr (cr_save_rtx
));
11418 insn
= emit_move_insn (mem
, cr_save_rtx
);
11420 /* Now, there's no way that dwarf2out_frame_debug_expr is going
11421 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
11422 But that's OK. All we have to do is specify that _one_ condition
11423 code register is saved in this stack slot. The thrower's epilogue
11424 will then restore all the call-saved registers.
11425 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
11426 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
11427 cr_save_rtx
, gen_rtx_REG (SImode
, CR2_REGNO
));
11430 /* Update stack and set back pointer unless this is V.4,
11431 for which it was done previously. */
11432 if (info
->push_p
&& DEFAULT_ABI
!= ABI_V4
)
11433 rs6000_emit_allocate_stack (info
->total_size
, FALSE
);
11435 /* Set frame pointer, if needed. */
11436 if (frame_pointer_needed
)
11438 insn
= emit_move_insn (gen_rtx_REG (Pmode
, FRAME_POINTER_REGNUM
),
11440 RTX_FRAME_RELATED_P (insn
) = 1;
11443 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
11444 if ((TARGET_TOC
&& TARGET_MINIMAL_TOC
&& get_pool_size () != 0)
11445 || (DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1
11446 && regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
]))
11448 /* If emit_load_toc_table will use the link register, we need to save
11449 it. We use R12 for this purpose because emit_load_toc_table
11450 can use register 0. This allows us to use a plain 'blr' to return
11451 from the procedure more often. */
11452 int save_LR_around_toc_setup
= (TARGET_ELF
11453 && DEFAULT_ABI
!= ABI_AIX
11455 && ! info
->lr_save_p
11456 && EXIT_BLOCK_PTR
->pred
!= NULL
);
11457 if (save_LR_around_toc_setup
)
11459 rtx lr
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
11460 rs6000_maybe_dead (emit_move_insn (frame_ptr_rtx
, lr
));
11461 rs6000_emit_load_toc_table (TRUE
);
11462 rs6000_maybe_dead (emit_move_insn (lr
, frame_ptr_rtx
));
11465 rs6000_emit_load_toc_table (TRUE
);
11469 if (DEFAULT_ABI
== ABI_DARWIN
11470 && flag_pic
&& current_function_uses_pic_offset_table
)
11472 rtx dest
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
11473 const char *picbase
= machopic_function_base_name ();
11474 rtx src
= gen_rtx_SYMBOL_REF (Pmode
, picbase
);
11476 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest
, src
)));
11478 rs6000_maybe_dead (
11479 emit_move_insn (gen_rtx_REG (Pmode
, RS6000_PIC_OFFSET_TABLE_REGNUM
),
11480 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)));
11485 /* Write function prologue. */
11488 rs6000_output_function_prologue (file
, size
)
11490 HOST_WIDE_INT size ATTRIBUTE_UNUSED
;
11492 rs6000_stack_t
*info
= rs6000_stack_info ();
11494 if (TARGET_DEBUG_STACK
)
11495 debug_stack_info (info
);
11497 /* Write .extern for any function we will call to save and restore
11499 if (info
->first_fp_reg_save
< 64
11500 && !FP_SAVE_INLINE (info
->first_fp_reg_save
))
11501 fprintf (file
, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
11502 SAVE_FP_PREFIX
, info
->first_fp_reg_save
- 32, SAVE_FP_SUFFIX
,
11503 RESTORE_FP_PREFIX
, info
->first_fp_reg_save
- 32,
11504 RESTORE_FP_SUFFIX
);
11506 /* Write .extern for AIX common mode routines, if needed. */
11507 if (! TARGET_POWER
&& ! TARGET_POWERPC
&& ! common_mode_defined
)
11509 fputs ("\t.extern __mulh\n", file
);
11510 fputs ("\t.extern __mull\n", file
);
11511 fputs ("\t.extern __divss\n", file
);
11512 fputs ("\t.extern __divus\n", file
);
11513 fputs ("\t.extern __quoss\n", file
);
11514 fputs ("\t.extern __quous\n", file
);
11515 common_mode_defined
= 1;
11518 if (! HAVE_prologue
)
11522 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
11523 the "toplevel" insn chain. */
11524 emit_note (0, NOTE_INSN_DELETED
);
11525 rs6000_emit_prologue ();
11526 emit_note (0, NOTE_INSN_DELETED
);
11528 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11532 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
11534 INSN_ADDRESSES_NEW (insn
, addr
);
11539 if (TARGET_DEBUG_STACK
)
11540 debug_rtx_list (get_insns (), 100);
11541 final (get_insns (), file
, FALSE
, FALSE
);
11545 rs6000_pic_labelno
++;
11548 /* Emit function epilogue as insns.
11550 At present, dwarf2out_frame_debug_expr doesn't understand
11551 register restores, so we don't bother setting RTX_FRAME_RELATED_P
11552 anywhere in the epilogue. Most of the insns below would in any case
11553 need special notes to explain where r11 is in relation to the stack. */
11556 rs6000_emit_epilogue (sibcall
)
11559 rs6000_stack_t
*info
;
11560 int restoring_FPRs_inline
;
11561 int using_load_multiple
;
11562 int using_mfcr_multiple
;
11563 int use_backchain_to_restore_sp
;
11565 rtx sp_reg_rtx
= gen_rtx_REG (Pmode
, 1);
11566 rtx frame_reg_rtx
= sp_reg_rtx
;
11567 enum machine_mode reg_mode
= TARGET_POWERPC64
? DImode
: SImode
;
11568 int reg_size
= TARGET_POWERPC64
? 8 : 4;
11571 info
= rs6000_stack_info ();
11573 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
11575 reg_mode
= V2SImode
;
11579 using_load_multiple
= (TARGET_MULTIPLE
&& ! TARGET_POWERPC64
11580 && (!TARGET_SPE_ABI
11581 || info
->spe_64bit_regs_used
== 0)
11582 && info
->first_gp_reg_save
< 31);
11583 restoring_FPRs_inline
= (sibcall
11584 || current_function_calls_eh_return
11585 || info
->first_fp_reg_save
== 64
11586 || FP_SAVE_INLINE (info
->first_fp_reg_save
));
11587 use_backchain_to_restore_sp
= (frame_pointer_needed
11588 || current_function_calls_alloca
11589 || info
->total_size
> 32767);
11590 using_mfcr_multiple
= (rs6000_cpu
== PROCESSOR_PPC601
11591 || rs6000_cpu
== PROCESSOR_PPC603
11592 || rs6000_cpu
== PROCESSOR_PPC750
11595 /* If we have a frame pointer, a call to alloca, or a large stack
11596 frame, restore the old stack pointer using the backchain. Otherwise,
11597 we know what size to update it with. */
11598 if (use_backchain_to_restore_sp
)
11600 /* Under V.4, don't reset the stack pointer until after we're done
11601 loading the saved registers. */
11602 if (DEFAULT_ABI
== ABI_V4
)
11603 frame_reg_rtx
= gen_rtx_REG (Pmode
, 11);
11605 emit_move_insn (frame_reg_rtx
,
11606 gen_rtx_MEM (Pmode
, sp_reg_rtx
));
11609 else if (info
->push_p
)
11611 if (DEFAULT_ABI
== ABI_V4
)
11612 sp_offset
= info
->total_size
;
11615 emit_insn (TARGET_32BIT
11616 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
,
11617 GEN_INT (info
->total_size
))
11618 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
,
11619 GEN_INT (info
->total_size
)));
11623 /* Restore AltiVec registers if needed. */
11624 if (TARGET_ALTIVEC_ABI
&& info
->altivec_size
!= 0)
11628 for (i
= info
->first_altivec_reg_save
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
11629 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
11631 rtx addr
, areg
, mem
;
11633 areg
= gen_rtx_REG (Pmode
, 0);
11635 (areg
, GEN_INT (info
->altivec_save_offset
11637 + 16 * (i
- info
->first_altivec_reg_save
)));
11639 /* AltiVec addressing mode is [reg+reg]. */
11640 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, areg
);
11641 mem
= gen_rtx_MEM (V4SImode
, addr
);
11642 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11644 emit_move_insn (gen_rtx_REG (V4SImode
, i
), mem
);
11648 /* Restore VRSAVE if needed. */
11649 if (TARGET_ALTIVEC_ABI
&& info
->vrsave_mask
!= 0)
11651 rtx addr
, mem
, reg
;
11653 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11654 GEN_INT (info
->vrsave_save_offset
+ sp_offset
));
11655 mem
= gen_rtx_MEM (SImode
, addr
);
11656 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11657 reg
= gen_rtx_REG (SImode
, 12);
11658 emit_move_insn (reg
, mem
);
11660 emit_insn (generate_set_vrsave (reg
, info
, 1));
11663 /* Get the old lr if we saved it. */
11664 if (info
->lr_save_p
)
11666 rtx mem
= gen_frame_mem_offset (Pmode
, frame_reg_rtx
,
11667 info
->lr_save_offset
+ sp_offset
);
11669 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11671 emit_move_insn (gen_rtx_REG (Pmode
, 0), mem
);
11674 /* Get the old cr if we saved it. */
11675 if (info
->cr_save_p
)
11677 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11678 GEN_INT (info
->cr_save_offset
+ sp_offset
));
11679 rtx mem
= gen_rtx_MEM (SImode
, addr
);
11681 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11683 emit_move_insn (gen_rtx_REG (SImode
, 12), mem
);
11686 /* Set LR here to try to overlap restores below. */
11687 if (info
->lr_save_p
)
11688 emit_move_insn (gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
),
11689 gen_rtx_REG (Pmode
, 0));
11691 /* Load exception handler data registers, if needed. */
11692 if (current_function_calls_eh_return
)
11694 unsigned int i
, regno
;
11700 regno
= EH_RETURN_DATA_REGNO (i
);
11701 if (regno
== INVALID_REGNUM
)
11704 mem
= gen_frame_mem_offset (reg_mode
, frame_reg_rtx
,
11705 info
->ehrd_offset
+ sp_offset
11706 + reg_size
* (int) i
);
11707 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11709 emit_move_insn (gen_rtx_REG (reg_mode
, regno
), mem
);
11713 /* Restore GPRs. This is done as a PARALLEL if we are using
11714 the load-multiple instructions. */
11715 if (using_load_multiple
)
11718 p
= rtvec_alloc (32 - info
->first_gp_reg_save
);
11719 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
11721 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11722 GEN_INT (info
->gp_save_offset
11725 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
11727 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11730 gen_rtx_SET (VOIDmode
,
11731 gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
),
11734 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
11737 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
11738 if ((regs_ever_live
[info
->first_gp_reg_save
+i
]
11739 && ! call_used_regs
[info
->first_gp_reg_save
+i
])
11740 || (i
+info
->first_gp_reg_save
== RS6000_PIC_OFFSET_TABLE_REGNUM
11741 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
!= 0)
11742 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))))
11744 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11745 GEN_INT (info
->gp_save_offset
11748 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
11750 /* Restore 64-bit quantities for SPE. */
11751 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
11753 int offset
= info
->spe_gp_save_offset
+ sp_offset
+ 8 * i
;
11756 if (!SPE_CONST_OFFSET_OK (offset
))
11758 b
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
11759 emit_move_insn (b
, GEN_INT (offset
));
11762 b
= GEN_INT (offset
);
11764 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, b
);
11765 mem
= gen_rtx_MEM (V2SImode
, addr
);
11768 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11770 emit_move_insn (gen_rtx_REG (reg_mode
,
11771 info
->first_gp_reg_save
+ i
), mem
);
11774 /* Restore fpr's if we need to do it without calling a function. */
11775 if (restoring_FPRs_inline
)
11776 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
11777 if ((regs_ever_live
[info
->first_fp_reg_save
+i
]
11778 && ! call_used_regs
[info
->first_fp_reg_save
+i
]))
11781 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11782 GEN_INT (info
->fp_save_offset
11785 mem
= gen_rtx_MEM (DFmode
, addr
);
11786 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11788 emit_move_insn (gen_rtx_REG (DFmode
,
11789 info
->first_fp_reg_save
+ i
),
11793 /* If we saved cr, restore it here. Just those that were used. */
11794 if (info
->cr_save_p
)
11796 rtx r12_rtx
= gen_rtx_REG (SImode
, 12);
11799 if (using_mfcr_multiple
)
11801 for (i
= 0; i
< 8; i
++)
11802 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
11808 if (using_mfcr_multiple
&& count
> 1)
11813 p
= rtvec_alloc (count
);
11816 for (i
= 0; i
< 8; i
++)
11817 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
11819 rtvec r
= rtvec_alloc (2);
11820 RTVEC_ELT (r
, 0) = r12_rtx
;
11821 RTVEC_ELT (r
, 1) = GEN_INT (1 << (7-i
));
11822 RTVEC_ELT (p
, ndx
) =
11823 gen_rtx_SET (VOIDmode
, gen_rtx_REG (CCmode
, CR0_REGNO
+i
),
11824 gen_rtx_UNSPEC (CCmode
, r
, UNSPEC_MOVESI_TO_CR
));
11827 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
11832 for (i
= 0; i
< 8; i
++)
11833 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
11835 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode
,
11841 /* If this is V.4, unwind the stack pointer after all of the loads
11842 have been done. We need to emit a block here so that sched
11843 doesn't decide to move the sp change before the register restores
11844 (which may not have any obvious dependency on the stack). This
11845 doesn't hurt performance, because there is no scheduling that can
11846 be done after this point. */
11847 if (DEFAULT_ABI
== ABI_V4
)
11849 if (frame_reg_rtx
!= sp_reg_rtx
)
11850 rs6000_emit_stack_tie ();
11852 if (use_backchain_to_restore_sp
)
11854 emit_move_insn (sp_reg_rtx
, frame_reg_rtx
);
11856 else if (sp_offset
!= 0)
11858 emit_insn (TARGET_32BIT
11859 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
,
11860 GEN_INT (sp_offset
))
11861 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
,
11862 GEN_INT (sp_offset
)));
11866 if (current_function_calls_eh_return
)
11868 rtx sa
= EH_RETURN_STACKADJ_RTX
;
11869 emit_insn (TARGET_32BIT
11870 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
, sa
)
11871 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
, sa
));
11877 if (! restoring_FPRs_inline
)
11878 p
= rtvec_alloc (3 + 64 - info
->first_fp_reg_save
);
11880 p
= rtvec_alloc (2);
11882 RTVEC_ELT (p
, 0) = gen_rtx_RETURN (VOIDmode
);
11883 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
,
11884 gen_rtx_REG (Pmode
,
11885 LINK_REGISTER_REGNUM
));
11887 /* If we have to restore more than two FP registers, branch to the
11888 restore function. It will return to our caller. */
11889 if (! restoring_FPRs_inline
)
11893 const char *alloc_rname
;
11895 sprintf (rname
, "%s%d%s", RESTORE_FP_PREFIX
,
11896 info
->first_fp_reg_save
- 32, RESTORE_FP_SUFFIX
);
11897 alloc_rname
= ggc_strdup (rname
);
11898 RTVEC_ELT (p
, 2) = gen_rtx_USE (VOIDmode
,
11899 gen_rtx_SYMBOL_REF (Pmode
,
11902 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
11905 addr
= gen_rtx_PLUS (Pmode
, sp_reg_rtx
,
11906 GEN_INT (info
->fp_save_offset
+ 8*i
));
11907 mem
= gen_rtx_MEM (DFmode
, addr
);
11908 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11910 RTVEC_ELT (p
, i
+3) =
11911 gen_rtx_SET (VOIDmode
,
11912 gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
),
11917 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
11921 /* Write function epilogue. */
11924 rs6000_output_function_epilogue (file
, size
)
11926 HOST_WIDE_INT size ATTRIBUTE_UNUSED
;
11928 rs6000_stack_t
*info
= rs6000_stack_info ();
11930 if (! HAVE_epilogue
)
11932 rtx insn
= get_last_insn ();
11933 /* If the last insn was a BARRIER, we don't have to write anything except
11934 the trace table. */
11935 if (GET_CODE (insn
) == NOTE
)
11936 insn
= prev_nonnote_insn (insn
);
11937 if (insn
== 0 || GET_CODE (insn
) != BARRIER
)
11939 /* This is slightly ugly, but at least we don't have two
11940 copies of the epilogue-emitting code. */
11943 /* A NOTE_INSN_DELETED is supposed to be at the start
11944 and end of the "toplevel" insn chain. */
11945 emit_note (0, NOTE_INSN_DELETED
);
11946 rs6000_emit_epilogue (FALSE
);
11947 emit_note (0, NOTE_INSN_DELETED
);
11949 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11953 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
11955 INSN_ADDRESSES_NEW (insn
, addr
);
11960 if (TARGET_DEBUG_STACK
)
11961 debug_rtx_list (get_insns (), 100);
11962 final (get_insns (), file
, FALSE
, FALSE
);
11967 /* Output a traceback table here. See /usr/include/sys/debug.h for info
11970 We don't output a traceback table if -finhibit-size-directive was
11971 used. The documentation for -finhibit-size-directive reads
11972 ``don't output a @code{.size} assembler directive, or anything
11973 else that would cause trouble if the function is split in the
11974 middle, and the two halves are placed at locations far apart in
11975 memory.'' The traceback table has this property, since it
11976 includes the offset from the start of the function to the
11977 traceback table itself.
11979 System V.4 Powerpc's (and the embedded ABI derived from it) use a
11980 different traceback table. */
11981 if (DEFAULT_ABI
== ABI_AIX
&& ! flag_inhibit_size_directive
11982 && rs6000_traceback
!= traceback_none
)
11984 const char *fname
= NULL
;
11985 const char *language_string
= lang_hooks
.name
;
11986 int fixed_parms
= 0, float_parms
= 0, parm_info
= 0;
11988 int optional_tbtab
;
11990 if (rs6000_traceback
== traceback_full
)
11991 optional_tbtab
= 1;
11992 else if (rs6000_traceback
== traceback_part
)
11993 optional_tbtab
= 0;
11995 optional_tbtab
= !optimize_size
&& !TARGET_ELF
;
11997 if (optional_tbtab
)
11999 fname
= XSTR (XEXP (DECL_RTL (current_function_decl
), 0), 0);
12000 while (*fname
== '.') /* V.4 encodes . in the name */
12003 /* Need label immediately before tbtab, so we can compute
12004 its offset from the function start. */
12005 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LT");
12006 ASM_OUTPUT_LABEL (file
, fname
);
12009 /* The .tbtab pseudo-op can only be used for the first eight
12010 expressions, since it can't handle the possibly variable
12011 length fields that follow. However, if you omit the optional
12012 fields, the assembler outputs zeros for all optional fields
12013 anyways, giving each variable length field is minimum length
12014 (as defined in sys/debug.h). Thus we can not use the .tbtab
12015 pseudo-op at all. */
12017 /* An all-zero word flags the start of the tbtab, for debuggers
12018 that have to find it by searching forward from the entry
12019 point or from the current pc. */
12020 fputs ("\t.long 0\n", file
);
12022 /* Tbtab format type. Use format type 0. */
12023 fputs ("\t.byte 0,", file
);
12025 /* Language type. Unfortunately, there doesn't seem to be any
12026 official way to get this info, so we use language_string. C
12027 is 0. C++ is 9. No number defined for Obj-C, so use the
12028 value for C for now. There is no official value for Java,
12029 although IBM appears to be using 13. There is no official value
12030 for Chill, so we've chosen 44 pseudo-randomly. */
12031 if (! strcmp (language_string
, "GNU C")
12032 || ! strcmp (language_string
, "GNU Objective-C"))
12034 else if (! strcmp (language_string
, "GNU F77"))
12036 else if (! strcmp (language_string
, "GNU Ada"))
12038 else if (! strcmp (language_string
, "GNU Pascal"))
12040 else if (! strcmp (language_string
, "GNU C++"))
12042 else if (! strcmp (language_string
, "GNU Java"))
12044 else if (! strcmp (language_string
, "GNU CHILL"))
12048 fprintf (file
, "%d,", i
);
12050 /* 8 single bit fields: global linkage (not set for C extern linkage,
12051 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
12052 from start of procedure stored in tbtab, internal function, function
12053 has controlled storage, function has no toc, function uses fp,
12054 function logs/aborts fp operations. */
12055 /* Assume that fp operations are used if any fp reg must be saved. */
12056 fprintf (file
, "%d,",
12057 (optional_tbtab
<< 5) | ((info
->first_fp_reg_save
!= 64) << 1));
12059 /* 6 bitfields: function is interrupt handler, name present in
12060 proc table, function calls alloca, on condition directives
12061 (controls stack walks, 3 bits), saves condition reg, saves
12063 /* The `function calls alloca' bit seems to be set whenever reg 31 is
12064 set up as a frame pointer, even when there is no alloca call. */
12065 fprintf (file
, "%d,",
12066 ((optional_tbtab
<< 6)
12067 | ((optional_tbtab
& frame_pointer_needed
) << 5)
12068 | (info
->cr_save_p
<< 1)
12069 | (info
->lr_save_p
)));
12071 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
12073 fprintf (file
, "%d,",
12074 (info
->push_p
<< 7) | (64 - info
->first_fp_reg_save
));
12076 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
12077 fprintf (file
, "%d,", (32 - first_reg_to_save ()));
12079 if (optional_tbtab
)
12081 /* Compute the parameter info from the function decl argument
12084 int next_parm_info_bit
= 31;
12086 for (decl
= DECL_ARGUMENTS (current_function_decl
);
12087 decl
; decl
= TREE_CHAIN (decl
))
12089 rtx parameter
= DECL_INCOMING_RTL (decl
);
12090 enum machine_mode mode
= GET_MODE (parameter
);
12092 if (GET_CODE (parameter
) == REG
)
12094 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
)
12100 if (mode
== SFmode
)
12102 else if (mode
== DFmode
|| mode
== TFmode
)
12107 /* If only one bit will fit, don't or in this entry. */
12108 if (next_parm_info_bit
> 0)
12109 parm_info
|= (bits
<< (next_parm_info_bit
- 1));
12110 next_parm_info_bit
-= 2;
12114 fixed_parms
+= ((GET_MODE_SIZE (mode
)
12115 + (UNITS_PER_WORD
- 1))
12117 next_parm_info_bit
-= 1;
12123 /* Number of fixed point parameters. */
12124 /* This is actually the number of words of fixed point parameters; thus
12125 an 8 byte struct counts as 2; and thus the maximum value is 8. */
12126 fprintf (file
, "%d,", fixed_parms
);
12128 /* 2 bitfields: number of floating point parameters (7 bits), parameters
12130 /* This is actually the number of fp registers that hold parameters;
12131 and thus the maximum value is 13. */
12132 /* Set parameters on stack bit if parameters are not in their original
12133 registers, regardless of whether they are on the stack? Xlc
12134 seems to set the bit when not optimizing. */
12135 fprintf (file
, "%d\n", ((float_parms
<< 1) | (! optimize
)));
12137 if (! optional_tbtab
)
12140 /* Optional fields follow. Some are variable length. */
12142 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
12143 11 double float. */
12144 /* There is an entry for each parameter in a register, in the order that
12145 they occur in the parameter list. Any intervening arguments on the
12146 stack are ignored. If the list overflows a long (max possible length
12147 34 bits) then completely leave off all elements that don't fit. */
12148 /* Only emit this long if there was at least one parameter. */
12149 if (fixed_parms
|| float_parms
)
12150 fprintf (file
, "\t.long %d\n", parm_info
);
12152 /* Offset from start of code to tb table. */
12153 fputs ("\t.long ", file
);
12154 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LT");
12156 RS6000_OUTPUT_BASENAME (file
, fname
);
12158 assemble_name (file
, fname
);
12160 fputs ("-.", file
);
12162 RS6000_OUTPUT_BASENAME (file
, fname
);
12164 assemble_name (file
, fname
);
12168 /* Interrupt handler mask. */
12169 /* Omit this long, since we never set the interrupt handler bit
12172 /* Number of CTL (controlled storage) anchors. */
12173 /* Omit this long, since the has_ctl bit is never set above. */
12175 /* Displacement into stack of each CTL anchor. */
12176 /* Omit this list of longs, because there are no CTL anchors. */
12178 /* Length of function name. */
12181 fprintf (file
, "\t.short %d\n", (int) strlen (fname
));
12183 /* Function name. */
12184 assemble_string (fname
, strlen (fname
));
12186 /* Register for alloca automatic storage; this is always reg 31.
12187 Only emit this if the alloca bit was set above. */
12188 if (frame_pointer_needed
)
12189 fputs ("\t.byte 31\n", file
);
12191 fputs ("\t.align 2\n", file
);
12195 /* A C compound statement that outputs the assembler code for a thunk
12196 function, used to implement C++ virtual function calls with
12197 multiple inheritance. The thunk acts as a wrapper around a virtual
12198 function, adjusting the implicit object parameter before handing
12199 control off to the real function.
12201 First, emit code to add the integer DELTA to the location that
12202 contains the incoming first argument. Assume that this argument
12203 contains a pointer, and is the one used to pass the `this' pointer
12204 in C++. This is the incoming argument *before* the function
12205 prologue, e.g. `%o0' on a sparc. The addition must preserve the
12206 values of all other incoming arguments.
12208 After the addition, emit code to jump to FUNCTION, which is a
12209 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
12210 not touch the return address. Hence returning from FUNCTION will
12211 return to whoever called the current `thunk'.
12213 The effect must be as if FUNCTION had been called directly with the
12214 adjusted first argument. This macro is responsible for emitting
12215 all of the code for a thunk function; output_function_prologue()
12216 and output_function_epilogue() are not invoked.
12218 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
12219 been extracted from it.) It might possibly be useful on some
12220 targets, but probably not.
12222 If you do not define this macro, the target-independent code in the
12223 C++ frontend will generate a less efficient heavyweight thunk that
12224 calls FUNCTION instead of jumping to it. The generic approach does
12225 not support varargs. */
12228 rs6000_output_mi_thunk (file
, thunk_fndecl
, delta
, vcall_offset
, function
)
12230 tree thunk_fndecl ATTRIBUTE_UNUSED
;
12231 HOST_WIDE_INT delta
;
12232 HOST_WIDE_INT vcall_offset
;
12235 rtx
this, insn
, funexp
;
12237 reload_completed
= 1;
12238 no_new_pseudos
= 1;
12240 /* Mark the end of the (empty) prologue. */
12241 emit_note (NULL
, NOTE_INSN_PROLOGUE_END
);
12243 /* Find the "this" pointer. If the function returns a structure,
12244 the structure return pointer is in r3. */
12245 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function
))))
12246 this = gen_rtx_REG (Pmode
, 4);
12248 this = gen_rtx_REG (Pmode
, 3);
12250 /* Apply the constant offset, if required. */
12253 rtx delta_rtx
= GEN_INT (delta
);
12254 emit_insn (TARGET_32BIT
12255 ? gen_addsi3 (this, this, delta_rtx
)
12256 : gen_adddi3 (this, this, delta_rtx
));
12259 /* Apply the offset from the vtable, if required. */
12262 rtx vcall_offset_rtx
= GEN_INT (vcall_offset
);
12263 rtx tmp
= gen_rtx_REG (Pmode
, 12);
12265 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, this));
12266 emit_insn (TARGET_32BIT
12267 ? gen_addsi3 (tmp
, tmp
, vcall_offset_rtx
)
12268 : gen_adddi3 (tmp
, tmp
, vcall_offset_rtx
));
12269 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, tmp
));
12270 emit_insn (TARGET_32BIT
12271 ? gen_addsi3 (this, this, tmp
)
12272 : gen_adddi3 (this, this, tmp
));
12275 /* Generate a tail call to the target function. */
12276 if (!TREE_USED (function
))
12278 assemble_external (function
);
12279 TREE_USED (function
) = 1;
12281 funexp
= XEXP (DECL_RTL (function
), 0);
12282 SYMBOL_REF_FLAGS (funexp
) &= ~SYMBOL_FLAG_LOCAL
;
12283 funexp
= gen_rtx_MEM (FUNCTION_MODE
, funexp
);
12286 if (MACHOPIC_INDIRECT
)
12287 funexp
= machopic_indirect_call_target (funexp
);
12290 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
12291 generate sibcall RTL explicitly to avoid constraint abort. */
12292 insn
= emit_call_insn (
12293 gen_rtx_PARALLEL (VOIDmode
,
12295 gen_rtx_CALL (VOIDmode
,
12296 funexp
, const0_rtx
),
12297 gen_rtx_USE (VOIDmode
, const0_rtx
),
12298 gen_rtx_USE (VOIDmode
,
12299 gen_rtx_REG (SImode
,
12300 LINK_REGISTER_REGNUM
)),
12301 gen_rtx_RETURN (VOIDmode
))));
12302 SIBLING_CALL_P (insn
) = 1;
12305 /* Run just enough of rest_of_compilation to get the insns emitted.
12306 There's not really enough bulk here to make other passes such as
12307 instruction scheduling worth while. Note that use_thunk calls
12308 assemble_start_function and assemble_end_function. */
12309 insn
= get_insns ();
12310 insn_locators_initialize ();
12311 shorten_branches (insn
);
12312 final_start_function (insn
, file
, 1);
12313 final (insn
, file
, 1, 0);
12314 final_end_function ();
12316 reload_completed
= 0;
12317 no_new_pseudos
= 0;
12320 /* A quick summary of the various types of 'constant-pool tables'
12323 Target Flags Name One table per
12324 AIX (none) AIX TOC object file
12325 AIX -mfull-toc AIX TOC object file
12326 AIX -mminimal-toc AIX minimal TOC translation unit
12327 SVR4/EABI (none) SVR4 SDATA object file
12328 SVR4/EABI -fpic SVR4 pic object file
12329 SVR4/EABI -fPIC SVR4 PIC translation unit
12330 SVR4/EABI -mrelocatable EABI TOC function
12331 SVR4/EABI -maix AIX TOC object file
12332 SVR4/EABI -maix -mminimal-toc
12333 AIX minimal TOC translation unit
12335 Name Reg. Set by entries contains:
12336 made by addrs? fp? sum?
12338 AIX TOC 2 crt0 as Y option option
12339 AIX minimal TOC 30 prolog gcc Y Y option
12340 SVR4 SDATA 13 crt0 gcc N Y N
12341 SVR4 pic 30 prolog ld Y not yet N
12342 SVR4 PIC 30 prolog gcc Y option option
12343 EABI TOC 30 prolog gcc Y option option
12347 /* Hash functions for the hash table. */
12350 rs6000_hash_constant (k
)
12353 enum rtx_code code
= GET_CODE (k
);
12354 enum machine_mode mode
= GET_MODE (k
);
12355 unsigned result
= (code
<< 3) ^ mode
;
12356 const char *format
;
12359 format
= GET_RTX_FORMAT (code
);
12360 flen
= strlen (format
);
12366 return result
* 1231 + (unsigned) INSN_UID (XEXP (k
, 0));
12369 if (mode
!= VOIDmode
)
12370 return real_hash (CONST_DOUBLE_REAL_VALUE (k
)) * result
;
12382 for (; fidx
< flen
; fidx
++)
12383 switch (format
[fidx
])
12388 const char *str
= XSTR (k
, fidx
);
12389 len
= strlen (str
);
12390 result
= result
* 613 + len
;
12391 for (i
= 0; i
< len
; i
++)
12392 result
= result
* 613 + (unsigned) str
[i
];
12397 result
= result
* 1231 + rs6000_hash_constant (XEXP (k
, fidx
));
12401 result
= result
* 613 + (unsigned) XINT (k
, fidx
);
12404 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT
))
12405 result
= result
* 613 + (unsigned) XWINT (k
, fidx
);
12409 for (i
= 0; i
< sizeof(HOST_WIDE_INT
)/sizeof(unsigned); i
++)
12410 result
= result
* 613 + (unsigned) (XWINT (k
, fidx
)
12424 toc_hash_function (hash_entry
)
12425 const void * hash_entry
;
12427 const struct toc_hash_struct
*thc
=
12428 (const struct toc_hash_struct
*) hash_entry
;
12429 return rs6000_hash_constant (thc
->key
) ^ thc
->key_mode
;
12432 /* Compare H1 and H2 for equivalence. */
12435 toc_hash_eq (h1
, h2
)
12439 rtx r1
= ((const struct toc_hash_struct
*) h1
)->key
;
12440 rtx r2
= ((const struct toc_hash_struct
*) h2
)->key
;
12442 if (((const struct toc_hash_struct
*) h1
)->key_mode
12443 != ((const struct toc_hash_struct
*) h2
)->key_mode
)
12446 return rtx_equal_p (r1
, r2
);
12449 /* These are the names given by the C++ front-end to vtables, and
12450 vtable-like objects. Ideally, this logic should not be here;
12451 instead, there should be some programmatic way of inquiring as
12452 to whether or not an object is a vtable. */
12454 #define VTABLE_NAME_P(NAME) \
12455 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
12456 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
12457 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
12458 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
12461 rs6000_output_symbol_ref (file
, x
)
12465 /* Currently C++ toc references to vtables can be emitted before it
12466 is decided whether the vtable is public or private. If this is
12467 the case, then the linker will eventually complain that there is
12468 a reference to an unknown section. Thus, for vtables only,
12469 we emit the TOC reference to reference the symbol and not the
12471 const char *name
= XSTR (x
, 0);
12473 if (VTABLE_NAME_P (name
))
12475 RS6000_OUTPUT_BASENAME (file
, name
);
12478 assemble_name (file
, name
);
12481 /* Output a TOC entry. We derive the entry name from what is being
12485 output_toc (file
, x
, labelno
, mode
)
12489 enum machine_mode mode
;
12492 const char *name
= buf
;
12493 const char *real_name
;
12500 /* When the linker won't eliminate them, don't output duplicate
12501 TOC entries (this happens on AIX if there is any kind of TOC,
12502 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
12504 if (TARGET_TOC
&& GET_CODE (x
) != LABEL_REF
)
12506 struct toc_hash_struct
*h
;
12509 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
12510 time because GGC is not initialised at that point. */
12511 if (toc_hash_table
== NULL
)
12512 toc_hash_table
= htab_create_ggc (1021, toc_hash_function
,
12513 toc_hash_eq
, NULL
);
12515 h
= ggc_alloc (sizeof (*h
));
12517 h
->key_mode
= mode
;
12518 h
->labelno
= labelno
;
12520 found
= htab_find_slot (toc_hash_table
, h
, 1);
12521 if (*found
== NULL
)
12523 else /* This is indeed a duplicate.
12524 Set this label equal to that label. */
12526 fputs ("\t.set ", file
);
12527 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LC");
12528 fprintf (file
, "%d,", labelno
);
12529 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LC");
12530 fprintf (file
, "%d\n", ((*(const struct toc_hash_struct
**)
12536 /* If we're going to put a double constant in the TOC, make sure it's
12537 aligned properly when strict alignment is on. */
12538 if (GET_CODE (x
) == CONST_DOUBLE
12539 && STRICT_ALIGNMENT
12540 && GET_MODE_BITSIZE (mode
) >= 64
12541 && ! (TARGET_NO_FP_IN_TOC
&& ! TARGET_MINIMAL_TOC
)) {
12542 ASM_OUTPUT_ALIGN (file
, 3);
12545 (*targetm
.asm_out
.internal_label
) (file
, "LC", labelno
);
12547 /* Handle FP constants specially. Note that if we have a minimal
12548 TOC, things we put here aren't actually in the TOC, so we can allow
12550 if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == TFmode
)
12552 REAL_VALUE_TYPE rv
;
12555 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
12556 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv
, k
);
12560 if (TARGET_MINIMAL_TOC
)
12561 fputs (DOUBLE_INT_ASM_OP
, file
);
12563 fprintf (file
, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
12564 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
12565 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
12566 fprintf (file
, "0x%lx%08lx,0x%lx%08lx\n",
12567 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
12568 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
12573 if (TARGET_MINIMAL_TOC
)
12574 fputs ("\t.long ", file
);
12576 fprintf (file
, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
12577 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
12578 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
12579 fprintf (file
, "0x%lx,0x%lx,0x%lx,0x%lx\n",
12580 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
12581 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
12585 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == DFmode
)
12587 REAL_VALUE_TYPE rv
;
12590 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
12591 REAL_VALUE_TO_TARGET_DOUBLE (rv
, k
);
12595 if (TARGET_MINIMAL_TOC
)
12596 fputs (DOUBLE_INT_ASM_OP
, file
);
12598 fprintf (file
, "\t.tc FD_%lx_%lx[TC],",
12599 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
12600 fprintf (file
, "0x%lx%08lx\n",
12601 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
12606 if (TARGET_MINIMAL_TOC
)
12607 fputs ("\t.long ", file
);
12609 fprintf (file
, "\t.tc FD_%lx_%lx[TC],",
12610 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
12611 fprintf (file
, "0x%lx,0x%lx\n",
12612 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
12616 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == SFmode
)
12618 REAL_VALUE_TYPE rv
;
12621 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
12622 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
12626 if (TARGET_MINIMAL_TOC
)
12627 fputs (DOUBLE_INT_ASM_OP
, file
);
12629 fprintf (file
, "\t.tc FS_%lx[TC],", l
& 0xffffffff);
12630 fprintf (file
, "0x%lx00000000\n", l
& 0xffffffff);
12635 if (TARGET_MINIMAL_TOC
)
12636 fputs ("\t.long ", file
);
12638 fprintf (file
, "\t.tc FS_%lx[TC],", l
& 0xffffffff);
12639 fprintf (file
, "0x%lx\n", l
& 0xffffffff);
12643 else if (GET_MODE (x
) == VOIDmode
12644 && (GET_CODE (x
) == CONST_INT
|| GET_CODE (x
) == CONST_DOUBLE
))
12646 unsigned HOST_WIDE_INT low
;
12647 HOST_WIDE_INT high
;
12649 if (GET_CODE (x
) == CONST_DOUBLE
)
12651 low
= CONST_DOUBLE_LOW (x
);
12652 high
= CONST_DOUBLE_HIGH (x
);
12655 #if HOST_BITS_PER_WIDE_INT == 32
12658 high
= (low
& 0x80000000) ? ~0 : 0;
12662 low
= INTVAL (x
) & 0xffffffff;
12663 high
= (HOST_WIDE_INT
) INTVAL (x
) >> 32;
12667 /* TOC entries are always Pmode-sized, but since this
12668 is a bigendian machine then if we're putting smaller
12669 integer constants in the TOC we have to pad them.
12670 (This is still a win over putting the constants in
12671 a separate constant pool, because then we'd have
12672 to have both a TOC entry _and_ the actual constant.)
12674 For a 32-bit target, CONST_INT values are loaded and shifted
12675 entirely within `low' and can be stored in one TOC entry. */
12677 if (TARGET_64BIT
&& POINTER_SIZE
< GET_MODE_BITSIZE (mode
))
12678 abort ();/* It would be easy to make this work, but it doesn't now. */
12680 if (POINTER_SIZE
> GET_MODE_BITSIZE (mode
))
12682 #if HOST_BITS_PER_WIDE_INT == 32
12683 lshift_double (low
, high
, POINTER_SIZE
- GET_MODE_BITSIZE (mode
),
12684 POINTER_SIZE
, &low
, &high
, 0);
12687 low
<<= POINTER_SIZE
- GET_MODE_BITSIZE (mode
);
12688 high
= (HOST_WIDE_INT
) low
>> 32;
12695 if (TARGET_MINIMAL_TOC
)
12696 fputs (DOUBLE_INT_ASM_OP
, file
);
12698 fprintf (file
, "\t.tc ID_%lx_%lx[TC],",
12699 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
12700 fprintf (file
, "0x%lx%08lx\n",
12701 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
12706 if (POINTER_SIZE
< GET_MODE_BITSIZE (mode
))
12708 if (TARGET_MINIMAL_TOC
)
12709 fputs ("\t.long ", file
);
12711 fprintf (file
, "\t.tc ID_%lx_%lx[TC],",
12712 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
12713 fprintf (file
, "0x%lx,0x%lx\n",
12714 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
12718 if (TARGET_MINIMAL_TOC
)
12719 fputs ("\t.long ", file
);
12721 fprintf (file
, "\t.tc IS_%lx[TC],", (long) low
& 0xffffffff);
12722 fprintf (file
, "0x%lx\n", (long) low
& 0xffffffff);
12728 if (GET_CODE (x
) == CONST
)
12730 if (GET_CODE (XEXP (x
, 0)) != PLUS
)
12733 base
= XEXP (XEXP (x
, 0), 0);
12734 offset
= INTVAL (XEXP (XEXP (x
, 0), 1));
12737 if (GET_CODE (base
) == SYMBOL_REF
)
12738 name
= XSTR (base
, 0);
12739 else if (GET_CODE (base
) == LABEL_REF
)
12740 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (XEXP (base
, 0)));
12741 else if (GET_CODE (base
) == CODE_LABEL
)
12742 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (base
));
12746 real_name
= (*targetm
.strip_name_encoding
) (name
);
12747 if (TARGET_MINIMAL_TOC
)
12748 fputs (TARGET_32BIT
? "\t.long " : DOUBLE_INT_ASM_OP
, file
);
12751 fprintf (file
, "\t.tc %s", real_name
);
12754 fprintf (file
, ".N%d", - offset
);
12756 fprintf (file
, ".P%d", offset
);
12758 fputs ("[TC],", file
);
12761 /* Currently C++ toc references to vtables can be emitted before it
12762 is decided whether the vtable is public or private. If this is
12763 the case, then the linker will eventually complain that there is
12764 a TOC reference to an unknown section. Thus, for vtables only,
12765 we emit the TOC reference to reference the symbol and not the
12767 if (VTABLE_NAME_P (name
))
12769 RS6000_OUTPUT_BASENAME (file
, name
);
12771 fprintf (file
, "%d", offset
);
12772 else if (offset
> 0)
12773 fprintf (file
, "+%d", offset
);
12776 output_addr_const (file
, x
);
12780 /* Output an assembler pseudo-op to write an ASCII string of N characters
12781 starting at P to FILE.
12783 On the RS/6000, we have to do this using the .byte operation and
12784 write out special characters outside the quoted string.
12785 Also, the assembler is broken; very long strings are truncated,
12786 so we must artificially break them up early. */
12789 output_ascii (file
, p
, n
)
12795 int i
, count_string
;
12796 const char *for_string
= "\t.byte \"";
12797 const char *for_decimal
= "\t.byte ";
12798 const char *to_close
= NULL
;
12801 for (i
= 0; i
< n
; i
++)
12804 if (c
>= ' ' && c
< 0177)
12807 fputs (for_string
, file
);
12810 /* Write two quotes to get one. */
12818 for_decimal
= "\"\n\t.byte ";
12822 if (count_string
>= 512)
12824 fputs (to_close
, file
);
12826 for_string
= "\t.byte \"";
12827 for_decimal
= "\t.byte ";
12835 fputs (for_decimal
, file
);
12836 fprintf (file
, "%d", c
);
12838 for_string
= "\n\t.byte \"";
12839 for_decimal
= ", ";
12845 /* Now close the string if we have written one. Then end the line. */
12847 fputs (to_close
, file
);
12850 /* Generate a unique section name for FILENAME for a section type
12851 represented by SECTION_DESC. Output goes into BUF.
12853 SECTION_DESC can be any string, as long as it is different for each
12854 possible section type.
12856 We name the section in the same manner as xlc. The name begins with an
12857 underscore followed by the filename (after stripping any leading directory
12858 names) with the last period replaced by the string SECTION_DESC. If
12859 FILENAME does not contain a period, SECTION_DESC is appended to the end of
12863 rs6000_gen_section_name (buf
, filename
, section_desc
)
12865 const char *filename
;
12866 const char *section_desc
;
12868 const char *q
, *after_last_slash
, *last_period
= 0;
12872 after_last_slash
= filename
;
12873 for (q
= filename
; *q
; q
++)
12876 after_last_slash
= q
+ 1;
12877 else if (*q
== '.')
12881 len
= strlen (after_last_slash
) + strlen (section_desc
) + 2;
12882 *buf
= (char *) xmalloc (len
);
12887 for (q
= after_last_slash
; *q
; q
++)
12889 if (q
== last_period
)
12891 strcpy (p
, section_desc
);
12892 p
+= strlen (section_desc
);
12896 else if (ISALNUM (*q
))
12900 if (last_period
== 0)
12901 strcpy (p
, section_desc
);
12906 /* Emit profile function. */
12909 output_profile_hook (labelno
)
12910 int labelno ATTRIBUTE_UNUSED
;
12912 if (TARGET_PROFILE_KERNEL
)
12915 if (DEFAULT_ABI
== ABI_AIX
)
12917 #ifndef NO_PROFILE_COUNTERS
12918 # define NO_PROFILE_COUNTERS 0
12920 if (NO_PROFILE_COUNTERS
)
12921 emit_library_call (init_one_libfunc (RS6000_MCOUNT
), 0, VOIDmode
, 0);
12925 const char *label_name
;
12928 ASM_GENERATE_INTERNAL_LABEL (buf
, "LP", labelno
);
12929 label_name
= (*targetm
.strip_name_encoding
) (ggc_strdup (buf
));
12930 fun
= gen_rtx_SYMBOL_REF (Pmode
, label_name
);
12932 emit_library_call (init_one_libfunc (RS6000_MCOUNT
), 0, VOIDmode
, 1,
12936 else if (DEFAULT_ABI
== ABI_DARWIN
)
12938 const char *mcount_name
= RS6000_MCOUNT
;
12939 int caller_addr_regno
= LINK_REGISTER_REGNUM
;
12941 /* Be conservative and always set this, at least for now. */
12942 current_function_uses_pic_offset_table
= 1;
12945 /* For PIC code, set up a stub and collect the caller's address
12946 from r0, which is where the prologue puts it. */
12947 if (MACHOPIC_INDIRECT
)
12949 mcount_name
= machopic_stub_name (mcount_name
);
12950 if (current_function_uses_pic_offset_table
)
12951 caller_addr_regno
= 0;
12954 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, mcount_name
),
12956 gen_rtx_REG (Pmode
, caller_addr_regno
), Pmode
);
12960 /* Write function profiler code. */
12963 output_function_profiler (file
, labelno
)
12970 switch (DEFAULT_ABI
)
12979 warning ("no profiling of 64-bit code for this ABI");
12982 ASM_GENERATE_INTERNAL_LABEL (buf
, "LP", labelno
);
12983 fprintf (file
, "\tmflr %s\n", reg_names
[0]);
12986 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file
);
12987 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12988 reg_names
[0], save_lr
, reg_names
[1]);
12989 asm_fprintf (file
, "\tmflr %s\n", reg_names
[12]);
12990 asm_fprintf (file
, "\t{l|lwz} %s,", reg_names
[0]);
12991 assemble_name (file
, buf
);
12992 asm_fprintf (file
, "@got(%s)\n", reg_names
[12]);
12994 else if (flag_pic
> 1)
12996 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12997 reg_names
[0], save_lr
, reg_names
[1]);
12998 /* Now, we need to get the address of the label. */
12999 fputs ("\tbl 1f\n\t.long ", file
);
13000 assemble_name (file
, buf
);
13001 fputs ("-.\n1:", file
);
13002 asm_fprintf (file
, "\tmflr %s\n", reg_names
[11]);
13003 asm_fprintf (file
, "\t{l|lwz} %s,0(%s)\n",
13004 reg_names
[0], reg_names
[11]);
13005 asm_fprintf (file
, "\t{cax|add} %s,%s,%s\n",
13006 reg_names
[0], reg_names
[0], reg_names
[11]);
13010 asm_fprintf (file
, "\t{liu|lis} %s,", reg_names
[12]);
13011 assemble_name (file
, buf
);
13012 fputs ("@ha\n", file
);
13013 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
13014 reg_names
[0], save_lr
, reg_names
[1]);
13015 asm_fprintf (file
, "\t{cal|la} %s,", reg_names
[0]);
13016 assemble_name (file
, buf
);
13017 asm_fprintf (file
, "@l(%s)\n", reg_names
[12]);
13020 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
13021 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
13026 if (!TARGET_PROFILE_KERNEL
)
13028 /* Don't do anything, done in output_profile_hook (). */
13035 asm_fprintf (file
, "\tmflr %s\n", reg_names
[0]);
13036 asm_fprintf (file
, "\tstd %s,16(%s)\n", reg_names
[0], reg_names
[1]);
13038 if (current_function_needs_context
)
13040 asm_fprintf (file
, "\tstd %s,24(%s)\n",
13041 reg_names
[STATIC_CHAIN_REGNUM
], reg_names
[1]);
13042 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
13043 asm_fprintf (file
, "\tld %s,24(%s)\n",
13044 reg_names
[STATIC_CHAIN_REGNUM
], reg_names
[1]);
13047 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
13055 rs6000_use_dfa_pipeline_interface ()
13060 /* Power4 load update and store update instructions are cracked into a
13061 load or store and an integer insn which are executed in the same cycle.
13062 Branches have their own dispatch slot which does not count against the
13063 GCC issue rate, but it changes the program flow so there are no other
13064 instructions to issue in this cycle. */
13067 rs6000_variable_issue (stream
, verbose
, insn
, more
)
13068 FILE *stream ATTRIBUTE_UNUSED
;
13069 int verbose ATTRIBUTE_UNUSED
;
13073 if (GET_CODE (PATTERN (insn
)) == USE
13074 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
13077 if (rs6000_cpu
== PROCESSOR_POWER4
)
13079 enum attr_type type
= get_attr_type (insn
);
13080 if (type
== TYPE_LOAD_EXT_U
|| type
== TYPE_LOAD_EXT_UX
13081 || type
== TYPE_LOAD_UX
|| type
== TYPE_STORE_UX
)
13083 else if (type
== TYPE_LOAD_U
|| type
== TYPE_STORE_U
13084 || type
== TYPE_FPLOAD_U
|| type
== TYPE_FPSTORE_U
13085 || type
== TYPE_FPLOAD_UX
|| type
== TYPE_FPSTORE_UX
13086 || type
== TYPE_LOAD_EXT
|| type
== TYPE_DELAYED_CR
13087 || type
== TYPE_COMPARE
|| type
== TYPE_DELAYED_COMPARE
13088 || type
== TYPE_IMUL_COMPARE
|| type
== TYPE_LMUL_COMPARE
13089 || type
== TYPE_IDIV
|| type
== TYPE_LDIV
13090 || type
== TYPE_INSERT_WORD
)
13091 return more
> 2 ? more
- 2 : 0;
13097 /* Adjust the cost of a scheduling dependency. Return the new cost of
13098 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
13101 rs6000_adjust_cost (insn
, link
, dep_insn
, cost
)
13104 rtx dep_insn ATTRIBUTE_UNUSED
;
13107 if (! recog_memoized (insn
))
13110 if (REG_NOTE_KIND (link
) != 0)
13113 if (REG_NOTE_KIND (link
) == 0)
13115 /* Data dependency; DEP_INSN writes a register that INSN reads
13116 some cycles later. */
13117 switch (get_attr_type (insn
))
13120 /* Tell the first scheduling pass about the latency between
13121 a mtctr and bctr (and mtlr and br/blr). The first
13122 scheduling pass will not know about this latency since
13123 the mtctr instruction, which has the latency associated
13124 to it, will be generated by reload. */
13125 return TARGET_POWER
? 5 : 4;
13127 /* Leave some extra cycles between a compare and its
13128 dependent branch, to inhibit expensive mispredicts. */
13129 if ((rs6000_cpu_attr
== CPU_PPC603
13130 || rs6000_cpu_attr
== CPU_PPC604
13131 || rs6000_cpu_attr
== CPU_PPC604E
13132 || rs6000_cpu_attr
== CPU_PPC620
13133 || rs6000_cpu_attr
== CPU_PPC630
13134 || rs6000_cpu_attr
== CPU_PPC750
13135 || rs6000_cpu_attr
== CPU_PPC7400
13136 || rs6000_cpu_attr
== CPU_PPC7450
13137 || rs6000_cpu_attr
== CPU_POWER4
)
13138 && recog_memoized (dep_insn
)
13139 && (INSN_CODE (dep_insn
) >= 0)
13140 && (get_attr_type (dep_insn
) == TYPE_CMP
13141 || get_attr_type (dep_insn
) == TYPE_COMPARE
13142 || get_attr_type (dep_insn
) == TYPE_DELAYED_COMPARE
13143 || get_attr_type (dep_insn
) == TYPE_IMUL_COMPARE
13144 || get_attr_type (dep_insn
) == TYPE_LMUL_COMPARE
13145 || get_attr_type (dep_insn
) == TYPE_FPCOMPARE
13146 || get_attr_type (dep_insn
) == TYPE_CR_LOGICAL
13147 || get_attr_type (dep_insn
) == TYPE_DELAYED_CR
))
13152 /* Fall out to return default cost. */
13158 /* A C statement (sans semicolon) to update the integer scheduling
13159 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
13160 INSN earlier, increase the priority to execute INSN later. Do not
13161 define this macro if you do not need to adjust the scheduling
13162 priorities of insns. */
13165 rs6000_adjust_priority (insn
, priority
)
13166 rtx insn ATTRIBUTE_UNUSED
;
13169 /* On machines (like the 750) which have asymmetric integer units,
13170 where one integer unit can do multiply and divides and the other
13171 can't, reduce the priority of multiply/divide so it is scheduled
13172 before other integer operations. */
13175 if (! INSN_P (insn
))
13178 if (GET_CODE (PATTERN (insn
)) == USE
)
13181 switch (rs6000_cpu_attr
) {
13183 switch (get_attr_type (insn
))
13190 fprintf (stderr
, "priority was %#x (%d) before adjustment\n",
13191 priority
, priority
);
13192 if (priority
>= 0 && priority
< 0x01000000)
13202 /* Return how many instructions the machine can issue per cycle. */
13205 rs6000_issue_rate ()
13207 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
13208 if (!reload_completed
)
13211 switch (rs6000_cpu_attr
) {
13212 case CPU_RIOS1
: /* ? */
13214 case CPU_PPC601
: /* ? */
13235 /* Return how many instructions to look ahead for better insn
13239 rs6000_use_sched_lookahead ()
13241 if (rs6000_cpu_attr
== CPU_PPC8540
)
13247 /* Length in units of the trampoline for entering a nested function. */
13250 rs6000_trampoline_size ()
13254 switch (DEFAULT_ABI
)
13260 ret
= (TARGET_32BIT
) ? 12 : 24;
13265 ret
= (TARGET_32BIT
) ? 40 : 48;
13272 /* Emit RTL insns to initialize the variable parts of a trampoline.
13273 FNADDR is an RTX for the address of the function's pure code.
13274 CXT is an RTX for the static chain value for the function. */
13277 rs6000_initialize_trampoline (addr
, fnaddr
, cxt
)
13282 enum machine_mode pmode
= Pmode
;
13283 int regsize
= (TARGET_32BIT
) ? 4 : 8;
13284 rtx ctx_reg
= force_reg (pmode
, cxt
);
13286 switch (DEFAULT_ABI
)
13291 /* Macros to shorten the code expansions below. */
13292 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
13293 #define MEM_PLUS(addr,offset) \
13294 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
13296 /* Under AIX, just build the 3 word function descriptor */
13299 rtx fn_reg
= gen_reg_rtx (pmode
);
13300 rtx toc_reg
= gen_reg_rtx (pmode
);
13301 emit_move_insn (fn_reg
, MEM_DEREF (fnaddr
));
13302 emit_move_insn (toc_reg
, MEM_PLUS (fnaddr
, regsize
));
13303 emit_move_insn (MEM_DEREF (addr
), fn_reg
);
13304 emit_move_insn (MEM_PLUS (addr
, regsize
), toc_reg
);
13305 emit_move_insn (MEM_PLUS (addr
, 2*regsize
), ctx_reg
);
13309 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
13312 emit_library_call (gen_rtx_SYMBOL_REF (SImode
, "__trampoline_setup"),
13313 FALSE
, VOIDmode
, 4,
13315 GEN_INT (rs6000_trampoline_size ()), SImode
,
13325 /* Table of valid machine attributes. */
13327 const struct attribute_spec rs6000_attribute_table
[] =
13329 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
13330 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute
},
13331 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute
},
13332 { NULL
, 0, 0, false, false, false, NULL
}
13335 /* Handle a "longcall" or "shortcall" attribute; arguments as in
13336 struct attribute_spec.handler. */
13339 rs6000_handle_longcall_attribute (node
, name
, args
, flags
, no_add_attrs
)
13342 tree args ATTRIBUTE_UNUSED
;
13343 int flags ATTRIBUTE_UNUSED
;
13344 bool *no_add_attrs
;
13346 if (TREE_CODE (*node
) != FUNCTION_TYPE
13347 && TREE_CODE (*node
) != FIELD_DECL
13348 && TREE_CODE (*node
) != TYPE_DECL
)
13350 warning ("`%s' attribute only applies to functions",
13351 IDENTIFIER_POINTER (name
));
13352 *no_add_attrs
= true;
13358 /* Set longcall attributes on all functions declared when
13359 rs6000_default_long_calls is true. */
13361 rs6000_set_default_type_attributes (type
)
13364 if (rs6000_default_long_calls
13365 && (TREE_CODE (type
) == FUNCTION_TYPE
13366 || TREE_CODE (type
) == METHOD_TYPE
))
13367 TYPE_ATTRIBUTES (type
) = tree_cons (get_identifier ("longcall"),
13369 TYPE_ATTRIBUTES (type
));
13372 /* Return a reference suitable for calling a function with the
13373 longcall attribute. */
13376 rs6000_longcall_ref (call_ref
)
13379 const char *call_name
;
13382 if (GET_CODE (call_ref
) != SYMBOL_REF
)
13385 /* System V adds '.' to the internal name, so skip them. */
13386 call_name
= XSTR (call_ref
, 0);
13387 if (*call_name
== '.')
13389 while (*call_name
== '.')
13392 node
= get_identifier (call_name
);
13393 call_ref
= gen_rtx_SYMBOL_REF (VOIDmode
, IDENTIFIER_POINTER (node
));
13396 return force_reg (Pmode
, call_ref
);
13399 #ifdef USING_ELFOS_H
13401 /* A C statement or statements to switch to the appropriate section
13402 for output of RTX in mode MODE. You can assume that RTX is some
13403 kind of constant in RTL. The argument MODE is redundant except in
13404 the case of a `const_int' rtx. Select the section by calling
13405 `text_section' or one of the alternatives for other sections.
13407 Do not define this macro if you put all constants in the read-only
13411 rs6000_elf_select_rtx_section (mode
, x
, align
)
13412 enum machine_mode mode
;
13414 unsigned HOST_WIDE_INT align
;
13416 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x
, mode
))
13419 default_elf_select_rtx_section (mode
, x
, align
);
13422 /* A C statement or statements to switch to the appropriate
13423 section for output of DECL. DECL is either a `VAR_DECL' node
13424 or a constant of some sort. RELOC indicates whether forming
13425 the initial value of DECL requires link-time relocations. */
13428 rs6000_elf_select_section (decl
, reloc
, align
)
13431 unsigned HOST_WIDE_INT align
;
13433 /* Pretend that we're always building for a shared library when
13434 ABI_AIX, because otherwise we end up with dynamic relocations
13435 in read-only sections. This happens for function pointers,
13436 references to vtables in typeinfo, and probably other cases. */
13437 default_elf_select_section_1 (decl
, reloc
, align
,
13438 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
13441 /* A C statement to build up a unique section name, expressed as a
13442 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
13443 RELOC indicates whether the initial value of EXP requires
13444 link-time relocations. If you do not define this macro, GCC will use
13445 the symbol name prefixed by `.' as the section name. Note - this
13446 macro can now be called for uninitialized data items as well as
13447 initialized data and functions. */
13450 rs6000_elf_unique_section (decl
, reloc
)
13454 /* As above, pretend that we're always building for a shared library
13455 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
13456 default_unique_section_1 (decl
, reloc
,
13457 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
13460 /* For a SYMBOL_REF, set generic flags and then perform some
13461 target-specific processing.
13463 When the AIX ABI is requested on a non-AIX system, replace the
13464 function name with the real name (with a leading .) rather than the
13465 function descriptor name. This saves a lot of overriding code to
13466 read the prefixes. */
13469 rs6000_elf_encode_section_info (decl
, rtl
, first
)
13474 default_encode_section_info (decl
, rtl
, first
);
13477 && TREE_CODE (decl
) == FUNCTION_DECL
13479 && DEFAULT_ABI
== ABI_AIX
)
13481 rtx sym_ref
= XEXP (rtl
, 0);
13482 size_t len
= strlen (XSTR (sym_ref
, 0));
13483 char *str
= alloca (len
+ 2);
13485 memcpy (str
+ 1, XSTR (sym_ref
, 0), len
+ 1);
13486 XSTR (sym_ref
, 0) = ggc_alloc_string (str
, len
+ 1);
13491 rs6000_elf_in_small_data_p (decl
)
13494 if (rs6000_sdata
== SDATA_NONE
)
13497 if (TREE_CODE (decl
) == VAR_DECL
&& DECL_SECTION_NAME (decl
))
13499 const char *section
= TREE_STRING_POINTER (DECL_SECTION_NAME (decl
));
13500 if (strcmp (section
, ".sdata") == 0
13501 || strcmp (section
, ".sdata2") == 0
13502 || strcmp (section
, ".sbss") == 0
13503 || strcmp (section
, ".sbss2") == 0
13504 || strcmp (section
, ".PPC.EMB.sdata0") == 0
13505 || strcmp (section
, ".PPC.EMB.sbss0") == 0)
13510 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (decl
));
13513 && (unsigned HOST_WIDE_INT
) size
<= g_switch_value
13514 /* If it's not public, and we're not going to reference it there,
13515 there's no need to put it in the small data section. */
13516 && (rs6000_sdata
!= SDATA_DATA
|| TREE_PUBLIC (decl
)))
13523 #endif /* USING_ELFOS_H */
13526 /* Return a REG that occurs in ADDR with coefficient 1.
13527 ADDR can be effectively incremented by incrementing REG.
13529 r0 is special and we must not select it as an address
13530 register by this routine since our caller will try to
13531 increment the returned register via an "la" instruction. */
13534 find_addr_reg (addr
)
13537 while (GET_CODE (addr
) == PLUS
)
13539 if (GET_CODE (XEXP (addr
, 0)) == REG
13540 && REGNO (XEXP (addr
, 0)) != 0)
13541 addr
= XEXP (addr
, 0);
13542 else if (GET_CODE (XEXP (addr
, 1)) == REG
13543 && REGNO (XEXP (addr
, 1)) != 0)
13544 addr
= XEXP (addr
, 1);
13545 else if (CONSTANT_P (XEXP (addr
, 0)))
13546 addr
= XEXP (addr
, 1);
13547 else if (CONSTANT_P (XEXP (addr
, 1)))
13548 addr
= XEXP (addr
, 0);
13552 if (GET_CODE (addr
) == REG
&& REGNO (addr
) != 0)
13558 rs6000_fatal_bad_address (op
)
13561 fatal_insn ("bad address", op
);
13567 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
13568 reference and a constant. */
13571 symbolic_operand (op
)
13574 switch (GET_CODE (op
))
13581 return (GET_CODE (op
) == SYMBOL_REF
||
13582 (GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
13583 || GET_CODE (XEXP (op
, 0)) == LABEL_REF
)
13584 && GET_CODE (XEXP (op
, 1)) == CONST_INT
);
13591 #ifdef RS6000_LONG_BRANCH
13593 static tree stub_list
= 0;
13595 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
13596 procedure calls to the linked list. */
13599 add_compiler_stub (label_name
, function_name
, line_number
)
13601 tree function_name
;
13604 tree stub
= build_tree_list (function_name
, label_name
);
13605 TREE_TYPE (stub
) = build_int_2 (line_number
, 0);
13606 TREE_CHAIN (stub
) = stub_list
;
13610 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
13611 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
13612 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
13614 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
13615 handling procedure calls from the linked list and initializes the
13619 output_compiler_stub ()
13622 char label_buf
[256];
13626 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
13628 fprintf (asm_out_file
,
13629 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub
)));
13631 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13632 if (write_symbols
== DBX_DEBUG
|| write_symbols
== XCOFF_DEBUG
)
13633 fprintf (asm_out_file
, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub
));
13634 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13636 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
))[0] == '*')
13638 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
))+1);
13641 label_buf
[0] = '_';
13642 strcpy (label_buf
+1,
13643 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
)));
13646 strcpy (tmp_buf
, "lis r12,hi16(");
13647 strcat (tmp_buf
, label_buf
);
13648 strcat (tmp_buf
, ")\n\tori r12,r12,lo16(");
13649 strcat (tmp_buf
, label_buf
);
13650 strcat (tmp_buf
, ")\n\tmtctr r12\n\tbctr");
13651 output_asm_insn (tmp_buf
, 0);
13653 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13654 if (write_symbols
== DBX_DEBUG
|| write_symbols
== XCOFF_DEBUG
)
13655 fprintf(asm_out_file
, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub
));
13656 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13662 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
13663 already there or not. */
13666 no_previous_def (function_name
)
13667 tree function_name
;
13670 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
13671 if (function_name
== STUB_FUNCTION_NAME (stub
))
13676 /* GET_PREV_LABEL gets the label name from the previous definition of
13680 get_prev_label (function_name
)
13681 tree function_name
;
13684 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
13685 if (function_name
== STUB_FUNCTION_NAME (stub
))
13686 return STUB_LABEL_NAME (stub
);
13690 /* INSN is either a function call or a millicode call. It may have an
13691 unconditional jump in its delay slot.
13693 CALL_DEST is the routine we are calling. */
13696 output_call (insn
, call_dest
, operand_number
)
13699 int operand_number
;
13701 static char buf
[256];
13702 if (GET_CODE (call_dest
) == SYMBOL_REF
&& TARGET_LONG_BRANCH
&& !flag_pic
)
13705 tree funname
= get_identifier (XSTR (call_dest
, 0));
13707 if (no_previous_def (funname
))
13709 int line_number
= 0;
13710 rtx label_rtx
= gen_label_rtx ();
13711 char *label_buf
, temp_buf
[256];
13712 ASM_GENERATE_INTERNAL_LABEL (temp_buf
, "L",
13713 CODE_LABEL_NUMBER (label_rtx
));
13714 label_buf
= temp_buf
[0] == '*' ? temp_buf
+ 1 : temp_buf
;
13715 labelname
= get_identifier (label_buf
);
13716 for (; insn
&& GET_CODE (insn
) != NOTE
; insn
= PREV_INSN (insn
));
13718 line_number
= NOTE_LINE_NUMBER (insn
);
13719 add_compiler_stub (labelname
, funname
, line_number
);
13722 labelname
= get_prev_label (funname
);
13724 sprintf (buf
, "jbsr %%z%d,%.246s",
13725 operand_number
, IDENTIFIER_POINTER (labelname
));
13730 sprintf (buf
, "bl %%z%d", operand_number
);
13735 #endif /* RS6000_LONG_BRANCH */
/* Write the local label "L<N>$<SYMBOL>" into BUF, preserving or adding
   quoting when SYMBOL is already quoted or needs quotes.  LENGTH is
   unused; it is kept for symmetry with the other GEN_*_FOR_SYMBOL
   macros.  */
#define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N)		\
  do {								\
    const char *const symbol_ = (SYMBOL);			\
    char *buffer_ = (BUF);					\
    if (symbol_[0] == '"')					\
      {								\
	sprintf(buffer_, "\"L%d$%s", (N), symbol_+1);		\
      }								\
    else if (name_needs_quotes(symbol_))			\
      {								\
	sprintf(buffer_, "\"L%d$%s\"", (N), symbol_);		\
      }								\
    else							\
      {								\
	sprintf(buffer_, "L%d$%s", (N), symbol_);		\
      }								\
  } while (0)
13756 /* Generate PIC and indirect symbol stubs. */
13759 machopic_output_stub (file
, symb
, stub
)
13761 const char *symb
, *stub
;
13763 unsigned int length
;
13764 char *symbol_name
, *lazy_ptr_name
;
13765 char *local_label_0
;
13766 static int label
= 0;
13768 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
13769 symb
= (*targetm
.strip_name_encoding
) (symb
);
13773 length
= strlen (symb
);
13774 symbol_name
= alloca (length
+ 32);
13775 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name
, symb
, length
);
13777 lazy_ptr_name
= alloca (length
+ 32);
13778 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name
, symb
, length
);
13780 local_label_0
= alloca (length
+ 32);
13781 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0
, symb
, length
, 0);
13784 machopic_picsymbol_stub1_section ();
13786 machopic_symbol_stub1_section ();
13787 fprintf (file
, "\t.align 2\n");
13789 fprintf (file
, "%s:\n", stub
);
13790 fprintf (file
, "\t.indirect_symbol %s\n", symbol_name
);
13794 fprintf (file
, "\tmflr r0\n");
13795 fprintf (file
, "\tbcl 20,31,%s\n", local_label_0
);
13796 fprintf (file
, "%s:\n\tmflr r11\n", local_label_0
);
13797 fprintf (file
, "\taddis r11,r11,ha16(%s-%s)\n",
13798 lazy_ptr_name
, local_label_0
);
13799 fprintf (file
, "\tmtlr r0\n");
13800 fprintf (file
, "\tlwzu r12,lo16(%s-%s)(r11)\n",
13801 lazy_ptr_name
, local_label_0
);
13802 fprintf (file
, "\tmtctr r12\n");
13803 fprintf (file
, "\tbctr\n");
13807 fprintf (file
, "\tlis r11,ha16(%s)\n", lazy_ptr_name
);
13808 fprintf (file
, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name
);
13809 fprintf (file
, "\tmtctr r12\n");
13810 fprintf (file
, "\tbctr\n");
13813 machopic_lazy_symbol_ptr_section ();
13814 fprintf (file
, "%s:\n", lazy_ptr_name
);
13815 fprintf (file
, "\t.indirect_symbol %s\n", symbol_name
);
13816 fprintf (file
, "\t.long dyld_stub_binding_helper\n");
13819 /* Legitimize PIC addresses. If the address is already
13820 position-independent, we return ORIG. Newly generated
13821 position-independent addresses go into a reg. This is REG if non
13822 zero, otherwise we allocate register(s) as necessary. */
13824 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
13827 rs6000_machopic_legitimize_pic_address (orig
, mode
, reg
)
13829 enum machine_mode mode
;
13834 if (reg
== NULL
&& ! reload_in_progress
&& ! reload_completed
)
13835 reg
= gen_reg_rtx (Pmode
);
13837 if (GET_CODE (orig
) == CONST
)
13839 if (GET_CODE (XEXP (orig
, 0)) == PLUS
13840 && XEXP (XEXP (orig
, 0), 0) == pic_offset_table_rtx
)
13843 if (GET_CODE (XEXP (orig
, 0)) == PLUS
)
13846 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig
, 0), 0),
13849 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig
, 0), 1),
13855 if (GET_CODE (offset
) == CONST_INT
)
13857 if (SMALL_INT (offset
))
13858 return plus_constant (base
, INTVAL (offset
));
13859 else if (! reload_in_progress
&& ! reload_completed
)
13860 offset
= force_reg (Pmode
, offset
);
13863 rtx mem
= force_const_mem (Pmode
, orig
);
13864 return machopic_legitimize_pic_address (mem
, Pmode
, reg
);
13867 return gen_rtx (PLUS
, Pmode
, base
, offset
);
13870 /* Fall back on generic machopic code. */
13871 return machopic_legitimize_pic_address (orig
, mode
, reg
);
13874 /* This is just a placeholder to make linking work without having to
13875 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
13876 ever needed for Darwin (not too likely!) this would have to get a
13877 real definition. */
13884 #endif /* TARGET_MACHO */
13887 static unsigned int
13888 rs6000_elf_section_type_flags (decl
, name
, reloc
)
13894 = default_section_type_flags_1 (decl
, name
, reloc
,
13895 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
13897 if (TARGET_RELOCATABLE
)
13898 flags
|= SECTION_WRITE
;
13903 /* Record an element in the table of global constructors. SYMBOL is
13904 a SYMBOL_REF of the function to be called; PRIORITY is a number
13905 between 0 and MAX_INIT_PRIORITY.
13907 This differs from default_named_section_asm_out_constructor in
13908 that we have special handling for -mrelocatable. */
13911 rs6000_elf_asm_out_constructor (symbol
, priority
)
13915 const char *section
= ".ctors";
13918 if (priority
!= DEFAULT_INIT_PRIORITY
)
13920 sprintf (buf
, ".ctors.%.5u",
13921 /* Invert the numbering so the linker puts us in the proper
13922 order; constructors are run from right to left, and the
13923 linker sorts in increasing order. */
13924 MAX_INIT_PRIORITY
- priority
);
13928 named_section_flags (section
, SECTION_WRITE
);
13929 assemble_align (POINTER_SIZE
);
13931 if (TARGET_RELOCATABLE
)
13933 fputs ("\t.long (", asm_out_file
);
13934 output_addr_const (asm_out_file
, symbol
);
13935 fputs (")@fixup\n", asm_out_file
);
13938 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
13942 rs6000_elf_asm_out_destructor (symbol
, priority
)
13946 const char *section
= ".dtors";
13949 if (priority
!= DEFAULT_INIT_PRIORITY
)
13951 sprintf (buf
, ".dtors.%.5u",
13952 /* Invert the numbering so the linker puts us in the proper
13953 order; constructors are run from right to left, and the
13954 linker sorts in increasing order. */
13955 MAX_INIT_PRIORITY
- priority
);
13959 named_section_flags (section
, SECTION_WRITE
);
13960 assemble_align (POINTER_SIZE
);
13962 if (TARGET_RELOCATABLE
)
13964 fputs ("\t.long (", asm_out_file
);
13965 output_addr_const (asm_out_file
, symbol
);
13966 fputs (")@fixup\n", asm_out_file
);
13969 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
13973 rs6000_elf_declare_function_name (file
, name
, decl
)
13980 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file
);
13981 ASM_OUTPUT_LABEL (file
, name
);
13982 fputs (DOUBLE_INT_ASM_OP
, file
);
13984 assemble_name (file
, name
);
13985 fputs (",.TOC.@tocbase,0\n\t.previous\n\t.size\t", file
);
13986 assemble_name (file
, name
);
13987 fputs (",24\n\t.type\t.", file
);
13988 assemble_name (file
, name
);
13989 fputs (",@function\n", file
);
13990 if (TREE_PUBLIC (decl
) && ! DECL_WEAK (decl
))
13992 fputs ("\t.globl\t.", file
);
13993 assemble_name (file
, name
);
13996 ASM_DECLARE_RESULT (file
, DECL_RESULT (decl
));
13998 ASM_OUTPUT_LABEL (file
, name
);
14002 if (TARGET_RELOCATABLE
14003 && (get_pool_size () != 0 || current_function_profile
)
14008 (*targetm
.asm_out
.internal_label
) (file
, "LCL", rs6000_pic_labelno
);
14010 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCTOC", 1);
14011 fprintf (file
, "\t.long ");
14012 assemble_name (file
, buf
);
14014 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCF", rs6000_pic_labelno
);
14015 assemble_name (file
, buf
);
14019 ASM_OUTPUT_TYPE_DIRECTIVE (file
, name
, "function");
14020 ASM_DECLARE_RESULT (file
, DECL_RESULT (decl
));
14022 if (DEFAULT_ABI
== ABI_AIX
)
14024 const char *desc_name
, *orig_name
;
14026 orig_name
= (*targetm
.strip_name_encoding
) (name
);
14027 desc_name
= orig_name
;
14028 while (*desc_name
== '.')
14031 if (TREE_PUBLIC (decl
))
14032 fprintf (file
, "\t.globl %s\n", desc_name
);
14034 fprintf (file
, "%s\n", MINIMAL_TOC_SECTION_ASM_OP
);
14035 fprintf (file
, "%s:\n", desc_name
);
14036 fprintf (file
, "\t.long %s\n", orig_name
);
14037 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file
);
14038 if (DEFAULT_ABI
== ABI_AIX
)
14039 fputs ("\t.long 0\n", file
);
14040 fprintf (file
, "\t.previous\n");
14042 ASM_OUTPUT_LABEL (file
, name
);
14048 rs6000_xcoff_asm_globalize_label (stream
, name
)
14052 fputs (GLOBAL_ASM_OP
, stream
);
14053 RS6000_OUTPUT_BASENAME (stream
, name
);
14054 putc ('\n', stream
);
14058 rs6000_xcoff_asm_named_section (name
, flags
)
14060 unsigned int flags
;
14063 static const char * const suffix
[3] = { "PR", "RO", "RW" };
14065 if (flags
& SECTION_CODE
)
14067 else if (flags
& SECTION_WRITE
)
14072 fprintf (asm_out_file
, "\t.csect %s%s[%s],%u\n",
14073 (flags
& SECTION_CODE
) ? "." : "",
14074 name
, suffix
[smclass
], flags
& SECTION_ENTSIZE
);
14078 rs6000_xcoff_select_section (decl
, reloc
, align
)
14081 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
;
14083 if (decl_readonly_section_1 (decl
, reloc
, 1))
14085 if (TREE_PUBLIC (decl
))
14086 read_only_data_section ();
14088 read_only_private_data_section ();
14092 if (TREE_PUBLIC (decl
))
14095 private_data_section ();
14100 rs6000_xcoff_unique_section (decl
, reloc
)
14102 int reloc ATTRIBUTE_UNUSED
;
14106 /* Use select_section for private and uninitialized data. */
14107 if (!TREE_PUBLIC (decl
)
14108 || DECL_COMMON (decl
)
14109 || DECL_INITIAL (decl
) == NULL_TREE
14110 || DECL_INITIAL (decl
) == error_mark_node
14111 || (flag_zero_initialized_in_bss
14112 && initializer_zerop (DECL_INITIAL (decl
))))
14115 name
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
));
14116 name
= (*targetm
.strip_name_encoding
) (name
);
14117 DECL_SECTION_NAME (decl
) = build_string (strlen (name
), name
);
14120 /* Select section for constant in constant pool.
14122 On RS/6000, all constants are in the private read-only data area.
14123 However, if this is being placed in the TOC it must be output as a
14127 rs6000_xcoff_select_rtx_section (mode
, x
, align
)
14128 enum machine_mode mode
;
14130 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
;
14132 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x
, mode
))
14135 read_only_private_data_section ();
14138 /* Remove any trailing [DS] or the like from the symbol name. */
/* Remove any trailing [DS] or the like from the symbol name, along
   with a leading '*' emit-verbatim marker.  */

static const char *
rs6000_xcoff_strip_name_encoding (name)
     const char *name;
{
  size_t len;
  if (*name == '*')
    name++;
  len = strlen (name);
  /* "[XX]" is four characters; drop them all.  */
  if (name[len - 1] == ']')
    return ggc_alloc_string (name, len - 4);
  else
    return name;
}
14154 /* Section attributes. AIX is always PIC. */
14156 static unsigned int
14157 rs6000_xcoff_section_type_flags (decl
, name
, reloc
)
14162 unsigned int align
;
14163 unsigned int flags
= default_section_type_flags_1 (decl
, name
, reloc
, 1);
14165 /* Align to at least UNIT size. */
14166 if (flags
& SECTION_CODE
)
14167 align
= MIN_UNITS_PER_WORD
;
14169 /* Increase alignment of large objects if not already stricter. */
14170 align
= MAX ((DECL_ALIGN (decl
) / BITS_PER_UNIT
),
14171 int_size_in_bytes (TREE_TYPE (decl
)) > MIN_UNITS_PER_WORD
14172 ? UNITS_PER_FP_WORD
: MIN_UNITS_PER_WORD
);
14174 return flags
| (exact_log2 (align
) & SECTION_ENTSIZE
);
14177 /* Output at end of assembler file.
14178 On the RS/6000, referencing data should automatically pull in text. */
14181 rs6000_xcoff_file_end ()
14184 fputs ("_section_.text:\n", asm_out_file
);
14186 fputs (TARGET_32BIT
14187 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
14190 #endif /* TARGET_XCOFF */
14193 /* Cross-module name binding. Darwin does not support overriding
14194 functions at dynamic-link time. */
14197 rs6000_binds_local_p (decl
)
14200 return default_binds_local_p_1 (decl
, 0);
14204 /* Compute a (partial) cost for rtx X. Return true if the complete
14205 cost has been computed, and false if subexpressions should be
14206 scanned. In either case, *TOTAL contains the cost result. */
14209 rs6000_rtx_costs (x
, code
, outer_code
, total
)
14211 int code
, outer_code ATTRIBUTE_UNUSED
;
14216 /* On the RS/6000, if it is valid in the insn, it is free.
14217 So this always returns 0. */
14228 *total
= ((GET_CODE (XEXP (x
, 1)) == CONST_INT
14229 && ((unsigned HOST_WIDE_INT
) (INTVAL (XEXP (x
, 1))
14230 + 0x8000) >= 0x10000)
14231 && ((INTVAL (XEXP (x
, 1)) & 0xffff) != 0))
14232 ? COSTS_N_INSNS (2)
14233 : COSTS_N_INSNS (1));
14239 *total
= ((GET_CODE (XEXP (x
, 1)) == CONST_INT
14240 && (INTVAL (XEXP (x
, 1)) & (~ (HOST_WIDE_INT
) 0xffff)) != 0
14241 && ((INTVAL (XEXP (x
, 1)) & 0xffff) != 0))
14242 ? COSTS_N_INSNS (2)
14243 : COSTS_N_INSNS (1));
14249 *total
= COSTS_N_INSNS (2);
14252 switch (rs6000_cpu
)
14254 case PROCESSOR_RIOS1
:
14255 case PROCESSOR_PPC405
:
14256 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
14257 ? COSTS_N_INSNS (5)
14258 : (INTVAL (XEXP (x
, 1)) >= -256
14259 && INTVAL (XEXP (x
, 1)) <= 255)
14260 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
14263 case PROCESSOR_PPC440
:
14264 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
14265 ? COSTS_N_INSNS (3)
14266 : COSTS_N_INSNS (2));
14269 case PROCESSOR_RS64A
:
14270 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
14271 ? GET_MODE (XEXP (x
, 1)) != DImode
14272 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
14273 : (INTVAL (XEXP (x
, 1)) >= -256
14274 && INTVAL (XEXP (x
, 1)) <= 255)
14275 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
14278 case PROCESSOR_RIOS2
:
14279 case PROCESSOR_MPCCORE
:
14280 case PROCESSOR_PPC604e
:
14281 *total
= COSTS_N_INSNS (2);
14284 case PROCESSOR_PPC601
:
14285 *total
= COSTS_N_INSNS (5);
14288 case PROCESSOR_PPC603
:
14289 case PROCESSOR_PPC7400
:
14290 case PROCESSOR_PPC750
:
14291 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
14292 ? COSTS_N_INSNS (5)
14293 : (INTVAL (XEXP (x
, 1)) >= -256
14294 && INTVAL (XEXP (x
, 1)) <= 255)
14295 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
14298 case PROCESSOR_PPC7450
:
14299 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
14300 ? COSTS_N_INSNS (4)
14301 : COSTS_N_INSNS (3));
14304 case PROCESSOR_PPC403
:
14305 case PROCESSOR_PPC604
:
14306 case PROCESSOR_PPC8540
:
14307 *total
= COSTS_N_INSNS (4);
14310 case PROCESSOR_PPC620
:
14311 case PROCESSOR_PPC630
:
14312 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
14313 ? GET_MODE (XEXP (x
, 1)) != DImode
14314 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
14315 : (INTVAL (XEXP (x
, 1)) >= -256
14316 && INTVAL (XEXP (x
, 1)) <= 255)
14317 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
14320 case PROCESSOR_POWER4
:
14321 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
14322 ? GET_MODE (XEXP (x
, 1)) != DImode
14323 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
14324 : COSTS_N_INSNS (2));
14333 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
14334 && exact_log2 (INTVAL (XEXP (x
, 1))) >= 0)
14336 *total
= COSTS_N_INSNS (2);
14343 switch (rs6000_cpu
)
14345 case PROCESSOR_RIOS1
:
14346 *total
= COSTS_N_INSNS (19);
14349 case PROCESSOR_RIOS2
:
14350 *total
= COSTS_N_INSNS (13);
14353 case PROCESSOR_RS64A
:
14354 *total
= (GET_MODE (XEXP (x
, 1)) != DImode
14355 ? COSTS_N_INSNS (65)
14356 : COSTS_N_INSNS (67));
14359 case PROCESSOR_MPCCORE
:
14360 *total
= COSTS_N_INSNS (6);
14363 case PROCESSOR_PPC403
:
14364 *total
= COSTS_N_INSNS (33);
14367 case PROCESSOR_PPC405
:
14368 *total
= COSTS_N_INSNS (35);
14371 case PROCESSOR_PPC440
:
14372 *total
= COSTS_N_INSNS (34);
14375 case PROCESSOR_PPC601
:
14376 *total
= COSTS_N_INSNS (36);
14379 case PROCESSOR_PPC603
:
14380 *total
= COSTS_N_INSNS (37);
14383 case PROCESSOR_PPC604
:
14384 case PROCESSOR_PPC604e
:
14385 *total
= COSTS_N_INSNS (20);
14388 case PROCESSOR_PPC620
:
14389 case PROCESSOR_PPC630
:
14390 *total
= (GET_MODE (XEXP (x
, 1)) != DImode
14391 ? COSTS_N_INSNS (21)
14392 : COSTS_N_INSNS (37));
14395 case PROCESSOR_PPC750
:
14396 case PROCESSOR_PPC8540
:
14397 case PROCESSOR_PPC7400
:
14398 *total
= COSTS_N_INSNS (19);
14401 case PROCESSOR_PPC7450
:
14402 *total
= COSTS_N_INSNS (23);
14405 case PROCESSOR_POWER4
:
14406 *total
= (GET_MODE (XEXP (x
, 1)) != DImode
14407 ? COSTS_N_INSNS (18)
14408 : COSTS_N_INSNS (34));
14416 *total
= COSTS_N_INSNS (4);
14420 /* MEM should be slightly more expensive than (plus (reg) (const)) */
14429 /* A C expression returning the cost of moving data from a register of class
14430 CLASS1 to one of CLASS2. */
14433 rs6000_register_move_cost (mode
, from
, to
)
14434 enum machine_mode mode
;
14435 enum reg_class from
, to
;
14437 /* Moves from/to GENERAL_REGS. */
14438 if (reg_classes_intersect_p (to
, GENERAL_REGS
)
14439 || reg_classes_intersect_p (from
, GENERAL_REGS
))
14441 if (! reg_classes_intersect_p (to
, GENERAL_REGS
))
14444 if (from
== FLOAT_REGS
|| from
== ALTIVEC_REGS
)
14445 return (rs6000_memory_move_cost (mode
, from
, 0)
14446 + rs6000_memory_move_cost (mode
, GENERAL_REGS
, 0));
14448 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift...*/
14449 else if (from
== CR_REGS
)
14453 /* A move will cost one instruction per GPR moved. */
14454 return 2 * HARD_REGNO_NREGS (0, mode
);
14457 /* Moving between two similar registers is just one instruction. */
14458 else if (reg_classes_intersect_p (to
, from
))
14459 return mode
== TFmode
? 4 : 2;
14461 /* Everything else has to go through GENERAL_REGS. */
14463 return (rs6000_register_move_cost (mode
, GENERAL_REGS
, to
)
14464 + rs6000_register_move_cost (mode
, from
, GENERAL_REGS
));
14467 /* A C expressions returning the cost of moving data of MODE from a register to
14471 rs6000_memory_move_cost (mode
, class, in
)
14472 enum machine_mode mode
;
14473 enum reg_class
class;
14474 int in ATTRIBUTE_UNUSED
;
14476 if (reg_classes_intersect_p (class, GENERAL_REGS
))
14477 return 4 * HARD_REGNO_NREGS (0, mode
);
14478 else if (reg_classes_intersect_p (class, FLOAT_REGS
))
14479 return 4 * HARD_REGNO_NREGS (32, mode
);
14480 else if (reg_classes_intersect_p (class, ALTIVEC_REGS
))
14481 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO
, mode
);
14483 return 4 + rs6000_register_move_cost (mode
, class, GENERAL_REGS
);
14486 /* Return an RTX representing where to find the function value of a
14487 function returning MODE. */
14489 rs6000_complex_function_value (enum machine_mode mode
)
14491 unsigned int regno
;
14493 enum machine_mode inner
= GET_MODE_INNER (mode
);
14495 if (FLOAT_MODE_P (mode
))
14496 regno
= FP_ARG_RETURN
;
14499 regno
= GP_ARG_RETURN
;
14501 /* 32-bit is OK since it'll go in r3/r4. */
14503 && GET_MODE_BITSIZE (inner
) >= 32)
14504 return gen_rtx_REG (mode
, regno
);
14507 r1
= gen_rtx_EXPR_LIST (inner
, gen_rtx_REG (inner
, regno
),
14509 r2
= gen_rtx_EXPR_LIST (inner
, gen_rtx_REG (inner
, regno
+ 1),
14510 GEN_INT (GET_MODE_UNIT_SIZE (inner
)));
14511 return gen_rtx_PARALLEL (mode
, gen_rtvec (2, r1
, r2
));
14514 /* Define how to find the value returned by a function.
14515 VALTYPE is the data type of the value (as a tree).
14516 If the precise function being called is known, FUNC is its FUNCTION_DECL;
14517 otherwise, FUNC is 0.
14519 On the SPE, both FPs and vectors are returned in r3.
14521 On RS/6000 an integer value is in r3 and a floating-point value is in
14522 fp1, unless -msoft-float. */
14525 rs6000_function_value (tree valtype
, tree func ATTRIBUTE_UNUSED
)
14527 enum machine_mode mode
;
14528 unsigned int regno
;
14530 if ((INTEGRAL_TYPE_P (valtype
)
14531 && TYPE_PRECISION (valtype
) < BITS_PER_WORD
)
14532 || POINTER_TYPE_P (valtype
))
14535 mode
= TYPE_MODE (valtype
);
14537 if (TREE_CODE (valtype
) == REAL_TYPE
&& TARGET_HARD_FLOAT
&& TARGET_FPRS
)
14538 regno
= FP_ARG_RETURN
;
14539 else if (TREE_CODE (valtype
) == COMPLEX_TYPE
14540 && TARGET_HARD_FLOAT
14541 && SPLIT_COMPLEX_ARGS
)
14542 return rs6000_complex_function_value (mode
);
14543 else if (TREE_CODE (valtype
) == VECTOR_TYPE
&& TARGET_ALTIVEC
)
14544 regno
= ALTIVEC_ARG_RETURN
;
14546 regno
= GP_ARG_RETURN
;
14548 return gen_rtx_REG (mode
, regno
);
14551 /* Define how to find the value returned by a library function
14552 assuming the value has mode MODE. */
14554 rs6000_libcall_value (enum machine_mode mode
)
14556 unsigned int regno
;
14558 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
14559 && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
14560 regno
= FP_ARG_RETURN
;
14561 else if (ALTIVEC_VECTOR_MODE (mode
))
14562 regno
= ALTIVEC_ARG_RETURN
;
14563 else if (COMPLEX_MODE_P (mode
) && SPLIT_COMPLEX_ARGS
)
14564 return rs6000_complex_function_value (mode
);
14566 regno
= GP_ARG_RETURN
;
14568 return gen_rtx_REG (mode
, regno
);
14571 /* Return true if TYPE is of type __ev64_opaque__. */
14574 is_ev64_opaque_type (type
)
14578 && (type
== opaque_V2SI_type_node
14579 || type
== opaque_V2SF_type_node
14580 || type
== opaque_p_V2SI_type_node
14581 || (TREE_CODE (type
) == VECTOR_TYPE
14582 && TYPE_NAME (type
)
14583 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
14584 && DECL_NAME (TYPE_NAME (type
))
14585 && strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type
))),
14586 "__ev64_opaque__") == 0)));
14590 rs6000_dwarf_register_span (reg
)
14595 if (!TARGET_SPE
|| !SPE_VECTOR_MODE (GET_MODE (reg
)))
14598 regno
= REGNO (reg
);
14600 /* The duality of the SPE register size wreaks all kinds of havoc.
14601 This is a way of distinguishing r0 in 32-bits from r0 in
14604 gen_rtx_PARALLEL (VOIDmode
,
14607 gen_rtx_REG (SImode
, regno
+ 1200),
14608 gen_rtx_REG (SImode
, regno
))
14610 gen_rtx_REG (SImode
, regno
),
14611 gen_rtx_REG (SImode
, regno
+ 1200)));
14614 #include "gt-rs6000.h"