1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
/* Default TARGET_NO_PROTOTYPE to "prototypes required" unless a
   subtarget header already decided otherwise.  */
#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

/* Simple min/max helpers.  NOTE: arguments are evaluated twice, so
   never pass expressions with side effects.  */
#define min(A,B)	((A) < (B) ? (A) : (B))
#define max(A,B)	((A) > (B) ? (A) : (B))
63 enum processor_type rs6000_cpu
;
64 struct rs6000_cpu_select rs6000_select
[3] =
66 /* switch name, tune arch */
67 { (const char *)0, "--with-cpu=", 1, 1 },
68 { (const char *)0, "-mcpu=", 1, 1 },
69 { (const char *)0, "-mtune=", 1, 0 },
72 /* Size of long double */
73 const char *rs6000_long_double_size_string
;
74 int rs6000_long_double_type_size
;
76 /* Whether -mabi=altivec has appeared */
77 int rs6000_altivec_abi
;
79 /* Whether VRSAVE instructions should be generated. */
80 int rs6000_altivec_vrsave
;
82 /* String from -mvrsave= option. */
83 const char *rs6000_altivec_vrsave_string
;
85 /* Nonzero if we want SPE ABI extensions. */
88 /* Whether isel instructions should be generated. */
91 /* Nonzero if we have FPRs. */
94 /* String from -misel=. */
95 const char *rs6000_isel_string
;
97 /* Set to nonzero once AIX common-mode calls have been defined. */
98 static GTY(()) int common_mode_defined
;
100 /* Private copy of original value of flag_pic for ABI_AIX. */
101 static int rs6000_flag_pic
;
103 /* Save information from a "cmpxx" operation until the branch or scc is
105 rtx rs6000_compare_op0
, rs6000_compare_op1
;
106 int rs6000_compare_fp_p
;
108 /* Label number of label created for -mrelocatable, to call to so we can
109 get the address of the GOT section */
110 int rs6000_pic_labelno
;
113 /* Which abi to adhere to */
114 const char *rs6000_abi_name
= RS6000_ABI_NAME
;
116 /* Semantics of the small data area */
117 enum rs6000_sdata_type rs6000_sdata
= SDATA_DATA
;
119 /* Which small data model to use */
120 const char *rs6000_sdata_name
= (char *)0;
122 /* Counter for labels which are to be placed in .fixup. */
123 int fixuplabelno
= 0;
126 /* ABI enumeration available for subtarget to use. */
127 enum rs6000_abi rs6000_current_abi
;
129 /* ABI string from -mabi= option. */
130 const char *rs6000_abi_string
;
133 const char *rs6000_debug_name
;
134 int rs6000_debug_stack
; /* debug stack applications */
135 int rs6000_debug_arg
; /* debug argument handling */
137 const char *rs6000_traceback_name
;
139 traceback_default
= 0,
145 /* Flag to say the TOC is initialized */
147 char toc_label_name
[10];
149 /* Alias set for saves and restores from the rs6000 stack. */
150 static int rs6000_sr_alias_set
;
152 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
153 The only place that looks at this is rs6000_set_default_type_attributes;
154 everywhere else should rely on the presence or absence of a longcall
155 attribute on the function declaration. */
156 int rs6000_default_long_calls
;
157 const char *rs6000_longcall_switch
;
159 struct builtin_description
161 /* mask is not const because we're going to alter it below. This
162 nonsense will go away when we rewrite the -march infrastructure
163 to give us more target flag bits. */
165 const enum insn_code icode
;
166 const char *const name
;
167 const enum rs6000_builtins code
;
170 static bool rs6000_function_ok_for_sibcall
PARAMS ((tree
, tree
));
171 static int num_insns_constant_wide
PARAMS ((HOST_WIDE_INT
));
172 static void validate_condition_mode
173 PARAMS ((enum rtx_code
, enum machine_mode
));
174 static rtx rs6000_generate_compare
PARAMS ((enum rtx_code
));
175 static void rs6000_maybe_dead
PARAMS ((rtx
));
176 static void rs6000_emit_stack_tie
PARAMS ((void));
177 static void rs6000_frame_related
PARAMS ((rtx
, rtx
, HOST_WIDE_INT
, rtx
, rtx
));
178 static void emit_frame_save
PARAMS ((rtx
, rtx
, enum machine_mode
,
179 unsigned int, int, int));
180 static rtx gen_frame_mem_offset
PARAMS ((enum machine_mode
, rtx
, int));
181 static void rs6000_emit_allocate_stack
PARAMS ((HOST_WIDE_INT
, int));
182 static unsigned rs6000_hash_constant
PARAMS ((rtx
));
183 static unsigned toc_hash_function
PARAMS ((const void *));
184 static int toc_hash_eq
PARAMS ((const void *, const void *));
185 static int constant_pool_expr_1
PARAMS ((rtx
, int *, int *));
186 static struct machine_function
* rs6000_init_machine_status
PARAMS ((void));
187 static bool rs6000_assemble_integer
PARAMS ((rtx
, unsigned int, int));
188 #ifdef HAVE_GAS_HIDDEN
189 static void rs6000_assemble_visibility
PARAMS ((tree
, int));
191 static int rs6000_ra_ever_killed
PARAMS ((void));
192 static tree rs6000_handle_longcall_attribute
PARAMS ((tree
*, tree
, tree
, int, bool *));
193 const struct attribute_spec rs6000_attribute_table
[];
194 static void rs6000_set_default_type_attributes
PARAMS ((tree
));
195 static void rs6000_output_function_prologue
PARAMS ((FILE *, HOST_WIDE_INT
));
196 static void rs6000_output_function_epilogue
PARAMS ((FILE *, HOST_WIDE_INT
));
197 static void rs6000_output_mi_thunk
PARAMS ((FILE *, tree
, HOST_WIDE_INT
,
198 HOST_WIDE_INT
, tree
));
199 static rtx rs6000_emit_set_long_const
PARAMS ((rtx
,
200 HOST_WIDE_INT
, HOST_WIDE_INT
));
202 static unsigned int rs6000_elf_section_type_flags
PARAMS ((tree
, const char *,
204 static void rs6000_elf_asm_out_constructor
PARAMS ((rtx
, int));
205 static void rs6000_elf_asm_out_destructor
PARAMS ((rtx
, int));
206 static void rs6000_elf_select_section
PARAMS ((tree
, int,
207 unsigned HOST_WIDE_INT
));
208 static void rs6000_elf_unique_section
PARAMS ((tree
, int));
209 static void rs6000_elf_select_rtx_section
PARAMS ((enum machine_mode
, rtx
,
210 unsigned HOST_WIDE_INT
));
211 static void rs6000_elf_encode_section_info
PARAMS ((tree
, int))
213 static const char *rs6000_elf_strip_name_encoding
PARAMS ((const char *));
214 static bool rs6000_elf_in_small_data_p
PARAMS ((tree
));
217 static void rs6000_xcoff_asm_globalize_label
PARAMS ((FILE *, const char *));
218 static void rs6000_xcoff_asm_named_section
PARAMS ((const char *, unsigned int));
219 static void rs6000_xcoff_select_section
PARAMS ((tree
, int,
220 unsigned HOST_WIDE_INT
));
221 static void rs6000_xcoff_unique_section
PARAMS ((tree
, int));
222 static void rs6000_xcoff_select_rtx_section
PARAMS ((enum machine_mode
, rtx
,
223 unsigned HOST_WIDE_INT
));
224 static const char * rs6000_xcoff_strip_name_encoding
PARAMS ((const char *));
225 static unsigned int rs6000_xcoff_section_type_flags
PARAMS ((tree
, const char *, int));
227 static void rs6000_xcoff_encode_section_info
PARAMS ((tree
, int))
229 static bool rs6000_binds_local_p
PARAMS ((tree
));
230 static int rs6000_use_dfa_pipeline_interface
PARAMS ((void));
231 static int rs6000_variable_issue
PARAMS ((FILE *, int, rtx
, int));
232 static bool rs6000_rtx_costs
PARAMS ((rtx
, int, int, int *));
233 static int rs6000_adjust_cost
PARAMS ((rtx
, rtx
, rtx
, int));
234 static int rs6000_adjust_priority
PARAMS ((rtx
, int));
235 static int rs6000_issue_rate
PARAMS ((void));
237 static void rs6000_init_builtins
PARAMS ((void));
238 static rtx rs6000_expand_unop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
239 static rtx rs6000_expand_binop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
240 static rtx rs6000_expand_ternop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
241 static rtx rs6000_expand_builtin
PARAMS ((tree
, rtx
, rtx
, enum machine_mode
, int));
242 static void altivec_init_builtins
PARAMS ((void));
243 static void rs6000_common_init_builtins
PARAMS ((void));
245 static void enable_mask_for_builtins
PARAMS ((struct builtin_description
*,
246 int, enum rs6000_builtins
,
247 enum rs6000_builtins
));
248 static void spe_init_builtins
PARAMS ((void));
249 static rtx spe_expand_builtin
PARAMS ((tree
, rtx
, bool *));
250 static rtx spe_expand_predicate_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
251 static rtx spe_expand_evsel_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
252 static int rs6000_emit_int_cmove
PARAMS ((rtx
, rtx
, rtx
, rtx
));
254 static rtx altivec_expand_builtin
PARAMS ((tree
, rtx
, bool *));
255 static rtx altivec_expand_ld_builtin
PARAMS ((tree
, rtx
, bool *));
256 static rtx altivec_expand_st_builtin
PARAMS ((tree
, rtx
, bool *));
257 static rtx altivec_expand_dst_builtin
PARAMS ((tree
, rtx
, bool *));
258 static rtx altivec_expand_abs_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
259 static rtx altivec_expand_predicate_builtin
PARAMS ((enum insn_code
, const char *, tree
, rtx
));
260 static rtx altivec_expand_stv_builtin
PARAMS ((enum insn_code
, tree
));
261 static void rs6000_parse_abi_options
PARAMS ((void));
262 static void rs6000_parse_vrsave_option
PARAMS ((void));
263 static void rs6000_parse_isel_option
PARAMS ((void));
264 static int first_altivec_reg_to_save
PARAMS ((void));
265 static unsigned int compute_vrsave_mask
PARAMS ((void));
266 static void is_altivec_return_reg
PARAMS ((rtx
, void *));
267 static rtx generate_set_vrsave
PARAMS ((rtx
, rs6000_stack_t
*, int));
268 static void altivec_frame_fixup
PARAMS ((rtx
, rtx
, HOST_WIDE_INT
));
269 static int easy_vector_constant
PARAMS ((rtx
));
270 static bool is_ev64_opaque_type
PARAMS ((tree
));
271 static rtx rs6000_dwarf_register_span
PARAMS ((rtx
));
273 /* Hash table stuff for keeping track of TOC entries. */
275 struct toc_hash_struct
GTY(())
277 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
278 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
280 enum machine_mode key_mode
;
284 static GTY ((param_is (struct toc_hash_struct
))) htab_t toc_hash_table
;
/* Default register names: 32 GPRs, 32 FPRs, the special registers
   (mq/lr/ctr/ap), 8 CR fields, xer, 32 AltiVec registers, vrsave/vscr,
   and the SPE accumulator registers.  Order must match the backend's
   hard-register numbering.  */
char rs6000_reg_names[][8] =
{
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "mq", "lr", "ctr","ap",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9",  "10", "11", "12", "13", "14", "15",
      "16", "17", "18", "19", "20", "21", "22", "23",
      "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave", "vscr",
      /* SPE registers.  */
      "spe_acc", "spefscr"
};
#ifdef TARGET_REGNAMES
/* Alternate (%-prefixed) register names, copied over rs6000_reg_names
   when -mregnames is in effect.  Must parallel rs6000_reg_names.  */
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
   /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6", "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr"
};
#endif
/* Subtargets without strict-alignment support define the mask to 0.  */
#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif

/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
342 /* Initialize the GCC target structure. */
343 #undef TARGET_ATTRIBUTE_TABLE
344 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
345 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
346 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
348 #undef TARGET_ASM_ALIGNED_DI_OP
349 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
351 /* Default unaligned ops are only provided for ELF. Find the ops needed
352 for non-ELF systems. */
353 #ifndef OBJECT_FORMAT_ELF
355 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
357 #undef TARGET_ASM_UNALIGNED_HI_OP
358 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
359 #undef TARGET_ASM_UNALIGNED_SI_OP
360 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
361 #undef TARGET_ASM_UNALIGNED_DI_OP
362 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
365 #undef TARGET_ASM_UNALIGNED_HI_OP
366 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
367 #undef TARGET_ASM_UNALIGNED_SI_OP
368 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
372 /* This hook deals with fixups for relocatable code and DI-mode objects
374 #undef TARGET_ASM_INTEGER
375 #define TARGET_ASM_INTEGER rs6000_assemble_integer
377 #ifdef HAVE_GAS_HIDDEN
378 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
379 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
382 #undef TARGET_ASM_FUNCTION_PROLOGUE
383 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
384 #undef TARGET_ASM_FUNCTION_EPILOGUE
385 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
387 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
388 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
389 #undef TARGET_SCHED_VARIABLE_ISSUE
390 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
392 #undef TARGET_SCHED_ISSUE_RATE
393 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
394 #undef TARGET_SCHED_ADJUST_COST
395 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
396 #undef TARGET_SCHED_ADJUST_PRIORITY
397 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
399 #undef TARGET_INIT_BUILTINS
400 #define TARGET_INIT_BUILTINS rs6000_init_builtins
402 #undef TARGET_EXPAND_BUILTIN
403 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
405 #undef TARGET_BINDS_LOCAL_P
406 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
408 #undef TARGET_ASM_OUTPUT_MI_THUNK
409 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
411 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
412 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
414 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
415 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
417 #undef TARGET_RTX_COSTS
418 #define TARGET_RTX_COSTS rs6000_rtx_costs
419 #undef TARGET_ADDRESS_COST
420 #define TARGET_ADDRESS_COST hook_int_rtx_0
422 #undef TARGET_VECTOR_OPAQUE_P
423 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
425 #undef TARGET_DWARF_REGISTER_SPAN
426 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
428 struct gcc_target targetm
= TARGET_INITIALIZER
;
430 /* Override command line options. Mostly we process the processor
431 type and sometimes adjust other TARGET_ options. */
434 rs6000_override_options (default_cpu
)
435 const char *default_cpu
;
438 struct rs6000_cpu_select
*ptr
;
440 /* Simplify the entries below by making a mask for any POWER
441 variant and any PowerPC variant. */
443 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
444 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
445 | MASK_PPC_GFXOPT | MASK_POWERPC64)
446 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
450 const char *const name
; /* Canonical processor name. */
451 const enum processor_type processor
; /* Processor type enum value. */
452 const int target_enable
; /* Target flags to enable. */
453 const int target_disable
; /* Target flags to disable. */
454 } const processor_target_table
[]
455 = {{"common", PROCESSOR_COMMON
, MASK_NEW_MNEMONICS
,
456 POWER_MASKS
| POWERPC_MASKS
},
457 {"power", PROCESSOR_POWER
,
458 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
459 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
460 {"power2", PROCESSOR_POWER
,
461 MASK_POWER
| MASK_POWER2
| MASK_MULTIPLE
| MASK_STRING
,
462 POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
463 {"power3", PROCESSOR_PPC630
,
464 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
465 POWER_MASKS
| MASK_PPC_GPOPT
},
466 {"power4", PROCESSOR_POWER4
,
467 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
468 POWER_MASKS
| MASK_PPC_GPOPT
},
469 {"powerpc", PROCESSOR_POWERPC
,
470 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
471 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
472 {"powerpc64", PROCESSOR_POWERPC64
,
473 MASK_POWERPC
| MASK_POWERPC64
| MASK_NEW_MNEMONICS
,
474 POWER_MASKS
| POWERPC_OPT_MASKS
},
475 {"rios", PROCESSOR_RIOS1
,
476 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
477 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
478 {"rios1", PROCESSOR_RIOS1
,
479 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
480 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
481 {"rsc", PROCESSOR_PPC601
,
482 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
483 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
484 {"rsc1", PROCESSOR_PPC601
,
485 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
486 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
487 {"rios2", PROCESSOR_RIOS2
,
488 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
| MASK_POWER2
,
489 POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
490 {"rs64a", PROCESSOR_RS64A
,
491 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
492 POWER_MASKS
| POWERPC_OPT_MASKS
},
493 {"401", PROCESSOR_PPC403
,
494 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
495 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
496 {"403", PROCESSOR_PPC403
,
497 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
| MASK_STRICT_ALIGN
,
498 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
499 {"405", PROCESSOR_PPC405
,
500 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
501 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
502 {"405f", PROCESSOR_PPC405
,
503 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
504 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
505 {"505", PROCESSOR_MPCCORE
,
506 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
507 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
508 {"601", PROCESSOR_PPC601
,
509 MASK_POWER
| MASK_POWERPC
| MASK_NEW_MNEMONICS
| MASK_MULTIPLE
| MASK_STRING
,
510 MASK_POWER2
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
511 {"602", PROCESSOR_PPC603
,
512 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
513 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
514 {"603", PROCESSOR_PPC603
,
515 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
516 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
517 {"603e", PROCESSOR_PPC603
,
518 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
519 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
520 {"ec603e", PROCESSOR_PPC603
,
521 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
522 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
523 {"604", PROCESSOR_PPC604
,
524 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
525 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
526 {"604e", PROCESSOR_PPC604e
,
527 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
528 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
529 {"620", PROCESSOR_PPC620
,
530 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
531 POWER_MASKS
| MASK_PPC_GPOPT
},
532 {"630", PROCESSOR_PPC630
,
533 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
534 POWER_MASKS
| MASK_PPC_GPOPT
},
535 {"740", PROCESSOR_PPC750
,
536 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
537 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
538 {"750", PROCESSOR_PPC750
,
539 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
540 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
541 {"7400", PROCESSOR_PPC7400
,
542 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
543 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
544 {"7450", PROCESSOR_PPC7450
,
545 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
546 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
547 {"8540", PROCESSOR_PPC8540
,
548 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
549 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
550 {"801", PROCESSOR_MPCCORE
,
551 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
552 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
553 {"821", PROCESSOR_MPCCORE
,
554 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
555 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
556 {"823", PROCESSOR_MPCCORE
,
557 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
558 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
559 {"860", PROCESSOR_MPCCORE
,
560 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
561 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
}};
563 const size_t ptt_size
= ARRAY_SIZE (processor_target_table
);
565 /* Save current -mmultiple/-mno-multiple status. */
566 int multiple
= TARGET_MULTIPLE
;
567 /* Save current -mstring/-mno-string status. */
568 int string
= TARGET_STRING
;
570 /* Identify the processor type. */
571 rs6000_select
[0].string
= default_cpu
;
572 rs6000_cpu
= TARGET_POWERPC64
? PROCESSOR_DEFAULT64
: PROCESSOR_DEFAULT
;
574 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
576 ptr
= &rs6000_select
[i
];
577 if (ptr
->string
!= (char *)0 && ptr
->string
[0] != '\0')
579 for (j
= 0; j
< ptt_size
; j
++)
580 if (! strcmp (ptr
->string
, processor_target_table
[j
].name
))
583 rs6000_cpu
= processor_target_table
[j
].processor
;
587 target_flags
|= processor_target_table
[j
].target_enable
;
588 target_flags
&= ~processor_target_table
[j
].target_disable
;
594 error ("bad value (%s) for %s switch", ptr
->string
, ptr
->name
);
598 if (rs6000_cpu
== PROCESSOR_PPC8540
)
601 /* If we are optimizing big endian systems for space, use the load/store
602 multiple and string instructions. */
603 if (BYTES_BIG_ENDIAN
&& optimize_size
)
604 target_flags
|= MASK_MULTIPLE
| MASK_STRING
;
606 /* If -mmultiple or -mno-multiple was explicitly used, don't
607 override with the processor default */
608 if ((target_flags_explicit
& MASK_MULTIPLE
) != 0)
609 target_flags
= (target_flags
& ~MASK_MULTIPLE
) | multiple
;
611 /* If -mstring or -mno-string was explicitly used, don't override
612 with the processor default. */
613 if ((target_flags_explicit
& MASK_STRING
) != 0)
614 target_flags
= (target_flags
& ~MASK_STRING
) | string
;
616 /* Don't allow -mmultiple or -mstring on little endian systems
617 unless the cpu is a 750, because the hardware doesn't support the
618 instructions used in little endian mode, and causes an alignment
619 trap. The 750 does not cause an alignment trap (except when the
620 target is unaligned). */
622 if (!BYTES_BIG_ENDIAN
&& rs6000_cpu
!= PROCESSOR_PPC750
)
626 target_flags
&= ~MASK_MULTIPLE
;
627 if ((target_flags_explicit
& MASK_MULTIPLE
) != 0)
628 warning ("-mmultiple is not supported on little endian systems");
633 target_flags
&= ~MASK_STRING
;
634 if ((target_flags_explicit
& MASK_STRING
) != 0)
635 warning ("-mstring is not supported on little endian systems");
639 if (flag_pic
!= 0 && DEFAULT_ABI
== ABI_AIX
)
641 rs6000_flag_pic
= flag_pic
;
645 /* For Darwin, always silently make -fpic and -fPIC identical. */
646 if (flag_pic
== 1 && DEFAULT_ABI
== ABI_DARWIN
)
649 /* Set debug flags */
650 if (rs6000_debug_name
)
652 if (! strcmp (rs6000_debug_name
, "all"))
653 rs6000_debug_stack
= rs6000_debug_arg
= 1;
654 else if (! strcmp (rs6000_debug_name
, "stack"))
655 rs6000_debug_stack
= 1;
656 else if (! strcmp (rs6000_debug_name
, "arg"))
657 rs6000_debug_arg
= 1;
659 error ("unknown -mdebug-%s switch", rs6000_debug_name
);
662 if (rs6000_traceback_name
)
664 if (! strncmp (rs6000_traceback_name
, "full", 4))
665 rs6000_traceback
= traceback_full
;
666 else if (! strncmp (rs6000_traceback_name
, "part", 4))
667 rs6000_traceback
= traceback_part
;
668 else if (! strncmp (rs6000_traceback_name
, "no", 2))
669 rs6000_traceback
= traceback_none
;
671 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
672 rs6000_traceback_name
);
675 /* Set size of long double */
676 rs6000_long_double_type_size
= 64;
677 if (rs6000_long_double_size_string
)
680 int size
= strtol (rs6000_long_double_size_string
, &tail
, 10);
681 if (*tail
!= '\0' || (size
!= 64 && size
!= 128))
682 error ("Unknown switch -mlong-double-%s",
683 rs6000_long_double_size_string
);
685 rs6000_long_double_type_size
= size
;
688 /* Handle -mabi= options. */
689 rs6000_parse_abi_options ();
691 /* Handle -mvrsave= option. */
692 rs6000_parse_vrsave_option ();
694 /* Handle -misel= option. */
695 rs6000_parse_isel_option ();
697 #ifdef SUBTARGET_OVERRIDE_OPTIONS
698 SUBTARGET_OVERRIDE_OPTIONS
;
700 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
701 SUBSUBTARGET_OVERRIDE_OPTIONS
;
704 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
705 using TARGET_OPTIONS to handle a toggle switch, but we're out of
706 bits in target_flags so TARGET_SWITCHES cannot be used.
707 Assumption here is that rs6000_longcall_switch points into the
708 text of the complete option, rather than being a copy, so we can
709 scan back for the presence or absence of the no- modifier. */
710 if (rs6000_longcall_switch
)
712 const char *base
= rs6000_longcall_switch
;
713 while (base
[-1] != 'm') base
--;
715 if (*rs6000_longcall_switch
!= '\0')
716 error ("invalid option `%s'", base
);
717 rs6000_default_long_calls
= (base
[0] != 'n');
720 #ifdef TARGET_REGNAMES
721 /* If the user desires alternate register names, copy in the
722 alternate names now. */
724 memcpy (rs6000_reg_names
, alt_reg_names
, sizeof (rs6000_reg_names
));
727 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
728 If -maix-struct-return or -msvr4-struct-return was explicitly
729 used, don't override with the ABI default. */
730 if ((target_flags_explicit
& MASK_AIX_STRUCT_RET
) == 0)
732 if (DEFAULT_ABI
== ABI_V4
&& !DRAFT_V4_STRUCT_RET
)
733 target_flags
= (target_flags
& ~MASK_AIX_STRUCT_RET
);
735 target_flags
|= MASK_AIX_STRUCT_RET
;
738 if (TARGET_LONG_DOUBLE_128
739 && (DEFAULT_ABI
== ABI_AIX
|| DEFAULT_ABI
== ABI_DARWIN
))
740 real_format_for_mode
[TFmode
- QFmode
] = &ibm_extended_format
;
742 /* Allocate an alias set for register saves & restores from stack. */
743 rs6000_sr_alias_set
= new_alias_set ();
746 ASM_GENERATE_INTERNAL_LABEL (toc_label_name
, "LCTOC", 1);
748 /* We can only guarantee the availability of DI pseudo-ops when
749 assembling for 64-bit targets. */
752 targetm
.asm_out
.aligned_op
.di
= NULL
;
753 targetm
.asm_out
.unaligned_op
.di
= NULL
;
756 /* Set maximum branch target alignment at two instructions, eight bytes. */
757 align_jumps_max_skip
= 8;
758 align_loops_max_skip
= 8;
760 /* Arrange to save and restore machine status around nested functions. */
761 init_machine_status
= rs6000_init_machine_status
;
764 /* Handle -misel= option. */
766 rs6000_parse_isel_option ()
768 if (rs6000_isel_string
== 0)
770 else if (! strcmp (rs6000_isel_string
, "yes"))
772 else if (! strcmp (rs6000_isel_string
, "no"))
775 error ("unknown -misel= option specified: '%s'",
779 /* Handle -mvrsave= options. */
781 rs6000_parse_vrsave_option ()
783 /* Generate VRSAVE instructions by default. */
784 if (rs6000_altivec_vrsave_string
== 0
785 || ! strcmp (rs6000_altivec_vrsave_string
, "yes"))
786 rs6000_altivec_vrsave
= 1;
787 else if (! strcmp (rs6000_altivec_vrsave_string
, "no"))
788 rs6000_altivec_vrsave
= 0;
790 error ("unknown -mvrsave= option specified: '%s'",
791 rs6000_altivec_vrsave_string
);
794 /* Handle -mabi= options. */
796 rs6000_parse_abi_options ()
798 if (rs6000_abi_string
== 0)
800 else if (! strcmp (rs6000_abi_string
, "altivec"))
801 rs6000_altivec_abi
= 1;
802 else if (! strcmp (rs6000_abi_string
, "no-altivec"))
803 rs6000_altivec_abi
= 0;
804 else if (! strcmp (rs6000_abi_string
, "spe"))
808 error ("not configured for ABI: '%s'", rs6000_abi_string
);
811 else if (! strcmp (rs6000_abi_string
, "no-spe"))
814 error ("unknown ABI specified: '%s'", rs6000_abi_string
);
818 optimization_options (level
, size
)
819 int level ATTRIBUTE_UNUSED
;
820 int size ATTRIBUTE_UNUSED
;
824 /* Do anything needed at the start of the asm file. */
827 rs6000_file_start (file
, default_cpu
)
829 const char *default_cpu
;
833 const char *start
= buffer
;
834 struct rs6000_cpu_select
*ptr
;
836 if (flag_verbose_asm
)
838 sprintf (buffer
, "\n%s rs6000/powerpc options:", ASM_COMMENT_START
);
839 rs6000_select
[0].string
= default_cpu
;
841 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
843 ptr
= &rs6000_select
[i
];
844 if (ptr
->string
!= (char *)0 && ptr
->string
[0] != '\0')
846 fprintf (file
, "%s %s%s", start
, ptr
->name
, ptr
->string
);
852 switch (rs6000_sdata
)
854 case SDATA_NONE
: fprintf (file
, "%s -msdata=none", start
); start
= ""; break;
855 case SDATA_DATA
: fprintf (file
, "%s -msdata=data", start
); start
= ""; break;
856 case SDATA_SYSV
: fprintf (file
, "%s -msdata=sysv", start
); start
= ""; break;
857 case SDATA_EABI
: fprintf (file
, "%s -msdata=eabi", start
); start
= ""; break;
860 if (rs6000_sdata
&& g_switch_value
)
862 fprintf (file
, "%s -G %d", start
, g_switch_value
);
872 /* Return nonzero if this function is known to have a null epilogue. */
877 if (reload_completed
)
879 rs6000_stack_t
*info
= rs6000_stack_info ();
881 if (info
->first_gp_reg_save
== 32
882 && info
->first_fp_reg_save
== 64
883 && info
->first_altivec_reg_save
== LAST_ALTIVEC_REGNO
+ 1
886 && info
->vrsave_mask
== 0
894 /* Returns 1 always. */
897 any_operand (op
, mode
)
898 rtx op ATTRIBUTE_UNUSED
;
899 enum machine_mode mode ATTRIBUTE_UNUSED
;
904 /* Returns 1 if op is the count register. */
906 count_register_operand (op
, mode
)
908 enum machine_mode mode ATTRIBUTE_UNUSED
;
910 if (GET_CODE (op
) != REG
)
913 if (REGNO (op
) == COUNT_REGISTER_REGNUM
)
916 if (REGNO (op
) > FIRST_PSEUDO_REGISTER
)
922 /* Returns 1 if op is an altivec register. */
924 altivec_register_operand (op
, mode
)
926 enum machine_mode mode ATTRIBUTE_UNUSED
;
929 return (register_operand (op
, mode
)
930 && (GET_CODE (op
) != REG
931 || REGNO (op
) > FIRST_PSEUDO_REGISTER
932 || ALTIVEC_REGNO_P (REGNO (op
))));
936 xer_operand (op
, mode
)
938 enum machine_mode mode ATTRIBUTE_UNUSED
;
940 if (GET_CODE (op
) != REG
)
943 if (XER_REGNO_P (REGNO (op
)))
949 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
950 by such constants completes more quickly. */
953 s8bit_cint_operand (op
, mode
)
955 enum machine_mode mode ATTRIBUTE_UNUSED
;
957 return ( GET_CODE (op
) == CONST_INT
958 && (INTVAL (op
) >= -128 && INTVAL (op
) <= 127));
961 /* Return 1 if OP is a constant that can fit in a D field. */
964 short_cint_operand (op
, mode
)
966 enum machine_mode mode ATTRIBUTE_UNUSED
;
968 return (GET_CODE (op
) == CONST_INT
969 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I'));
972 /* Similar for an unsigned D field. */
975 u_short_cint_operand (op
, mode
)
977 enum machine_mode mode ATTRIBUTE_UNUSED
;
979 return (GET_CODE (op
) == CONST_INT
980 && CONST_OK_FOR_LETTER_P (INTVAL (op
) & GET_MODE_MASK (mode
), 'K'));
983 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
986 non_short_cint_operand (op
, mode
)
988 enum machine_mode mode ATTRIBUTE_UNUSED
;
990 return (GET_CODE (op
) == CONST_INT
991 && (unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x8000) >= 0x10000);
994 /* Returns 1 if OP is a CONST_INT that is a positive value
995 and an exact power of 2. */
998 exact_log2_cint_operand (op
, mode
)
1000 enum machine_mode mode ATTRIBUTE_UNUSED
;
1002 return (GET_CODE (op
) == CONST_INT
1004 && exact_log2 (INTVAL (op
)) >= 0);
1007 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1011 gpc_reg_operand (op
, mode
)
1013 enum machine_mode mode
;
1015 return (register_operand (op
, mode
)
1016 && (GET_CODE (op
) != REG
1017 || (REGNO (op
) >= ARG_POINTER_REGNUM
1018 && !XER_REGNO_P (REGNO (op
)))
1019 || REGNO (op
) < MQ_REGNO
));
1022 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1026 cc_reg_operand (op
, mode
)
1028 enum machine_mode mode
;
1030 return (register_operand (op
, mode
)
1031 && (GET_CODE (op
) != REG
1032 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
1033 || CR_REGNO_P (REGNO (op
))));
1036 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1037 CR field that isn't CR0. */
1040 cc_reg_not_cr0_operand (op
, mode
)
1042 enum machine_mode mode
;
1044 return (register_operand (op
, mode
)
1045 && (GET_CODE (op
) != REG
1046 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
1047 || CR_REGNO_NOT_CR0_P (REGNO (op
))));
1050 /* Returns 1 if OP is either a constant integer valid for a D-field or
1051 a non-special register. If a register, it must be in the proper
1052 mode unless MODE is VOIDmode. */
1055 reg_or_short_operand (op
, mode
)
1057 enum machine_mode mode
;
1059 return short_cint_operand (op
, mode
) || gpc_reg_operand (op
, mode
);
1062 /* Similar, except check if the negation of the constant would be
1063 valid for a D-field. */
1066 reg_or_neg_short_operand (op
, mode
)
1068 enum machine_mode mode
;
1070 if (GET_CODE (op
) == CONST_INT
)
1071 return CONST_OK_FOR_LETTER_P (INTVAL (op
), 'P');
1073 return gpc_reg_operand (op
, mode
);
1076 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1077 a non-special register. If a register, it must be in the proper
1078 mode unless MODE is VOIDmode. */
1081 reg_or_aligned_short_operand (op
, mode
)
1083 enum machine_mode mode
;
1085 if (gpc_reg_operand (op
, mode
))
1087 else if (short_cint_operand (op
, mode
) && !(INTVAL (op
) & 3))
1094 /* Return 1 if the operand is either a register or an integer whose
1095 high-order 16 bits are zero. */
1098 reg_or_u_short_operand (op
, mode
)
1100 enum machine_mode mode
;
1102 return u_short_cint_operand (op
, mode
) || gpc_reg_operand (op
, mode
);
1105 /* Return 1 is the operand is either a non-special register or ANY
1106 constant integer. */
1109 reg_or_cint_operand (op
, mode
)
1111 enum machine_mode mode
;
1113 return (GET_CODE (op
) == CONST_INT
|| gpc_reg_operand (op
, mode
));
1116 /* Return 1 is the operand is either a non-special register or ANY
1117 32-bit signed constant integer. */
1120 reg_or_arith_cint_operand (op
, mode
)
1122 enum machine_mode mode
;
1124 return (gpc_reg_operand (op
, mode
)
1125 || (GET_CODE (op
) == CONST_INT
1126 #if HOST_BITS_PER_WIDE_INT != 32
1127 && ((unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x80000000)
1128 < (unsigned HOST_WIDE_INT
) 0x100000000ll
)
1133 /* Return 1 is the operand is either a non-special register or a 32-bit
1134 signed constant integer valid for 64-bit addition. */
1137 reg_or_add_cint64_operand (op
, mode
)
1139 enum machine_mode mode
;
1141 return (gpc_reg_operand (op
, mode
)
1142 || (GET_CODE (op
) == CONST_INT
1143 #if HOST_BITS_PER_WIDE_INT == 32
1144 && INTVAL (op
) < 0x7fff8000
1146 && ((unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x80008000)
1152 /* Return 1 is the operand is either a non-special register or a 32-bit
1153 signed constant integer valid for 64-bit subtraction. */
1156 reg_or_sub_cint64_operand (op
, mode
)
1158 enum machine_mode mode
;
1160 return (gpc_reg_operand (op
, mode
)
1161 || (GET_CODE (op
) == CONST_INT
1162 #if HOST_BITS_PER_WIDE_INT == 32
1163 && (- INTVAL (op
)) < 0x7fff8000
1165 && ((unsigned HOST_WIDE_INT
) ((- INTVAL (op
)) + 0x80008000)
1171 /* Return 1 is the operand is either a non-special register or ANY
1172 32-bit unsigned constant integer. */
1175 reg_or_logical_cint_operand (op
, mode
)
1177 enum machine_mode mode
;
1179 if (GET_CODE (op
) == CONST_INT
)
1181 if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
)
1183 if (GET_MODE_BITSIZE (mode
) <= 32)
1186 if (INTVAL (op
) < 0)
1190 return ((INTVAL (op
) & GET_MODE_MASK (mode
)
1191 & (~ (unsigned HOST_WIDE_INT
) 0xffffffff)) == 0);
1193 else if (GET_CODE (op
) == CONST_DOUBLE
)
1195 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
1199 return CONST_DOUBLE_HIGH (op
) == 0;
1202 return gpc_reg_operand (op
, mode
);
1205 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1208 got_operand (op
, mode
)
1210 enum machine_mode mode ATTRIBUTE_UNUSED
;
1212 return (GET_CODE (op
) == SYMBOL_REF
1213 || GET_CODE (op
) == CONST
1214 || GET_CODE (op
) == LABEL_REF
);
1217 /* Return 1 if the operand is a simple references that can be loaded via
1218 the GOT (labels involving addition aren't allowed). */
1221 got_no_const_operand (op
, mode
)
1223 enum machine_mode mode ATTRIBUTE_UNUSED
;
1225 return (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
);
1228 /* Return the number of instructions it takes to form a constant in an
1229 integer register. */
1232 num_insns_constant_wide (value
)
1233 HOST_WIDE_INT value
;
1235 /* signed constant loadable with {cal|addi} */
1236 if (CONST_OK_FOR_LETTER_P (value
, 'I'))
1239 /* constant loadable with {cau|addis} */
1240 else if (CONST_OK_FOR_LETTER_P (value
, 'L'))
1243 #if HOST_BITS_PER_WIDE_INT == 64
1244 else if (TARGET_POWERPC64
)
1246 HOST_WIDE_INT low
= ((value
& 0xffffffff) ^ 0x80000000) - 0x80000000;
1247 HOST_WIDE_INT high
= value
>> 31;
1249 if (high
== 0 || high
== -1)
1255 return num_insns_constant_wide (high
) + 1;
1257 return (num_insns_constant_wide (high
)
1258 + num_insns_constant_wide (low
) + 1);
1267 num_insns_constant (op
, mode
)
1269 enum machine_mode mode
;
1271 if (GET_CODE (op
) == CONST_INT
)
1273 #if HOST_BITS_PER_WIDE_INT == 64
1274 if ((INTVAL (op
) >> 31) != 0 && (INTVAL (op
) >> 31) != -1
1275 && mask64_operand (op
, mode
))
1279 return num_insns_constant_wide (INTVAL (op
));
1282 else if (GET_CODE (op
) == CONST_DOUBLE
&& mode
== SFmode
)
1287 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1288 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
1289 return num_insns_constant_wide ((HOST_WIDE_INT
) l
);
1292 else if (GET_CODE (op
) == CONST_DOUBLE
)
1298 int endian
= (WORDS_BIG_ENDIAN
== 0);
1300 if (mode
== VOIDmode
|| mode
== DImode
)
1302 high
= CONST_DOUBLE_HIGH (op
);
1303 low
= CONST_DOUBLE_LOW (op
);
1307 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1308 REAL_VALUE_TO_TARGET_DOUBLE (rv
, l
);
1310 low
= l
[1 - endian
];
1314 return (num_insns_constant_wide (low
)
1315 + num_insns_constant_wide (high
));
1319 if (high
== 0 && low
>= 0)
1320 return num_insns_constant_wide (low
);
1322 else if (high
== -1 && low
< 0)
1323 return num_insns_constant_wide (low
);
1325 else if (mask64_operand (op
, mode
))
1329 return num_insns_constant_wide (high
) + 1;
1332 return (num_insns_constant_wide (high
)
1333 + num_insns_constant_wide (low
) + 1);
1341 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1342 register with one instruction per word. We only do this if we can
1343 safely read CONST_DOUBLE_{LOW,HIGH}. */
1346 easy_fp_constant (op
, mode
)
1348 enum machine_mode mode
;
1350 if (GET_CODE (op
) != CONST_DOUBLE
1351 || GET_MODE (op
) != mode
1352 || (GET_MODE_CLASS (mode
) != MODE_FLOAT
&& mode
!= DImode
))
1355 /* Consider all constants with -msoft-float to be easy. */
1356 if ((TARGET_SOFT_FLOAT
|| !TARGET_FPRS
)
1360 /* If we are using V.4 style PIC, consider all constants to be hard. */
1361 if (flag_pic
&& DEFAULT_ABI
== ABI_V4
)
1364 #ifdef TARGET_RELOCATABLE
1365 /* Similarly if we are using -mrelocatable, consider all constants
1367 if (TARGET_RELOCATABLE
)
1376 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1377 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv
, k
);
1379 return (num_insns_constant_wide ((HOST_WIDE_INT
) k
[0]) == 1
1380 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[1]) == 1
1381 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[2]) == 1
1382 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[3]) == 1);
1385 else if (mode
== DFmode
)
1390 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1391 REAL_VALUE_TO_TARGET_DOUBLE (rv
, k
);
1393 return (num_insns_constant_wide ((HOST_WIDE_INT
) k
[0]) == 1
1394 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[1]) == 1);
1397 else if (mode
== SFmode
)
1402 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1403 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
1405 return num_insns_constant_wide (l
) == 1;
1408 else if (mode
== DImode
)
1409 return ((TARGET_POWERPC64
1410 && GET_CODE (op
) == CONST_DOUBLE
&& CONST_DOUBLE_LOW (op
) == 0)
1411 || (num_insns_constant (op
, DImode
) <= 2));
1413 else if (mode
== SImode
)
1419 /* Return 1 if the operand is a CONST_INT and can be put into a
1420 register with one instruction. */
1423 easy_vector_constant (op
)
1429 if (GET_CODE (op
) != CONST_VECTOR
)
1432 units
= CONST_VECTOR_NUNITS (op
);
1434 /* We can generate 0 easily. Look for that. */
1435 for (i
= 0; i
< units
; ++i
)
1437 elt
= CONST_VECTOR_ELT (op
, i
);
1439 /* We could probably simplify this by just checking for equality
1440 with CONST0_RTX for the current mode, but let's be safe
1443 switch (GET_CODE (elt
))
1446 if (INTVAL (elt
) != 0)
1450 if (CONST_DOUBLE_LOW (elt
) != 0 || CONST_DOUBLE_HIGH (elt
) != 0)
1458 /* We could probably generate a few other constants trivially, but
1459 gcc doesn't generate them yet. FIXME later. */
1463 /* Return 1 if the operand is the constant 0. This works for scalars
1464 as well as vectors. */
1466 zero_constant (op
, mode
)
1468 enum machine_mode mode
;
1470 return op
== CONST0_RTX (mode
);
1473 /* Return 1 if the operand is 0.0. */
1475 zero_fp_constant (op
, mode
)
1477 enum machine_mode mode
;
1479 return GET_MODE_CLASS (mode
) == MODE_FLOAT
&& op
== CONST0_RTX (mode
);
1482 /* Return 1 if the operand is in volatile memory. Note that during
1483 the RTL generation phase, memory_operand does not return TRUE for
1484 volatile memory references. So this function allows us to
1485 recognize volatile references where its safe. */
1488 volatile_mem_operand (op
, mode
)
1490 enum machine_mode mode
;
1492 if (GET_CODE (op
) != MEM
)
1495 if (!MEM_VOLATILE_P (op
))
1498 if (mode
!= GET_MODE (op
))
1501 if (reload_completed
)
1502 return memory_operand (op
, mode
);
1504 if (reload_in_progress
)
1505 return strict_memory_address_p (mode
, XEXP (op
, 0));
1507 return memory_address_p (mode
, XEXP (op
, 0));
1510 /* Return 1 if the operand is an offsettable memory operand. */
1513 offsettable_mem_operand (op
, mode
)
1515 enum machine_mode mode
;
1517 return ((GET_CODE (op
) == MEM
)
1518 && offsettable_address_p (reload_completed
|| reload_in_progress
,
1519 mode
, XEXP (op
, 0)));
1522 /* Return 1 if the operand is either an easy FP constant (see above) or
1526 mem_or_easy_const_operand (op
, mode
)
1528 enum machine_mode mode
;
1530 return memory_operand (op
, mode
) || easy_fp_constant (op
, mode
);
1533 /* Return 1 if the operand is either a non-special register or an item
1534 that can be used as the operand of a `mode' add insn. */
1537 add_operand (op
, mode
)
1539 enum machine_mode mode
;
1541 if (GET_CODE (op
) == CONST_INT
)
1542 return (CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I')
1543 || CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'));
1545 return gpc_reg_operand (op
, mode
);
1548 /* Return 1 if OP is a constant but not a valid add_operand. */
1551 non_add_cint_operand (op
, mode
)
1553 enum machine_mode mode ATTRIBUTE_UNUSED
;
1555 return (GET_CODE (op
) == CONST_INT
1556 && !CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I')
1557 && !CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'));
1560 /* Return 1 if the operand is a non-special register or a constant that
1561 can be used as the operand of an OR or XOR insn on the RS/6000. */
1564 logical_operand (op
, mode
)
1566 enum machine_mode mode
;
1568 HOST_WIDE_INT opl
, oph
;
1570 if (gpc_reg_operand (op
, mode
))
1573 if (GET_CODE (op
) == CONST_INT
)
1575 opl
= INTVAL (op
) & GET_MODE_MASK (mode
);
1577 #if HOST_BITS_PER_WIDE_INT <= 32
1578 if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
&& opl
< 0)
1582 else if (GET_CODE (op
) == CONST_DOUBLE
)
1584 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1587 opl
= CONST_DOUBLE_LOW (op
);
1588 oph
= CONST_DOUBLE_HIGH (op
);
1595 return ((opl
& ~ (unsigned HOST_WIDE_INT
) 0xffff) == 0
1596 || (opl
& ~ (unsigned HOST_WIDE_INT
) 0xffff0000) == 0);
1599 /* Return 1 if C is a constant that is not a logical operand (as
1600 above), but could be split into one. */
1603 non_logical_cint_operand (op
, mode
)
1605 enum machine_mode mode
;
1607 return ((GET_CODE (op
) == CONST_INT
|| GET_CODE (op
) == CONST_DOUBLE
)
1608 && ! logical_operand (op
, mode
)
1609 && reg_or_logical_cint_operand (op
, mode
));
1612 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1613 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1614 Reject all ones and all zeros, since these should have been optimized
1615 away and confuse the making of MB and ME. */
1618 mask_operand (op
, mode
)
1620 enum machine_mode mode ATTRIBUTE_UNUSED
;
1622 HOST_WIDE_INT c
, lsb
;
1624 if (GET_CODE (op
) != CONST_INT
)
1629 /* Fail in 64-bit mode if the mask wraps around because the upper
1630 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1631 if (TARGET_POWERPC64
&& (c
& 0x80000001) == 0x80000001)
1634 /* We don't change the number of transitions by inverting,
1635 so make sure we start with the LS bit zero. */
1639 /* Reject all zeros or all ones. */
1643 /* Find the first transition. */
1646 /* Invert to look for a second transition. */
1649 /* Erase first transition. */
1652 /* Find the second transition (if any). */
1655 /* Match if all the bits above are 1's (or c is zero). */
1659 /* Return 1 for the PowerPC64 rlwinm corner case. */
1662 mask_operand_wrap (op
, mode
)
1664 enum machine_mode mode ATTRIBUTE_UNUSED
;
1666 HOST_WIDE_INT c
, lsb
;
1668 if (GET_CODE (op
) != CONST_INT
)
1673 if ((c
& 0x80000001) != 0x80000001)
1687 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1688 It is if there are no more than one 1->0 or 0->1 transitions.
1689 Reject all zeros, since zero should have been optimized away and
1690 confuses the making of MB and ME. */
1693 mask64_operand (op
, mode
)
1695 enum machine_mode mode ATTRIBUTE_UNUSED
;
1697 if (GET_CODE (op
) == CONST_INT
)
1699 HOST_WIDE_INT c
, lsb
;
1703 /* Reject all zeros. */
1707 /* We don't change the number of transitions by inverting,
1708 so make sure we start with the LS bit zero. */
1712 /* Find the transition, and check that all bits above are 1's. */
1715 /* Match if all the bits above are 1's (or c is zero). */
1721 /* Like mask64_operand, but allow up to three transitions. This
1722 predicate is used by insn patterns that generate two rldicl or
1723 rldicr machine insns. */
1726 mask64_2_operand (op
, mode
)
1728 enum machine_mode mode ATTRIBUTE_UNUSED
;
1730 if (GET_CODE (op
) == CONST_INT
)
1732 HOST_WIDE_INT c
, lsb
;
1736 /* Disallow all zeros. */
1740 /* We don't change the number of transitions by inverting,
1741 so make sure we start with the LS bit zero. */
1745 /* Find the first transition. */
1748 /* Invert to look for a second transition. */
1751 /* Erase first transition. */
1754 /* Find the second transition. */
1757 /* Invert to look for a third transition. */
1760 /* Erase second transition. */
1763 /* Find the third transition (if any). */
1766 /* Match if all the bits above are 1's (or c is zero). */
1772 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
1773 implement ANDing by the mask IN. */
1775 build_mask64_2_operands (in
, out
)
1779 #if HOST_BITS_PER_WIDE_INT >= 64
1780 unsigned HOST_WIDE_INT c
, lsb
, m1
, m2
;
1783 if (GET_CODE (in
) != CONST_INT
)
1789 /* Assume c initially something like 0x00fff000000fffff. The idea
1790 is to rotate the word so that the middle ^^^^^^ group of zeros
1791 is at the MS end and can be cleared with an rldicl mask. We then
1792 rotate back and clear off the MS ^^ group of zeros with a
1794 c
= ~c
; /* c == 0xff000ffffff00000 */
1795 lsb
= c
& -c
; /* lsb == 0x0000000000100000 */
1796 m1
= -lsb
; /* m1 == 0xfffffffffff00000 */
1797 c
= ~c
; /* c == 0x00fff000000fffff */
1798 c
&= -lsb
; /* c == 0x00fff00000000000 */
1799 lsb
= c
& -c
; /* lsb == 0x0000100000000000 */
1800 c
= ~c
; /* c == 0xff000fffffffffff */
1801 c
&= -lsb
; /* c == 0xff00000000000000 */
1803 while ((lsb
>>= 1) != 0)
1804 shift
++; /* shift == 44 on exit from loop */
1805 m1
<<= 64 - shift
; /* m1 == 0xffffff0000000000 */
1806 m1
= ~m1
; /* m1 == 0x000000ffffffffff */
1807 m2
= ~c
; /* m2 == 0x00ffffffffffffff */
1811 /* Assume c initially something like 0xff000f0000000000. The idea
1812 is to rotate the word so that the ^^^ middle group of zeros
1813 is at the LS end and can be cleared with an rldicr mask. We then
1814 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
1816 lsb
= c
& -c
; /* lsb == 0x0000010000000000 */
1817 m2
= -lsb
; /* m2 == 0xffffff0000000000 */
1818 c
= ~c
; /* c == 0x00fff0ffffffffff */
1819 c
&= -lsb
; /* c == 0x00fff00000000000 */
1820 lsb
= c
& -c
; /* lsb == 0x0000100000000000 */
1821 c
= ~c
; /* c == 0xff000fffffffffff */
1822 c
&= -lsb
; /* c == 0xff00000000000000 */
1824 while ((lsb
>>= 1) != 0)
1825 shift
++; /* shift == 44 on exit from loop */
1826 m1
= ~c
; /* m1 == 0x00ffffffffffffff */
1827 m1
>>= shift
; /* m1 == 0x0000000000000fff */
1828 m1
= ~m1
; /* m1 == 0xfffffffffffff000 */
1831 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
1832 masks will be all 1's. We are guaranteed more than one transition. */
1833 out
[0] = GEN_INT (64 - shift
);
1834 out
[1] = GEN_INT (m1
);
1835 out
[2] = GEN_INT (shift
);
1836 out
[3] = GEN_INT (m2
);
1844 /* Return 1 if the operand is either a non-special register or a constant
1845 that can be used as the operand of a PowerPC64 logical AND insn. */
1848 and64_operand (op
, mode
)
1850 enum machine_mode mode
;
1852 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
1853 return (gpc_reg_operand (op
, mode
) || mask64_operand (op
, mode
));
1855 return (logical_operand (op
, mode
) || mask64_operand (op
, mode
));
1858 /* Like the above, but also match constants that can be implemented
1859 with two rldicl or rldicr insns. */
1862 and64_2_operand (op
, mode
)
1864 enum machine_mode mode
;
1866 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
1867 return gpc_reg_operand (op
, mode
) || mask64_2_operand (op
, mode
);
1869 return logical_operand (op
, mode
) || mask64_2_operand (op
, mode
);
1872 /* Return 1 if the operand is either a non-special register or a
1873 constant that can be used as the operand of an RS/6000 logical AND insn. */
1876 and_operand (op
, mode
)
1878 enum machine_mode mode
;
1880 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
1881 return (gpc_reg_operand (op
, mode
) || mask_operand (op
, mode
));
1883 return (logical_operand (op
, mode
) || mask_operand (op
, mode
));
1886 /* Return 1 if the operand is a general register or memory operand. */
1889 reg_or_mem_operand (op
, mode
)
1891 enum machine_mode mode
;
1893 return (gpc_reg_operand (op
, mode
)
1894 || memory_operand (op
, mode
)
1895 || volatile_mem_operand (op
, mode
));
1898 /* Return 1 if the operand is a general register or memory operand without
1899 pre_inc or pre_dec which produces invalid form of PowerPC lwa
1903 lwa_operand (op
, mode
)
1905 enum machine_mode mode
;
1909 if (reload_completed
&& GET_CODE (inner
) == SUBREG
)
1910 inner
= SUBREG_REG (inner
);
1912 return gpc_reg_operand (inner
, mode
)
1913 || (memory_operand (inner
, mode
)
1914 && GET_CODE (XEXP (inner
, 0)) != PRE_INC
1915 && GET_CODE (XEXP (inner
, 0)) != PRE_DEC
1916 && (GET_CODE (XEXP (inner
, 0)) != PLUS
1917 || GET_CODE (XEXP (XEXP (inner
, 0), 1)) != CONST_INT
1918 || INTVAL (XEXP (XEXP (inner
, 0), 1)) % 4 == 0));
1921 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
1924 symbol_ref_operand (op
, mode
)
1926 enum machine_mode mode
;
1928 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1931 return (GET_CODE (op
) == SYMBOL_REF
);
1934 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1935 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
1938 call_operand (op
, mode
)
1940 enum machine_mode mode
;
1942 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1945 return (GET_CODE (op
) == SYMBOL_REF
1946 || (GET_CODE (op
) == REG
1947 && (REGNO (op
) == LINK_REGISTER_REGNUM
1948 || REGNO (op
) == COUNT_REGISTER_REGNUM
1949 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
)));
1952 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1953 this file and the function is not weakly defined. */
1956 current_file_function_operand (op
, mode
)
1958 enum machine_mode mode ATTRIBUTE_UNUSED
;
1960 return (GET_CODE (op
) == SYMBOL_REF
1961 && (SYMBOL_REF_FLAG (op
)
1962 || (op
== XEXP (DECL_RTL (current_function_decl
), 0)
1963 && ! DECL_WEAK (current_function_decl
))));
1966 /* Return 1 if this operand is a valid input for a move insn. */
1969 input_operand (op
, mode
)
1971 enum machine_mode mode
;
1973 /* Memory is always valid. */
1974 if (memory_operand (op
, mode
))
1977 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
1978 if (GET_CODE (op
) == CONSTANT_P_RTX
)
1981 /* For floating-point, easy constants are valid. */
1982 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
1984 && easy_fp_constant (op
, mode
))
1987 /* Allow any integer constant. */
1988 if (GET_MODE_CLASS (mode
) == MODE_INT
1989 && (GET_CODE (op
) == CONST_INT
1990 || GET_CODE (op
) == CONST_DOUBLE
))
1993 /* For floating-point or multi-word mode, the only remaining valid type
1995 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
1996 || GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
1997 return register_operand (op
, mode
);
1999 /* The only cases left are integral modes one word or smaller (we
2000 do not get called for MODE_CC values). These can be in any
2002 if (register_operand (op
, mode
))
2005 /* A SYMBOL_REF referring to the TOC is valid. */
2006 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op
))
2009 /* A constant pool expression (relative to the TOC) is valid */
2010 if (TOC_RELATIVE_EXPR_P (op
))
2013 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2015 if (DEFAULT_ABI
== ABI_V4
2016 && (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == CONST
)
2017 && small_data_operand (op
, Pmode
))
2023 /* Return 1 for an operand in small memory on V.4/eabi. */
2026 small_data_operand (op
, mode
)
2027 rtx op ATTRIBUTE_UNUSED
;
2028 enum machine_mode mode ATTRIBUTE_UNUSED
;
2033 if (rs6000_sdata
== SDATA_NONE
|| rs6000_sdata
== SDATA_DATA
)
2036 if (DEFAULT_ABI
!= ABI_V4
)
2039 if (GET_CODE (op
) == SYMBOL_REF
)
2042 else if (GET_CODE (op
) != CONST
2043 || GET_CODE (XEXP (op
, 0)) != PLUS
2044 || GET_CODE (XEXP (XEXP (op
, 0), 0)) != SYMBOL_REF
2045 || GET_CODE (XEXP (XEXP (op
, 0), 1)) != CONST_INT
)
2050 rtx sum
= XEXP (op
, 0);
2051 HOST_WIDE_INT summand
;
2053 /* We have to be careful here, because it is the referenced address
2054 that must be 32k from _SDA_BASE_, not just the symbol. */
2055 summand
= INTVAL (XEXP (sum
, 1));
2056 if (summand
< 0 || summand
> g_switch_value
)
2059 sym_ref
= XEXP (sum
, 0);
2062 if (*XSTR (sym_ref
, 0) != '@')
2073 constant_pool_expr_1 (op
, have_sym
, have_toc
)
2078 switch (GET_CODE(op
))
2081 if (CONSTANT_POOL_ADDRESS_P (op
))
2083 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op
), Pmode
))
2091 else if (! strcmp (XSTR (op
, 0), toc_label_name
))
2100 return (constant_pool_expr_1 (XEXP (op
, 0), have_sym
, have_toc
)
2101 && constant_pool_expr_1 (XEXP (op
, 1), have_sym
, have_toc
));
2103 return constant_pool_expr_1 (XEXP (op
, 0), have_sym
, have_toc
);
2112 constant_pool_expr_p (op
)
2117 return constant_pool_expr_1 (op
, &have_sym
, &have_toc
) && have_sym
;
2121 toc_relative_expr_p (op
)
2126 return constant_pool_expr_1 (op
, &have_sym
, &have_toc
) && have_toc
;
2129 /* Try machine-dependent ways of modifying an illegitimate address
2130 to be legitimate. If we find one, return the new, valid address.
2131 This is used from only one place: `memory_address' in explow.c.
2133 OLDX is the address as it was before break_out_memory_refs was
2134 called. In some cases it is useful to look at this to decide what
2137 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2139 It is always safe for this function to do nothing. It exists to
2140 recognize opportunities to optimize the output.
2142 On RS/6000, first check for the sum of a register with a constant
2143 integer that is out of range. If so, generate code to add the
2144 constant with the low-order 16 bits masked to the register and force
2145 this result into another register (this can be done with `cau').
2146 Then generate an address of REG+(CONST&0xffff), allowing for the
2147 possibility of bit 16 being a one.
2149 Then check for the sum of a register and something not constant, try to
2150 load the other things into a register and return the sum. */
2152 rs6000_legitimize_address (x
, oldx
, mode
)
2154 rtx oldx ATTRIBUTE_UNUSED
;
2155 enum machine_mode mode
;
2157 if (GET_CODE (x
) == PLUS
2158 && GET_CODE (XEXP (x
, 0)) == REG
2159 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2160 && (unsigned HOST_WIDE_INT
) (INTVAL (XEXP (x
, 1)) + 0x8000) >= 0x10000)
2162 HOST_WIDE_INT high_int
, low_int
;
2164 low_int
= ((INTVAL (XEXP (x
, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2165 high_int
= INTVAL (XEXP (x
, 1)) - low_int
;
2166 sum
= force_operand (gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
2167 GEN_INT (high_int
)), 0);
2168 return gen_rtx_PLUS (Pmode
, sum
, GEN_INT (low_int
));
2170 else if (GET_CODE (x
) == PLUS
2171 && GET_CODE (XEXP (x
, 0)) == REG
2172 && GET_CODE (XEXP (x
, 1)) != CONST_INT
2173 && GET_MODE_NUNITS (mode
) == 1
2174 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
)
2176 || (mode
!= DFmode
&& mode
!= TFmode
))
2177 && (TARGET_POWERPC64
|| mode
!= DImode
)
2180 return gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
2181 force_reg (Pmode
, force_operand (XEXP (x
, 1), 0)));
2183 else if (ALTIVEC_VECTOR_MODE (mode
))
2187 /* Make sure both operands are registers. */
2188 if (GET_CODE (x
) == PLUS
)
2189 return gen_rtx_PLUS (Pmode
, force_reg (Pmode
, XEXP (x
, 0)),
2190 force_reg (Pmode
, XEXP (x
, 1)));
2192 reg
= force_reg (Pmode
, x
);
2195 else if (SPE_VECTOR_MODE (mode
))
2197 /* We accept [reg + reg] and [reg + OFFSET]. */
2199 if (GET_CODE (x
) == PLUS
)
2201 rtx op1
= XEXP (x
, 0);
2202 rtx op2
= XEXP (x
, 1);
2204 op1
= force_reg (Pmode
, op1
);
2206 if (GET_CODE (op2
) != REG
2207 && (GET_CODE (op2
) != CONST_INT
2208 || !SPE_CONST_OFFSET_OK (INTVAL (op2
))))
2209 op2
= force_reg (Pmode
, op2
);
2211 return gen_rtx_PLUS (Pmode
, op1
, op2
);
2214 return force_reg (Pmode
, x
);
2216 else if (TARGET_ELF
&& TARGET_32BIT
&& TARGET_NO_TOC
&& ! flag_pic
2217 && GET_CODE (x
) != CONST_INT
2218 && GET_CODE (x
) != CONST_DOUBLE
2220 && GET_MODE_NUNITS (mode
) == 1
2221 && (GET_MODE_BITSIZE (mode
) <= 32
2222 || ((TARGET_HARD_FLOAT
&& TARGET_FPRS
) && mode
== DFmode
)))
2224 rtx reg
= gen_reg_rtx (Pmode
);
2225 emit_insn (gen_elf_high (reg
, (x
)));
2226 return gen_rtx_LO_SUM (Pmode
, reg
, (x
));
2228 else if (TARGET_MACHO
&& TARGET_32BIT
&& TARGET_NO_TOC
2230 && GET_CODE (x
) != CONST_INT
2231 && GET_CODE (x
) != CONST_DOUBLE
2233 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
) || mode
!= DFmode
)
2237 rtx reg
= gen_reg_rtx (Pmode
);
2238 emit_insn (gen_macho_high (reg
, (x
)));
2239 return gen_rtx_LO_SUM (Pmode
, reg
, (x
));
2242 && CONSTANT_POOL_EXPR_P (x
)
2243 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x
), Pmode
))
2245 return create_TOC_reference (x
);
2251 /* The convention appears to be to define this wherever it is used.
2252 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2253 is now used here. */
2254 #ifndef REG_MODE_OK_FOR_BASE_P
2255 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2258 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2259 replace the input X, or the original X if no replacement is called for.
2260 The output parameter *WIN is 1 if the calling macro should goto WIN,
2263 For RS/6000, we wish to handle large displacements off a base
2264 register by splitting the addend across an addiu/addis and the mem insn.
2265 This cuts number of extra insns needed from 3 to 1.
2267 On Darwin, we use this to generate code for floating point constants.
2268 A movsf_low is generated so we wind up with 2 instructions rather than 3.
2269 The Darwin code is inside #if TARGET_MACHO because only then is
2270 machopic_function_base_name() defined. */
2272 rs6000_legitimize_reload_address (x
, mode
, opnum
, type
, ind_levels
, win
)
2274 enum machine_mode mode
;
2277 int ind_levels ATTRIBUTE_UNUSED
;
2280 /* We must recognize output that we have already generated ourselves. */
2281 if (GET_CODE (x
) == PLUS
2282 && GET_CODE (XEXP (x
, 0)) == PLUS
2283 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == REG
2284 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
2285 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2287 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2288 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
2289 opnum
, (enum reload_type
)type
);
2295 if (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
2296 && GET_CODE (x
) == LO_SUM
2297 && GET_CODE (XEXP (x
, 0)) == PLUS
2298 && XEXP (XEXP (x
, 0), 0) == pic_offset_table_rtx
2299 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == HIGH
2300 && GET_CODE (XEXP (XEXP (XEXP (x
, 0), 1), 0)) == CONST
2301 && XEXP (XEXP (XEXP (x
, 0), 1), 0) == XEXP (x
, 1)
2302 && GET_CODE (XEXP (XEXP (x
, 1), 0)) == MINUS
2303 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 0)) == SYMBOL_REF
2304 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 1)) == SYMBOL_REF
)
2306 /* Result of previous invocation of this function on Darwin
2307 floating point constant. */
2308 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2309 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
2310 opnum
, (enum reload_type
)type
);
2315 if (GET_CODE (x
) == PLUS
2316 && GET_CODE (XEXP (x
, 0)) == REG
2317 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
2318 && REG_MODE_OK_FOR_BASE_P (XEXP (x
, 0), mode
)
2319 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2320 && !SPE_VECTOR_MODE (mode
)
2321 && !ALTIVEC_VECTOR_MODE (mode
))
2323 HOST_WIDE_INT val
= INTVAL (XEXP (x
, 1));
2324 HOST_WIDE_INT low
= ((val
& 0xffff) ^ 0x8000) - 0x8000;
2326 = (((val
- low
) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2328 /* Check for 32-bit overflow. */
2329 if (high
+ low
!= val
)
2335 /* Reload the high part into a base reg; leave the low part
2336 in the mem directly. */
2338 x
= gen_rtx_PLUS (GET_MODE (x
),
2339 gen_rtx_PLUS (GET_MODE (x
), XEXP (x
, 0),
2343 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2344 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
2345 opnum
, (enum reload_type
)type
);
2350 if (GET_CODE (x
) == SYMBOL_REF
2351 && DEFAULT_ABI
== ABI_DARWIN
2352 && !ALTIVEC_VECTOR_MODE (mode
)
2355 /* Darwin load of floating point constant. */
2356 rtx offset
= gen_rtx (CONST
, Pmode
,
2357 gen_rtx (MINUS
, Pmode
, x
,
2358 gen_rtx (SYMBOL_REF
, Pmode
,
2359 machopic_function_base_name ())));
2360 x
= gen_rtx (LO_SUM
, GET_MODE (x
),
2361 gen_rtx (PLUS
, Pmode
, pic_offset_table_rtx
,
2362 gen_rtx (HIGH
, Pmode
, offset
)), offset
);
2363 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2364 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
2365 opnum
, (enum reload_type
)type
);
2371 && CONSTANT_POOL_EXPR_P (x
)
2372 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x
), mode
))
2374 (x
) = create_TOC_reference (x
);
2382 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2383 that is a valid memory address for an instruction.
2384 The MODE argument is the machine mode for the MEM expression
2385 that wants to use this address.
2387 On the RS/6000, there are four valid address: a SYMBOL_REF that
2388 refers to a constant pool entry of an address (or the sum of it
2389 plus a constant), a short (16-bit signed) constant plus a register,
2390 the sum of two registers, or a register indirect, possibly with an
2391 auto-increment. For DFmode and DImode with a constant plus register,
2392 we must ensure that both words are addressable or PowerPC64 with offset
2395 For modes spanning multiple registers (DFmode in 32-bit GPRs,
2396 32-bit DImode, TImode), indexed addressing cannot be used because
2397 adjacent memory cells are accessed by adding word-sized offsets
2398 during assembly output. */
2400 rs6000_legitimate_address (mode
, x
, reg_ok_strict
)
2401 enum machine_mode mode
;
2405 if (LEGITIMATE_INDIRECT_ADDRESS_P (x
, reg_ok_strict
))
2407 if ((GET_CODE (x
) == PRE_INC
|| GET_CODE (x
) == PRE_DEC
)
2408 && !ALTIVEC_VECTOR_MODE (mode
)
2409 && !SPE_VECTOR_MODE (mode
)
2411 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x
, 0), reg_ok_strict
))
2413 if (LEGITIMATE_SMALL_DATA_P (mode
, x
))
2415 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x
))
2417 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2419 && GET_CODE (x
) == PLUS
2420 && GET_CODE (XEXP (x
, 0)) == REG
2421 && XEXP (x
, 0) == virtual_stack_vars_rtx
2422 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2424 if (LEGITIMATE_OFFSET_ADDRESS_P (mode
, x
, reg_ok_strict
))
2427 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
)
2429 || (mode
!= DFmode
&& mode
!= TFmode
))
2430 && (TARGET_POWERPC64
|| mode
!= DImode
)
2431 && LEGITIMATE_INDEXED_ADDRESS_P (x
, reg_ok_strict
))
2433 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode
, x
, reg_ok_strict
))
2438 /* Try to output insns to set TARGET equal to the constant C if it can
2439 be done in less than N insns. Do all computations in MODE.
2440 Returns the place where the output has been placed if it can be
2441 done and the insns have been emitted. If it would take more than N
2442 insns, zero is returned and no insns and emitted. */
2445 rs6000_emit_set_const (dest
, mode
, source
, n
)
2447 enum machine_mode mode
;
2448 int n ATTRIBUTE_UNUSED
;
2450 rtx result
, insn
, set
;
2451 HOST_WIDE_INT c0
, c1
;
2453 if (mode
== QImode
|| mode
== HImode
)
2456 dest
= gen_reg_rtx (mode
);
2457 emit_insn (gen_rtx_SET (VOIDmode
, dest
, source
));
2460 else if (mode
== SImode
)
2462 result
= no_new_pseudos
? dest
: gen_reg_rtx (SImode
);
2464 emit_insn (gen_rtx_SET (VOIDmode
, result
,
2465 GEN_INT (INTVAL (source
)
2466 & (~ (HOST_WIDE_INT
) 0xffff))));
2467 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
2468 gen_rtx_IOR (SImode
, result
,
2469 GEN_INT (INTVAL (source
) & 0xffff))));
2472 else if (mode
== DImode
)
2474 if (GET_CODE (source
) == CONST_INT
)
2476 c0
= INTVAL (source
);
2479 else if (GET_CODE (source
) == CONST_DOUBLE
)
2481 #if HOST_BITS_PER_WIDE_INT >= 64
2482 c0
= CONST_DOUBLE_LOW (source
);
2485 c0
= CONST_DOUBLE_LOW (source
);
2486 c1
= CONST_DOUBLE_HIGH (source
);
2492 result
= rs6000_emit_set_long_const (dest
, c0
, c1
);
2497 insn
= get_last_insn ();
2498 set
= single_set (insn
);
2499 if (! CONSTANT_P (SET_SRC (set
)))
2500 set_unique_reg_note (insn
, REG_EQUAL
, source
);
2505 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2506 fall back to a straight forward decomposition. We do this to avoid
2507 exponential run times encountered when looking for longer sequences
2508 with rs6000_emit_set_const. */
2510 rs6000_emit_set_long_const (dest
, c1
, c2
)
2512 HOST_WIDE_INT c1
, c2
;
2514 if (!TARGET_POWERPC64
)
2516 rtx operand1
, operand2
;
2518 operand1
= operand_subword_force (dest
, WORDS_BIG_ENDIAN
== 0,
2520 operand2
= operand_subword_force (dest
, WORDS_BIG_ENDIAN
!= 0,
2522 emit_move_insn (operand1
, GEN_INT (c1
));
2523 emit_move_insn (operand2
, GEN_INT (c2
));
2527 HOST_WIDE_INT ud1
, ud2
, ud3
, ud4
;
2530 ud2
= (c1
& 0xffff0000) >> 16;
2531 #if HOST_BITS_PER_WIDE_INT >= 64
2535 ud4
= (c2
& 0xffff0000) >> 16;
2537 if ((ud4
== 0xffff && ud3
== 0xffff && ud2
== 0xffff && (ud1
& 0x8000))
2538 || (ud4
== 0 && ud3
== 0 && ud2
== 0 && ! (ud1
& 0x8000)))
2541 emit_move_insn (dest
, GEN_INT (((ud1
^ 0x8000) - 0x8000)));
2543 emit_move_insn (dest
, GEN_INT (ud1
));
2546 else if ((ud4
== 0xffff && ud3
== 0xffff && (ud2
& 0x8000))
2547 || (ud4
== 0 && ud3
== 0 && ! (ud2
& 0x8000)))
2550 emit_move_insn (dest
, GEN_INT (((ud2
<< 16) ^ 0x80000000)
2553 emit_move_insn (dest
, GEN_INT (ud2
<< 16));
2555 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
2557 else if ((ud4
== 0xffff && (ud3
& 0x8000))
2558 || (ud4
== 0 && ! (ud3
& 0x8000)))
2561 emit_move_insn (dest
, GEN_INT (((ud3
<< 16) ^ 0x80000000)
2564 emit_move_insn (dest
, GEN_INT (ud3
<< 16));
2567 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud2
)));
2568 emit_move_insn (dest
, gen_rtx_ASHIFT (DImode
, dest
, GEN_INT (16)));
2570 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
2575 emit_move_insn (dest
, GEN_INT (((ud4
<< 16) ^ 0x80000000)
2578 emit_move_insn (dest
, GEN_INT (ud4
<< 16));
2581 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud3
)));
2583 emit_move_insn (dest
, gen_rtx_ASHIFT (DImode
, dest
, GEN_INT (32)));
2585 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
,
2586 GEN_INT (ud2
<< 16)));
2588 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
2594 /* Emit a move from SOURCE to DEST in mode MODE. */
2596 rs6000_emit_move (dest
, source
, mode
)
2599 enum machine_mode mode
;
2603 operands
[1] = source
;
2605 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
2606 if (GET_CODE (operands
[1]) == CONST_DOUBLE
2607 && ! FLOAT_MODE_P (mode
)
2608 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
2610 /* FIXME. This should never happen. */
2611 /* Since it seems that it does, do the safe thing and convert
2613 operands
[1] = gen_int_mode (CONST_DOUBLE_LOW (operands
[1]), mode
);
2615 if (GET_CODE (operands
[1]) == CONST_DOUBLE
2616 && ! FLOAT_MODE_P (mode
)
2617 && ((CONST_DOUBLE_HIGH (operands
[1]) == 0
2618 && CONST_DOUBLE_LOW (operands
[1]) >= 0)
2619 || (CONST_DOUBLE_HIGH (operands
[1]) == -1
2620 && CONST_DOUBLE_LOW (operands
[1]) < 0)))
2623 /* Check if GCC is setting up a block move that will end up using FP
2624 registers as temporaries. We must make sure this is acceptable. */
2625 if (GET_CODE (operands
[0]) == MEM
2626 && GET_CODE (operands
[1]) == MEM
2628 && (SLOW_UNALIGNED_ACCESS (DImode
, MEM_ALIGN (operands
[0]))
2629 || SLOW_UNALIGNED_ACCESS (DImode
, MEM_ALIGN (operands
[1])))
2630 && ! (SLOW_UNALIGNED_ACCESS (SImode
, (MEM_ALIGN (operands
[0]) > 32
2631 ? 32 : MEM_ALIGN (operands
[0])))
2632 || SLOW_UNALIGNED_ACCESS (SImode
, (MEM_ALIGN (operands
[1]) > 32
2634 : MEM_ALIGN (operands
[1]))))
2635 && ! MEM_VOLATILE_P (operands
[0])
2636 && ! MEM_VOLATILE_P (operands
[1]))
2638 emit_move_insn (adjust_address (operands
[0], SImode
, 0),
2639 adjust_address (operands
[1], SImode
, 0));
2640 emit_move_insn (adjust_address (operands
[0], SImode
, 4),
2641 adjust_address (operands
[1], SImode
, 4));
2645 if (!no_new_pseudos
)
2647 if (GET_CODE (operands
[1]) == MEM
&& optimize
> 0
2648 && (mode
== QImode
|| mode
== HImode
|| mode
== SImode
)
2649 && GET_MODE_SIZE (mode
) < GET_MODE_SIZE (word_mode
))
2651 rtx reg
= gen_reg_rtx (word_mode
);
2653 emit_insn (gen_rtx_SET (word_mode
, reg
,
2654 gen_rtx_ZERO_EXTEND (word_mode
,
2656 operands
[1] = gen_lowpart (mode
, reg
);
2658 if (GET_CODE (operands
[0]) != REG
)
2659 operands
[1] = force_reg (mode
, operands
[1]);
2662 if (mode
== SFmode
&& ! TARGET_POWERPC
2663 && TARGET_HARD_FLOAT
&& TARGET_FPRS
2664 && GET_CODE (operands
[0]) == MEM
)
2668 if (reload_in_progress
|| reload_completed
)
2669 regnum
= true_regnum (operands
[1]);
2670 else if (GET_CODE (operands
[1]) == REG
)
2671 regnum
= REGNO (operands
[1]);
2675 /* If operands[1] is a register, on POWER it may have
2676 double-precision data in it, so truncate it to single
2678 if (FP_REGNO_P (regnum
) || regnum
>= FIRST_PSEUDO_REGISTER
)
2681 newreg
= (no_new_pseudos
? operands
[1] : gen_reg_rtx (mode
));
2682 emit_insn (gen_aux_truncdfsf2 (newreg
, operands
[1]));
2683 operands
[1] = newreg
;
2687 /* Handle the case where reload calls us with an invalid address;
2688 and the case of CONSTANT_P_RTX. */
2689 if (!ALTIVEC_VECTOR_MODE (mode
)
2690 && (! general_operand (operands
[1], mode
)
2691 || ! nonimmediate_operand (operands
[0], mode
)
2692 || GET_CODE (operands
[1]) == CONSTANT_P_RTX
))
2694 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2698 /* FIXME: In the long term, this switch statement should go away
2699 and be replaced by a sequence of tests based on things like
2705 if (CONSTANT_P (operands
[1])
2706 && GET_CODE (operands
[1]) != CONST_INT
)
2707 operands
[1] = force_const_mem (mode
, operands
[1]);
2713 if (CONSTANT_P (operands
[1])
2714 && ! easy_fp_constant (operands
[1], mode
))
2715 operands
[1] = force_const_mem (mode
, operands
[1]);
2726 if (CONSTANT_P (operands
[1])
2727 && !easy_vector_constant (operands
[1]))
2728 operands
[1] = force_const_mem (mode
, operands
[1]);
2733 /* Use default pattern for address of ELF small data */
2736 && DEFAULT_ABI
== ABI_V4
2737 && (GET_CODE (operands
[1]) == SYMBOL_REF
2738 || GET_CODE (operands
[1]) == CONST
)
2739 && small_data_operand (operands
[1], mode
))
2741 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2745 if (DEFAULT_ABI
== ABI_V4
2746 && mode
== Pmode
&& mode
== SImode
2747 && flag_pic
== 1 && got_operand (operands
[1], mode
))
2749 emit_insn (gen_movsi_got (operands
[0], operands
[1]));
2753 if ((TARGET_ELF
|| DEFAULT_ABI
== ABI_DARWIN
)
2754 && TARGET_NO_TOC
&& ! flag_pic
2756 && CONSTANT_P (operands
[1])
2757 && GET_CODE (operands
[1]) != HIGH
2758 && GET_CODE (operands
[1]) != CONST_INT
)
2760 rtx target
= (no_new_pseudos
? operands
[0] : gen_reg_rtx (mode
));
2762 /* If this is a function address on -mcall-aixdesc,
2763 convert it to the address of the descriptor. */
2764 if (DEFAULT_ABI
== ABI_AIX
2765 && GET_CODE (operands
[1]) == SYMBOL_REF
2766 && XSTR (operands
[1], 0)[0] == '.')
2768 const char *name
= XSTR (operands
[1], 0);
2770 while (*name
== '.')
2772 new_ref
= gen_rtx_SYMBOL_REF (Pmode
, name
);
2773 CONSTANT_POOL_ADDRESS_P (new_ref
)
2774 = CONSTANT_POOL_ADDRESS_P (operands
[1]);
2775 SYMBOL_REF_FLAG (new_ref
) = SYMBOL_REF_FLAG (operands
[1]);
2776 SYMBOL_REF_USED (new_ref
) = SYMBOL_REF_USED (operands
[1]);
2777 operands
[1] = new_ref
;
2780 if (DEFAULT_ABI
== ABI_DARWIN
)
2782 emit_insn (gen_macho_high (target
, operands
[1]));
2783 emit_insn (gen_macho_low (operands
[0], target
, operands
[1]));
2787 emit_insn (gen_elf_high (target
, operands
[1]));
2788 emit_insn (gen_elf_low (operands
[0], target
, operands
[1]));
2792 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2793 and we have put it in the TOC, we just need to make a TOC-relative
2796 && GET_CODE (operands
[1]) == SYMBOL_REF
2797 && CONSTANT_POOL_EXPR_P (operands
[1])
2798 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands
[1]),
2799 get_pool_mode (operands
[1])))
2801 operands
[1] = create_TOC_reference (operands
[1]);
2803 else if (mode
== Pmode
2804 && CONSTANT_P (operands
[1])
2805 && ((GET_CODE (operands
[1]) != CONST_INT
2806 && ! easy_fp_constant (operands
[1], mode
))
2807 || (GET_CODE (operands
[1]) == CONST_INT
2808 && num_insns_constant (operands
[1], mode
) > 2)
2809 || (GET_CODE (operands
[0]) == REG
2810 && FP_REGNO_P (REGNO (operands
[0]))))
2811 && GET_CODE (operands
[1]) != HIGH
2812 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands
[1])
2813 && ! TOC_RELATIVE_EXPR_P (operands
[1]))
2815 /* Emit a USE operation so that the constant isn't deleted if
2816 expensive optimizations are turned on because nobody
2817 references it. This should only be done for operands that
2818 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2819 This should not be done for operands that contain LABEL_REFs.
2820 For now, we just handle the obvious case. */
2821 if (GET_CODE (operands
[1]) != LABEL_REF
)
2822 emit_insn (gen_rtx_USE (VOIDmode
, operands
[1]));
2825 /* Darwin uses a special PIC legitimizer. */
2826 if (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
)
2829 rs6000_machopic_legitimize_pic_address (operands
[1], mode
,
2831 if (operands
[0] != operands
[1])
2832 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2837 /* If we are to limit the number of things we put in the TOC and
2838 this is a symbol plus a constant we can add in one insn,
2839 just put the symbol in the TOC and add the constant. Don't do
2840 this if reload is in progress. */
2841 if (GET_CODE (operands
[1]) == CONST
2842 && TARGET_NO_SUM_IN_TOC
&& ! reload_in_progress
2843 && GET_CODE (XEXP (operands
[1], 0)) == PLUS
2844 && add_operand (XEXP (XEXP (operands
[1], 0), 1), mode
)
2845 && (GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == LABEL_REF
2846 || GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == SYMBOL_REF
)
2847 && ! side_effects_p (operands
[0]))
2850 force_const_mem (mode
, XEXP (XEXP (operands
[1], 0), 0));
2851 rtx other
= XEXP (XEXP (operands
[1], 0), 1);
2853 sym
= force_reg (mode
, sym
);
2855 emit_insn (gen_addsi3 (operands
[0], sym
, other
));
2857 emit_insn (gen_adddi3 (operands
[0], sym
, other
));
2861 operands
[1] = force_const_mem (mode
, operands
[1]);
2864 && CONSTANT_POOL_EXPR_P (XEXP (operands
[1], 0))
2865 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
2866 get_pool_constant (XEXP (operands
[1], 0)),
2867 get_pool_mode (XEXP (operands
[1], 0))))
2870 = gen_rtx_MEM (mode
,
2871 create_TOC_reference (XEXP (operands
[1], 0)));
2872 set_mem_alias_set (operands
[1], get_TOC_alias_set ());
2873 RTX_UNCHANGING_P (operands
[1]) = 1;
2879 if (GET_CODE (operands
[0]) == MEM
2880 && GET_CODE (XEXP (operands
[0], 0)) != REG
2881 && ! reload_in_progress
)
2883 = replace_equiv_address (operands
[0],
2884 copy_addr_to_reg (XEXP (operands
[0], 0)));
2886 if (GET_CODE (operands
[1]) == MEM
2887 && GET_CODE (XEXP (operands
[1], 0)) != REG
2888 && ! reload_in_progress
)
2890 = replace_equiv_address (operands
[1],
2891 copy_addr_to_reg (XEXP (operands
[1], 0)));
2894 emit_insn (gen_rtx_PARALLEL (VOIDmode
,
2896 gen_rtx_SET (VOIDmode
,
2897 operands
[0], operands
[1]),
2898 gen_rtx_CLOBBER (VOIDmode
,
2899 gen_rtx_SCRATCH (SImode
)))));
2908 /* Above, we may have called force_const_mem which may have returned
2909 an invalid address. If we can, fix this up; otherwise, reload will
2910 have to deal with it. */
2911 if (GET_CODE (operands
[1]) == MEM
2912 && ! memory_address_p (mode
, XEXP (operands
[1], 0))
2913 && ! reload_in_progress
)
2914 operands
[1] = adjust_address (operands
[1], mode
, 0);
2916 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2920 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2921 for a call to a function whose data type is FNTYPE.
2922 For a library call, FNTYPE is 0.
2924 For incoming args we set the number of arguments in the prototype large
2925 so we never return a PARALLEL. */
2928 init_cumulative_args (cum
, fntype
, libname
, incoming
)
2929 CUMULATIVE_ARGS
*cum
;
2931 rtx libname ATTRIBUTE_UNUSED
;
2934 static CUMULATIVE_ARGS zero_cumulative
;
2936 *cum
= zero_cumulative
;
2938 cum
->fregno
= FP_ARG_MIN_REG
;
2939 cum
->vregno
= ALTIVEC_ARG_MIN_REG
;
2940 cum
->prototype
= (fntype
&& TYPE_ARG_TYPES (fntype
));
2941 cum
->call_cookie
= CALL_NORMAL
;
2942 cum
->sysv_gregno
= GP_ARG_MIN_REG
;
2945 cum
->nargs_prototype
= 1000; /* don't return a PARALLEL */
2947 else if (cum
->prototype
)
2948 cum
->nargs_prototype
= (list_length (TYPE_ARG_TYPES (fntype
)) - 1
2949 + (TYPE_MODE (TREE_TYPE (fntype
)) == BLKmode
2950 || RETURN_IN_MEMORY (TREE_TYPE (fntype
))));
2953 cum
->nargs_prototype
= 0;
2955 cum
->orig_nargs
= cum
->nargs_prototype
;
2957 /* Check for a longcall attribute. */
2959 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype
))
2960 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype
)))
2961 cum
->call_cookie
= CALL_LONG
;
2963 if (TARGET_DEBUG_ARG
)
2965 fprintf (stderr
, "\ninit_cumulative_args:");
2968 tree ret_type
= TREE_TYPE (fntype
);
2969 fprintf (stderr
, " ret code = %s,",
2970 tree_code_name
[ (int)TREE_CODE (ret_type
) ]);
2973 if (cum
->call_cookie
& CALL_LONG
)
2974 fprintf (stderr
, " longcall,");
2976 fprintf (stderr
, " proto = %d, nargs = %d\n",
2977 cum
->prototype
, cum
->nargs_prototype
);
2981 /* If defined, a C expression which determines whether, and in which
2982 direction, to pad out an argument with extra space. The value
2983 should be of type `enum direction': either `upward' to pad above
2984 the argument, `downward' to pad below, or `none' to inhibit
2987 For the AIX ABI structs are always stored left shifted in their
2991 function_arg_padding (mode
, type
)
2992 enum machine_mode mode
;
2995 if (type
!= 0 && AGGREGATE_TYPE_P (type
))
2998 /* This is the default definition. */
2999 return (! BYTES_BIG_ENDIAN
3002 ? (type
&& TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
3003 && int_size_in_bytes (type
) < (PARM_BOUNDARY
/ BITS_PER_UNIT
))
3004 : GET_MODE_BITSIZE (mode
) < PARM_BOUNDARY
)
3005 ? downward
: upward
));
3008 /* If defined, a C expression that gives the alignment boundary, in bits,
3009 of an argument with the specified mode and type. If it is not defined,
3010 PARM_BOUNDARY is used for all arguments.
3012 V.4 wants long longs to be double word aligned. */
3015 function_arg_boundary (mode
, type
)
3016 enum machine_mode mode
;
3017 tree type ATTRIBUTE_UNUSED
;
3019 if (DEFAULT_ABI
== ABI_V4
&& (mode
== DImode
|| mode
== DFmode
))
3021 else if (SPE_VECTOR_MODE (mode
))
3023 else if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
3026 return PARM_BOUNDARY
;
3029 /* Update the data in CUM to advance over an argument
3030 of mode MODE and data type TYPE.
3031 (TYPE is null for libcalls where that information may not be available.) */
3034 function_arg_advance (cum
, mode
, type
, named
)
3035 CUMULATIVE_ARGS
*cum
;
3036 enum machine_mode mode
;
3040 cum
->nargs_prototype
--;
3042 if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
3044 if (cum
->vregno
<= ALTIVEC_ARG_MAX_REG
&& cum
->nargs_prototype
>= 0)
3047 cum
->words
+= RS6000_ARG_SIZE (mode
, type
);
3049 else if (TARGET_SPE_ABI
&& TARGET_SPE
&& SPE_VECTOR_MODE (mode
)
3050 && named
&& cum
->sysv_gregno
<= GP_ARG_MAX_REG
)
3052 else if (DEFAULT_ABI
== ABI_V4
)
3054 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
3055 && (mode
== SFmode
|| mode
== DFmode
))
3057 if (cum
->fregno
<= FP_ARG_V4_MAX_REG
)
3062 cum
->words
+= cum
->words
& 1;
3063 cum
->words
+= RS6000_ARG_SIZE (mode
, type
);
3069 int gregno
= cum
->sysv_gregno
;
3071 /* Aggregates and IEEE quad get passed by reference. */
3072 if ((type
&& AGGREGATE_TYPE_P (type
))
3076 n_words
= RS6000_ARG_SIZE (mode
, type
);
3078 /* Long long and SPE vectors are put in odd registers. */
3079 if (n_words
== 2 && (gregno
& 1) == 0)
3082 /* Long long and SPE vectors are not split between registers
3084 if (gregno
+ n_words
- 1 > GP_ARG_MAX_REG
)
3086 /* Long long is aligned on the stack. */
3088 cum
->words
+= cum
->words
& 1;
3089 cum
->words
+= n_words
;
3092 /* Note: continuing to accumulate gregno past when we've started
3093 spilling to the stack indicates the fact that we've started
3094 spilling to the stack to expand_builtin_saveregs. */
3095 cum
->sysv_gregno
= gregno
+ n_words
;
3098 if (TARGET_DEBUG_ARG
)
3100 fprintf (stderr
, "function_adv: words = %2d, fregno = %2d, ",
3101 cum
->words
, cum
->fregno
);
3102 fprintf (stderr
, "gregno = %2d, nargs = %4d, proto = %d, ",
3103 cum
->sysv_gregno
, cum
->nargs_prototype
, cum
->prototype
);
3104 fprintf (stderr
, "mode = %4s, named = %d\n",
3105 GET_MODE_NAME (mode
), named
);
3110 int align
= (TARGET_32BIT
&& (cum
->words
& 1) != 0
3111 && function_arg_boundary (mode
, type
) == 64) ? 1 : 0;
3113 cum
->words
+= align
+ RS6000_ARG_SIZE (mode
, type
);
3115 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
3116 && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3117 cum
->fregno
+= (mode
== TFmode
? 2 : 1);
3119 if (TARGET_DEBUG_ARG
)
3121 fprintf (stderr
, "function_adv: words = %2d, fregno = %2d, ",
3122 cum
->words
, cum
->fregno
);
3123 fprintf (stderr
, "nargs = %4d, proto = %d, mode = %4s, ",
3124 cum
->nargs_prototype
, cum
->prototype
, GET_MODE_NAME (mode
));
3125 fprintf (stderr
, "named = %d, align = %d\n", named
, align
);
3130 /* Determine where to put an argument to a function.
3131 Value is zero to push the argument on the stack,
3132 or a hard register in which to store the argument.
3134 MODE is the argument's machine mode.
3135 TYPE is the data type of the argument (as a tree).
3136 This is null for libcalls where that information may
3138 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3139 the preceding args and about the function being called.
3140 NAMED is nonzero if this argument is a named parameter
3141 (otherwise it is an extra parameter matching an ellipsis).
3143 On RS/6000 the first eight words of non-FP are normally in registers
3144 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3145 Under V.4, the first 8 FP args are in registers.
3147 If this is floating-point and no prototype is specified, we use
3148 both an FP and integer register (or possibly FP reg and stack). Library
3149 functions (when TYPE is zero) always have the proper types for args,
3150 so we can pass the FP value just in one register. emit_library_function
3151 doesn't support PARALLEL anyway. */
3154 function_arg (cum
, mode
, type
, named
)
3155 CUMULATIVE_ARGS
*cum
;
3156 enum machine_mode mode
;
3160 enum rs6000_abi abi
= DEFAULT_ABI
;
3162 /* Return a marker to indicate whether CR1 needs to set or clear the
3163 bit that V.4 uses to say fp args were passed in registers.
3164 Assume that we don't need the marker for software floating point,
3165 or compiler generated library calls. */
3166 if (mode
== VOIDmode
)
3169 && cum
->nargs_prototype
< 0
3170 && type
&& (cum
->prototype
|| TARGET_NO_PROTOTYPE
))
3172 /* For the SPE, we need to crxor CR6 always. */
3174 return GEN_INT (cum
->call_cookie
| CALL_V4_SET_FP_ARGS
);
3175 else if (TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3176 return GEN_INT (cum
->call_cookie
3177 | ((cum
->fregno
== FP_ARG_MIN_REG
)
3178 ? CALL_V4_SET_FP_ARGS
3179 : CALL_V4_CLEAR_FP_ARGS
));
3182 return GEN_INT (cum
->call_cookie
);
3185 if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
3187 if (named
&& cum
->vregno
<= ALTIVEC_ARG_MAX_REG
)
3188 return gen_rtx_REG (mode
, cum
->vregno
);
3192 else if (TARGET_SPE_ABI
&& TARGET_SPE
&& SPE_VECTOR_MODE (mode
) && named
)
3194 if (cum
->sysv_gregno
<= GP_ARG_MAX_REG
)
3195 return gen_rtx_REG (mode
, cum
->sysv_gregno
);
3199 else if (abi
== ABI_V4
)
3201 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
3202 && (mode
== SFmode
|| mode
== DFmode
))
3204 if (cum
->fregno
<= FP_ARG_V4_MAX_REG
)
3205 return gen_rtx_REG (mode
, cum
->fregno
);
3212 int gregno
= cum
->sysv_gregno
;
3214 /* Aggregates and IEEE quad get passed by reference. */
3215 if ((type
&& AGGREGATE_TYPE_P (type
))
3219 n_words
= RS6000_ARG_SIZE (mode
, type
);
3221 /* Long long and SPE vectors are put in odd registers. */
3222 if (n_words
== 2 && (gregno
& 1) == 0)
3225 /* Long long and SPE vectors are not split between registers
3227 if (gregno
+ n_words
- 1 <= GP_ARG_MAX_REG
)
3229 /* SPE vectors in ... get split into 2 registers. */
3230 if (TARGET_SPE
&& TARGET_SPE_ABI
3231 && SPE_VECTOR_MODE (mode
) && !named
)
3234 enum machine_mode m
= SImode
;
3236 r1
= gen_rtx_REG (m
, gregno
);
3237 r1
= gen_rtx_EXPR_LIST (m
, r1
, const0_rtx
);
3238 r2
= gen_rtx_REG (m
, gregno
+ 1);
3239 r2
= gen_rtx_EXPR_LIST (m
, r2
, GEN_INT (4));
3240 return gen_rtx_PARALLEL (mode
, gen_rtvec (2, r1
, r2
));
3242 return gen_rtx_REG (mode
, gregno
);
3250 int align
= (TARGET_32BIT
&& (cum
->words
& 1) != 0
3251 && function_arg_boundary (mode
, type
) == 64) ? 1 : 0;
3252 int align_words
= cum
->words
+ align
;
3254 if (type
&& TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
3257 if (USE_FP_FOR_ARG_P (*cum
, mode
, type
))
3260 || ((cum
->nargs_prototype
> 0)
3261 /* IBM AIX extended its linkage convention definition always
3262 to require FP args after register save area hole on the
3264 && (DEFAULT_ABI
!= ABI_AIX
3266 || (align_words
< GP_ARG_NUM_REG
))))
3267 return gen_rtx_REG (mode
, cum
->fregno
);
3269 return gen_rtx_PARALLEL (mode
,
3271 gen_rtx_EXPR_LIST (VOIDmode
,
3272 ((align_words
>= GP_ARG_NUM_REG
)
3275 + RS6000_ARG_SIZE (mode
, type
)
3277 /* If this is partially on the stack, then
3278 we only include the portion actually
3279 in registers here. */
3280 ? gen_rtx_REG (SImode
,
3281 GP_ARG_MIN_REG
+ align_words
)
3282 : gen_rtx_REG (mode
,
3283 GP_ARG_MIN_REG
+ align_words
))),
3285 gen_rtx_EXPR_LIST (VOIDmode
,
3286 gen_rtx_REG (mode
, cum
->fregno
),
3289 else if (align_words
< GP_ARG_NUM_REG
)
3290 return gen_rtx_REG (mode
, GP_ARG_MIN_REG
+ align_words
);
3296 /* For an arg passed partly in registers and partly in memory,
3297 this is the number of registers used.
3298 For args passed entirely in registers or entirely in memory, zero. */
3301 function_arg_partial_nregs (cum
, mode
, type
, named
)
3302 CUMULATIVE_ARGS
*cum
;
3303 enum machine_mode mode
;
3305 int named ATTRIBUTE_UNUSED
;
3307 if (DEFAULT_ABI
== ABI_V4
)
3310 if (USE_FP_FOR_ARG_P (*cum
, mode
, type
)
3311 || USE_ALTIVEC_FOR_ARG_P (*cum
, mode
, type
))
3313 if (cum
->nargs_prototype
>= 0)
3317 if (cum
->words
< GP_ARG_NUM_REG
3318 && GP_ARG_NUM_REG
< (cum
->words
+ RS6000_ARG_SIZE (mode
, type
)))
3320 int ret
= GP_ARG_NUM_REG
- cum
->words
;
3321 if (ret
&& TARGET_DEBUG_ARG
)
3322 fprintf (stderr
, "function_arg_partial_nregs: %d\n", ret
);
3330 /* A C expression that indicates when an argument must be passed by
3331 reference. If nonzero for an argument, a copy of that argument is
3332 made in memory and a pointer to the argument is passed instead of
3333 the argument itself. The pointer is passed in whatever way is
3334 appropriate for passing a pointer to that type.
3336 Under V.4, structures and unions are passed by reference.
3338 As an extension to all ABIs, variable sized types are passed by
3342 function_arg_pass_by_reference (cum
, mode
, type
, named
)
3343 CUMULATIVE_ARGS
*cum ATTRIBUTE_UNUSED
;
3344 enum machine_mode mode ATTRIBUTE_UNUSED
;
3346 int named ATTRIBUTE_UNUSED
;
3348 if (DEFAULT_ABI
== ABI_V4
3349 && ((type
&& AGGREGATE_TYPE_P (type
))
3352 if (TARGET_DEBUG_ARG
)
3353 fprintf (stderr
, "function_arg_pass_by_reference: aggregate\n");
3357 return type
&& int_size_in_bytes (type
) <= 0;
3360 /* Perform any needed actions needed for a function that is receiving a
3361 variable number of arguments.
3365 MODE and TYPE are the mode and type of the current parameter.
3367 PRETEND_SIZE is a variable that should be set to the amount of stack
3368 that must be pushed by the prolog to pretend that our caller pushed
3371 Normally, this macro will push all remaining incoming registers on the
3372 stack and set PRETEND_SIZE to the length of the registers pushed. */
3375 setup_incoming_varargs (cum
, mode
, type
, pretend_size
, no_rtl
)
3376 CUMULATIVE_ARGS
*cum
;
3377 enum machine_mode mode
;
3379 int *pretend_size ATTRIBUTE_UNUSED
;
3383 CUMULATIVE_ARGS next_cum
;
3384 int reg_size
= TARGET_32BIT
? 4 : 8;
3385 rtx save_area
= NULL_RTX
, mem
;
3386 int first_reg_offset
, set
;
3390 fntype
= TREE_TYPE (current_function_decl
);
3391 stdarg_p
= (TYPE_ARG_TYPES (fntype
) != 0
3392 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
3393 != void_type_node
));
3395 /* For varargs, we do not want to skip the dummy va_dcl argument.
3396 For stdargs, we do want to skip the last named argument. */
3399 function_arg_advance (&next_cum
, mode
, type
, 1);
3401 if (DEFAULT_ABI
== ABI_V4
)
3403 /* Indicate to allocate space on the stack for varargs save area. */
3404 cfun
->machine
->sysv_varargs_p
= 1;
3406 save_area
= plus_constant (virtual_stack_vars_rtx
,
3407 - RS6000_VARARGS_SIZE
);
3409 first_reg_offset
= next_cum
.sysv_gregno
- GP_ARG_MIN_REG
;
3413 first_reg_offset
= next_cum
.words
;
3414 save_area
= virtual_incoming_args_rtx
;
3415 cfun
->machine
->sysv_varargs_p
= 0;
3417 if (MUST_PASS_IN_STACK (mode
, type
))
3418 first_reg_offset
+= RS6000_ARG_SIZE (TYPE_MODE (type
), type
);
3421 set
= get_varargs_alias_set ();
3422 if (! no_rtl
&& first_reg_offset
< GP_ARG_NUM_REG
)
3424 mem
= gen_rtx_MEM (BLKmode
,
3425 plus_constant (save_area
,
3426 first_reg_offset
* reg_size
)),
3427 set_mem_alias_set (mem
, set
);
3428 set_mem_align (mem
, BITS_PER_WORD
);
3431 (GP_ARG_MIN_REG
+ first_reg_offset
, mem
,
3432 GP_ARG_NUM_REG
- first_reg_offset
,
3433 (GP_ARG_NUM_REG
- first_reg_offset
) * UNITS_PER_WORD
);
3436 /* Save FP registers if needed. */
3437 if (DEFAULT_ABI
== ABI_V4
3438 && TARGET_HARD_FLOAT
&& TARGET_FPRS
3440 && next_cum
.fregno
<= FP_ARG_V4_MAX_REG
)
3442 int fregno
= next_cum
.fregno
;
3443 rtx cr1
= gen_rtx_REG (CCmode
, CR1_REGNO
);
3444 rtx lab
= gen_label_rtx ();
3445 int off
= (GP_ARG_NUM_REG
* reg_size
) + ((fregno
- FP_ARG_MIN_REG
) * 8);
3447 emit_jump_insn (gen_rtx_SET (VOIDmode
,
3449 gen_rtx_IF_THEN_ELSE (VOIDmode
,
3450 gen_rtx_NE (VOIDmode
, cr1
,
3452 gen_rtx_LABEL_REF (VOIDmode
, lab
),
3455 while (fregno
<= FP_ARG_V4_MAX_REG
)
3457 mem
= gen_rtx_MEM (DFmode
, plus_constant (save_area
, off
));
3458 set_mem_alias_set (mem
, set
);
3459 emit_move_insn (mem
, gen_rtx_REG (DFmode
, fregno
));
3468 /* Create the va_list data type. */
3471 rs6000_build_va_list ()
3473 tree f_gpr
, f_fpr
, f_ovf
, f_sav
, record
, type_decl
;
3475 /* For AIX, prefer 'char *' because that's what the system
3476 header files like. */
3477 if (DEFAULT_ABI
!= ABI_V4
)
3478 return build_pointer_type (char_type_node
);
3480 record
= (*lang_hooks
.types
.make_type
) (RECORD_TYPE
);
3481 type_decl
= build_decl (TYPE_DECL
, get_identifier ("__va_list_tag"), record
);
3483 f_gpr
= build_decl (FIELD_DECL
, get_identifier ("gpr"),
3484 unsigned_char_type_node
);
3485 f_fpr
= build_decl (FIELD_DECL
, get_identifier ("fpr"),
3486 unsigned_char_type_node
);
3487 f_ovf
= build_decl (FIELD_DECL
, get_identifier ("overflow_arg_area"),
3489 f_sav
= build_decl (FIELD_DECL
, get_identifier ("reg_save_area"),
3492 DECL_FIELD_CONTEXT (f_gpr
) = record
;
3493 DECL_FIELD_CONTEXT (f_fpr
) = record
;
3494 DECL_FIELD_CONTEXT (f_ovf
) = record
;
3495 DECL_FIELD_CONTEXT (f_sav
) = record
;
3497 TREE_CHAIN (record
) = type_decl
;
3498 TYPE_NAME (record
) = type_decl
;
3499 TYPE_FIELDS (record
) = f_gpr
;
3500 TREE_CHAIN (f_gpr
) = f_fpr
;
3501 TREE_CHAIN (f_fpr
) = f_ovf
;
3502 TREE_CHAIN (f_ovf
) = f_sav
;
3504 layout_type (record
);
3506 /* The correct type is an array type of one element. */
3507 return build_array_type (record
, build_index_type (size_zero_node
));
3510 /* Implement va_start. */
3513 rs6000_va_start (valist
, nextarg
)
3517 HOST_WIDE_INT words
, n_gpr
, n_fpr
;
3518 tree f_gpr
, f_fpr
, f_ovf
, f_sav
;
3519 tree gpr
, fpr
, ovf
, sav
, t
;
3521 /* Only SVR4 needs something special. */
3522 if (DEFAULT_ABI
!= ABI_V4
)
3524 std_expand_builtin_va_start (valist
, nextarg
);
3528 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
3529 f_fpr
= TREE_CHAIN (f_gpr
);
3530 f_ovf
= TREE_CHAIN (f_fpr
);
3531 f_sav
= TREE_CHAIN (f_ovf
);
3533 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
3534 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
);
3535 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
);
3536 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
);
3537 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
);
3539 /* Count number of gp and fp argument registers used. */
3540 words
= current_function_args_info
.words
;
3541 n_gpr
= current_function_args_info
.sysv_gregno
- GP_ARG_MIN_REG
;
3542 n_fpr
= current_function_args_info
.fregno
- FP_ARG_MIN_REG
;
3544 if (TARGET_DEBUG_ARG
)
3546 fputs ("va_start: words = ", stderr
);
3547 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, words
);
3548 fputs (", n_gpr = ", stderr
);
3549 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, n_gpr
);
3550 fputs (", n_fpr = ", stderr
);
3551 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, n_fpr
);
3552 putc ('\n', stderr
);
3555 t
= build (MODIFY_EXPR
, TREE_TYPE (gpr
), gpr
, build_int_2 (n_gpr
, 0));
3556 TREE_SIDE_EFFECTS (t
) = 1;
3557 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3559 t
= build (MODIFY_EXPR
, TREE_TYPE (fpr
), fpr
, build_int_2 (n_fpr
, 0));
3560 TREE_SIDE_EFFECTS (t
) = 1;
3561 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3563 /* Find the overflow area. */
3564 t
= make_tree (TREE_TYPE (ovf
), virtual_incoming_args_rtx
);
3566 t
= build (PLUS_EXPR
, TREE_TYPE (ovf
), t
,
3567 build_int_2 (words
* UNITS_PER_WORD
, 0));
3568 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
3569 TREE_SIDE_EFFECTS (t
) = 1;
3570 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3572 /* Find the register save area. */
3573 t
= make_tree (TREE_TYPE (sav
), virtual_stack_vars_rtx
);
3574 t
= build (PLUS_EXPR
, TREE_TYPE (sav
), t
,
3575 build_int_2 (-RS6000_VARARGS_SIZE
, -1));
3576 t
= build (MODIFY_EXPR
, TREE_TYPE (sav
), sav
, t
);
3577 TREE_SIDE_EFFECTS (t
) = 1;
3578 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3581 /* Implement va_arg. */
3584 rs6000_va_arg (valist
, type
)
3587 tree f_gpr
, f_fpr
, f_ovf
, f_sav
;
3588 tree gpr
, fpr
, ovf
, sav
, reg
, t
, u
;
3589 int indirect_p
, size
, rsize
, n_reg
, sav_ofs
, sav_scale
;
3590 rtx lab_false
, lab_over
, addr_rtx
, r
;
3592 if (DEFAULT_ABI
!= ABI_V4
)
3594 /* Variable sized types are passed by reference. */
3595 if (int_size_in_bytes (type
) <= 0)
3597 u
= build_pointer_type (type
);
3599 /* Args grow upward. */
3600 t
= build (POSTINCREMENT_EXPR
, TREE_TYPE (valist
), valist
,
3601 build_int_2 (POINTER_SIZE
/ BITS_PER_UNIT
, 0));
3602 TREE_SIDE_EFFECTS (t
) = 1;
3604 t
= build1 (NOP_EXPR
, build_pointer_type (u
), t
);
3605 TREE_SIDE_EFFECTS (t
) = 1;
3607 t
= build1 (INDIRECT_REF
, u
, t
);
3608 TREE_SIDE_EFFECTS (t
) = 1;
3610 return expand_expr (t
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
3613 return std_expand_builtin_va_arg (valist
, type
);
3616 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
3617 f_fpr
= TREE_CHAIN (f_gpr
);
3618 f_ovf
= TREE_CHAIN (f_fpr
);
3619 f_sav
= TREE_CHAIN (f_ovf
);
3621 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
3622 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
);
3623 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
);
3624 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
);
3625 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
);
3627 size
= int_size_in_bytes (type
);
3628 rsize
= (size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
3630 if (AGGREGATE_TYPE_P (type
) || TYPE_MODE (type
) == TFmode
)
3632 /* Aggregates and long doubles are passed by reference. */
3638 size
= UNITS_PER_WORD
;
3641 else if (FLOAT_TYPE_P (type
) && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3643 /* FP args go in FP registers, if present. */
3652 /* Otherwise into GP registers. */
3660 /* Pull the value out of the saved registers ... */
3662 lab_false
= gen_label_rtx ();
3663 lab_over
= gen_label_rtx ();
3664 addr_rtx
= gen_reg_rtx (Pmode
);
3666 /* AltiVec vectors never go in registers. */
3667 if (!TARGET_ALTIVEC
|| TREE_CODE (type
) != VECTOR_TYPE
)
3669 TREE_THIS_VOLATILE (reg
) = 1;
3670 emit_cmp_and_jump_insns
3671 (expand_expr (reg
, NULL_RTX
, QImode
, EXPAND_NORMAL
),
3672 GEN_INT (8 - n_reg
+ 1), GE
, const1_rtx
, QImode
, 1,
3675 /* Long long is aligned in the registers. */
3678 u
= build (BIT_AND_EXPR
, TREE_TYPE (reg
), reg
,
3679 build_int_2 (n_reg
- 1, 0));
3680 u
= build (PLUS_EXPR
, TREE_TYPE (reg
), reg
, u
);
3681 u
= build (MODIFY_EXPR
, TREE_TYPE (reg
), reg
, u
);
3682 TREE_SIDE_EFFECTS (u
) = 1;
3683 expand_expr (u
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3687 t
= build (PLUS_EXPR
, ptr_type_node
, sav
, build_int_2 (sav_ofs
, 0));
3691 u
= build (POSTINCREMENT_EXPR
, TREE_TYPE (reg
), reg
,
3692 build_int_2 (n_reg
, 0));
3693 TREE_SIDE_EFFECTS (u
) = 1;
3695 u
= build1 (CONVERT_EXPR
, integer_type_node
, u
);
3696 TREE_SIDE_EFFECTS (u
) = 1;
3698 u
= build (MULT_EXPR
, integer_type_node
, u
, build_int_2 (sav_scale
, 0));
3699 TREE_SIDE_EFFECTS (u
) = 1;
3701 t
= build (PLUS_EXPR
, ptr_type_node
, t
, u
);
3702 TREE_SIDE_EFFECTS (t
) = 1;
3704 r
= expand_expr (t
, addr_rtx
, Pmode
, EXPAND_NORMAL
);
3706 emit_move_insn (addr_rtx
, r
);
3708 emit_jump_insn (gen_jump (lab_over
));
3712 emit_label (lab_false
);
3714 /* ... otherwise out of the overflow area. */
3716 /* Make sure we don't find reg 7 for the next int arg.
3718 All AltiVec vectors go in the overflow area. So in the AltiVec
3719 case we need to get the vectors from the overflow area, but
3720 remember where the GPRs and FPRs are. */
3721 if (n_reg
> 1 && (TREE_CODE (type
) != VECTOR_TYPE
3722 || !TARGET_ALTIVEC
))
3724 t
= build (MODIFY_EXPR
, TREE_TYPE (reg
), reg
, build_int_2 (8, 0));
3725 TREE_SIDE_EFFECTS (t
) = 1;
3726 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3729 /* Care for on-stack alignment if needed. */
3736 /* AltiVec vectors are 16 byte aligned. */
3737 if (TARGET_ALTIVEC
&& TREE_CODE (type
) == VECTOR_TYPE
)
3742 t
= build (PLUS_EXPR
, TREE_TYPE (ovf
), ovf
, build_int_2 (align
, 0));
3743 t
= build (BIT_AND_EXPR
, TREE_TYPE (t
), t
, build_int_2 (-align
-1, -1));
3747 r
= expand_expr (t
, addr_rtx
, Pmode
, EXPAND_NORMAL
);
3749 emit_move_insn (addr_rtx
, r
);
3751 t
= build (PLUS_EXPR
, TREE_TYPE (t
), t
, build_int_2 (size
, 0));
3752 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
3753 TREE_SIDE_EFFECTS (t
) = 1;
3754 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3756 emit_label (lab_over
);
3760 r
= gen_rtx_MEM (Pmode
, addr_rtx
);
3761 set_mem_alias_set (r
, get_varargs_alias_set ());
3762 emit_move_insn (addr_rtx
, r
);
/* Register the builtin NAME (with type TYPE and enum value CODE) only
   when the target flag bits in MASK are enabled on this compilation.
   NOTE(review): the macro's dropped continuation lines were restored
   from the upstream gcc-3.3 rs6000.c -- verify.  */
#define def_builtin(MASK, NAME, TYPE, CODE)			\
  do {								\
    if ((MASK) & target_flags)					\
      builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
			NULL, NULL_TREE);			\
  } while (0)
3777 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3779 static const struct builtin_description bdesc_3arg
[] =
3781 { MASK_ALTIVEC
, CODE_FOR_altivec_vmaddfp
, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP
},
3782 { MASK_ALTIVEC
, CODE_FOR_altivec_vmhaddshs
, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS
},
3783 { MASK_ALTIVEC
, CODE_FOR_altivec_vmhraddshs
, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS
},
3784 { MASK_ALTIVEC
, CODE_FOR_altivec_vmladduhm
, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM
},
3785 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumubm
, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM
},
3786 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsummbm
, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM
},
3787 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumuhm
, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM
},
3788 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumshm
, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM
},
3789 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumuhs
, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS
},
3790 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumshs
, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS
},
3791 { MASK_ALTIVEC
, CODE_FOR_altivec_vnmsubfp
, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP
},
3792 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_4sf
, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF
},
3793 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_4si
, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI
},
3794 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_8hi
, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI
},
3795 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_16qi
, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI
},
3796 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_4sf
, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF
},
3797 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_4si
, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI
},
3798 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_8hi
, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI
},
3799 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_16qi
, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI
},
3800 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_16qi
, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI
},
3801 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_8hi
, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI
},
3802 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_4si
, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI
},
3803 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_4sf
, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF
},
3806 /* DST operations: void foo (void *, const int, const char). */
3808 static const struct builtin_description bdesc_dst
[] =
3810 { MASK_ALTIVEC
, CODE_FOR_altivec_dst
, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST
},
3811 { MASK_ALTIVEC
, CODE_FOR_altivec_dstt
, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT
},
3812 { MASK_ALTIVEC
, CODE_FOR_altivec_dstst
, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST
},
3813 { MASK_ALTIVEC
, CODE_FOR_altivec_dststt
, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT
}
3816 /* Simple binary operations: VECc = foo (VECa, VECb). */
3818 static struct builtin_description bdesc_2arg
[] =
3820 { MASK_ALTIVEC
, CODE_FOR_addv16qi3
, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM
},
3821 { MASK_ALTIVEC
, CODE_FOR_addv8hi3
, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM
},
3822 { MASK_ALTIVEC
, CODE_FOR_addv4si3
, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM
},
3823 { MASK_ALTIVEC
, CODE_FOR_addv4sf3
, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP
},
3824 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddcuw
, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW
},
3825 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddubs
, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS
},
3826 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddsbs
, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS
},
3827 { MASK_ALTIVEC
, CODE_FOR_altivec_vadduhs
, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS
},
3828 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddshs
, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS
},
3829 { MASK_ALTIVEC
, CODE_FOR_altivec_vadduws
, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS
},
3830 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddsws
, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS
},
3831 { MASK_ALTIVEC
, CODE_FOR_andv4si3
, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND
},
3832 { MASK_ALTIVEC
, CODE_FOR_altivec_vandc
, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC
},
3833 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgub
, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB
},
3834 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsb
, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB
},
3835 { MASK_ALTIVEC
, CODE_FOR_altivec_vavguh
, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH
},
3836 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsh
, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH
},
3837 { MASK_ALTIVEC
, CODE_FOR_altivec_vavguw
, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW
},
3838 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsw
, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW
},
3839 { MASK_ALTIVEC
, CODE_FOR_altivec_vcfux
, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX
},
3840 { MASK_ALTIVEC
, CODE_FOR_altivec_vcfsx
, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX
},
3841 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpbfp
, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP
},
3842 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequb
, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB
},
3843 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequh
, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH
},
3844 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequw
, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW
},
3845 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpeqfp
, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP
},
3846 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgefp
, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP
},
3847 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtub
, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB
},
3848 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsb
, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB
},
3849 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtuh
, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH
},
3850 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsh
, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH
},
3851 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtuw
, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW
},
3852 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsw
, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW
},
3853 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtfp
, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP
},
3854 { MASK_ALTIVEC
, CODE_FOR_altivec_vctsxs
, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS
},
3855 { MASK_ALTIVEC
, CODE_FOR_altivec_vctuxs
, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS
},
3856 { MASK_ALTIVEC
, CODE_FOR_umaxv16qi3
, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB
},
3857 { MASK_ALTIVEC
, CODE_FOR_smaxv16qi3
, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB
},
3858 { MASK_ALTIVEC
, CODE_FOR_umaxv8hi3
, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH
},
3859 { MASK_ALTIVEC
, CODE_FOR_smaxv8hi3
, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH
},
3860 { MASK_ALTIVEC
, CODE_FOR_umaxv4si3
, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW
},
3861 { MASK_ALTIVEC
, CODE_FOR_smaxv4si3
, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW
},
3862 { MASK_ALTIVEC
, CODE_FOR_smaxv4sf3
, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP
},
3863 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghb
, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB
},
3864 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghh
, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH
},
3865 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghw
, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW
},
3866 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglb
, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB
},
3867 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglh
, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH
},
3868 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglw
, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW
},
3869 { MASK_ALTIVEC
, CODE_FOR_uminv16qi3
, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB
},
3870 { MASK_ALTIVEC
, CODE_FOR_sminv16qi3
, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB
},
3871 { MASK_ALTIVEC
, CODE_FOR_uminv8hi3
, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH
},
3872 { MASK_ALTIVEC
, CODE_FOR_sminv8hi3
, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH
},
3873 { MASK_ALTIVEC
, CODE_FOR_uminv4si3
, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW
},
3874 { MASK_ALTIVEC
, CODE_FOR_sminv4si3
, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW
},
3875 { MASK_ALTIVEC
, CODE_FOR_sminv4sf3
, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP
},
3876 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuleub
, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB
},
3877 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulesb
, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB
},
3878 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuleuh
, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH
},
3879 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulesh
, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH
},
3880 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuloub
, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB
},
3881 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulosb
, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB
},
3882 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulouh
, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH
},
3883 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulosh
, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH
},
3884 { MASK_ALTIVEC
, CODE_FOR_altivec_vnor
, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR
},
3885 { MASK_ALTIVEC
, CODE_FOR_iorv4si3
, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR
},
3886 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhum
, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM
},
3887 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwum
, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM
},
3888 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkpx
, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX
},
3889 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhss
, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS
},
3890 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkshss
, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS
},
3891 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwss
, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS
},
3892 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkswss
, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS
},
3893 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhus
, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS
},
3894 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkshus
, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS
},
3895 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwus
, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS
},
3896 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkswus
, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS
},
3897 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlb
, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB
},
3898 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlh
, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH
},
3899 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlw
, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW
},
3900 { MASK_ALTIVEC
, CODE_FOR_altivec_vslb
, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB
},
3901 { MASK_ALTIVEC
, CODE_FOR_altivec_vslh
, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH
},
3902 { MASK_ALTIVEC
, CODE_FOR_altivec_vslw
, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW
},
3903 { MASK_ALTIVEC
, CODE_FOR_altivec_vsl
, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL
},
3904 { MASK_ALTIVEC
, CODE_FOR_altivec_vslo
, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO
},
3905 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltb
, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB
},
3906 { MASK_ALTIVEC
, CODE_FOR_altivec_vsplth
, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH
},
3907 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltw
, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW
},
3908 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrb
, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB
},
3909 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrh
, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH
},
3910 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrw
, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW
},
3911 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrab
, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB
},
3912 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrah
, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH
},
3913 { MASK_ALTIVEC
, CODE_FOR_altivec_vsraw
, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW
},
3914 { MASK_ALTIVEC
, CODE_FOR_altivec_vsr
, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR
},
3915 { MASK_ALTIVEC
, CODE_FOR_altivec_vsro
, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO
},
3916 { MASK_ALTIVEC
, CODE_FOR_subv16qi3
, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM
},
3917 { MASK_ALTIVEC
, CODE_FOR_subv8hi3
, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM
},
3918 { MASK_ALTIVEC
, CODE_FOR_subv4si3
, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM
},
3919 { MASK_ALTIVEC
, CODE_FOR_subv4sf3
, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP
},
3920 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubcuw
, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW
},
3921 { MASK_ALTIVEC
, CODE_FOR_altivec_vsububs
, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS
},
3922 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubsbs
, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS
},
3923 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubuhs
, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS
},
3924 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubshs
, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS
},
3925 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubuws
, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS
},
3926 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubsws
, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS
},
3927 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4ubs
, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS
},
3928 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4sbs
, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS
},
3929 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4shs
, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS
},
3930 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum2sws
, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS
},
3931 { MASK_ALTIVEC
, CODE_FOR_altivec_vsumsws
, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS
},
3932 { MASK_ALTIVEC
, CODE_FOR_xorv4si3
, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR
},
3934 /* Place holder, leave as first spe builtin. */
3935 { 0, CODE_FOR_spe_evaddw
, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW
},
3936 { 0, CODE_FOR_spe_evand
, "__builtin_spe_evand", SPE_BUILTIN_EVAND
},
3937 { 0, CODE_FOR_spe_evandc
, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC
},
3938 { 0, CODE_FOR_spe_evdivws
, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS
},
3939 { 0, CODE_FOR_spe_evdivwu
, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU
},
3940 { 0, CODE_FOR_spe_eveqv
, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV
},
3941 { 0, CODE_FOR_spe_evfsadd
, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD
},
3942 { 0, CODE_FOR_spe_evfsdiv
, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV
},
3943 { 0, CODE_FOR_spe_evfsmul
, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL
},
3944 { 0, CODE_FOR_spe_evfssub
, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB
},
3945 { 0, CODE_FOR_spe_evmergehi
, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI
},
3946 { 0, CODE_FOR_spe_evmergehilo
, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO
},
3947 { 0, CODE_FOR_spe_evmergelo
, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO
},
3948 { 0, CODE_FOR_spe_evmergelohi
, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI
},
3949 { 0, CODE_FOR_spe_evmhegsmfaa
, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA
},
3950 { 0, CODE_FOR_spe_evmhegsmfan
, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN
},
3951 { 0, CODE_FOR_spe_evmhegsmiaa
, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA
},
3952 { 0, CODE_FOR_spe_evmhegsmian
, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN
},
3953 { 0, CODE_FOR_spe_evmhegumiaa
, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA
},
3954 { 0, CODE_FOR_spe_evmhegumian
, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN
},
3955 { 0, CODE_FOR_spe_evmhesmf
, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF
},
3956 { 0, CODE_FOR_spe_evmhesmfa
, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA
},
3957 { 0, CODE_FOR_spe_evmhesmfaaw
, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW
},
3958 { 0, CODE_FOR_spe_evmhesmfanw
, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW
},
3959 { 0, CODE_FOR_spe_evmhesmi
, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI
},
3960 { 0, CODE_FOR_spe_evmhesmia
, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA
},
3961 { 0, CODE_FOR_spe_evmhesmiaaw
, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW
},
3962 { 0, CODE_FOR_spe_evmhesmianw
, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW
},
3963 { 0, CODE_FOR_spe_evmhessf
, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF
},
3964 { 0, CODE_FOR_spe_evmhessfa
, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA
},
3965 { 0, CODE_FOR_spe_evmhessfaaw
, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW
},
3966 { 0, CODE_FOR_spe_evmhessfanw
, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW
},
3967 { 0, CODE_FOR_spe_evmhessiaaw
, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW
},
3968 { 0, CODE_FOR_spe_evmhessianw
, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW
},
3969 { 0, CODE_FOR_spe_evmheumi
, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI
},
3970 { 0, CODE_FOR_spe_evmheumia
, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA
},
3971 { 0, CODE_FOR_spe_evmheumiaaw
, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW
},
3972 { 0, CODE_FOR_spe_evmheumianw
, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW
},
3973 { 0, CODE_FOR_spe_evmheusiaaw
, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW
},
3974 { 0, CODE_FOR_spe_evmheusianw
, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW
},
3975 { 0, CODE_FOR_spe_evmhogsmfaa
, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA
},
3976 { 0, CODE_FOR_spe_evmhogsmfan
, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN
},
3977 { 0, CODE_FOR_spe_evmhogsmiaa
, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA
},
3978 { 0, CODE_FOR_spe_evmhogsmian
, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN
},
3979 { 0, CODE_FOR_spe_evmhogumiaa
, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA
},
3980 { 0, CODE_FOR_spe_evmhogumian
, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN
},
3981 { 0, CODE_FOR_spe_evmhosmf
, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF
},
3982 { 0, CODE_FOR_spe_evmhosmfa
, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA
},
3983 { 0, CODE_FOR_spe_evmhosmfaaw
, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW
},
3984 { 0, CODE_FOR_spe_evmhosmfanw
, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW
},
3985 { 0, CODE_FOR_spe_evmhosmi
, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI
},
3986 { 0, CODE_FOR_spe_evmhosmia
, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA
},
3987 { 0, CODE_FOR_spe_evmhosmiaaw
, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW
},
3988 { 0, CODE_FOR_spe_evmhosmianw
, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW
},
3989 { 0, CODE_FOR_spe_evmhossf
, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF
},
3990 { 0, CODE_FOR_spe_evmhossfa
, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA
},
3991 { 0, CODE_FOR_spe_evmhossfaaw
, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW
},
3992 { 0, CODE_FOR_spe_evmhossfanw
, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW
},
3993 { 0, CODE_FOR_spe_evmhossiaaw
, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW
},
3994 { 0, CODE_FOR_spe_evmhossianw
, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW
},
3995 { 0, CODE_FOR_spe_evmhoumi
, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI
},
3996 { 0, CODE_FOR_spe_evmhoumia
, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA
},
3997 { 0, CODE_FOR_spe_evmhoumiaaw
, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW
},
3998 { 0, CODE_FOR_spe_evmhoumianw
, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW
},
3999 { 0, CODE_FOR_spe_evmhousiaaw
, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW
},
4000 { 0, CODE_FOR_spe_evmhousianw
, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW
},
4001 { 0, CODE_FOR_spe_evmwhsmf
, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF
},
4002 { 0, CODE_FOR_spe_evmwhsmfa
, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA
},
4003 { 0, CODE_FOR_spe_evmwhsmi
, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI
},
4004 { 0, CODE_FOR_spe_evmwhsmia
, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA
},
4005 { 0, CODE_FOR_spe_evmwhssf
, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF
},
4006 { 0, CODE_FOR_spe_evmwhssfa
, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA
},
4007 { 0, CODE_FOR_spe_evmwhumi
, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI
},
4008 { 0, CODE_FOR_spe_evmwhumia
, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA
},
4009 { 0, CODE_FOR_spe_evmwlsmiaaw
, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW
},
4010 { 0, CODE_FOR_spe_evmwlsmianw
, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW
},
4011 { 0, CODE_FOR_spe_evmwlssiaaw
, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW
},
4012 { 0, CODE_FOR_spe_evmwlssianw
, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW
},
4013 { 0, CODE_FOR_spe_evmwlumi
, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI
},
4014 { 0, CODE_FOR_spe_evmwlumia
, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA
},
4015 { 0, CODE_FOR_spe_evmwlumiaaw
, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW
},
4016 { 0, CODE_FOR_spe_evmwlumianw
, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW
},
4017 { 0, CODE_FOR_spe_evmwlusiaaw
, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW
},
4018 { 0, CODE_FOR_spe_evmwlusianw
, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW
},
4019 { 0, CODE_FOR_spe_evmwsmf
, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF
},
4020 { 0, CODE_FOR_spe_evmwsmfa
, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA
},
4021 { 0, CODE_FOR_spe_evmwsmfaa
, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA
},
4022 { 0, CODE_FOR_spe_evmwsmfan
, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN
},
4023 { 0, CODE_FOR_spe_evmwsmi
, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI
},
4024 { 0, CODE_FOR_spe_evmwsmia
, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA
},
4025 { 0, CODE_FOR_spe_evmwsmiaa
, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA
},
4026 { 0, CODE_FOR_spe_evmwsmian
, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN
},
4027 { 0, CODE_FOR_spe_evmwssf
, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF
},
4028 { 0, CODE_FOR_spe_evmwssfa
, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA
},
4029 { 0, CODE_FOR_spe_evmwssfaa
, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA
},
4030 { 0, CODE_FOR_spe_evmwssfan
, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN
},
4031 { 0, CODE_FOR_spe_evmwumi
, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI
},
4032 { 0, CODE_FOR_spe_evmwumia
, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA
},
4033 { 0, CODE_FOR_spe_evmwumiaa
, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA
},
4034 { 0, CODE_FOR_spe_evmwumian
, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN
},
4035 { 0, CODE_FOR_spe_evnand
, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND
},
4036 { 0, CODE_FOR_spe_evnor
, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR
},
4037 { 0, CODE_FOR_spe_evor
, "__builtin_spe_evor", SPE_BUILTIN_EVOR
},
4038 { 0, CODE_FOR_spe_evorc
, "__builtin_spe_evorc", SPE_BUILTIN_EVORC
},
4039 { 0, CODE_FOR_spe_evrlw
, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW
},
4040 { 0, CODE_FOR_spe_evslw
, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW
},
4041 { 0, CODE_FOR_spe_evsrws
, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS
},
4042 { 0, CODE_FOR_spe_evsrwu
, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU
},
4043 { 0, CODE_FOR_spe_evsubfw
, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW
},
4045 /* SPE binary operations expecting a 5-bit unsigned literal. */
4046 { 0, CODE_FOR_spe_evaddiw
, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW
},
4048 { 0, CODE_FOR_spe_evrlwi
, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI
},
4049 { 0, CODE_FOR_spe_evslwi
, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI
},
4050 { 0, CODE_FOR_spe_evsrwis
, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS
},
4051 { 0, CODE_FOR_spe_evsrwiu
, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU
},
4052 { 0, CODE_FOR_spe_evsubifw
, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW
},
4053 { 0, CODE_FOR_spe_evmwhssfaa
, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA
},
4054 { 0, CODE_FOR_spe_evmwhssmaa
, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA
},
4055 { 0, CODE_FOR_spe_evmwhsmfaa
, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA
},
4056 { 0, CODE_FOR_spe_evmwhsmiaa
, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA
},
4057 { 0, CODE_FOR_spe_evmwhusiaa
, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA
},
4058 { 0, CODE_FOR_spe_evmwhumiaa
, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA
},
4059 { 0, CODE_FOR_spe_evmwhssfan
, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN
},
4060 { 0, CODE_FOR_spe_evmwhssian
, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN
},
4061 { 0, CODE_FOR_spe_evmwhsmfan
, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN
},
4062 { 0, CODE_FOR_spe_evmwhsmian
, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN
},
4063 { 0, CODE_FOR_spe_evmwhusian
, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN
},
4064 { 0, CODE_FOR_spe_evmwhumian
, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN
},
4065 { 0, CODE_FOR_spe_evmwhgssfaa
, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA
},
4066 { 0, CODE_FOR_spe_evmwhgsmfaa
, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA
},
4067 { 0, CODE_FOR_spe_evmwhgsmiaa
, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA
},
4068 { 0, CODE_FOR_spe_evmwhgumiaa
, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA
},
4069 { 0, CODE_FOR_spe_evmwhgssfan
, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN
},
4070 { 0, CODE_FOR_spe_evmwhgsmfan
, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN
},
4071 { 0, CODE_FOR_spe_evmwhgsmian
, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN
},
4072 { 0, CODE_FOR_spe_evmwhgumian
, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN
},
4073 { 0, CODE_FOR_spe_brinc
, "__builtin_spe_brinc", SPE_BUILTIN_BRINC
},
4075 /* Place-holder. Leave as last binary SPE builtin. */
4076 { 0, CODE_FOR_xorv2si3
, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR
},
4079 /* AltiVec predicates. */
4081 struct builtin_description_predicates
4083 const unsigned int mask
;
4084 const enum insn_code icode
;
4086 const char *const name
;
4087 const enum rs6000_builtins code
;
4090 static const struct builtin_description_predicates bdesc_altivec_preds
[] =
4092 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P
},
4093 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P
},
4094 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P
},
4095 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P
},
4096 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P
},
4097 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P
},
4098 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P
},
4099 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P
},
4100 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P
},
4101 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P
},
4102 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P
},
4103 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P
},
4104 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P
}
4107 /* SPE predicates. */
4108 static struct builtin_description bdesc_spe_predicates
[] =
4110 /* Place-holder. Leave as first. */
4111 { 0, CODE_FOR_spe_evcmpeq
, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ
},
4112 { 0, CODE_FOR_spe_evcmpgts
, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS
},
4113 { 0, CODE_FOR_spe_evcmpgtu
, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU
},
4114 { 0, CODE_FOR_spe_evcmplts
, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS
},
4115 { 0, CODE_FOR_spe_evcmpltu
, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU
},
4116 { 0, CODE_FOR_spe_evfscmpeq
, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ
},
4117 { 0, CODE_FOR_spe_evfscmpgt
, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT
},
4118 { 0, CODE_FOR_spe_evfscmplt
, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT
},
4119 { 0, CODE_FOR_spe_evfststeq
, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ
},
4120 { 0, CODE_FOR_spe_evfststgt
, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT
},
4121 /* Place-holder. Leave as last. */
4122 { 0, CODE_FOR_spe_evfststlt
, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT
},
4125 /* SPE evsel predicates. */
4126 static struct builtin_description bdesc_spe_evsel
[] =
4128 /* Place-holder. Leave as first. */
4129 { 0, CODE_FOR_spe_evcmpgts
, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS
},
4130 { 0, CODE_FOR_spe_evcmpgtu
, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU
},
4131 { 0, CODE_FOR_spe_evcmplts
, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS
},
4132 { 0, CODE_FOR_spe_evcmpltu
, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU
},
4133 { 0, CODE_FOR_spe_evcmpeq
, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ
},
4134 { 0, CODE_FOR_spe_evfscmpgt
, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT
},
4135 { 0, CODE_FOR_spe_evfscmplt
, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT
},
4136 { 0, CODE_FOR_spe_evfscmpeq
, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ
},
4137 { 0, CODE_FOR_spe_evfststgt
, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT
},
4138 { 0, CODE_FOR_spe_evfststlt
, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT
},
4139 /* Place-holder. Leave as last. */
4140 { 0, CODE_FOR_spe_evfststeq
, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ
},
4143 /* ABS* operations. */
4145 static const struct builtin_description bdesc_abs
[] =
4147 { MASK_ALTIVEC
, CODE_FOR_absv4si2
, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI
},
4148 { MASK_ALTIVEC
, CODE_FOR_absv8hi2
, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI
},
4149 { MASK_ALTIVEC
, CODE_FOR_absv4sf2
, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF
},
4150 { MASK_ALTIVEC
, CODE_FOR_absv16qi2
, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI
},
4151 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v4si
, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI
},
4152 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v8hi
, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI
},
4153 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v16qi
, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI
}
4156 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
4159 static struct builtin_description bdesc_1arg
[] =
4161 { MASK_ALTIVEC
, CODE_FOR_altivec_vexptefp
, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP
},
4162 { MASK_ALTIVEC
, CODE_FOR_altivec_vlogefp
, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP
},
4163 { MASK_ALTIVEC
, CODE_FOR_altivec_vrefp
, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP
},
4164 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfim
, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM
},
4165 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfin
, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN
},
4166 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfip
, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP
},
4167 { MASK_ALTIVEC
, CODE_FOR_ftruncv4sf2
, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ
},
4168 { MASK_ALTIVEC
, CODE_FOR_altivec_vrsqrtefp
, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP
},
4169 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltisb
, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB
},
4170 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltish
, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH
},
4171 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltisw
, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW
},
4172 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhsb
, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB
},
4173 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhpx
, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX
},
4174 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhsh
, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH
},
4175 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklsb
, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB
},
4176 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklpx
, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX
},
4177 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklsh
, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH
},
4179 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
4180 end with SPE_BUILTIN_EVSUBFUSIAAW. */
4181 { 0, CODE_FOR_spe_evabs
, "__builtin_spe_evabs", SPE_BUILTIN_EVABS
},
4182 { 0, CODE_FOR_spe_evaddsmiaaw
, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW
},
4183 { 0, CODE_FOR_spe_evaddssiaaw
, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW
},
4184 { 0, CODE_FOR_spe_evaddumiaaw
, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW
},
4185 { 0, CODE_FOR_spe_evaddusiaaw
, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW
},
4186 { 0, CODE_FOR_spe_evcntlsw
, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW
},
4187 { 0, CODE_FOR_spe_evcntlzw
, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW
},
4188 { 0, CODE_FOR_spe_evextsb
, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB
},
4189 { 0, CODE_FOR_spe_evextsh
, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH
},
4190 { 0, CODE_FOR_spe_evfsabs
, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS
},
4191 { 0, CODE_FOR_spe_evfscfsf
, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF
},
4192 { 0, CODE_FOR_spe_evfscfsi
, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI
},
4193 { 0, CODE_FOR_spe_evfscfuf
, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF
},
4194 { 0, CODE_FOR_spe_evfscfui
, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI
},
4195 { 0, CODE_FOR_spe_evfsctsf
, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF
},
4196 { 0, CODE_FOR_spe_evfsctsi
, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI
},
4197 { 0, CODE_FOR_spe_evfsctsiz
, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ
},
4198 { 0, CODE_FOR_spe_evfsctuf
, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF
},
4199 { 0, CODE_FOR_spe_evfsctui
, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI
},
4200 { 0, CODE_FOR_spe_evfsctuiz
, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ
},
4201 { 0, CODE_FOR_spe_evfsnabs
, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS
},
4202 { 0, CODE_FOR_spe_evfsneg
, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG
},
4203 { 0, CODE_FOR_spe_evmra
, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA
},
4204 { 0, CODE_FOR_spe_evneg
, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG
},
4205 { 0, CODE_FOR_spe_evrndw
, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW
},
4206 { 0, CODE_FOR_spe_evsubfsmiaaw
, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW
},
4207 { 0, CODE_FOR_spe_evsubfssiaaw
, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW
},
4208 { 0, CODE_FOR_spe_evsubfumiaaw
, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW
},
4209 { 0, CODE_FOR_spe_evsplatfi
, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI
},
4210 { 0, CODE_FOR_spe_evsplati
, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI
},
4212 /* Place-holder. Leave as last unary SPE builtin. */
4213 { 0, CODE_FOR_spe_evsubfusiaaw
, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW
},
4217 rs6000_expand_unop_builtin (icode
, arglist
, target
)
4218 enum insn_code icode
;
4223 tree arg0
= TREE_VALUE (arglist
);
4224 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4225 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4226 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4228 if (icode
== CODE_FOR_nothing
)
4229 /* Builtin not supported on this processor. */
4232 /* If we got invalid arguments bail out before generating bad rtl. */
4233 if (arg0
== error_mark_node
)
4236 if (icode
== CODE_FOR_altivec_vspltisb
4237 || icode
== CODE_FOR_altivec_vspltish
4238 || icode
== CODE_FOR_altivec_vspltisw
4239 || icode
== CODE_FOR_spe_evsplatfi
4240 || icode
== CODE_FOR_spe_evsplati
)
4242 /* Only allow 5-bit *signed* literals. */
4243 if (GET_CODE (op0
) != CONST_INT
4244 || INTVAL (op0
) > 0x1f
4245 || INTVAL (op0
) < -0x1f)
4247 error ("argument 1 must be a 5-bit signed literal");
4253 || GET_MODE (target
) != tmode
4254 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4255 target
= gen_reg_rtx (tmode
);
4257 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4258 op0
= copy_to_mode_reg (mode0
, op0
);
4260 pat
= GEN_FCN (icode
) (target
, op0
);
4269 altivec_expand_abs_builtin (icode
, arglist
, target
)
4270 enum insn_code icode
;
4274 rtx pat
, scratch1
, scratch2
;
4275 tree arg0
= TREE_VALUE (arglist
);
4276 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4277 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4278 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4280 /* If we have invalid arguments, bail out before generating bad rtl. */
4281 if (arg0
== error_mark_node
)
4285 || GET_MODE (target
) != tmode
4286 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4287 target
= gen_reg_rtx (tmode
);
4289 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4290 op0
= copy_to_mode_reg (mode0
, op0
);
4292 scratch1
= gen_reg_rtx (mode0
);
4293 scratch2
= gen_reg_rtx (mode0
);
4295 pat
= GEN_FCN (icode
) (target
, op0
, scratch1
, scratch2
);
4304 rs6000_expand_binop_builtin (icode
, arglist
, target
)
4305 enum insn_code icode
;
4310 tree arg0
= TREE_VALUE (arglist
);
4311 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4312 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4313 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4314 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4315 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4316 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
4318 if (icode
== CODE_FOR_nothing
)
4319 /* Builtin not supported on this processor. */
4322 /* If we got invalid arguments bail out before generating bad rtl. */
4323 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
4326 if (icode
== CODE_FOR_altivec_vcfux
4327 || icode
== CODE_FOR_altivec_vcfsx
4328 || icode
== CODE_FOR_altivec_vctsxs
4329 || icode
== CODE_FOR_altivec_vctuxs
4330 || icode
== CODE_FOR_altivec_vspltb
4331 || icode
== CODE_FOR_altivec_vsplth
4332 || icode
== CODE_FOR_altivec_vspltw
4333 || icode
== CODE_FOR_spe_evaddiw
4334 || icode
== CODE_FOR_spe_evldd
4335 || icode
== CODE_FOR_spe_evldh
4336 || icode
== CODE_FOR_spe_evldw
4337 || icode
== CODE_FOR_spe_evlhhesplat
4338 || icode
== CODE_FOR_spe_evlhhossplat
4339 || icode
== CODE_FOR_spe_evlhhousplat
4340 || icode
== CODE_FOR_spe_evlwhe
4341 || icode
== CODE_FOR_spe_evlwhos
4342 || icode
== CODE_FOR_spe_evlwhou
4343 || icode
== CODE_FOR_spe_evlwhsplat
4344 || icode
== CODE_FOR_spe_evlwwsplat
4345 || icode
== CODE_FOR_spe_evrlwi
4346 || icode
== CODE_FOR_spe_evslwi
4347 || icode
== CODE_FOR_spe_evsrwis
4348 || icode
== CODE_FOR_spe_evsrwiu
)
4350 /* Only allow 5-bit unsigned literals. */
4351 if (TREE_CODE (arg1
) != INTEGER_CST
4352 || TREE_INT_CST_LOW (arg1
) & ~0x1f)
4354 error ("argument 2 must be a 5-bit unsigned literal");
4360 || GET_MODE (target
) != tmode
4361 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4362 target
= gen_reg_rtx (tmode
);
4364 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4365 op0
= copy_to_mode_reg (mode0
, op0
);
4366 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
4367 op1
= copy_to_mode_reg (mode1
, op1
);
4369 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
4378 altivec_expand_predicate_builtin (icode
, opcode
, arglist
, target
)
4379 enum insn_code icode
;
4385 tree cr6_form
= TREE_VALUE (arglist
);
4386 tree arg0
= TREE_VALUE (TREE_CHAIN (arglist
));
4387 tree arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4388 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4389 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4390 enum machine_mode tmode
= SImode
;
4391 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4392 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
4395 if (TREE_CODE (cr6_form
) != INTEGER_CST
)
4397 error ("argument 1 of __builtin_altivec_predicate must be a constant");
4401 cr6_form_int
= TREE_INT_CST_LOW (cr6_form
);
4406 /* If we have invalid arguments, bail out before generating bad rtl. */
4407 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
4411 || GET_MODE (target
) != tmode
4412 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4413 target
= gen_reg_rtx (tmode
);
4415 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4416 op0
= copy_to_mode_reg (mode0
, op0
);
4417 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
4418 op1
= copy_to_mode_reg (mode1
, op1
);
4420 scratch
= gen_reg_rtx (mode0
);
4422 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
,
4423 gen_rtx (SYMBOL_REF
, Pmode
, opcode
));
4428 /* The vec_any* and vec_all* predicates use the same opcodes for two
4429 different operations, but the bits in CR6 will be different
4430 depending on what information we want. So we have to play tricks
4431 with CR6 to get the right bits out.
4433 If you think this is disgusting, look at the specs for the
4434 AltiVec predicates. */
4436 switch (cr6_form_int
)
4439 emit_insn (gen_cr6_test_for_zero (target
));
4442 emit_insn (gen_cr6_test_for_zero_reverse (target
));
4445 emit_insn (gen_cr6_test_for_lt (target
));
4448 emit_insn (gen_cr6_test_for_lt_reverse (target
));
4451 error ("argument 1 of __builtin_altivec_predicate is out of range");
4459 altivec_expand_stv_builtin (icode
, arglist
)
4460 enum insn_code icode
;
4463 tree arg0
= TREE_VALUE (arglist
);
4464 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4465 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4466 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4467 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4468 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
4470 enum machine_mode mode0
= insn_data
[icode
].operand
[0].mode
;
4471 enum machine_mode mode1
= insn_data
[icode
].operand
[1].mode
;
4472 enum machine_mode mode2
= insn_data
[icode
].operand
[2].mode
;
4474 /* Invalid arguments. Bail before doing anything stoopid! */
4475 if (arg0
== error_mark_node
4476 || arg1
== error_mark_node
4477 || arg2
== error_mark_node
)
4480 if (! (*insn_data
[icode
].operand
[2].predicate
) (op0
, mode2
))
4481 op0
= copy_to_mode_reg (mode2
, op0
);
4482 if (! (*insn_data
[icode
].operand
[0].predicate
) (op1
, mode0
))
4483 op1
= copy_to_mode_reg (mode0
, op1
);
4484 if (! (*insn_data
[icode
].operand
[1].predicate
) (op2
, mode1
))
4485 op2
= copy_to_mode_reg (mode1
, op2
);
4487 pat
= GEN_FCN (icode
) (op1
, op2
, op0
);
4494 rs6000_expand_ternop_builtin (icode
, arglist
, target
)
4495 enum insn_code icode
;
4500 tree arg0
= TREE_VALUE (arglist
);
4501 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4502 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4503 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4504 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4505 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
4506 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4507 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4508 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
4509 enum machine_mode mode2
= insn_data
[icode
].operand
[3].mode
;
4511 if (icode
== CODE_FOR_nothing
)
4512 /* Builtin not supported on this processor. */
4515 /* If we got invalid arguments bail out before generating bad rtl. */
4516 if (arg0
== error_mark_node
4517 || arg1
== error_mark_node
4518 || arg2
== error_mark_node
)
4521 if (icode
== CODE_FOR_altivec_vsldoi_4sf
4522 || icode
== CODE_FOR_altivec_vsldoi_4si
4523 || icode
== CODE_FOR_altivec_vsldoi_8hi
4524 || icode
== CODE_FOR_altivec_vsldoi_16qi
)
4526 /* Only allow 4-bit unsigned literals. */
4527 if (TREE_CODE (arg2
) != INTEGER_CST
4528 || TREE_INT_CST_LOW (arg2
) & ~0xf)
4530 error ("argument 3 must be a 4-bit unsigned literal");
4536 || GET_MODE (target
) != tmode
4537 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4538 target
= gen_reg_rtx (tmode
);
4540 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4541 op0
= copy_to_mode_reg (mode0
, op0
);
4542 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
4543 op1
= copy_to_mode_reg (mode1
, op1
);
4544 if (! (*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
))
4545 op2
= copy_to_mode_reg (mode2
, op2
);
4547 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
);
4555 /* Expand the lvx builtins. */
4557 altivec_expand_ld_builtin (exp
, target
, expandedp
)
4562 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4563 tree arglist
= TREE_OPERAND (exp
, 1);
4564 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4566 enum machine_mode tmode
, mode0
;
4568 enum insn_code icode
;
4572 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi
:
4573 icode
= CODE_FOR_altivec_lvx_16qi
;
4575 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi
:
4576 icode
= CODE_FOR_altivec_lvx_8hi
;
4578 case ALTIVEC_BUILTIN_LD_INTERNAL_4si
:
4579 icode
= CODE_FOR_altivec_lvx_4si
;
4581 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf
:
4582 icode
= CODE_FOR_altivec_lvx_4sf
;
4591 arg0
= TREE_VALUE (arglist
);
4592 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4593 tmode
= insn_data
[icode
].operand
[0].mode
;
4594 mode0
= insn_data
[icode
].operand
[1].mode
;
4597 || GET_MODE (target
) != tmode
4598 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4599 target
= gen_reg_rtx (tmode
);
4601 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4602 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
4604 pat
= GEN_FCN (icode
) (target
, op0
);
4611 /* Expand the stvx builtins. */
4613 altivec_expand_st_builtin (exp
, target
, expandedp
)
4615 rtx target ATTRIBUTE_UNUSED
;
4618 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4619 tree arglist
= TREE_OPERAND (exp
, 1);
4620 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4622 enum machine_mode mode0
, mode1
;
4624 enum insn_code icode
;
4628 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi
:
4629 icode
= CODE_FOR_altivec_stvx_16qi
;
4631 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi
:
4632 icode
= CODE_FOR_altivec_stvx_8hi
;
4634 case ALTIVEC_BUILTIN_ST_INTERNAL_4si
:
4635 icode
= CODE_FOR_altivec_stvx_4si
;
4637 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf
:
4638 icode
= CODE_FOR_altivec_stvx_4sf
;
4645 arg0
= TREE_VALUE (arglist
);
4646 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4647 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4648 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4649 mode0
= insn_data
[icode
].operand
[0].mode
;
4650 mode1
= insn_data
[icode
].operand
[1].mode
;
4652 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
4653 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
4654 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
4655 op1
= copy_to_mode_reg (mode1
, op1
);
4657 pat
= GEN_FCN (icode
) (op0
, op1
);
4665 /* Expand the dst builtins. */
4667 altivec_expand_dst_builtin (exp
, target
, expandedp
)
4669 rtx target ATTRIBUTE_UNUSED
;
4672 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4673 tree arglist
= TREE_OPERAND (exp
, 1);
4674 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4675 tree arg0
, arg1
, arg2
;
4676 enum machine_mode mode0
, mode1
, mode2
;
4677 rtx pat
, op0
, op1
, op2
;
4678 struct builtin_description
*d
;
4683 /* Handle DST variants. */
4684 d
= (struct builtin_description
*) bdesc_dst
;
4685 for (i
= 0; i
< ARRAY_SIZE (bdesc_dst
); i
++, d
++)
4686 if (d
->code
== fcode
)
4688 arg0
= TREE_VALUE (arglist
);
4689 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4690 arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4691 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4692 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4693 op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
4694 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
4695 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
4696 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
4698 /* Invalid arguments, bail out before generating bad rtl. */
4699 if (arg0
== error_mark_node
4700 || arg1
== error_mark_node
4701 || arg2
== error_mark_node
)
4704 if (TREE_CODE (arg2
) != INTEGER_CST
4705 || TREE_INT_CST_LOW (arg2
) & ~0x3)
4707 error ("argument to `%s' must be a 2-bit unsigned literal", d
->name
);
4711 if (! (*insn_data
[d
->icode
].operand
[0].predicate
) (op0
, mode0
))
4712 op0
= copy_to_mode_reg (mode0
, op0
);
4713 if (! (*insn_data
[d
->icode
].operand
[1].predicate
) (op1
, mode1
))
4714 op1
= copy_to_mode_reg (mode1
, op1
);
4716 pat
= GEN_FCN (d
->icode
) (op0
, op1
, op2
);
4727 /* Expand the builtin in EXP and store the result in TARGET. Store
4728 true in *EXPANDEDP if we found a builtin to expand. */
4730 altivec_expand_builtin (exp
, target
, expandedp
)
4735 struct builtin_description
*d
;
4736 struct builtin_description_predicates
*dp
;
4738 enum insn_code icode
;
4739 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4740 tree arglist
= TREE_OPERAND (exp
, 1);
4743 enum machine_mode tmode
, mode0
;
4744 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4746 target
= altivec_expand_ld_builtin (exp
, target
, expandedp
);
4750 target
= altivec_expand_st_builtin (exp
, target
, expandedp
);
4754 target
= altivec_expand_dst_builtin (exp
, target
, expandedp
);
4762 case ALTIVEC_BUILTIN_STVX
:
4763 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx
, arglist
);
4764 case ALTIVEC_BUILTIN_STVEBX
:
4765 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx
, arglist
);
4766 case ALTIVEC_BUILTIN_STVEHX
:
4767 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx
, arglist
);
4768 case ALTIVEC_BUILTIN_STVEWX
:
4769 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx
, arglist
);
4770 case ALTIVEC_BUILTIN_STVXL
:
4771 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl
, arglist
);
4773 case ALTIVEC_BUILTIN_MFVSCR
:
4774 icode
= CODE_FOR_altivec_mfvscr
;
4775 tmode
= insn_data
[icode
].operand
[0].mode
;
4778 || GET_MODE (target
) != tmode
4779 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4780 target
= gen_reg_rtx (tmode
);
4782 pat
= GEN_FCN (icode
) (target
);
4788 case ALTIVEC_BUILTIN_MTVSCR
:
4789 icode
= CODE_FOR_altivec_mtvscr
;
4790 arg0
= TREE_VALUE (arglist
);
4791 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4792 mode0
= insn_data
[icode
].operand
[0].mode
;
4794 /* If we got invalid arguments bail out before generating bad rtl. */
4795 if (arg0
== error_mark_node
)
4798 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
4799 op0
= copy_to_mode_reg (mode0
, op0
);
4801 pat
= GEN_FCN (icode
) (op0
);
4806 case ALTIVEC_BUILTIN_DSSALL
:
4807 emit_insn (gen_altivec_dssall ());
4810 case ALTIVEC_BUILTIN_DSS
:
4811 icode
= CODE_FOR_altivec_dss
;
4812 arg0
= TREE_VALUE (arglist
);
4813 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4814 mode0
= insn_data
[icode
].operand
[0].mode
;
4816 /* If we got invalid arguments bail out before generating bad rtl. */
4817 if (arg0
== error_mark_node
)
4820 if (TREE_CODE (arg0
) != INTEGER_CST
4821 || TREE_INT_CST_LOW (arg0
) & ~0x3)
4823 error ("argument to dss must be a 2-bit unsigned literal");
4827 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
4828 op0
= copy_to_mode_reg (mode0
, op0
);
4830 emit_insn (gen_altivec_dss (op0
));
4834 /* Expand abs* operations. */
4835 d
= (struct builtin_description
*) bdesc_abs
;
4836 for (i
= 0; i
< ARRAY_SIZE (bdesc_abs
); i
++, d
++)
4837 if (d
->code
== fcode
)
4838 return altivec_expand_abs_builtin (d
->icode
, arglist
, target
);
4840 /* Expand the AltiVec predicates. */
4841 dp
= (struct builtin_description_predicates
*) bdesc_altivec_preds
;
4842 for (i
= 0; i
< ARRAY_SIZE (bdesc_altivec_preds
); i
++, dp
++)
4843 if (dp
->code
== fcode
)
4844 return altivec_expand_predicate_builtin (dp
->icode
, dp
->opcode
, arglist
, target
);
4846 /* LV* are funky. We initialized them differently. */
4849 case ALTIVEC_BUILTIN_LVSL
:
4850 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl
,
4852 case ALTIVEC_BUILTIN_LVSR
:
4853 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr
,
4855 case ALTIVEC_BUILTIN_LVEBX
:
4856 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx
,
4858 case ALTIVEC_BUILTIN_LVEHX
:
4859 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx
,
4861 case ALTIVEC_BUILTIN_LVEWX
:
4862 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx
,
4864 case ALTIVEC_BUILTIN_LVXL
:
4865 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl
,
4867 case ALTIVEC_BUILTIN_LVX
:
4868 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx
,
4879 /* Binops that need to be initialized manually, but can be expanded
4880 automagically by rs6000_expand_binop_builtin. */
4881 static struct builtin_description bdesc_2arg_spe
[] =
4883 { 0, CODE_FOR_spe_evlddx
, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX
},
4884 { 0, CODE_FOR_spe_evldwx
, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX
},
4885 { 0, CODE_FOR_spe_evldhx
, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX
},
4886 { 0, CODE_FOR_spe_evlwhex
, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX
},
4887 { 0, CODE_FOR_spe_evlwhoux
, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX
},
4888 { 0, CODE_FOR_spe_evlwhosx
, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX
},
4889 { 0, CODE_FOR_spe_evlwwsplatx
, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX
},
4890 { 0, CODE_FOR_spe_evlwhsplatx
, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX
},
4891 { 0, CODE_FOR_spe_evlhhesplatx
, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX
},
4892 { 0, CODE_FOR_spe_evlhhousplatx
, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX
},
4893 { 0, CODE_FOR_spe_evlhhossplatx
, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX
},
4894 { 0, CODE_FOR_spe_evldd
, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD
},
4895 { 0, CODE_FOR_spe_evldw
, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW
},
4896 { 0, CODE_FOR_spe_evldh
, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH
},
4897 { 0, CODE_FOR_spe_evlwhe
, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE
},
4898 { 0, CODE_FOR_spe_evlwhou
, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU
},
4899 { 0, CODE_FOR_spe_evlwhos
, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS
},
4900 { 0, CODE_FOR_spe_evlwwsplat
, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT
},
4901 { 0, CODE_FOR_spe_evlwhsplat
, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT
},
4902 { 0, CODE_FOR_spe_evlhhesplat
, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT
},
4903 { 0, CODE_FOR_spe_evlhhousplat
, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT
},
4904 { 0, CODE_FOR_spe_evlhhossplat
, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT
}
4907 /* Expand the builtin in EXP and store the result in TARGET. Store
4908 true in *EXPANDEDP if we found a builtin to expand.
4910 This expands the SPE builtins that are not simple unary and binary
4913 spe_expand_builtin (exp
, target
, expandedp
)
4918 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4919 tree arglist
= TREE_OPERAND (exp
, 1);
4921 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4922 enum insn_code icode
;
4923 enum machine_mode tmode
, mode0
;
4925 struct builtin_description
*d
;
4930 /* Syntax check for a 5-bit unsigned immediate. */
4933 case SPE_BUILTIN_EVSTDD
:
4934 case SPE_BUILTIN_EVSTDH
:
4935 case SPE_BUILTIN_EVSTDW
:
4936 case SPE_BUILTIN_EVSTWHE
:
4937 case SPE_BUILTIN_EVSTWHO
:
4938 case SPE_BUILTIN_EVSTWWE
:
4939 case SPE_BUILTIN_EVSTWWO
:
4940 arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4941 if (TREE_CODE (arg1
) != INTEGER_CST
4942 || TREE_INT_CST_LOW (arg1
) & ~0x1f)
4944 error ("argument 2 must be a 5-bit unsigned literal");
4952 d
= (struct builtin_description
*) bdesc_2arg_spe
;
4953 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg_spe
); ++i
, ++d
)
4954 if (d
->code
== fcode
)
4955 return rs6000_expand_binop_builtin (d
->icode
, arglist
, target
);
4957 d
= (struct builtin_description
*) bdesc_spe_predicates
;
4958 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_predicates
); ++i
, ++d
)
4959 if (d
->code
== fcode
)
4960 return spe_expand_predicate_builtin (d
->icode
, arglist
, target
);
4962 d
= (struct builtin_description
*) bdesc_spe_evsel
;
4963 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_evsel
); ++i
, ++d
)
4964 if (d
->code
== fcode
)
4965 return spe_expand_evsel_builtin (d
->icode
, arglist
, target
);
4969 case SPE_BUILTIN_EVSTDDX
:
4970 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx
, arglist
);
4971 case SPE_BUILTIN_EVSTDHX
:
4972 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx
, arglist
);
4973 case SPE_BUILTIN_EVSTDWX
:
4974 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx
, arglist
);
4975 case SPE_BUILTIN_EVSTWHEX
:
4976 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex
, arglist
);
4977 case SPE_BUILTIN_EVSTWHOX
:
4978 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox
, arglist
);
4979 case SPE_BUILTIN_EVSTWWEX
:
4980 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex
, arglist
);
4981 case SPE_BUILTIN_EVSTWWOX
:
4982 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox
, arglist
);
4983 case SPE_BUILTIN_EVSTDD
:
4984 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd
, arglist
);
4985 case SPE_BUILTIN_EVSTDH
:
4986 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh
, arglist
);
4987 case SPE_BUILTIN_EVSTDW
:
4988 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw
, arglist
);
4989 case SPE_BUILTIN_EVSTWHE
:
4990 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe
, arglist
);
4991 case SPE_BUILTIN_EVSTWHO
:
4992 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho
, arglist
);
4993 case SPE_BUILTIN_EVSTWWE
:
4994 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe
, arglist
);
4995 case SPE_BUILTIN_EVSTWWO
:
4996 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo
, arglist
);
4997 case SPE_BUILTIN_MFSPEFSCR
:
4998 icode
= CODE_FOR_spe_mfspefscr
;
4999 tmode
= insn_data
[icode
].operand
[0].mode
;
5002 || GET_MODE (target
) != tmode
5003 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5004 target
= gen_reg_rtx (tmode
);
5006 pat
= GEN_FCN (icode
) (target
);
5011 case SPE_BUILTIN_MTSPEFSCR
:
5012 icode
= CODE_FOR_spe_mtspefscr
;
5013 arg0
= TREE_VALUE (arglist
);
5014 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5015 mode0
= insn_data
[icode
].operand
[0].mode
;
5017 if (arg0
== error_mark_node
)
5020 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
5021 op0
= copy_to_mode_reg (mode0
, op0
);
5023 pat
= GEN_FCN (icode
) (op0
);
5036 spe_expand_predicate_builtin (icode
, arglist
, target
)
5037 enum insn_code icode
;
5041 rtx pat
, scratch
, tmp
;
5042 tree form
= TREE_VALUE (arglist
);
5043 tree arg0
= TREE_VALUE (TREE_CHAIN (arglist
));
5044 tree arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5045 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5046 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5047 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5048 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
5052 if (TREE_CODE (form
) != INTEGER_CST
)
5054 error ("argument 1 of __builtin_spe_predicate must be a constant");
5058 form_int
= TREE_INT_CST_LOW (form
);
5063 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
5067 || GET_MODE (target
) != SImode
5068 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, SImode
))
5069 target
= gen_reg_rtx (SImode
);
5071 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5072 op0
= copy_to_mode_reg (mode0
, op0
);
5073 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
5074 op1
= copy_to_mode_reg (mode1
, op1
);
5076 scratch
= gen_reg_rtx (CCmode
);
5078 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
);
5083 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
5084 _lower_. We use one compare, but look in different bits of the
5085 CR for each variant.
5087 There are 2 elements in each SPE simd type (upper/lower). The CR
5088 bits are set as follows:
5090 BIT0 | BIT 1 | BIT 2 | BIT 3
5091 U | L | (U | L) | (U & L)
5093 So, for an "all" relationship, BIT 3 would be set.
5094 For an "any" relationship, BIT 2 would be set. Etc.
5096 Following traditional nomenclature, these bits map to:
5098 BIT0 | BIT 1 | BIT 2 | BIT 3
5101 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
5106 /* All variant. OV bit. */
5108 /* We need to get to the OV bit, which is the ORDERED bit. We
5109 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
5110 that's ugly and will trigger a validate_condition_mode abort.
5111 So let's just use another pattern. */
5112 emit_insn (gen_move_from_CR_ov_bit (target
, scratch
));
5114 /* Any variant. EQ bit. */
5118 /* Upper variant. LT bit. */
5122 /* Lower variant. GT bit. */
5127 error ("argument 1 of __builtin_spe_predicate is out of range");
5131 tmp
= gen_rtx_fmt_ee (code
, SImode
, scratch
, const0_rtx
);
5132 emit_move_insn (target
, tmp
);
5137 /* The evsel builtins look like this:
5139 e = __builtin_spe_evsel_OP (a, b, c, d);
5143 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
5144 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
5148 spe_expand_evsel_builtin (icode
, arglist
, target
)
5149 enum insn_code icode
;
5154 tree arg0
= TREE_VALUE (arglist
);
5155 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
5156 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5157 tree arg3
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist
))));
5158 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5159 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5160 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
5161 rtx op3
= expand_expr (arg3
, NULL_RTX
, VOIDmode
, 0);
5162 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5163 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
5168 if (arg0
== error_mark_node
|| arg1
== error_mark_node
5169 || arg2
== error_mark_node
|| arg3
== error_mark_node
)
5173 || GET_MODE (target
) != mode0
5174 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, mode0
))
5175 target
= gen_reg_rtx (mode0
);
5177 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5178 op0
= copy_to_mode_reg (mode0
, op0
);
5179 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
5180 op1
= copy_to_mode_reg (mode0
, op1
);
5181 if (! (*insn_data
[icode
].operand
[1].predicate
) (op2
, mode1
))
5182 op2
= copy_to_mode_reg (mode0
, op2
);
5183 if (! (*insn_data
[icode
].operand
[1].predicate
) (op3
, mode1
))
5184 op3
= copy_to_mode_reg (mode0
, op3
);
5186 /* Generate the compare. */
5187 scratch
= gen_reg_rtx (CCmode
);
5188 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
);
5193 if (mode0
== V2SImode
)
5194 emit_insn (gen_spe_evsel (target
, op2
, op3
, scratch
));
5196 emit_insn (gen_spe_evsel_fs (target
, op2
, op3
, scratch
));
5201 /* Expand an expression EXP that calls a built-in function,
5202 with result going to TARGET if that's convenient
5203 (and in mode MODE if that's convenient).
5204 SUBTARGET may be used as the target for computing one of EXP's operands.
5205 IGNORE is nonzero if the value is to be ignored. */
5208 rs6000_expand_builtin (exp
, target
, subtarget
, mode
, ignore
)
5211 rtx subtarget ATTRIBUTE_UNUSED
;
5212 enum machine_mode mode ATTRIBUTE_UNUSED
;
5213 int ignore ATTRIBUTE_UNUSED
;
5215 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5216 tree arglist
= TREE_OPERAND (exp
, 1);
5217 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
5218 struct builtin_description
*d
;
5225 ret
= altivec_expand_builtin (exp
, target
, &success
);
5232 ret
= spe_expand_builtin (exp
, target
, &success
);
5238 if (TARGET_ALTIVEC
|| TARGET_SPE
)
5240 /* Handle simple unary operations. */
5241 d
= (struct builtin_description
*) bdesc_1arg
;
5242 for (i
= 0; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
5243 if (d
->code
== fcode
)
5244 return rs6000_expand_unop_builtin (d
->icode
, arglist
, target
);
5246 /* Handle simple binary operations. */
5247 d
= (struct builtin_description
*) bdesc_2arg
;
5248 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
5249 if (d
->code
== fcode
)
5250 return rs6000_expand_binop_builtin (d
->icode
, arglist
, target
);
5252 /* Handle simple ternary operations. */
5253 d
= (struct builtin_description
*) bdesc_3arg
;
5254 for (i
= 0; i
< ARRAY_SIZE (bdesc_3arg
); i
++, d
++)
5255 if (d
->code
== fcode
)
5256 return rs6000_expand_ternop_builtin (d
->icode
, arglist
, target
);
5264 rs6000_init_builtins ()
5267 spe_init_builtins ();
5269 altivec_init_builtins ();
5270 if (TARGET_ALTIVEC
|| TARGET_SPE
)
5271 rs6000_common_init_builtins ();
5274 /* Search through a set of builtins and enable the mask bits.
5275 DESC is an array of builtins.
5276 SIZE is the total number of builtins.
5277 START is the builtin enum at which to start.
5278 END is the builtin enum at which to end. */
5280 enable_mask_for_builtins (desc
, size
, start
, end
)
5281 struct builtin_description
*desc
;
5283 enum rs6000_builtins start
, end
;
5287 for (i
= 0; i
< size
; ++i
)
5288 if (desc
[i
].code
== start
)
5294 for (; i
< size
; ++i
)
5296 /* Flip all the bits on. */
5297 desc
[i
].mask
= target_flags
;
5298 if (desc
[i
].code
== end
)
5304 spe_init_builtins ()
5306 tree endlink
= void_list_node
;
5307 tree puint_type_node
= build_pointer_type (unsigned_type_node
);
5308 tree pushort_type_node
= build_pointer_type (short_unsigned_type_node
);
5309 tree pv2si_type_node
= build_pointer_type (V2SI_type_node
);
5310 struct builtin_description
*d
;
5313 tree v2si_ftype_4_v2si
5314 = build_function_type
5316 tree_cons (NULL_TREE
, V2SI_type_node
,
5317 tree_cons (NULL_TREE
, V2SI_type_node
,
5318 tree_cons (NULL_TREE
, V2SI_type_node
,
5319 tree_cons (NULL_TREE
, V2SI_type_node
,
5322 tree v2sf_ftype_4_v2sf
5323 = build_function_type
5325 tree_cons (NULL_TREE
, V2SF_type_node
,
5326 tree_cons (NULL_TREE
, V2SF_type_node
,
5327 tree_cons (NULL_TREE
, V2SF_type_node
,
5328 tree_cons (NULL_TREE
, V2SF_type_node
,
5331 tree int_ftype_int_v2si_v2si
5332 = build_function_type
5334 tree_cons (NULL_TREE
, integer_type_node
,
5335 tree_cons (NULL_TREE
, V2SI_type_node
,
5336 tree_cons (NULL_TREE
, V2SI_type_node
,
5339 tree int_ftype_int_v2sf_v2sf
5340 = build_function_type
5342 tree_cons (NULL_TREE
, integer_type_node
,
5343 tree_cons (NULL_TREE
, V2SF_type_node
,
5344 tree_cons (NULL_TREE
, V2SF_type_node
,
5347 tree void_ftype_v2si_puint_int
5348 = build_function_type (void_type_node
,
5349 tree_cons (NULL_TREE
, V2SI_type_node
,
5350 tree_cons (NULL_TREE
, puint_type_node
,
5351 tree_cons (NULL_TREE
,
5355 tree void_ftype_v2si_puint_char
5356 = build_function_type (void_type_node
,
5357 tree_cons (NULL_TREE
, V2SI_type_node
,
5358 tree_cons (NULL_TREE
, puint_type_node
,
5359 tree_cons (NULL_TREE
,
5363 tree void_ftype_v2si_pv2si_int
5364 = build_function_type (void_type_node
,
5365 tree_cons (NULL_TREE
, V2SI_type_node
,
5366 tree_cons (NULL_TREE
, pv2si_type_node
,
5367 tree_cons (NULL_TREE
,
5371 tree void_ftype_v2si_pv2si_char
5372 = build_function_type (void_type_node
,
5373 tree_cons (NULL_TREE
, V2SI_type_node
,
5374 tree_cons (NULL_TREE
, pv2si_type_node
,
5375 tree_cons (NULL_TREE
,
5380 = build_function_type (void_type_node
,
5381 tree_cons (NULL_TREE
, integer_type_node
, endlink
));
5384 = build_function_type (integer_type_node
,
5385 tree_cons (NULL_TREE
, void_type_node
, endlink
));
5387 tree v2si_ftype_pv2si_int
5388 = build_function_type (V2SI_type_node
,
5389 tree_cons (NULL_TREE
, pv2si_type_node
,
5390 tree_cons (NULL_TREE
, integer_type_node
,
5393 tree v2si_ftype_puint_int
5394 = build_function_type (V2SI_type_node
,
5395 tree_cons (NULL_TREE
, puint_type_node
,
5396 tree_cons (NULL_TREE
, integer_type_node
,
5399 tree v2si_ftype_pushort_int
5400 = build_function_type (V2SI_type_node
,
5401 tree_cons (NULL_TREE
, pushort_type_node
,
5402 tree_cons (NULL_TREE
, integer_type_node
,
5405 /* The initialization of the simple binary and unary builtins is
5406 done in rs6000_common_init_builtins, but we have to enable the
5407 mask bits here manually because we have run out of `target_flags'
5408 bits. We really need to redesign this mask business. */
5410 enable_mask_for_builtins ((struct builtin_description
*) bdesc_2arg
,
5411 ARRAY_SIZE (bdesc_2arg
),
5414 enable_mask_for_builtins ((struct builtin_description
*) bdesc_1arg
,
5415 ARRAY_SIZE (bdesc_1arg
),
5417 SPE_BUILTIN_EVSUBFUSIAAW
);
5418 enable_mask_for_builtins ((struct builtin_description
*) bdesc_spe_predicates
,
5419 ARRAY_SIZE (bdesc_spe_predicates
),
5420 SPE_BUILTIN_EVCMPEQ
,
5421 SPE_BUILTIN_EVFSTSTLT
);
5422 enable_mask_for_builtins ((struct builtin_description
*) bdesc_spe_evsel
,
5423 ARRAY_SIZE (bdesc_spe_evsel
),
5424 SPE_BUILTIN_EVSEL_CMPGTS
,
5425 SPE_BUILTIN_EVSEL_FSTSTEQ
);
5427 /* Initialize irregular SPE builtins. */
5429 def_builtin (target_flags
, "__builtin_spe_mtspefscr", void_ftype_int
, SPE_BUILTIN_MTSPEFSCR
);
5430 def_builtin (target_flags
, "__builtin_spe_mfspefscr", int_ftype_void
, SPE_BUILTIN_MFSPEFSCR
);
5431 def_builtin (target_flags
, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDDX
);
5432 def_builtin (target_flags
, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDHX
);
5433 def_builtin (target_flags
, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDWX
);
5434 def_builtin (target_flags
, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWHEX
);
5435 def_builtin (target_flags
, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWHOX
);
5436 def_builtin (target_flags
, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWWEX
);
5437 def_builtin (target_flags
, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWWOX
);
5438 def_builtin (target_flags
, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDD
);
5439 def_builtin (target_flags
, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDH
);
5440 def_builtin (target_flags
, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDW
);
5441 def_builtin (target_flags
, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWHE
);
5442 def_builtin (target_flags
, "__builtin_spe_evstwho", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWHO
);
5443 def_builtin (target_flags
, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWWE
);
5444 def_builtin (target_flags
, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWWO
);
5447 def_builtin (target_flags
, "__builtin_spe_evlddx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDDX
);
5448 def_builtin (target_flags
, "__builtin_spe_evldwx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDWX
);
5449 def_builtin (target_flags
, "__builtin_spe_evldhx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDHX
);
5450 def_builtin (target_flags
, "__builtin_spe_evlwhex", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHEX
);
5451 def_builtin (target_flags
, "__builtin_spe_evlwhoux", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOUX
);
5452 def_builtin (target_flags
, "__builtin_spe_evlwhosx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOSX
);
5453 def_builtin (target_flags
, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWWSPLATX
);
5454 def_builtin (target_flags
, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHSPLATX
);
5455 def_builtin (target_flags
, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHESPLATX
);
5456 def_builtin (target_flags
, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOUSPLATX
);
5457 def_builtin (target_flags
, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOSSPLATX
);
5458 def_builtin (target_flags
, "__builtin_spe_evldd", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDD
);
5459 def_builtin (target_flags
, "__builtin_spe_evldw", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDW
);
5460 def_builtin (target_flags
, "__builtin_spe_evldh", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDH
);
5461 def_builtin (target_flags
, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHESPLAT
);
5462 def_builtin (target_flags
, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOSSPLAT
);
5463 def_builtin (target_flags
, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOUSPLAT
);
5464 def_builtin (target_flags
, "__builtin_spe_evlwhe", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHE
);
5465 def_builtin (target_flags
, "__builtin_spe_evlwhos", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOS
);
5466 def_builtin (target_flags
, "__builtin_spe_evlwhou", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOU
);
5467 def_builtin (target_flags
, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHSPLAT
);
5468 def_builtin (target_flags
, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWWSPLAT
);
5471 d
= (struct builtin_description
*) bdesc_spe_predicates
;
5472 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_predicates
); ++i
, d
++)
5476 switch (insn_data
[d
->icode
].operand
[1].mode
)
5479 type
= int_ftype_int_v2si_v2si
;
5482 type
= int_ftype_int_v2sf_v2sf
;
5488 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
5491 /* Evsel predicates. */
5492 d
= (struct builtin_description
*) bdesc_spe_evsel
;
5493 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_evsel
); ++i
, d
++)
5497 switch (insn_data
[d
->icode
].operand
[1].mode
)
5500 type
= v2si_ftype_4_v2si
;
5503 type
= v2sf_ftype_4_v2sf
;
5509 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
5514 altivec_init_builtins ()
5516 struct builtin_description
*d
;
5517 struct builtin_description_predicates
*dp
;
5519 tree pfloat_type_node
= build_pointer_type (float_type_node
);
5520 tree pint_type_node
= build_pointer_type (integer_type_node
);
5521 tree pshort_type_node
= build_pointer_type (short_integer_type_node
);
5522 tree pchar_type_node
= build_pointer_type (char_type_node
);
5524 tree pvoid_type_node
= build_pointer_type (void_type_node
);
5526 tree pcfloat_type_node
= build_pointer_type (build_qualified_type (float_type_node
, TYPE_QUAL_CONST
));
5527 tree pcint_type_node
= build_pointer_type (build_qualified_type (integer_type_node
, TYPE_QUAL_CONST
));
5528 tree pcshort_type_node
= build_pointer_type (build_qualified_type (short_integer_type_node
, TYPE_QUAL_CONST
));
5529 tree pcchar_type_node
= build_pointer_type (build_qualified_type (char_type_node
, TYPE_QUAL_CONST
));
5531 tree pcvoid_type_node
= build_pointer_type (build_qualified_type (void_type_node
, TYPE_QUAL_CONST
));
5533 tree int_ftype_int_v4si_v4si
5534 = build_function_type_list (integer_type_node
,
5535 integer_type_node
, V4SI_type_node
,
5536 V4SI_type_node
, NULL_TREE
);
5537 tree v4sf_ftype_pcfloat
5538 = build_function_type_list (V4SF_type_node
, pcfloat_type_node
, NULL_TREE
);
5539 tree void_ftype_pfloat_v4sf
5540 = build_function_type_list (void_type_node
,
5541 pfloat_type_node
, V4SF_type_node
, NULL_TREE
);
5542 tree v4si_ftype_pcint
5543 = build_function_type_list (V4SI_type_node
, pcint_type_node
, NULL_TREE
);
5544 tree void_ftype_pint_v4si
5545 = build_function_type_list (void_type_node
,
5546 pint_type_node
, V4SI_type_node
, NULL_TREE
);
5547 tree v8hi_ftype_pcshort
5548 = build_function_type_list (V8HI_type_node
, pcshort_type_node
, NULL_TREE
);
5549 tree void_ftype_pshort_v8hi
5550 = build_function_type_list (void_type_node
,
5551 pshort_type_node
, V8HI_type_node
, NULL_TREE
);
5552 tree v16qi_ftype_pcchar
5553 = build_function_type_list (V16QI_type_node
, pcchar_type_node
, NULL_TREE
);
5554 tree void_ftype_pchar_v16qi
5555 = build_function_type_list (void_type_node
,
5556 pchar_type_node
, V16QI_type_node
, NULL_TREE
);
5557 tree void_ftype_v4si
5558 = build_function_type_list (void_type_node
, V4SI_type_node
, NULL_TREE
);
5559 tree v8hi_ftype_void
5560 = build_function_type (V8HI_type_node
, void_list_node
);
5561 tree void_ftype_void
5562 = build_function_type (void_type_node
, void_list_node
);
5564 = build_function_type_list (void_type_node
, char_type_node
, NULL_TREE
);
5566 tree v16qi_ftype_int_pcvoid
5567 = build_function_type_list (V16QI_type_node
,
5568 integer_type_node
, pcvoid_type_node
, NULL_TREE
);
5569 tree v8hi_ftype_int_pcvoid
5570 = build_function_type_list (V8HI_type_node
,
5571 integer_type_node
, pcvoid_type_node
, NULL_TREE
);
5572 tree v4si_ftype_int_pcvoid
5573 = build_function_type_list (V4SI_type_node
,
5574 integer_type_node
, pcvoid_type_node
, NULL_TREE
);
5576 tree void_ftype_v4si_int_pvoid
5577 = build_function_type_list (void_type_node
,
5578 V4SI_type_node
, integer_type_node
,
5579 pvoid_type_node
, NULL_TREE
);
5580 tree void_ftype_v16qi_int_pvoid
5581 = build_function_type_list (void_type_node
,
5582 V16QI_type_node
, integer_type_node
,
5583 pvoid_type_node
, NULL_TREE
);
5584 tree void_ftype_v8hi_int_pvoid
5585 = build_function_type_list (void_type_node
,
5586 V8HI_type_node
, integer_type_node
,
5587 pvoid_type_node
, NULL_TREE
);
5588 tree int_ftype_int_v8hi_v8hi
5589 = build_function_type_list (integer_type_node
,
5590 integer_type_node
, V8HI_type_node
,
5591 V8HI_type_node
, NULL_TREE
);
5592 tree int_ftype_int_v16qi_v16qi
5593 = build_function_type_list (integer_type_node
,
5594 integer_type_node
, V16QI_type_node
,
5595 V16QI_type_node
, NULL_TREE
);
5596 tree int_ftype_int_v4sf_v4sf
5597 = build_function_type_list (integer_type_node
,
5598 integer_type_node
, V4SF_type_node
,
5599 V4SF_type_node
, NULL_TREE
);
5600 tree v4si_ftype_v4si
5601 = build_function_type_list (V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
5602 tree v8hi_ftype_v8hi
5603 = build_function_type_list (V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5604 tree v16qi_ftype_v16qi
5605 = build_function_type_list (V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5606 tree v4sf_ftype_v4sf
5607 = build_function_type_list (V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5608 tree void_ftype_pcvoid_int_char
5609 = build_function_type_list (void_type_node
,
5610 pcvoid_type_node
, integer_type_node
,
5611 char_type_node
, NULL_TREE
);
5613 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat
,
5614 ALTIVEC_BUILTIN_LD_INTERNAL_4sf
);
5615 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf
,
5616 ALTIVEC_BUILTIN_ST_INTERNAL_4sf
);
5617 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint
,
5618 ALTIVEC_BUILTIN_LD_INTERNAL_4si
);
5619 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si
,
5620 ALTIVEC_BUILTIN_ST_INTERNAL_4si
);
5621 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort
,
5622 ALTIVEC_BUILTIN_LD_INTERNAL_8hi
);
5623 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi
,
5624 ALTIVEC_BUILTIN_ST_INTERNAL_8hi
);
5625 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar
,
5626 ALTIVEC_BUILTIN_LD_INTERNAL_16qi
);
5627 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi
,
5628 ALTIVEC_BUILTIN_ST_INTERNAL_16qi
);
5629 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_mtvscr", void_ftype_v4si
, ALTIVEC_BUILTIN_MTVSCR
);
5630 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_mfvscr", v8hi_ftype_void
, ALTIVEC_BUILTIN_MFVSCR
);
5631 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_dssall", void_ftype_void
, ALTIVEC_BUILTIN_DSSALL
);
5632 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_dss", void_ftype_qi
, ALTIVEC_BUILTIN_DSS
);
5633 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvsl", v16qi_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVSL
);
5634 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvsr", v16qi_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVSR
);
5635 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvebx", v16qi_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVEBX
);
5636 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvehx", v8hi_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVEHX
);
5637 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvewx", v4si_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVEWX
);
5638 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvxl", v4si_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVXL
);
5639 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvx", v4si_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVX
);
5640 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVX
);
5641 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVEWX
);
5642 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVXL
);
5643 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid
, ALTIVEC_BUILTIN_STVEBX
);
5644 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid
, ALTIVEC_BUILTIN_STVEHX
);
5646 /* Add the DST variants. */
5647 d
= (struct builtin_description
*) bdesc_dst
;
5648 for (i
= 0; i
< ARRAY_SIZE (bdesc_dst
); i
++, d
++)
5649 def_builtin (d
->mask
, d
->name
, void_ftype_pcvoid_int_char
, d
->code
);
5651 /* Initialize the predicates. */
5652 dp
= (struct builtin_description_predicates
*) bdesc_altivec_preds
;
5653 for (i
= 0; i
< ARRAY_SIZE (bdesc_altivec_preds
); i
++, dp
++)
5655 enum machine_mode mode1
;
5658 mode1
= insn_data
[dp
->icode
].operand
[1].mode
;
5663 type
= int_ftype_int_v4si_v4si
;
5666 type
= int_ftype_int_v8hi_v8hi
;
5669 type
= int_ftype_int_v16qi_v16qi
;
5672 type
= int_ftype_int_v4sf_v4sf
;
5678 def_builtin (dp
->mask
, dp
->name
, type
, dp
->code
);
5681 /* Initialize the abs* operators. */
5682 d
= (struct builtin_description
*) bdesc_abs
;
5683 for (i
= 0; i
< ARRAY_SIZE (bdesc_abs
); i
++, d
++)
5685 enum machine_mode mode0
;
5688 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
5693 type
= v4si_ftype_v4si
;
5696 type
= v8hi_ftype_v8hi
;
5699 type
= v16qi_ftype_v16qi
;
5702 type
= v4sf_ftype_v4sf
;
5708 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
5713 rs6000_common_init_builtins ()
5715 struct builtin_description
*d
;
5718 tree v4sf_ftype_v4sf_v4sf_v16qi
5719 = build_function_type_list (V4SF_type_node
,
5720 V4SF_type_node
, V4SF_type_node
,
5721 V16QI_type_node
, NULL_TREE
);
5722 tree v4si_ftype_v4si_v4si_v16qi
5723 = build_function_type_list (V4SI_type_node
,
5724 V4SI_type_node
, V4SI_type_node
,
5725 V16QI_type_node
, NULL_TREE
);
5726 tree v8hi_ftype_v8hi_v8hi_v16qi
5727 = build_function_type_list (V8HI_type_node
,
5728 V8HI_type_node
, V8HI_type_node
,
5729 V16QI_type_node
, NULL_TREE
);
5730 tree v16qi_ftype_v16qi_v16qi_v16qi
5731 = build_function_type_list (V16QI_type_node
,
5732 V16QI_type_node
, V16QI_type_node
,
5733 V16QI_type_node
, NULL_TREE
);
5734 tree v4si_ftype_char
5735 = build_function_type_list (V4SI_type_node
, char_type_node
, NULL_TREE
);
5736 tree v8hi_ftype_char
5737 = build_function_type_list (V8HI_type_node
, char_type_node
, NULL_TREE
);
5738 tree v16qi_ftype_char
5739 = build_function_type_list (V16QI_type_node
, char_type_node
, NULL_TREE
);
5740 tree v8hi_ftype_v16qi
5741 = build_function_type_list (V8HI_type_node
, V16QI_type_node
, NULL_TREE
);
5742 tree v4sf_ftype_v4sf
5743 = build_function_type_list (V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5745 tree v2si_ftype_v2si_v2si
5746 = build_function_type_list (V2SI_type_node
,
5747 V2SI_type_node
, V2SI_type_node
, NULL_TREE
);
5749 tree v2sf_ftype_v2sf_v2sf
5750 = build_function_type_list (V2SF_type_node
,
5751 V2SF_type_node
, V2SF_type_node
, NULL_TREE
);
5753 tree v2si_ftype_int_int
5754 = build_function_type_list (V2SI_type_node
,
5755 integer_type_node
, integer_type_node
,
5758 tree v2si_ftype_v2si
5759 = build_function_type_list (V2SI_type_node
, V2SI_type_node
, NULL_TREE
);
5761 tree v2sf_ftype_v2sf
5762 = build_function_type_list (V2SF_type_node
,
5763 V2SF_type_node
, NULL_TREE
);
5765 tree v2sf_ftype_v2si
5766 = build_function_type_list (V2SF_type_node
,
5767 V2SI_type_node
, NULL_TREE
);
5769 tree v2si_ftype_v2sf
5770 = build_function_type_list (V2SI_type_node
,
5771 V2SF_type_node
, NULL_TREE
);
5773 tree v2si_ftype_v2si_char
5774 = build_function_type_list (V2SI_type_node
,
5775 V2SI_type_node
, char_type_node
, NULL_TREE
);
5777 tree v2si_ftype_int_char
5778 = build_function_type_list (V2SI_type_node
,
5779 integer_type_node
, char_type_node
, NULL_TREE
);
5781 tree v2si_ftype_char
5782 = build_function_type_list (V2SI_type_node
, char_type_node
, NULL_TREE
);
5784 tree int_ftype_int_int
5785 = build_function_type_list (integer_type_node
,
5786 integer_type_node
, integer_type_node
,
5789 tree v4si_ftype_v4si_v4si
5790 = build_function_type_list (V4SI_type_node
,
5791 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
5792 tree v4sf_ftype_v4si_char
5793 = build_function_type_list (V4SF_type_node
,
5794 V4SI_type_node
, char_type_node
, NULL_TREE
);
5795 tree v4si_ftype_v4sf_char
5796 = build_function_type_list (V4SI_type_node
,
5797 V4SF_type_node
, char_type_node
, NULL_TREE
);
5798 tree v4si_ftype_v4si_char
5799 = build_function_type_list (V4SI_type_node
,
5800 V4SI_type_node
, char_type_node
, NULL_TREE
);
5801 tree v8hi_ftype_v8hi_char
5802 = build_function_type_list (V8HI_type_node
,
5803 V8HI_type_node
, char_type_node
, NULL_TREE
);
5804 tree v16qi_ftype_v16qi_char
5805 = build_function_type_list (V16QI_type_node
,
5806 V16QI_type_node
, char_type_node
, NULL_TREE
);
5807 tree v16qi_ftype_v16qi_v16qi_char
5808 = build_function_type_list (V16QI_type_node
,
5809 V16QI_type_node
, V16QI_type_node
,
5810 char_type_node
, NULL_TREE
);
5811 tree v8hi_ftype_v8hi_v8hi_char
5812 = build_function_type_list (V8HI_type_node
,
5813 V8HI_type_node
, V8HI_type_node
,
5814 char_type_node
, NULL_TREE
);
5815 tree v4si_ftype_v4si_v4si_char
5816 = build_function_type_list (V4SI_type_node
,
5817 V4SI_type_node
, V4SI_type_node
,
5818 char_type_node
, NULL_TREE
);
5819 tree v4sf_ftype_v4sf_v4sf_char
5820 = build_function_type_list (V4SF_type_node
,
5821 V4SF_type_node
, V4SF_type_node
,
5822 char_type_node
, NULL_TREE
);
5823 tree v4sf_ftype_v4sf_v4sf
5824 = build_function_type_list (V4SF_type_node
,
5825 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5826 tree v4sf_ftype_v4sf_v4sf_v4si
5827 = build_function_type_list (V4SF_type_node
,
5828 V4SF_type_node
, V4SF_type_node
,
5829 V4SI_type_node
, NULL_TREE
);
5830 tree v4sf_ftype_v4sf_v4sf_v4sf
5831 = build_function_type_list (V4SF_type_node
,
5832 V4SF_type_node
, V4SF_type_node
,
5833 V4SF_type_node
, NULL_TREE
);
5834 tree v4si_ftype_v4si_v4si_v4si
5835 = build_function_type_list (V4SI_type_node
,
5836 V4SI_type_node
, V4SI_type_node
,
5837 V4SI_type_node
, NULL_TREE
);
5838 tree v8hi_ftype_v8hi_v8hi
5839 = build_function_type_list (V8HI_type_node
,
5840 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5841 tree v8hi_ftype_v8hi_v8hi_v8hi
5842 = build_function_type_list (V8HI_type_node
,
5843 V8HI_type_node
, V8HI_type_node
,
5844 V8HI_type_node
, NULL_TREE
);
5845 tree v4si_ftype_v8hi_v8hi_v4si
5846 = build_function_type_list (V4SI_type_node
,
5847 V8HI_type_node
, V8HI_type_node
,
5848 V4SI_type_node
, NULL_TREE
);
5849 tree v4si_ftype_v16qi_v16qi_v4si
5850 = build_function_type_list (V4SI_type_node
,
5851 V16QI_type_node
, V16QI_type_node
,
5852 V4SI_type_node
, NULL_TREE
);
5853 tree v16qi_ftype_v16qi_v16qi
5854 = build_function_type_list (V16QI_type_node
,
5855 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5856 tree v4si_ftype_v4sf_v4sf
5857 = build_function_type_list (V4SI_type_node
,
5858 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5859 tree v8hi_ftype_v16qi_v16qi
5860 = build_function_type_list (V8HI_type_node
,
5861 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5862 tree v4si_ftype_v8hi_v8hi
5863 = build_function_type_list (V4SI_type_node
,
5864 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5865 tree v8hi_ftype_v4si_v4si
5866 = build_function_type_list (V8HI_type_node
,
5867 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
5868 tree v16qi_ftype_v8hi_v8hi
5869 = build_function_type_list (V16QI_type_node
,
5870 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5871 tree v4si_ftype_v16qi_v4si
5872 = build_function_type_list (V4SI_type_node
,
5873 V16QI_type_node
, V4SI_type_node
, NULL_TREE
);
5874 tree v4si_ftype_v16qi_v16qi
5875 = build_function_type_list (V4SI_type_node
,
5876 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5877 tree v4si_ftype_v8hi_v4si
5878 = build_function_type_list (V4SI_type_node
,
5879 V8HI_type_node
, V4SI_type_node
, NULL_TREE
);
5880 tree v4si_ftype_v8hi
5881 = build_function_type_list (V4SI_type_node
, V8HI_type_node
, NULL_TREE
);
5882 tree int_ftype_v4si_v4si
5883 = build_function_type_list (integer_type_node
,
5884 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
5885 tree int_ftype_v4sf_v4sf
5886 = build_function_type_list (integer_type_node
,
5887 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5888 tree int_ftype_v16qi_v16qi
5889 = build_function_type_list (integer_type_node
,
5890 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5891 tree int_ftype_v8hi_v8hi
5892 = build_function_type_list (integer_type_node
,
5893 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5895 /* Add the simple ternary operators. */
5896 d
= (struct builtin_description
*) bdesc_3arg
;
5897 for (i
= 0; i
< ARRAY_SIZE (bdesc_3arg
); i
++, d
++)
5900 enum machine_mode mode0
, mode1
, mode2
, mode3
;
5903 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
5906 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
5907 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
5908 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
5909 mode3
= insn_data
[d
->icode
].operand
[3].mode
;
5911 /* When all four are of the same mode. */
5912 if (mode0
== mode1
&& mode1
== mode2
&& mode2
== mode3
)
5917 type
= v4si_ftype_v4si_v4si_v4si
;
5920 type
= v4sf_ftype_v4sf_v4sf_v4sf
;
5923 type
= v8hi_ftype_v8hi_v8hi_v8hi
;
5926 type
= v16qi_ftype_v16qi_v16qi_v16qi
;
5932 else if (mode0
== mode1
&& mode1
== mode2
&& mode3
== V16QImode
)
5937 type
= v4si_ftype_v4si_v4si_v16qi
;
5940 type
= v4sf_ftype_v4sf_v4sf_v16qi
;
5943 type
= v8hi_ftype_v8hi_v8hi_v16qi
;
5946 type
= v16qi_ftype_v16qi_v16qi_v16qi
;
5952 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V16QImode
5953 && mode3
== V4SImode
)
5954 type
= v4si_ftype_v16qi_v16qi_v4si
;
5955 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V8HImode
5956 && mode3
== V4SImode
)
5957 type
= v4si_ftype_v8hi_v8hi_v4si
;
5958 else if (mode0
== V4SFmode
&& mode1
== V4SFmode
&& mode2
== V4SFmode
5959 && mode3
== V4SImode
)
5960 type
= v4sf_ftype_v4sf_v4sf_v4si
;
5962 /* vchar, vchar, vchar, 4 bit literal. */
5963 else if (mode0
== V16QImode
&& mode1
== mode0
&& mode2
== mode0
5965 type
= v16qi_ftype_v16qi_v16qi_char
;
5967 /* vshort, vshort, vshort, 4 bit literal. */
5968 else if (mode0
== V8HImode
&& mode1
== mode0
&& mode2
== mode0
5970 type
= v8hi_ftype_v8hi_v8hi_char
;
5972 /* vint, vint, vint, 4 bit literal. */
5973 else if (mode0
== V4SImode
&& mode1
== mode0
&& mode2
== mode0
5975 type
= v4si_ftype_v4si_v4si_char
;
5977 /* vfloat, vfloat, vfloat, 4 bit literal. */
5978 else if (mode0
== V4SFmode
&& mode1
== mode0
&& mode2
== mode0
5980 type
= v4sf_ftype_v4sf_v4sf_char
;
5985 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
5988 /* Add the simple binary operators. */
5989 d
= (struct builtin_description
*) bdesc_2arg
;
5990 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
5992 enum machine_mode mode0
, mode1
, mode2
;
5995 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
5998 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
5999 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
6000 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
6002 /* When all three operands are of the same mode. */
6003 if (mode0
== mode1
&& mode1
== mode2
)
6008 type
= v4sf_ftype_v4sf_v4sf
;
6011 type
= v4si_ftype_v4si_v4si
;
6014 type
= v16qi_ftype_v16qi_v16qi
;
6017 type
= v8hi_ftype_v8hi_v8hi
;
6020 type
= v2si_ftype_v2si_v2si
;
6023 type
= v2sf_ftype_v2sf_v2sf
;
6026 type
= int_ftype_int_int
;
6033 /* A few other combos we really don't want to do manually. */
6035 /* vint, vfloat, vfloat. */
6036 else if (mode0
== V4SImode
&& mode1
== V4SFmode
&& mode2
== V4SFmode
)
6037 type
= v4si_ftype_v4sf_v4sf
;
6039 /* vshort, vchar, vchar. */
6040 else if (mode0
== V8HImode
&& mode1
== V16QImode
&& mode2
== V16QImode
)
6041 type
= v8hi_ftype_v16qi_v16qi
;
6043 /* vint, vshort, vshort. */
6044 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V8HImode
)
6045 type
= v4si_ftype_v8hi_v8hi
;
6047 /* vshort, vint, vint. */
6048 else if (mode0
== V8HImode
&& mode1
== V4SImode
&& mode2
== V4SImode
)
6049 type
= v8hi_ftype_v4si_v4si
;
6051 /* vchar, vshort, vshort. */
6052 else if (mode0
== V16QImode
&& mode1
== V8HImode
&& mode2
== V8HImode
)
6053 type
= v16qi_ftype_v8hi_v8hi
;
6055 /* vint, vchar, vint. */
6056 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V4SImode
)
6057 type
= v4si_ftype_v16qi_v4si
;
6059 /* vint, vchar, vchar. */
6060 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V16QImode
)
6061 type
= v4si_ftype_v16qi_v16qi
;
6063 /* vint, vshort, vint. */
6064 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V4SImode
)
6065 type
= v4si_ftype_v8hi_v4si
;
6067 /* vint, vint, 5 bit literal. */
6068 else if (mode0
== V4SImode
&& mode1
== V4SImode
&& mode2
== QImode
)
6069 type
= v4si_ftype_v4si_char
;
6071 /* vshort, vshort, 5 bit literal. */
6072 else if (mode0
== V8HImode
&& mode1
== V8HImode
&& mode2
== QImode
)
6073 type
= v8hi_ftype_v8hi_char
;
6075 /* vchar, vchar, 5 bit literal. */
6076 else if (mode0
== V16QImode
&& mode1
== V16QImode
&& mode2
== QImode
)
6077 type
= v16qi_ftype_v16qi_char
;
6079 /* vfloat, vint, 5 bit literal. */
6080 else if (mode0
== V4SFmode
&& mode1
== V4SImode
&& mode2
== QImode
)
6081 type
= v4sf_ftype_v4si_char
;
6083 /* vint, vfloat, 5 bit literal. */
6084 else if (mode0
== V4SImode
&& mode1
== V4SFmode
&& mode2
== QImode
)
6085 type
= v4si_ftype_v4sf_char
;
6087 else if (mode0
== V2SImode
&& mode1
== SImode
&& mode2
== SImode
)
6088 type
= v2si_ftype_int_int
;
6090 else if (mode0
== V2SImode
&& mode1
== V2SImode
&& mode2
== QImode
)
6091 type
= v2si_ftype_v2si_char
;
6093 else if (mode0
== V2SImode
&& mode1
== SImode
&& mode2
== QImode
)
6094 type
= v2si_ftype_int_char
;
6097 else if (mode0
== SImode
)
6102 type
= int_ftype_v4si_v4si
;
6105 type
= int_ftype_v4sf_v4sf
;
6108 type
= int_ftype_v16qi_v16qi
;
6111 type
= int_ftype_v8hi_v8hi
;
6121 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
6124 /* Add the simple unary operators. */
6125 d
= (struct builtin_description
*) bdesc_1arg
;
6126 for (i
= 0; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
6128 enum machine_mode mode0
, mode1
;
6131 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
6134 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
6135 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
6137 if (mode0
== V4SImode
&& mode1
== QImode
)
6138 type
= v4si_ftype_char
;
6139 else if (mode0
== V8HImode
&& mode1
== QImode
)
6140 type
= v8hi_ftype_char
;
6141 else if (mode0
== V16QImode
&& mode1
== QImode
)
6142 type
= v16qi_ftype_char
;
6143 else if (mode0
== V4SFmode
&& mode1
== V4SFmode
)
6144 type
= v4sf_ftype_v4sf
;
6145 else if (mode0
== V8HImode
&& mode1
== V16QImode
)
6146 type
= v8hi_ftype_v16qi
;
6147 else if (mode0
== V4SImode
&& mode1
== V8HImode
)
6148 type
= v4si_ftype_v8hi
;
6149 else if (mode0
== V2SImode
&& mode1
== V2SImode
)
6150 type
= v2si_ftype_v2si
;
6151 else if (mode0
== V2SFmode
&& mode1
== V2SFmode
)
6152 type
= v2sf_ftype_v2sf
;
6153 else if (mode0
== V2SFmode
&& mode1
== V2SImode
)
6154 type
= v2sf_ftype_v2si
;
6155 else if (mode0
== V2SImode
&& mode1
== V2SFmode
)
6156 type
= v2si_ftype_v2sf
;
6157 else if (mode0
== V2SImode
&& mode1
== QImode
)
6158 type
= v2si_ftype_char
;
6162 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
6167 /* Expand a block move operation, and return 1 if successful. Return 0
6168 if we should let the compiler generate normal code.
6170 operands[0] is the destination
6171 operands[1] is the source
6172 operands[2] is the length
6173 operands[3] is the alignment */
6175 #define MAX_MOVE_REG 4
6178 expand_block_move (operands
)
6181 rtx orig_dest
= operands
[0];
6182 rtx orig_src
= operands
[1];
6183 rtx bytes_rtx
= operands
[2];
6184 rtx align_rtx
= operands
[3];
6185 int constp
= (GET_CODE (bytes_rtx
) == CONST_INT
);
6191 /* If this is not a fixed size move, just call memcpy */
6195 /* If this is not a fixed size alignment, abort */
6196 if (GET_CODE (align_rtx
) != CONST_INT
)
6198 align
= INTVAL (align_rtx
);
6200 /* Anything to move? */
6201 bytes
= INTVAL (bytes_rtx
);
6205 /* store_one_arg depends on expand_block_move to handle at least the size of
6206 reg_parm_stack_space. */
6207 if (bytes
> (TARGET_POWERPC64
? 64 : 32))
6210 if (TARGET_STRING
) /* string instructions are available */
6212 for (offset
= 0; bytes
> 0; offset
+= move_bytes
, bytes
-= move_bytes
)
6215 rtx (*movstrsi
) PARAMS ((rtx
, rtx
, rtx
, rtx
));
6216 rtx (*mov
) PARAMS ((rtx
, rtx
));
6218 enum machine_mode mode
= BLKmode
;
6221 if (bytes
> 24 /* move up to 32 bytes at a time */
6229 && ! fixed_regs
[12])
6231 move_bytes
= (bytes
> 32) ? 32 : bytes
;
6232 gen_func
.movstrsi
= gen_movstrsi_8reg
;
6234 else if (bytes
> 16 /* move up to 24 bytes at a time */
6240 && ! fixed_regs
[10])
6242 move_bytes
= (bytes
> 24) ? 24 : bytes
;
6243 gen_func
.movstrsi
= gen_movstrsi_6reg
;
6245 else if (bytes
> 8 /* move up to 16 bytes at a time */
6251 move_bytes
= (bytes
> 16) ? 16 : bytes
;
6252 gen_func
.movstrsi
= gen_movstrsi_4reg
;
6254 else if (bytes
>= 8 && TARGET_POWERPC64
6255 /* 64-bit loads and stores require word-aligned
6257 && (align
>= 8 || (! STRICT_ALIGNMENT
&& align
>= 4)))
6261 gen_func
.mov
= gen_movdi
;
6263 else if (bytes
> 4 && !TARGET_POWERPC64
)
6264 { /* move up to 8 bytes at a time */
6265 move_bytes
= (bytes
> 8) ? 8 : bytes
;
6266 gen_func
.movstrsi
= gen_movstrsi_2reg
;
6268 else if (bytes
>= 4 && (align
>= 4 || ! STRICT_ALIGNMENT
))
6269 { /* move 4 bytes */
6272 gen_func
.mov
= gen_movsi
;
6274 else if (bytes
== 2 && (align
>= 2 || ! STRICT_ALIGNMENT
))
6275 { /* move 2 bytes */
6278 gen_func
.mov
= gen_movhi
;
6280 else if (bytes
== 1) /* move 1 byte */
6284 gen_func
.mov
= gen_movqi
;
6287 { /* move up to 4 bytes at a time */
6288 move_bytes
= (bytes
> 4) ? 4 : bytes
;
6289 gen_func
.movstrsi
= gen_movstrsi_1reg
;
6292 src
= adjust_address (orig_src
, mode
, offset
);
6293 dest
= adjust_address (orig_dest
, mode
, offset
);
6295 if (mode
== BLKmode
)
6297 /* Move the address into scratch registers. The movstrsi
6298 patterns require zero offset. */
6299 if (!REG_P (XEXP (src
, 0)))
6301 rtx src_reg
= copy_addr_to_reg (XEXP (src
, 0));
6302 src
= replace_equiv_address (src
, src_reg
);
6304 set_mem_size (src
, GEN_INT (move_bytes
));
6306 if (!REG_P (XEXP (dest
, 0)))
6308 rtx dest_reg
= copy_addr_to_reg (XEXP (dest
, 0));
6309 dest
= replace_equiv_address (dest
, dest_reg
);
6311 set_mem_size (dest
, GEN_INT (move_bytes
));
6313 emit_insn ((*gen_func
.movstrsi
) (dest
, src
,
6314 GEN_INT (move_bytes
& 31),
6319 rtx tmp_reg
= gen_reg_rtx (mode
);
6321 emit_insn ((*gen_func
.mov
) (tmp_reg
, src
));
6322 emit_insn ((*gen_func
.mov
) (dest
, tmp_reg
));
6327 else /* string instructions not available */
6329 rtx stores
[MAX_MOVE_REG
];
6333 for (offset
= 0; bytes
> 0; offset
+= move_bytes
, bytes
-= move_bytes
)
6335 rtx (*gen_mov_func
) PARAMS ((rtx
, rtx
));
6336 enum machine_mode mode
;
6337 rtx src
, dest
, tmp_reg
;
6339 /* Generate the appropriate load and store, saving the stores
6341 if (bytes
>= 8 && TARGET_POWERPC64
6342 /* 64-bit loads and stores require word-aligned
6344 && (align
>= 8 || (! STRICT_ALIGNMENT
&& align
>= 4)))
6348 gen_mov_func
= gen_movdi
;
6350 else if (bytes
>= 4 && (align
>= 4 || ! STRICT_ALIGNMENT
))
6354 gen_mov_func
= gen_movsi
;
6356 else if (bytes
>= 2 && (align
>= 2 || ! STRICT_ALIGNMENT
))
6360 gen_mov_func
= gen_movhi
;
6366 gen_mov_func
= gen_movqi
;
6369 src
= adjust_address (orig_src
, mode
, offset
);
6370 dest
= adjust_address (orig_dest
, mode
, offset
);
6371 tmp_reg
= gen_reg_rtx (mode
);
6373 emit_insn ((*gen_mov_func
) (tmp_reg
, src
));
6374 stores
[num_reg
++] = (*gen_mov_func
) (dest
, tmp_reg
);
6376 if (num_reg
>= MAX_MOVE_REG
)
6378 for (i
= 0; i
< num_reg
; i
++)
6379 emit_insn (stores
[i
]);
6384 for (i
= 0; i
< num_reg
; i
++)
6385 emit_insn (stores
[i
]);
6392 /* Return 1 if OP is a load multiple operation. It is known to be a
6393 PARALLEL and the first section will be tested. */
6396 load_multiple_operation (op
, mode
)
6398 enum machine_mode mode ATTRIBUTE_UNUSED
;
6400 int count
= XVECLEN (op
, 0);
6401 unsigned int dest_regno
;
6405 /* Perform a quick check so we don't blow up below. */
6407 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6408 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
6409 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != MEM
)
6412 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
6413 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0);
6415 for (i
= 1; i
< count
; i
++)
6417 rtx elt
= XVECEXP (op
, 0, i
);
6419 if (GET_CODE (elt
) != SET
6420 || GET_CODE (SET_DEST (elt
)) != REG
6421 || GET_MODE (SET_DEST (elt
)) != SImode
6422 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
6423 || GET_CODE (SET_SRC (elt
)) != MEM
6424 || GET_MODE (SET_SRC (elt
)) != SImode
6425 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != PLUS
6426 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt
), 0), 0), src_addr
)
6427 || GET_CODE (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != CONST_INT
6428 || INTVAL (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != i
* 4)
6435 /* Similar, but tests for store multiple. Here, the second vector element
6436 is a CLOBBER. It will be tested later. */
6439 store_multiple_operation (op
, mode
)
6441 enum machine_mode mode ATTRIBUTE_UNUSED
;
6443 int count
= XVECLEN (op
, 0) - 1;
6444 unsigned int src_regno
;
6448 /* Perform a quick check so we don't blow up below. */
6450 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6451 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != MEM
6452 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != REG
)
6455 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
6456 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, 0)), 0);
6458 for (i
= 1; i
< count
; i
++)
6460 rtx elt
= XVECEXP (op
, 0, i
+ 1);
6462 if (GET_CODE (elt
) != SET
6463 || GET_CODE (SET_SRC (elt
)) != REG
6464 || GET_MODE (SET_SRC (elt
)) != SImode
6465 || REGNO (SET_SRC (elt
)) != src_regno
+ i
6466 || GET_CODE (SET_DEST (elt
)) != MEM
6467 || GET_MODE (SET_DEST (elt
)) != SImode
6468 || GET_CODE (XEXP (SET_DEST (elt
), 0)) != PLUS
6469 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt
), 0), 0), dest_addr
)
6470 || GET_CODE (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != CONST_INT
6471 || INTVAL (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != i
* 4)
6478 /* Return a string to perform a load_multiple operation.
6479 operands[0] is the vector.
6480 operands[1] is the source address.
6481 operands[2] is the first destination register. */
6484 rs6000_output_load_multiple (operands
)
6487 /* We have to handle the case where the pseudo used to contain the address
6488 is assigned to one of the output registers. */
6490 int words
= XVECLEN (operands
[0], 0);
6493 if (XVECLEN (operands
[0], 0) == 1)
6494 return "{l|lwz} %2,0(%1)";
6496 for (i
= 0; i
< words
; i
++)
6497 if (refers_to_regno_p (REGNO (operands
[2]) + i
,
6498 REGNO (operands
[2]) + i
+ 1, operands
[1], 0))
6502 xop
[0] = GEN_INT (4 * (words
-1));
6503 xop
[1] = operands
[1];
6504 xop
[2] = operands
[2];
6505 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop
);
6510 xop
[0] = GEN_INT (4 * (words
-1));
6511 xop
[1] = operands
[1];
6512 xop
[2] = gen_rtx_REG (SImode
, REGNO (operands
[2]) + 1);
6513 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop
);
6518 for (j
= 0; j
< words
; j
++)
6521 xop
[0] = GEN_INT (j
* 4);
6522 xop
[1] = operands
[1];
6523 xop
[2] = gen_rtx_REG (SImode
, REGNO (operands
[2]) + j
);
6524 output_asm_insn ("{l|lwz} %2,%0(%1)", xop
);
6526 xop
[0] = GEN_INT (i
* 4);
6527 xop
[1] = operands
[1];
6528 output_asm_insn ("{l|lwz} %1,%0(%1)", xop
);
6533 return "{lsi|lswi} %2,%1,%N0";
6536 /* Return 1 for a parallel vrsave operation. */
6539 vrsave_operation (op
, mode
)
6541 enum machine_mode mode ATTRIBUTE_UNUSED
;
6543 int count
= XVECLEN (op
, 0);
6544 unsigned int dest_regno
, src_regno
;
6548 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6549 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
6550 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != UNSPEC_VOLATILE
)
6553 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
6554 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
6556 if (dest_regno
!= VRSAVE_REGNO
6557 && src_regno
!= VRSAVE_REGNO
)
6560 for (i
= 1; i
< count
; i
++)
6562 rtx elt
= XVECEXP (op
, 0, i
);
6564 if (GET_CODE (elt
) != CLOBBER
6565 && GET_CODE (elt
) != SET
)
6572 /* Return 1 for an PARALLEL suitable for mtcrf. */
6575 mtcrf_operation (op
, mode
)
6577 enum machine_mode mode ATTRIBUTE_UNUSED
;
6579 int count
= XVECLEN (op
, 0);
6583 /* Perform a quick check so we don't blow up below. */
6585 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6586 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != UNSPEC
6587 || XVECLEN (SET_SRC (XVECEXP (op
, 0, 0)), 0) != 2)
6589 src_reg
= XVECEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0, 0);
6591 if (GET_CODE (src_reg
) != REG
6592 || GET_MODE (src_reg
) != SImode
6593 || ! INT_REGNO_P (REGNO (src_reg
)))
6596 for (i
= 0; i
< count
; i
++)
6598 rtx exp
= XVECEXP (op
, 0, i
);
6602 if (GET_CODE (exp
) != SET
6603 || GET_CODE (SET_DEST (exp
)) != REG
6604 || GET_MODE (SET_DEST (exp
)) != CCmode
6605 || ! CR_REGNO_P (REGNO (SET_DEST (exp
))))
6607 unspec
= SET_SRC (exp
);
6608 maskval
= 1 << (MAX_CR_REGNO
- REGNO (SET_DEST (exp
)));
6610 if (GET_CODE (unspec
) != UNSPEC
6611 || XINT (unspec
, 1) != 20
6612 || XVECLEN (unspec
, 0) != 2
6613 || XVECEXP (unspec
, 0, 0) != src_reg
6614 || GET_CODE (XVECEXP (unspec
, 0, 1)) != CONST_INT
6615 || INTVAL (XVECEXP (unspec
, 0, 1)) != maskval
)
6621 /* Return 1 for an PARALLEL suitable for lmw. */
6624 lmw_operation (op
, mode
)
6626 enum machine_mode mode ATTRIBUTE_UNUSED
;
6628 int count
= XVECLEN (op
, 0);
6629 unsigned int dest_regno
;
6631 unsigned int base_regno
;
6632 HOST_WIDE_INT offset
;
6635 /* Perform a quick check so we don't blow up below. */
6637 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6638 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
6639 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != MEM
)
6642 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
6643 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0);
6646 || count
!= 32 - (int) dest_regno
)
6649 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr
, 0))
6652 base_regno
= REGNO (src_addr
);
6653 if (base_regno
== 0)
6656 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, src_addr
, 0))
6658 offset
= INTVAL (XEXP (src_addr
, 1));
6659 base_regno
= REGNO (XEXP (src_addr
, 0));
6664 for (i
= 0; i
< count
; i
++)
6666 rtx elt
= XVECEXP (op
, 0, i
);
6669 HOST_WIDE_INT newoffset
;
6671 if (GET_CODE (elt
) != SET
6672 || GET_CODE (SET_DEST (elt
)) != REG
6673 || GET_MODE (SET_DEST (elt
)) != SImode
6674 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
6675 || GET_CODE (SET_SRC (elt
)) != MEM
6676 || GET_MODE (SET_SRC (elt
)) != SImode
)
6678 newaddr
= XEXP (SET_SRC (elt
), 0);
6679 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr
, 0))
6684 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, newaddr
, 0))
6686 addr_reg
= XEXP (newaddr
, 0);
6687 newoffset
= INTVAL (XEXP (newaddr
, 1));
6691 if (REGNO (addr_reg
) != base_regno
6692 || newoffset
!= offset
+ 4 * i
)
6699 /* Return 1 for an PARALLEL suitable for stmw. */
6702 stmw_operation (op
, mode
)
6704 enum machine_mode mode ATTRIBUTE_UNUSED
;
6706 int count
= XVECLEN (op
, 0);
6707 unsigned int src_regno
;
6709 unsigned int base_regno
;
6710 HOST_WIDE_INT offset
;
6713 /* Perform a quick check so we don't blow up below. */
6715 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6716 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != MEM
6717 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != REG
)
6720 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
6721 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, 0)), 0);
6724 || count
!= 32 - (int) src_regno
)
6727 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr
, 0))
6730 base_regno
= REGNO (dest_addr
);
6731 if (base_regno
== 0)
6734 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, dest_addr
, 0))
6736 offset
= INTVAL (XEXP (dest_addr
, 1));
6737 base_regno
= REGNO (XEXP (dest_addr
, 0));
6742 for (i
= 0; i
< count
; i
++)
6744 rtx elt
= XVECEXP (op
, 0, i
);
6747 HOST_WIDE_INT newoffset
;
6749 if (GET_CODE (elt
) != SET
6750 || GET_CODE (SET_SRC (elt
)) != REG
6751 || GET_MODE (SET_SRC (elt
)) != SImode
6752 || REGNO (SET_SRC (elt
)) != src_regno
+ i
6753 || GET_CODE (SET_DEST (elt
)) != MEM
6754 || GET_MODE (SET_DEST (elt
)) != SImode
)
6756 newaddr
= XEXP (SET_DEST (elt
), 0);
6757 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr
, 0))
6762 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, newaddr
, 0))
6764 addr_reg
= XEXP (newaddr
, 0);
6765 newoffset
= INTVAL (XEXP (newaddr
, 1));
6769 if (REGNO (addr_reg
) != base_regno
6770 || newoffset
!= offset
+ 4 * i
)
6777 /* A validation routine: say whether CODE, a condition code, and MODE
6778 match. The other alternatives either don't make sense or should
6779 never be generated. */
6782 validate_condition_mode (code
, mode
)
6784 enum machine_mode mode
;
6786 if (GET_RTX_CLASS (code
) != '<'
6787 || GET_MODE_CLASS (mode
) != MODE_CC
)
6790 /* These don't make sense. */
6791 if ((code
== GT
|| code
== LT
|| code
== GE
|| code
== LE
)
6792 && mode
== CCUNSmode
)
6795 if ((code
== GTU
|| code
== LTU
|| code
== GEU
|| code
== LEU
)
6796 && mode
!= CCUNSmode
)
6799 if (mode
!= CCFPmode
6800 && (code
== ORDERED
|| code
== UNORDERED
6801 || code
== UNEQ
|| code
== LTGT
6802 || code
== UNGT
|| code
== UNLT
6803 || code
== UNGE
|| code
== UNLE
))
6806 /* These should never be generated except for
6807 flag_unsafe_math_optimizations and flag_finite_math_only. */
6808 if (mode
== CCFPmode
6809 && ! flag_unsafe_math_optimizations
6810 && ! flag_finite_math_only
6811 && (code
== LE
|| code
== GE
6812 || code
== UNEQ
|| code
== LTGT
6813 || code
== UNGT
|| code
== UNLT
))
6816 /* These are invalid; the information is not there. */
6817 if (mode
== CCEQmode
6818 && code
!= EQ
&& code
!= NE
)
6822 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
6823 We only check the opcode against the mode of the CC value here. */
6826 branch_comparison_operator (op
, mode
)
6828 enum machine_mode mode ATTRIBUTE_UNUSED
;
6830 enum rtx_code code
= GET_CODE (op
);
6831 enum machine_mode cc_mode
;
6833 if (GET_RTX_CLASS (code
) != '<')
6836 cc_mode
= GET_MODE (XEXP (op
, 0));
6837 if (GET_MODE_CLASS (cc_mode
) != MODE_CC
)
6840 validate_condition_mode (code
, cc_mode
);
6845 /* Return 1 if OP is a comparison operation that is valid for a branch
6846 insn and which is true if the corresponding bit in the CC register
6850 branch_positive_comparison_operator (op
, mode
)
6852 enum machine_mode mode
;
6856 if (! branch_comparison_operator (op
, mode
))
6859 code
= GET_CODE (op
);
6860 return (code
== EQ
|| code
== LT
|| code
== GT
6861 || (TARGET_SPE
&& TARGET_HARD_FLOAT
&& !TARGET_FPRS
&& code
== NE
)
6862 || code
== LTU
|| code
== GTU
6863 || code
== UNORDERED
);
6866 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
6867 We check the opcode against the mode of the CC value and disallow EQ or
6868 NE comparisons for integers. */
6871 scc_comparison_operator (op
, mode
)
6873 enum machine_mode mode
;
6875 enum rtx_code code
= GET_CODE (op
);
6876 enum machine_mode cc_mode
;
6878 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
6881 if (GET_RTX_CLASS (code
) != '<')
6884 cc_mode
= GET_MODE (XEXP (op
, 0));
6885 if (GET_MODE_CLASS (cc_mode
) != MODE_CC
)
6888 validate_condition_mode (code
, cc_mode
);
6890 if (code
== NE
&& cc_mode
!= CCFPmode
)
6897 trap_comparison_operator (op
, mode
)
6899 enum machine_mode mode
;
6901 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
6903 return GET_RTX_CLASS (GET_CODE (op
)) == '<';
6907 boolean_operator (op
, mode
)
6909 enum machine_mode mode ATTRIBUTE_UNUSED
;
6911 enum rtx_code code
= GET_CODE (op
);
6912 return (code
== AND
|| code
== IOR
|| code
== XOR
);
6916 boolean_or_operator (op
, mode
)
6918 enum machine_mode mode ATTRIBUTE_UNUSED
;
6920 enum rtx_code code
= GET_CODE (op
);
6921 return (code
== IOR
|| code
== XOR
);
6925 min_max_operator (op
, mode
)
6927 enum machine_mode mode ATTRIBUTE_UNUSED
;
6929 enum rtx_code code
= GET_CODE (op
);
6930 return (code
== SMIN
|| code
== SMAX
|| code
== UMIN
|| code
== UMAX
);
6933 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
6934 mask required to convert the result of a rotate insn into a shift
6935 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
6938 includes_lshift_p (shiftop
, andop
)
6942 unsigned HOST_WIDE_INT shift_mask
= ~(unsigned HOST_WIDE_INT
) 0;
6944 shift_mask
<<= INTVAL (shiftop
);
6946 return (INTVAL (andop
) & 0xffffffff & ~shift_mask
) == 0;
6949 /* Similar, but for right shift. */
6952 includes_rshift_p (shiftop
, andop
)
6956 unsigned HOST_WIDE_INT shift_mask
= ~(unsigned HOST_WIDE_INT
) 0;
6958 shift_mask
>>= INTVAL (shiftop
);
6960 return (INTVAL (andop
) & 0xffffffff & ~shift_mask
) == 0;
6963 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
6964 to perform a left shift. It must have exactly SHIFTOP least
6965 significant 0's, then one or more 1's, then zero or more 0's. */
6968 includes_rldic_lshift_p (shiftop
, andop
)
6972 if (GET_CODE (andop
) == CONST_INT
)
6974 HOST_WIDE_INT c
, lsb
, shift_mask
;
6977 if (c
== 0 || c
== ~0)
6981 shift_mask
<<= INTVAL (shiftop
);
6983 /* Find the least significant one bit. */
6986 /* It must coincide with the LSB of the shift mask. */
6987 if (-lsb
!= shift_mask
)
6990 /* Invert to look for the next transition (if any). */
6993 /* Remove the low group of ones (originally low group of zeros). */
6996 /* Again find the lsb, and check we have all 1's above. */
7000 else if (GET_CODE (andop
) == CONST_DOUBLE
7001 && (GET_MODE (andop
) == VOIDmode
|| GET_MODE (andop
) == DImode
))
7003 HOST_WIDE_INT low
, high
, lsb
;
7004 HOST_WIDE_INT shift_mask_low
, shift_mask_high
;
7006 low
= CONST_DOUBLE_LOW (andop
);
7007 if (HOST_BITS_PER_WIDE_INT
< 64)
7008 high
= CONST_DOUBLE_HIGH (andop
);
7010 if ((low
== 0 && (HOST_BITS_PER_WIDE_INT
>= 64 || high
== 0))
7011 || (low
== ~0 && (HOST_BITS_PER_WIDE_INT
>= 64 || high
== ~0)))
7014 if (HOST_BITS_PER_WIDE_INT
< 64 && low
== 0)
7016 shift_mask_high
= ~0;
7017 if (INTVAL (shiftop
) > 32)
7018 shift_mask_high
<<= INTVAL (shiftop
) - 32;
7022 if (-lsb
!= shift_mask_high
|| INTVAL (shiftop
) < 32)
7029 return high
== -lsb
;
7032 shift_mask_low
= ~0;
7033 shift_mask_low
<<= INTVAL (shiftop
);
7037 if (-lsb
!= shift_mask_low
)
7040 if (HOST_BITS_PER_WIDE_INT
< 64)
7045 if (HOST_BITS_PER_WIDE_INT
< 64 && low
== 0)
7048 return high
== -lsb
;
7052 return low
== -lsb
&& (HOST_BITS_PER_WIDE_INT
>= 64 || high
== ~0);
7058 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
7059 to perform a left shift. It must have SHIFTOP or more least
7060 signifigant 0's, with the remainder of the word 1's. */
7063 includes_rldicr_lshift_p (shiftop
, andop
)
7067 if (GET_CODE (andop
) == CONST_INT
)
7069 HOST_WIDE_INT c
, lsb
, shift_mask
;
7072 shift_mask
<<= INTVAL (shiftop
);
7075 /* Find the least signifigant one bit. */
7078 /* It must be covered by the shift mask.
7079 This test also rejects c == 0. */
7080 if ((lsb
& shift_mask
) == 0)
7083 /* Check we have all 1's above the transition, and reject all 1's. */
7084 return c
== -lsb
&& lsb
!= 1;
7086 else if (GET_CODE (andop
) == CONST_DOUBLE
7087 && (GET_MODE (andop
) == VOIDmode
|| GET_MODE (andop
) == DImode
))
7089 HOST_WIDE_INT low
, lsb
, shift_mask_low
;
7091 low
= CONST_DOUBLE_LOW (andop
);
7093 if (HOST_BITS_PER_WIDE_INT
< 64)
7095 HOST_WIDE_INT high
, shift_mask_high
;
7097 high
= CONST_DOUBLE_HIGH (andop
);
7101 shift_mask_high
= ~0;
7102 if (INTVAL (shiftop
) > 32)
7103 shift_mask_high
<<= INTVAL (shiftop
) - 32;
7107 if ((lsb
& shift_mask_high
) == 0)
7110 return high
== -lsb
;
7116 shift_mask_low
= ~0;
7117 shift_mask_low
<<= INTVAL (shiftop
);
7121 if ((lsb
& shift_mask_low
) == 0)
7124 return low
== -lsb
&& lsb
!= 1;
7130 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
7131 for lfq and stfq insns.
7133 Note reg1 and reg2 *must* be hard registers. To be sure we will
7134 abort if we are passed pseudo registers. */
7137 registers_ok_for_quad_peep (reg1
, reg2
)
7140 /* We might have been passed a SUBREG. */
7141 if (GET_CODE (reg1
) != REG
|| GET_CODE (reg2
) != REG
)
7144 return (REGNO (reg1
) == REGNO (reg2
) - 1);
7147 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
7148 addr1 and addr2 must be in consecutive memory locations
7149 (addr2 == addr1 + 8). */
7152 addrs_ok_for_quad_peep (addr1
, addr2
)
7159 /* Extract an offset (if used) from the first addr. */
7160 if (GET_CODE (addr1
) == PLUS
)
7162 /* If not a REG, return zero. */
7163 if (GET_CODE (XEXP (addr1
, 0)) != REG
)
7167 reg1
= REGNO (XEXP (addr1
, 0));
7168 /* The offset must be constant! */
7169 if (GET_CODE (XEXP (addr1
, 1)) != CONST_INT
)
7171 offset1
= INTVAL (XEXP (addr1
, 1));
7174 else if (GET_CODE (addr1
) != REG
)
7178 reg1
= REGNO (addr1
);
7179 /* This was a simple (mem (reg)) expression. Offset is 0. */
7183 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
7184 if (GET_CODE (addr2
) != PLUS
)
7187 if (GET_CODE (XEXP (addr2
, 0)) != REG
7188 || GET_CODE (XEXP (addr2
, 1)) != CONST_INT
)
7191 if (reg1
!= REGNO (XEXP (addr2
, 0)))
7194 /* The offset for the second addr must be 8 more than the first addr. */
7195 if (INTVAL (XEXP (addr2
, 1)) != offset1
+ 8)
7198 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7203 /* Return the register class of a scratch register needed to copy IN into
7204 or out of a register in CLASS in MODE. If it can be done directly,
7205 NO_REGS is returned. */
/* NOTE(review): this listing is garbled -- the original file's line
   numbers are fused into the text and interior lines (braces, returns)
   are missing, so only comments are added; code bytes are untouched.
   Per the visible header comment, this returns the scratch register
   class needed to copy IN into/out of CLASS in MODE (NO_REGS when a
   direct copy works -- the return lines themselves were dropped by the
   extraction).  */
7208 secondary_reload_class (class, mode
, in
)
7209 enum reg_class
class;
7210 enum machine_mode mode ATTRIBUTE_UNUSED
;
/* Symbolic operands need a BASE_REGS intermediate on ELF; on Darwin a
   pic address is loaded from memory and also needs a base register.  */
7215 if (TARGET_ELF
|| (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))
7217 /* We cannot copy a symbolic operand directly into anything
7218 other than BASE_REGS for TARGET_ELF. So indicate that a
7219 register from BASE_REGS is needed as an intermediate
7222 On Darwin, pic addresses require a load from memory, which
7223 needs a base register. */
7224 if (class != BASE_REGS
7225 && (GET_CODE (in
) == SYMBOL_REF
7226 || GET_CODE (in
) == HIGH
7227 || GET_CODE (in
) == LABEL_REF
7228 || GET_CODE (in
) == CONST
))
/* Resolve IN down to a hard register number where possible; pseudos
   are mapped through true_regnum.  NOTE(review): the assignments that
   set regno's default (presumably -1 for non-registers) are among the
   dropped lines -- confirm against the full source.  */
7232 if (GET_CODE (in
) == REG
)
7235 if (regno
>= FIRST_PSEUDO_REGISTER
)
7237 regno
= true_regnum (in
);
7238 if (regno
>= FIRST_PSEUDO_REGISTER
)
7242 else if (GET_CODE (in
) == SUBREG
)
7244 regno
= true_regnum (in
);
7245 if (regno
>= FIRST_PSEUDO_REGISTER
)
7251 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
7253 if (class == GENERAL_REGS
|| class == BASE_REGS
7254 || (regno
>= 0 && INT_REGNO_P (regno
)))
7257 /* Constants, memory, and FP registers can go into FP registers. */
7258 if ((regno
== -1 || FP_REGNO_P (regno
))
7259 && (class == FLOAT_REGS
|| class == NON_SPECIAL_REGS
))
7262 /* Memory, and AltiVec registers can go into AltiVec registers. */
7263 if ((regno
== -1 || ALTIVEC_REGNO_P (regno
))
7264 && class == ALTIVEC_REGS
)
7267 /* We can copy among the CR registers. */
7268 if ((class == CR_REGS
|| class == CR0_REGS
)
7269 && regno
>= 0 && CR_REGNO_P (regno
))
7272 /* Otherwise, we need GENERAL_REGS. */
7273 return GENERAL_REGS
;
7276 /* Given a comparison operation, return the bit number in CCR to test. We
7277 know this is a valid comparison.
7279 SCC_P is 1 if this is for an scc. That means that %D will have been
7280 used instead of %C, so the bits will be in different places.
7282 Return -1 if OP isn't a valid comparison for some reason. */
/* NOTE(review): body fragment of ccr_bit (see the header comment above:
   maps a comparison OP to the CCR bit number to test; SCC_P selects the
   %D-style bit placement).  The function's signature lines and the
   switch statement's case labels were dropped by the garbled listing;
   only comments are added here.  */
7289 enum rtx_code code
= GET_CODE (op
);
7290 enum machine_mode cc_mode
;
/* Reject anything that is not a comparison RTX.  */
7295 if (GET_RTX_CLASS (code
) != '<')
7300 if (GET_CODE (reg
) != REG
7301 || ! CR_REGNO_P (REGNO (reg
)))
7304 cc_mode
= GET_MODE (reg
);
7305 cc_regnum
= REGNO (reg
);
/* Each CR field is 4 bits wide; base_bit is the field's first bit.  */
7306 base_bit
= 4 * (cc_regnum
- CR0_REGNO
);
7308 validate_condition_mode (code
, cc_mode
);
/* NOTE(review): the case labels selecting these arms are missing from
   the listing; the SPE hard-float CCFPmode special case uses bit 1.  */
7313 if (TARGET_SPE
&& TARGET_HARD_FLOAT
&& cc_mode
== CCFPmode
)
7314 return base_bit
+ 1;
7315 return scc_p
? base_bit
+ 3 : base_bit
+ 2;
7317 if (TARGET_SPE
&& TARGET_HARD_FLOAT
&& cc_mode
== CCFPmode
)
7318 return base_bit
+ 1;
7319 return base_bit
+ 2;
7320 case GT
: case GTU
: case UNLE
:
7321 return base_bit
+ 1;
7322 case LT
: case LTU
: case UNGE
:
7324 case ORDERED
: case UNORDERED
:
7325 return base_bit
+ 3;
7328 /* If scc, we will have done a cror to put the bit in the
7329 unordered position. So test that bit. For integer, this is ! LT
7330 unless this is an scc insn. */
7331 return scc_p
? base_bit
+ 3 : base_bit
;
7334 return scc_p
? base_bit
+ 3 : base_bit
+ 1;
7341 /* Return the GOT register. */
/* NOTE(review): returns the GOT/pic-offset-table register, marking it
   live for the prologue/epilogue code first.  Braces and the return
   type line are missing from this garbled listing; comments only.  */
7344 rs6000_got_register (value
)
7345 rtx value ATTRIBUTE_UNUSED
;
7347 /* The second flow pass currently (June 1999) can't update
7348 regs_ever_live without disturbing other parts of the compiler, so
7349 update it here to make the prolog/epilogue code happy. */
7350 if (no_new_pseudos
&& ! regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
])
7351 regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
] = 1;
/* Record that this function uses the PIC offset table.  */
7353 current_function_uses_pic_offset_table
= 1;
7355 return pic_offset_table_rtx
;
7358 /* Function to init struct machine_function.
7359 This will be called, via a pointer variable,
7360 from push_function_context. */
/* NOTE(review): allocates a zeroed machine_function via GC, installed
   through a function pointer by push_function_context (per the comment
   above).  The surrounding braces were dropped by the listing.  */
7362 static struct machine_function
*
7363 rs6000_init_machine_status ()
7365 return ggc_alloc_cleared (sizeof (machine_function
));
/* NOTE(review): helper macros for the operand-printing code below.  The
   test at 7370-7371 appears without its '#define' line -- the listing
   has dropped interior lines.  INT_LOWPART yields the low word of a
   CONST_INT or CONST_DOUBLE.  */
7368 /* These macros test for integers and extract the low-order bits. */
7370 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
7371 && GET_MODE (X) == VOIDmode)
7373 #define INT_LOWPART(X) \
7374 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
/* NOTE(review): body fragment of extract_MB -- computes the mask-begin
   (MB) field for a 32-bit mask operand OP, including wrap-around masks.
   The signature, braces and return statements were dropped by the
   garbled listing; comments only.  */
7381 unsigned long val
= INT_LOWPART (op
);
7383 /* If the high bit is zero, the value is the first 1 bit we find
7385 if ((val
& 0x80000000) == 0)
7387 if ((val
& 0xffffffff) == 0)
/* Scan left until the first set bit reaches the high position.  */
7391 while (((val
<<= 1) & 0x80000000) == 0)
7396 /* If the high bit is set and the low bit is not, or the mask is all
7397 1's, the value is zero. */
7398 if ((val
& 1) == 0 || (val
& 0xffffffff) == 0xffffffff)
7401 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7404 while (((val
>>= 1) & 1) != 0)
/* NOTE(review): body fragment of extract_ME -- computes the mask-end
   (ME) field for a 32-bit mask operand OP; mirror image of extract_MB
   above.  Signature, braces and returns were dropped by the garbled
   listing; comments only.  */
7415 unsigned long val
= INT_LOWPART (op
);
7417 /* If the low bit is zero, the value is the first 1 bit we find from
7421 if ((val
& 0xffffffff) == 0)
/* Scan right until the first set bit reaches the low position.  */
7425 while (((val
>>= 1) & 1) == 0)
7431 /* If the low bit is set and the high bit is not, or the mask is all
7432 1's, the value is 31. */
7433 if ((val
& 0x80000000) == 0 || (val
& 0xffffffff) == 0xffffffff)
7436 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7439 while (((val
<<= 1) & 0x80000000) != 0)
7445 /* Print an operand. Recognize special options, documented below. */
/* NOTE(review): two alternative definitions of the small-data reloc
   name and base register; the #if/#else/#endif lines that selected
   between them (presumably on TARGET_ELF -- confirm against the full
   source) were dropped by the garbled listing.  */
7448 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
7449 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
7451 #define SMALL_DATA_RELOC "sda21"
7452 #define SMALL_DATA_REG 0
/* NOTE(review): print_operand -- emits operand X to FILE under format
   letter CODE.  This listing is garbled: the original line numbers are
   fused into the text and the switch's 'case' labels, braces, breaks
   and several declarations are missing, so the code bytes are left
   untouched and only comments are added.  Each original comment below
   introduces the handler for the next format letter.  */
7456 print_operand (file
, x
, code
)
7463 unsigned HOST_WIDE_INT uval
;
7468 /* Write out an instruction after the call which may be replaced
7469 with glue code by the loader. This depends on the AIX version. */
7470 asm_fprintf (file
, RS6000_CALL_GLUE
);
7473 /* %a is output_address. */
7476 /* If X is a constant integer whose low-order 5 bits are zero,
7477 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
7478 in the AIX assembler where "sri" with a zero shift count
7479 writes a trash instruction. */
7480 if (GET_CODE (x
) == CONST_INT
&& (INTVAL (x
) & 31) == 0)
7487 /* If constant, low-order 16 bits of constant, unsigned.
7488 Otherwise, write normally. */
7490 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 0xffff);
7492 print_operand (file
, x
, 0);
7496 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
7497 for 64-bit mask direction. */
7498 putc (((INT_LOWPART(x
) & 1) == 0 ? 'r' : 'l'), file
);
7501 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
7505 /* There used to be a comment for 'C' reading "This is an
7506 optional cror needed for certain floating-point
7507 comparisons. Otherwise write nothing." */
7509 /* Similar, except that this is for an scc, so we must be able to
7510 encode the test in a single bit that is one. We do the above
7511 for any LE, GE, GEU, or LEU and invert the bit for NE. */
7512 if (GET_CODE (x
) == LE
|| GET_CODE (x
) == GE
7513 || GET_CODE (x
) == LEU
|| GET_CODE (x
) == GEU
)
7515 int base_bit
= 4 * (REGNO (XEXP (x
, 0)) - CR0_REGNO
)
;
7517 fprintf (file
, "cror %d,%d,%d\n\t", base_bit
+ 3,
7519 base_bit
+ (GET_CODE (x
) == GE
|| GET_CODE (x
) == GEU
));
7522 else if (GET_CODE (x
) == NE
)
7524 int base_bit
= 4 * (REGNO (XEXP (x
, 0)) - CR0_REGNO
);
7526 fprintf (file
, "crnor %d,%d,%d\n\t", base_bit
+ 3,
7527 base_bit
+ 2, base_bit
+ 2);
7529 else if (TARGET_SPE
&& TARGET_HARD_FLOAT
7530 && GET_CODE (x
) == EQ
7531 && GET_MODE (XEXP (x
, 0)) == CCFPmode
)
7533 int base_bit
= 4 * (REGNO (XEXP (x
, 0)) - CR0_REGNO
);
7535 fprintf (file
, "crnor %d,%d,%d\n\t", base_bit
+ 1,
7536 base_bit
+ 1, base_bit
+ 1);
7541 /* X is a CR register. Print the number of the EQ bit of the CR */
7542 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7543 output_operand_lossage ("invalid %%E value");
7545 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
) + 2);
7549 /* X is a CR register. Print the shift count needed to move it
7550 to the high-order four bits. */
7551 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7552 output_operand_lossage ("invalid %%f value");
7554 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
));
7558 /* Similar, but print the count for the rotate in the opposite
7560 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7561 output_operand_lossage ("invalid %%F value");
7563 fprintf (file
, "%d", 32 - 4 * (REGNO (x
) - CR0_REGNO
));
7567 /* X is a constant integer. If it is negative, print "m",
7568 otherwise print "z". This is to make an aze or ame insn. */
7569 if (GET_CODE (x
) != CONST_INT
)
7570 output_operand_lossage ("invalid %%G value");
7571 else if (INTVAL (x
) >= 0)
7578 /* If constant, output low-order five bits. Otherwise, write
7581 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 31);
7583 print_operand (file
, x
, 0);
7587 /* If constant, output low-order six bits. Otherwise, write
7590 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 63);
7592 print_operand (file
, x
, 0);
7596 /* Print `i' if this is a constant, else nothing. */
7602 /* Write the bit number in CCR for jump. */
7605 output_operand_lossage ("invalid %%j code");
7607 fprintf (file
, "%d", i
);
7611 /* Similar, but add one for shift count in rlinm for scc and pass
7612 scc flag to `ccr_bit'. */
7615 output_operand_lossage ("invalid %%J code");
7617 /* If we want bit 31, write a shift count of zero, not 32. */
7618 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
7622 /* X must be a constant. Write the 1's complement of the
7625 output_operand_lossage ("invalid %%k value");
7627 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ~ INT_LOWPART (x
));
7631 /* X must be a symbolic constant on ELF. Write an
7632 expression suitable for an 'addi' that adds in the low 16
7634 if (GET_CODE (x
) != CONST
)
7636 print_operand_address (file
, x
);
7641 if (GET_CODE (XEXP (x
, 0)) != PLUS
7642 || (GET_CODE (XEXP (XEXP (x
, 0), 0)) != SYMBOL_REF
7643 && GET_CODE (XEXP (XEXP (x
, 0), 0)) != LABEL_REF
)
7644 || GET_CODE (XEXP (XEXP (x
, 0), 1)) != CONST_INT
)
7645 output_operand_lossage ("invalid %%K value");
7646 print_operand_address (file
, XEXP (XEXP (x
, 0), 0));
7648 /* For GNU as, there must be a non-alphanumeric character
7649 between 'l' and the number. The '-' is added by
7650 print_operand() already. */
7651 if (INTVAL (XEXP (XEXP (x
, 0), 1)) >= 0)
7653 print_operand (file
, XEXP (XEXP (x
, 0), 1), 0);
7657 /* %l is output_asm_label. */
7660 /* Write second word of DImode or DFmode reference. Works on register
7661 or non-indexed memory only. */
7662 if (GET_CODE (x
) == REG
)
7663 fprintf (file
, "%s", reg_names
[REGNO (x
) + 1]);
7664 else if (GET_CODE (x
) == MEM
)
7666 /* Handle possible auto-increment. Since it is pre-increment and
7667 we have already done it, we can just use an offset of word. */
7668 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
7669 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
7670 output_address (plus_constant (XEXP (XEXP (x
, 0), 0),
7673 output_address (XEXP (adjust_address_nv (x
, SImode
,
7677 if (small_data_operand (x
, GET_MODE (x
)))
7678 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
7679 reg_names
[SMALL_DATA_REG
]);
7684 /* MB value for a mask operand. */
7685 if (! mask_operand (x
, SImode
))
7686 output_operand_lossage ("invalid %%m value");
7688 fprintf (file
, "%d", extract_MB (x
));
7692 /* ME value for a mask operand. */
7693 if (! mask_operand (x
, SImode
))
7694 output_operand_lossage ("invalid %%M value");
7696 fprintf (file
, "%d", extract_ME (x
));
7699 /* %n outputs the negative of its operand. */
7702 /* Write the number of elements in the vector times 4. */
7703 if (GET_CODE (x
) != PARALLEL
)
7704 output_operand_lossage ("invalid %%N value");
7706 fprintf (file
, "%d", XVECLEN (x
, 0) * 4);
7710 /* Similar, but subtract 1 first. */
7711 if (GET_CODE (x
) != PARALLEL
)
7712 output_operand_lossage ("invalid %%O value");
7714 fprintf (file
, "%d", (XVECLEN (x
, 0) - 1) * 4);
7718 /* X is a CONST_INT that is a power of two. Output the logarithm. */
7720 || INT_LOWPART (x
) < 0
7721 || (i
= exact_log2 (INT_LOWPART (x
))) < 0)
7722 output_operand_lossage ("invalid %%p value");
7724 fprintf (file
, "%d", i
);
7728 /* The operand must be an indirect memory reference. The result
7729 is the register number. */
7730 if (GET_CODE (x
) != MEM
|| GET_CODE (XEXP (x
, 0)) != REG
7731 || REGNO (XEXP (x
, 0)) >= 32)
7732 output_operand_lossage ("invalid %%P value");
7734 fprintf (file
, "%d", REGNO (XEXP (x
, 0)));
7738 /* This outputs the logical code corresponding to a boolean
7739 expression. The expression may have one or both operands
7740 negated (if one, only the first one). For condition register
7741 logical operations, it will also treat the negated
7742 CR codes as NOTs, but not handle NOTs of them. */
/* NOTE(review): tbl rows are indexed by AND/IOR/XOR and columns by the
   operand-negation pattern; the selection code between the table and
   the NOT checks below is partly missing from the listing.  */
7744 const char *const *t
= 0;
7746 enum rtx_code code
= GET_CODE (x
);
7747 static const char * const tbl
[3][3] = {
7748 { "and", "andc", "nor" },
7749 { "or", "orc", "nand" },
7750 { "xor", "eqv", "xor" } };
7754 else if (code
== IOR
)
7756 else if (code
== XOR
)
7759 output_operand_lossage ("invalid %%q value");
7761 if (GET_CODE (XEXP (x
, 0)) != NOT
)
7765 if (GET_CODE (XEXP (x
, 1)) == NOT
)
7776 /* X is a CR register. Print the mask for `mtcrf'. */
7777 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7778 output_operand_lossage ("invalid %%R value");
7780 fprintf (file
, "%d", 128 >> (REGNO (x
) - CR0_REGNO
));
7784 /* Low 5 bits of 32 - value */
7786 output_operand_lossage ("invalid %%s value");
7788 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, (32 - INT_LOWPART (x
)) & 31);
7792 /* PowerPC64 mask position. All 0's is excluded.
7793 CONST_INT 32-bit mask is considered sign-extended so any
7794 transition must occur within the CONST_INT, not on the boundary. */
7795 if (! mask64_operand (x
, DImode
))
7796 output_operand_lossage ("invalid %%S value");
7798 uval
= INT_LOWPART (x
);
7800 if (uval
& 1) /* Clear Left */
7802 #if HOST_BITS_PER_WIDE_INT > 64
7803 uval
&= ((unsigned HOST_WIDE_INT
) 1 << 64) - 1;
7807 else /* Clear Right */
7810 #if HOST_BITS_PER_WIDE_INT > 64
7811 uval
&= ((unsigned HOST_WIDE_INT
) 1 << 64) - 1;
7819 fprintf (file
, "%d", i
);
7823 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
7824 if (GET_CODE (x
) != REG
|| GET_MODE (x
) != CCmode
)
7827 /* Bit 3 is OV bit. */
7828 i
= 4 * (REGNO (x
) - CR0_REGNO
) + 3;
7830 /* If we want bit 31, write a shift count of zero, not 32. */
7831 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
7835 /* Print the symbolic name of a branch target register. */
7836 if (GET_CODE (x
) != REG
|| (REGNO (x
) != LINK_REGISTER_REGNUM
7837 && REGNO (x
) != COUNT_REGISTER_REGNUM
))
7838 output_operand_lossage ("invalid %%T value");
7839 else if (REGNO (x
) == LINK_REGISTER_REGNUM
)
7840 fputs (TARGET_NEW_MNEMONICS
? "lr" : "r", file
);
7842 fputs ("ctr", file
);
7846 /* High-order 16 bits of constant for use in unsigned operand. */
7848 output_operand_lossage ("invalid %%u value");
7850 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
7851 (INT_LOWPART (x
) >> 16) & 0xffff);
7855 /* High-order 16 bits of constant for use in signed operand. */
7857 output_operand_lossage ("invalid %%v value");
7859 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
7860 (INT_LOWPART (x
) >> 16) & 0xffff);
7864 /* Print `u' if this has an auto-increment or auto-decrement. */
7865 if (GET_CODE (x
) == MEM
7866 && (GET_CODE (XEXP (x
, 0)) == PRE_INC
7867 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
))
7872 /* Print the trap code for this operand. */
/* NOTE(review): the comments after each fputs give the numeric trap
   encoding for that condition; the case labels are missing from the
   listing.  */
7873 switch (GET_CODE (x
))
7876 fputs ("eq", file
); /* 4 */
7879 fputs ("ne", file
); /* 24 */
7882 fputs ("lt", file
); /* 16 */
7885 fputs ("le", file
); /* 20 */
7888 fputs ("gt", file
); /* 8 */
7891 fputs ("ge", file
); /* 12 */
7894 fputs ("llt", file
); /* 2 */
7897 fputs ("lle", file
); /* 6 */
7900 fputs ("lgt", file
); /* 1 */
7903 fputs ("lge", file
); /* 5 */
7911 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
7914 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
,
7915 ((INT_LOWPART (x
) & 0xffff) ^ 0x8000) - 0x8000);
7917 print_operand (file
, x
, 0);
7921 /* MB value for a PowerPC64 rldic operand. */
7922 val
= (GET_CODE (x
) == CONST_INT
7923 ? INTVAL (x
) : CONST_DOUBLE_HIGH (x
));
7928 for (i
= 0; i
< HOST_BITS_PER_WIDE_INT
; i
++)
7929 if ((val
<<= 1) < 0)
7932 #if HOST_BITS_PER_WIDE_INT == 32
7933 if (GET_CODE (x
) == CONST_INT
&& i
>= 0)
7934 i
+= 32; /* zero-extend high-part was all 0's */
7935 else if (GET_CODE (x
) == CONST_DOUBLE
&& i
== 32)
7937 val
= CONST_DOUBLE_LOW (x
);
7944 for ( ; i
< 64; i
++)
7945 if ((val
<<= 1) < 0)
7950 fprintf (file
, "%d", i
+ 1);
7954 if (GET_CODE (x
) == MEM
7955 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x
, 0), 0))
7960 /* Like 'L', for third word of TImode */
7961 if (GET_CODE (x
) == REG
)
7962 fprintf (file
, "%s", reg_names
[REGNO (x
) + 2]);
7963 else if (GET_CODE (x
) == MEM
)
7965 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
7966 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
7967 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 8));
7969 output_address (XEXP (adjust_address_nv (x
, SImode
, 8), 0));
7970 if (small_data_operand (x
, GET_MODE (x
)))
7971 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
7972 reg_names
[SMALL_DATA_REG
]);
7977 /* X is a SYMBOL_REF. Write out the name preceded by a
7978 period and without any trailing data in brackets. Used for function
7979 names. If we are configured for System V (or the embedded ABI) on
7980 the PowerPC, do not emit the period, since those systems do not use
7981 TOCs and the like. */
7982 if (GET_CODE (x
) != SYMBOL_REF
)
7985 if (XSTR (x
, 0)[0] != '.')
7987 switch (DEFAULT_ABI
)
7997 case ABI_AIX_NODESC
:
8003 RS6000_OUTPUT_BASENAME (file
, XSTR (x
, 0));
8005 assemble_name (file
, XSTR (x
, 0));
8010 /* Like 'L', for last word of TImode. */
8011 if (GET_CODE (x
) == REG
)
8012 fprintf (file
, "%s", reg_names
[REGNO (x
) + 3]);
8013 else if (GET_CODE (x
) == MEM
)
8015 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
8016 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
8017 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 12));
8019 output_address (XEXP (adjust_address_nv (x
, SImode
, 12), 0));
8020 if (small_data_operand (x
, GET_MODE (x
)))
8021 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
8022 reg_names
[SMALL_DATA_REG
]);
8026 /* Print AltiVec or SPE memory operand. */
8031 if (GET_CODE (x
) != MEM
)
8039 if (GET_CODE (tmp
) == REG
)
8041 fprintf (file
, "0(%s)", reg_names
[REGNO (tmp
)]);
8044 /* Handle [reg+UIMM]. */
8045 else if (GET_CODE (tmp
) == PLUS
&&
8046 GET_CODE (XEXP (tmp
, 1)) == CONST_INT
)
8050 if (GET_CODE (XEXP (tmp
, 0)) != REG
)
8053 x
= INTVAL (XEXP (tmp
, 1));
8054 fprintf (file
, "%d(%s)", x
, reg_names
[REGNO (XEXP (tmp
, 0))]);
8058 /* Fall through. Must be [reg+reg]. */
8060 if (GET_CODE (tmp
) == REG
)
8061 fprintf (file
, "0,%s", reg_names
[REGNO (tmp
)]);
8062 else if (GET_CODE (tmp
) == PLUS
&& GET_CODE (XEXP (tmp
, 1)) == REG
)
8064 if (REGNO (XEXP (tmp
, 0)) == 0)
8065 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (tmp
, 1)) ],
8066 reg_names
[ REGNO (XEXP (tmp
, 0)) ]);
8068 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (tmp
, 0)) ],
8069 reg_names
[ REGNO (XEXP (tmp
, 1)) ]);
8077 if (GET_CODE (x
) == REG
)
8078 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
8079 else if (GET_CODE (x
) == MEM
)
8081 /* We need to handle PRE_INC and PRE_DEC here, since we need to
8082 know the width from the mode. */
8083 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
)
8084 fprintf (file
, "%d(%s)", GET_MODE_SIZE (GET_MODE (x
)),
8085 reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))]);
8086 else if (GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
8087 fprintf (file
, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x
)),
8088 reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))]);
8090 output_address (XEXP (x
, 0));
8093 output_addr_const (file
, x
);
8097 output_operand_lossage ("invalid %%xn code");
8101 /* Print the address of an operand. */
/* NOTE(review): print_operand_address -- emits the memory address X to
   FILE in assembler syntax.  Listing is garbled (line numbers fused in,
   braces/else keywords missing); comments only, code bytes untouched.
   The branches handle, in order: bare register, symbolic constants
   (with small-data annotation), TOC references, reg+reg, reg+const,
   lo_sum on ELF ("@l") and Darwin ("lo16(...)"), and constant-pool
   addresses.  */
8104 print_operand_address (file
, x
)
8108 if (GET_CODE (x
) == REG
)
8109 fprintf (file
, "0(%s)", reg_names
[ REGNO (x
) ]);
8110 else if (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == CONST
8111 || GET_CODE (x
) == LABEL_REF
)
8113 output_addr_const (file
, x
);
8114 if (small_data_operand (x
, GET_MODE (x
)))
8115 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
8116 reg_names
[SMALL_DATA_REG
]);
8117 else if (TARGET_TOC
)
8120 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == REG
)
/* reg+reg: if the first register is r0 the operands are swapped,
   since r0 as a base means literal zero on PowerPC.  */
8122 if (REGNO (XEXP (x
, 0)) == 0)
8123 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (x
, 1)) ],
8124 reg_names
[ REGNO (XEXP (x
, 0)) ]);
8126 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (x
, 0)) ],
8127 reg_names
[ REGNO (XEXP (x
, 1)) ]);
8129 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
)
8131 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (XEXP (x
, 1)));
8132 fprintf (file
, "(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
8135 else if (GET_CODE (x
) == LO_SUM
&& GET_CODE (XEXP (x
, 0)) == REG
8136 && CONSTANT_P (XEXP (x
, 1)))
8138 output_addr_const (file
, XEXP (x
, 1));
8139 fprintf (file
, "@l(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
8143 else if (GET_CODE (x
) == LO_SUM
&& GET_CODE (XEXP (x
, 0)) == REG
8144 && CONSTANT_P (XEXP (x
, 1)))
8146 fprintf (file
, "lo16(");
8147 output_addr_const (file
, XEXP (x
, 1));
8148 fprintf (file
, ")(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
8151 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x
))
8153 if (TARGET_AIX
&& (!TARGET_ELF
|| !TARGET_MINIMAL_TOC
))
8155 rtx contains_minus
= XEXP (x
, 1);
8159 /* Find the (minus (sym) (toc)) buried in X, and temporarily
8160 turn it into (sym) for output_addr_const. */
8161 while (GET_CODE (XEXP (contains_minus
, 0)) != MINUS
)
8162 contains_minus
= XEXP (contains_minus
, 0);
8164 minus
= XEXP (contains_minus
, 0);
8165 symref
= XEXP (minus
, 0);
8166 XEXP (contains_minus
, 0) = symref
;
/* Temporarily append "@toc" to the symbol name, print, then restore
   both the name and the rewritten RTL.  */
8171 name
= XSTR (symref
, 0);
8172 newname
= alloca (strlen (name
) + sizeof ("@toc"));
8173 strcpy (newname
, name
);
8174 strcat (newname
, "@toc");
8175 XSTR (symref
, 0) = newname
;
8177 output_addr_const (file
, XEXP (x
, 1));
8179 XSTR (symref
, 0) = name
;
8180 XEXP (contains_minus
, 0) = minus
;
8183 output_addr_const (file
, XEXP (x
, 1));
8185 fprintf (file
, "(%s)", reg_names
[REGNO (XEXP (x
, 0))]);
8191 /* Target hook for assembling integer objects. The PowerPC version has
8192 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
8193 is defined. It also needs to handle DI-mode objects on 64-bit
/* NOTE(review): target hook for assembling integer objects (see the
   header comment above) -- emits ".fixup" entries for -mrelocatable
   SImode values and strips leading periods from -mcall-aixdesc
   function symbols, then defers to default_assemble_integer.  Listing
   is garbled (line numbers fused in, braces and several conditions
   missing); comments only.  */
8197 rs6000_assemble_integer (x
, size
, aligned_p
)
8202 #ifdef RELOCATABLE_NEEDS_FIXUP
8203 /* Special handling for SI values. */
8204 if (size
== 4 && aligned_p
)
8206 extern int in_toc_section
PARAMS ((void));
8207 static int recurse
= 0;
8209 /* For -mrelocatable, we mark all addresses that need to be fixed up
8210 in the .fixup section. */
8211 if (TARGET_RELOCATABLE
8212 && !in_toc_section ()
8213 && !in_text_section ()
8215 && GET_CODE (x
) != CONST_INT
8216 && GET_CODE (x
) != CONST_DOUBLE
/* Emit the value as "(expr)@fixup" under a local label, then record
   that label's address in the .fixup section.  */
8222 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCP", fixuplabelno
);
8224 ASM_OUTPUT_LABEL (asm_out_file
, buf
);
8225 fprintf (asm_out_file
, "\t.long\t(");
8226 output_addr_const (asm_out_file
, x
);
8227 fprintf (asm_out_file
, ")@fixup\n");
8228 fprintf (asm_out_file
, "\t.section\t\".fixup\",\"aw\"\n");
8229 ASM_OUTPUT_ALIGN (asm_out_file
, 2);
8230 fprintf (asm_out_file
, "\t.long\t");
8231 assemble_name (asm_out_file
, buf
);
8232 fprintf (asm_out_file
, "\n\t.previous\n");
8236 /* Remove initial .'s to turn a -mcall-aixdesc function
8237 address into the address of the descriptor, not the function
8239 else if (GET_CODE (x
) == SYMBOL_REF
8240 && XSTR (x
, 0)[0] == '.'
8241 && DEFAULT_ABI
== ABI_AIX
)
8243 const char *name
= XSTR (x
, 0);
8244 while (*name
== '.')
8247 fprintf (asm_out_file
, "\t.long\t%s\n", name
);
8251 #endif /* RELOCATABLE_NEEDS_FIXUP */
8252 return default_assemble_integer (x
, size
, aligned_p
);
8255 #ifdef HAVE_GAS_HIDDEN
8256 /* Emit an assembler directive to set symbol visibility for DECL to
/* NOTE(review): emits symbol-visibility directives; on AIX-ABI function
   decls it emits the directive for both the descriptor symbol and the
   dot-prefixed entry-point symbol, otherwise defers to the default
   hook.  Listing is garbled (braces, array terminator and else keyword
   missing); comments only.  */
8260 rs6000_assemble_visibility (decl
, vis
)
8264 /* Functions need to have their entry point symbol visibility set as
8265 well as their descriptor symbol visibility. */
8266 if (DEFAULT_ABI
== ABI_AIX
&& TREE_CODE (decl
) == FUNCTION_DECL
)
/* Index 0 (VISIBILITY_DEFAULT, presumably) maps to NULL -- no
   directive is emitted for default visibility.  */
8268 static const char * const visibility_types
[] = {
8269 NULL
, "internal", "hidden", "protected"
8272 const char *name
, *type
;
8274 name
= ((* targetm
.strip_name_encoding
)
8275 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
))));
8276 type
= visibility_types
[vis
];
/* Descriptor symbol, then the '.'-prefixed entry-point symbol.  */
8278 fprintf (asm_out_file
, "\t.%s\t%s\n", type
, name
);
8279 fprintf (asm_out_file
, "\t.%s\t.%s\n", type
, name
);
8282 default_assemble_visibility (decl
, vis
);
/* NOTE(review): reverses a comparison code; for CCFPmode without
   -funsafe-math-optimizations it uses the unordered-aware reversal so
   NaN semantics are preserved.  Braces and return-type line missing
   from the garbled listing; comments only.  */
8287 rs6000_reverse_condition (mode
, code
)
8288 enum machine_mode mode
;
8291 /* Reversal of FP compares takes care -- an ordered compare
8292 becomes an unordered compare and vice versa. */
8293 if (mode
== CCFPmode
&& !flag_unsafe_math_optimizations
)
8294 return reverse_condition_maybe_unordered (code
);
8296 return reverse_condition (code
);
8299 /* Generate a compare for CODE. Return a brand-new rtx that
8300 represents the result of the compare. */
/* NOTE(review): emits the compare for CODE against the implicit
   rs6000_compare_op0/op1 globals and returns a fresh comparison RTX
   against const0_rtx.  Listing is garbled -- case labels, braces and
   several operands (e.g. the missing third gen_* argument lines) were
   dropped; comments only, code bytes untouched.  */
8303 rs6000_generate_compare (code
)
8306 enum machine_mode comp_mode
;
/* Pick the CC mode: CCFP for FP compares, CCUNS for unsigned integer
   compares (the plain-CC default assignment is among the dropped
   lines).  */
8309 if (rs6000_compare_fp_p
)
8310 comp_mode
= CCFPmode
;
8311 else if (code
== GTU
|| code
== LTU
8312 || code
== GEU
|| code
== LEU
)
8313 comp_mode
= CCUNSmode
;
8317 /* First, the compare. */
8318 compare_result
= gen_reg_rtx (comp_mode
)
;
8320 /* SPE FP compare instructions on the GPRs. Yuck! */
8321 if ((TARGET_SPE
&& TARGET_HARD_FLOAT
) && rs6000_compare_fp_p
)
8323 rtx cmp
, or1
, or2
, or_result
, compare_result2
;
/* NOTE(review): the switch on CODE selecting eq/gt/lt is missing its
   case labels; each arm picks the tst* (unsafe-math) or cmp* insn.  */
8331 cmp
= flag_unsafe_math_optimizations
8332 ? gen_tstsfeq_gpr (compare_result
, rs6000_compare_op0
,
8334 : gen_cmpsfeq_gpr (compare_result
, rs6000_compare_op0
,
8335 rs6000_compare_op1
);
8343 cmp
= flag_unsafe_math_optimizations
8344 ? gen_tstsfgt_gpr (compare_result
, rs6000_compare_op0
,
8346 : gen_cmpsfgt_gpr (compare_result
, rs6000_compare_op0
,
8347 rs6000_compare_op1
);
8355 cmp
= flag_unsafe_math_optimizations
8356 ? gen_tstsflt_gpr (compare_result
, rs6000_compare_op0
,
8358 : gen_cmpsflt_gpr (compare_result
, rs6000_compare_op0
,
8359 rs6000_compare_op1
);
8365 /* Synthesize LE and GE from LT/GT || EQ. */
8366 if (code
== LE
|| code
== GE
|| code
== LEU
|| code
== GEU
)
8368 /* Synthesize GE/LE frome GT/LT || EQ. */
8374 case LE
: code
= LT
; break;
8375 case GE
: code
= GT
; break;
8376 case LEU
: code
= LT
; break;
8377 case GEU
: code
= GT
; break;
8381 or1
= gen_reg_rtx (SImode
);
8382 or2
= gen_reg_rtx (SImode
);
8383 or_result
= gen_reg_rtx (CCEQmode
);
8384 compare_result2
= gen_reg_rtx (CCFPmode
);
/* Second compare: the EQ half of the synthesized LE/GE.  */
8387 cmp
= flag_unsafe_math_optimizations
8388 ? gen_tstsfeq_gpr (compare_result2
, rs6000_compare_op0
,
8390 : gen_cmpsfeq_gpr (compare_result2
, rs6000_compare_op0
,
8391 rs6000_compare_op1
);
8394 /* The MC8540 FP compare instructions set the CR bits
8395 differently than other PPC compare instructions. For
8396 that matter, there is no generic test instruction, but a
8397 testgt, testlt, and testeq. For a true condition, bit 2
8398 is set (x1xx) in the CR. Following the traditional CR
8404 ... bit 2 would be a GT CR alias, so later on we
8405 look in the GT bits for the branch instructions.
8406 However, we must be careful to emit correct RTL in
8407 the meantime, so optimizations don't get confused. */
8409 or1
= gen_rtx (NE
, SImode
, compare_result
, const0_rtx
);
8410 or2
= gen_rtx (NE
, SImode
, compare_result2
, const0_rtx
);
8412 /* OR them together. */
8413 cmp
= gen_rtx_SET (VOIDmode
, or_result
,
8414 gen_rtx_COMPARE (CCEQmode
,
8415 gen_rtx_IOR (SImode
, or1
, or2
),
8417 compare_result
= or_result
;
8422 /* We only care about 1 bit (x1xx), so map everything to NE to
8423 maintain rtl sanity. We'll get to the right bit (x1xx) at
8424 code output time. */
8425 if (code
== NE
|| code
== LTGT
)
8426 /* Do the inverse here because we have no cmpne
8427 instruction. We use the cmpeq instruction and expect
8428 to get a 0 instead. */
/* Non-SPE path: a single COMPARE set (op0 operand line is among the
   dropped lines).  */
8437 emit_insn (gen_rtx_SET (VOIDmode
, compare_result
,
8438 gen_rtx_COMPARE (comp_mode
,
8440 rs6000_compare_op1
)));
8442 /* Some kinds of FP comparisons need an OR operation;
8443 except for flag_unsafe_math_optimizations we don't bother. */
8444 if (rs6000_compare_fp_p
8445 && ! flag_unsafe_math_optimizations
8446 && ! (TARGET_HARD_FLOAT
&& TARGET_SPE
)
8447 && (code
== LE
|| code
== GE
8448 || code
== UNEQ
|| code
== LTGT
8449 || code
== UNGT
|| code
== UNLT
))
8451 enum rtx_code or1
, or2
;
8452 rtx or1_rtx
, or2_rtx
, compare2_rtx
;
8453 rtx or_result
= gen_reg_rtx (CCEQmode
);
/* Decompose the composite FP condition into two primitive CR-bit
   tests that are then cror'd together.  */
8457 case LE
: or1
= LT
; or2
= EQ
; break;
8458 case GE
: or1
= GT
; or2
= EQ
; break;
8459 case UNEQ
: or1
= UNORDERED
; or2
= EQ
; break;
8460 case LTGT
: or1
= LT
; or2
= GT
; break;
8461 case UNGT
: or1
= UNORDERED
; or2
= GT
; break;
8462 case UNLT
: or1
= UNORDERED
; or2
= LT
; break;
8465 validate_condition_mode (or1
, comp_mode
);
8466 validate_condition_mode (or2
, comp_mode
);
8467 or1_rtx
= gen_rtx (or1
, SImode
, compare_result
, const0_rtx
);
8468 or2_rtx
= gen_rtx (or2
, SImode
, compare_result
, const0_rtx
);
8469 compare2_rtx
= gen_rtx_COMPARE (CCEQmode
,
8470 gen_rtx_IOR (SImode
, or1_rtx
, or2_rtx
),
8472 emit_insn (gen_rtx_SET (VOIDmode
, or_result
, compare2_rtx
));
8474 compare_result
= or_result
;
8478 validate_condition_mode (code
, GET_MODE (compare_result
));
8480 return gen_rtx (code
, VOIDmode
, compare_result
, const0_rtx
);
8484 /* Emit the RTL for an sCOND pattern. */
/* NOTE(review): emits the RTL for an sCOND (store-condition) pattern:
   generates the compare for CODE, then sets RESULT from the condition
   in DImode (via convert_move) on 64-bit FP/DImode compares, or SImode
   otherwise.  Braces and parameter declarations are missing from the
   garbled listing; comments only.  */
8487 rs6000_emit_sCOND (code
, result
)
8492 enum machine_mode op_mode
;
8494 condition_rtx
= rs6000_generate_compare (code
);
8496 op_mode
= GET_MODE (rs6000_compare_op0
);
8497 if (op_mode
== VOIDmode
)
8498 op_mode
= GET_MODE (rs6000_compare_op1
);
8500 if (TARGET_POWERPC64
&& (op_mode
== DImode
|| rs6000_compare_fp_p
))
8502 PUT_MODE (condition_rtx
, DImode
);
8503 convert_move (result
, condition_rtx
, 0);
8507 PUT_MODE (condition_rtx
, SImode
);
8508 emit_insn (gen_rtx_SET (VOIDmode
, result
, condition_rtx
));
8512 /* Emit a branch of kind CODE to location LOC. */
/* NOTE(review): emits a conditional branch of kind CODE to label LOC:
   builds the compare, wraps LOC in a LABEL_REF, and emits a
   (set pc (if_then_else ...)) jump.  The tail of the emit_jump_insn
   call (the taken/not-taken arms and closing parens) is missing from
   the garbled listing; comments only.  */
8515 rs6000_emit_cbranch (code
, loc
)
8519 rtx condition_rtx
, loc_ref
;
8521 condition_rtx
= rs6000_generate_compare (code
);
8522 loc_ref
= gen_rtx_LABEL_REF (VOIDmode
, loc
);
8523 emit_jump_insn (gen_rtx_SET (VOIDmode
, pc_rtx
,
8524 gen_rtx_IF_THEN_ELSE (VOIDmode
, condition_rtx
,
8528 /* Return the string to output a conditional branch to LABEL, which is
8529 the operand number of the label, or -1 if the branch is really a
8532 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
8533 condition code register and its mode specifies what kind of
8536 REVERSED is nonzero if we should reverse the sense of the comparison.
8538 INSN is the insn. */
/* NOTE(review): builds the assembler string for a conditional branch
   (see the header comment above: OP is the condition, LABEL the label
   operand or NULL, REVERSED flips the sense, INSN supplies length and
   branch-probability info).  Listing is garbled -- case labels, braces
   and some statements are missing; comments only, code bytes
   untouched.  Returns a pointer into a static buffer, so the result
   must be consumed before the next call.  */
8541 output_cbranch (op
, label
, reversed
, insn
)
8547 static char string
[64];
8548 enum rtx_code code
= GET_CODE (op
);
8549 rtx cc_reg
= XEXP (op
, 0);
8550 enum machine_mode mode
= GET_MODE (cc_reg
);
8551 int cc_regno
= REGNO (cc_reg
) - CR0_REGNO
;
/* A length-8 insn means the target is out of short-branch range.  */
8552 int need_longbranch
= label
!= NULL
&& get_attr_length (insn
) == 8;
8553 int really_reversed
= reversed
^ need_longbranch
;
8559 validate_condition_mode (code
, mode
);
8561 /* Work out which way this really branches. We could use
8562 reverse_condition_maybe_unordered here always but this
8563 makes the resulting assembler clearer. */
8564 if (really_reversed
)
8566 /* Reversal of FP compares takes care -- an ordered compare
8567 becomes an unordered compare and vice versa. */
8568 if (mode
== CCFPmode
)
8569 code
= reverse_condition_maybe_unordered (code
);
8571 code
= reverse_condition (code
);
8574 if ((TARGET_SPE
&& TARGET_HARD_FLOAT
) && mode
== CCFPmode
)
8576 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
8579 /* Opposite of GT. */
8581 else if (code
== NE
)
8589 /* Not all of these are actually distinct opcodes, but
8590 we distinguish them for clarity of the resulting assembler. */
/* NOTE(review): the case labels choosing each mnemonic are missing
   from the listing.  */
8592 ccode
= "ne"; break;
8594 ccode
= "eq"; break;
8596 ccode
= "ge"; break;
8597 case GT
: case GTU
: case UNGT
:
8598 ccode
= "gt"; break;
8600 ccode
= "le"; break;
8601 case LT
: case LTU
: case UNLT
:
8602 ccode
= "lt"; break;
8603 case UNORDERED
: ccode
= "un"; break;
8604 case ORDERED
: ccode
= "nu"; break;
8605 case UNGE
: ccode
= "nl"; break;
8606 case UNLE
: ccode
= "ng"; break;
8611 /* Maybe we have a guess as to how likely the branch is.
8612 The old mnemonics don't have a way to specify this information. */
8614 note
= find_reg_note (insn
, REG_BR_PROB
, NULL_RTX
);
8615 if (note
!= NULL_RTX
)
8617 /* PROB is the difference from 50%. */
8618 int prob
= INTVAL (XEXP (note
, 0)) - REG_BR_PROB_BASE
/ 2;
8619 bool always_hint
= rs6000_cpu
!= PROCESSOR_POWER4
;
8621 /* Only hint for highly probable/improbable branches on newer
8622 cpus as static prediction overrides processor dynamic
8623 prediction. For older cpus we may as well always hint, but
8624 assume not taken for branches that are very close to 50% as a
8625 mispredicted taken branch is more expensive than a
8626 mispredicted not-taken branch. */
8628 || abs (prob
) > REG_BR_PROB_BASE
/ 100 * 48)
8630 if (abs (prob
) > REG_BR_PROB_BASE
/ 20
8631 && ((prob
> 0) ^ need_longbranch
))
/* Emit the branch mnemonic in {old|new} mnemonic syntax; the
   to-link-register form when LABEL is absent, otherwise the plain
   conditional form.  */
8639 s
+= sprintf (s
, "{b%sr|b%slr%s} ", ccode
, ccode
, pred
);
8641 s
+= sprintf (s
, "{b%s|b%s%s} ", ccode
, ccode
, pred
);
8643 /* We need to escape any '%' characters in the reg_names string.
8644 Assume they'd only be the first character... */
8645 if (reg_names
[cc_regno
+ CR0_REGNO
][0] == '%')
8647 s
+= sprintf (s
, "%s", reg_names
[cc_regno
+ CR0_REGNO
]);
8651 /* If the branch distance was too far, we may have to use an
8652 unconditional branch to go the distance. */
8653 if (need_longbranch
)
8654 s
+= sprintf (s
, ",$+8\n\tb %s", label
);
8656 s
+= sprintf (s
, ",%s", label
);
8662 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
8663 operands of the last comparison is nonzero/true, FALSE_COND if it
8664 is zero/false. Return 0 if the hardware has no such operation. */
8667 rs6000_emit_cmove (dest
, op
, true_cond
, false_cond
)
8673 enum rtx_code code
= GET_CODE (op
);
8674 rtx op0
= rs6000_compare_op0
;
8675 rtx op1
= rs6000_compare_op1
;
8677 enum machine_mode compare_mode
= GET_MODE (op0
);
8678 enum machine_mode result_mode
= GET_MODE (dest
);
8681 /* These modes should always match. */
8682 if (GET_MODE (op1
) != compare_mode
8683 /* In the isel case however, we can use a compare immediate, so
8684 op1 may be a small constant. */
8685 && (!TARGET_ISEL
|| !short_cint_operand (op1
, VOIDmode
)))
8687 if (GET_MODE (true_cond
) != result_mode
)
8689 if (GET_MODE (false_cond
) != result_mode
)
8692 /* First, work out if the hardware can do this at all, or
8693 if it's too slow... */
8694 if (! rs6000_compare_fp_p
)
8697 return rs6000_emit_int_cmove (dest
, op
, true_cond
, false_cond
);
8701 /* Eliminate half of the comparisons by switching operands, this
8702 makes the remaining code simpler. */
8703 if (code
== UNLT
|| code
== UNGT
|| code
== UNORDERED
|| code
== NE
8704 || code
== LTGT
|| code
== LT
)
8706 code
= reverse_condition_maybe_unordered (code
);
8708 true_cond
= false_cond
;
8712 /* UNEQ and LTGT take four instructions for a comparison with zero,
8713 it'll probably be faster to use a branch here too. */
8717 if (GET_CODE (op1
) == CONST_DOUBLE
)
8718 REAL_VALUE_FROM_CONST_DOUBLE (c1
, op1
);
8720 /* We're going to try to implement comparisons by performing
8721 a subtract, then comparing against zero. Unfortunately,
8722 Inf - Inf is NaN which is not zero, and so if we don't
8723 know that the operand is finite and the comparison
8724 would treat EQ different to UNORDERED, we can't do it. */
8725 if (! flag_unsafe_math_optimizations
8726 && code
!= GT
&& code
!= UNGE
8727 && (GET_CODE (op1
) != CONST_DOUBLE
|| real_isinf (&c1
))
8728 /* Constructs of the form (a OP b ? a : b) are safe. */
8729 && ((! rtx_equal_p (op0
, false_cond
) && ! rtx_equal_p (op1
, false_cond
))
8730 || (! rtx_equal_p (op0
, true_cond
)
8731 && ! rtx_equal_p (op1
, true_cond
))))
8733 /* At this point we know we can use fsel. */
8735 /* Reduce the comparison to a comparison against zero. */
8736 temp
= gen_reg_rtx (compare_mode
);
8737 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8738 gen_rtx_MINUS (compare_mode
, op0
, op1
)));
8740 op1
= CONST0_RTX (compare_mode
);
8742 /* If we don't care about NaNs we can reduce some of the comparisons
8743 down to faster ones. */
8744 if (flag_unsafe_math_optimizations
)
8750 true_cond
= false_cond
;
8763 /* Now, reduce everything down to a GE. */
8770 temp
= gen_reg_rtx (compare_mode
);
8771 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
8776 temp
= gen_reg_rtx (compare_mode
);
8777 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_ABS (compare_mode
, op0
)));
8782 temp
= gen_reg_rtx (compare_mode
);
8783 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8784 gen_rtx_NEG (compare_mode
,
8785 gen_rtx_ABS (compare_mode
, op0
))));
8790 temp
= gen_reg_rtx (result_mode
);
8791 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8792 gen_rtx_IF_THEN_ELSE (result_mode
,
8793 gen_rtx_GE (VOIDmode
,
8795 true_cond
, false_cond
)));
8797 true_cond
= false_cond
;
8799 temp
= gen_reg_rtx (compare_mode
);
8800 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
8805 temp
= gen_reg_rtx (result_mode
);
8806 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8807 gen_rtx_IF_THEN_ELSE (result_mode
,
8808 gen_rtx_GE (VOIDmode
,
8810 true_cond
, false_cond
)));
8812 false_cond
= true_cond
;
8814 temp
= gen_reg_rtx (compare_mode
);
8815 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
8823 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
8824 gen_rtx_IF_THEN_ELSE (result_mode
,
8825 gen_rtx_GE (VOIDmode
,
8827 true_cond
, false_cond
)));
8831 /* Same as above, but for ints (isel). */
8834 rs6000_emit_int_cmove (dest
, op
, true_cond
, false_cond
)
8840 rtx condition_rtx
, cr
;
8842 /* All isel implementations thus far are 32-bits. */
8843 if (GET_MODE (rs6000_compare_op0
) != SImode
)
8846 /* We still have to do the compare, because isel doesn't do a
8847 compare, it just looks at the CRx bits set by a previous compare
8849 condition_rtx
= rs6000_generate_compare (GET_CODE (op
));
8850 cr
= XEXP (condition_rtx
, 0);
8852 if (GET_MODE (cr
) == CCmode
)
8853 emit_insn (gen_isel_signed (dest
, condition_rtx
,
8854 true_cond
, false_cond
, cr
));
8856 emit_insn (gen_isel_unsigned (dest
, condition_rtx
,
8857 true_cond
, false_cond
, cr
));
8863 output_isel (operands
)
8868 code
= GET_CODE (operands
[1]);
8869 if (code
== GE
|| code
== GEU
|| code
== LE
|| code
== LEU
|| code
== NE
)
8871 PUT_CODE (operands
[1], reverse_condition (code
));
8872 return "isel %0,%3,%2,%j1";
8875 return "isel %0,%2,%3,%j1";
8879 rs6000_emit_minmax (dest
, code
, op0
, op1
)
8885 enum machine_mode mode
= GET_MODE (op0
);
8889 if (code
== SMAX
|| code
== SMIN
)
8894 if (code
== SMAX
|| code
== UMAX
)
8895 target
= emit_conditional_move (dest
, c
, op0
, op1
, mode
,
8898 target
= emit_conditional_move (dest
, c
, op0
, op1
, mode
,
8900 if (target
== NULL_RTX
)
8903 emit_move_insn (dest
, target
);
8906 /* This page contains routines that are used to determine what the
8907 function prologue and epilogue code will do and write them out. */
8909 /* Return the first fixed-point register that is required to be
8910 saved. 32 if none. */
8913 first_reg_to_save ()
8917 /* Find lowest numbered live register. */
8918 for (first_reg
= 13; first_reg
<= 31; first_reg
++)
8919 if (regs_ever_live
[first_reg
]
8920 && (! call_used_regs
[first_reg
]
8921 || (first_reg
== RS6000_PIC_OFFSET_TABLE_REGNUM
8922 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
!= 0)
8923 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
)))))
8928 && current_function_uses_pic_offset_table
8929 && first_reg
> RS6000_PIC_OFFSET_TABLE_REGNUM
)
8930 return RS6000_PIC_OFFSET_TABLE_REGNUM
;
8936 /* Similar, for FP regs. */
8939 first_fp_reg_to_save ()
8943 /* Find lowest numbered live register. */
8944 for (first_reg
= 14 + 32; first_reg
<= 63; first_reg
++)
8945 if (regs_ever_live
[first_reg
])
8951 /* Similar, for AltiVec regs. */
8954 first_altivec_reg_to_save ()
8958 /* Stack frame remains as is unless we are in AltiVec ABI. */
8959 if (! TARGET_ALTIVEC_ABI
)
8960 return LAST_ALTIVEC_REGNO
+ 1;
8962 /* Find lowest numbered live register. */
8963 for (i
= FIRST_ALTIVEC_REGNO
+ 20; i
<= LAST_ALTIVEC_REGNO
; ++i
)
8964 if (regs_ever_live
[i
])
8970 /* Return a 32-bit mask of the AltiVec registers we need to set in
8971 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
8972 the 32-bit word is 0. */
8975 compute_vrsave_mask ()
8977 unsigned int i
, mask
= 0;
8979 /* First, find out if we use _any_ altivec registers. */
8980 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
8981 if (regs_ever_live
[i
])
8982 mask
|= ALTIVEC_REG_BIT (i
);
8987 /* Next, add all registers that are call-clobbered. We do this
8988 because post-reload register optimizers such as regrename_optimize
8989 may choose to use them. They never change the register class
8990 chosen by reload, so cannot create new uses of altivec registers
8991 if there were none before, so the early exit above is safe. */
8992 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
8993 altivec registers not saved in the mask, which might well make the
8994 adjustments below more effective in eliding the save/restore of
8995 VRSAVE in small functions. */
8996 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
8997 if (call_used_regs
[i
])
8998 mask
|= ALTIVEC_REG_BIT (i
);
9000 /* Next, remove the argument registers from the set. These must
9001 be in the VRSAVE mask set by the caller, so we don't need to add
9002 them in again. More importantly, the mask we compute here is
9003 used to generate CLOBBERs in the set_vrsave insn, and we do not
9004 wish the argument registers to die. */
9005 for (i
= cfun
->args_info
.vregno
; i
>= ALTIVEC_ARG_MIN_REG
; --i
)
9006 mask
&= ~ALTIVEC_REG_BIT (i
);
9008 /* Similarly, remove the return value from the set. */
9011 diddle_return_value (is_altivec_return_reg
, &yes
);
9013 mask
&= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN
);
9020 is_altivec_return_reg (reg
, xyes
)
9024 bool *yes
= (bool *) xyes
;
9025 if (REGNO (reg
) == ALTIVEC_ARG_RETURN
)
9030 /* Calculate the stack information for the current function. This is
9031 complicated by having two separate calling sequences, the AIX calling
9032 sequence and the V.4 calling sequence.
9034 AIX (and Darwin/Mac OS X) stack frames look like:
9036 SP----> +---------------------------------------+
9037 | back chain to caller | 0 0
9038 +---------------------------------------+
9039 | saved CR | 4 8 (8-11)
9040 +---------------------------------------+
9042 +---------------------------------------+
9043 | reserved for compilers | 12 24
9044 +---------------------------------------+
9045 | reserved for binders | 16 32
9046 +---------------------------------------+
9047 | saved TOC pointer | 20 40
9048 +---------------------------------------+
9049 | Parameter save area (P) | 24 48
9050 +---------------------------------------+
9051 | Alloca space (A) | 24+P etc.
9052 +---------------------------------------+
9053 | Local variable space (L) | 24+P+A
9054 +---------------------------------------+
9055 | Float/int conversion temporary (X) | 24+P+A+L
9056 +---------------------------------------+
9057 | Save area for AltiVec registers (W) | 24+P+A+L+X
9058 +---------------------------------------+
9059 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
9060 +---------------------------------------+
9061 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
9062 +---------------------------------------+
9063 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
9064 +---------------------------------------+
9065 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
9066 +---------------------------------------+
9067 old SP->| back chain to caller's caller |
9068 +---------------------------------------+
9070 The required alignment for AIX configurations is two words (i.e., 8
9074 V.4 stack frames look like:
9076 SP----> +---------------------------------------+
9077 | back chain to caller | 0
9078 +---------------------------------------+
9079 | caller's saved LR | 4
9080 +---------------------------------------+
9081 | Parameter save area (P) | 8
9082 +---------------------------------------+
9083 | Alloca space (A) | 8+P
9084 +---------------------------------------+
9085 | Varargs save area (V) | 8+P+A
9086 +---------------------------------------+
9087 | Local variable space (L) | 8+P+A+V
9088 +---------------------------------------+
9089 | Float/int conversion temporary (X) | 8+P+A+V+L
9090 +---------------------------------------+
9091 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
9092 +---------------------------------------+
9093 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
9094 +---------------------------------------+
9095 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
9096 +---------------------------------------+
9097 | SPE: area for 64-bit GP registers |
9098 +---------------------------------------+
9099 | SPE alignment padding |
9100 +---------------------------------------+
9101 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
9102 +---------------------------------------+
9103 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
9104 +---------------------------------------+
9105 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
9106 +---------------------------------------+
9107 old SP->| back chain to caller's caller |
9108 +---------------------------------------+
9110 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
9111 given. (But note below and in sysv4.h that we require only 8 and
9112 may round up the size of our stack frame anyways. The historical
9113 reason is early versions of powerpc-linux which didn't properly
9114 align the stack at program startup. A happy side-effect is that
9115 -mno-eabi libraries can be used with -meabi programs.)
9117 The EABI configuration defaults to the V.4 layout, unless
9118 -mcall-aix is used, in which case the AIX layout is used. However,
9119 the stack alignment requirements may differ. If -mno-eabi is not
9120 given, the required stack alignment is 8 bytes; if -mno-eabi is
9121 given, the required alignment is 16 bytes. (But see V.4 comment
9124 #ifndef ABI_STACK_BOUNDARY
9125 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
9129 rs6000_stack_info ()
9131 static rs6000_stack_t info
, zero_info
;
9132 rs6000_stack_t
*info_ptr
= &info
;
9133 int reg_size
= TARGET_POWERPC64
? 8 : 4;
9134 enum rs6000_abi abi
;
9138 /* Zero all fields portably. */
9141 /* Select which calling sequence. */
9142 info_ptr
->abi
= abi
= DEFAULT_ABI
;
9144 /* Calculate which registers need to be saved & save area size. */
9145 info_ptr
->first_gp_reg_save
= first_reg_to_save ();
9146 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
9147 even if it currently looks like we won't. */
9148 if (((TARGET_TOC
&& TARGET_MINIMAL_TOC
)
9149 || (flag_pic
== 1 && abi
== ABI_V4
)
9150 || (flag_pic
&& abi
== ABI_DARWIN
))
9151 && info_ptr
->first_gp_reg_save
> RS6000_PIC_OFFSET_TABLE_REGNUM
)
9152 info_ptr
->gp_size
= reg_size
* (32 - RS6000_PIC_OFFSET_TABLE_REGNUM
);
9154 info_ptr
->gp_size
= reg_size
* (32 - info_ptr
->first_gp_reg_save
);
9156 /* For the SPE, we have an additional upper 32-bits on each GPR.
9157 Ideally we should save the entire 64-bits only when the upper
9158 half is used in SIMD instructions. Since we only record
9159 registers live (not the size they are used in), this proves
9160 difficult because we'd have to traverse the instruction chain at
9161 the right time, taking reload into account. This is a real pain,
9162 so we opt to save the GPRs in 64-bits always. Anyone overly
9163 concerned with frame size can fix this. ;-).
9165 So... since we save all GPRs (except the SP) in 64-bits, the
9166 traditional GP save area will be empty. */
9168 info_ptr
->gp_size
= 0;
9170 info_ptr
->first_fp_reg_save
= first_fp_reg_to_save ();
9171 info_ptr
->fp_size
= 8 * (64 - info_ptr
->first_fp_reg_save
);
9173 info_ptr
->first_altivec_reg_save
= first_altivec_reg_to_save ();
9174 info_ptr
->altivec_size
= 16 * (LAST_ALTIVEC_REGNO
+ 1
9175 - info_ptr
->first_altivec_reg_save
);
9177 /* Does this function call anything? */
9178 info_ptr
->calls_p
= (! current_function_is_leaf
9179 || cfun
->machine
->ra_needs_full_frame
);
9181 /* Determine if we need to save the link register. */
9182 if (rs6000_ra_ever_killed ()
9183 || (DEFAULT_ABI
== ABI_AIX
&& current_function_profile
)
9184 #ifdef TARGET_RELOCATABLE
9185 || (TARGET_RELOCATABLE
&& (get_pool_size () != 0))
9187 || (info_ptr
->first_fp_reg_save
!= 64
9188 && !FP_SAVE_INLINE (info_ptr
->first_fp_reg_save
))
9189 || info_ptr
->first_altivec_reg_save
<= LAST_ALTIVEC_REGNO
9190 || (abi
== ABI_V4
&& current_function_calls_alloca
)
9191 || (DEFAULT_ABI
== ABI_DARWIN
9193 && current_function_uses_pic_offset_table
)
9194 || info_ptr
->calls_p
)
9196 info_ptr
->lr_save_p
= 1;
9197 regs_ever_live
[LINK_REGISTER_REGNUM
] = 1;
9200 /* Determine if we need to save the condition code registers. */
9201 if (regs_ever_live
[CR2_REGNO
]
9202 || regs_ever_live
[CR3_REGNO
]
9203 || regs_ever_live
[CR4_REGNO
])
9205 info_ptr
->cr_save_p
= 1;
9207 info_ptr
->cr_size
= reg_size
;
9210 /* If the current function calls __builtin_eh_return, then we need
9211 to allocate stack space for registers that will hold data for
9212 the exception handler. */
9213 if (current_function_calls_eh_return
)
9216 for (i
= 0; EH_RETURN_DATA_REGNO (i
) != INVALID_REGNUM
; ++i
)
9219 /* SPE saves EH registers in 64-bits. */
9220 ehrd_size
= i
* (TARGET_SPE_ABI
? UNITS_PER_SPE_WORD
: UNITS_PER_WORD
);
9225 /* Determine various sizes. */
9226 info_ptr
->reg_size
= reg_size
;
9227 info_ptr
->fixed_size
= RS6000_SAVE_AREA
;
9228 info_ptr
->varargs_size
= RS6000_VARARGS_AREA
;
9229 info_ptr
->vars_size
= RS6000_ALIGN (get_frame_size (), 8);
9230 info_ptr
->parm_size
= RS6000_ALIGN (current_function_outgoing_args_size
,
9234 info_ptr
->spe_gp_size
= 8 * (32 - info_ptr
->first_gp_reg_save
);
9236 info_ptr
->spe_gp_size
= 0;
9238 if (TARGET_ALTIVEC_ABI
&& TARGET_ALTIVEC_VRSAVE
)
9240 info_ptr
->vrsave_mask
= compute_vrsave_mask ();
9241 info_ptr
->vrsave_size
= info_ptr
->vrsave_mask
? 4 : 0;
9245 info_ptr
->vrsave_mask
= 0;
9246 info_ptr
->vrsave_size
= 0;
9249 /* Calculate the offsets. */
9257 case ABI_AIX_NODESC
:
9259 info_ptr
->fp_save_offset
= - info_ptr
->fp_size
;
9260 info_ptr
->gp_save_offset
= info_ptr
->fp_save_offset
- info_ptr
->gp_size
;
9262 if (TARGET_ALTIVEC_ABI
)
9264 info_ptr
->vrsave_save_offset
9265 = info_ptr
->gp_save_offset
- info_ptr
->vrsave_size
;
9267 /* Align stack so vector save area is on a quadword boundary. */
9268 if (info_ptr
->altivec_size
!= 0)
9269 info_ptr
->altivec_padding_size
9270 = 16 - (-info_ptr
->vrsave_save_offset
% 16);
9272 info_ptr
->altivec_padding_size
= 0;
9274 info_ptr
->altivec_save_offset
9275 = info_ptr
->vrsave_save_offset
9276 - info_ptr
->altivec_padding_size
9277 - info_ptr
->altivec_size
;
9279 /* Adjust for AltiVec case. */
9280 info_ptr
->ehrd_offset
= info_ptr
->altivec_save_offset
- ehrd_size
;
9283 info_ptr
->ehrd_offset
= info_ptr
->gp_save_offset
- ehrd_size
;
9284 info_ptr
->cr_save_offset
= reg_size
; /* first word when 64-bit. */
9285 info_ptr
->lr_save_offset
= 2*reg_size
;
9289 info_ptr
->fp_save_offset
= - info_ptr
->fp_size
;
9290 info_ptr
->gp_save_offset
= info_ptr
->fp_save_offset
- info_ptr
->gp_size
;
9291 info_ptr
->cr_save_offset
= info_ptr
->gp_save_offset
- info_ptr
->cr_size
;
9295 /* Align stack so SPE GPR save area is aligned on a
9296 double-word boundary. */
9297 if (info_ptr
->spe_gp_size
!= 0)
9298 info_ptr
->spe_padding_size
9299 = 8 - (-info_ptr
->cr_save_offset
% 8);
9301 info_ptr
->spe_padding_size
= 0;
9303 info_ptr
->spe_gp_save_offset
9304 = info_ptr
->cr_save_offset
9305 - info_ptr
->spe_padding_size
9306 - info_ptr
->spe_gp_size
;
9308 /* Adjust for SPE case. */
9309 info_ptr
->toc_save_offset
9310 = info_ptr
->spe_gp_save_offset
- info_ptr
->toc_size
;
9312 else if (TARGET_ALTIVEC_ABI
)
9314 info_ptr
->vrsave_save_offset
9315 = info_ptr
->cr_save_offset
- info_ptr
->vrsave_size
;
9317 /* Align stack so vector save area is on a quadword boundary. */
9318 if (info_ptr
->altivec_size
!= 0)
9319 info_ptr
->altivec_padding_size
9320 = 16 - (-info_ptr
->vrsave_save_offset
% 16);
9322 info_ptr
->altivec_padding_size
= 0;
9324 info_ptr
->altivec_save_offset
9325 = info_ptr
->vrsave_save_offset
9326 - info_ptr
->altivec_padding_size
9327 - info_ptr
->altivec_size
;
9329 /* Adjust for AltiVec case. */
9330 info_ptr
->toc_save_offset
9331 = info_ptr
->altivec_save_offset
- info_ptr
->toc_size
;
9334 info_ptr
->toc_save_offset
= info_ptr
->cr_save_offset
- info_ptr
->toc_size
;
9335 info_ptr
->ehrd_offset
= info_ptr
->toc_save_offset
- ehrd_size
;
9336 info_ptr
->lr_save_offset
= reg_size
;
9340 info_ptr
->save_size
= RS6000_ALIGN (info_ptr
->fp_size
9342 + info_ptr
->altivec_size
9343 + info_ptr
->altivec_padding_size
9344 + info_ptr
->vrsave_size
9345 + info_ptr
->spe_gp_size
9346 + info_ptr
->spe_padding_size
9350 + info_ptr
->vrsave_size
9351 + info_ptr
->toc_size
,
9352 (TARGET_ALTIVEC_ABI
|| ABI_DARWIN
)
9355 total_raw_size
= (info_ptr
->vars_size
9356 + info_ptr
->parm_size
9357 + info_ptr
->save_size
9358 + info_ptr
->varargs_size
9359 + info_ptr
->fixed_size
);
9361 info_ptr
->total_size
=
9362 RS6000_ALIGN (total_raw_size
, ABI_STACK_BOUNDARY
/ BITS_PER_UNIT
);
9364 /* Determine if we need to allocate any stack frame:
9366 For AIX we need to push the stack if a frame pointer is needed
9367 (because the stack might be dynamically adjusted), if we are
9368 debugging, if we make calls, or if the sum of fp_save, gp_save,
9369 and local variables are more than the space needed to save all
9370 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
9371 + 18*8 = 288 (GPR13 reserved).
9373 For V.4 we don't have the stack cushion that AIX uses, but assume
9374 that the debugger can handle stackless frames. */
9376 if (info_ptr
->calls_p
)
9377 info_ptr
->push_p
= 1;
9379 else if (abi
== ABI_V4
)
9380 info_ptr
->push_p
= total_raw_size
> info_ptr
->fixed_size
;
9383 info_ptr
->push_p
= (frame_pointer_needed
9384 || (abi
!= ABI_DARWIN
&& write_symbols
!= NO_DEBUG
)
9385 || ((total_raw_size
- info_ptr
->fixed_size
)
9386 > (TARGET_32BIT
? 220 : 288)));
9388 /* Zero offsets if we're not saving those registers. */
9389 if (info_ptr
->fp_size
== 0)
9390 info_ptr
->fp_save_offset
= 0;
9392 if (info_ptr
->gp_size
== 0)
9393 info_ptr
->gp_save_offset
= 0;
9395 if (! TARGET_ALTIVEC_ABI
|| info_ptr
->altivec_size
== 0)
9396 info_ptr
->altivec_save_offset
= 0;
9398 if (! TARGET_ALTIVEC_ABI
|| info_ptr
->vrsave_mask
== 0)
9399 info_ptr
->vrsave_save_offset
= 0;
9401 if (! TARGET_SPE_ABI
|| info_ptr
->spe_gp_size
== 0)
9402 info_ptr
->spe_gp_save_offset
= 0;
9404 if (! info_ptr
->lr_save_p
)
9405 info_ptr
->lr_save_offset
= 0;
9407 if (! info_ptr
->cr_save_p
)
9408 info_ptr
->cr_save_offset
= 0;
9410 if (! info_ptr
->toc_save_p
)
9411 info_ptr
->toc_save_offset
= 0;
9417 debug_stack_info (info
)
9418 rs6000_stack_t
*info
;
9420 const char *abi_string
;
9423 info
= rs6000_stack_info ();
9425 fprintf (stderr
, "\nStack information for function %s:\n",
9426 ((current_function_decl
&& DECL_NAME (current_function_decl
))
9427 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl
))
9432 default: abi_string
= "Unknown"; break;
9433 case ABI_NONE
: abi_string
= "NONE"; break;
9435 case ABI_AIX_NODESC
: abi_string
= "AIX"; break;
9436 case ABI_DARWIN
: abi_string
= "Darwin"; break;
9437 case ABI_V4
: abi_string
= "V.4"; break;
9440 fprintf (stderr
, "\tABI = %5s\n", abi_string
);
9442 if (TARGET_ALTIVEC_ABI
)
9443 fprintf (stderr
, "\tALTIVEC ABI extensions enabled.\n");
9446 fprintf (stderr
, "\tSPE ABI extensions enabled.\n");
9448 if (info
->first_gp_reg_save
!= 32)
9449 fprintf (stderr
, "\tfirst_gp_reg_save = %5d\n", info
->first_gp_reg_save
);
9451 if (info
->first_fp_reg_save
!= 64)
9452 fprintf (stderr
, "\tfirst_fp_reg_save = %5d\n", info
->first_fp_reg_save
);
9454 if (info
->first_altivec_reg_save
<= LAST_ALTIVEC_REGNO
)
9455 fprintf (stderr
, "\tfirst_altivec_reg_save = %5d\n",
9456 info
->first_altivec_reg_save
);
9458 if (info
->lr_save_p
)
9459 fprintf (stderr
, "\tlr_save_p = %5d\n", info
->lr_save_p
);
9461 if (info
->cr_save_p
)
9462 fprintf (stderr
, "\tcr_save_p = %5d\n", info
->cr_save_p
);
9464 if (info
->toc_save_p
)
9465 fprintf (stderr
, "\ttoc_save_p = %5d\n", info
->toc_save_p
);
9467 if (info
->vrsave_mask
)
9468 fprintf (stderr
, "\tvrsave_mask = 0x%x\n", info
->vrsave_mask
);
9471 fprintf (stderr
, "\tpush_p = %5d\n", info
->push_p
);
9474 fprintf (stderr
, "\tcalls_p = %5d\n", info
->calls_p
);
9476 if (info
->gp_save_offset
)
9477 fprintf (stderr
, "\tgp_save_offset = %5d\n", info
->gp_save_offset
);
9479 if (info
->fp_save_offset
)
9480 fprintf (stderr
, "\tfp_save_offset = %5d\n", info
->fp_save_offset
);
9482 if (info
->altivec_save_offset
)
9483 fprintf (stderr
, "\taltivec_save_offset = %5d\n",
9484 info
->altivec_save_offset
);
9486 if (info
->spe_gp_save_offset
)
9487 fprintf (stderr
, "\tspe_gp_save_offset = %5d\n",
9488 info
->spe_gp_save_offset
);
9490 if (info
->vrsave_save_offset
)
9491 fprintf (stderr
, "\tvrsave_save_offset = %5d\n",
9492 info
->vrsave_save_offset
);
9494 if (info
->lr_save_offset
)
9495 fprintf (stderr
, "\tlr_save_offset = %5d\n", info
->lr_save_offset
);
9497 if (info
->cr_save_offset
)
9498 fprintf (stderr
, "\tcr_save_offset = %5d\n", info
->cr_save_offset
);
9500 if (info
->toc_save_offset
)
9501 fprintf (stderr
, "\ttoc_save_offset = %5d\n", info
->toc_save_offset
);
9503 if (info
->varargs_save_offset
)
9504 fprintf (stderr
, "\tvarargs_save_offset = %5d\n", info
->varargs_save_offset
);
9506 if (info
->total_size
)
9507 fprintf (stderr
, "\ttotal_size = %5d\n", info
->total_size
);
9509 if (info
->varargs_size
)
9510 fprintf (stderr
, "\tvarargs_size = %5d\n", info
->varargs_size
);
9512 if (info
->vars_size
)
9513 fprintf (stderr
, "\tvars_size = %5d\n", info
->vars_size
);
9515 if (info
->parm_size
)
9516 fprintf (stderr
, "\tparm_size = %5d\n", info
->parm_size
);
9518 if (info
->fixed_size
)
9519 fprintf (stderr
, "\tfixed_size = %5d\n", info
->fixed_size
);
9522 fprintf (stderr
, "\tgp_size = %5d\n", info
->gp_size
);
9524 if (info
->spe_gp_size
)
9525 fprintf (stderr
, "\tspe_gp_size = %5d\n", info
->spe_gp_size
);
9528 fprintf (stderr
, "\tfp_size = %5d\n", info
->fp_size
);
9530 if (info
->altivec_size
)
9531 fprintf (stderr
, "\taltivec_size = %5d\n", info
->altivec_size
);
9533 if (info
->vrsave_size
)
9534 fprintf (stderr
, "\tvrsave_size = %5d\n", info
->vrsave_size
);
9536 if (info
->altivec_padding_size
)
9537 fprintf (stderr
, "\taltivec_padding_size= %5d\n",
9538 info
->altivec_padding_size
);
9540 if (info
->spe_padding_size
)
9541 fprintf (stderr
, "\tspe_padding_size = %5d\n",
9542 info
->spe_padding_size
);
9545 fprintf (stderr
, "\tlr_size = %5d\n", info
->lr_size
);
9548 fprintf (stderr
, "\tcr_size = %5d\n", info
->cr_size
);
9551 fprintf (stderr
, "\ttoc_size = %5d\n", info
->toc_size
);
9553 if (info
->save_size
)
9554 fprintf (stderr
, "\tsave_size = %5d\n", info
->save_size
);
9556 if (info
->reg_size
!= 4)
9557 fprintf (stderr
, "\treg_size = %5d\n", info
->reg_size
);
9559 fprintf (stderr
, "\n");
9563 rs6000_return_addr (count
, frame
)
9567 /* Currently we don't optimize very well between prolog and body
9568 code and for PIC code the code can be actually quite bad, so
9569 don't try to be too clever here. */
9570 if (count
!= 0 || flag_pic
!= 0)
9572 cfun
->machine
->ra_needs_full_frame
= 1;
9579 plus_constant (copy_to_reg
9580 (gen_rtx_MEM (Pmode
,
9581 memory_address (Pmode
, frame
))),
9582 RETURN_ADDRESS_OFFSET
)));
9585 return get_hard_reg_initial_val (Pmode
, LINK_REGISTER_REGNUM
);
9588 /* Say whether a function is a candidate for sibcall handling or not.
9589 We do not allow indirect calls to be optimized into sibling calls.
9590 Also, we can't do it if there are any vector parameters; there's
9591 nowhere to put the VRsave code so it works; note that functions with
9592 vector parameters are required to have a prototype, so the argument
9593 type info must be available here. (The tail recursion case can work
9594 with vector parameters, but there's no way to distinguish here.) */
9596 rs6000_function_ok_for_sibcall (decl
, exp
)
9598 tree exp ATTRIBUTE_UNUSED
;
9603 if (TARGET_ALTIVEC_VRSAVE
)
9605 for (type
= TYPE_ARG_TYPES (TREE_TYPE (decl
));
9606 type
; type
= TREE_CHAIN (type
))
9608 if (TREE_CODE (TREE_VALUE (type
)) == VECTOR_TYPE
)
9612 if (DEFAULT_ABI
== ABI_DARWIN
9613 || (*targetm
.binds_local_p
) (decl
))
9615 tree attr_list
= TYPE_ATTRIBUTES (TREE_TYPE (decl
));
9617 if (!lookup_attribute ("longcall", attr_list
)
9618 || lookup_attribute ("shortcall", attr_list
))
9626 rs6000_ra_ever_killed ()
9632 /* Irritatingly, there are two kinds of thunks -- those created with
9633 TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
9634 through the regular part of the compiler. This is a very hacky
9635 way to tell them apart. */
9636 if (current_function_is_thunk
&& !no_new_pseudos
)
9639 /* regs_ever_live has LR marked as used if any sibcalls are present,
9640 but this should not force saving and restoring in the
9641 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
9642 clobbers LR, so that is inappropriate. */
9644 /* Also, the prologue can generate a store into LR that
9645 doesn't really count, like this:
9648 bcl to set PIC register
9652 When we're called from the epilogue, we need to avoid counting
9655 push_topmost_sequence ();
9657 pop_topmost_sequence ();
9658 reg
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
9660 for (insn
= NEXT_INSN (top
); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
9664 if (FIND_REG_INC_NOTE (insn
, reg
))
9666 else if (GET_CODE (insn
) == CALL_INSN
9667 && !SIBLING_CALL_P (insn
))
9669 else if (set_of (reg
, insn
) != NULL_RTX
9670 && !prologue_epilogue_contains (insn
))
9677 /* Add a REG_MAYBE_DEAD note to the insn. */
9679 rs6000_maybe_dead (insn
)
9682 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
,
9687 /* Emit instructions needed to load the TOC register.
9688 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9689 a constant pool; or for SVR4 -fpic. */
9692 rs6000_emit_load_toc_table (fromprolog
)
9696 dest
= gen_rtx_REG (Pmode
, RS6000_PIC_OFFSET_TABLE_REGNUM
);
9698 if (TARGET_ELF
&& DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1)
9700 rtx temp
= (fromprolog
9701 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
9702 : gen_reg_rtx (Pmode
));
9703 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp
)));
9704 rs6000_maybe_dead (emit_move_insn (dest
, temp
));
9706 else if (TARGET_ELF
&& DEFAULT_ABI
!= ABI_AIX
&& flag_pic
== 2)
9709 rtx tempLR
= (fromprolog
9710 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
9711 : gen_reg_rtx (Pmode
));
9712 rtx temp0
= (fromprolog
9713 ? gen_rtx_REG (Pmode
, 0)
9714 : gen_reg_rtx (Pmode
));
9717 /* possibly create the toc section */
9718 if (! toc_initialized
)
9721 function_section (current_function_decl
);
9728 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCF", rs6000_pic_labelno
);
9729 symF
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9731 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCL", rs6000_pic_labelno
);
9732 symL
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9734 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR
,
9736 rs6000_maybe_dead (emit_move_insn (dest
, tempLR
));
9737 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0
, dest
,
9744 static int reload_toc_labelno
= 0;
9746 tocsym
= gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
);
9748 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCG", reload_toc_labelno
++);
9749 symF
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9751 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR
,
9754 rs6000_maybe_dead (emit_move_insn (dest
, tempLR
));
9755 rs6000_maybe_dead (emit_move_insn (temp0
,
9756 gen_rtx_MEM (Pmode
, dest
)));
9758 rs6000_maybe_dead (emit_insn (gen_addsi3 (dest
, temp0
, dest
)));
9760 else if (TARGET_ELF
&& !TARGET_AIX
&& flag_pic
== 0 && TARGET_MINIMAL_TOC
)
9762 /* This is for AIX code running in non-PIC ELF32. */
9765 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCTOC", 1);
9766 realsym
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9768 rs6000_maybe_dead (emit_insn (gen_elf_high (dest
, realsym
)));
9769 rs6000_maybe_dead (emit_insn (gen_elf_low (dest
, dest
, realsym
)));
9771 else if (DEFAULT_ABI
== ABI_AIX
)
9774 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest
)));
9776 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest
)));
/* Return the alias set used for TOC references, creating it lazily on
   the first call.  NOTE(review): return type not visible in the
   mangled source; `int` matches the cached value -- confirm against
   rs6000-protos.h.  */
int
get_TOC_alias_set ()
{
  static int set = -1;
  if (set == -1)
    set = new_alias_set ();
  return set;
}
/* This returns nonzero if the current function uses the TOC.  This is
   determined by the presence of (unspec ... 7), which is generated by
   the various load_toc_* patterns.  */
9800 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
9803 rtx pat
= PATTERN (insn
);
9806 if (GET_CODE (pat
) == PARALLEL
)
9807 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
9808 if (GET_CODE (XVECEXP (PATTERN (insn
), 0, i
)) == UNSPEC
9809 && XINT (XVECEXP (PATTERN (insn
), 0, i
), 1) == 7)
9816 create_TOC_reference (symbol
)
9819 return gen_rtx_PLUS (Pmode
,
9820 gen_rtx_REG (Pmode
, TOC_REGISTER
),
9821 gen_rtx_CONST (Pmode
,
9822 gen_rtx_MINUS (Pmode
, symbol
,
9823 gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
))));
9827 /* __throw will restore its own return address to be the same as the
9828 return address of the function that the throw is being made to.
9829 This is unfortunate, because we want to check the original
9830 return address to see if we need to restore the TOC.
9831 So we have to squirrel it away here.
9832 This is used only in compiling __throw and __rethrow.
9834 Most of this code should be removed by CSE. */
9835 static rtx insn_after_throw
;
9837 /* This does the saving... */
9839 rs6000_aix_emit_builtin_unwind_init ()
9842 rtx stack_top
= gen_reg_rtx (Pmode
);
9843 rtx opcode_addr
= gen_reg_rtx (Pmode
);
9845 insn_after_throw
= gen_reg_rtx (SImode
);
9847 mem
= gen_rtx_MEM (Pmode
, hard_frame_pointer_rtx
);
9848 emit_move_insn (stack_top
, mem
);
9850 mem
= gen_rtx_MEM (Pmode
,
9851 gen_rtx_PLUS (Pmode
, stack_top
,
9852 GEN_INT (2 * GET_MODE_SIZE (Pmode
))));
9853 emit_move_insn (opcode_addr
, mem
);
9854 emit_move_insn (insn_after_throw
, gen_rtx_MEM (SImode
, opcode_addr
));
9857 /* Emit insns to _restore_ the TOC register, at runtime (specifically
9858 in _eh.o). Only used on AIX.
9860 The idea is that on AIX, function calls look like this:
9861 bl somefunction-trampoline
9865 somefunction-trampoline:
9867 ... load function address in the count register ...
9869 or like this, if the linker determines that this is not a cross-module call
9870 and so the TOC need not be restored:
9873 or like this, if the compiler could determine that this is not a
9876 now, the tricky bit here is that register 2 is saved and restored
9877 by the _linker_, so we can't readily generate debugging information
9878 for it. So we need to go back up the call chain looking at the
9879 insns at return addresses to see which calls saved the TOC register
9880 and so see where it gets restored from.
9882 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
9883 just before the actual epilogue.
9885 On the bright side, this incurs no space or time overhead unless an
9886 exception is thrown, except for the extra code in libgcc.a.
9888 The parameter STACKSIZE is a register containing (at runtime)
9889 the amount to be popped off the stack in addition to the stack frame
9890 of this routine (which will be __throw or __rethrow, and so is
9891 guaranteed to have a stack frame). */
9894 rs6000_emit_eh_toc_restore (stacksize
)
9898 rtx bottom_of_stack
= gen_reg_rtx (Pmode
);
9899 rtx tocompare
= gen_reg_rtx (SImode
);
9900 rtx opcode
= gen_reg_rtx (SImode
);
9901 rtx opcode_addr
= gen_reg_rtx (Pmode
);
9903 rtx loop_start
= gen_label_rtx ();
9904 rtx no_toc_restore_needed
= gen_label_rtx ();
9905 rtx loop_exit
= gen_label_rtx ();
9907 mem
= gen_rtx_MEM (Pmode
, hard_frame_pointer_rtx
);
9908 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
9909 emit_move_insn (bottom_of_stack
, mem
);
9911 top_of_stack
= expand_binop (Pmode
, add_optab
,
9912 bottom_of_stack
, stacksize
,
9913 NULL_RTX
, 1, OPTAB_WIDEN
);
9915 emit_move_insn (tocompare
, gen_int_mode (TARGET_32BIT
? 0x80410014
9916 : 0xE8410028, SImode
));
9918 if (insn_after_throw
== NULL_RTX
)
9920 emit_move_insn (opcode
, insn_after_throw
);
9922 emit_note (NULL
, NOTE_INSN_LOOP_BEG
);
9923 emit_label (loop_start
);
9925 do_compare_rtx_and_jump (opcode
, tocompare
, NE
, 1,
9926 SImode
, NULL_RTX
, NULL_RTX
,
9927 no_toc_restore_needed
);
9929 mem
= gen_rtx_MEM (Pmode
,
9930 gen_rtx_PLUS (Pmode
, bottom_of_stack
,
9931 GEN_INT (5 * GET_MODE_SIZE (Pmode
))));
9932 emit_move_insn (gen_rtx_REG (Pmode
, 2), mem
);
9934 emit_label (no_toc_restore_needed
);
9935 do_compare_rtx_and_jump (top_of_stack
, bottom_of_stack
, EQ
, 1,
9936 Pmode
, NULL_RTX
, NULL_RTX
,
9939 mem
= gen_rtx_MEM (Pmode
, bottom_of_stack
);
9940 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
9941 emit_move_insn (bottom_of_stack
, mem
);
9943 mem
= gen_rtx_MEM (Pmode
,
9944 gen_rtx_PLUS (Pmode
, bottom_of_stack
,
9945 GEN_INT (2 * GET_MODE_SIZE (Pmode
))));
9946 emit_move_insn (opcode_addr
, mem
);
9947 emit_move_insn (opcode
, gen_rtx_MEM (SImode
, opcode_addr
));
9949 emit_note (NULL
, NOTE_INSN_LOOP_CONT
);
9950 emit_jump (loop_start
);
9951 emit_note (NULL
, NOTE_INSN_LOOP_END
);
9952 emit_label (loop_exit
);
9954 #endif /* TARGET_AIX */
9956 /* This ties together stack memory (MEM with an alias set of
9957 rs6000_sr_alias_set) and the change to the stack pointer. */
9960 rs6000_emit_stack_tie ()
9962 rtx mem
= gen_rtx_MEM (BLKmode
, gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
));
9964 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
9965 emit_insn (gen_stack_tie (mem
));
9968 /* Emit the correct code for allocating stack space, as insns.
9969 If COPY_R12, make sure a copy of the old frame is left in r12.
9970 The generated code may use hard register 0 as a temporary. */
9973 rs6000_emit_allocate_stack (size
, copy_r12
)
9978 rtx stack_reg
= gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
9979 rtx tmp_reg
= gen_rtx_REG (Pmode
, 0);
9980 rtx todec
= GEN_INT (-size
);
9982 if (current_function_limit_stack
)
9984 if (REG_P (stack_limit_rtx
)
9985 && REGNO (stack_limit_rtx
) > 1
9986 && REGNO (stack_limit_rtx
) <= 31)
9988 emit_insn (TARGET_32BIT
9989 ? gen_addsi3 (tmp_reg
,
9992 : gen_adddi3 (tmp_reg
,
9996 emit_insn (gen_cond_trap (LTU
, stack_reg
, tmp_reg
,
9999 else if (GET_CODE (stack_limit_rtx
) == SYMBOL_REF
10001 && DEFAULT_ABI
== ABI_V4
)
10003 rtx toload
= gen_rtx_CONST (VOIDmode
,
10004 gen_rtx_PLUS (Pmode
,
10008 emit_insn (gen_elf_high (tmp_reg
, toload
));
10009 emit_insn (gen_elf_low (tmp_reg
, tmp_reg
, toload
));
10010 emit_insn (gen_cond_trap (LTU
, stack_reg
, tmp_reg
,
10014 warning ("stack limit expression is not supported");
10017 if (copy_r12
|| ! TARGET_UPDATE
)
10018 emit_move_insn (gen_rtx_REG (Pmode
, 12), stack_reg
);
10024 /* Need a note here so that try_split doesn't get confused. */
10025 if (get_last_insn() == NULL_RTX
)
10026 emit_note (0, NOTE_INSN_DELETED
);
10027 insn
= emit_move_insn (tmp_reg
, todec
);
10028 try_split (PATTERN (insn
), insn
, 0);
10032 insn
= emit_insn (TARGET_32BIT
10033 ? gen_movsi_update (stack_reg
, stack_reg
,
10035 : gen_movdi_update (stack_reg
, stack_reg
,
10036 todec
, stack_reg
));
10040 insn
= emit_insn (TARGET_32BIT
10041 ? gen_addsi3 (stack_reg
, stack_reg
, todec
)
10042 : gen_adddi3 (stack_reg
, stack_reg
, todec
));
10043 emit_move_insn (gen_rtx_MEM (Pmode
, stack_reg
),
10044 gen_rtx_REG (Pmode
, 12));
10047 RTX_FRAME_RELATED_P (insn
) = 1;
10049 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
10050 gen_rtx_SET (VOIDmode
, stack_reg
,
10051 gen_rtx_PLUS (Pmode
, stack_reg
,
10056 /* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
10059 (mem (plus (blah) (regXX)))
10063 (mem (plus (blah) (const VALUE_OF_REGXX))). */
10066 altivec_frame_fixup (insn
, reg
, val
)
10072 real
= copy_rtx (PATTERN (insn
));
10074 real
= replace_rtx (real
, reg
, GEN_INT (val
));
10076 RTX_FRAME_RELATED_P (insn
) = 1;
10077 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
10082 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
10083 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
10084 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
10085 deduce these equivalences by itself so it wasn't necessary to hold
10086 its hand so much. */
10089 rs6000_frame_related (insn
, reg
, val
, reg2
, rreg
)
10098 /* copy_rtx will not make unique copies of registers, so we need to
10099 ensure we don't have unwanted sharing here. */
10101 reg
= gen_raw_REG (GET_MODE (reg
), REGNO (reg
));
10104 reg
= gen_raw_REG (GET_MODE (reg
), REGNO (reg
));
10106 real
= copy_rtx (PATTERN (insn
));
10108 if (reg2
!= NULL_RTX
)
10109 real
= replace_rtx (real
, reg2
, rreg
);
10111 real
= replace_rtx (real
, reg
,
10112 gen_rtx_PLUS (Pmode
, gen_rtx_REG (Pmode
,
10113 STACK_POINTER_REGNUM
),
10116 /* We expect that 'real' is either a SET or a PARALLEL containing
10117 SETs (and possibly other stuff). In a PARALLEL, all the SETs
10118 are important so they all have to be marked RTX_FRAME_RELATED_P. */
10120 if (GET_CODE (real
) == SET
)
10124 temp
= simplify_rtx (SET_SRC (set
));
10126 SET_SRC (set
) = temp
;
10127 temp
= simplify_rtx (SET_DEST (set
));
10129 SET_DEST (set
) = temp
;
10130 if (GET_CODE (SET_DEST (set
)) == MEM
)
10132 temp
= simplify_rtx (XEXP (SET_DEST (set
), 0));
10134 XEXP (SET_DEST (set
), 0) = temp
;
10137 else if (GET_CODE (real
) == PARALLEL
)
10140 for (i
= 0; i
< XVECLEN (real
, 0); i
++)
10141 if (GET_CODE (XVECEXP (real
, 0, i
)) == SET
)
10143 rtx set
= XVECEXP (real
, 0, i
);
10145 temp
= simplify_rtx (SET_SRC (set
));
10147 SET_SRC (set
) = temp
;
10148 temp
= simplify_rtx (SET_DEST (set
));
10150 SET_DEST (set
) = temp
;
10151 if (GET_CODE (SET_DEST (set
)) == MEM
)
10153 temp
= simplify_rtx (XEXP (SET_DEST (set
), 0));
10155 XEXP (SET_DEST (set
), 0) = temp
;
10157 RTX_FRAME_RELATED_P (set
) = 1;
10163 RTX_FRAME_RELATED_P (insn
) = 1;
10164 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
10169 /* Returns an insn that has a vrsave set operation with the
10170 appropriate CLOBBERs. */
10173 generate_set_vrsave (reg
, info
, epiloguep
)
10175 rs6000_stack_t
*info
;
10179 rtx insn
, clobs
[TOTAL_ALTIVEC_REGS
+ 1];
10180 rtx vrsave
= gen_rtx_REG (SImode
, VRSAVE_REGNO
);
10183 = gen_rtx_SET (VOIDmode
,
10185 gen_rtx_UNSPEC_VOLATILE (SImode
,
10186 gen_rtvec (2, reg
, vrsave
),
10191 /* We need to clobber the registers in the mask so the scheduler
10192 does not move sets to VRSAVE before sets of AltiVec registers.
10194 However, if the function receives nonlocal gotos, reload will set
10195 all call saved registers live. We will end up with:
10197 (set (reg 999) (mem))
10198 (parallel [ (set (reg vrsave) (unspec blah))
10199 (clobber (reg 999))])
10201 The clobber will cause the store into reg 999 to be dead, and
10202 flow will attempt to delete an epilogue insn. In this case, we
10203 need an unspec use/set of the register. */
10205 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
10206 if (info
->vrsave_mask
!= 0 && ALTIVEC_REG_BIT (i
) != 0)
10208 if (!epiloguep
|| call_used_regs
[i
])
10209 clobs
[nclobs
++] = gen_rtx_CLOBBER (VOIDmode
,
10210 gen_rtx_REG (V4SImode
, i
));
10213 rtx reg
= gen_rtx_REG (V4SImode
, i
);
10216 = gen_rtx_SET (VOIDmode
,
10218 gen_rtx_UNSPEC (V4SImode
,
10219 gen_rtvec (1, reg
), 27));
10223 insn
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (nclobs
));
10225 for (i
= 0; i
< nclobs
; ++i
)
10226 XVECEXP (insn
, 0, i
) = clobs
[i
];
10231 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
10232 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
10235 emit_frame_save (frame_reg
, frame_ptr
, mode
, regno
, offset
, total_size
)
10238 enum machine_mode mode
;
10239 unsigned int regno
;
10243 rtx reg
, offset_rtx
, insn
, mem
, addr
, int_rtx
;
10244 rtx replacea
, replaceb
;
10246 int_rtx
= GEN_INT (offset
);
10248 /* Some cases that need register indexed addressing. */
10249 if ((TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
10251 && SPE_VECTOR_MODE (mode
)
10252 && !SPE_CONST_OFFSET_OK (offset
)))
10254 /* Whomever calls us must make sure r11 is available in the
10255 flow path of instructions in the prologue. */
10256 offset_rtx
= gen_rtx_REG (Pmode
, 11);
10257 emit_move_insn (offset_rtx
, int_rtx
);
10259 replacea
= offset_rtx
;
10260 replaceb
= int_rtx
;
10264 offset_rtx
= int_rtx
;
10265 replacea
= NULL_RTX
;
10266 replaceb
= NULL_RTX
;
10269 reg
= gen_rtx_REG (mode
, regno
);
10270 addr
= gen_rtx_PLUS (Pmode
, frame_reg
, offset_rtx
);
10271 mem
= gen_rtx_MEM (mode
, addr
);
10272 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10274 insn
= emit_move_insn (mem
, reg
);
10276 rs6000_frame_related (insn
, frame_ptr
, total_size
, replacea
, replaceb
);
10279 /* Emit an offset memory reference suitable for a frame store, while
10280 converting to a valid addressing mode. */
10283 gen_frame_mem_offset (mode
, reg
, offset
)
10284 enum machine_mode mode
;
10288 rtx int_rtx
, offset_rtx
;
10290 int_rtx
= GEN_INT (offset
);
10292 if (TARGET_SPE_ABI
&& SPE_VECTOR_MODE (mode
))
10294 offset_rtx
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
10295 emit_move_insn (offset_rtx
, int_rtx
);
10298 offset_rtx
= int_rtx
;
10300 return gen_rtx_MEM (mode
, gen_rtx_PLUS (Pmode
, reg
, offset_rtx
));
10303 /* Emit function prologue as insns. */
10306 rs6000_emit_prologue ()
10308 rs6000_stack_t
*info
= rs6000_stack_info ();
10309 enum machine_mode reg_mode
= TARGET_POWERPC64
? DImode
: SImode
;
10310 int reg_size
= TARGET_POWERPC64
? 8 : 4;
10311 rtx sp_reg_rtx
= gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
10312 rtx frame_ptr_rtx
= gen_rtx_REG (Pmode
, 12);
10313 rtx frame_reg_rtx
= sp_reg_rtx
;
10314 rtx cr_save_rtx
= NULL
;
10316 int saving_FPRs_inline
;
10317 int using_store_multiple
;
10318 HOST_WIDE_INT sp_offset
= 0;
10320 if (TARGET_SPE_ABI
)
10322 reg_mode
= V2SImode
;
10326 using_store_multiple
= (TARGET_MULTIPLE
&& ! TARGET_POWERPC64
10328 && info
->first_gp_reg_save
< 31);
10329 saving_FPRs_inline
= (info
->first_fp_reg_save
== 64
10330 || FP_SAVE_INLINE (info
->first_fp_reg_save
));
10332 /* For V.4, update stack before we do any saving and set back pointer. */
10333 if (info
->push_p
&& DEFAULT_ABI
== ABI_V4
)
10335 if (info
->total_size
< 32767)
10336 sp_offset
= info
->total_size
;
10338 frame_reg_rtx
= frame_ptr_rtx
;
10339 rs6000_emit_allocate_stack (info
->total_size
,
10340 (frame_reg_rtx
!= sp_reg_rtx
10341 && (info
->cr_save_p
10343 || info
->first_fp_reg_save
< 64
10344 || info
->first_gp_reg_save
< 32
10346 if (frame_reg_rtx
!= sp_reg_rtx
)
10347 rs6000_emit_stack_tie ();
10350 /* Save AltiVec registers if needed. */
10351 if (TARGET_ALTIVEC_ABI
&& info
->altivec_size
!= 0)
10355 /* There should be a non inline version of this, for when we
10356 are saving lots of vector registers. */
10357 for (i
= info
->first_altivec_reg_save
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
10358 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
10360 rtx areg
, savereg
, mem
;
10363 offset
= info
->altivec_save_offset
+ sp_offset
10364 + 16 * (i
- info
->first_altivec_reg_save
);
10366 savereg
= gen_rtx_REG (V4SImode
, i
);
10368 areg
= gen_rtx_REG (Pmode
, 0);
10369 emit_move_insn (areg
, GEN_INT (offset
));
10371 /* AltiVec addressing mode is [reg+reg]. */
10372 mem
= gen_rtx_MEM (V4SImode
,
10373 gen_rtx_PLUS (Pmode
, frame_reg_rtx
, areg
));
10375 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10377 insn
= emit_move_insn (mem
, savereg
);
10379 altivec_frame_fixup (insn
, areg
, offset
);
10383 /* VRSAVE is a bit vector representing which AltiVec registers
10384 are used. The OS uses this to determine which vector
10385 registers to save on a context switch. We need to save
10386 VRSAVE on the stack frame, add whatever AltiVec registers we
10387 used in this function, and do the corresponding magic in the
10390 if (TARGET_ALTIVEC
&& info
->vrsave_mask
!= 0)
10392 rtx reg
, mem
, vrsave
;
10395 /* Get VRSAVE onto a GPR. */
10396 reg
= gen_rtx_REG (SImode
, 12);
10397 vrsave
= gen_rtx_REG (SImode
, VRSAVE_REGNO
);
10399 emit_insn (gen_get_vrsave_internal (reg
));
10401 emit_insn (gen_rtx_SET (VOIDmode
, reg
, vrsave
));
10404 offset
= info
->vrsave_save_offset
+ sp_offset
;
10406 = gen_rtx_MEM (SImode
,
10407 gen_rtx_PLUS (Pmode
, frame_reg_rtx
, GEN_INT (offset
)));
10408 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10409 insn
= emit_move_insn (mem
, reg
);
10411 /* Include the registers in the mask. */
10412 emit_insn (gen_iorsi3 (reg
, reg
, GEN_INT ((int) info
->vrsave_mask
)));
10414 insn
= emit_insn (generate_set_vrsave (reg
, info
, 0));
10417 /* If we use the link register, get it into r0. */
10418 if (info
->lr_save_p
)
10419 emit_move_insn (gen_rtx_REG (Pmode
, 0),
10420 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
10422 /* If we need to save CR, put it into r12. */
10423 if (info
->cr_save_p
&& frame_reg_rtx
!= frame_ptr_rtx
)
10425 cr_save_rtx
= gen_rtx_REG (SImode
, 12);
10426 emit_insn (gen_movesi_from_cr (cr_save_rtx
));
10429 /* Do any required saving of fpr's. If only one or two to save, do
10430 it ourselves. Otherwise, call function. */
10431 if (saving_FPRs_inline
)
10434 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
10435 if ((regs_ever_live
[info
->first_fp_reg_save
+i
]
10436 && ! call_used_regs
[info
->first_fp_reg_save
+i
]))
10437 emit_frame_save (frame_reg_rtx
, frame_ptr_rtx
, DFmode
,
10438 info
->first_fp_reg_save
+ i
,
10439 info
->fp_save_offset
+ sp_offset
+ 8 * i
,
10442 else if (info
->first_fp_reg_save
!= 64)
10446 const char *alloc_rname
;
10448 p
= rtvec_alloc (2 + 64 - info
->first_fp_reg_save
);
10450 RTVEC_ELT (p
, 0) = gen_rtx_CLOBBER (VOIDmode
,
10451 gen_rtx_REG (Pmode
,
10452 LINK_REGISTER_REGNUM
));
10453 sprintf (rname
, "%s%d%s", SAVE_FP_PREFIX
,
10454 info
->first_fp_reg_save
- 32, SAVE_FP_SUFFIX
);
10455 alloc_rname
= ggc_strdup (rname
);
10456 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
,
10457 gen_rtx_SYMBOL_REF (Pmode
,
10459 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
10461 rtx addr
, reg
, mem
;
10462 reg
= gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
);
10463 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10464 GEN_INT (info
->fp_save_offset
10465 + sp_offset
+ 8*i
));
10466 mem
= gen_rtx_MEM (DFmode
, addr
);
10467 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10469 RTVEC_ELT (p
, i
+ 2) = gen_rtx_SET (VOIDmode
, mem
, reg
);
10471 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
10472 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10473 NULL_RTX
, NULL_RTX
);
10476 /* Save GPRs. This is done as a PARALLEL if we are using
10477 the store-multiple instructions. */
10478 if (using_store_multiple
)
10482 p
= rtvec_alloc (32 - info
->first_gp_reg_save
);
10483 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
10485 rtx addr
, reg
, mem
;
10486 reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
10487 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10488 GEN_INT (info
->gp_save_offset
10491 mem
= gen_rtx_MEM (reg_mode
, addr
);
10492 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10494 RTVEC_ELT (p
, i
) = gen_rtx_SET (VOIDmode
, mem
, reg
);
10496 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
10497 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10498 NULL_RTX
, NULL_RTX
);
10503 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
10504 if ((regs_ever_live
[info
->first_gp_reg_save
+i
]
10505 && ! call_used_regs
[info
->first_gp_reg_save
+i
])
10506 || (i
+info
->first_gp_reg_save
== RS6000_PIC_OFFSET_TABLE_REGNUM
10507 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
!= 0)
10508 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))))
10510 rtx addr
, reg
, mem
;
10511 reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
10513 if (TARGET_SPE_ABI
)
10515 int offset
= info
->spe_gp_save_offset
+ sp_offset
+ 8 * i
;
10518 if (!SPE_CONST_OFFSET_OK (offset
))
10520 b
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
10521 emit_move_insn (b
, GEN_INT (offset
));
10524 b
= GEN_INT (offset
);
10526 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, b
);
10527 mem
= gen_rtx_MEM (V2SImode
, addr
);
10528 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10529 insn
= emit_move_insn (mem
, reg
);
10531 if (GET_CODE (b
) == CONST_INT
)
10532 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10533 NULL_RTX
, NULL_RTX
);
10535 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10536 b
, GEN_INT (offset
));
10540 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10541 GEN_INT (info
->gp_save_offset
10544 mem
= gen_rtx_MEM (reg_mode
, addr
);
10545 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10547 insn
= emit_move_insn (mem
, reg
);
10548 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10549 NULL_RTX
, NULL_RTX
);
10554 /* ??? There's no need to emit actual instructions here, but it's the
10555 easiest way to get the frame unwind information emitted. */
10556 if (current_function_calls_eh_return
)
10558 unsigned int i
, regno
;
10562 regno
= EH_RETURN_DATA_REGNO (i
);
10563 if (regno
== INVALID_REGNUM
)
10566 emit_frame_save (frame_reg_rtx
, frame_ptr_rtx
, reg_mode
, regno
,
10567 info
->ehrd_offset
+ sp_offset
10568 + reg_size
* (int) i
,
10573 /* Save lr if we used it. */
10574 if (info
->lr_save_p
)
10576 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10577 GEN_INT (info
->lr_save_offset
+ sp_offset
));
10578 rtx reg
= gen_rtx_REG (Pmode
, 0);
10579 rtx mem
= gen_rtx_MEM (Pmode
, addr
);
10580 /* This should not be of rs6000_sr_alias_set, because of
10581 __builtin_return_address. */
10583 insn
= emit_move_insn (mem
, reg
);
10584 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10585 reg
, gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
10588 /* Save CR if we use any that must be preserved. */
10589 if (info
->cr_save_p
)
10591 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10592 GEN_INT (info
->cr_save_offset
+ sp_offset
));
10593 rtx mem
= gen_rtx_MEM (SImode
, addr
);
10595 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10597 /* If r12 was used to hold the original sp, copy cr into r0 now
10599 if (REGNO (frame_reg_rtx
) == 12)
10601 cr_save_rtx
= gen_rtx_REG (SImode
, 0);
10602 emit_insn (gen_movesi_from_cr (cr_save_rtx
));
10604 insn
= emit_move_insn (mem
, cr_save_rtx
);
10606 /* Now, there's no way that dwarf2out_frame_debug_expr is going
10607 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
10608 OK. All we have to do is specify that _one_ condition code
10609 register is saved in this stack slot. The thrower's epilogue
10610 will then restore all the call-saved registers.
10611 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
10612 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10613 cr_save_rtx
, gen_rtx_REG (SImode
, CR2_REGNO
));
10616 /* Update stack and set back pointer unless this is V.4,
10617 for which it was done previously. */
10618 if (info
->push_p
&& DEFAULT_ABI
!= ABI_V4
)
10619 rs6000_emit_allocate_stack (info
->total_size
, FALSE
);
10621 /* Set frame pointer, if needed. */
10622 if (frame_pointer_needed
)
10624 insn
= emit_move_insn (gen_rtx_REG (Pmode
, FRAME_POINTER_REGNUM
),
10626 RTX_FRAME_RELATED_P (insn
) = 1;
10629 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
10630 if ((TARGET_TOC
&& TARGET_MINIMAL_TOC
&& get_pool_size () != 0)
10631 || (DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1
10632 && regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
]))
10634 /* If emit_load_toc_table will use the link register, we need to save
10635 it. We use R11 for this purpose because emit_load_toc_table
10636 can use register 0. This allows us to use a plain 'blr' to return
10637 from the procedure more often. */
10638 int save_LR_around_toc_setup
= (TARGET_ELF
&& flag_pic
!= 0
10639 && ! info
->lr_save_p
10640 && EXIT_BLOCK_PTR
->pred
!= NULL
);
10641 if (save_LR_around_toc_setup
)
10642 emit_move_insn (gen_rtx_REG (Pmode
, 11),
10643 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
10645 rs6000_emit_load_toc_table (TRUE
);
10647 if (save_LR_around_toc_setup
)
10648 emit_move_insn (gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
),
10649 gen_rtx_REG (Pmode
, 11));
10653 if (DEFAULT_ABI
== ABI_DARWIN
10654 && flag_pic
&& current_function_uses_pic_offset_table
)
10656 rtx dest
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
10657 const char *picbase
= machopic_function_base_name ();
10658 rtx src
= gen_rtx_SYMBOL_REF (Pmode
, ggc_alloc_string (picbase
, -1));
10660 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest
, src
)));
10662 rs6000_maybe_dead (
10663 emit_move_insn (gen_rtx_REG (Pmode
, RS6000_PIC_OFFSET_TABLE_REGNUM
),
10664 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)));
10669 /* Write function prologue. */
10672 rs6000_output_function_prologue (file
, size
)
10674 HOST_WIDE_INT size ATTRIBUTE_UNUSED
;
10676 rs6000_stack_t
*info
= rs6000_stack_info ();
10678 if (TARGET_DEBUG_STACK
)
10679 debug_stack_info (info
);
10681 /* Write .extern for any function we will call to save and restore
10683 if (info
->first_fp_reg_save
< 64
10684 && !FP_SAVE_INLINE (info
->first_fp_reg_save
))
10685 fprintf (file
, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
10686 SAVE_FP_PREFIX
, info
->first_fp_reg_save
- 32, SAVE_FP_SUFFIX
,
10687 RESTORE_FP_PREFIX
, info
->first_fp_reg_save
- 32,
10688 RESTORE_FP_SUFFIX
);
10690 /* Write .extern for AIX common mode routines, if needed. */
10691 if (! TARGET_POWER
&& ! TARGET_POWERPC
&& ! common_mode_defined
)
10693 fputs ("\t.extern __mulh\n", file
);
10694 fputs ("\t.extern __mull\n", file
);
10695 fputs ("\t.extern __divss\n", file
);
10696 fputs ("\t.extern __divus\n", file
);
10697 fputs ("\t.extern __quoss\n", file
);
10698 fputs ("\t.extern __quous\n", file
);
10699 common_mode_defined
= 1;
10702 if (! HAVE_prologue
)
10706 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
10707 the "toplevel" insn chain. */
10708 emit_note (0, NOTE_INSN_DELETED
);
10709 rs6000_emit_prologue ();
10710 emit_note (0, NOTE_INSN_DELETED
);
10712 /* Expand INSN_ADDRESSES so final() doesn't crash. */
10716 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
10718 INSN_ADDRESSES_NEW (insn
, addr
);
10723 if (TARGET_DEBUG_STACK
)
10724 debug_rtx_list (get_insns (), 100);
10725 final (get_insns (), file
, FALSE
, FALSE
);
10729 rs6000_pic_labelno
++;
10732 /* Emit function epilogue as insns.
10734 At present, dwarf2out_frame_debug_expr doesn't understand
10735 register restores, so we don't bother setting RTX_FRAME_RELATED_P
10736 anywhere in the epilogue. Most of the insns below would in any case
10737 need special notes to explain where r11 is in relation to the stack. */
10740 rs6000_emit_epilogue (sibcall
)
10743 rs6000_stack_t
*info
;
10744 int restoring_FPRs_inline
;
10745 int using_load_multiple
;
10746 int using_mfcr_multiple
;
10747 int use_backchain_to_restore_sp
;
10749 rtx sp_reg_rtx
= gen_rtx_REG (Pmode
, 1);
10750 rtx frame_reg_rtx
= sp_reg_rtx
;
10751 enum machine_mode reg_mode
= TARGET_POWERPC64
? DImode
: SImode
;
10752 int reg_size
= TARGET_POWERPC64
? 8 : 4;
10755 if (TARGET_SPE_ABI
)
10757 reg_mode
= V2SImode
;
10761 info
= rs6000_stack_info ();
10762 using_load_multiple
= (TARGET_MULTIPLE
&& ! TARGET_POWERPC64
10764 && info
->first_gp_reg_save
< 31);
10765 restoring_FPRs_inline
= (sibcall
10766 || current_function_calls_eh_return
10767 || info
->first_fp_reg_save
== 64
10768 || FP_SAVE_INLINE (info
->first_fp_reg_save
));
10769 use_backchain_to_restore_sp
= (frame_pointer_needed
10770 || current_function_calls_alloca
10771 || info
->total_size
> 32767);
10772 using_mfcr_multiple
= (rs6000_cpu
== PROCESSOR_PPC601
10773 || rs6000_cpu
== PROCESSOR_PPC603
10774 || rs6000_cpu
== PROCESSOR_PPC750
10777 /* If we have a frame pointer, a call to alloca, or a large stack
10778 frame, restore the old stack pointer using the backchain. Otherwise,
10779 we know what size to update it with. */
10780 if (use_backchain_to_restore_sp
)
10782 /* Under V.4, don't reset the stack pointer until after we're done
10783 loading the saved registers. */
10784 if (DEFAULT_ABI
== ABI_V4
)
10785 frame_reg_rtx
= gen_rtx_REG (Pmode
, 11);
10787 emit_move_insn (frame_reg_rtx
,
10788 gen_rtx_MEM (Pmode
, sp_reg_rtx
));
10791 else if (info
->push_p
)
10793 if (DEFAULT_ABI
== ABI_V4
)
10794 sp_offset
= info
->total_size
;
10797 emit_insn (TARGET_32BIT
10798 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
,
10799 GEN_INT (info
->total_size
))
10800 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
,
10801 GEN_INT (info
->total_size
)));
10805 /* Restore AltiVec registers if needed. */
10806 if (TARGET_ALTIVEC_ABI
&& info
->altivec_size
!= 0)
10810 for (i
= info
->first_altivec_reg_save
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
10811 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
10813 rtx addr
, areg
, mem
;
10815 areg
= gen_rtx_REG (Pmode
, 0);
10817 (areg
, GEN_INT (info
->altivec_save_offset
10819 + 16 * (i
- info
->first_altivec_reg_save
)));
10821 /* AltiVec addressing mode is [reg+reg]. */
10822 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, areg
);
10823 mem
= gen_rtx_MEM (V4SImode
, addr
);
10824 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10826 emit_move_insn (gen_rtx_REG (V4SImode
, i
), mem
);
10830 /* Restore VRSAVE if needed. */
10831 if (TARGET_ALTIVEC_ABI
&& info
->vrsave_mask
!= 0)
10833 rtx addr
, mem
, reg
;
10835 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10836 GEN_INT (info
->vrsave_save_offset
+ sp_offset
));
10837 mem
= gen_rtx_MEM (SImode
, addr
);
10838 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10839 reg
= gen_rtx_REG (SImode
, 12);
10840 emit_move_insn (reg
, mem
);
10842 emit_insn (generate_set_vrsave (reg
, info
, 1));
10845 /* Get the old lr if we saved it. */
10846 if (info
->lr_save_p
)
10848 rtx mem
= gen_frame_mem_offset (Pmode
, frame_reg_rtx
,
10849 info
->lr_save_offset
+ sp_offset
);
10851 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10853 emit_move_insn (gen_rtx_REG (Pmode
, 0), mem
);
10856 /* Get the old cr if we saved it. */
10857 if (info
->cr_save_p
)
10859 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10860 GEN_INT (info
->cr_save_offset
+ sp_offset
));
10861 rtx mem
= gen_rtx_MEM (SImode
, addr
);
10863 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10865 emit_move_insn (gen_rtx_REG (SImode
, 12), mem
);
10868 /* Set LR here to try to overlap restores below. */
10869 if (info
->lr_save_p
)
10870 emit_move_insn (gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
),
10871 gen_rtx_REG (Pmode
, 0));
10873 /* Load exception handler data registers, if needed. */
10874 if (current_function_calls_eh_return
)
10876 unsigned int i
, regno
;
10882 regno
= EH_RETURN_DATA_REGNO (i
);
10883 if (regno
== INVALID_REGNUM
)
10886 mem
= gen_frame_mem_offset (reg_mode
, frame_reg_rtx
,
10887 info
->ehrd_offset
+ sp_offset
10888 + reg_size
* (int) i
);
10889 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10891 emit_move_insn (gen_rtx_REG (reg_mode
, regno
), mem
);
10895 /* Restore GPRs. This is done as a PARALLEL if we are using
10896 the load-multiple instructions. */
10897 if (using_load_multiple
)
10900 p
= rtvec_alloc (32 - info
->first_gp_reg_save
);
10901 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
10903 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10904 GEN_INT (info
->gp_save_offset
10907 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
10909 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10912 gen_rtx_SET (VOIDmode
,
10913 gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
),
10916 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
10919 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
10920 if ((regs_ever_live
[info
->first_gp_reg_save
+i
]
10921 && ! call_used_regs
[info
->first_gp_reg_save
+i
])
10922 || (i
+info
->first_gp_reg_save
== RS6000_PIC_OFFSET_TABLE_REGNUM
10923 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
!= 0)
10924 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))))
10926 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10927 GEN_INT (info
->gp_save_offset
10930 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
10932 /* Restore 64-bit quantities for SPE. */
10933 if (TARGET_SPE_ABI
)
10935 int offset
= info
->spe_gp_save_offset
+ sp_offset
+ 8 * i
;
10938 if (!SPE_CONST_OFFSET_OK (offset
))
10940 b
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
10941 emit_move_insn (b
, GEN_INT (offset
));
10944 b
= GEN_INT (offset
);
10946 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, b
);
10947 mem
= gen_rtx_MEM (V2SImode
, addr
);
10950 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10952 emit_move_insn (gen_rtx_REG (reg_mode
,
10953 info
->first_gp_reg_save
+ i
), mem
);
10956 /* Restore fpr's if we need to do it without calling a function. */
10957 if (restoring_FPRs_inline
)
10958 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
10959 if ((regs_ever_live
[info
->first_fp_reg_save
+i
]
10960 && ! call_used_regs
[info
->first_fp_reg_save
+i
]))
10963 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10964 GEN_INT (info
->fp_save_offset
10967 mem
= gen_rtx_MEM (DFmode
, addr
);
10968 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10970 emit_move_insn (gen_rtx_REG (DFmode
,
10971 info
->first_fp_reg_save
+ i
),
10975 /* If we saved cr, restore it here. Just those that were used. */
10976 if (info
->cr_save_p
)
10978 rtx r12_rtx
= gen_rtx_REG (SImode
, 12);
10981 if (using_mfcr_multiple
)
10983 for (i
= 0; i
< 8; i
++)
10984 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
10990 if (using_mfcr_multiple
&& count
> 1)
10995 p
= rtvec_alloc (count
);
10998 for (i
= 0; i
< 8; i
++)
10999 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
11001 rtvec r
= rtvec_alloc (2);
11002 RTVEC_ELT (r
, 0) = r12_rtx
;
11003 RTVEC_ELT (r
, 1) = GEN_INT (1 << (7-i
));
11004 RTVEC_ELT (p
, ndx
) =
11005 gen_rtx_SET (VOIDmode
, gen_rtx_REG (CCmode
, CR0_REGNO
+i
),
11006 gen_rtx_UNSPEC (CCmode
, r
, 20));
11009 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
11014 for (i
= 0; i
< 8; i
++)
11015 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
11017 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode
,
11023 /* If this is V.4, unwind the stack pointer after all of the loads
11024 have been done. We need to emit a block here so that sched
11025 doesn't decide to move the sp change before the register restores
11026 (which may not have any obvious dependency on the stack). This
11027 doesn't hurt performance, because there is no scheduling that can
11028 be done after this point. */
11029 if (DEFAULT_ABI
== ABI_V4
)
11031 if (frame_reg_rtx
!= sp_reg_rtx
)
11032 rs6000_emit_stack_tie ();
11034 if (use_backchain_to_restore_sp
)
11036 emit_move_insn (sp_reg_rtx
, frame_reg_rtx
);
11038 else if (sp_offset
!= 0)
11040 emit_insn (TARGET_32BIT
11041 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
,
11042 GEN_INT (sp_offset
))
11043 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
,
11044 GEN_INT (sp_offset
)));
11048 if (current_function_calls_eh_return
)
11050 rtx sa
= EH_RETURN_STACKADJ_RTX
;
11051 emit_insn (TARGET_32BIT
11052 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
, sa
)
11053 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
, sa
));
11059 if (! restoring_FPRs_inline
)
11060 p
= rtvec_alloc (3 + 64 - info
->first_fp_reg_save
);
11062 p
= rtvec_alloc (2);
11064 RTVEC_ELT (p
, 0) = gen_rtx_RETURN (VOIDmode
);
11065 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
,
11066 gen_rtx_REG (Pmode
,
11067 LINK_REGISTER_REGNUM
));
11069 /* If we have to restore more than two FP registers, branch to the
11070 restore function. It will return to our caller. */
11071 if (! restoring_FPRs_inline
)
11075 const char *alloc_rname
;
11077 sprintf (rname
, "%s%d%s", RESTORE_FP_PREFIX
,
11078 info
->first_fp_reg_save
- 32, RESTORE_FP_SUFFIX
);
11079 alloc_rname
= ggc_strdup (rname
);
11080 RTVEC_ELT (p
, 2) = gen_rtx_USE (VOIDmode
,
11081 gen_rtx_SYMBOL_REF (Pmode
,
11084 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
11087 addr
= gen_rtx_PLUS (Pmode
, sp_reg_rtx
,
11088 GEN_INT (info
->fp_save_offset
+ 8*i
));
11089 mem
= gen_rtx_MEM (DFmode
, addr
);
11090 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11092 RTVEC_ELT (p
, i
+3) =
11093 gen_rtx_SET (VOIDmode
,
11094 gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
),
11099 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
11103 /* Write function epilogue. */
11106 rs6000_output_function_epilogue (file
, size
)
11108 HOST_WIDE_INT size ATTRIBUTE_UNUSED
;
11110 rs6000_stack_t
*info
= rs6000_stack_info ();
11112 if (! HAVE_epilogue
)
11114 rtx insn
= get_last_insn ();
11115 /* If the last insn was a BARRIER, we don't have to write anything except
11116 the trace table. */
11117 if (GET_CODE (insn
) == NOTE
)
11118 insn
= prev_nonnote_insn (insn
);
11119 if (insn
== 0 || GET_CODE (insn
) != BARRIER
)
11121 /* This is slightly ugly, but at least we don't have two
11122 copies of the epilogue-emitting code. */
11125 /* A NOTE_INSN_DELETED is supposed to be at the start
11126 and end of the "toplevel" insn chain. */
11127 emit_note (0, NOTE_INSN_DELETED
);
11128 rs6000_emit_epilogue (FALSE
);
11129 emit_note (0, NOTE_INSN_DELETED
);
11131 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11135 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
11137 INSN_ADDRESSES_NEW (insn
, addr
);
11142 if (TARGET_DEBUG_STACK
)
11143 debug_rtx_list (get_insns (), 100);
11144 final (get_insns (), file
, FALSE
, FALSE
);
11149 /* Output a traceback table here. See /usr/include/sys/debug.h for info
11152 We don't output a traceback table if -finhibit-size-directive was
11153 used. The documentation for -finhibit-size-directive reads
11154 ``don't output a @code{.size} assembler directive, or anything
11155 else that would cause trouble if the function is split in the
11156 middle, and the two halves are placed at locations far apart in
11157 memory.'' The traceback table has this property, since it
11158 includes the offset from the start of the function to the
11159 traceback table itself.
11161 System V.4 Powerpc's (and the embedded ABI derived from it) use a
11162 different traceback table. */
11163 if (DEFAULT_ABI
== ABI_AIX
&& ! flag_inhibit_size_directive
11164 && rs6000_traceback
!= traceback_none
)
11166 const char *fname
= NULL
;
11167 const char *language_string
= lang_hooks
.name
;
11168 int fixed_parms
= 0, float_parms
= 0, parm_info
= 0;
11170 int optional_tbtab
;
11172 if (rs6000_traceback
== traceback_full
)
11173 optional_tbtab
= 1;
11174 else if (rs6000_traceback
== traceback_part
)
11175 optional_tbtab
= 0;
11177 optional_tbtab
= !optimize_size
&& !TARGET_ELF
;
11179 if (optional_tbtab
)
11181 fname
= XSTR (XEXP (DECL_RTL (current_function_decl
), 0), 0);
11182 while (*fname
== '.') /* V.4 encodes . in the name */
11185 /* Need label immediately before tbtab, so we can compute
11186 its offset from the function start. */
11187 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LT");
11188 ASM_OUTPUT_LABEL (file
, fname
);
11191 /* The .tbtab pseudo-op can only be used for the first eight
11192 expressions, since it can't handle the possibly variable
11193 length fields that follow. However, if you omit the optional
11194 fields, the assembler outputs zeros for all optional fields
11195 anyways, giving each variable length field is minimum length
11196 (as defined in sys/debug.h). Thus we can not use the .tbtab
11197 pseudo-op at all. */
11199 /* An all-zero word flags the start of the tbtab, for debuggers
11200 that have to find it by searching forward from the entry
11201 point or from the current pc. */
11202 fputs ("\t.long 0\n", file
);
11204 /* Tbtab format type. Use format type 0. */
11205 fputs ("\t.byte 0,", file
);
11207 /* Language type. Unfortunately, there doesn't seem to be any
11208 official way to get this info, so we use language_string. C
11209 is 0. C++ is 9. No number defined for Obj-C, so use the
11210 value for C for now. There is no official value for Java,
11211 although IBM appears to be using 13. There is no official value
11212 for Chill, so we've chosen 44 pseudo-randomly. */
11213 if (! strcmp (language_string
, "GNU C")
11214 || ! strcmp (language_string
, "GNU Objective-C"))
11216 else if (! strcmp (language_string
, "GNU F77"))
11218 else if (! strcmp (language_string
, "GNU Ada"))
11220 else if (! strcmp (language_string
, "GNU Pascal"))
11222 else if (! strcmp (language_string
, "GNU C++"))
11224 else if (! strcmp (language_string
, "GNU Java"))
11226 else if (! strcmp (language_string
, "GNU CHILL"))
11230 fprintf (file
, "%d,", i
);
11232 /* 8 single bit fields: global linkage (not set for C extern linkage,
11233 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
11234 from start of procedure stored in tbtab, internal function, function
11235 has controlled storage, function has no toc, function uses fp,
11236 function logs/aborts fp operations. */
11237 /* Assume that fp operations are used if any fp reg must be saved. */
11238 fprintf (file
, "%d,",
11239 (optional_tbtab
<< 5) | ((info
->first_fp_reg_save
!= 64) << 1));
11241 /* 6 bitfields: function is interrupt handler, name present in
11242 proc table, function calls alloca, on condition directives
11243 (controls stack walks, 3 bits), saves condition reg, saves
11245 /* The `function calls alloca' bit seems to be set whenever reg 31 is
11246 set up as a frame pointer, even when there is no alloca call. */
11247 fprintf (file
, "%d,",
11248 ((optional_tbtab
<< 6)
11249 | ((optional_tbtab
& frame_pointer_needed
) << 5)
11250 | (info
->cr_save_p
<< 1)
11251 | (info
->lr_save_p
)));
11253 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
11255 fprintf (file
, "%d,",
11256 (info
->push_p
<< 7) | (64 - info
->first_fp_reg_save
));
11258 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
11259 fprintf (file
, "%d,", (32 - first_reg_to_save ()));
11261 if (optional_tbtab
)
11263 /* Compute the parameter info from the function decl argument
11266 int next_parm_info_bit
= 31;
11268 for (decl
= DECL_ARGUMENTS (current_function_decl
);
11269 decl
; decl
= TREE_CHAIN (decl
))
11271 rtx parameter
= DECL_INCOMING_RTL (decl
);
11272 enum machine_mode mode
= GET_MODE (parameter
);
11274 if (GET_CODE (parameter
) == REG
)
11276 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
)
11282 if (mode
== SFmode
)
11284 else if (mode
== DFmode
|| mode
== TFmode
)
11289 /* If only one bit will fit, don't or in this entry. */
11290 if (next_parm_info_bit
> 0)
11291 parm_info
|= (bits
<< (next_parm_info_bit
- 1));
11292 next_parm_info_bit
-= 2;
11296 fixed_parms
+= ((GET_MODE_SIZE (mode
)
11297 + (UNITS_PER_WORD
- 1))
11299 next_parm_info_bit
-= 1;
11305 /* Number of fixed point parameters. */
11306 /* This is actually the number of words of fixed point parameters; thus
11307 an 8 byte struct counts as 2; and thus the maximum value is 8. */
11308 fprintf (file
, "%d,", fixed_parms
);
11310 /* 2 bitfields: number of floating point parameters (7 bits), parameters
11312 /* This is actually the number of fp registers that hold parameters;
11313 and thus the maximum value is 13. */
11314 /* Set parameters on stack bit if parameters are not in their original
11315 registers, regardless of whether they are on the stack? Xlc
11316 seems to set the bit when not optimizing. */
11317 fprintf (file
, "%d\n", ((float_parms
<< 1) | (! optimize
)));
11319 if (! optional_tbtab
)
11322 /* Optional fields follow. Some are variable length. */
11324 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
11325 11 double float. */
11326 /* There is an entry for each parameter in a register, in the order that
11327 they occur in the parameter list. Any intervening arguments on the
11328 stack are ignored. If the list overflows a long (max possible length
11329 34 bits) then completely leave off all elements that don't fit. */
11330 /* Only emit this long if there was at least one parameter. */
11331 if (fixed_parms
|| float_parms
)
11332 fprintf (file
, "\t.long %d\n", parm_info
);
11334 /* Offset from start of code to tb table. */
11335 fputs ("\t.long ", file
);
11336 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LT");
11338 RS6000_OUTPUT_BASENAME (file
, fname
);
11340 assemble_name (file
, fname
);
11342 fputs ("-.", file
);
11344 RS6000_OUTPUT_BASENAME (file
, fname
);
11346 assemble_name (file
, fname
);
11350 /* Interrupt handler mask. */
11351 /* Omit this long, since we never set the interrupt handler bit
11354 /* Number of CTL (controlled storage) anchors. */
11355 /* Omit this long, since the has_ctl bit is never set above. */
11357 /* Displacement into stack of each CTL anchor. */
11358 /* Omit this list of longs, because there are no CTL anchors. */
11360 /* Length of function name. */
11363 fprintf (file
, "\t.short %d\n", (int) strlen (fname
));
11365 /* Function name. */
11366 assemble_string (fname
, strlen (fname
));
11368 /* Register for alloca automatic storage; this is always reg 31.
11369 Only emit this if the alloca bit was set above. */
11370 if (frame_pointer_needed
)
11371 fputs ("\t.byte 31\n", file
);
11373 fputs ("\t.align 2\n", file
);
11377 /* A C compound statement that outputs the assembler code for a thunk
11378 function, used to implement C++ virtual function calls with
11379 multiple inheritance. The thunk acts as a wrapper around a virtual
11380 function, adjusting the implicit object parameter before handing
11381 control off to the real function.
11383 First, emit code to add the integer DELTA to the location that
11384 contains the incoming first argument. Assume that this argument
11385 contains a pointer, and is the one used to pass the `this' pointer
11386 in C++. This is the incoming argument *before* the function
11387 prologue, e.g. `%o0' on a sparc. The addition must preserve the
11388 values of all other incoming arguments.
11390 After the addition, emit code to jump to FUNCTION, which is a
11391 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
11392 not touch the return address. Hence returning from FUNCTION will
11393 return to whoever called the current `thunk'.
11395 The effect must be as if FUNCTION had been called directly with the
11396 adjusted first argument. This macro is responsible for emitting
11397 all of the code for a thunk function; output_function_prologue()
11398 and output_function_epilogue() are not invoked.
11400 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
11401 been extracted from it.) It might possibly be useful on some
11402 targets, but probably not.
11404 If you do not define this macro, the target-independent code in the
11405 C++ frontend will generate a less efficient heavyweight thunk that
11406 calls FUNCTION instead of jumping to it. The generic approach does
11407 not support varargs. */
11410 rs6000_output_mi_thunk (file
, thunk_fndecl
, delta
, vcall_offset
, function
)
11412 tree thunk_fndecl ATTRIBUTE_UNUSED
;
11413 HOST_WIDE_INT delta
;
11414 HOST_WIDE_INT vcall_offset
;
11417 rtx
this, insn
, funexp
;
11419 reload_completed
= 1;
11420 no_new_pseudos
= 1;
11422 /* Mark the end of the (empty) prologue. */
11423 emit_note (NULL
, NOTE_INSN_PROLOGUE_END
);
11425 /* Find the "this" pointer. If the function returns a structure,
11426 the structure return pointer is in r3. */
11427 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function
))))
11428 this = gen_rtx_REG (Pmode
, 4);
11430 this = gen_rtx_REG (Pmode
, 3);
11432 /* Apply the constant offset, if required. */
11435 rtx delta_rtx
= GEN_INT (delta
);
11436 emit_insn (TARGET_32BIT
11437 ? gen_addsi3 (this, this, delta_rtx
)
11438 : gen_adddi3 (this, this, delta_rtx
));
11441 /* Apply the offset from the vtable, if required. */
11444 rtx vcall_offset_rtx
= GEN_INT (vcall_offset
);
11445 rtx tmp
= gen_rtx_REG (Pmode
, 12);
11447 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, this));
11448 emit_insn (TARGET_32BIT
11449 ? gen_addsi3 (tmp
, tmp
, vcall_offset_rtx
)
11450 : gen_adddi3 (tmp
, tmp
, vcall_offset_rtx
));
11451 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, tmp
));
11452 emit_insn (TARGET_32BIT
11453 ? gen_addsi3 (this, this, tmp
)
11454 : gen_adddi3 (this, this, tmp
));
11457 /* Generate a tail call to the target function. */
11458 if (!TREE_USED (function
))
11460 assemble_external (function
);
11461 TREE_USED (function
) = 1;
11463 funexp
= XEXP (DECL_RTL (function
), 0);
11465 SYMBOL_REF_FLAG (funexp
) = 0;
11466 if (current_file_function_operand (funexp
, VOIDmode
)
11467 && (! lookup_attribute ("longcall",
11468 TYPE_ATTRIBUTES (TREE_TYPE (function
)))
11469 || lookup_attribute ("shortcall",
11470 TYPE_ATTRIBUTES (TREE_TYPE (function
)))))
11471 SYMBOL_REF_FLAG (funexp
) = 1;
11473 funexp
= gen_rtx_MEM (FUNCTION_MODE
, funexp
);
11477 funexp
= machopic_indirect_call_target (funexp
);
11480 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
11481 generate sibcall RTL explicitly to avoid constraint abort. */
11482 insn
= emit_call_insn (
11483 gen_rtx_PARALLEL (VOIDmode
,
11485 gen_rtx_CALL (VOIDmode
,
11486 funexp
, const0_rtx
),
11487 gen_rtx_USE (VOIDmode
, const0_rtx
),
11488 gen_rtx_USE (VOIDmode
,
11489 gen_rtx_REG (SImode
,
11490 LINK_REGISTER_REGNUM
)),
11491 gen_rtx_RETURN (VOIDmode
))));
11492 SIBLING_CALL_P (insn
) = 1;
11495 /* Run just enough of rest_of_compilation to get the insns emitted.
11496 There's not really enough bulk here to make other passes such as
11497 instruction scheduling worth while. Note that use_thunk calls
11498 assemble_start_function and assemble_end_function. */
11499 insn
= get_insns ();
11500 shorten_branches (insn
);
11501 final_start_function (insn
, file
, 1);
11502 final (insn
, file
, 1, 0);
11503 final_end_function ();
11505 reload_completed
= 0;
11506 no_new_pseudos
= 0;
11509 /* A quick summary of the various types of 'constant-pool tables'
11512 Target Flags Name One table per
11513 AIX (none) AIX TOC object file
11514 AIX -mfull-toc AIX TOC object file
11515 AIX -mminimal-toc AIX minimal TOC translation unit
11516 SVR4/EABI (none) SVR4 SDATA object file
11517 SVR4/EABI -fpic SVR4 pic object file
11518 SVR4/EABI -fPIC SVR4 PIC translation unit
11519 SVR4/EABI -mrelocatable EABI TOC function
11520 SVR4/EABI -maix AIX TOC object file
11521 SVR4/EABI -maix -mminimal-toc
11522 AIX minimal TOC translation unit
11524 Name Reg. Set by entries contains:
11525 made by addrs? fp? sum?
11527 AIX TOC 2 crt0 as Y option option
11528 AIX minimal TOC 30 prolog gcc Y Y option
11529 SVR4 SDATA 13 crt0 gcc N Y N
11530 SVR4 pic 30 prolog ld Y not yet N
11531 SVR4 PIC 30 prolog gcc Y option option
11532 EABI TOC 30 prolog gcc Y option option
11536 /* Hash functions for the hash table. */
11539 rs6000_hash_constant (k
)
11542 enum rtx_code code
= GET_CODE (k
);
11543 enum machine_mode mode
= GET_MODE (k
);
11544 unsigned result
= (code
<< 3) ^ mode
;
11545 const char *format
;
11548 format
= GET_RTX_FORMAT (code
);
11549 flen
= strlen (format
);
11555 return result
* 1231 + (unsigned) INSN_UID (XEXP (k
, 0));
11558 if (mode
!= VOIDmode
)
11559 return real_hash (CONST_DOUBLE_REAL_VALUE (k
)) * result
;
11571 for (; fidx
< flen
; fidx
++)
11572 switch (format
[fidx
])
11577 const char *str
= XSTR (k
, fidx
);
11578 len
= strlen (str
);
11579 result
= result
* 613 + len
;
11580 for (i
= 0; i
< len
; i
++)
11581 result
= result
* 613 + (unsigned) str
[i
];
11586 result
= result
* 1231 + rs6000_hash_constant (XEXP (k
, fidx
));
11590 result
= result
* 613 + (unsigned) XINT (k
, fidx
);
11593 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT
))
11594 result
= result
* 613 + (unsigned) XWINT (k
, fidx
);
11598 for (i
= 0; i
< sizeof(HOST_WIDE_INT
)/sizeof(unsigned); i
++)
11599 result
= result
* 613 + (unsigned) (XWINT (k
, fidx
)
11611 toc_hash_function (hash_entry
)
11612 const void * hash_entry
;
11614 const struct toc_hash_struct
*thc
=
11615 (const struct toc_hash_struct
*) hash_entry
;
11616 return rs6000_hash_constant (thc
->key
) ^ thc
->key_mode
;
11619 /* Compare H1 and H2 for equivalence. */
11622 toc_hash_eq (h1
, h2
)
11626 rtx r1
= ((const struct toc_hash_struct
*) h1
)->key
;
11627 rtx r2
= ((const struct toc_hash_struct
*) h2
)->key
;
11629 if (((const struct toc_hash_struct
*) h1
)->key_mode
11630 != ((const struct toc_hash_struct
*) h2
)->key_mode
)
11633 return rtx_equal_p (r1
, r2
);
/* These are the names given by the C++ front-end to vtables, and
   vtable-like objects.  Ideally, this logic should not be here;
   instead, there should be some programmatic way of inquiring as
   to whether or not an object is a vtable.  */

/* Nonzero if NAME is the assembler name of a C++ vtable: "_vt." under
   the old ABI, or "_ZTV" (vtable), "_ZTT" (VTT), "_ZTC" (construction
   vtable) under the Itanium ABI mangling.
   Fixed: the body previously hard-coded the identifier `name' instead
   of using the NAME parameter, so the macro only worked when the call
   site happened to declare a variable literally called `name'.  */
#define VTABLE_NAME_P(NAME)				\
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0	\
   || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0	\
   || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0	\
   || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
11648 rs6000_output_symbol_ref (file
, x
)
11652 /* Currently C++ toc references to vtables can be emitted before it
11653 is decided whether the vtable is public or private. If this is
11654 the case, then the linker will eventually complain that there is
11655 a reference to an unknown section. Thus, for vtables only,
11656 we emit the TOC reference to reference the symbol and not the
11658 const char *name
= XSTR (x
, 0);
11660 if (VTABLE_NAME_P (name
))
11662 RS6000_OUTPUT_BASENAME (file
, name
);
11665 assemble_name (file
, name
);
11668 /* Output a TOC entry. We derive the entry name from what is being
11672 output_toc (file
, x
, labelno
, mode
)
11676 enum machine_mode mode
;
11679 const char *name
= buf
;
11680 const char *real_name
;
11687 /* When the linker won't eliminate them, don't output duplicate
11688 TOC entries (this happens on AIX if there is any kind of TOC,
11689 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
11691 if (TARGET_TOC
&& GET_CODE (x
) != LABEL_REF
)
11693 struct toc_hash_struct
*h
;
11696 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
11697 time because GGC is not initialised at that point. */
11698 if (toc_hash_table
== NULL
)
11699 toc_hash_table
= htab_create_ggc (1021, toc_hash_function
,
11700 toc_hash_eq
, NULL
);
11702 h
= ggc_alloc (sizeof (*h
));
11704 h
->key_mode
= mode
;
11705 h
->labelno
= labelno
;
11707 found
= htab_find_slot (toc_hash_table
, h
, 1);
11708 if (*found
== NULL
)
11710 else /* This is indeed a duplicate.
11711 Set this label equal to that label. */
11713 fputs ("\t.set ", file
);
11714 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LC");
11715 fprintf (file
, "%d,", labelno
);
11716 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LC");
11717 fprintf (file
, "%d\n", ((*(const struct toc_hash_struct
**)
11723 /* If we're going to put a double constant in the TOC, make sure it's
11724 aligned properly when strict alignment is on. */
11725 if (GET_CODE (x
) == CONST_DOUBLE
11726 && STRICT_ALIGNMENT
11727 && GET_MODE_BITSIZE (mode
) >= 64
11728 && ! (TARGET_NO_FP_IN_TOC
&& ! TARGET_MINIMAL_TOC
)) {
11729 ASM_OUTPUT_ALIGN (file
, 3);
11732 (*targetm
.asm_out
.internal_label
) (file
, "LC", labelno
);
11734 /* Handle FP constants specially. Note that if we have a minimal
11735 TOC, things we put here aren't actually in the TOC, so we can allow
11737 if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == TFmode
)
11739 REAL_VALUE_TYPE rv
;
11742 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
11743 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv
, k
);
11747 if (TARGET_MINIMAL_TOC
)
11748 fputs (DOUBLE_INT_ASM_OP
, file
);
11750 fprintf (file
, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
11751 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
11752 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
11753 fprintf (file
, "0x%lx%08lx,0x%lx%08lx\n",
11754 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
11755 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
11760 if (TARGET_MINIMAL_TOC
)
11761 fputs ("\t.long ", file
);
11763 fprintf (file
, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
11764 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
11765 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
11766 fprintf (file
, "0x%lx,0x%lx,0x%lx,0x%lx\n",
11767 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
11768 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
11772 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == DFmode
)
11774 REAL_VALUE_TYPE rv
;
11777 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
11778 REAL_VALUE_TO_TARGET_DOUBLE (rv
, k
);
11782 if (TARGET_MINIMAL_TOC
)
11783 fputs (DOUBLE_INT_ASM_OP
, file
);
11785 fprintf (file
, "\t.tc FD_%lx_%lx[TC],",
11786 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
11787 fprintf (file
, "0x%lx%08lx\n",
11788 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
11793 if (TARGET_MINIMAL_TOC
)
11794 fputs ("\t.long ", file
);
11796 fprintf (file
, "\t.tc FD_%lx_%lx[TC],",
11797 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
11798 fprintf (file
, "0x%lx,0x%lx\n",
11799 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
11803 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == SFmode
)
11805 REAL_VALUE_TYPE rv
;
11808 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
11809 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
11813 if (TARGET_MINIMAL_TOC
)
11814 fputs (DOUBLE_INT_ASM_OP
, file
);
11816 fprintf (file
, "\t.tc FS_%lx[TC],", l
& 0xffffffff);
11817 fprintf (file
, "0x%lx00000000\n", l
& 0xffffffff);
11822 if (TARGET_MINIMAL_TOC
)
11823 fputs ("\t.long ", file
);
11825 fprintf (file
, "\t.tc FS_%lx[TC],", l
& 0xffffffff);
11826 fprintf (file
, "0x%lx\n", l
& 0xffffffff);
11830 else if (GET_MODE (x
) == VOIDmode
11831 && (GET_CODE (x
) == CONST_INT
|| GET_CODE (x
) == CONST_DOUBLE
))
11833 unsigned HOST_WIDE_INT low
;
11834 HOST_WIDE_INT high
;
11836 if (GET_CODE (x
) == CONST_DOUBLE
)
11838 low
= CONST_DOUBLE_LOW (x
);
11839 high
= CONST_DOUBLE_HIGH (x
);
11842 #if HOST_BITS_PER_WIDE_INT == 32
11845 high
= (low
& 0x80000000) ? ~0 : 0;
11849 low
= INTVAL (x
) & 0xffffffff;
11850 high
= (HOST_WIDE_INT
) INTVAL (x
) >> 32;
11854 /* TOC entries are always Pmode-sized, but since this
11855 is a bigendian machine then if we're putting smaller
11856 integer constants in the TOC we have to pad them.
11857 (This is still a win over putting the constants in
11858 a separate constant pool, because then we'd have
11859 to have both a TOC entry _and_ the actual constant.)
11861 For a 32-bit target, CONST_INT values are loaded and shifted
11862 entirely within `low' and can be stored in one TOC entry. */
11864 if (TARGET_64BIT
&& POINTER_SIZE
< GET_MODE_BITSIZE (mode
))
11865 abort ();/* It would be easy to make this work, but it doesn't now. */
11867 if (POINTER_SIZE
> GET_MODE_BITSIZE (mode
))
11869 #if HOST_BITS_PER_WIDE_INT == 32
11870 lshift_double (low
, high
, POINTER_SIZE
- GET_MODE_BITSIZE (mode
),
11871 POINTER_SIZE
, &low
, &high
, 0);
11874 low
<<= POINTER_SIZE
- GET_MODE_BITSIZE (mode
);
11875 high
= (HOST_WIDE_INT
) low
>> 32;
11882 if (TARGET_MINIMAL_TOC
)
11883 fputs (DOUBLE_INT_ASM_OP
, file
);
11885 fprintf (file
, "\t.tc ID_%lx_%lx[TC],",
11886 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
11887 fprintf (file
, "0x%lx%08lx\n",
11888 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
11893 if (POINTER_SIZE
< GET_MODE_BITSIZE (mode
))
11895 if (TARGET_MINIMAL_TOC
)
11896 fputs ("\t.long ", file
);
11898 fprintf (file
, "\t.tc ID_%lx_%lx[TC],",
11899 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
11900 fprintf (file
, "0x%lx,0x%lx\n",
11901 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
11905 if (TARGET_MINIMAL_TOC
)
11906 fputs ("\t.long ", file
);
11908 fprintf (file
, "\t.tc IS_%lx[TC],", (long) low
& 0xffffffff);
11909 fprintf (file
, "0x%lx\n", (long) low
& 0xffffffff);
11915 if (GET_CODE (x
) == CONST
)
11917 if (GET_CODE (XEXP (x
, 0)) != PLUS
)
11920 base
= XEXP (XEXP (x
, 0), 0);
11921 offset
= INTVAL (XEXP (XEXP (x
, 0), 1));
11924 if (GET_CODE (base
) == SYMBOL_REF
)
11925 name
= XSTR (base
, 0);
11926 else if (GET_CODE (base
) == LABEL_REF
)
11927 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (XEXP (base
, 0)));
11928 else if (GET_CODE (base
) == CODE_LABEL
)
11929 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (base
));
11933 real_name
= (*targetm
.strip_name_encoding
) (name
);
11934 if (TARGET_MINIMAL_TOC
)
11935 fputs (TARGET_32BIT
? "\t.long " : DOUBLE_INT_ASM_OP
, file
);
11938 fprintf (file
, "\t.tc %s", real_name
);
11941 fprintf (file
, ".N%d", - offset
);
11943 fprintf (file
, ".P%d", offset
);
11945 fputs ("[TC],", file
);
11948 /* Currently C++ toc references to vtables can be emitted before it
11949 is decided whether the vtable is public or private. If this is
11950 the case, then the linker will eventually complain that there is
11951 a TOC reference to an unknown section. Thus, for vtables only,
11952 we emit the TOC reference to reference the symbol and not the
11954 if (VTABLE_NAME_P (name
))
11956 RS6000_OUTPUT_BASENAME (file
, name
);
11958 fprintf (file
, "%d", offset
);
11959 else if (offset
> 0)
11960 fprintf (file
, "+%d", offset
);
11963 output_addr_const (file
, x
);
/* Output an assembler pseudo-op to write an ASCII string of N characters
   starting at P to FILE.

   On the RS/6000, we have to do this using the .byte operation and
   write out special characters outside the quoted string.
   Also, the assembler is broken; very long strings are truncated,
   so we must artificially break them up early (at 512 characters).  */

void
output_ascii (file, p, n)
     FILE *file;
     const char *p;
     int n;
{
  char c;
  int i, count_string;
  const char *for_string = "\t.byte \"";
  const char *for_decimal = "\t.byte ";
  const char *to_close = NULL;

  /* FOR_STRING / FOR_DECIMAL hold the prefix to emit before the next
     printable / non-printable character respectively; TO_CLOSE holds
     whatever is needed to terminate the current output line.  */
  count_string = 0;
  for (i = 0; i < n; i++)
    {
      c = *p++;
      if (c >= ' ' && c < 0177)
	{
	  if (for_string)
	    fputs (for_string, file);
	  putc (c, file);

	  /* Write two quotes to get one.  */
	  if (c == '"')
	    {
	      putc (c, file);
	      ++count_string;
	    }

	  for_string = NULL;
	  for_decimal = "\"\n\t.byte ";
	  to_close = "\"\n";
	  ++count_string;

	  /* Break overly long quoted runs to dodge the assembler's
	     string-truncation bug.  */
	  if (count_string >= 512)
	    {
	      fputs (to_close, file);

	      for_string = "\t.byte \"";
	      for_decimal = "\t.byte ";
	      to_close = NULL;
	      count_string = 0;
	    }
	}
      else
	{
	  /* Non-printable characters are emitted as decimal .byte
	     operands outside any quoted string.  */
	  if (for_decimal)
	    fputs (for_decimal, file);
	  fprintf (file, "%d", c);

	  for_string = "\n\t.byte \"";
	  for_decimal = ", ";
	  to_close = "\n";
	  count_string = 0;
	}
    }

  /* Now close the string if we have written one.  Then end the line.  */
  if (to_close)
    fputs (to_close, file);
}
/* Generate a unique section name for FILENAME for a section type
   represented by SECTION_DESC.  Output goes into BUF.

   SECTION_DESC can be any string, as long as it is different for each
   possible section type.

   We name the section in the same manner as xlc.  The name begins with an
   underscore followed by the filename (after stripping any leading directory
   names) with the last period replaced by the string SECTION_DESC.  If
   FILENAME does not contain a period, SECTION_DESC is appended to the end of
   the name.  Non-alphanumeric characters are dropped.  The result is
   allocated with xmalloc; the caller owns it.  */

void
rs6000_gen_section_name (buf, filename, section_desc)
     char **buf;
     const char *filename;
     const char *section_desc;
{
  const char *q, *after_last_slash, *last_period = 0;
  char *p;
  int len;

  /* Locate the basename and the final '.' in one pass.  */
  after_last_slash = filename;
  for (q = filename; *q; q++)
    {
      if (*q == '/')
	after_last_slash = q + 1;
      else if (*q == '.')
	last_period = q;
    }

  /* '_' prefix + basename + SECTION_DESC + NUL.  */
  len = strlen (after_last_slash) + strlen (section_desc) + 2;
  *buf = (char *) xmalloc (len);

  p = *buf;
  *p++ = '_';

  for (q = after_last_slash; *q; q++)
    {
      if (q == last_period)
	{
	  /* Replace the last period (and anything after it) with
	     SECTION_DESC.  */
	  strcpy (p, section_desc);
	  p += strlen (section_desc);
	  break;
	}

      else if (ISALNUM (*q))
	*p++ = *q;
    }

  /* No period in the basename: append SECTION_DESC; otherwise the
     strcpy above already NUL-terminated the buffer.  */
  if (last_period == 0)
    strcpy (p, section_desc);
  else
    *p = '\0';
}
12093 /* Emit profile function. */
12096 output_profile_hook (labelno
)
12097 int labelno ATTRIBUTE_UNUSED
;
12099 if (DEFAULT_ABI
== ABI_AIX
)
12101 #ifdef NO_PROFILE_COUNTERS
12102 emit_library_call (init_one_libfunc (RS6000_MCOUNT
), 0, VOIDmode
, 0);
12105 const char *label_name
;
12108 ASM_GENERATE_INTERNAL_LABEL (buf
, "LP", labelno
);
12109 label_name
= (*targetm
.strip_name_encoding
) (ggc_strdup (buf
));
12110 fun
= gen_rtx_SYMBOL_REF (Pmode
, label_name
);
12112 emit_library_call (init_one_libfunc (RS6000_MCOUNT
), 0, VOIDmode
, 1,
12116 else if (DEFAULT_ABI
== ABI_DARWIN
)
12118 const char *mcount_name
= RS6000_MCOUNT
;
12119 int caller_addr_regno
= LINK_REGISTER_REGNUM
;
12121 /* Be conservative and always set this, at least for now. */
12122 current_function_uses_pic_offset_table
= 1;
12125 /* For PIC code, set up a stub and collect the caller's address
12126 from r0, which is where the prologue puts it. */
12129 mcount_name
= machopic_stub_name (mcount_name
);
12130 if (current_function_uses_pic_offset_table
)
12131 caller_addr_regno
= 0;
12134 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, mcount_name
),
12136 gen_rtx_REG (Pmode
, caller_addr_regno
), Pmode
);
12140 /* Write function profiler code. */
12143 output_function_profiler (file
, labelno
)
12150 ASM_GENERATE_INTERNAL_LABEL (buf
, "LP", labelno
);
12151 switch (DEFAULT_ABI
)
12158 /* Fall through. */
12160 case ABI_AIX_NODESC
:
12163 warning ("no profiling of 64-bit code for this ABI");
12166 fprintf (file
, "\tmflr %s\n", reg_names
[0]);
12169 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file
);
12170 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12171 reg_names
[0], save_lr
, reg_names
[1]);
12172 asm_fprintf (file
, "\tmflr %s\n", reg_names
[12]);
12173 asm_fprintf (file
, "\t{l|lwz} %s,", reg_names
[0]);
12174 assemble_name (file
, buf
);
12175 asm_fprintf (file
, "@got(%s)\n", reg_names
[12]);
12177 else if (flag_pic
> 1)
12179 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12180 reg_names
[0], save_lr
, reg_names
[1]);
12181 /* Now, we need to get the address of the label. */
12182 fputs ("\tbl 1f\n\t.long ", file
);
12183 assemble_name (file
, buf
);
12184 fputs ("-.\n1:", file
);
12185 asm_fprintf (file
, "\tmflr %s\n", reg_names
[11]);
12186 asm_fprintf (file
, "\t{l|lwz} %s,0(%s)\n",
12187 reg_names
[0], reg_names
[11]);
12188 asm_fprintf (file
, "\t{cax|add} %s,%s,%s\n",
12189 reg_names
[0], reg_names
[0], reg_names
[11]);
12193 asm_fprintf (file
, "\t{liu|lis} %s,", reg_names
[12]);
12194 assemble_name (file
, buf
);
12195 fputs ("@ha\n", file
);
12196 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12197 reg_names
[0], save_lr
, reg_names
[1]);
12198 asm_fprintf (file
, "\t{cal|la} %s,", reg_names
[0]);
12199 assemble_name (file
, buf
);
12200 asm_fprintf (file
, "@l(%s)\n", reg_names
[12]);
12203 if (current_function_needs_context
&& DEFAULT_ABI
== ABI_AIX_NODESC
)
12205 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12206 reg_names
[STATIC_CHAIN_REGNUM
],
12208 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
12209 asm_fprintf (file
, "\t{l|lwz} %s,%d(%s)\n",
12210 reg_names
[STATIC_CHAIN_REGNUM
],
12214 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
12215 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
12220 /* Don't do anything, done in output_profile_hook (). */
12227 rs6000_use_dfa_pipeline_interface ()
12232 /* Power4 load update and store update instructions are cracked into a
12233 load or store and an integer insn which are executed in the same cycle.
12234 Branches have their own dispatch slot which does not count against the
12235 GCC issue rate, but it changes the program flow so there are no other
12236 instructions to issue in this cycle. */
12239 rs6000_variable_issue (stream
, verbose
, insn
, more
)
12240 FILE *stream ATTRIBUTE_UNUSED
;
12241 int verbose ATTRIBUTE_UNUSED
;
12245 if (GET_CODE (PATTERN (insn
)) == USE
12246 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
12249 if (rs6000_cpu
== PROCESSOR_POWER4
)
12251 enum attr_type type
= get_attr_type (insn
);
12252 if (type
== TYPE_LOAD_EXT_U
|| type
== TYPE_LOAD_EXT_UX
12253 || type
== TYPE_LOAD_UX
|| type
== TYPE_STORE_UX
12254 || type
== TYPE_FPLOAD_UX
|| type
== TYPE_FPSTORE_UX
)
12256 else if (type
== TYPE_LOAD_U
|| type
== TYPE_STORE_U
12257 || type
== TYPE_FPLOAD_U
|| type
== TYPE_FPSTORE_U
12258 || type
== TYPE_LOAD_EXT
|| type
== TYPE_DELAYED_CR
)
12259 return more
> 2 ? more
- 2 : 0;
12265 /* Adjust the cost of a scheduling dependency. Return the new cost of
12266 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
12269 rs6000_adjust_cost (insn
, link
, dep_insn
, cost
)
12272 rtx dep_insn ATTRIBUTE_UNUSED
;
12275 if (! recog_memoized (insn
))
12278 if (REG_NOTE_KIND (link
) != 0)
12281 if (REG_NOTE_KIND (link
) == 0)
12283 /* Data dependency; DEP_INSN writes a register that INSN reads
12284 some cycles later. */
12285 switch (get_attr_type (insn
))
12288 /* Tell the first scheduling pass about the latency between
12289 a mtctr and bctr (and mtlr and br/blr). The first
12290 scheduling pass will not know about this latency since
12291 the mtctr instruction, which has the latency associated
12292 to it, will be generated by reload. */
12293 return TARGET_POWER
? 5 : 4;
12295 /* Leave some extra cycles between a compare and its
12296 dependent branch, to inhibit expensive mispredicts. */
12297 if ((rs6000_cpu_attr
== CPU_PPC603
12298 || rs6000_cpu_attr
== CPU_PPC604
12299 || rs6000_cpu_attr
== CPU_PPC604E
12300 || rs6000_cpu_attr
== CPU_PPC620
12301 || rs6000_cpu_attr
== CPU_PPC630
12302 || rs6000_cpu_attr
== CPU_PPC750
12303 || rs6000_cpu_attr
== CPU_PPC7400
12304 || rs6000_cpu_attr
== CPU_PPC7450
12305 || rs6000_cpu_attr
== CPU_POWER4
)
12306 && recog_memoized (dep_insn
)
12307 && (INSN_CODE (dep_insn
) >= 0)
12308 && (get_attr_type (dep_insn
) == TYPE_CMP
12309 || get_attr_type (dep_insn
) == TYPE_COMPARE
12310 || get_attr_type (dep_insn
) == TYPE_DELAYED_COMPARE
12311 || get_attr_type (dep_insn
) == TYPE_FPCOMPARE
12312 || get_attr_type (dep_insn
) == TYPE_CR_LOGICAL
12313 || get_attr_type (dep_insn
) == TYPE_DELAYED_CR
))
12318 /* Fall out to return default cost. */
12324 /* A C statement (sans semicolon) to update the integer scheduling
12325 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
12326 INSN earlier, increase the priority to execute INSN later. Do not
12327 define this macro if you do not need to adjust the scheduling
12328 priorities of insns. */
12331 rs6000_adjust_priority (insn
, priority
)
12332 rtx insn ATTRIBUTE_UNUSED
;
12335 /* On machines (like the 750) which have asymmetric integer units,
12336 where one integer unit can do multiply and divides and the other
12337 can't, reduce the priority of multiply/divide so it is scheduled
12338 before other integer operations. */
12341 if (! INSN_P (insn
))
12344 if (GET_CODE (PATTERN (insn
)) == USE
)
12347 switch (rs6000_cpu_attr
) {
12349 switch (get_attr_type (insn
))
12356 fprintf (stderr
, "priority was %#x (%d) before adjustment\n",
12357 priority
, priority
);
12358 if (priority
>= 0 && priority
< 0x01000000)
12368 /* Return how many instructions the machine can issue per cycle. */
12371 rs6000_issue_rate ()
12373 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
12374 if (!reload_completed
)
12377 switch (rs6000_cpu_attr
) {
12378 case CPU_RIOS1
: /* ? */
12380 case CPU_PPC601
: /* ? */
12401 /* Length in units of the trampoline for entering a nested function. */
12404 rs6000_trampoline_size ()
12408 switch (DEFAULT_ABI
)
12414 ret
= (TARGET_32BIT
) ? 12 : 24;
12419 case ABI_AIX_NODESC
:
12420 ret
= (TARGET_32BIT
) ? 40 : 48;
12427 /* Emit RTL insns to initialize the variable parts of a trampoline.
12428 FNADDR is an RTX for the address of the function's pure code.
12429 CXT is an RTX for the static chain value for the function. */
12432 rs6000_initialize_trampoline (addr
, fnaddr
, cxt
)
12437 enum machine_mode pmode
= Pmode
;
12438 int regsize
= (TARGET_32BIT
) ? 4 : 8;
12439 rtx ctx_reg
= force_reg (pmode
, cxt
);
12441 switch (DEFAULT_ABI
)
12446 /* Macros to shorten the code expansions below. */
12447 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
12448 #define MEM_PLUS(addr,offset) \
12449 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
12451 /* Under AIX, just build the 3 word function descriptor */
12454 rtx fn_reg
= gen_reg_rtx (pmode
);
12455 rtx toc_reg
= gen_reg_rtx (pmode
);
12456 emit_move_insn (fn_reg
, MEM_DEREF (fnaddr
));
12457 emit_move_insn (toc_reg
, MEM_PLUS (fnaddr
, regsize
));
12458 emit_move_insn (MEM_DEREF (addr
), fn_reg
);
12459 emit_move_insn (MEM_PLUS (addr
, regsize
), toc_reg
);
12460 emit_move_insn (MEM_PLUS (addr
, 2*regsize
), ctx_reg
);
12464 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
12467 case ABI_AIX_NODESC
:
12468 emit_library_call (gen_rtx_SYMBOL_REF (SImode
, "__trampoline_setup"),
12469 FALSE
, VOIDmode
, 4,
12471 GEN_INT (rs6000_trampoline_size ()), SImode
,
12481 /* Table of valid machine attributes. */
12483 const struct attribute_spec rs6000_attribute_table
[] =
12485 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
12486 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute
},
12487 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute
},
12488 { NULL
, 0, 0, false, false, false, NULL
}
12491 /* Handle a "longcall" or "shortcall" attribute; arguments as in
12492 struct attribute_spec.handler. */
12495 rs6000_handle_longcall_attribute (node
, name
, args
, flags
, no_add_attrs
)
12498 tree args ATTRIBUTE_UNUSED
;
12499 int flags ATTRIBUTE_UNUSED
;
12500 bool *no_add_attrs
;
12502 if (TREE_CODE (*node
) != FUNCTION_TYPE
12503 && TREE_CODE (*node
) != FIELD_DECL
12504 && TREE_CODE (*node
) != TYPE_DECL
)
12506 warning ("`%s' attribute only applies to functions",
12507 IDENTIFIER_POINTER (name
));
12508 *no_add_attrs
= true;
12514 /* Set longcall attributes on all functions declared when
12515 rs6000_default_long_calls is true. */
12517 rs6000_set_default_type_attributes (type
)
12520 if (rs6000_default_long_calls
12521 && (TREE_CODE (type
) == FUNCTION_TYPE
12522 || TREE_CODE (type
) == METHOD_TYPE
))
12523 TYPE_ATTRIBUTES (type
) = tree_cons (get_identifier ("longcall"),
12525 TYPE_ATTRIBUTES (type
));
12528 /* Return a reference suitable for calling a function with the
12529 longcall attribute. */
12532 rs6000_longcall_ref (call_ref
)
12535 const char *call_name
;
12538 if (GET_CODE (call_ref
) != SYMBOL_REF
)
12541 /* System V adds '.' to the internal name, so skip them. */
12542 call_name
= XSTR (call_ref
, 0);
12543 if (*call_name
== '.')
12545 while (*call_name
== '.')
12548 node
= get_identifier (call_name
);
12549 call_ref
= gen_rtx_SYMBOL_REF (VOIDmode
, IDENTIFIER_POINTER (node
));
12552 return force_reg (Pmode
, call_ref
);
12556 #ifdef USING_ELFOS_H
12558 /* A C statement or statements to switch to the appropriate section
12559 for output of RTX in mode MODE. You can assume that RTX is some
12560 kind of constant in RTL. The argument MODE is redundant except in
12561 the case of a `const_int' rtx. Select the section by calling
12562 `text_section' or one of the alternatives for other sections.
12564 Do not define this macro if you put all constants in the read-only
12568 rs6000_elf_select_rtx_section (mode
, x
, align
)
12569 enum machine_mode mode
;
12571 unsigned HOST_WIDE_INT align
;
12573 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x
, mode
))
12576 default_elf_select_rtx_section (mode
, x
, align
);
12579 /* A C statement or statements to switch to the appropriate
12580 section for output of DECL. DECL is either a `VAR_DECL' node
12581 or a constant of some sort. RELOC indicates whether forming
12582 the initial value of DECL requires link-time relocations. */
12585 rs6000_elf_select_section (decl
, reloc
, align
)
12588 unsigned HOST_WIDE_INT align
;
12590 default_elf_select_section_1 (decl
, reloc
, align
,
12591 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
12594 /* A C statement to build up a unique section name, expressed as a
12595 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
12596 RELOC indicates whether the initial value of EXP requires
12597 link-time relocations. If you do not define this macro, GCC will use
12598 the symbol name prefixed by `.' as the section name. Note - this
12599 macro can now be called for uninitialized data items as well as
12600 initialized data and functions. */
12603 rs6000_elf_unique_section (decl
, reloc
)
12607 default_unique_section_1 (decl
, reloc
,
12608 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
12612 /* If we are referencing a function that is static or is known to be
12613 in this file, make the SYMBOL_REF special. We can use this to indicate
12614 that we can branch to this function without emitting a no-op after the
12615 call. For real AIX calling sequences, we also replace the
12616 function name with the real name (1 or 2 leading .'s), rather than
12617 the function descriptor name. This saves a lot of overriding code
12618 to read the prefixes. */
12621 rs6000_elf_encode_section_info (decl
, first
)
12628 if (TREE_CODE (decl
) == FUNCTION_DECL
)
12630 rtx sym_ref
= XEXP (DECL_RTL (decl
), 0);
12631 if ((*targetm
.binds_local_p
) (decl
))
12632 SYMBOL_REF_FLAG (sym_ref
) = 1;
12634 if (DEFAULT_ABI
== ABI_AIX
)
12636 size_t len1
= (DEFAULT_ABI
== ABI_AIX
) ? 1 : 2;
12637 size_t len2
= strlen (XSTR (sym_ref
, 0));
12638 char *str
= alloca (len1
+ len2
+ 1);
12641 memcpy (str
+ len1
, XSTR (sym_ref
, 0), len2
+ 1);
12643 XSTR (sym_ref
, 0) = ggc_alloc_string (str
, len1
+ len2
);
12646 else if (rs6000_sdata
!= SDATA_NONE
12647 && DEFAULT_ABI
== ABI_V4
12648 && TREE_CODE (decl
) == VAR_DECL
)
12650 rtx sym_ref
= XEXP (DECL_RTL (decl
), 0);
12651 int size
= int_size_in_bytes (TREE_TYPE (decl
));
12652 tree section_name
= DECL_SECTION_NAME (decl
);
12653 const char *name
= (char *)0;
12656 if ((*targetm
.binds_local_p
) (decl
))
12657 SYMBOL_REF_FLAG (sym_ref
) = 1;
12661 if (TREE_CODE (section_name
) == STRING_CST
)
12663 name
= TREE_STRING_POINTER (section_name
);
12664 len
= TREE_STRING_LENGTH (section_name
);
12671 ? ((len
== sizeof (".sdata") - 1
12672 && strcmp (name
, ".sdata") == 0)
12673 || (len
== sizeof (".sdata2") - 1
12674 && strcmp (name
, ".sdata2") == 0)
12675 || (len
== sizeof (".sbss") - 1
12676 && strcmp (name
, ".sbss") == 0)
12677 || (len
== sizeof (".sbss2") - 1
12678 && strcmp (name
, ".sbss2") == 0)
12679 || (len
== sizeof (".PPC.EMB.sdata0") - 1
12680 && strcmp (name
, ".PPC.EMB.sdata0") == 0)
12681 || (len
== sizeof (".PPC.EMB.sbss0") - 1
12682 && strcmp (name
, ".PPC.EMB.sbss0") == 0))
12683 : (size
> 0 && size
<= g_switch_value
))
12685 size_t len
= strlen (XSTR (sym_ref
, 0));
12686 char *str
= alloca (len
+ 2);
12689 memcpy (str
+ 1, XSTR (sym_ref
, 0), len
+ 1);
12690 XSTR (sym_ref
, 0) = ggc_alloc_string (str
, len
+ 1);
12695 static const char *
12696 rs6000_elf_strip_name_encoding (str
)
12699 while (*str
== '*' || *str
== '@')
12705 rs6000_elf_in_small_data_p (decl
)
12708 if (rs6000_sdata
== SDATA_NONE
)
12711 if (TREE_CODE (decl
) == VAR_DECL
&& DECL_SECTION_NAME (decl
))
12713 const char *section
= TREE_STRING_POINTER (DECL_SECTION_NAME (decl
));
12714 if (strcmp (section
, ".sdata") == 0
12715 || strcmp (section
, ".sdata2") == 0
12716 || strcmp (section
, ".sbss") == 0)
12721 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (decl
));
12724 && size
<= g_switch_value
12725 && (rs6000_sdata
!= SDATA_DATA
|| TREE_PUBLIC (decl
)))
12732 #endif /* USING_ELFOS_H */
12735 /* Return a REG that occurs in ADDR with coefficient 1.
12736 ADDR can be effectively incremented by incrementing REG.
12738 r0 is special and we must not select it as an address
12739 register by this routine since our caller will try to
12740 increment the returned register via an "la" instruction. */
12743 find_addr_reg (addr
)
12746 while (GET_CODE (addr
) == PLUS
)
12748 if (GET_CODE (XEXP (addr
, 0)) == REG
12749 && REGNO (XEXP (addr
, 0)) != 0)
12750 addr
= XEXP (addr
, 0);
12751 else if (GET_CODE (XEXP (addr
, 1)) == REG
12752 && REGNO (XEXP (addr
, 1)) != 0)
12753 addr
= XEXP (addr
, 1);
12754 else if (CONSTANT_P (XEXP (addr
, 0)))
12755 addr
= XEXP (addr
, 1);
12756 else if (CONSTANT_P (XEXP (addr
, 1)))
12757 addr
= XEXP (addr
, 0);
12761 if (GET_CODE (addr
) == REG
&& REGNO (addr
) != 0)
12767 rs6000_fatal_bad_address (op
)
12770 fatal_insn ("bad address", op
);
12776 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
12777 reference and a constant. */
12780 symbolic_operand (op
)
12783 switch (GET_CODE (op
))
12790 return (GET_CODE (op
) == SYMBOL_REF
||
12791 (GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
12792 || GET_CODE (XEXP (op
, 0)) == LABEL_REF
)
12793 && GET_CODE (XEXP (op
, 1)) == CONST_INT
);
12800 #ifdef RS6000_LONG_BRANCH
12802 static tree stub_list
= 0;
12804 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
12805 procedure calls to the linked list. */
12808 add_compiler_stub (label_name
, function_name
, line_number
)
12810 tree function_name
;
12813 tree stub
= build_tree_list (function_name
, label_name
);
12814 TREE_TYPE (stub
) = build_int_2 (line_number
, 0);
12815 TREE_CHAIN (stub
) = stub_list
;
12819 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
12820 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
12821 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
12823 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
12824 handling procedure calls from the linked list and initializes the
12828 output_compiler_stub ()
12831 char label_buf
[256];
12835 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
12837 fprintf (asm_out_file
,
12838 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub
)));
12840 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
12841 if (write_symbols
== DBX_DEBUG
|| write_symbols
== XCOFF_DEBUG
)
12842 fprintf (asm_out_file
, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub
));
12843 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
12845 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
))[0] == '*')
12847 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
))+1);
12850 label_buf
[0] = '_';
12851 strcpy (label_buf
+1,
12852 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
)));
12855 strcpy (tmp_buf
, "lis r12,hi16(");
12856 strcat (tmp_buf
, label_buf
);
12857 strcat (tmp_buf
, ")\n\tori r12,r12,lo16(");
12858 strcat (tmp_buf
, label_buf
);
12859 strcat (tmp_buf
, ")\n\tmtctr r12\n\tbctr");
12860 output_asm_insn (tmp_buf
, 0);
12862 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
12863 if (write_symbols
== DBX_DEBUG
|| write_symbols
== XCOFF_DEBUG
)
12864 fprintf(asm_out_file
, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub
));
12865 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
12871 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
12872 already there or not. */
12875 no_previous_def (function_name
)
12876 tree function_name
;
12879 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
12880 if (function_name
== STUB_FUNCTION_NAME (stub
))
12885 /* GET_PREV_LABEL gets the label name from the previous definition of
12889 get_prev_label (function_name
)
12890 tree function_name
;
12893 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
12894 if (function_name
== STUB_FUNCTION_NAME (stub
))
12895 return STUB_LABEL_NAME (stub
);
12899 /* INSN is either a function call or a millicode call. It may have an
12900 unconditional jump in its delay slot.
12902 CALL_DEST is the routine we are calling. */
12905 output_call (insn
, call_dest
, operand_number
)
12908 int operand_number
;
12910 static char buf
[256];
12911 if (GET_CODE (call_dest
) == SYMBOL_REF
&& TARGET_LONG_BRANCH
&& !flag_pic
)
12914 tree funname
= get_identifier (XSTR (call_dest
, 0));
12916 if (no_previous_def (funname
))
12918 int line_number
= 0;
12919 rtx label_rtx
= gen_label_rtx ();
12920 char *label_buf
, temp_buf
[256];
12921 ASM_GENERATE_INTERNAL_LABEL (temp_buf
, "L",
12922 CODE_LABEL_NUMBER (label_rtx
));
12923 label_buf
= temp_buf
[0] == '*' ? temp_buf
+ 1 : temp_buf
;
12924 labelname
= get_identifier (label_buf
);
12925 for (; insn
&& GET_CODE (insn
) != NOTE
; insn
= PREV_INSN (insn
));
12927 line_number
= NOTE_LINE_NUMBER (insn
);
12928 add_compiler_stub (labelname
, funname
, line_number
);
12931 labelname
= get_prev_label (funname
);
12933 sprintf (buf
, "jbsr %%z%d,%.246s",
12934 operand_number
, IDENTIFIER_POINTER (labelname
));
12939 sprintf (buf
, "bl %%z%d", operand_number
);
12944 #endif /* RS6000_LONG_BRANCH */
12946 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
12948 const char *const symbol_ = (SYMBOL); \
12949 char *buffer_ = (BUF); \
12950 if (symbol_[0] == '"') \
12952 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
12954 else if (name_needs_quotes(symbol_)) \
12956 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
12960 sprintf(buffer_, "L%d$%s", (N), symbol_); \
12965 /* Generate PIC and indirect symbol stubs. */
12968 machopic_output_stub (file
, symb
, stub
)
12970 const char *symb
, *stub
;
12972 unsigned int length
;
12973 char *symbol_name
, *lazy_ptr_name
;
12974 char *local_label_0
;
12975 static int label
= 0;
12977 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
12978 symb
= (*targetm
.strip_name_encoding
) (symb
);
12982 length
= strlen (symb
);
12983 symbol_name
= alloca (length
+ 32);
12984 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name
, symb
, length
);
12986 lazy_ptr_name
= alloca (length
+ 32);
12987 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name
, symb
, length
);
12989 local_label_0
= alloca (length
+ 32);
12990 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0
, symb
, length
, 0);
12993 machopic_picsymbol_stub1_section ();
12995 machopic_symbol_stub1_section ();
12996 fprintf (file
, "\t.align 2\n");
12998 fprintf (file
, "%s:\n", stub
);
12999 fprintf (file
, "\t.indirect_symbol %s\n", symbol_name
);
13003 fprintf (file
, "\tmflr r0\n");
13004 fprintf (file
, "\tbcl 20,31,%s\n", local_label_0
);
13005 fprintf (file
, "%s:\n\tmflr r11\n", local_label_0
);
13006 fprintf (file
, "\taddis r11,r11,ha16(%s-%s)\n",
13007 lazy_ptr_name
, local_label_0
);
13008 fprintf (file
, "\tmtlr r0\n");
13009 fprintf (file
, "\tlwzu r12,lo16(%s-%s)(r11)\n",
13010 lazy_ptr_name
, local_label_0
);
13011 fprintf (file
, "\tmtctr r12\n");
13012 fprintf (file
, "\tbctr\n");
13015 fprintf (file
, "non-pure not supported\n");
13017 machopic_lazy_symbol_ptr_section ();
13018 fprintf (file
, "%s:\n", lazy_ptr_name
);
13019 fprintf (file
, "\t.indirect_symbol %s\n", symbol_name
);
13020 fprintf (file
, "\t.long dyld_stub_binding_helper\n");
13023 /* Legitimize PIC addresses. If the address is already
13024 position-independent, we return ORIG. Newly generated
13025 position-independent addresses go into a reg. This is REG if non
13026 zero, otherwise we allocate register(s) as necessary. */
13028 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
13031 rs6000_machopic_legitimize_pic_address (orig
, mode
, reg
)
13033 enum machine_mode mode
;
13038 if (reg
== NULL
&& ! reload_in_progress
&& ! reload_completed
)
13039 reg
= gen_reg_rtx (Pmode
);
13041 if (GET_CODE (orig
) == CONST
)
13043 if (GET_CODE (XEXP (orig
, 0)) == PLUS
13044 && XEXP (XEXP (orig
, 0), 0) == pic_offset_table_rtx
)
13047 if (GET_CODE (XEXP (orig
, 0)) == PLUS
)
13050 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig
, 0), 0),
13053 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig
, 0), 1),
13059 if (GET_CODE (offset
) == CONST_INT
)
13061 if (SMALL_INT (offset
))
13062 return plus_constant (base
, INTVAL (offset
));
13063 else if (! reload_in_progress
&& ! reload_completed
)
13064 offset
= force_reg (Pmode
, offset
);
13067 rtx mem
= force_const_mem (Pmode
, orig
);
13068 return machopic_legitimize_pic_address (mem
, Pmode
, reg
);
13071 return gen_rtx (PLUS
, Pmode
, base
, offset
);
13074 /* Fall back on generic machopic code. */
13075 return machopic_legitimize_pic_address (orig
, mode
, reg
);
13078 /* This is just a placeholder to make linking work without having to
13079 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
13080 ever needed for Darwin (not too likely!) this would have to get a
13081 real definition. */
13088 #endif /* TARGET_MACHO */
13091 static unsigned int
13092 rs6000_elf_section_type_flags (decl
, name
, reloc
)
13098 = default_section_type_flags_1 (decl
, name
, reloc
,
13099 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
13101 if (TARGET_RELOCATABLE
)
13102 flags
|= SECTION_WRITE
;
13107 /* Record an element in the table of global constructors. SYMBOL is
13108 a SYMBOL_REF of the function to be called; PRIORITY is a number
13109 between 0 and MAX_INIT_PRIORITY.
13111 This differs from default_named_section_asm_out_constructor in
13112 that we have special handling for -mrelocatable. */
13115 rs6000_elf_asm_out_constructor (symbol
, priority
)
13119 const char *section
= ".ctors";
13122 if (priority
!= DEFAULT_INIT_PRIORITY
)
13124 sprintf (buf
, ".ctors.%.5u",
13125 /* Invert the numbering so the linker puts us in the proper
13126 order; constructors are run from right to left, and the
13127 linker sorts in increasing order. */
13128 MAX_INIT_PRIORITY
- priority
);
13132 named_section_flags (section
, SECTION_WRITE
);
13133 assemble_align (POINTER_SIZE
);
13135 if (TARGET_RELOCATABLE
)
13137 fputs ("\t.long (", asm_out_file
);
13138 output_addr_const (asm_out_file
, symbol
);
13139 fputs (")@fixup\n", asm_out_file
);
13142 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
13146 rs6000_elf_asm_out_destructor (symbol
, priority
)
13150 const char *section
= ".dtors";
13153 if (priority
!= DEFAULT_INIT_PRIORITY
)
13155 sprintf (buf
, ".dtors.%.5u",
13156 /* Invert the numbering so the linker puts us in the proper
13157 order; constructors are run from right to left, and the
13158 linker sorts in increasing order. */
13159 MAX_INIT_PRIORITY
- priority
);
13163 named_section_flags (section
, SECTION_WRITE
);
13164 assemble_align (POINTER_SIZE
);
13166 if (TARGET_RELOCATABLE
)
13168 fputs ("\t.long (", asm_out_file
);
13169 output_addr_const (asm_out_file
, symbol
);
13170 fputs (")@fixup\n", asm_out_file
);
13173 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
13179 rs6000_xcoff_asm_globalize_label (stream
, name
)
13183 fputs (GLOBAL_ASM_OP
, stream
);
13184 RS6000_OUTPUT_BASENAME (stream
, name
);
13185 putc ('\n', stream
);
13189 rs6000_xcoff_asm_named_section (name
, flags
)
13191 unsigned int flags
;
13194 static const char * const suffix
[3] = { "PR", "RO", "RW" };
13196 if (flags
& SECTION_CODE
)
13198 else if (flags
& SECTION_WRITE
)
13203 fprintf (asm_out_file
, "\t.csect %s%s[%s],%u\n",
13204 (flags
& SECTION_CODE
) ? "." : "",
13205 name
, suffix
[smclass
], flags
& SECTION_ENTSIZE
);
13209 rs6000_xcoff_select_section (decl
, reloc
, align
)
13212 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
;
13214 if (decl_readonly_section_1 (decl
, reloc
, 1))
13216 if (TREE_PUBLIC (decl
))
13217 read_only_data_section ();
13219 read_only_private_data_section ();
13223 if (TREE_PUBLIC (decl
))
13226 private_data_section ();
13231 rs6000_xcoff_unique_section (decl
, reloc
)
13233 int reloc ATTRIBUTE_UNUSED
;
13237 /* Use select_section for private and uninitialized data. */
13238 if (!TREE_PUBLIC (decl
)
13239 || DECL_COMMON (decl
)
13240 || DECL_INITIAL (decl
) == NULL_TREE
13241 || DECL_INITIAL (decl
) == error_mark_node
13242 || (flag_zero_initialized_in_bss
13243 && initializer_zerop (DECL_INITIAL (decl
))))
13246 name
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
));
13247 name
= (*targetm
.strip_name_encoding
) (name
);
13248 DECL_SECTION_NAME (decl
) = build_string (strlen (name
), name
);
13251 /* Select section for constant in constant pool.
13253 On RS/6000, all constants are in the private read-only data area.
13254 However, if this is being placed in the TOC it must be output as a
13258 rs6000_xcoff_select_rtx_section (mode
, x
, align
)
13259 enum machine_mode mode
;
13261 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
;
13263 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x
, mode
))
13266 read_only_private_data_section ();
13269 /* Remove any trailing [DS] or the like from the symbol name. */
13271 static const char *
13272 rs6000_xcoff_strip_name_encoding (name
)
13278 len
= strlen (name
);
13279 if (name
[len
- 1] == ']')
13280 return ggc_alloc_string (name
, len
- 4);
13285 /* Section attributes. AIX is always PIC. */
13287 static unsigned int
13288 rs6000_xcoff_section_type_flags (decl
, name
, reloc
)
13293 unsigned int align
;
13294 unsigned int flags
= default_section_type_flags_1 (decl
, name
, reloc
, 1);
13296 /* Align to at least UNIT size. */
13297 if (flags
& SECTION_CODE
)
13298 align
= MIN_UNITS_PER_WORD
;
13300 /* Increase alignment of large objects if not already stricter. */
13301 align
= MAX ((DECL_ALIGN (decl
) / BITS_PER_UNIT
),
13302 int_size_in_bytes (TREE_TYPE (decl
)) > MIN_UNITS_PER_WORD
13303 ? UNITS_PER_FP_WORD
: MIN_UNITS_PER_WORD
);
13305 return flags
| (exact_log2 (align
) & SECTION_ENTSIZE
);
13308 #endif /* TARGET_XCOFF */
13310 /* Note that this is also used for PPC64 Linux. */
13313 rs6000_xcoff_encode_section_info (decl
, first
)
13315 int first ATTRIBUTE_UNUSED
;
13317 if (TREE_CODE (decl
) == FUNCTION_DECL
13318 && (*targetm
.binds_local_p
) (decl
))
13319 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl
), 0)) = 1;
13322 /* Cross-module name binding. For AIX and PPC64 Linux, which always are
13323 PIC, use private copy of flag_pic. Darwin does not support overriding
13324 functions at dynamic-link time. */
13327 rs6000_binds_local_p (decl
)
13330 return default_binds_local_p_1 (decl
,
13331 DEFAULT_ABI
== ABI_DARWIN
? 0 : flag_pic
|| rs6000_flag_pic
);
13334 /* Compute a (partial) cost for rtx X. Return true if the complete
13335 cost has been computed, and false if subexpressions should be
13336 scanned. In either case, *TOTAL contains the cost result. */
13339 rs6000_rtx_costs (x
, code
, outer_code
, total
)
13341 int code
, outer_code ATTRIBUTE_UNUSED
;
13346 /* On the RS/6000, if it is valid in the insn, it is free.
13347 So this always returns 0. */
13358 *total
= ((GET_CODE (XEXP (x
, 1)) == CONST_INT
13359 && ((unsigned HOST_WIDE_INT
) (INTVAL (XEXP (x
, 1))
13360 + 0x8000) >= 0x10000)
13361 && ((INTVAL (XEXP (x
, 1)) & 0xffff) != 0))
13362 ? COSTS_N_INSNS (2)
13363 : COSTS_N_INSNS (1));
13369 *total
= ((GET_CODE (XEXP (x
, 1)) == CONST_INT
13370 && (INTVAL (XEXP (x
, 1)) & (~ (HOST_WIDE_INT
) 0xffff)) != 0
13371 && ((INTVAL (XEXP (x
, 1)) & 0xffff) != 0))
13372 ? COSTS_N_INSNS (2)
13373 : COSTS_N_INSNS (1));
13379 *total
= COSTS_N_INSNS (2);
13382 switch (rs6000_cpu
)
13384 case PROCESSOR_RIOS1
:
13385 case PROCESSOR_PPC405
:
13386 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
13387 ? COSTS_N_INSNS (5)
13388 : (INTVAL (XEXP (x
, 1)) >= -256
13389 && INTVAL (XEXP (x
, 1)) <= 255)
13390 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
13393 case PROCESSOR_RS64A
:
13394 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
13395 ? GET_MODE (XEXP (x
, 1)) != DImode
13396 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
13397 : (INTVAL (XEXP (x
, 1)) >= -256
13398 && INTVAL (XEXP (x
, 1)) <= 255)
13399 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
13402 case PROCESSOR_RIOS2
:
13403 case PROCESSOR_MPCCORE
:
13404 case PROCESSOR_PPC604e
:
13405 *total
= COSTS_N_INSNS (2);
13408 case PROCESSOR_PPC601
:
13409 *total
= COSTS_N_INSNS (5);
13412 case PROCESSOR_PPC603
:
13413 case PROCESSOR_PPC7400
:
13414 case PROCESSOR_PPC750
:
13415 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
13416 ? COSTS_N_INSNS (5)
13417 : (INTVAL (XEXP (x
, 1)) >= -256
13418 && INTVAL (XEXP (x
, 1)) <= 255)
13419 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
13422 case PROCESSOR_PPC7450
:
13423 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
13424 ? COSTS_N_INSNS (4)
13425 : COSTS_N_INSNS (3));
13428 case PROCESSOR_PPC403
:
13429 case PROCESSOR_PPC604
:
13430 case PROCESSOR_PPC8540
:
13431 *total
= COSTS_N_INSNS (4);
13434 case PROCESSOR_PPC620
:
13435 case PROCESSOR_PPC630
:
13436 case PROCESSOR_POWER4
:
13437 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
13438 ? GET_MODE (XEXP (x
, 1)) != DImode
13439 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
13440 : (INTVAL (XEXP (x
, 1)) >= -256
13441 && INTVAL (XEXP (x
, 1)) <= 255)
13442 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
13451 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
13452 && exact_log2 (INTVAL (XEXP (x
, 1))) >= 0)
13454 *total
= COSTS_N_INSNS (2);
13461 switch (rs6000_cpu
)
13463 case PROCESSOR_RIOS1
:
13464 *total
= COSTS_N_INSNS (19);
13467 case PROCESSOR_RIOS2
:
13468 *total
= COSTS_N_INSNS (13);
13471 case PROCESSOR_RS64A
:
13472 *total
= (GET_MODE (XEXP (x
, 1)) != DImode
13473 ? COSTS_N_INSNS (65)
13474 : COSTS_N_INSNS (67));
13477 case PROCESSOR_MPCCORE
:
13478 *total
= COSTS_N_INSNS (6);
13481 case PROCESSOR_PPC403
:
13482 *total
= COSTS_N_INSNS (33);
13485 case PROCESSOR_PPC405
:
13486 *total
= COSTS_N_INSNS (35);
13489 case PROCESSOR_PPC601
:
13490 *total
= COSTS_N_INSNS (36);
13493 case PROCESSOR_PPC603
:
13494 *total
= COSTS_N_INSNS (37);
13497 case PROCESSOR_PPC604
:
13498 case PROCESSOR_PPC604e
:
13499 *total
= COSTS_N_INSNS (20);
13502 case PROCESSOR_PPC620
:
13503 case PROCESSOR_PPC630
:
13504 case PROCESSOR_POWER4
:
13505 *total
= (GET_MODE (XEXP (x
, 1)) != DImode
13506 ? COSTS_N_INSNS (21)
13507 : COSTS_N_INSNS (37));
13510 case PROCESSOR_PPC750
:
13511 case PROCESSOR_PPC8540
:
13512 case PROCESSOR_PPC7400
:
13513 *total
= COSTS_N_INSNS (19);
13516 case PROCESSOR_PPC7450
:
13517 *total
= COSTS_N_INSNS (23);
13525 *total
= COSTS_N_INSNS (4);
13529 /* MEM should be slightly more expensive than (plus (reg) (const)) */
13538 /* A C expression returning the cost of moving data from a register of class
13539 CLASS1 to one of CLASS2. */
13542 rs6000_register_move_cost (mode
, from
, to
)
13543 enum machine_mode mode
;
13544 enum reg_class from
, to
;
13546 /* Moves from/to GENERAL_REGS. */
13547 if (reg_classes_intersect_p (to
, GENERAL_REGS
)
13548 || reg_classes_intersect_p (from
, GENERAL_REGS
))
13550 if (! reg_classes_intersect_p (to
, GENERAL_REGS
))
13553 if (from
== FLOAT_REGS
|| from
== ALTIVEC_REGS
)
13554 return (rs6000_memory_move_cost (mode
, from
, 0)
13555 + rs6000_memory_move_cost (mode
, GENERAL_REGS
, 0));
13557 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift...*/
13558 else if (from
== CR_REGS
)
13562 /* A move will cost one instruction per GPR moved. */
13563 return 2 * HARD_REGNO_NREGS (0, mode
);
13566 /* Moving between two similar registers is just one instruction. */
13567 else if (reg_classes_intersect_p (to
, from
))
13568 return mode
== TFmode
? 4 : 2;
13570 /* Everything else has to go through GENERAL_REGS. */
13572 return (rs6000_register_move_cost (mode
, GENERAL_REGS
, to
)
13573 + rs6000_register_move_cost (mode
, from
, GENERAL_REGS
));
13576 /* A C expressions returning the cost of moving data of MODE from a register to
13580 rs6000_memory_move_cost (mode
, class, in
)
13581 enum machine_mode mode
;
13582 enum reg_class
class;
13583 int in ATTRIBUTE_UNUSED
;
13585 if (reg_classes_intersect_p (class, GENERAL_REGS
))
13586 return 4 * HARD_REGNO_NREGS (0, mode
);
13587 else if (reg_classes_intersect_p (class, FLOAT_REGS
))
13588 return 4 * HARD_REGNO_NREGS (32, mode
);
13589 else if (reg_classes_intersect_p (class, ALTIVEC_REGS
))
13590 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO
, mode
);
13592 return 4 + rs6000_register_move_cost (mode
, class, GENERAL_REGS
);
13595 /* Return true if TYPE is of type __ev64_opaque__. */
13598 is_ev64_opaque_type (type
)
13602 && TREE_CODE (type
) == VECTOR_TYPE
13603 && TYPE_NAME (type
)
13604 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
13605 && DECL_NAME (TYPE_NAME (type
))
13606 && strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type
))),
13607 "__ev64_opaque__") == 0);
13611 rs6000_dwarf_register_span (reg
)
13616 if (!TARGET_SPE
|| !SPE_VECTOR_MODE (GET_MODE (reg
)))
13619 regno
= REGNO (reg
);
13621 /* The duality of the SPE register size wreaks all kinds of havoc.
13622 This is a way of distinguishing r0 in 32-bits from r0 in
13625 gen_rtx_PARALLEL (VOIDmode
,
13627 gen_rtx_REG (SImode
, regno
),
13628 /* Who, where, what? 1200? This
13629 will get changed to a sane value
13630 when the SPE ABI finalizes. */
13631 gen_rtx_REG (SImode
, regno
+ 1200)));
13634 #include "gt-rs6000.h"