1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
54 #ifndef TARGET_NO_PROTOTYPE
55 #define TARGET_NO_PROTOTYPE 0
58 #define min(A,B) ((A) < (B) ? (A) : (B))
59 #define max(A,B) ((A) > (B) ? (A) : (B))
63 enum processor_type rs6000_cpu
;
64 struct rs6000_cpu_select rs6000_select
[3] =
66 /* switch name, tune arch */
67 { (const char *)0, "--with-cpu=", 1, 1 },
68 { (const char *)0, "-mcpu=", 1, 1 },
69 { (const char *)0, "-mtune=", 1, 0 },
72 /* Size of long double */
73 const char *rs6000_long_double_size_string
;
74 int rs6000_long_double_type_size
;
76 /* Whether -mabi=altivec has appeared */
77 int rs6000_altivec_abi
;
79 /* Whether VRSAVE instructions should be generated. */
80 int rs6000_altivec_vrsave
;
82 /* String from -mvrsave= option. */
83 const char *rs6000_altivec_vrsave_string
;
85 /* Nonzero if we want SPE ABI extensions. */
88 /* Whether isel instructions should be generated. */
91 /* Nonzero if we have FPRs. */
94 /* String from -misel=. */
95 const char *rs6000_isel_string
;
97 /* Set to nonzero once AIX common-mode calls have been defined. */
98 static GTY(()) int common_mode_defined
;
100 /* Save information from a "cmpxx" operation until the branch or scc is
102 rtx rs6000_compare_op0
, rs6000_compare_op1
;
103 int rs6000_compare_fp_p
;
105 /* Label number of label created for -mrelocatable, to call to so we can
106 get the address of the GOT section */
107 int rs6000_pic_labelno
;
110 /* Which abi to adhere to */
111 const char *rs6000_abi_name
= RS6000_ABI_NAME
;
113 /* Semantics of the small data area */
114 enum rs6000_sdata_type rs6000_sdata
= SDATA_DATA
;
116 /* Which small data model to use */
117 const char *rs6000_sdata_name
= (char *)0;
119 /* Counter for labels which are to be placed in .fixup. */
120 int fixuplabelno
= 0;
123 /* ABI enumeration available for subtarget to use. */
124 enum rs6000_abi rs6000_current_abi
;
126 /* ABI string from -mabi= option. */
127 const char *rs6000_abi_string
;
130 const char *rs6000_debug_name
;
131 int rs6000_debug_stack
; /* debug stack applications */
132 int rs6000_debug_arg
; /* debug argument handling */
134 const char *rs6000_traceback_name
;
136 traceback_default
= 0,
142 /* Flag to say the TOC is initialized */
144 char toc_label_name
[10];
146 /* Alias set for saves and restores from the rs6000 stack. */
147 static int rs6000_sr_alias_set
;
149 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
150 The only place that looks at this is rs6000_set_default_type_attributes;
151 everywhere else should rely on the presence or absence of a longcall
152 attribute on the function declaration. */
153 int rs6000_default_long_calls
;
154 const char *rs6000_longcall_switch
;
156 struct builtin_description
158 /* mask is not const because we're going to alter it below. This
159 nonsense will go away when we rewrite the -march infrastructure
160 to give us more target flag bits. */
162 const enum insn_code icode
;
163 const char *const name
;
164 const enum rs6000_builtins code
;
167 static bool rs6000_function_ok_for_sibcall
PARAMS ((tree
, tree
));
168 static int num_insns_constant_wide
PARAMS ((HOST_WIDE_INT
));
169 static void validate_condition_mode
170 PARAMS ((enum rtx_code
, enum machine_mode
));
171 static rtx rs6000_generate_compare
PARAMS ((enum rtx_code
));
172 static void rs6000_maybe_dead
PARAMS ((rtx
));
173 static void rs6000_emit_stack_tie
PARAMS ((void));
174 static void rs6000_frame_related
PARAMS ((rtx
, rtx
, HOST_WIDE_INT
, rtx
, rtx
));
175 static rtx spe_synthesize_frame_save
PARAMS ((rtx
));
176 static bool spe_func_has_64bit_regs_p
PARAMS ((void));
177 static void emit_frame_save
PARAMS ((rtx
, rtx
, enum machine_mode
,
178 unsigned int, int, int));
179 static rtx gen_frame_mem_offset
PARAMS ((enum machine_mode
, rtx
, int));
180 static void rs6000_emit_allocate_stack
PARAMS ((HOST_WIDE_INT
, int));
181 static unsigned rs6000_hash_constant
PARAMS ((rtx
));
182 static unsigned toc_hash_function
PARAMS ((const void *));
183 static int toc_hash_eq
PARAMS ((const void *, const void *));
184 static int constant_pool_expr_1
PARAMS ((rtx
, int *, int *));
185 static struct machine_function
* rs6000_init_machine_status
PARAMS ((void));
186 static bool rs6000_assemble_integer
PARAMS ((rtx
, unsigned int, int));
187 #ifdef HAVE_GAS_HIDDEN
188 static void rs6000_assemble_visibility
PARAMS ((tree
, int));
190 static int rs6000_ra_ever_killed
PARAMS ((void));
191 static tree rs6000_handle_longcall_attribute
PARAMS ((tree
*, tree
, tree
, int, bool *));
192 const struct attribute_spec rs6000_attribute_table
[];
193 static void rs6000_set_default_type_attributes
PARAMS ((tree
));
194 static void rs6000_output_function_prologue
PARAMS ((FILE *, HOST_WIDE_INT
));
195 static void rs6000_output_function_epilogue
PARAMS ((FILE *, HOST_WIDE_INT
));
196 static void rs6000_output_mi_thunk
PARAMS ((FILE *, tree
, HOST_WIDE_INT
,
197 HOST_WIDE_INT
, tree
));
198 static rtx rs6000_emit_set_long_const
PARAMS ((rtx
,
199 HOST_WIDE_INT
, HOST_WIDE_INT
));
201 static unsigned int rs6000_elf_section_type_flags
PARAMS ((tree
, const char *,
203 static void rs6000_elf_asm_out_constructor
PARAMS ((rtx
, int));
204 static void rs6000_elf_asm_out_destructor
PARAMS ((rtx
, int));
205 static void rs6000_elf_select_section
PARAMS ((tree
, int,
206 unsigned HOST_WIDE_INT
));
207 static void rs6000_elf_unique_section
PARAMS ((tree
, int));
208 static void rs6000_elf_select_rtx_section
PARAMS ((enum machine_mode
, rtx
,
209 unsigned HOST_WIDE_INT
));
210 static void rs6000_elf_encode_section_info
PARAMS ((tree
, int))
212 static const char *rs6000_elf_strip_name_encoding
PARAMS ((const char *));
213 static bool rs6000_elf_in_small_data_p
PARAMS ((tree
));
216 static void rs6000_xcoff_asm_globalize_label
PARAMS ((FILE *, const char *));
217 static void rs6000_xcoff_asm_named_section
PARAMS ((const char *, unsigned int));
218 static void rs6000_xcoff_select_section
PARAMS ((tree
, int,
219 unsigned HOST_WIDE_INT
));
220 static void rs6000_xcoff_unique_section
PARAMS ((tree
, int));
221 static void rs6000_xcoff_select_rtx_section
PARAMS ((enum machine_mode
, rtx
,
222 unsigned HOST_WIDE_INT
));
223 static const char * rs6000_xcoff_strip_name_encoding
PARAMS ((const char *));
224 static unsigned int rs6000_xcoff_section_type_flags
PARAMS ((tree
, const char *, int));
225 static void rs6000_xcoff_encode_section_info
PARAMS ((tree
, int))
229 static bool rs6000_binds_local_p
PARAMS ((tree
));
231 static int rs6000_use_dfa_pipeline_interface
PARAMS ((void));
232 static int rs6000_variable_issue
PARAMS ((FILE *, int, rtx
, int));
233 static bool rs6000_rtx_costs
PARAMS ((rtx
, int, int, int *));
234 static int rs6000_adjust_cost
PARAMS ((rtx
, rtx
, rtx
, int));
235 static int rs6000_adjust_priority
PARAMS ((rtx
, int));
236 static int rs6000_issue_rate
PARAMS ((void));
238 static void rs6000_init_builtins
PARAMS ((void));
239 static rtx rs6000_expand_unop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
240 static rtx rs6000_expand_binop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
241 static rtx rs6000_expand_ternop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
242 static rtx rs6000_expand_builtin
PARAMS ((tree
, rtx
, rtx
, enum machine_mode
, int));
243 static void altivec_init_builtins
PARAMS ((void));
244 static void rs6000_common_init_builtins
PARAMS ((void));
246 static void enable_mask_for_builtins
PARAMS ((struct builtin_description
*,
247 int, enum rs6000_builtins
,
248 enum rs6000_builtins
));
249 static void spe_init_builtins
PARAMS ((void));
250 static rtx spe_expand_builtin
PARAMS ((tree
, rtx
, bool *));
251 static rtx spe_expand_predicate_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
252 static rtx spe_expand_evsel_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
253 static int rs6000_emit_int_cmove
PARAMS ((rtx
, rtx
, rtx
, rtx
));
255 static rtx altivec_expand_builtin
PARAMS ((tree
, rtx
, bool *));
256 static rtx altivec_expand_ld_builtin
PARAMS ((tree
, rtx
, bool *));
257 static rtx altivec_expand_st_builtin
PARAMS ((tree
, rtx
, bool *));
258 static rtx altivec_expand_dst_builtin
PARAMS ((tree
, rtx
, bool *));
259 static rtx altivec_expand_abs_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
260 static rtx altivec_expand_predicate_builtin
PARAMS ((enum insn_code
, const char *, tree
, rtx
));
261 static rtx altivec_expand_stv_builtin
PARAMS ((enum insn_code
, tree
));
262 static void rs6000_parse_abi_options
PARAMS ((void));
263 static void rs6000_parse_vrsave_option
PARAMS ((void));
264 static void rs6000_parse_isel_option
PARAMS ((void));
265 static int first_altivec_reg_to_save
PARAMS ((void));
266 static unsigned int compute_vrsave_mask
PARAMS ((void));
267 static void is_altivec_return_reg
PARAMS ((rtx
, void *));
268 static rtx generate_set_vrsave
PARAMS ((rtx
, rs6000_stack_t
*, int));
269 static int easy_vector_constant
PARAMS ((rtx
));
270 static bool is_ev64_opaque_type
PARAMS ((tree
));
271 static rtx rs6000_dwarf_register_span
PARAMS ((rtx
));
273 /* Hash table stuff for keeping track of TOC entries. */
275 struct toc_hash_struct
GTY(())
277 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
278 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
280 enum machine_mode key_mode
;
284 static GTY ((param_is (struct toc_hash_struct
))) htab_t toc_hash_table
;
286 /* Default register names. */
287 char rs6000_reg_names
[][8] =
289 "0", "1", "2", "3", "4", "5", "6", "7",
290 "8", "9", "10", "11", "12", "13", "14", "15",
291 "16", "17", "18", "19", "20", "21", "22", "23",
292 "24", "25", "26", "27", "28", "29", "30", "31",
293 "0", "1", "2", "3", "4", "5", "6", "7",
294 "8", "9", "10", "11", "12", "13", "14", "15",
295 "16", "17", "18", "19", "20", "21", "22", "23",
296 "24", "25", "26", "27", "28", "29", "30", "31",
297 "mq", "lr", "ctr","ap",
298 "0", "1", "2", "3", "4", "5", "6", "7",
300 /* AltiVec registers. */
301 "0", "1", "2", "3", "4", "5", "6", "7",
302 "8", "9", "10", "11", "12", "13", "14", "15",
303 "16", "17", "18", "19", "20", "21", "22", "23",
304 "24", "25", "26", "27", "28", "29", "30", "31",
310 #ifdef TARGET_REGNAMES
311 static const char alt_reg_names
[][8] =
313 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
314 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
315 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
316 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
317 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
318 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
319 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
320 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
321 "mq", "lr", "ctr", "ap",
322 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
324 /* AltiVec registers. */
325 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
326 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
327 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
328 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
335 #ifndef MASK_STRICT_ALIGN
336 #define MASK_STRICT_ALIGN 0
338 #ifndef TARGET_PROFILE_KERNEL
339 #define TARGET_PROFILE_KERNEL 0
342 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
343 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
345 /* Initialize the GCC target structure. */
346 #undef TARGET_ATTRIBUTE_TABLE
347 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
348 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
349 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
351 #undef TARGET_ASM_ALIGNED_DI_OP
352 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
354 /* Default unaligned ops are only provided for ELF. Find the ops needed
355 for non-ELF systems. */
356 #ifndef OBJECT_FORMAT_ELF
358 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
360 #undef TARGET_ASM_UNALIGNED_HI_OP
361 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
362 #undef TARGET_ASM_UNALIGNED_SI_OP
363 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
364 #undef TARGET_ASM_UNALIGNED_DI_OP
365 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
368 #undef TARGET_ASM_UNALIGNED_HI_OP
369 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
370 #undef TARGET_ASM_UNALIGNED_SI_OP
371 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
375 /* This hook deals with fixups for relocatable code and DI-mode objects
377 #undef TARGET_ASM_INTEGER
378 #define TARGET_ASM_INTEGER rs6000_assemble_integer
380 #ifdef HAVE_GAS_HIDDEN
381 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
382 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
385 #undef TARGET_ASM_FUNCTION_PROLOGUE
386 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
387 #undef TARGET_ASM_FUNCTION_EPILOGUE
388 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
390 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
391 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
392 #undef TARGET_SCHED_VARIABLE_ISSUE
393 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
395 #undef TARGET_SCHED_ISSUE_RATE
396 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
397 #undef TARGET_SCHED_ADJUST_COST
398 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
399 #undef TARGET_SCHED_ADJUST_PRIORITY
400 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
402 #undef TARGET_INIT_BUILTINS
403 #define TARGET_INIT_BUILTINS rs6000_init_builtins
405 #undef TARGET_EXPAND_BUILTIN
406 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
409 #undef TARGET_BINDS_LOCAL_P
410 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
413 #undef TARGET_ASM_OUTPUT_MI_THUNK
414 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
416 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
417 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
419 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
420 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
422 #undef TARGET_RTX_COSTS
423 #define TARGET_RTX_COSTS rs6000_rtx_costs
424 #undef TARGET_ADDRESS_COST
425 #define TARGET_ADDRESS_COST hook_int_rtx_0
427 #undef TARGET_VECTOR_OPAQUE_P
428 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
430 #undef TARGET_DWARF_REGISTER_SPAN
431 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
433 struct gcc_target targetm
= TARGET_INITIALIZER
;
435 /* Override command line options. Mostly we process the processor
436 type and sometimes adjust other TARGET_ options. */
439 rs6000_override_options (default_cpu
)
440 const char *default_cpu
;
443 struct rs6000_cpu_select
*ptr
;
445 /* Simplify the entries below by making a mask for any POWER
446 variant and any PowerPC variant. */
448 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
449 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
450 | MASK_PPC_GFXOPT | MASK_POWERPC64)
451 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
455 const char *const name
; /* Canonical processor name. */
456 const enum processor_type processor
; /* Processor type enum value. */
457 const int target_enable
; /* Target flags to enable. */
458 const int target_disable
; /* Target flags to disable. */
459 } const processor_target_table
[]
460 = {{"common", PROCESSOR_COMMON
, MASK_NEW_MNEMONICS
,
461 POWER_MASKS
| POWERPC_MASKS
},
462 {"power", PROCESSOR_POWER
,
463 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
464 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
465 {"power2", PROCESSOR_POWER
,
466 MASK_POWER
| MASK_POWER2
| MASK_MULTIPLE
| MASK_STRING
,
467 POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
468 {"power3", PROCESSOR_PPC630
,
469 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
471 {"power4", PROCESSOR_POWER4
,
472 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
474 {"powerpc", PROCESSOR_POWERPC
,
475 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
476 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
477 {"powerpc64", PROCESSOR_POWERPC64
,
478 MASK_POWERPC
| MASK_POWERPC64
| MASK_NEW_MNEMONICS
,
479 POWER_MASKS
| POWERPC_OPT_MASKS
},
480 {"rios", PROCESSOR_RIOS1
,
481 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
482 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
483 {"rios1", PROCESSOR_RIOS1
,
484 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
485 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
486 {"rsc", PROCESSOR_PPC601
,
487 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
488 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
489 {"rsc1", PROCESSOR_PPC601
,
490 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
491 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
492 {"rios2", PROCESSOR_RIOS2
,
493 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
| MASK_POWER2
,
494 POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
495 {"rs64a", PROCESSOR_RS64A
,
496 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
497 POWER_MASKS
| POWERPC_OPT_MASKS
},
498 {"401", PROCESSOR_PPC403
,
499 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
500 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
501 {"403", PROCESSOR_PPC403
,
502 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
| MASK_STRICT_ALIGN
,
503 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
504 {"405", PROCESSOR_PPC405
,
505 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
506 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
507 {"405f", PROCESSOR_PPC405
,
508 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
509 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
510 {"505", PROCESSOR_MPCCORE
,
511 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
512 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
513 {"601", PROCESSOR_PPC601
,
514 MASK_POWER
| MASK_POWERPC
| MASK_NEW_MNEMONICS
| MASK_MULTIPLE
| MASK_STRING
,
515 MASK_POWER2
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
516 {"602", PROCESSOR_PPC603
,
517 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
518 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
519 {"603", PROCESSOR_PPC603
,
520 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
521 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
522 {"603e", PROCESSOR_PPC603
,
523 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
524 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
525 {"ec603e", PROCESSOR_PPC603
,
526 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
527 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
528 {"604", PROCESSOR_PPC604
,
529 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
530 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
531 {"604e", PROCESSOR_PPC604e
,
532 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
533 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
534 {"620", PROCESSOR_PPC620
,
535 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
537 {"630", PROCESSOR_PPC630
,
538 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
540 {"740", PROCESSOR_PPC750
,
541 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
542 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
543 {"750", PROCESSOR_PPC750
,
544 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
545 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
546 {"7400", PROCESSOR_PPC7400
,
547 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
548 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
549 {"7450", PROCESSOR_PPC7450
,
550 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
551 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
552 {"8540", PROCESSOR_PPC8540
,
553 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
554 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
555 {"801", PROCESSOR_MPCCORE
,
556 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
557 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
558 {"821", PROCESSOR_MPCCORE
,
559 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
560 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
561 {"823", PROCESSOR_MPCCORE
,
562 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
563 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
564 {"860", PROCESSOR_MPCCORE
,
565 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
566 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
}};
568 const size_t ptt_size
= ARRAY_SIZE (processor_target_table
);
570 /* Save current -mmultiple/-mno-multiple status. */
571 int multiple
= TARGET_MULTIPLE
;
572 /* Save current -mstring/-mno-string status. */
573 int string
= TARGET_STRING
;
575 /* Identify the processor type. */
576 rs6000_select
[0].string
= default_cpu
;
577 rs6000_cpu
= TARGET_POWERPC64
? PROCESSOR_DEFAULT64
: PROCESSOR_DEFAULT
;
579 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
581 ptr
= &rs6000_select
[i
];
582 if (ptr
->string
!= (char *)0 && ptr
->string
[0] != '\0')
584 for (j
= 0; j
< ptt_size
; j
++)
585 if (! strcmp (ptr
->string
, processor_target_table
[j
].name
))
588 rs6000_cpu
= processor_target_table
[j
].processor
;
592 target_flags
|= processor_target_table
[j
].target_enable
;
593 target_flags
&= ~processor_target_table
[j
].target_disable
;
599 error ("bad value (%s) for %s switch", ptr
->string
, ptr
->name
);
603 if (rs6000_cpu
== PROCESSOR_PPC8540
)
606 /* If we are optimizing big endian systems for space, use the load/store
607 multiple and string instructions. */
608 if (BYTES_BIG_ENDIAN
&& optimize_size
)
609 target_flags
|= MASK_MULTIPLE
| MASK_STRING
;
611 /* If -mmultiple or -mno-multiple was explicitly used, don't
612 override with the processor default */
613 if ((target_flags_explicit
& MASK_MULTIPLE
) != 0)
614 target_flags
= (target_flags
& ~MASK_MULTIPLE
) | multiple
;
616 /* If -mstring or -mno-string was explicitly used, don't override
617 with the processor default. */
618 if ((target_flags_explicit
& MASK_STRING
) != 0)
619 target_flags
= (target_flags
& ~MASK_STRING
) | string
;
621 /* Don't allow -mmultiple or -mstring on little endian systems
622 unless the cpu is a 750, because the hardware doesn't support the
623 instructions used in little endian mode, and causes an alignment
624 trap. The 750 does not cause an alignment trap (except when the
625 target is unaligned). */
627 if (!BYTES_BIG_ENDIAN
&& rs6000_cpu
!= PROCESSOR_PPC750
)
631 target_flags
&= ~MASK_MULTIPLE
;
632 if ((target_flags_explicit
& MASK_MULTIPLE
) != 0)
633 warning ("-mmultiple is not supported on little endian systems");
638 target_flags
&= ~MASK_STRING
;
639 if ((target_flags_explicit
& MASK_STRING
) != 0)
640 warning ("-mstring is not supported on little endian systems");
644 /* Set debug flags */
645 if (rs6000_debug_name
)
647 if (! strcmp (rs6000_debug_name
, "all"))
648 rs6000_debug_stack
= rs6000_debug_arg
= 1;
649 else if (! strcmp (rs6000_debug_name
, "stack"))
650 rs6000_debug_stack
= 1;
651 else if (! strcmp (rs6000_debug_name
, "arg"))
652 rs6000_debug_arg
= 1;
654 error ("unknown -mdebug-%s switch", rs6000_debug_name
);
657 if (rs6000_traceback_name
)
659 if (! strncmp (rs6000_traceback_name
, "full", 4))
660 rs6000_traceback
= traceback_full
;
661 else if (! strncmp (rs6000_traceback_name
, "part", 4))
662 rs6000_traceback
= traceback_part
;
663 else if (! strncmp (rs6000_traceback_name
, "no", 2))
664 rs6000_traceback
= traceback_none
;
666 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
667 rs6000_traceback_name
);
670 /* Set size of long double */
671 rs6000_long_double_type_size
= 64;
672 if (rs6000_long_double_size_string
)
675 int size
= strtol (rs6000_long_double_size_string
, &tail
, 10);
676 if (*tail
!= '\0' || (size
!= 64 && size
!= 128))
677 error ("Unknown switch -mlong-double-%s",
678 rs6000_long_double_size_string
);
680 rs6000_long_double_type_size
= size
;
683 /* Handle -mabi= options. */
684 rs6000_parse_abi_options ();
686 /* Handle -mvrsave= option. */
687 rs6000_parse_vrsave_option ();
689 /* Handle -misel= option. */
690 rs6000_parse_isel_option ();
692 #ifdef SUBTARGET_OVERRIDE_OPTIONS
693 SUBTARGET_OVERRIDE_OPTIONS
;
695 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
696 SUBSUBTARGET_OVERRIDE_OPTIONS
;
699 /* The e500 does not have string instructions, and we set
700 MASK_STRING above when optimizing for size. */
701 if (rs6000_cpu
== PROCESSOR_PPC8540
&& (target_flags
& MASK_STRING
) != 0)
702 target_flags
= target_flags
& ~MASK_STRING
;
704 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
705 using TARGET_OPTIONS to handle a toggle switch, but we're out of
706 bits in target_flags so TARGET_SWITCHES cannot be used.
707 Assumption here is that rs6000_longcall_switch points into the
708 text of the complete option, rather than being a copy, so we can
709 scan back for the presence or absence of the no- modifier. */
710 if (rs6000_longcall_switch
)
712 const char *base
= rs6000_longcall_switch
;
713 while (base
[-1] != 'm') base
--;
715 if (*rs6000_longcall_switch
!= '\0')
716 error ("invalid option `%s'", base
);
717 rs6000_default_long_calls
= (base
[0] != 'n');
720 #ifdef TARGET_REGNAMES
721 /* If the user desires alternate register names, copy in the
722 alternate names now. */
724 memcpy (rs6000_reg_names
, alt_reg_names
, sizeof (rs6000_reg_names
));
727 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
728 If -maix-struct-return or -msvr4-struct-return was explicitly
729 used, don't override with the ABI default. */
730 if ((target_flags_explicit
& MASK_AIX_STRUCT_RET
) == 0)
732 if (DEFAULT_ABI
== ABI_V4
&& !DRAFT_V4_STRUCT_RET
)
733 target_flags
= (target_flags
& ~MASK_AIX_STRUCT_RET
);
735 target_flags
|= MASK_AIX_STRUCT_RET
;
738 if (TARGET_LONG_DOUBLE_128
739 && (DEFAULT_ABI
== ABI_AIX
|| DEFAULT_ABI
== ABI_DARWIN
))
740 real_format_for_mode
[TFmode
- QFmode
] = &ibm_extended_format
;
742 /* Allocate an alias set for register saves & restores from stack. */
743 rs6000_sr_alias_set
= new_alias_set ();
746 ASM_GENERATE_INTERNAL_LABEL (toc_label_name
, "LCTOC", 1);
748 /* We can only guarantee the availability of DI pseudo-ops when
749 assembling for 64-bit targets. */
752 targetm
.asm_out
.aligned_op
.di
= NULL
;
753 targetm
.asm_out
.unaligned_op
.di
= NULL
;
756 /* Set maximum branch target alignment at two instructions, eight bytes. */
757 align_jumps_max_skip
= 8;
758 align_loops_max_skip
= 8;
760 /* Arrange to save and restore machine status around nested functions. */
761 init_machine_status
= rs6000_init_machine_status
;
764 /* Handle -misel= option. */
766 rs6000_parse_isel_option ()
768 if (rs6000_isel_string
== 0)
770 else if (! strcmp (rs6000_isel_string
, "yes"))
772 else if (! strcmp (rs6000_isel_string
, "no"))
775 error ("unknown -misel= option specified: '%s'",
779 /* Handle -mvrsave= options. */
781 rs6000_parse_vrsave_option ()
783 /* Generate VRSAVE instructions by default. */
784 if (rs6000_altivec_vrsave_string
== 0
785 || ! strcmp (rs6000_altivec_vrsave_string
, "yes"))
786 rs6000_altivec_vrsave
= 1;
787 else if (! strcmp (rs6000_altivec_vrsave_string
, "no"))
788 rs6000_altivec_vrsave
= 0;
790 error ("unknown -mvrsave= option specified: '%s'",
791 rs6000_altivec_vrsave_string
);
794 /* Handle -mabi= options. */
796 rs6000_parse_abi_options ()
798 if (rs6000_abi_string
== 0)
800 else if (! strcmp (rs6000_abi_string
, "altivec"))
801 rs6000_altivec_abi
= 1;
802 else if (! strcmp (rs6000_abi_string
, "no-altivec"))
803 rs6000_altivec_abi
= 0;
804 else if (! strcmp (rs6000_abi_string
, "spe"))
808 error ("not configured for ABI: '%s'", rs6000_abi_string
);
811 else if (! strcmp (rs6000_abi_string
, "no-spe"))
814 error ("unknown ABI specified: '%s'", rs6000_abi_string
);
818 optimization_options (level
, size
)
819 int level ATTRIBUTE_UNUSED
;
820 int size ATTRIBUTE_UNUSED
;
824 /* Do anything needed at the start of the asm file. */
827 rs6000_file_start (file
, default_cpu
)
829 const char *default_cpu
;
833 const char *start
= buffer
;
834 struct rs6000_cpu_select
*ptr
;
836 if (flag_verbose_asm
)
838 sprintf (buffer
, "\n%s rs6000/powerpc options:", ASM_COMMENT_START
);
839 rs6000_select
[0].string
= default_cpu
;
841 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
843 ptr
= &rs6000_select
[i
];
844 if (ptr
->string
!= (char *)0 && ptr
->string
[0] != '\0')
846 fprintf (file
, "%s %s%s", start
, ptr
->name
, ptr
->string
);
852 switch (rs6000_sdata
)
854 case SDATA_NONE
: fprintf (file
, "%s -msdata=none", start
); start
= ""; break;
855 case SDATA_DATA
: fprintf (file
, "%s -msdata=data", start
); start
= ""; break;
856 case SDATA_SYSV
: fprintf (file
, "%s -msdata=sysv", start
); start
= ""; break;
857 case SDATA_EABI
: fprintf (file
, "%s -msdata=eabi", start
); start
= ""; break;
860 if (rs6000_sdata
&& g_switch_value
)
862 fprintf (file
, "%s -G %d", start
, g_switch_value
);
872 /* Return nonzero if this function is known to have a null epilogue. */
877 if (reload_completed
)
879 rs6000_stack_t
*info
= rs6000_stack_info ();
881 if (info
->first_gp_reg_save
== 32
882 && info
->first_fp_reg_save
== 64
883 && info
->first_altivec_reg_save
== LAST_ALTIVEC_REGNO
+ 1
886 && info
->vrsave_mask
== 0
894 /* Returns 1 always. */
897 any_operand (op
, mode
)
898 rtx op ATTRIBUTE_UNUSED
;
899 enum machine_mode mode ATTRIBUTE_UNUSED
;
904 /* Returns 1 if op is the count register. */
906 count_register_operand (op
, mode
)
908 enum machine_mode mode ATTRIBUTE_UNUSED
;
910 if (GET_CODE (op
) != REG
)
913 if (REGNO (op
) == COUNT_REGISTER_REGNUM
)
916 if (REGNO (op
) > FIRST_PSEUDO_REGISTER
)
922 /* Returns 1 if op is an altivec register. */
924 altivec_register_operand (op
, mode
)
926 enum machine_mode mode ATTRIBUTE_UNUSED
;
929 return (register_operand (op
, mode
)
930 && (GET_CODE (op
) != REG
931 || REGNO (op
) > FIRST_PSEUDO_REGISTER
932 || ALTIVEC_REGNO_P (REGNO (op
))));
936 xer_operand (op
, mode
)
938 enum machine_mode mode ATTRIBUTE_UNUSED
;
940 if (GET_CODE (op
) != REG
)
943 if (XER_REGNO_P (REGNO (op
)))
949 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
950 by such constants completes more quickly. */
953 s8bit_cint_operand (op
, mode
)
955 enum machine_mode mode ATTRIBUTE_UNUSED
;
957 return ( GET_CODE (op
) == CONST_INT
958 && (INTVAL (op
) >= -128 && INTVAL (op
) <= 127));
961 /* Return 1 if OP is a constant that can fit in a D field. */
964 short_cint_operand (op
, mode
)
966 enum machine_mode mode ATTRIBUTE_UNUSED
;
968 return (GET_CODE (op
) == CONST_INT
969 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I'));
972 /* Similar for an unsigned D field. */
975 u_short_cint_operand (op
, mode
)
977 enum machine_mode mode ATTRIBUTE_UNUSED
;
979 return (GET_CODE (op
) == CONST_INT
980 && CONST_OK_FOR_LETTER_P (INTVAL (op
) & GET_MODE_MASK (mode
), 'K'));
983 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
986 non_short_cint_operand (op
, mode
)
988 enum machine_mode mode ATTRIBUTE_UNUSED
;
990 return (GET_CODE (op
) == CONST_INT
991 && (unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x8000) >= 0x10000);
994 /* Returns 1 if OP is a CONST_INT that is a positive value
995 and an exact power of 2. */
998 exact_log2_cint_operand (op
, mode
)
1000 enum machine_mode mode ATTRIBUTE_UNUSED
;
1002 return (GET_CODE (op
) == CONST_INT
1004 && exact_log2 (INTVAL (op
)) >= 0);
1007 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1011 gpc_reg_operand (op
, mode
)
1013 enum machine_mode mode
;
1015 return (register_operand (op
, mode
)
1016 && (GET_CODE (op
) != REG
1017 || (REGNO (op
) >= ARG_POINTER_REGNUM
1018 && !XER_REGNO_P (REGNO (op
)))
1019 || REGNO (op
) < MQ_REGNO
));
1022 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1026 cc_reg_operand (op
, mode
)
1028 enum machine_mode mode
;
1030 return (register_operand (op
, mode
)
1031 && (GET_CODE (op
) != REG
1032 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
1033 || CR_REGNO_P (REGNO (op
))));
1036 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1037 CR field that isn't CR0. */
1040 cc_reg_not_cr0_operand (op
, mode
)
1042 enum machine_mode mode
;
1044 return (register_operand (op
, mode
)
1045 && (GET_CODE (op
) != REG
1046 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
1047 || CR_REGNO_NOT_CR0_P (REGNO (op
))));
1050 /* Returns 1 if OP is either a constant integer valid for a D-field or
1051 a non-special register. If a register, it must be in the proper
1052 mode unless MODE is VOIDmode. */
1055 reg_or_short_operand (op
, mode
)
1057 enum machine_mode mode
;
1059 return short_cint_operand (op
, mode
) || gpc_reg_operand (op
, mode
);
1062 /* Similar, except check if the negation of the constant would be
1063 valid for a D-field. */
1066 reg_or_neg_short_operand (op
, mode
)
1068 enum machine_mode mode
;
1070 if (GET_CODE (op
) == CONST_INT
)
1071 return CONST_OK_FOR_LETTER_P (INTVAL (op
), 'P');
1073 return gpc_reg_operand (op
, mode
);
1076 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1077 a non-special register. If a register, it must be in the proper
1078 mode unless MODE is VOIDmode. */
1081 reg_or_aligned_short_operand (op
, mode
)
1083 enum machine_mode mode
;
1085 if (gpc_reg_operand (op
, mode
))
1087 else if (short_cint_operand (op
, mode
) && !(INTVAL (op
) & 3))
1094 /* Return 1 if the operand is either a register or an integer whose
1095 high-order 16 bits are zero. */
1098 reg_or_u_short_operand (op
, mode
)
1100 enum machine_mode mode
;
1102 return u_short_cint_operand (op
, mode
) || gpc_reg_operand (op
, mode
);
1105 /* Return 1 is the operand is either a non-special register or ANY
1106 constant integer. */
1109 reg_or_cint_operand (op
, mode
)
1111 enum machine_mode mode
;
1113 return (GET_CODE (op
) == CONST_INT
|| gpc_reg_operand (op
, mode
));
1116 /* Return 1 is the operand is either a non-special register or ANY
1117 32-bit signed constant integer. */
1120 reg_or_arith_cint_operand (op
, mode
)
1122 enum machine_mode mode
;
1124 return (gpc_reg_operand (op
, mode
)
1125 || (GET_CODE (op
) == CONST_INT
1126 #if HOST_BITS_PER_WIDE_INT != 32
1127 && ((unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x80000000)
1128 < (unsigned HOST_WIDE_INT
) 0x100000000ll
)
1133 /* Return 1 is the operand is either a non-special register or a 32-bit
1134 signed constant integer valid for 64-bit addition. */
1137 reg_or_add_cint64_operand (op
, mode
)
1139 enum machine_mode mode
;
1141 return (gpc_reg_operand (op
, mode
)
1142 || (GET_CODE (op
) == CONST_INT
1143 #if HOST_BITS_PER_WIDE_INT == 32
1144 && INTVAL (op
) < 0x7fff8000
1146 && ((unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x80008000)
1152 /* Return 1 is the operand is either a non-special register or a 32-bit
1153 signed constant integer valid for 64-bit subtraction. */
1156 reg_or_sub_cint64_operand (op
, mode
)
1158 enum machine_mode mode
;
1160 return (gpc_reg_operand (op
, mode
)
1161 || (GET_CODE (op
) == CONST_INT
1162 #if HOST_BITS_PER_WIDE_INT == 32
1163 && (- INTVAL (op
)) < 0x7fff8000
1165 && ((unsigned HOST_WIDE_INT
) ((- INTVAL (op
)) + 0x80008000)
1171 /* Return 1 is the operand is either a non-special register or ANY
1172 32-bit unsigned constant integer. */
1175 reg_or_logical_cint_operand (op
, mode
)
1177 enum machine_mode mode
;
1179 if (GET_CODE (op
) == CONST_INT
)
1181 if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
)
1183 if (GET_MODE_BITSIZE (mode
) <= 32)
1186 if (INTVAL (op
) < 0)
1190 return ((INTVAL (op
) & GET_MODE_MASK (mode
)
1191 & (~ (unsigned HOST_WIDE_INT
) 0xffffffff)) == 0);
1193 else if (GET_CODE (op
) == CONST_DOUBLE
)
1195 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
1199 return CONST_DOUBLE_HIGH (op
) == 0;
1202 return gpc_reg_operand (op
, mode
);
1205 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1208 got_operand (op
, mode
)
1210 enum machine_mode mode ATTRIBUTE_UNUSED
;
1212 return (GET_CODE (op
) == SYMBOL_REF
1213 || GET_CODE (op
) == CONST
1214 || GET_CODE (op
) == LABEL_REF
);
1217 /* Return 1 if the operand is a simple references that can be loaded via
1218 the GOT (labels involving addition aren't allowed). */
1221 got_no_const_operand (op
, mode
)
1223 enum machine_mode mode ATTRIBUTE_UNUSED
;
1225 return (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
);
1228 /* Return the number of instructions it takes to form a constant in an
1229 integer register. */
1232 num_insns_constant_wide (value
)
1233 HOST_WIDE_INT value
;
1235 /* signed constant loadable with {cal|addi} */
1236 if (CONST_OK_FOR_LETTER_P (value
, 'I'))
1239 /* constant loadable with {cau|addis} */
1240 else if (CONST_OK_FOR_LETTER_P (value
, 'L'))
1243 #if HOST_BITS_PER_WIDE_INT == 64
1244 else if (TARGET_POWERPC64
)
1246 HOST_WIDE_INT low
= ((value
& 0xffffffff) ^ 0x80000000) - 0x80000000;
1247 HOST_WIDE_INT high
= value
>> 31;
1249 if (high
== 0 || high
== -1)
1255 return num_insns_constant_wide (high
) + 1;
1257 return (num_insns_constant_wide (high
)
1258 + num_insns_constant_wide (low
) + 1);
1267 num_insns_constant (op
, mode
)
1269 enum machine_mode mode
;
1271 if (GET_CODE (op
) == CONST_INT
)
1273 #if HOST_BITS_PER_WIDE_INT == 64
1274 if ((INTVAL (op
) >> 31) != 0 && (INTVAL (op
) >> 31) != -1
1275 && mask64_operand (op
, mode
))
1279 return num_insns_constant_wide (INTVAL (op
));
1282 else if (GET_CODE (op
) == CONST_DOUBLE
&& mode
== SFmode
)
1287 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1288 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
1289 return num_insns_constant_wide ((HOST_WIDE_INT
) l
);
1292 else if (GET_CODE (op
) == CONST_DOUBLE
)
1298 int endian
= (WORDS_BIG_ENDIAN
== 0);
1300 if (mode
== VOIDmode
|| mode
== DImode
)
1302 high
= CONST_DOUBLE_HIGH (op
);
1303 low
= CONST_DOUBLE_LOW (op
);
1307 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1308 REAL_VALUE_TO_TARGET_DOUBLE (rv
, l
);
1310 low
= l
[1 - endian
];
1314 return (num_insns_constant_wide (low
)
1315 + num_insns_constant_wide (high
));
1319 if (high
== 0 && low
>= 0)
1320 return num_insns_constant_wide (low
);
1322 else if (high
== -1 && low
< 0)
1323 return num_insns_constant_wide (low
);
1325 else if (mask64_operand (op
, mode
))
1329 return num_insns_constant_wide (high
) + 1;
1332 return (num_insns_constant_wide (high
)
1333 + num_insns_constant_wide (low
) + 1);
1341 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1342 register with one instruction per word. We only do this if we can
1343 safely read CONST_DOUBLE_{LOW,HIGH}. */
1346 easy_fp_constant (op
, mode
)
1348 enum machine_mode mode
;
1350 if (GET_CODE (op
) != CONST_DOUBLE
1351 || GET_MODE (op
) != mode
1352 || (GET_MODE_CLASS (mode
) != MODE_FLOAT
&& mode
!= DImode
))
1355 /* Consider all constants with -msoft-float to be easy. */
1356 if ((TARGET_SOFT_FLOAT
|| !TARGET_FPRS
)
1360 /* If we are using V.4 style PIC, consider all constants to be hard. */
1361 if (flag_pic
&& DEFAULT_ABI
== ABI_V4
)
1364 #ifdef TARGET_RELOCATABLE
1365 /* Similarly if we are using -mrelocatable, consider all constants
1367 if (TARGET_RELOCATABLE
)
1376 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1377 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv
, k
);
1379 return (num_insns_constant_wide ((HOST_WIDE_INT
) k
[0]) == 1
1380 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[1]) == 1
1381 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[2]) == 1
1382 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[3]) == 1);
1385 else if (mode
== DFmode
)
1390 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1391 REAL_VALUE_TO_TARGET_DOUBLE (rv
, k
);
1393 return (num_insns_constant_wide ((HOST_WIDE_INT
) k
[0]) == 1
1394 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[1]) == 1);
1397 else if (mode
== SFmode
)
1402 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1403 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
1405 return num_insns_constant_wide (l
) == 1;
1408 else if (mode
== DImode
)
1409 return ((TARGET_POWERPC64
1410 && GET_CODE (op
) == CONST_DOUBLE
&& CONST_DOUBLE_LOW (op
) == 0)
1411 || (num_insns_constant (op
, DImode
) <= 2));
1413 else if (mode
== SImode
)
1419 /* Return 1 if the operand is a CONST_INT and can be put into a
1420 register with one instruction. */
1423 easy_vector_constant (op
)
1429 if (GET_CODE (op
) != CONST_VECTOR
)
1432 units
= CONST_VECTOR_NUNITS (op
);
1434 /* We can generate 0 easily. Look for that. */
1435 for (i
= 0; i
< units
; ++i
)
1437 elt
= CONST_VECTOR_ELT (op
, i
);
1439 /* We could probably simplify this by just checking for equality
1440 with CONST0_RTX for the current mode, but let's be safe
1443 switch (GET_CODE (elt
))
1446 if (INTVAL (elt
) != 0)
1450 if (CONST_DOUBLE_LOW (elt
) != 0 || CONST_DOUBLE_HIGH (elt
) != 0)
1458 /* We could probably generate a few other constants trivially, but
1459 gcc doesn't generate them yet. FIXME later. */
1463 /* Return 1 if the operand is the constant 0. This works for scalars
1464 as well as vectors. */
1466 zero_constant (op
, mode
)
1468 enum machine_mode mode
;
1470 return op
== CONST0_RTX (mode
);
1473 /* Return 1 if the operand is 0.0. */
1475 zero_fp_constant (op
, mode
)
1477 enum machine_mode mode
;
1479 return GET_MODE_CLASS (mode
) == MODE_FLOAT
&& op
== CONST0_RTX (mode
);
1482 /* Return 1 if the operand is in volatile memory. Note that during
1483 the RTL generation phase, memory_operand does not return TRUE for
1484 volatile memory references. So this function allows us to
1485 recognize volatile references where its safe. */
1488 volatile_mem_operand (op
, mode
)
1490 enum machine_mode mode
;
1492 if (GET_CODE (op
) != MEM
)
1495 if (!MEM_VOLATILE_P (op
))
1498 if (mode
!= GET_MODE (op
))
1501 if (reload_completed
)
1502 return memory_operand (op
, mode
);
1504 if (reload_in_progress
)
1505 return strict_memory_address_p (mode
, XEXP (op
, 0));
1507 return memory_address_p (mode
, XEXP (op
, 0));
1510 /* Return 1 if the operand is an offsettable memory operand. */
1513 offsettable_mem_operand (op
, mode
)
1515 enum machine_mode mode
;
1517 return ((GET_CODE (op
) == MEM
)
1518 && offsettable_address_p (reload_completed
|| reload_in_progress
,
1519 mode
, XEXP (op
, 0)));
1522 /* Return 1 if the operand is either an easy FP constant (see above) or
1526 mem_or_easy_const_operand (op
, mode
)
1528 enum machine_mode mode
;
1530 return memory_operand (op
, mode
) || easy_fp_constant (op
, mode
);
1533 /* Return 1 if the operand is either a non-special register or an item
1534 that can be used as the operand of a `mode' add insn. */
1537 add_operand (op
, mode
)
1539 enum machine_mode mode
;
1541 if (GET_CODE (op
) == CONST_INT
)
1542 return (CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I')
1543 || CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'));
1545 return gpc_reg_operand (op
, mode
);
1548 /* Return 1 if OP is a constant but not a valid add_operand. */
1551 non_add_cint_operand (op
, mode
)
1553 enum machine_mode mode ATTRIBUTE_UNUSED
;
1555 return (GET_CODE (op
) == CONST_INT
1556 && !CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I')
1557 && !CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'));
1560 /* Return 1 if the operand is a non-special register or a constant that
1561 can be used as the operand of an OR or XOR insn on the RS/6000. */
1564 logical_operand (op
, mode
)
1566 enum machine_mode mode
;
1568 HOST_WIDE_INT opl
, oph
;
1570 if (gpc_reg_operand (op
, mode
))
1573 if (GET_CODE (op
) == CONST_INT
)
1575 opl
= INTVAL (op
) & GET_MODE_MASK (mode
);
1577 #if HOST_BITS_PER_WIDE_INT <= 32
1578 if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
&& opl
< 0)
1582 else if (GET_CODE (op
) == CONST_DOUBLE
)
1584 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1587 opl
= CONST_DOUBLE_LOW (op
);
1588 oph
= CONST_DOUBLE_HIGH (op
);
1595 return ((opl
& ~ (unsigned HOST_WIDE_INT
) 0xffff) == 0
1596 || (opl
& ~ (unsigned HOST_WIDE_INT
) 0xffff0000) == 0);
1599 /* Return 1 if C is a constant that is not a logical operand (as
1600 above), but could be split into one. */
1603 non_logical_cint_operand (op
, mode
)
1605 enum machine_mode mode
;
1607 return ((GET_CODE (op
) == CONST_INT
|| GET_CODE (op
) == CONST_DOUBLE
)
1608 && ! logical_operand (op
, mode
)
1609 && reg_or_logical_cint_operand (op
, mode
));
1612 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1613 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1614 Reject all ones and all zeros, since these should have been optimized
1615 away and confuse the making of MB and ME. */
1618 mask_operand (op
, mode
)
1620 enum machine_mode mode ATTRIBUTE_UNUSED
;
1622 HOST_WIDE_INT c
, lsb
;
1624 if (GET_CODE (op
) != CONST_INT
)
1629 /* Fail in 64-bit mode if the mask wraps around because the upper
1630 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1631 if (TARGET_POWERPC64
&& (c
& 0x80000001) == 0x80000001)
1634 /* We don't change the number of transitions by inverting,
1635 so make sure we start with the LS bit zero. */
1639 /* Reject all zeros or all ones. */
1643 /* Find the first transition. */
1646 /* Invert to look for a second transition. */
1649 /* Erase first transition. */
1652 /* Find the second transition (if any). */
1655 /* Match if all the bits above are 1's (or c is zero). */
1659 /* Return 1 for the PowerPC64 rlwinm corner case. */
1662 mask_operand_wrap (op
, mode
)
1664 enum machine_mode mode ATTRIBUTE_UNUSED
;
1666 HOST_WIDE_INT c
, lsb
;
1668 if (GET_CODE (op
) != CONST_INT
)
1673 if ((c
& 0x80000001) != 0x80000001)
1687 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1688 It is if there are no more than one 1->0 or 0->1 transitions.
1689 Reject all zeros, since zero should have been optimized away and
1690 confuses the making of MB and ME. */
1693 mask64_operand (op
, mode
)
1695 enum machine_mode mode ATTRIBUTE_UNUSED
;
1697 if (GET_CODE (op
) == CONST_INT
)
1699 HOST_WIDE_INT c
, lsb
;
1703 /* Reject all zeros. */
1707 /* We don't change the number of transitions by inverting,
1708 so make sure we start with the LS bit zero. */
1712 /* Find the transition, and check that all bits above are 1's. */
1715 /* Match if all the bits above are 1's (or c is zero). */
1721 /* Like mask64_operand, but allow up to three transitions. This
1722 predicate is used by insn patterns that generate two rldicl or
1723 rldicr machine insns. */
1726 mask64_2_operand (op
, mode
)
1728 enum machine_mode mode ATTRIBUTE_UNUSED
;
1730 if (GET_CODE (op
) == CONST_INT
)
1732 HOST_WIDE_INT c
, lsb
;
1736 /* Disallow all zeros. */
1740 /* We don't change the number of transitions by inverting,
1741 so make sure we start with the LS bit zero. */
1745 /* Find the first transition. */
1748 /* Invert to look for a second transition. */
1751 /* Erase first transition. */
1754 /* Find the second transition. */
1757 /* Invert to look for a third transition. */
1760 /* Erase second transition. */
1763 /* Find the third transition (if any). */
1766 /* Match if all the bits above are 1's (or c is zero). */
1772 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
1773 implement ANDing by the mask IN. */
1775 build_mask64_2_operands (in
, out
)
1779 #if HOST_BITS_PER_WIDE_INT >= 64
1780 unsigned HOST_WIDE_INT c
, lsb
, m1
, m2
;
1783 if (GET_CODE (in
) != CONST_INT
)
1789 /* Assume c initially something like 0x00fff000000fffff. The idea
1790 is to rotate the word so that the middle ^^^^^^ group of zeros
1791 is at the MS end and can be cleared with an rldicl mask. We then
1792 rotate back and clear off the MS ^^ group of zeros with a
1794 c
= ~c
; /* c == 0xff000ffffff00000 */
1795 lsb
= c
& -c
; /* lsb == 0x0000000000100000 */
1796 m1
= -lsb
; /* m1 == 0xfffffffffff00000 */
1797 c
= ~c
; /* c == 0x00fff000000fffff */
1798 c
&= -lsb
; /* c == 0x00fff00000000000 */
1799 lsb
= c
& -c
; /* lsb == 0x0000100000000000 */
1800 c
= ~c
; /* c == 0xff000fffffffffff */
1801 c
&= -lsb
; /* c == 0xff00000000000000 */
1803 while ((lsb
>>= 1) != 0)
1804 shift
++; /* shift == 44 on exit from loop */
1805 m1
<<= 64 - shift
; /* m1 == 0xffffff0000000000 */
1806 m1
= ~m1
; /* m1 == 0x000000ffffffffff */
1807 m2
= ~c
; /* m2 == 0x00ffffffffffffff */
1811 /* Assume c initially something like 0xff000f0000000000. The idea
1812 is to rotate the word so that the ^^^ middle group of zeros
1813 is at the LS end and can be cleared with an rldicr mask. We then
1814 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
1816 lsb
= c
& -c
; /* lsb == 0x0000010000000000 */
1817 m2
= -lsb
; /* m2 == 0xffffff0000000000 */
1818 c
= ~c
; /* c == 0x00fff0ffffffffff */
1819 c
&= -lsb
; /* c == 0x00fff00000000000 */
1820 lsb
= c
& -c
; /* lsb == 0x0000100000000000 */
1821 c
= ~c
; /* c == 0xff000fffffffffff */
1822 c
&= -lsb
; /* c == 0xff00000000000000 */
1824 while ((lsb
>>= 1) != 0)
1825 shift
++; /* shift == 44 on exit from loop */
1826 m1
= ~c
; /* m1 == 0x00ffffffffffffff */
1827 m1
>>= shift
; /* m1 == 0x0000000000000fff */
1828 m1
= ~m1
; /* m1 == 0xfffffffffffff000 */
1831 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
1832 masks will be all 1's. We are guaranteed more than one transition. */
1833 out
[0] = GEN_INT (64 - shift
);
1834 out
[1] = GEN_INT (m1
);
1835 out
[2] = GEN_INT (shift
);
1836 out
[3] = GEN_INT (m2
);
1844 /* Return 1 if the operand is either a non-special register or a constant
1845 that can be used as the operand of a PowerPC64 logical AND insn. */
1848 and64_operand (op
, mode
)
1850 enum machine_mode mode
;
1852 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
1853 return (gpc_reg_operand (op
, mode
) || mask64_operand (op
, mode
));
1855 return (logical_operand (op
, mode
) || mask64_operand (op
, mode
));
1858 /* Like the above, but also match constants that can be implemented
1859 with two rldicl or rldicr insns. */
1862 and64_2_operand (op
, mode
)
1864 enum machine_mode mode
;
1866 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
1867 return gpc_reg_operand (op
, mode
) || mask64_2_operand (op
, mode
);
1869 return logical_operand (op
, mode
) || mask64_2_operand (op
, mode
);
1872 /* Return 1 if the operand is either a non-special register or a
1873 constant that can be used as the operand of an RS/6000 logical AND insn. */
1876 and_operand (op
, mode
)
1878 enum machine_mode mode
;
1880 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
1881 return (gpc_reg_operand (op
, mode
) || mask_operand (op
, mode
));
1883 return (logical_operand (op
, mode
) || mask_operand (op
, mode
));
1886 /* Return 1 if the operand is a general register or memory operand. */
1889 reg_or_mem_operand (op
, mode
)
1891 enum machine_mode mode
;
1893 return (gpc_reg_operand (op
, mode
)
1894 || memory_operand (op
, mode
)
1895 || volatile_mem_operand (op
, mode
));
1898 /* Return 1 if the operand is a general register or memory operand without
1899 pre_inc or pre_dec which produces invalid form of PowerPC lwa
1903 lwa_operand (op
, mode
)
1905 enum machine_mode mode
;
1909 if (reload_completed
&& GET_CODE (inner
) == SUBREG
)
1910 inner
= SUBREG_REG (inner
);
1912 return gpc_reg_operand (inner
, mode
)
1913 || (memory_operand (inner
, mode
)
1914 && GET_CODE (XEXP (inner
, 0)) != PRE_INC
1915 && GET_CODE (XEXP (inner
, 0)) != PRE_DEC
1916 && (GET_CODE (XEXP (inner
, 0)) != PLUS
1917 || GET_CODE (XEXP (XEXP (inner
, 0), 1)) != CONST_INT
1918 || INTVAL (XEXP (XEXP (inner
, 0), 1)) % 4 == 0));
1921 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
1924 symbol_ref_operand (op
, mode
)
1926 enum machine_mode mode
;
1928 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1931 return (GET_CODE (op
) == SYMBOL_REF
);
1934 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1935 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
1938 call_operand (op
, mode
)
1940 enum machine_mode mode
;
1942 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1945 return (GET_CODE (op
) == SYMBOL_REF
1946 || (GET_CODE (op
) == REG
1947 && (REGNO (op
) == LINK_REGISTER_REGNUM
1948 || REGNO (op
) == COUNT_REGISTER_REGNUM
1949 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
)));
1952 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1953 this file and the function is not weakly defined. */
1956 current_file_function_operand (op
, mode
)
1958 enum machine_mode mode ATTRIBUTE_UNUSED
;
1960 return (GET_CODE (op
) == SYMBOL_REF
1961 && (SYMBOL_REF_FLAG (op
)
1962 || (op
== XEXP (DECL_RTL (current_function_decl
), 0)
1963 && ! DECL_WEAK (current_function_decl
))));
1966 /* Return 1 if this operand is a valid input for a move insn. */
1969 input_operand (op
, mode
)
1971 enum machine_mode mode
;
1973 /* Memory is always valid. */
1974 if (memory_operand (op
, mode
))
1977 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
1978 if (GET_CODE (op
) == CONSTANT_P_RTX
)
1981 /* For floating-point, easy constants are valid. */
1982 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
1984 && easy_fp_constant (op
, mode
))
1987 /* Allow any integer constant. */
1988 if (GET_MODE_CLASS (mode
) == MODE_INT
1989 && (GET_CODE (op
) == CONST_INT
1990 || GET_CODE (op
) == CONST_DOUBLE
))
1993 /* For floating-point or multi-word mode, the only remaining valid type
1995 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
1996 || GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
1997 return register_operand (op
, mode
);
1999 /* The only cases left are integral modes one word or smaller (we
2000 do not get called for MODE_CC values). These can be in any
2002 if (register_operand (op
, mode
))
2005 /* A SYMBOL_REF referring to the TOC is valid. */
2006 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op
))
2009 /* A constant pool expression (relative to the TOC) is valid */
2010 if (TOC_RELATIVE_EXPR_P (op
))
2013 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2015 if (DEFAULT_ABI
== ABI_V4
2016 && (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == CONST
)
2017 && small_data_operand (op
, Pmode
))
2023 /* Return 1 for an operand in small memory on V.4/eabi. */
2026 small_data_operand (op
, mode
)
2027 rtx op ATTRIBUTE_UNUSED
;
2028 enum machine_mode mode ATTRIBUTE_UNUSED
;
2033 if (rs6000_sdata
== SDATA_NONE
|| rs6000_sdata
== SDATA_DATA
)
2036 if (DEFAULT_ABI
!= ABI_V4
)
2039 if (GET_CODE (op
) == SYMBOL_REF
)
2042 else if (GET_CODE (op
) != CONST
2043 || GET_CODE (XEXP (op
, 0)) != PLUS
2044 || GET_CODE (XEXP (XEXP (op
, 0), 0)) != SYMBOL_REF
2045 || GET_CODE (XEXP (XEXP (op
, 0), 1)) != CONST_INT
)
2050 rtx sum
= XEXP (op
, 0);
2051 HOST_WIDE_INT summand
;
2053 /* We have to be careful here, because it is the referenced address
2054 that must be 32k from _SDA_BASE_, not just the symbol. */
2055 summand
= INTVAL (XEXP (sum
, 1));
2056 if (summand
< 0 || summand
> g_switch_value
)
2059 sym_ref
= XEXP (sum
, 0);
2062 if (*XSTR (sym_ref
, 0) != '@')
2073 constant_pool_expr_1 (op
, have_sym
, have_toc
)
2078 switch (GET_CODE(op
))
2081 if (CONSTANT_POOL_ADDRESS_P (op
))
2083 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op
), Pmode
))
2091 else if (! strcmp (XSTR (op
, 0), toc_label_name
))
2100 return (constant_pool_expr_1 (XEXP (op
, 0), have_sym
, have_toc
)
2101 && constant_pool_expr_1 (XEXP (op
, 1), have_sym
, have_toc
));
2103 return constant_pool_expr_1 (XEXP (op
, 0), have_sym
, have_toc
);
2112 constant_pool_expr_p (op
)
2117 return constant_pool_expr_1 (op
, &have_sym
, &have_toc
) && have_sym
;
2121 toc_relative_expr_p (op
)
2126 return constant_pool_expr_1 (op
, &have_sym
, &have_toc
) && have_toc
;
2129 /* Try machine-dependent ways of modifying an illegitimate address
2130 to be legitimate. If we find one, return the new, valid address.
2131 This is used from only one place: `memory_address' in explow.c.
2133 OLDX is the address as it was before break_out_memory_refs was
2134 called. In some cases it is useful to look at this to decide what
2137 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2139 It is always safe for this function to do nothing. It exists to
2140 recognize opportunities to optimize the output.
2142 On RS/6000, first check for the sum of a register with a constant
2143 integer that is out of range. If so, generate code to add the
2144 constant with the low-order 16 bits masked to the register and force
2145 this result into another register (this can be done with `cau').
2146 Then generate an address of REG+(CONST&0xffff), allowing for the
2147 possibility of bit 16 being a one.
2149 Then check for the sum of a register and something not constant, try to
2150 load the other things into a register and return the sum. */
2152 rs6000_legitimize_address (x
, oldx
, mode
)
2154 rtx oldx ATTRIBUTE_UNUSED
;
2155 enum machine_mode mode
;
2157 if (GET_CODE (x
) == PLUS
2158 && GET_CODE (XEXP (x
, 0)) == REG
2159 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2160 && (unsigned HOST_WIDE_INT
) (INTVAL (XEXP (x
, 1)) + 0x8000) >= 0x10000)
2162 HOST_WIDE_INT high_int
, low_int
;
2164 low_int
= ((INTVAL (XEXP (x
, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2165 high_int
= INTVAL (XEXP (x
, 1)) - low_int
;
2166 sum
= force_operand (gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
2167 GEN_INT (high_int
)), 0);
2168 return gen_rtx_PLUS (Pmode
, sum
, GEN_INT (low_int
));
2170 else if (GET_CODE (x
) == PLUS
2171 && GET_CODE (XEXP (x
, 0)) == REG
2172 && GET_CODE (XEXP (x
, 1)) != CONST_INT
2173 && GET_MODE_NUNITS (mode
) == 1
2174 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
)
2176 || (mode
!= DFmode
&& mode
!= TFmode
))
2177 && (TARGET_POWERPC64
|| mode
!= DImode
)
2180 return gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
2181 force_reg (Pmode
, force_operand (XEXP (x
, 1), 0)));
2183 else if (ALTIVEC_VECTOR_MODE (mode
))
2187 /* Make sure both operands are registers. */
2188 if (GET_CODE (x
) == PLUS
)
2189 return gen_rtx_PLUS (Pmode
, force_reg (Pmode
, XEXP (x
, 0)),
2190 force_reg (Pmode
, XEXP (x
, 1)));
2192 reg
= force_reg (Pmode
, x
);
2195 else if (SPE_VECTOR_MODE (mode
))
2197 /* We accept [reg + reg] and [reg + OFFSET]. */
2199 if (GET_CODE (x
) == PLUS
)
2201 rtx op1
= XEXP (x
, 0);
2202 rtx op2
= XEXP (x
, 1);
2204 op1
= force_reg (Pmode
, op1
);
2206 if (GET_CODE (op2
) != REG
2207 && (GET_CODE (op2
) != CONST_INT
2208 || !SPE_CONST_OFFSET_OK (INTVAL (op2
))))
2209 op2
= force_reg (Pmode
, op2
);
2211 return gen_rtx_PLUS (Pmode
, op1
, op2
);
2214 return force_reg (Pmode
, x
);
2220 && GET_CODE (x
) != CONST_INT
2221 && GET_CODE (x
) != CONST_DOUBLE
2223 && GET_MODE_NUNITS (mode
) == 1
2224 && (GET_MODE_BITSIZE (mode
) <= 32
2225 || ((TARGET_HARD_FLOAT
&& TARGET_FPRS
) && mode
== DFmode
)))
2227 rtx reg
= gen_reg_rtx (Pmode
);
2228 emit_insn (gen_elf_high (reg
, (x
)));
2229 return gen_rtx_LO_SUM (Pmode
, reg
, (x
));
2231 else if (TARGET_MACHO
&& TARGET_32BIT
&& TARGET_NO_TOC
2234 && ! MACHO_DYNAMIC_NO_PIC_P
2236 && GET_CODE (x
) != CONST_INT
2237 && GET_CODE (x
) != CONST_DOUBLE
2239 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
) || mode
!= DFmode
)
2243 rtx reg
= gen_reg_rtx (Pmode
);
2244 emit_insn (gen_macho_high (reg
, (x
)));
2245 return gen_rtx_LO_SUM (Pmode
, reg
, (x
));
2248 && CONSTANT_POOL_EXPR_P (x
)
2249 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x
), Pmode
))
2251 return create_TOC_reference (x
);
2257 /* The convention appears to be to define this wherever it is used.
2258 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2259 is now used here. */
2260 #ifndef REG_MODE_OK_FOR_BASE_P
2261 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2264 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2265 replace the input X, or the original X if no replacement is called for.
2266 The output parameter *WIN is 1 if the calling macro should goto WIN,
2269 For RS/6000, we wish to handle large displacements off a base
2270 register by splitting the addend across an addiu/addis and the mem insn.
2271 This cuts number of extra insns needed from 3 to 1.
2273 On Darwin, we use this to generate code for floating point constants.
2274 A movsf_low is generated so we wind up with 2 instructions rather than 3.
2275 The Darwin code is inside #if TARGET_MACHO because only then is
2276 machopic_function_base_name() defined. */
2278 rs6000_legitimize_reload_address (x
, mode
, opnum
, type
, ind_levels
, win
)
2280 enum machine_mode mode
;
2283 int ind_levels ATTRIBUTE_UNUSED
;
2286 /* We must recognize output that we have already generated ourselves. */
2287 if (GET_CODE (x
) == PLUS
2288 && GET_CODE (XEXP (x
, 0)) == PLUS
2289 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == REG
2290 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
2291 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2293 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2294 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
2295 opnum
, (enum reload_type
)type
);
2301 if (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
2302 && GET_CODE (x
) == LO_SUM
2303 && GET_CODE (XEXP (x
, 0)) == PLUS
2304 && XEXP (XEXP (x
, 0), 0) == pic_offset_table_rtx
2305 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == HIGH
2306 && GET_CODE (XEXP (XEXP (XEXP (x
, 0), 1), 0)) == CONST
2307 && XEXP (XEXP (XEXP (x
, 0), 1), 0) == XEXP (x
, 1)
2308 && GET_CODE (XEXP (XEXP (x
, 1), 0)) == MINUS
2309 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 0)) == SYMBOL_REF
2310 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 1)) == SYMBOL_REF
)
2312 /* Result of previous invocation of this function on Darwin
2313 floating point constant. */
2314 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2315 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
2316 opnum
, (enum reload_type
)type
);
2321 if (GET_CODE (x
) == PLUS
2322 && GET_CODE (XEXP (x
, 0)) == REG
2323 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
2324 && REG_MODE_OK_FOR_BASE_P (XEXP (x
, 0), mode
)
2325 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2326 && !SPE_VECTOR_MODE (mode
)
2327 && !ALTIVEC_VECTOR_MODE (mode
))
2329 HOST_WIDE_INT val
= INTVAL (XEXP (x
, 1));
2330 HOST_WIDE_INT low
= ((val
& 0xffff) ^ 0x8000) - 0x8000;
2332 = (((val
- low
) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2334 /* Check for 32-bit overflow. */
2335 if (high
+ low
!= val
)
2341 /* Reload the high part into a base reg; leave the low part
2342 in the mem directly. */
2344 x
= gen_rtx_PLUS (GET_MODE (x
),
2345 gen_rtx_PLUS (GET_MODE (x
), XEXP (x
, 0),
2349 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2350 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
2351 opnum
, (enum reload_type
)type
);
2356 if (GET_CODE (x
) == SYMBOL_REF
2357 && DEFAULT_ABI
== ABI_DARWIN
2358 && !ALTIVEC_VECTOR_MODE (mode
)
2361 /* Darwin load of floating point constant. */
2362 rtx offset
= gen_rtx (CONST
, Pmode
,
2363 gen_rtx (MINUS
, Pmode
, x
,
2364 gen_rtx (SYMBOL_REF
, Pmode
,
2365 machopic_function_base_name ())));
2366 x
= gen_rtx (LO_SUM
, GET_MODE (x
),
2367 gen_rtx (PLUS
, Pmode
, pic_offset_table_rtx
,
2368 gen_rtx (HIGH
, Pmode
, offset
)), offset
);
2369 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2370 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
2371 opnum
, (enum reload_type
)type
);
2375 if (GET_CODE (x
) == SYMBOL_REF
2376 && DEFAULT_ABI
== ABI_DARWIN
2377 && !ALTIVEC_VECTOR_MODE (mode
)
2378 && MACHO_DYNAMIC_NO_PIC_P
)
2380 /* Darwin load of floating point constant. */
2381 x
= gen_rtx (LO_SUM
, GET_MODE (x
),
2382 gen_rtx (HIGH
, Pmode
, x
), x
);
2383 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2384 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
2385 opnum
, (enum reload_type
)type
);
2391 && CONSTANT_POOL_EXPR_P (x
)
2392 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x
), mode
))
2394 (x
) = create_TOC_reference (x
);
2402 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2403 that is a valid memory address for an instruction.
2404 The MODE argument is the machine mode for the MEM expression
2405 that wants to use this address.
2407 On the RS/6000, there are four valid address: a SYMBOL_REF that
2408 refers to a constant pool entry of an address (or the sum of it
2409 plus a constant), a short (16-bit signed) constant plus a register,
2410 the sum of two registers, or a register indirect, possibly with an
2411 auto-increment. For DFmode and DImode with a constant plus register,
2412 we must ensure that both words are addressable or PowerPC64 with offset
2415 For modes spanning multiple registers (DFmode in 32-bit GPRs,
2416 32-bit DImode, TImode), indexed addressing cannot be used because
2417 adjacent memory cells are accessed by adding word-sized offsets
2418 during assembly output. */
2420 rs6000_legitimate_address (mode
, x
, reg_ok_strict
)
2421 enum machine_mode mode
;
2425 if (LEGITIMATE_INDIRECT_ADDRESS_P (x
, reg_ok_strict
))
2427 if ((GET_CODE (x
) == PRE_INC
|| GET_CODE (x
) == PRE_DEC
)
2428 && !ALTIVEC_VECTOR_MODE (mode
)
2429 && !SPE_VECTOR_MODE (mode
)
2431 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x
, 0), reg_ok_strict
))
2433 if (LEGITIMATE_SMALL_DATA_P (mode
, x
))
2435 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x
))
2437 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2439 && GET_CODE (x
) == PLUS
2440 && GET_CODE (XEXP (x
, 0)) == REG
2441 && XEXP (x
, 0) == virtual_stack_vars_rtx
2442 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2444 if (LEGITIMATE_OFFSET_ADDRESS_P (mode
, x
, reg_ok_strict
))
2447 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
)
2449 || (mode
!= DFmode
&& mode
!= TFmode
))
2450 && (TARGET_POWERPC64
|| mode
!= DImode
)
2451 && LEGITIMATE_INDEXED_ADDRESS_P (x
, reg_ok_strict
))
2453 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode
, x
, reg_ok_strict
))
2458 /* Try to output insns to set TARGET equal to the constant C if it can
2459 be done in less than N insns. Do all computations in MODE.
2460 Returns the place where the output has been placed if it can be
2461 done and the insns have been emitted. If it would take more than N
2462 insns, zero is returned and no insns and emitted. */
2465 rs6000_emit_set_const (dest
, mode
, source
, n
)
2467 enum machine_mode mode
;
2468 int n ATTRIBUTE_UNUSED
;
2470 rtx result
, insn
, set
;
2471 HOST_WIDE_INT c0
, c1
;
2473 if (mode
== QImode
|| mode
== HImode
)
2476 dest
= gen_reg_rtx (mode
);
2477 emit_insn (gen_rtx_SET (VOIDmode
, dest
, source
));
2480 else if (mode
== SImode
)
2482 result
= no_new_pseudos
? dest
: gen_reg_rtx (SImode
);
2484 emit_insn (gen_rtx_SET (VOIDmode
, result
,
2485 GEN_INT (INTVAL (source
)
2486 & (~ (HOST_WIDE_INT
) 0xffff))));
2487 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
2488 gen_rtx_IOR (SImode
, result
,
2489 GEN_INT (INTVAL (source
) & 0xffff))));
2492 else if (mode
== DImode
)
2494 if (GET_CODE (source
) == CONST_INT
)
2496 c0
= INTVAL (source
);
2499 else if (GET_CODE (source
) == CONST_DOUBLE
)
2501 #if HOST_BITS_PER_WIDE_INT >= 64
2502 c0
= CONST_DOUBLE_LOW (source
);
2505 c0
= CONST_DOUBLE_LOW (source
);
2506 c1
= CONST_DOUBLE_HIGH (source
);
2512 result
= rs6000_emit_set_long_const (dest
, c0
, c1
);
2517 insn
= get_last_insn ();
2518 set
= single_set (insn
);
2519 if (! CONSTANT_P (SET_SRC (set
)))
2520 set_unique_reg_note (insn
, REG_EQUAL
, source
);
2525 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2526 fall back to a straight forward decomposition. We do this to avoid
2527 exponential run times encountered when looking for longer sequences
2528 with rs6000_emit_set_const. */
2530 rs6000_emit_set_long_const (dest
, c1
, c2
)
2532 HOST_WIDE_INT c1
, c2
;
2534 if (!TARGET_POWERPC64
)
2536 rtx operand1
, operand2
;
2538 operand1
= operand_subword_force (dest
, WORDS_BIG_ENDIAN
== 0,
2540 operand2
= operand_subword_force (dest
, WORDS_BIG_ENDIAN
!= 0,
2542 emit_move_insn (operand1
, GEN_INT (c1
));
2543 emit_move_insn (operand2
, GEN_INT (c2
));
2547 HOST_WIDE_INT ud1
, ud2
, ud3
, ud4
;
2550 ud2
= (c1
& 0xffff0000) >> 16;
2551 #if HOST_BITS_PER_WIDE_INT >= 64
2555 ud4
= (c2
& 0xffff0000) >> 16;
2557 if ((ud4
== 0xffff && ud3
== 0xffff && ud2
== 0xffff && (ud1
& 0x8000))
2558 || (ud4
== 0 && ud3
== 0 && ud2
== 0 && ! (ud1
& 0x8000)))
2561 emit_move_insn (dest
, GEN_INT (((ud1
^ 0x8000) - 0x8000)));
2563 emit_move_insn (dest
, GEN_INT (ud1
));
2566 else if ((ud4
== 0xffff && ud3
== 0xffff && (ud2
& 0x8000))
2567 || (ud4
== 0 && ud3
== 0 && ! (ud2
& 0x8000)))
2570 emit_move_insn (dest
, GEN_INT (((ud2
<< 16) ^ 0x80000000)
2573 emit_move_insn (dest
, GEN_INT (ud2
<< 16));
2575 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
2577 else if ((ud4
== 0xffff && (ud3
& 0x8000))
2578 || (ud4
== 0 && ! (ud3
& 0x8000)))
2581 emit_move_insn (dest
, GEN_INT (((ud3
<< 16) ^ 0x80000000)
2584 emit_move_insn (dest
, GEN_INT (ud3
<< 16));
2587 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud2
)));
2588 emit_move_insn (dest
, gen_rtx_ASHIFT (DImode
, dest
, GEN_INT (16)));
2590 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
2595 emit_move_insn (dest
, GEN_INT (((ud4
<< 16) ^ 0x80000000)
2598 emit_move_insn (dest
, GEN_INT (ud4
<< 16));
2601 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud3
)));
2603 emit_move_insn (dest
, gen_rtx_ASHIFT (DImode
, dest
, GEN_INT (32)));
2605 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
,
2606 GEN_INT (ud2
<< 16)));
2608 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
2614 /* Emit a move from SOURCE to DEST in mode MODE. */
2616 rs6000_emit_move (dest
, source
, mode
)
2619 enum machine_mode mode
;
2623 operands
[1] = source
;
2625 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
2626 if (GET_CODE (operands
[1]) == CONST_DOUBLE
2627 && ! FLOAT_MODE_P (mode
)
2628 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
2630 /* FIXME. This should never happen. */
2631 /* Since it seems that it does, do the safe thing and convert
2633 operands
[1] = gen_int_mode (CONST_DOUBLE_LOW (operands
[1]), mode
);
2635 if (GET_CODE (operands
[1]) == CONST_DOUBLE
2636 && ! FLOAT_MODE_P (mode
)
2637 && ((CONST_DOUBLE_HIGH (operands
[1]) == 0
2638 && CONST_DOUBLE_LOW (operands
[1]) >= 0)
2639 || (CONST_DOUBLE_HIGH (operands
[1]) == -1
2640 && CONST_DOUBLE_LOW (operands
[1]) < 0)))
2643 /* Check if GCC is setting up a block move that will end up using FP
2644 registers as temporaries. We must make sure this is acceptable. */
2645 if (GET_CODE (operands
[0]) == MEM
2646 && GET_CODE (operands
[1]) == MEM
2648 && (SLOW_UNALIGNED_ACCESS (DImode
, MEM_ALIGN (operands
[0]))
2649 || SLOW_UNALIGNED_ACCESS (DImode
, MEM_ALIGN (operands
[1])))
2650 && ! (SLOW_UNALIGNED_ACCESS (SImode
, (MEM_ALIGN (operands
[0]) > 32
2651 ? 32 : MEM_ALIGN (operands
[0])))
2652 || SLOW_UNALIGNED_ACCESS (SImode
, (MEM_ALIGN (operands
[1]) > 32
2654 : MEM_ALIGN (operands
[1]))))
2655 && ! MEM_VOLATILE_P (operands
[0])
2656 && ! MEM_VOLATILE_P (operands
[1]))
2658 emit_move_insn (adjust_address (operands
[0], SImode
, 0),
2659 adjust_address (operands
[1], SImode
, 0));
2660 emit_move_insn (adjust_address (operands
[0], SImode
, 4),
2661 adjust_address (operands
[1], SImode
, 4));
2665 if (!no_new_pseudos
)
2667 if (GET_CODE (operands
[1]) == MEM
&& optimize
> 0
2668 && (mode
== QImode
|| mode
== HImode
|| mode
== SImode
)
2669 && GET_MODE_SIZE (mode
) < GET_MODE_SIZE (word_mode
))
2671 rtx reg
= gen_reg_rtx (word_mode
);
2673 emit_insn (gen_rtx_SET (word_mode
, reg
,
2674 gen_rtx_ZERO_EXTEND (word_mode
,
2676 operands
[1] = gen_lowpart (mode
, reg
);
2678 if (GET_CODE (operands
[0]) != REG
)
2679 operands
[1] = force_reg (mode
, operands
[1]);
2682 if (mode
== SFmode
&& ! TARGET_POWERPC
2683 && TARGET_HARD_FLOAT
&& TARGET_FPRS
2684 && GET_CODE (operands
[0]) == MEM
)
2688 if (reload_in_progress
|| reload_completed
)
2689 regnum
= true_regnum (operands
[1]);
2690 else if (GET_CODE (operands
[1]) == REG
)
2691 regnum
= REGNO (operands
[1]);
2695 /* If operands[1] is a register, on POWER it may have
2696 double-precision data in it, so truncate it to single
2698 if (FP_REGNO_P (regnum
) || regnum
>= FIRST_PSEUDO_REGISTER
)
2701 newreg
= (no_new_pseudos
? operands
[1] : gen_reg_rtx (mode
));
2702 emit_insn (gen_aux_truncdfsf2 (newreg
, operands
[1]));
2703 operands
[1] = newreg
;
2707 /* Handle the case where reload calls us with an invalid address;
2708 and the case of CONSTANT_P_RTX. */
2709 if (!ALTIVEC_VECTOR_MODE (mode
)
2710 && (! general_operand (operands
[1], mode
)
2711 || ! nonimmediate_operand (operands
[0], mode
)
2712 || GET_CODE (operands
[1]) == CONSTANT_P_RTX
))
2714 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2718 /* FIXME: In the long term, this switch statement should go away
2719 and be replaced by a sequence of tests based on things like
2725 if (CONSTANT_P (operands
[1])
2726 && GET_CODE (operands
[1]) != CONST_INT
)
2727 operands
[1] = force_const_mem (mode
, operands
[1]);
2733 if (CONSTANT_P (operands
[1])
2734 && ! easy_fp_constant (operands
[1], mode
))
2735 operands
[1] = force_const_mem (mode
, operands
[1]);
2746 if (CONSTANT_P (operands
[1])
2747 && !easy_vector_constant (operands
[1]))
2748 operands
[1] = force_const_mem (mode
, operands
[1]);
2753 /* Use default pattern for address of ELF small data */
2756 && DEFAULT_ABI
== ABI_V4
2757 && (GET_CODE (operands
[1]) == SYMBOL_REF
2758 || GET_CODE (operands
[1]) == CONST
)
2759 && small_data_operand (operands
[1], mode
))
2761 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2765 if (DEFAULT_ABI
== ABI_V4
2766 && mode
== Pmode
&& mode
== SImode
2767 && flag_pic
== 1 && got_operand (operands
[1], mode
))
2769 emit_insn (gen_movsi_got (operands
[0], operands
[1]));
2773 if ((TARGET_ELF
|| DEFAULT_ABI
== ABI_DARWIN
)
2777 && CONSTANT_P (operands
[1])
2778 && GET_CODE (operands
[1]) != HIGH
2779 && GET_CODE (operands
[1]) != CONST_INT
)
2781 rtx target
= (no_new_pseudos
? operands
[0] : gen_reg_rtx (mode
));
2783 /* If this is a function address on -mcall-aixdesc,
2784 convert it to the address of the descriptor. */
2785 if (DEFAULT_ABI
== ABI_AIX
2786 && GET_CODE (operands
[1]) == SYMBOL_REF
2787 && XSTR (operands
[1], 0)[0] == '.')
2789 const char *name
= XSTR (operands
[1], 0);
2791 while (*name
== '.')
2793 new_ref
= gen_rtx_SYMBOL_REF (Pmode
, name
);
2794 CONSTANT_POOL_ADDRESS_P (new_ref
)
2795 = CONSTANT_POOL_ADDRESS_P (operands
[1]);
2796 SYMBOL_REF_FLAG (new_ref
) = SYMBOL_REF_FLAG (operands
[1]);
2797 SYMBOL_REF_USED (new_ref
) = SYMBOL_REF_USED (operands
[1]);
2798 operands
[1] = new_ref
;
2801 if (DEFAULT_ABI
== ABI_DARWIN
)
2804 if (MACHO_DYNAMIC_NO_PIC_P
)
2806 /* Take care of any required data indirection. */
2807 operands
[1] = rs6000_machopic_legitimize_pic_address (
2808 operands
[1], mode
, operands
[0]);
2809 if (operands
[0] != operands
[1])
2810 emit_insn (gen_rtx_SET (VOIDmode
,
2811 operands
[0], operands
[1]));
2815 emit_insn (gen_macho_high (target
, operands
[1]));
2816 emit_insn (gen_macho_low (operands
[0], target
, operands
[1]));
2820 emit_insn (gen_elf_high (target
, operands
[1]));
2821 emit_insn (gen_elf_low (operands
[0], target
, operands
[1]));
2825 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2826 and we have put it in the TOC, we just need to make a TOC-relative
2829 && GET_CODE (operands
[1]) == SYMBOL_REF
2830 && CONSTANT_POOL_EXPR_P (operands
[1])
2831 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands
[1]),
2832 get_pool_mode (operands
[1])))
2834 operands
[1] = create_TOC_reference (operands
[1]);
2836 else if (mode
== Pmode
2837 && CONSTANT_P (operands
[1])
2838 && ((GET_CODE (operands
[1]) != CONST_INT
2839 && ! easy_fp_constant (operands
[1], mode
))
2840 || (GET_CODE (operands
[1]) == CONST_INT
2841 && num_insns_constant (operands
[1], mode
) > 2)
2842 || (GET_CODE (operands
[0]) == REG
2843 && FP_REGNO_P (REGNO (operands
[0]))))
2844 && GET_CODE (operands
[1]) != HIGH
2845 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands
[1])
2846 && ! TOC_RELATIVE_EXPR_P (operands
[1]))
2848 /* Emit a USE operation so that the constant isn't deleted if
2849 expensive optimizations are turned on because nobody
2850 references it. This should only be done for operands that
2851 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2852 This should not be done for operands that contain LABEL_REFs.
2853 For now, we just handle the obvious case. */
2854 if (GET_CODE (operands
[1]) != LABEL_REF
)
2855 emit_insn (gen_rtx_USE (VOIDmode
, operands
[1]));
2858 /* Darwin uses a special PIC legitimizer. */
2859 if (DEFAULT_ABI
== ABI_DARWIN
&& MACHOPIC_INDIRECT
)
2862 rs6000_machopic_legitimize_pic_address (operands
[1], mode
,
2864 if (operands
[0] != operands
[1])
2865 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2870 /* If we are to limit the number of things we put in the TOC and
2871 this is a symbol plus a constant we can add in one insn,
2872 just put the symbol in the TOC and add the constant. Don't do
2873 this if reload is in progress. */
2874 if (GET_CODE (operands
[1]) == CONST
2875 && TARGET_NO_SUM_IN_TOC
&& ! reload_in_progress
2876 && GET_CODE (XEXP (operands
[1], 0)) == PLUS
2877 && add_operand (XEXP (XEXP (operands
[1], 0), 1), mode
)
2878 && (GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == LABEL_REF
2879 || GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == SYMBOL_REF
)
2880 && ! side_effects_p (operands
[0]))
2883 force_const_mem (mode
, XEXP (XEXP (operands
[1], 0), 0));
2884 rtx other
= XEXP (XEXP (operands
[1], 0), 1);
2886 sym
= force_reg (mode
, sym
);
2888 emit_insn (gen_addsi3 (operands
[0], sym
, other
));
2890 emit_insn (gen_adddi3 (operands
[0], sym
, other
));
2894 operands
[1] = force_const_mem (mode
, operands
[1]);
2897 && CONSTANT_POOL_EXPR_P (XEXP (operands
[1], 0))
2898 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
2899 get_pool_constant (XEXP (operands
[1], 0)),
2900 get_pool_mode (XEXP (operands
[1], 0))))
2903 = gen_rtx_MEM (mode
,
2904 create_TOC_reference (XEXP (operands
[1], 0)));
2905 set_mem_alias_set (operands
[1], get_TOC_alias_set ());
2906 RTX_UNCHANGING_P (operands
[1]) = 1;
2912 if (GET_CODE (operands
[0]) == MEM
2913 && GET_CODE (XEXP (operands
[0], 0)) != REG
2914 && ! reload_in_progress
)
2916 = replace_equiv_address (operands
[0],
2917 copy_addr_to_reg (XEXP (operands
[0], 0)));
2919 if (GET_CODE (operands
[1]) == MEM
2920 && GET_CODE (XEXP (operands
[1], 0)) != REG
2921 && ! reload_in_progress
)
2923 = replace_equiv_address (operands
[1],
2924 copy_addr_to_reg (XEXP (operands
[1], 0)));
2927 emit_insn (gen_rtx_PARALLEL (VOIDmode
,
2929 gen_rtx_SET (VOIDmode
,
2930 operands
[0], operands
[1]),
2931 gen_rtx_CLOBBER (VOIDmode
,
2932 gen_rtx_SCRATCH (SImode
)))));
2941 /* Above, we may have called force_const_mem which may have returned
2942 an invalid address. If we can, fix this up; otherwise, reload will
2943 have to deal with it. */
2944 if (GET_CODE (operands
[1]) == MEM
2945 && ! memory_address_p (mode
, XEXP (operands
[1], 0))
2946 && ! reload_in_progress
)
2947 operands
[1] = adjust_address (operands
[1], mode
, 0);
2949 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2953 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2954 for a call to a function whose data type is FNTYPE.
2955 For a library call, FNTYPE is 0.
2957 For incoming args we set the number of arguments in the prototype large
2958 so we never return a PARALLEL. */
2961 init_cumulative_args (cum
, fntype
, libname
, incoming
)
2962 CUMULATIVE_ARGS
*cum
;
2964 rtx libname ATTRIBUTE_UNUSED
;
2967 static CUMULATIVE_ARGS zero_cumulative
;
2969 *cum
= zero_cumulative
;
2971 cum
->fregno
= FP_ARG_MIN_REG
;
2972 cum
->vregno
= ALTIVEC_ARG_MIN_REG
;
2973 cum
->prototype
= (fntype
&& TYPE_ARG_TYPES (fntype
));
2974 cum
->call_cookie
= CALL_NORMAL
;
2975 cum
->sysv_gregno
= GP_ARG_MIN_REG
;
2978 cum
->nargs_prototype
= 1000; /* don't return a PARALLEL */
2980 else if (cum
->prototype
)
2981 cum
->nargs_prototype
= (list_length (TYPE_ARG_TYPES (fntype
)) - 1
2982 + (TYPE_MODE (TREE_TYPE (fntype
)) == BLKmode
2983 || RETURN_IN_MEMORY (TREE_TYPE (fntype
))));
2986 cum
->nargs_prototype
= 0;
2988 cum
->orig_nargs
= cum
->nargs_prototype
;
2990 /* Check for a longcall attribute. */
2992 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype
))
2993 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype
)))
2994 cum
->call_cookie
= CALL_LONG
;
2996 if (TARGET_DEBUG_ARG
)
2998 fprintf (stderr
, "\ninit_cumulative_args:");
3001 tree ret_type
= TREE_TYPE (fntype
);
3002 fprintf (stderr
, " ret code = %s,",
3003 tree_code_name
[ (int)TREE_CODE (ret_type
) ]);
3006 if (cum
->call_cookie
& CALL_LONG
)
3007 fprintf (stderr
, " longcall,");
3009 fprintf (stderr
, " proto = %d, nargs = %d\n",
3010 cum
->prototype
, cum
->nargs_prototype
);
3014 /* If defined, a C expression which determines whether, and in which
3015 direction, to pad out an argument with extra space. The value
3016 should be of type `enum direction': either `upward' to pad above
3017 the argument, `downward' to pad below, or `none' to inhibit
3020 For the AIX ABI structs are always stored left shifted in their
3024 function_arg_padding (mode
, type
)
3025 enum machine_mode mode
;
3028 if (type
!= 0 && AGGREGATE_TYPE_P (type
))
3031 /* This is the default definition. */
3032 return (! BYTES_BIG_ENDIAN
3035 ? (type
&& TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
3036 && int_size_in_bytes (type
) < (PARM_BOUNDARY
/ BITS_PER_UNIT
))
3037 : GET_MODE_BITSIZE (mode
) < PARM_BOUNDARY
)
3038 ? downward
: upward
));
3041 /* If defined, a C expression that gives the alignment boundary, in bits,
3042 of an argument with the specified mode and type. If it is not defined,
3043 PARM_BOUNDARY is used for all arguments.
3045 V.4 wants long longs to be double word aligned. */
3048 function_arg_boundary (mode
, type
)
3049 enum machine_mode mode
;
3050 tree type ATTRIBUTE_UNUSED
;
3052 if (DEFAULT_ABI
== ABI_V4
&& (mode
== DImode
|| mode
== DFmode
))
3054 else if (SPE_VECTOR_MODE (mode
))
3056 else if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
3059 return PARM_BOUNDARY
;
3062 /* Update the data in CUM to advance over an argument
3063 of mode MODE and data type TYPE.
3064 (TYPE is null for libcalls where that information may not be available.) */
3067 function_arg_advance (cum
, mode
, type
, named
)
3068 CUMULATIVE_ARGS
*cum
;
3069 enum machine_mode mode
;
3073 cum
->nargs_prototype
--;
3075 if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
3077 if (cum
->vregno
<= ALTIVEC_ARG_MAX_REG
&& cum
->nargs_prototype
>= 0)
3080 cum
->words
+= RS6000_ARG_SIZE (mode
, type
);
3082 else if (TARGET_SPE_ABI
&& TARGET_SPE
&& SPE_VECTOR_MODE (mode
)
3083 && named
&& cum
->sysv_gregno
<= GP_ARG_MAX_REG
)
3085 else if (DEFAULT_ABI
== ABI_V4
)
3087 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
3088 && (mode
== SFmode
|| mode
== DFmode
))
3090 if (cum
->fregno
<= FP_ARG_V4_MAX_REG
)
3095 cum
->words
+= cum
->words
& 1;
3096 cum
->words
+= RS6000_ARG_SIZE (mode
, type
);
3102 int gregno
= cum
->sysv_gregno
;
3104 /* Aggregates and IEEE quad get passed by reference. */
3105 if ((type
&& AGGREGATE_TYPE_P (type
))
3109 n_words
= RS6000_ARG_SIZE (mode
, type
);
3111 /* Long long and SPE vectors are put in odd registers. */
3112 if (n_words
== 2 && (gregno
& 1) == 0)
3115 /* Long long and SPE vectors are not split between registers
3117 if (gregno
+ n_words
- 1 > GP_ARG_MAX_REG
)
3119 /* Long long is aligned on the stack. */
3121 cum
->words
+= cum
->words
& 1;
3122 cum
->words
+= n_words
;
3125 /* Note: continuing to accumulate gregno past when we've started
3126 spilling to the stack indicates the fact that we've started
3127 spilling to the stack to expand_builtin_saveregs. */
3128 cum
->sysv_gregno
= gregno
+ n_words
;
3131 if (TARGET_DEBUG_ARG
)
3133 fprintf (stderr
, "function_adv: words = %2d, fregno = %2d, ",
3134 cum
->words
, cum
->fregno
);
3135 fprintf (stderr
, "gregno = %2d, nargs = %4d, proto = %d, ",
3136 cum
->sysv_gregno
, cum
->nargs_prototype
, cum
->prototype
);
3137 fprintf (stderr
, "mode = %4s, named = %d\n",
3138 GET_MODE_NAME (mode
), named
);
3143 int align
= (TARGET_32BIT
&& (cum
->words
& 1) != 0
3144 && function_arg_boundary (mode
, type
) == 64) ? 1 : 0;
3146 cum
->words
+= align
+ RS6000_ARG_SIZE (mode
, type
);
3148 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
3149 && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3150 cum
->fregno
+= (mode
== TFmode
? 2 : 1);
3152 if (TARGET_DEBUG_ARG
)
3154 fprintf (stderr
, "function_adv: words = %2d, fregno = %2d, ",
3155 cum
->words
, cum
->fregno
);
3156 fprintf (stderr
, "nargs = %4d, proto = %d, mode = %4s, ",
3157 cum
->nargs_prototype
, cum
->prototype
, GET_MODE_NAME (mode
));
3158 fprintf (stderr
, "named = %d, align = %d\n", named
, align
);
3163 /* Determine where to put an argument to a function.
3164 Value is zero to push the argument on the stack,
3165 or a hard register in which to store the argument.
3167 MODE is the argument's machine mode.
3168 TYPE is the data type of the argument (as a tree).
3169 This is null for libcalls where that information may
3171 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3172 the preceding args and about the function being called.
3173 NAMED is nonzero if this argument is a named parameter
3174 (otherwise it is an extra parameter matching an ellipsis).
3176 On RS/6000 the first eight words of non-FP are normally in registers
3177 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3178 Under V.4, the first 8 FP args are in registers.
3180 If this is floating-point and no prototype is specified, we use
3181 both an FP and integer register (or possibly FP reg and stack). Library
3182 functions (when TYPE is zero) always have the proper types for args,
3183 so we can pass the FP value just in one register. emit_library_function
3184 doesn't support PARALLEL anyway. */
3187 function_arg (cum
, mode
, type
, named
)
3188 CUMULATIVE_ARGS
*cum
;
3189 enum machine_mode mode
;
3193 enum rs6000_abi abi
= DEFAULT_ABI
;
3195 /* Return a marker to indicate whether CR1 needs to set or clear the
3196 bit that V.4 uses to say fp args were passed in registers.
3197 Assume that we don't need the marker for software floating point,
3198 or compiler generated library calls. */
3199 if (mode
== VOIDmode
)
3202 && cum
->nargs_prototype
< 0
3203 && type
&& (cum
->prototype
|| TARGET_NO_PROTOTYPE
))
3205 /* For the SPE, we need to crxor CR6 always. */
3207 return GEN_INT (cum
->call_cookie
| CALL_V4_SET_FP_ARGS
);
3208 else if (TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3209 return GEN_INT (cum
->call_cookie
3210 | ((cum
->fregno
== FP_ARG_MIN_REG
)
3211 ? CALL_V4_SET_FP_ARGS
3212 : CALL_V4_CLEAR_FP_ARGS
));
3215 return GEN_INT (cum
->call_cookie
);
3218 if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
3220 if (named
&& cum
->vregno
<= ALTIVEC_ARG_MAX_REG
)
3221 return gen_rtx_REG (mode
, cum
->vregno
);
3225 else if (TARGET_SPE_ABI
&& TARGET_SPE
&& SPE_VECTOR_MODE (mode
) && named
)
3227 if (cum
->sysv_gregno
<= GP_ARG_MAX_REG
)
3228 return gen_rtx_REG (mode
, cum
->sysv_gregno
);
3232 else if (abi
== ABI_V4
)
3234 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
3235 && (mode
== SFmode
|| mode
== DFmode
))
3237 if (cum
->fregno
<= FP_ARG_V4_MAX_REG
)
3238 return gen_rtx_REG (mode
, cum
->fregno
);
3245 int gregno
= cum
->sysv_gregno
;
3247 /* Aggregates and IEEE quad get passed by reference. */
3248 if ((type
&& AGGREGATE_TYPE_P (type
))
3252 n_words
= RS6000_ARG_SIZE (mode
, type
);
3254 /* Long long and SPE vectors are put in odd registers. */
3255 if (n_words
== 2 && (gregno
& 1) == 0)
3258 /* Long long and SPE vectors are not split between registers
3260 if (gregno
+ n_words
- 1 <= GP_ARG_MAX_REG
)
3262 /* SPE vectors in ... get split into 2 registers. */
3263 if (TARGET_SPE
&& TARGET_SPE_ABI
3264 && SPE_VECTOR_MODE (mode
) && !named
)
3267 enum machine_mode m
= SImode
;
3269 r1
= gen_rtx_REG (m
, gregno
);
3270 r1
= gen_rtx_EXPR_LIST (m
, r1
, const0_rtx
);
3271 r2
= gen_rtx_REG (m
, gregno
+ 1);
3272 r2
= gen_rtx_EXPR_LIST (m
, r2
, GEN_INT (4));
3273 return gen_rtx_PARALLEL (mode
, gen_rtvec (2, r1
, r2
));
3275 return gen_rtx_REG (mode
, gregno
);
3283 int align
= (TARGET_32BIT
&& (cum
->words
& 1) != 0
3284 && function_arg_boundary (mode
, type
) == 64) ? 1 : 0;
3285 int align_words
= cum
->words
+ align
;
3287 if (type
&& TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
3290 if (USE_FP_FOR_ARG_P (*cum
, mode
, type
))
3293 || ((cum
->nargs_prototype
> 0)
3294 /* IBM AIX extended its linkage convention definition always
3295 to require FP args after register save area hole on the
3297 && (DEFAULT_ABI
!= ABI_AIX
3299 || (align_words
< GP_ARG_NUM_REG
))))
3300 return gen_rtx_REG (mode
, cum
->fregno
);
3302 return gen_rtx_PARALLEL (mode
,
3304 gen_rtx_EXPR_LIST (VOIDmode
,
3305 ((align_words
>= GP_ARG_NUM_REG
)
3308 + RS6000_ARG_SIZE (mode
, type
)
3310 /* If this is partially on the stack, then
3311 we only include the portion actually
3312 in registers here. */
3313 ? gen_rtx_REG (SImode
,
3314 GP_ARG_MIN_REG
+ align_words
)
3315 : gen_rtx_REG (mode
,
3316 GP_ARG_MIN_REG
+ align_words
))),
3318 gen_rtx_EXPR_LIST (VOIDmode
,
3319 gen_rtx_REG (mode
, cum
->fregno
),
3322 else if (align_words
< GP_ARG_NUM_REG
)
3323 return gen_rtx_REG (mode
, GP_ARG_MIN_REG
+ align_words
);
3329 /* For an arg passed partly in registers and partly in memory,
3330 this is the number of registers used.
3331 For args passed entirely in registers or entirely in memory, zero. */
3334 function_arg_partial_nregs (cum
, mode
, type
, named
)
3335 CUMULATIVE_ARGS
*cum
;
3336 enum machine_mode mode
;
3338 int named ATTRIBUTE_UNUSED
;
3340 if (DEFAULT_ABI
== ABI_V4
)
3343 if (USE_FP_FOR_ARG_P (*cum
, mode
, type
)
3344 || USE_ALTIVEC_FOR_ARG_P (*cum
, mode
, type
))
3346 if (cum
->nargs_prototype
>= 0)
3350 if (cum
->words
< GP_ARG_NUM_REG
3351 && GP_ARG_NUM_REG
< (cum
->words
+ RS6000_ARG_SIZE (mode
, type
)))
3353 int ret
= GP_ARG_NUM_REG
- cum
->words
;
3354 if (ret
&& TARGET_DEBUG_ARG
)
3355 fprintf (stderr
, "function_arg_partial_nregs: %d\n", ret
);
3363 /* A C expression that indicates when an argument must be passed by
3364 reference. If nonzero for an argument, a copy of that argument is
3365 made in memory and a pointer to the argument is passed instead of
3366 the argument itself. The pointer is passed in whatever way is
3367 appropriate for passing a pointer to that type.
3369 Under V.4, structures and unions are passed by reference.
3371 As an extension to all ABIs, variable sized types are passed by
3375 function_arg_pass_by_reference (cum
, mode
, type
, named
)
3376 CUMULATIVE_ARGS
*cum ATTRIBUTE_UNUSED
;
3377 enum machine_mode mode ATTRIBUTE_UNUSED
;
3379 int named ATTRIBUTE_UNUSED
;
3381 if (DEFAULT_ABI
== ABI_V4
3382 && ((type
&& AGGREGATE_TYPE_P (type
))
3385 if (TARGET_DEBUG_ARG
)
3386 fprintf (stderr
, "function_arg_pass_by_reference: aggregate\n");
3390 return type
&& int_size_in_bytes (type
) <= 0;
3393 /* Perform any needed actions needed for a function that is receiving a
3394 variable number of arguments.
3398 MODE and TYPE are the mode and type of the current parameter.
3400 PRETEND_SIZE is a variable that should be set to the amount of stack
3401 that must be pushed by the prolog to pretend that our caller pushed
3404 Normally, this macro will push all remaining incoming registers on the
3405 stack and set PRETEND_SIZE to the length of the registers pushed. */
3408 setup_incoming_varargs (cum
, mode
, type
, pretend_size
, no_rtl
)
3409 CUMULATIVE_ARGS
*cum
;
3410 enum machine_mode mode
;
3412 int *pretend_size ATTRIBUTE_UNUSED
;
3416 CUMULATIVE_ARGS next_cum
;
3417 int reg_size
= TARGET_32BIT
? 4 : 8;
3418 rtx save_area
= NULL_RTX
, mem
;
3419 int first_reg_offset
, set
;
3423 fntype
= TREE_TYPE (current_function_decl
);
3424 stdarg_p
= (TYPE_ARG_TYPES (fntype
) != 0
3425 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
3426 != void_type_node
));
3428 /* For varargs, we do not want to skip the dummy va_dcl argument.
3429 For stdargs, we do want to skip the last named argument. */
3432 function_arg_advance (&next_cum
, mode
, type
, 1);
3434 if (DEFAULT_ABI
== ABI_V4
)
3436 /* Indicate to allocate space on the stack for varargs save area. */
3437 cfun
->machine
->sysv_varargs_p
= 1;
3439 save_area
= plus_constant (virtual_stack_vars_rtx
,
3440 - RS6000_VARARGS_SIZE
);
3442 first_reg_offset
= next_cum
.sysv_gregno
- GP_ARG_MIN_REG
;
3446 first_reg_offset
= next_cum
.words
;
3447 save_area
= virtual_incoming_args_rtx
;
3448 cfun
->machine
->sysv_varargs_p
= 0;
3450 if (MUST_PASS_IN_STACK (mode
, type
))
3451 first_reg_offset
+= RS6000_ARG_SIZE (TYPE_MODE (type
), type
);
3454 set
= get_varargs_alias_set ();
3455 if (! no_rtl
&& first_reg_offset
< GP_ARG_NUM_REG
)
3457 mem
= gen_rtx_MEM (BLKmode
,
3458 plus_constant (save_area
,
3459 first_reg_offset
* reg_size
)),
3460 set_mem_alias_set (mem
, set
);
3461 set_mem_align (mem
, BITS_PER_WORD
);
3464 (GP_ARG_MIN_REG
+ first_reg_offset
, mem
,
3465 GP_ARG_NUM_REG
- first_reg_offset
,
3466 (GP_ARG_NUM_REG
- first_reg_offset
) * UNITS_PER_WORD
);
3469 /* Save FP registers if needed. */
3470 if (DEFAULT_ABI
== ABI_V4
3471 && TARGET_HARD_FLOAT
&& TARGET_FPRS
3473 && next_cum
.fregno
<= FP_ARG_V4_MAX_REG
)
3475 int fregno
= next_cum
.fregno
;
3476 rtx cr1
= gen_rtx_REG (CCmode
, CR1_REGNO
);
3477 rtx lab
= gen_label_rtx ();
3478 int off
= (GP_ARG_NUM_REG
* reg_size
) + ((fregno
- FP_ARG_MIN_REG
) * 8);
3480 emit_jump_insn (gen_rtx_SET (VOIDmode
,
3482 gen_rtx_IF_THEN_ELSE (VOIDmode
,
3483 gen_rtx_NE (VOIDmode
, cr1
,
3485 gen_rtx_LABEL_REF (VOIDmode
, lab
),
3488 while (fregno
<= FP_ARG_V4_MAX_REG
)
3490 mem
= gen_rtx_MEM (DFmode
, plus_constant (save_area
, off
));
3491 set_mem_alias_set (mem
, set
);
3492 emit_move_insn (mem
, gen_rtx_REG (DFmode
, fregno
));
3501 /* Create the va_list data type. */
3504 rs6000_build_va_list ()
3506 tree f_gpr
, f_fpr
, f_ovf
, f_sav
, record
, type_decl
;
3508 /* For AIX, prefer 'char *' because that's what the system
3509 header files like. */
3510 if (DEFAULT_ABI
!= ABI_V4
)
3511 return build_pointer_type (char_type_node
);
3513 record
= (*lang_hooks
.types
.make_type
) (RECORD_TYPE
);
3514 type_decl
= build_decl (TYPE_DECL
, get_identifier ("__va_list_tag"), record
);
3516 f_gpr
= build_decl (FIELD_DECL
, get_identifier ("gpr"),
3517 unsigned_char_type_node
);
3518 f_fpr
= build_decl (FIELD_DECL
, get_identifier ("fpr"),
3519 unsigned_char_type_node
);
3520 f_ovf
= build_decl (FIELD_DECL
, get_identifier ("overflow_arg_area"),
3522 f_sav
= build_decl (FIELD_DECL
, get_identifier ("reg_save_area"),
3525 DECL_FIELD_CONTEXT (f_gpr
) = record
;
3526 DECL_FIELD_CONTEXT (f_fpr
) = record
;
3527 DECL_FIELD_CONTEXT (f_ovf
) = record
;
3528 DECL_FIELD_CONTEXT (f_sav
) = record
;
3530 TREE_CHAIN (record
) = type_decl
;
3531 TYPE_NAME (record
) = type_decl
;
3532 TYPE_FIELDS (record
) = f_gpr
;
3533 TREE_CHAIN (f_gpr
) = f_fpr
;
3534 TREE_CHAIN (f_fpr
) = f_ovf
;
3535 TREE_CHAIN (f_ovf
) = f_sav
;
3537 layout_type (record
);
3539 /* The correct type is an array type of one element. */
3540 return build_array_type (record
, build_index_type (size_zero_node
));
3543 /* Implement va_start. */
3546 rs6000_va_start (valist
, nextarg
)
3550 HOST_WIDE_INT words
, n_gpr
, n_fpr
;
3551 tree f_gpr
, f_fpr
, f_ovf
, f_sav
;
3552 tree gpr
, fpr
, ovf
, sav
, t
;
3554 /* Only SVR4 needs something special. */
3555 if (DEFAULT_ABI
!= ABI_V4
)
3557 std_expand_builtin_va_start (valist
, nextarg
);
3561 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
3562 f_fpr
= TREE_CHAIN (f_gpr
);
3563 f_ovf
= TREE_CHAIN (f_fpr
);
3564 f_sav
= TREE_CHAIN (f_ovf
);
3566 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
3567 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
);
3568 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
);
3569 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
);
3570 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
);
3572 /* Count number of gp and fp argument registers used. */
3573 words
= current_function_args_info
.words
;
3574 n_gpr
= current_function_args_info
.sysv_gregno
- GP_ARG_MIN_REG
;
3575 n_fpr
= current_function_args_info
.fregno
- FP_ARG_MIN_REG
;
3577 if (TARGET_DEBUG_ARG
)
3579 fputs ("va_start: words = ", stderr
);
3580 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, words
);
3581 fputs (", n_gpr = ", stderr
);
3582 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, n_gpr
);
3583 fputs (", n_fpr = ", stderr
);
3584 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, n_fpr
);
3585 putc ('\n', stderr
);
3588 t
= build (MODIFY_EXPR
, TREE_TYPE (gpr
), gpr
, build_int_2 (n_gpr
, 0));
3589 TREE_SIDE_EFFECTS (t
) = 1;
3590 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3592 t
= build (MODIFY_EXPR
, TREE_TYPE (fpr
), fpr
, build_int_2 (n_fpr
, 0));
3593 TREE_SIDE_EFFECTS (t
) = 1;
3594 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3596 /* Find the overflow area. */
3597 t
= make_tree (TREE_TYPE (ovf
), virtual_incoming_args_rtx
);
3599 t
= build (PLUS_EXPR
, TREE_TYPE (ovf
), t
,
3600 build_int_2 (words
* UNITS_PER_WORD
, 0));
3601 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
3602 TREE_SIDE_EFFECTS (t
) = 1;
3603 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3605 /* Find the register save area. */
3606 t
= make_tree (TREE_TYPE (sav
), virtual_stack_vars_rtx
);
3607 t
= build (PLUS_EXPR
, TREE_TYPE (sav
), t
,
3608 build_int_2 (-RS6000_VARARGS_SIZE
, -1));
3609 t
= build (MODIFY_EXPR
, TREE_TYPE (sav
), sav
, t
);
3610 TREE_SIDE_EFFECTS (t
) = 1;
3611 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3614 /* Implement va_arg. */
3617 rs6000_va_arg (valist
, type
)
3620 tree f_gpr
, f_fpr
, f_ovf
, f_sav
;
3621 tree gpr
, fpr
, ovf
, sav
, reg
, t
, u
;
3622 int indirect_p
, size
, rsize
, n_reg
, sav_ofs
, sav_scale
;
3623 rtx lab_false
, lab_over
, addr_rtx
, r
;
3625 if (DEFAULT_ABI
!= ABI_V4
)
3627 /* Variable sized types are passed by reference. */
3628 if (int_size_in_bytes (type
) <= 0)
3630 u
= build_pointer_type (type
);
3632 /* Args grow upward. */
3633 t
= build (POSTINCREMENT_EXPR
, TREE_TYPE (valist
), valist
,
3634 build_int_2 (POINTER_SIZE
/ BITS_PER_UNIT
, 0));
3635 TREE_SIDE_EFFECTS (t
) = 1;
3637 t
= build1 (NOP_EXPR
, build_pointer_type (u
), t
);
3638 TREE_SIDE_EFFECTS (t
) = 1;
3640 t
= build1 (INDIRECT_REF
, u
, t
);
3641 TREE_SIDE_EFFECTS (t
) = 1;
3643 return expand_expr (t
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
3646 return std_expand_builtin_va_arg (valist
, type
);
3649 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
3650 f_fpr
= TREE_CHAIN (f_gpr
);
3651 f_ovf
= TREE_CHAIN (f_fpr
);
3652 f_sav
= TREE_CHAIN (f_ovf
);
3654 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
3655 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
);
3656 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
);
3657 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
);
3658 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
);
3660 size
= int_size_in_bytes (type
);
3661 rsize
= (size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
3663 if (AGGREGATE_TYPE_P (type
) || TYPE_MODE (type
) == TFmode
)
3665 /* Aggregates and long doubles are passed by reference. */
3671 size
= UNITS_PER_WORD
;
3674 else if (FLOAT_TYPE_P (type
) && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3676 /* FP args go in FP registers, if present. */
3685 /* Otherwise into GP registers. */
3693 /* Pull the value out of the saved registers ... */
3695 lab_false
= gen_label_rtx ();
3696 lab_over
= gen_label_rtx ();
3697 addr_rtx
= gen_reg_rtx (Pmode
);
3699 /* AltiVec vectors never go in registers. */
3700 if (!TARGET_ALTIVEC
|| TREE_CODE (type
) != VECTOR_TYPE
)
3702 TREE_THIS_VOLATILE (reg
) = 1;
3703 emit_cmp_and_jump_insns
3704 (expand_expr (reg
, NULL_RTX
, QImode
, EXPAND_NORMAL
),
3705 GEN_INT (8 - n_reg
+ 1), GE
, const1_rtx
, QImode
, 1,
3708 /* Long long is aligned in the registers. */
3711 u
= build (BIT_AND_EXPR
, TREE_TYPE (reg
), reg
,
3712 build_int_2 (n_reg
- 1, 0));
3713 u
= build (PLUS_EXPR
, TREE_TYPE (reg
), reg
, u
);
3714 u
= build (MODIFY_EXPR
, TREE_TYPE (reg
), reg
, u
);
3715 TREE_SIDE_EFFECTS (u
) = 1;
3716 expand_expr (u
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3720 t
= build (PLUS_EXPR
, ptr_type_node
, sav
, build_int_2 (sav_ofs
, 0));
3724 u
= build (POSTINCREMENT_EXPR
, TREE_TYPE (reg
), reg
,
3725 build_int_2 (n_reg
, 0));
3726 TREE_SIDE_EFFECTS (u
) = 1;
3728 u
= build1 (CONVERT_EXPR
, integer_type_node
, u
);
3729 TREE_SIDE_EFFECTS (u
) = 1;
3731 u
= build (MULT_EXPR
, integer_type_node
, u
, build_int_2 (sav_scale
, 0));
3732 TREE_SIDE_EFFECTS (u
) = 1;
3734 t
= build (PLUS_EXPR
, ptr_type_node
, t
, u
);
3735 TREE_SIDE_EFFECTS (t
) = 1;
3737 r
= expand_expr (t
, addr_rtx
, Pmode
, EXPAND_NORMAL
);
3739 emit_move_insn (addr_rtx
, r
);
3741 emit_jump_insn (gen_jump (lab_over
));
3745 emit_label (lab_false
);
3747 /* ... otherwise out of the overflow area. */
3749 /* Make sure we don't find reg 7 for the next int arg.
3751 All AltiVec vectors go in the overflow area. So in the AltiVec
3752 case we need to get the vectors from the overflow area, but
3753 remember where the GPRs and FPRs are. */
3754 if (n_reg
> 1 && (TREE_CODE (type
) != VECTOR_TYPE
3755 || !TARGET_ALTIVEC
))
3757 t
= build (MODIFY_EXPR
, TREE_TYPE (reg
), reg
, build_int_2 (8, 0));
3758 TREE_SIDE_EFFECTS (t
) = 1;
3759 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3762 /* Care for on-stack alignment if needed. */
3769 /* AltiVec vectors are 16 byte aligned. */
3770 if (TARGET_ALTIVEC
&& TREE_CODE (type
) == VECTOR_TYPE
)
3775 t
= build (PLUS_EXPR
, TREE_TYPE (ovf
), ovf
, build_int_2 (align
, 0));
3776 t
= build (BIT_AND_EXPR
, TREE_TYPE (t
), t
, build_int_2 (-align
-1, -1));
3780 r
= expand_expr (t
, addr_rtx
, Pmode
, EXPAND_NORMAL
);
3782 emit_move_insn (addr_rtx
, r
);
3784 t
= build (PLUS_EXPR
, TREE_TYPE (t
), t
, build_int_2 (size
, 0));
3785 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
3786 TREE_SIDE_EFFECTS (t
) = 1;
3787 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3789 emit_label (lab_over
);
3793 r
= gen_rtx_MEM (Pmode
, addr_rtx
);
3794 set_mem_alias_set (r
, get_varargs_alias_set ());
3795 emit_move_insn (addr_rtx
, r
);
/* Register builtin NAME (with prototype TYPE and builtin code CODE) only
   when its MASK bit is enabled in target_flags.  Wrapped in
   do { } while (0) so it behaves like a single statement.

   NOTE(review): the continuation lines of this macro were missing from
   the garbled extraction; restored to match upstream GCC 3.3.  */
#define def_builtin(MASK, NAME, TYPE, CODE)			\
do {								\
  if ((MASK) & target_flags)					\
    builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
		      NULL, NULL_TREE);				\
} while (0)
3810 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3812 static const struct builtin_description bdesc_3arg
[] =
3814 { MASK_ALTIVEC
, CODE_FOR_altivec_vmaddfp
, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP
},
3815 { MASK_ALTIVEC
, CODE_FOR_altivec_vmhaddshs
, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS
},
3816 { MASK_ALTIVEC
, CODE_FOR_altivec_vmhraddshs
, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS
},
3817 { MASK_ALTIVEC
, CODE_FOR_altivec_vmladduhm
, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM
},
3818 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumubm
, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM
},
3819 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsummbm
, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM
},
3820 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumuhm
, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM
},
3821 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumshm
, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM
},
3822 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumuhs
, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS
},
3823 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumshs
, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS
},
3824 { MASK_ALTIVEC
, CODE_FOR_altivec_vnmsubfp
, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP
},
3825 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_4sf
, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF
},
3826 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_4si
, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI
},
3827 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_8hi
, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI
},
3828 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_16qi
, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI
},
3829 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_4sf
, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF
},
3830 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_4si
, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI
},
3831 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_8hi
, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI
},
3832 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_16qi
, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI
},
3833 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_16qi
, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI
},
3834 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_8hi
, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI
},
3835 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_4si
, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI
},
3836 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_4sf
, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF
},
3839 /* DST operations: void foo (void *, const int, const char). */
3841 static const struct builtin_description bdesc_dst
[] =
3843 { MASK_ALTIVEC
, CODE_FOR_altivec_dst
, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST
},
3844 { MASK_ALTIVEC
, CODE_FOR_altivec_dstt
, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT
},
3845 { MASK_ALTIVEC
, CODE_FOR_altivec_dstst
, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST
},
3846 { MASK_ALTIVEC
, CODE_FOR_altivec_dststt
, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT
}
3849 /* Simple binary operations: VECc = foo (VECa, VECb). */
3851 static struct builtin_description bdesc_2arg
[] =
3853 { MASK_ALTIVEC
, CODE_FOR_addv16qi3
, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM
},
3854 { MASK_ALTIVEC
, CODE_FOR_addv8hi3
, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM
},
3855 { MASK_ALTIVEC
, CODE_FOR_addv4si3
, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM
},
3856 { MASK_ALTIVEC
, CODE_FOR_addv4sf3
, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP
},
3857 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddcuw
, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW
},
3858 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddubs
, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS
},
3859 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddsbs
, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS
},
3860 { MASK_ALTIVEC
, CODE_FOR_altivec_vadduhs
, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS
},
3861 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddshs
, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS
},
3862 { MASK_ALTIVEC
, CODE_FOR_altivec_vadduws
, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS
},
3863 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddsws
, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS
},
3864 { MASK_ALTIVEC
, CODE_FOR_andv4si3
, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND
},
3865 { MASK_ALTIVEC
, CODE_FOR_altivec_vandc
, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC
},
3866 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgub
, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB
},
3867 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsb
, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB
},
3868 { MASK_ALTIVEC
, CODE_FOR_altivec_vavguh
, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH
},
3869 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsh
, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH
},
3870 { MASK_ALTIVEC
, CODE_FOR_altivec_vavguw
, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW
},
3871 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsw
, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW
},
3872 { MASK_ALTIVEC
, CODE_FOR_altivec_vcfux
, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX
},
3873 { MASK_ALTIVEC
, CODE_FOR_altivec_vcfsx
, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX
},
3874 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpbfp
, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP
},
3875 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequb
, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB
},
3876 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequh
, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH
},
3877 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequw
, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW
},
3878 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpeqfp
, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP
},
3879 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgefp
, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP
},
3880 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtub
, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB
},
3881 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsb
, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB
},
3882 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtuh
, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH
},
3883 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsh
, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH
},
3884 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtuw
, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW
},
3885 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsw
, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW
},
3886 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtfp
, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP
},
3887 { MASK_ALTIVEC
, CODE_FOR_altivec_vctsxs
, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS
},
3888 { MASK_ALTIVEC
, CODE_FOR_altivec_vctuxs
, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS
},
3889 { MASK_ALTIVEC
, CODE_FOR_umaxv16qi3
, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB
},
3890 { MASK_ALTIVEC
, CODE_FOR_smaxv16qi3
, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB
},
3891 { MASK_ALTIVEC
, CODE_FOR_umaxv8hi3
, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH
},
3892 { MASK_ALTIVEC
, CODE_FOR_smaxv8hi3
, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH
},
3893 { MASK_ALTIVEC
, CODE_FOR_umaxv4si3
, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW
},
3894 { MASK_ALTIVEC
, CODE_FOR_smaxv4si3
, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW
},
3895 { MASK_ALTIVEC
, CODE_FOR_smaxv4sf3
, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP
},
3896 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghb
, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB
},
3897 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghh
, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH
},
3898 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghw
, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW
},
3899 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglb
, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB
},
3900 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglh
, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH
},
3901 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglw
, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW
},
3902 { MASK_ALTIVEC
, CODE_FOR_uminv16qi3
, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB
},
3903 { MASK_ALTIVEC
, CODE_FOR_sminv16qi3
, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB
},
3904 { MASK_ALTIVEC
, CODE_FOR_uminv8hi3
, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH
},
3905 { MASK_ALTIVEC
, CODE_FOR_sminv8hi3
, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH
},
3906 { MASK_ALTIVEC
, CODE_FOR_uminv4si3
, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW
},
3907 { MASK_ALTIVEC
, CODE_FOR_sminv4si3
, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW
},
3908 { MASK_ALTIVEC
, CODE_FOR_sminv4sf3
, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP
},
3909 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuleub
, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB
},
3910 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulesb
, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB
},
3911 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuleuh
, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH
},
3912 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulesh
, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH
},
3913 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuloub
, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB
},
3914 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulosb
, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB
},
3915 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulouh
, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH
},
3916 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulosh
, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH
},
3917 { MASK_ALTIVEC
, CODE_FOR_altivec_vnor
, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR
},
3918 { MASK_ALTIVEC
, CODE_FOR_iorv4si3
, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR
},
3919 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhum
, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM
},
3920 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwum
, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM
},
3921 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkpx
, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX
},
3922 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhss
, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS
},
3923 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkshss
, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS
},
3924 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwss
, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS
},
3925 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkswss
, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS
},
3926 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhus
, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS
},
3927 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkshus
, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS
},
3928 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwus
, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS
},
3929 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkswus
, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS
},
3930 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlb
, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB
},
3931 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlh
, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH
},
3932 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlw
, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW
},
3933 { MASK_ALTIVEC
, CODE_FOR_altivec_vslb
, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB
},
3934 { MASK_ALTIVEC
, CODE_FOR_altivec_vslh
, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH
},
3935 { MASK_ALTIVEC
, CODE_FOR_altivec_vslw
, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW
},
3936 { MASK_ALTIVEC
, CODE_FOR_altivec_vsl
, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL
},
3937 { MASK_ALTIVEC
, CODE_FOR_altivec_vslo
, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO
},
3938 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltb
, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB
},
3939 { MASK_ALTIVEC
, CODE_FOR_altivec_vsplth
, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH
},
3940 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltw
, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW
},
3941 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrb
, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB
},
3942 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrh
, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH
},
3943 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrw
, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW
},
3944 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrab
, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB
},
3945 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrah
, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH
},
3946 { MASK_ALTIVEC
, CODE_FOR_altivec_vsraw
, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW
},
3947 { MASK_ALTIVEC
, CODE_FOR_altivec_vsr
, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR
},
3948 { MASK_ALTIVEC
, CODE_FOR_altivec_vsro
, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO
},
3949 { MASK_ALTIVEC
, CODE_FOR_subv16qi3
, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM
},
3950 { MASK_ALTIVEC
, CODE_FOR_subv8hi3
, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM
},
3951 { MASK_ALTIVEC
, CODE_FOR_subv4si3
, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM
},
3952 { MASK_ALTIVEC
, CODE_FOR_subv4sf3
, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP
},
3953 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubcuw
, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW
},
3954 { MASK_ALTIVEC
, CODE_FOR_altivec_vsububs
, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS
},
3955 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubsbs
, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS
},
3956 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubuhs
, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS
},
3957 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubshs
, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS
},
3958 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubuws
, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS
},
3959 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubsws
, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS
},
3960 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4ubs
, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS
},
3961 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4sbs
, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS
},
3962 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4shs
, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS
},
3963 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum2sws
, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS
},
3964 { MASK_ALTIVEC
, CODE_FOR_altivec_vsumsws
, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS
},
3965 { MASK_ALTIVEC
, CODE_FOR_xorv4si3
, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR
},
3967 /* Place holder, leave as first spe builtin. */
3968 { 0, CODE_FOR_spe_evaddw
, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW
},
3969 { 0, CODE_FOR_spe_evand
, "__builtin_spe_evand", SPE_BUILTIN_EVAND
},
3970 { 0, CODE_FOR_spe_evandc
, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC
},
3971 { 0, CODE_FOR_spe_evdivws
, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS
},
3972 { 0, CODE_FOR_spe_evdivwu
, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU
},
3973 { 0, CODE_FOR_spe_eveqv
, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV
},
3974 { 0, CODE_FOR_spe_evfsadd
, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD
},
3975 { 0, CODE_FOR_spe_evfsdiv
, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV
},
3976 { 0, CODE_FOR_spe_evfsmul
, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL
},
3977 { 0, CODE_FOR_spe_evfssub
, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB
},
3978 { 0, CODE_FOR_spe_evmergehi
, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI
},
3979 { 0, CODE_FOR_spe_evmergehilo
, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO
},
3980 { 0, CODE_FOR_spe_evmergelo
, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO
},
3981 { 0, CODE_FOR_spe_evmergelohi
, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI
},
3982 { 0, CODE_FOR_spe_evmhegsmfaa
, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA
},
3983 { 0, CODE_FOR_spe_evmhegsmfan
, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN
},
3984 { 0, CODE_FOR_spe_evmhegsmiaa
, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA
},
3985 { 0, CODE_FOR_spe_evmhegsmian
, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN
},
3986 { 0, CODE_FOR_spe_evmhegumiaa
, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA
},
3987 { 0, CODE_FOR_spe_evmhegumian
, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN
},
3988 { 0, CODE_FOR_spe_evmhesmf
, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF
},
3989 { 0, CODE_FOR_spe_evmhesmfa
, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA
},
3990 { 0, CODE_FOR_spe_evmhesmfaaw
, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW
},
3991 { 0, CODE_FOR_spe_evmhesmfanw
, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW
},
3992 { 0, CODE_FOR_spe_evmhesmi
, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI
},
3993 { 0, CODE_FOR_spe_evmhesmia
, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA
},
3994 { 0, CODE_FOR_spe_evmhesmiaaw
, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW
},
3995 { 0, CODE_FOR_spe_evmhesmianw
, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW
},
3996 { 0, CODE_FOR_spe_evmhessf
, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF
},
3997 { 0, CODE_FOR_spe_evmhessfa
, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA
},
3998 { 0, CODE_FOR_spe_evmhessfaaw
, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW
},
3999 { 0, CODE_FOR_spe_evmhessfanw
, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW
},
4000 { 0, CODE_FOR_spe_evmhessiaaw
, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW
},
4001 { 0, CODE_FOR_spe_evmhessianw
, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW
},
4002 { 0, CODE_FOR_spe_evmheumi
, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI
},
4003 { 0, CODE_FOR_spe_evmheumia
, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA
},
4004 { 0, CODE_FOR_spe_evmheumiaaw
, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW
},
4005 { 0, CODE_FOR_spe_evmheumianw
, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW
},
4006 { 0, CODE_FOR_spe_evmheusiaaw
, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW
},
4007 { 0, CODE_FOR_spe_evmheusianw
, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW
},
4008 { 0, CODE_FOR_spe_evmhogsmfaa
, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA
},
4009 { 0, CODE_FOR_spe_evmhogsmfan
, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN
},
4010 { 0, CODE_FOR_spe_evmhogsmiaa
, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA
},
4011 { 0, CODE_FOR_spe_evmhogsmian
, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN
},
4012 { 0, CODE_FOR_spe_evmhogumiaa
, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA
},
4013 { 0, CODE_FOR_spe_evmhogumian
, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN
},
4014 { 0, CODE_FOR_spe_evmhosmf
, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF
},
4015 { 0, CODE_FOR_spe_evmhosmfa
, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA
},
4016 { 0, CODE_FOR_spe_evmhosmfaaw
, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW
},
4017 { 0, CODE_FOR_spe_evmhosmfanw
, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW
},
4018 { 0, CODE_FOR_spe_evmhosmi
, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI
},
4019 { 0, CODE_FOR_spe_evmhosmia
, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA
},
4020 { 0, CODE_FOR_spe_evmhosmiaaw
, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW
},
4021 { 0, CODE_FOR_spe_evmhosmianw
, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW
},
4022 { 0, CODE_FOR_spe_evmhossf
, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF
},
4023 { 0, CODE_FOR_spe_evmhossfa
, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA
},
4024 { 0, CODE_FOR_spe_evmhossfaaw
, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW
},
4025 { 0, CODE_FOR_spe_evmhossfanw
, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW
},
4026 { 0, CODE_FOR_spe_evmhossiaaw
, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW
},
4027 { 0, CODE_FOR_spe_evmhossianw
, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW
},
4028 { 0, CODE_FOR_spe_evmhoumi
, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI
},
4029 { 0, CODE_FOR_spe_evmhoumia
, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA
},
4030 { 0, CODE_FOR_spe_evmhoumiaaw
, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW
},
4031 { 0, CODE_FOR_spe_evmhoumianw
, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW
},
4032 { 0, CODE_FOR_spe_evmhousiaaw
, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW
},
4033 { 0, CODE_FOR_spe_evmhousianw
, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW
},
4034 { 0, CODE_FOR_spe_evmwhsmf
, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF
},
4035 { 0, CODE_FOR_spe_evmwhsmfa
, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA
},
4036 { 0, CODE_FOR_spe_evmwhsmi
, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI
},
4037 { 0, CODE_FOR_spe_evmwhsmia
, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA
},
4038 { 0, CODE_FOR_spe_evmwhssf
, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF
},
4039 { 0, CODE_FOR_spe_evmwhssfa
, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA
},
4040 { 0, CODE_FOR_spe_evmwhumi
, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI
},
4041 { 0, CODE_FOR_spe_evmwhumia
, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA
},
4042 { 0, CODE_FOR_spe_evmwlsmiaaw
, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW
},
4043 { 0, CODE_FOR_spe_evmwlsmianw
, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW
},
4044 { 0, CODE_FOR_spe_evmwlssiaaw
, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW
},
4045 { 0, CODE_FOR_spe_evmwlssianw
, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW
},
4046 { 0, CODE_FOR_spe_evmwlumi
, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI
},
4047 { 0, CODE_FOR_spe_evmwlumia
, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA
},
4048 { 0, CODE_FOR_spe_evmwlumiaaw
, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW
},
4049 { 0, CODE_FOR_spe_evmwlumianw
, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW
},
4050 { 0, CODE_FOR_spe_evmwlusiaaw
, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW
},
4051 { 0, CODE_FOR_spe_evmwlusianw
, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW
},
4052 { 0, CODE_FOR_spe_evmwsmf
, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF
},
4053 { 0, CODE_FOR_spe_evmwsmfa
, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA
},
4054 { 0, CODE_FOR_spe_evmwsmfaa
, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA
},
4055 { 0, CODE_FOR_spe_evmwsmfan
, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN
},
4056 { 0, CODE_FOR_spe_evmwsmi
, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI
},
4057 { 0, CODE_FOR_spe_evmwsmia
, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA
},
4058 { 0, CODE_FOR_spe_evmwsmiaa
, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA
},
4059 { 0, CODE_FOR_spe_evmwsmian
, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN
},
4060 { 0, CODE_FOR_spe_evmwssf
, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF
},
4061 { 0, CODE_FOR_spe_evmwssfa
, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA
},
4062 { 0, CODE_FOR_spe_evmwssfaa
, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA
},
4063 { 0, CODE_FOR_spe_evmwssfan
, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN
},
4064 { 0, CODE_FOR_spe_evmwumi
, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI
},
4065 { 0, CODE_FOR_spe_evmwumia
, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA
},
4066 { 0, CODE_FOR_spe_evmwumiaa
, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA
},
4067 { 0, CODE_FOR_spe_evmwumian
, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN
},
4068 { 0, CODE_FOR_spe_evnand
, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND
},
4069 { 0, CODE_FOR_spe_evnor
, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR
},
4070 { 0, CODE_FOR_spe_evor
, "__builtin_spe_evor", SPE_BUILTIN_EVOR
},
4071 { 0, CODE_FOR_spe_evorc
, "__builtin_spe_evorc", SPE_BUILTIN_EVORC
},
4072 { 0, CODE_FOR_spe_evrlw
, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW
},
4073 { 0, CODE_FOR_spe_evslw
, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW
},
4074 { 0, CODE_FOR_spe_evsrws
, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS
},
4075 { 0, CODE_FOR_spe_evsrwu
, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU
},
4076 { 0, CODE_FOR_spe_evsubfw
, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW
},
4078 /* SPE binary operations expecting a 5-bit unsigned literal. */
4079 { 0, CODE_FOR_spe_evaddiw
, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW
},
4081 { 0, CODE_FOR_spe_evrlwi
, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI
},
4082 { 0, CODE_FOR_spe_evslwi
, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI
},
4083 { 0, CODE_FOR_spe_evsrwis
, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS
},
4084 { 0, CODE_FOR_spe_evsrwiu
, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU
},
4085 { 0, CODE_FOR_spe_evsubifw
, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW
},
4086 { 0, CODE_FOR_spe_evmwhssfaa
, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA
},
4087 { 0, CODE_FOR_spe_evmwhssmaa
, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA
},
4088 { 0, CODE_FOR_spe_evmwhsmfaa
, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA
},
4089 { 0, CODE_FOR_spe_evmwhsmiaa
, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA
},
4090 { 0, CODE_FOR_spe_evmwhusiaa
, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA
},
4091 { 0, CODE_FOR_spe_evmwhumiaa
, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA
},
4092 { 0, CODE_FOR_spe_evmwhssfan
, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN
},
4093 { 0, CODE_FOR_spe_evmwhssian
, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN
},
4094 { 0, CODE_FOR_spe_evmwhsmfan
, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN
},
4095 { 0, CODE_FOR_spe_evmwhsmian
, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN
},
4096 { 0, CODE_FOR_spe_evmwhusian
, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN
},
4097 { 0, CODE_FOR_spe_evmwhumian
, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN
},
4098 { 0, CODE_FOR_spe_evmwhgssfaa
, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA
},
4099 { 0, CODE_FOR_spe_evmwhgsmfaa
, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA
},
4100 { 0, CODE_FOR_spe_evmwhgsmiaa
, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA
},
4101 { 0, CODE_FOR_spe_evmwhgumiaa
, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA
},
4102 { 0, CODE_FOR_spe_evmwhgssfan
, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN
},
4103 { 0, CODE_FOR_spe_evmwhgsmfan
, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN
},
4104 { 0, CODE_FOR_spe_evmwhgsmian
, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN
},
4105 { 0, CODE_FOR_spe_evmwhgumian
, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN
},
4106 { 0, CODE_FOR_spe_brinc
, "__builtin_spe_brinc", SPE_BUILTIN_BRINC
},
4108 /* Place-holder. Leave as last binary SPE builtin. */
4109 { 0, CODE_FOR_xorv2si3
, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR
},
4112 /* AltiVec predicates. */
4114 struct builtin_description_predicates
4116 const unsigned int mask
;
4117 const enum insn_code icode
;
4119 const char *const name
;
4120 const enum rs6000_builtins code
;
4123 static const struct builtin_description_predicates bdesc_altivec_preds
[] =
4125 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P
},
4126 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P
},
4127 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P
},
4128 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P
},
4129 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P
},
4130 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P
},
4131 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P
},
4132 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P
},
4133 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P
},
4134 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P
},
4135 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P
},
4136 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P
},
4137 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P
}
4140 /* SPE predicates. */
4141 static struct builtin_description bdesc_spe_predicates
[] =
4143 /* Place-holder. Leave as first. */
4144 { 0, CODE_FOR_spe_evcmpeq
, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ
},
4145 { 0, CODE_FOR_spe_evcmpgts
, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS
},
4146 { 0, CODE_FOR_spe_evcmpgtu
, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU
},
4147 { 0, CODE_FOR_spe_evcmplts
, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS
},
4148 { 0, CODE_FOR_spe_evcmpltu
, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU
},
4149 { 0, CODE_FOR_spe_evfscmpeq
, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ
},
4150 { 0, CODE_FOR_spe_evfscmpgt
, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT
},
4151 { 0, CODE_FOR_spe_evfscmplt
, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT
},
4152 { 0, CODE_FOR_spe_evfststeq
, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ
},
4153 { 0, CODE_FOR_spe_evfststgt
, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT
},
4154 /* Place-holder. Leave as last. */
4155 { 0, CODE_FOR_spe_evfststlt
, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT
},
4158 /* SPE evsel predicates. */
4159 static struct builtin_description bdesc_spe_evsel
[] =
4161 /* Place-holder. Leave as first. */
4162 { 0, CODE_FOR_spe_evcmpgts
, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS
},
4163 { 0, CODE_FOR_spe_evcmpgtu
, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU
},
4164 { 0, CODE_FOR_spe_evcmplts
, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS
},
4165 { 0, CODE_FOR_spe_evcmpltu
, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU
},
4166 { 0, CODE_FOR_spe_evcmpeq
, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ
},
4167 { 0, CODE_FOR_spe_evfscmpgt
, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT
},
4168 { 0, CODE_FOR_spe_evfscmplt
, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT
},
4169 { 0, CODE_FOR_spe_evfscmpeq
, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ
},
4170 { 0, CODE_FOR_spe_evfststgt
, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT
},
4171 { 0, CODE_FOR_spe_evfststlt
, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT
},
4172 /* Place-holder. Leave as last. */
4173 { 0, CODE_FOR_spe_evfststeq
, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ
},
4176 /* ABS* operations. */
4178 static const struct builtin_description bdesc_abs
[] =
4180 { MASK_ALTIVEC
, CODE_FOR_absv4si2
, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI
},
4181 { MASK_ALTIVEC
, CODE_FOR_absv8hi2
, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI
},
4182 { MASK_ALTIVEC
, CODE_FOR_absv4sf2
, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF
},
4183 { MASK_ALTIVEC
, CODE_FOR_absv16qi2
, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI
},
4184 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v4si
, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI
},
4185 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v8hi
, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI
},
4186 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v16qi
, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI
}
4189 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
4192 static struct builtin_description bdesc_1arg
[] =
4194 { MASK_ALTIVEC
, CODE_FOR_altivec_vexptefp
, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP
},
4195 { MASK_ALTIVEC
, CODE_FOR_altivec_vlogefp
, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP
},
4196 { MASK_ALTIVEC
, CODE_FOR_altivec_vrefp
, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP
},
4197 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfim
, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM
},
4198 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfin
, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN
},
4199 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfip
, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP
},
4200 { MASK_ALTIVEC
, CODE_FOR_ftruncv4sf2
, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ
},
4201 { MASK_ALTIVEC
, CODE_FOR_altivec_vrsqrtefp
, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP
},
4202 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltisb
, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB
},
4203 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltish
, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH
},
4204 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltisw
, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW
},
4205 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhsb
, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB
},
4206 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhpx
, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX
},
4207 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhsh
, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH
},
4208 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklsb
, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB
},
4209 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklpx
, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX
},
4210 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklsh
, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH
},
4212 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
4213 end with SPE_BUILTIN_EVSUBFUSIAAW. */
4214 { 0, CODE_FOR_spe_evabs
, "__builtin_spe_evabs", SPE_BUILTIN_EVABS
},
4215 { 0, CODE_FOR_spe_evaddsmiaaw
, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW
},
4216 { 0, CODE_FOR_spe_evaddssiaaw
, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW
},
4217 { 0, CODE_FOR_spe_evaddumiaaw
, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW
},
4218 { 0, CODE_FOR_spe_evaddusiaaw
, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW
},
4219 { 0, CODE_FOR_spe_evcntlsw
, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW
},
4220 { 0, CODE_FOR_spe_evcntlzw
, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW
},
4221 { 0, CODE_FOR_spe_evextsb
, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB
},
4222 { 0, CODE_FOR_spe_evextsh
, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH
},
4223 { 0, CODE_FOR_spe_evfsabs
, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS
},
4224 { 0, CODE_FOR_spe_evfscfsf
, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF
},
4225 { 0, CODE_FOR_spe_evfscfsi
, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI
},
4226 { 0, CODE_FOR_spe_evfscfuf
, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF
},
4227 { 0, CODE_FOR_spe_evfscfui
, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI
},
4228 { 0, CODE_FOR_spe_evfsctsf
, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF
},
4229 { 0, CODE_FOR_spe_evfsctsi
, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI
},
4230 { 0, CODE_FOR_spe_evfsctsiz
, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ
},
4231 { 0, CODE_FOR_spe_evfsctuf
, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF
},
4232 { 0, CODE_FOR_spe_evfsctui
, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI
},
4233 { 0, CODE_FOR_spe_evfsctuiz
, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ
},
4234 { 0, CODE_FOR_spe_evfsnabs
, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS
},
4235 { 0, CODE_FOR_spe_evfsneg
, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG
},
4236 { 0, CODE_FOR_spe_evmra
, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA
},
4237 { 0, CODE_FOR_spe_evneg
, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG
},
4238 { 0, CODE_FOR_spe_evrndw
, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW
},
4239 { 0, CODE_FOR_spe_evsubfsmiaaw
, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW
},
4240 { 0, CODE_FOR_spe_evsubfssiaaw
, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW
},
4241 { 0, CODE_FOR_spe_evsubfumiaaw
, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW
},
4242 { 0, CODE_FOR_spe_evsplatfi
, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI
},
4243 { 0, CODE_FOR_spe_evsplati
, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI
},
4245 /* Place-holder. Leave as last unary SPE builtin. */
4246 { 0, CODE_FOR_spe_evsubfusiaaw
, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW
},
4250 rs6000_expand_unop_builtin (icode
, arglist
, target
)
4251 enum insn_code icode
;
4256 tree arg0
= TREE_VALUE (arglist
);
4257 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4258 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4259 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4261 if (icode
== CODE_FOR_nothing
)
4262 /* Builtin not supported on this processor. */
4265 /* If we got invalid arguments bail out before generating bad rtl. */
4266 if (arg0
== error_mark_node
)
4269 if (icode
== CODE_FOR_altivec_vspltisb
4270 || icode
== CODE_FOR_altivec_vspltish
4271 || icode
== CODE_FOR_altivec_vspltisw
4272 || icode
== CODE_FOR_spe_evsplatfi
4273 || icode
== CODE_FOR_spe_evsplati
)
4275 /* Only allow 5-bit *signed* literals. */
4276 if (GET_CODE (op0
) != CONST_INT
4277 || INTVAL (op0
) > 0x1f
4278 || INTVAL (op0
) < -0x1f)
4280 error ("argument 1 must be a 5-bit signed literal");
4286 || GET_MODE (target
) != tmode
4287 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4288 target
= gen_reg_rtx (tmode
);
4290 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4291 op0
= copy_to_mode_reg (mode0
, op0
);
4293 pat
= GEN_FCN (icode
) (target
, op0
);
4302 altivec_expand_abs_builtin (icode
, arglist
, target
)
4303 enum insn_code icode
;
4307 rtx pat
, scratch1
, scratch2
;
4308 tree arg0
= TREE_VALUE (arglist
);
4309 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4310 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4311 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4313 /* If we have invalid arguments, bail out before generating bad rtl. */
4314 if (arg0
== error_mark_node
)
4318 || GET_MODE (target
) != tmode
4319 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4320 target
= gen_reg_rtx (tmode
);
4322 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4323 op0
= copy_to_mode_reg (mode0
, op0
);
4325 scratch1
= gen_reg_rtx (mode0
);
4326 scratch2
= gen_reg_rtx (mode0
);
4328 pat
= GEN_FCN (icode
) (target
, op0
, scratch1
, scratch2
);
4337 rs6000_expand_binop_builtin (icode
, arglist
, target
)
4338 enum insn_code icode
;
4343 tree arg0
= TREE_VALUE (arglist
);
4344 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4345 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4346 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4347 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4348 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4349 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
4351 if (icode
== CODE_FOR_nothing
)
4352 /* Builtin not supported on this processor. */
4355 /* If we got invalid arguments bail out before generating bad rtl. */
4356 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
4359 if (icode
== CODE_FOR_altivec_vcfux
4360 || icode
== CODE_FOR_altivec_vcfsx
4361 || icode
== CODE_FOR_altivec_vctsxs
4362 || icode
== CODE_FOR_altivec_vctuxs
4363 || icode
== CODE_FOR_altivec_vspltb
4364 || icode
== CODE_FOR_altivec_vsplth
4365 || icode
== CODE_FOR_altivec_vspltw
4366 || icode
== CODE_FOR_spe_evaddiw
4367 || icode
== CODE_FOR_spe_evldd
4368 || icode
== CODE_FOR_spe_evldh
4369 || icode
== CODE_FOR_spe_evldw
4370 || icode
== CODE_FOR_spe_evlhhesplat
4371 || icode
== CODE_FOR_spe_evlhhossplat
4372 || icode
== CODE_FOR_spe_evlhhousplat
4373 || icode
== CODE_FOR_spe_evlwhe
4374 || icode
== CODE_FOR_spe_evlwhos
4375 || icode
== CODE_FOR_spe_evlwhou
4376 || icode
== CODE_FOR_spe_evlwhsplat
4377 || icode
== CODE_FOR_spe_evlwwsplat
4378 || icode
== CODE_FOR_spe_evrlwi
4379 || icode
== CODE_FOR_spe_evslwi
4380 || icode
== CODE_FOR_spe_evsrwis
4381 || icode
== CODE_FOR_spe_evsrwiu
)
4383 /* Only allow 5-bit unsigned literals. */
4384 if (TREE_CODE (arg1
) != INTEGER_CST
4385 || TREE_INT_CST_LOW (arg1
) & ~0x1f)
4387 error ("argument 2 must be a 5-bit unsigned literal");
4393 || GET_MODE (target
) != tmode
4394 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4395 target
= gen_reg_rtx (tmode
);
4397 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4398 op0
= copy_to_mode_reg (mode0
, op0
);
4399 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
4400 op1
= copy_to_mode_reg (mode1
, op1
);
4402 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
4411 altivec_expand_predicate_builtin (icode
, opcode
, arglist
, target
)
4412 enum insn_code icode
;
4418 tree cr6_form
= TREE_VALUE (arglist
);
4419 tree arg0
= TREE_VALUE (TREE_CHAIN (arglist
));
4420 tree arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4421 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4422 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4423 enum machine_mode tmode
= SImode
;
4424 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4425 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
4428 if (TREE_CODE (cr6_form
) != INTEGER_CST
)
4430 error ("argument 1 of __builtin_altivec_predicate must be a constant");
4434 cr6_form_int
= TREE_INT_CST_LOW (cr6_form
);
4439 /* If we have invalid arguments, bail out before generating bad rtl. */
4440 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
4444 || GET_MODE (target
) != tmode
4445 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4446 target
= gen_reg_rtx (tmode
);
4448 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4449 op0
= copy_to_mode_reg (mode0
, op0
);
4450 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
4451 op1
= copy_to_mode_reg (mode1
, op1
);
4453 scratch
= gen_reg_rtx (mode0
);
4455 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
,
4456 gen_rtx (SYMBOL_REF
, Pmode
, opcode
));
4461 /* The vec_any* and vec_all* predicates use the same opcodes for two
4462 different operations, but the bits in CR6 will be different
4463 depending on what information we want. So we have to play tricks
4464 with CR6 to get the right bits out.
4466 If you think this is disgusting, look at the specs for the
4467 AltiVec predicates. */
4469 switch (cr6_form_int
)
4472 emit_insn (gen_cr6_test_for_zero (target
));
4475 emit_insn (gen_cr6_test_for_zero_reverse (target
));
4478 emit_insn (gen_cr6_test_for_lt (target
));
4481 emit_insn (gen_cr6_test_for_lt_reverse (target
));
4484 error ("argument 1 of __builtin_altivec_predicate is out of range");
4492 altivec_expand_stv_builtin (icode
, arglist
)
4493 enum insn_code icode
;
4496 tree arg0
= TREE_VALUE (arglist
);
4497 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4498 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4499 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4500 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4501 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
4503 enum machine_mode mode0
= insn_data
[icode
].operand
[0].mode
;
4504 enum machine_mode mode1
= insn_data
[icode
].operand
[1].mode
;
4505 enum machine_mode mode2
= insn_data
[icode
].operand
[2].mode
;
4507 /* Invalid arguments. Bail before doing anything stoopid! */
4508 if (arg0
== error_mark_node
4509 || arg1
== error_mark_node
4510 || arg2
== error_mark_node
)
4513 if (! (*insn_data
[icode
].operand
[2].predicate
) (op0
, mode2
))
4514 op0
= copy_to_mode_reg (mode2
, op0
);
4515 if (! (*insn_data
[icode
].operand
[0].predicate
) (op1
, mode0
))
4516 op1
= copy_to_mode_reg (mode0
, op1
);
4517 if (! (*insn_data
[icode
].operand
[1].predicate
) (op2
, mode1
))
4518 op2
= copy_to_mode_reg (mode1
, op2
);
4520 pat
= GEN_FCN (icode
) (op1
, op2
, op0
);
4527 rs6000_expand_ternop_builtin (icode
, arglist
, target
)
4528 enum insn_code icode
;
4533 tree arg0
= TREE_VALUE (arglist
);
4534 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4535 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4536 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4537 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4538 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
4539 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4540 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4541 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
4542 enum machine_mode mode2
= insn_data
[icode
].operand
[3].mode
;
4544 if (icode
== CODE_FOR_nothing
)
4545 /* Builtin not supported on this processor. */
4548 /* If we got invalid arguments bail out before generating bad rtl. */
4549 if (arg0
== error_mark_node
4550 || arg1
== error_mark_node
4551 || arg2
== error_mark_node
)
4554 if (icode
== CODE_FOR_altivec_vsldoi_4sf
4555 || icode
== CODE_FOR_altivec_vsldoi_4si
4556 || icode
== CODE_FOR_altivec_vsldoi_8hi
4557 || icode
== CODE_FOR_altivec_vsldoi_16qi
)
4559 /* Only allow 4-bit unsigned literals. */
4560 if (TREE_CODE (arg2
) != INTEGER_CST
4561 || TREE_INT_CST_LOW (arg2
) & ~0xf)
4563 error ("argument 3 must be a 4-bit unsigned literal");
4569 || GET_MODE (target
) != tmode
4570 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4571 target
= gen_reg_rtx (tmode
);
4573 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4574 op0
= copy_to_mode_reg (mode0
, op0
);
4575 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
4576 op1
= copy_to_mode_reg (mode1
, op1
);
4577 if (! (*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
))
4578 op2
= copy_to_mode_reg (mode2
, op2
);
4580 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
);
4588 /* Expand the lvx builtins. */
4590 altivec_expand_ld_builtin (exp
, target
, expandedp
)
4595 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4596 tree arglist
= TREE_OPERAND (exp
, 1);
4597 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4599 enum machine_mode tmode
, mode0
;
4601 enum insn_code icode
;
4605 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi
:
4606 icode
= CODE_FOR_altivec_lvx_16qi
;
4608 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi
:
4609 icode
= CODE_FOR_altivec_lvx_8hi
;
4611 case ALTIVEC_BUILTIN_LD_INTERNAL_4si
:
4612 icode
= CODE_FOR_altivec_lvx_4si
;
4614 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf
:
4615 icode
= CODE_FOR_altivec_lvx_4sf
;
4624 arg0
= TREE_VALUE (arglist
);
4625 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4626 tmode
= insn_data
[icode
].operand
[0].mode
;
4627 mode0
= insn_data
[icode
].operand
[1].mode
;
4630 || GET_MODE (target
) != tmode
4631 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4632 target
= gen_reg_rtx (tmode
);
4634 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4635 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
4637 pat
= GEN_FCN (icode
) (target
, op0
);
4644 /* Expand the stvx builtins. */
4646 altivec_expand_st_builtin (exp
, target
, expandedp
)
4648 rtx target ATTRIBUTE_UNUSED
;
4651 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4652 tree arglist
= TREE_OPERAND (exp
, 1);
4653 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4655 enum machine_mode mode0
, mode1
;
4657 enum insn_code icode
;
4661 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi
:
4662 icode
= CODE_FOR_altivec_stvx_16qi
;
4664 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi
:
4665 icode
= CODE_FOR_altivec_stvx_8hi
;
4667 case ALTIVEC_BUILTIN_ST_INTERNAL_4si
:
4668 icode
= CODE_FOR_altivec_stvx_4si
;
4670 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf
:
4671 icode
= CODE_FOR_altivec_stvx_4sf
;
4678 arg0
= TREE_VALUE (arglist
);
4679 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4680 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4681 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4682 mode0
= insn_data
[icode
].operand
[0].mode
;
4683 mode1
= insn_data
[icode
].operand
[1].mode
;
4685 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
4686 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
4687 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
4688 op1
= copy_to_mode_reg (mode1
, op1
);
4690 pat
= GEN_FCN (icode
) (op0
, op1
);
4698 /* Expand the dst builtins. */
4700 altivec_expand_dst_builtin (exp
, target
, expandedp
)
4702 rtx target ATTRIBUTE_UNUSED
;
4705 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4706 tree arglist
= TREE_OPERAND (exp
, 1);
4707 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4708 tree arg0
, arg1
, arg2
;
4709 enum machine_mode mode0
, mode1
, mode2
;
4710 rtx pat
, op0
, op1
, op2
;
4711 struct builtin_description
*d
;
4716 /* Handle DST variants. */
4717 d
= (struct builtin_description
*) bdesc_dst
;
4718 for (i
= 0; i
< ARRAY_SIZE (bdesc_dst
); i
++, d
++)
4719 if (d
->code
== fcode
)
4721 arg0
= TREE_VALUE (arglist
);
4722 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4723 arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4724 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4725 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4726 op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
4727 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
4728 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
4729 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
4731 /* Invalid arguments, bail out before generating bad rtl. */
4732 if (arg0
== error_mark_node
4733 || arg1
== error_mark_node
4734 || arg2
== error_mark_node
)
4737 if (TREE_CODE (arg2
) != INTEGER_CST
4738 || TREE_INT_CST_LOW (arg2
) & ~0x3)
4740 error ("argument to `%s' must be a 2-bit unsigned literal", d
->name
);
4744 if (! (*insn_data
[d
->icode
].operand
[0].predicate
) (op0
, mode0
))
4745 op0
= copy_to_mode_reg (mode0
, op0
);
4746 if (! (*insn_data
[d
->icode
].operand
[1].predicate
) (op1
, mode1
))
4747 op1
= copy_to_mode_reg (mode1
, op1
);
4749 pat
= GEN_FCN (d
->icode
) (op0
, op1
, op2
);
4760 /* Expand the builtin in EXP and store the result in TARGET. Store
4761 true in *EXPANDEDP if we found a builtin to expand. */
4763 altivec_expand_builtin (exp
, target
, expandedp
)
4768 struct builtin_description
*d
;
4769 struct builtin_description_predicates
*dp
;
4771 enum insn_code icode
;
4772 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4773 tree arglist
= TREE_OPERAND (exp
, 1);
4776 enum machine_mode tmode
, mode0
;
4777 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4779 target
= altivec_expand_ld_builtin (exp
, target
, expandedp
);
4783 target
= altivec_expand_st_builtin (exp
, target
, expandedp
);
4787 target
= altivec_expand_dst_builtin (exp
, target
, expandedp
);
4795 case ALTIVEC_BUILTIN_STVX
:
4796 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx
, arglist
);
4797 case ALTIVEC_BUILTIN_STVEBX
:
4798 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx
, arglist
);
4799 case ALTIVEC_BUILTIN_STVEHX
:
4800 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx
, arglist
);
4801 case ALTIVEC_BUILTIN_STVEWX
:
4802 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx
, arglist
);
4803 case ALTIVEC_BUILTIN_STVXL
:
4804 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl
, arglist
);
4806 case ALTIVEC_BUILTIN_MFVSCR
:
4807 icode
= CODE_FOR_altivec_mfvscr
;
4808 tmode
= insn_data
[icode
].operand
[0].mode
;
4811 || GET_MODE (target
) != tmode
4812 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4813 target
= gen_reg_rtx (tmode
);
4815 pat
= GEN_FCN (icode
) (target
);
4821 case ALTIVEC_BUILTIN_MTVSCR
:
4822 icode
= CODE_FOR_altivec_mtvscr
;
4823 arg0
= TREE_VALUE (arglist
);
4824 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4825 mode0
= insn_data
[icode
].operand
[0].mode
;
4827 /* If we got invalid arguments bail out before generating bad rtl. */
4828 if (arg0
== error_mark_node
)
4831 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
4832 op0
= copy_to_mode_reg (mode0
, op0
);
4834 pat
= GEN_FCN (icode
) (op0
);
4839 case ALTIVEC_BUILTIN_DSSALL
:
4840 emit_insn (gen_altivec_dssall ());
4843 case ALTIVEC_BUILTIN_DSS
:
4844 icode
= CODE_FOR_altivec_dss
;
4845 arg0
= TREE_VALUE (arglist
);
4846 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4847 mode0
= insn_data
[icode
].operand
[0].mode
;
4849 /* If we got invalid arguments bail out before generating bad rtl. */
4850 if (arg0
== error_mark_node
)
4853 if (TREE_CODE (arg0
) != INTEGER_CST
4854 || TREE_INT_CST_LOW (arg0
) & ~0x3)
4856 error ("argument to dss must be a 2-bit unsigned literal");
4860 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
4861 op0
= copy_to_mode_reg (mode0
, op0
);
4863 emit_insn (gen_altivec_dss (op0
));
4867 /* Expand abs* operations. */
4868 d
= (struct builtin_description
*) bdesc_abs
;
4869 for (i
= 0; i
< ARRAY_SIZE (bdesc_abs
); i
++, d
++)
4870 if (d
->code
== fcode
)
4871 return altivec_expand_abs_builtin (d
->icode
, arglist
, target
);
4873 /* Expand the AltiVec predicates. */
4874 dp
= (struct builtin_description_predicates
*) bdesc_altivec_preds
;
4875 for (i
= 0; i
< ARRAY_SIZE (bdesc_altivec_preds
); i
++, dp
++)
4876 if (dp
->code
== fcode
)
4877 return altivec_expand_predicate_builtin (dp
->icode
, dp
->opcode
, arglist
, target
);
4879 /* LV* are funky. We initialized them differently. */
4882 case ALTIVEC_BUILTIN_LVSL
:
4883 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl
,
4885 case ALTIVEC_BUILTIN_LVSR
:
4886 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr
,
4888 case ALTIVEC_BUILTIN_LVEBX
:
4889 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx
,
4891 case ALTIVEC_BUILTIN_LVEHX
:
4892 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx
,
4894 case ALTIVEC_BUILTIN_LVEWX
:
4895 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx
,
4897 case ALTIVEC_BUILTIN_LVXL
:
4898 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl
,
4900 case ALTIVEC_BUILTIN_LVX
:
4901 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx
,
4912 /* Binops that need to be initialized manually, but can be expanded
4913 automagically by rs6000_expand_binop_builtin. */
4914 static struct builtin_description bdesc_2arg_spe
[] =
4916 { 0, CODE_FOR_spe_evlddx
, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX
},
4917 { 0, CODE_FOR_spe_evldwx
, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX
},
4918 { 0, CODE_FOR_spe_evldhx
, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX
},
4919 { 0, CODE_FOR_spe_evlwhex
, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX
},
4920 { 0, CODE_FOR_spe_evlwhoux
, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX
},
4921 { 0, CODE_FOR_spe_evlwhosx
, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX
},
4922 { 0, CODE_FOR_spe_evlwwsplatx
, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX
},
4923 { 0, CODE_FOR_spe_evlwhsplatx
, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX
},
4924 { 0, CODE_FOR_spe_evlhhesplatx
, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX
},
4925 { 0, CODE_FOR_spe_evlhhousplatx
, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX
},
4926 { 0, CODE_FOR_spe_evlhhossplatx
, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX
},
4927 { 0, CODE_FOR_spe_evldd
, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD
},
4928 { 0, CODE_FOR_spe_evldw
, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW
},
4929 { 0, CODE_FOR_spe_evldh
, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH
},
4930 { 0, CODE_FOR_spe_evlwhe
, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE
},
4931 { 0, CODE_FOR_spe_evlwhou
, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU
},
4932 { 0, CODE_FOR_spe_evlwhos
, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS
},
4933 { 0, CODE_FOR_spe_evlwwsplat
, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT
},
4934 { 0, CODE_FOR_spe_evlwhsplat
, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT
},
4935 { 0, CODE_FOR_spe_evlhhesplat
, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT
},
4936 { 0, CODE_FOR_spe_evlhhousplat
, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT
},
4937 { 0, CODE_FOR_spe_evlhhossplat
, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT
}
4940 /* Expand the builtin in EXP and store the result in TARGET. Store
4941 true in *EXPANDEDP if we found a builtin to expand.
4943 This expands the SPE builtins that are not simple unary and binary
4946 spe_expand_builtin (exp
, target
, expandedp
)
4951 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4952 tree arglist
= TREE_OPERAND (exp
, 1);
4954 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4955 enum insn_code icode
;
4956 enum machine_mode tmode
, mode0
;
4958 struct builtin_description
*d
;
4963 /* Syntax check for a 5-bit unsigned immediate. */
4966 case SPE_BUILTIN_EVSTDD
:
4967 case SPE_BUILTIN_EVSTDH
:
4968 case SPE_BUILTIN_EVSTDW
:
4969 case SPE_BUILTIN_EVSTWHE
:
4970 case SPE_BUILTIN_EVSTWHO
:
4971 case SPE_BUILTIN_EVSTWWE
:
4972 case SPE_BUILTIN_EVSTWWO
:
4973 arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4974 if (TREE_CODE (arg1
) != INTEGER_CST
4975 || TREE_INT_CST_LOW (arg1
) & ~0x1f)
4977 error ("argument 2 must be a 5-bit unsigned literal");
4985 d
= (struct builtin_description
*) bdesc_2arg_spe
;
4986 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg_spe
); ++i
, ++d
)
4987 if (d
->code
== fcode
)
4988 return rs6000_expand_binop_builtin (d
->icode
, arglist
, target
);
4990 d
= (struct builtin_description
*) bdesc_spe_predicates
;
4991 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_predicates
); ++i
, ++d
)
4992 if (d
->code
== fcode
)
4993 return spe_expand_predicate_builtin (d
->icode
, arglist
, target
);
4995 d
= (struct builtin_description
*) bdesc_spe_evsel
;
4996 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_evsel
); ++i
, ++d
)
4997 if (d
->code
== fcode
)
4998 return spe_expand_evsel_builtin (d
->icode
, arglist
, target
);
5002 case SPE_BUILTIN_EVSTDDX
:
5003 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx
, arglist
);
5004 case SPE_BUILTIN_EVSTDHX
:
5005 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx
, arglist
);
5006 case SPE_BUILTIN_EVSTDWX
:
5007 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx
, arglist
);
5008 case SPE_BUILTIN_EVSTWHEX
:
5009 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex
, arglist
);
5010 case SPE_BUILTIN_EVSTWHOX
:
5011 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox
, arglist
);
5012 case SPE_BUILTIN_EVSTWWEX
:
5013 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex
, arglist
);
5014 case SPE_BUILTIN_EVSTWWOX
:
5015 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox
, arglist
);
5016 case SPE_BUILTIN_EVSTDD
:
5017 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd
, arglist
);
5018 case SPE_BUILTIN_EVSTDH
:
5019 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh
, arglist
);
5020 case SPE_BUILTIN_EVSTDW
:
5021 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw
, arglist
);
5022 case SPE_BUILTIN_EVSTWHE
:
5023 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe
, arglist
);
5024 case SPE_BUILTIN_EVSTWHO
:
5025 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho
, arglist
);
5026 case SPE_BUILTIN_EVSTWWE
:
5027 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe
, arglist
);
5028 case SPE_BUILTIN_EVSTWWO
:
5029 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo
, arglist
);
5030 case SPE_BUILTIN_MFSPEFSCR
:
5031 icode
= CODE_FOR_spe_mfspefscr
;
5032 tmode
= insn_data
[icode
].operand
[0].mode
;
5035 || GET_MODE (target
) != tmode
5036 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5037 target
= gen_reg_rtx (tmode
);
5039 pat
= GEN_FCN (icode
) (target
);
5044 case SPE_BUILTIN_MTSPEFSCR
:
5045 icode
= CODE_FOR_spe_mtspefscr
;
5046 arg0
= TREE_VALUE (arglist
);
5047 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5048 mode0
= insn_data
[icode
].operand
[0].mode
;
5050 if (arg0
== error_mark_node
)
5053 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
5054 op0
= copy_to_mode_reg (mode0
, op0
);
5056 pat
= GEN_FCN (icode
) (op0
);
5069 spe_expand_predicate_builtin (icode
, arglist
, target
)
5070 enum insn_code icode
;
5074 rtx pat
, scratch
, tmp
;
5075 tree form
= TREE_VALUE (arglist
);
5076 tree arg0
= TREE_VALUE (TREE_CHAIN (arglist
));
5077 tree arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5078 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5079 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5080 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5081 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
5085 if (TREE_CODE (form
) != INTEGER_CST
)
5087 error ("argument 1 of __builtin_spe_predicate must be a constant");
5091 form_int
= TREE_INT_CST_LOW (form
);
5096 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
5100 || GET_MODE (target
) != SImode
5101 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, SImode
))
5102 target
= gen_reg_rtx (SImode
);
5104 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5105 op0
= copy_to_mode_reg (mode0
, op0
);
5106 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
5107 op1
= copy_to_mode_reg (mode1
, op1
);
5109 scratch
= gen_reg_rtx (CCmode
);
5111 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
);
5116 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
5117 _lower_. We use one compare, but look in different bits of the
5118 CR for each variant.
5120 There are 2 elements in each SPE simd type (upper/lower). The CR
5121 bits are set as follows:
5123 BIT0 | BIT 1 | BIT 2 | BIT 3
5124 U | L | (U | L) | (U & L)
5126 So, for an "all" relationship, BIT 3 would be set.
5127 For an "any" relationship, BIT 2 would be set. Etc.
5129 Following traditional nomenclature, these bits map to:
5131 BIT0 | BIT 1 | BIT 2 | BIT 3
5134 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
5139 /* All variant. OV bit. */
5141 /* We need to get to the OV bit, which is the ORDERED bit. We
5142 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
5143 that's ugly and will trigger a validate_condition_mode abort.
5144 So let's just use another pattern. */
5145 emit_insn (gen_move_from_CR_ov_bit (target
, scratch
));
5147 /* Any variant. EQ bit. */
5151 /* Upper variant. LT bit. */
5155 /* Lower variant. GT bit. */
5160 error ("argument 1 of __builtin_spe_predicate is out of range");
5164 tmp
= gen_rtx_fmt_ee (code
, SImode
, scratch
, const0_rtx
);
5165 emit_move_insn (target
, tmp
);
5170 /* The evsel builtins look like this:
5172 e = __builtin_spe_evsel_OP (a, b, c, d);
5176 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
5177 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
5181 spe_expand_evsel_builtin (icode
, arglist
, target
)
5182 enum insn_code icode
;
5187 tree arg0
= TREE_VALUE (arglist
);
5188 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
5189 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5190 tree arg3
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist
))));
5191 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5192 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5193 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
5194 rtx op3
= expand_expr (arg3
, NULL_RTX
, VOIDmode
, 0);
5195 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5196 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
5201 if (arg0
== error_mark_node
|| arg1
== error_mark_node
5202 || arg2
== error_mark_node
|| arg3
== error_mark_node
)
5206 || GET_MODE (target
) != mode0
5207 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, mode0
))
5208 target
= gen_reg_rtx (mode0
);
5210 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5211 op0
= copy_to_mode_reg (mode0
, op0
);
5212 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
5213 op1
= copy_to_mode_reg (mode0
, op1
);
5214 if (! (*insn_data
[icode
].operand
[1].predicate
) (op2
, mode1
))
5215 op2
= copy_to_mode_reg (mode0
, op2
);
5216 if (! (*insn_data
[icode
].operand
[1].predicate
) (op3
, mode1
))
5217 op3
= copy_to_mode_reg (mode0
, op3
);
5219 /* Generate the compare. */
5220 scratch
= gen_reg_rtx (CCmode
);
5221 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
);
5226 if (mode0
== V2SImode
)
5227 emit_insn (gen_spe_evsel (target
, op2
, op3
, scratch
));
5229 emit_insn (gen_spe_evsel_fs (target
, op2
, op3
, scratch
));
5234 /* Expand an expression EXP that calls a built-in function,
5235 with result going to TARGET if that's convenient
5236 (and in mode MODE if that's convenient).
5237 SUBTARGET may be used as the target for computing one of EXP's operands.
5238 IGNORE is nonzero if the value is to be ignored. */
5241 rs6000_expand_builtin (exp
, target
, subtarget
, mode
, ignore
)
5244 rtx subtarget ATTRIBUTE_UNUSED
;
5245 enum machine_mode mode ATTRIBUTE_UNUSED
;
5246 int ignore ATTRIBUTE_UNUSED
;
5248 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5249 tree arglist
= TREE_OPERAND (exp
, 1);
5250 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
5251 struct builtin_description
*d
;
5258 ret
= altivec_expand_builtin (exp
, target
, &success
);
5265 ret
= spe_expand_builtin (exp
, target
, &success
);
5271 if (TARGET_ALTIVEC
|| TARGET_SPE
)
5273 /* Handle simple unary operations. */
5274 d
= (struct builtin_description
*) bdesc_1arg
;
5275 for (i
= 0; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
5276 if (d
->code
== fcode
)
5277 return rs6000_expand_unop_builtin (d
->icode
, arglist
, target
);
5279 /* Handle simple binary operations. */
5280 d
= (struct builtin_description
*) bdesc_2arg
;
5281 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
5282 if (d
->code
== fcode
)
5283 return rs6000_expand_binop_builtin (d
->icode
, arglist
, target
);
5285 /* Handle simple ternary operations. */
5286 d
= (struct builtin_description
*) bdesc_3arg
;
5287 for (i
= 0; i
< ARRAY_SIZE (bdesc_3arg
); i
++, d
++)
5288 if (d
->code
== fcode
)
5289 return rs6000_expand_ternop_builtin (d
->icode
, arglist
, target
);
5297 rs6000_init_builtins ()
5300 spe_init_builtins ();
5302 altivec_init_builtins ();
5303 if (TARGET_ALTIVEC
|| TARGET_SPE
)
5304 rs6000_common_init_builtins ();
5307 /* Search through a set of builtins and enable the mask bits.
5308 DESC is an array of builtins.
5309 SIZE is the total number of builtins.
5310 START is the builtin enum at which to start.
5311 END is the builtin enum at which to end. */
5313 enable_mask_for_builtins (desc
, size
, start
, end
)
5314 struct builtin_description
*desc
;
5316 enum rs6000_builtins start
, end
;
5320 for (i
= 0; i
< size
; ++i
)
5321 if (desc
[i
].code
== start
)
5327 for (; i
< size
; ++i
)
5329 /* Flip all the bits on. */
5330 desc
[i
].mask
= target_flags
;
5331 if (desc
[i
].code
== end
)
5337 spe_init_builtins ()
5339 tree endlink
= void_list_node
;
5340 tree puint_type_node
= build_pointer_type (unsigned_type_node
);
5341 tree pushort_type_node
= build_pointer_type (short_unsigned_type_node
);
5342 tree pv2si_type_node
= build_pointer_type (V2SI_type_node
);
5343 struct builtin_description
*d
;
5346 tree v2si_ftype_4_v2si
5347 = build_function_type
5349 tree_cons (NULL_TREE
, V2SI_type_node
,
5350 tree_cons (NULL_TREE
, V2SI_type_node
,
5351 tree_cons (NULL_TREE
, V2SI_type_node
,
5352 tree_cons (NULL_TREE
, V2SI_type_node
,
5355 tree v2sf_ftype_4_v2sf
5356 = build_function_type
5358 tree_cons (NULL_TREE
, V2SF_type_node
,
5359 tree_cons (NULL_TREE
, V2SF_type_node
,
5360 tree_cons (NULL_TREE
, V2SF_type_node
,
5361 tree_cons (NULL_TREE
, V2SF_type_node
,
5364 tree int_ftype_int_v2si_v2si
5365 = build_function_type
5367 tree_cons (NULL_TREE
, integer_type_node
,
5368 tree_cons (NULL_TREE
, V2SI_type_node
,
5369 tree_cons (NULL_TREE
, V2SI_type_node
,
5372 tree int_ftype_int_v2sf_v2sf
5373 = build_function_type
5375 tree_cons (NULL_TREE
, integer_type_node
,
5376 tree_cons (NULL_TREE
, V2SF_type_node
,
5377 tree_cons (NULL_TREE
, V2SF_type_node
,
5380 tree void_ftype_v2si_puint_int
5381 = build_function_type (void_type_node
,
5382 tree_cons (NULL_TREE
, V2SI_type_node
,
5383 tree_cons (NULL_TREE
, puint_type_node
,
5384 tree_cons (NULL_TREE
,
5388 tree void_ftype_v2si_puint_char
5389 = build_function_type (void_type_node
,
5390 tree_cons (NULL_TREE
, V2SI_type_node
,
5391 tree_cons (NULL_TREE
, puint_type_node
,
5392 tree_cons (NULL_TREE
,
5396 tree void_ftype_v2si_pv2si_int
5397 = build_function_type (void_type_node
,
5398 tree_cons (NULL_TREE
, V2SI_type_node
,
5399 tree_cons (NULL_TREE
, pv2si_type_node
,
5400 tree_cons (NULL_TREE
,
5404 tree void_ftype_v2si_pv2si_char
5405 = build_function_type (void_type_node
,
5406 tree_cons (NULL_TREE
, V2SI_type_node
,
5407 tree_cons (NULL_TREE
, pv2si_type_node
,
5408 tree_cons (NULL_TREE
,
5413 = build_function_type (void_type_node
,
5414 tree_cons (NULL_TREE
, integer_type_node
, endlink
));
5417 = build_function_type (integer_type_node
,
5418 tree_cons (NULL_TREE
, void_type_node
, endlink
));
5420 tree v2si_ftype_pv2si_int
5421 = build_function_type (V2SI_type_node
,
5422 tree_cons (NULL_TREE
, pv2si_type_node
,
5423 tree_cons (NULL_TREE
, integer_type_node
,
5426 tree v2si_ftype_puint_int
5427 = build_function_type (V2SI_type_node
,
5428 tree_cons (NULL_TREE
, puint_type_node
,
5429 tree_cons (NULL_TREE
, integer_type_node
,
5432 tree v2si_ftype_pushort_int
5433 = build_function_type (V2SI_type_node
,
5434 tree_cons (NULL_TREE
, pushort_type_node
,
5435 tree_cons (NULL_TREE
, integer_type_node
,
5438 /* The initialization of the simple binary and unary builtins is
5439 done in rs6000_common_init_builtins, but we have to enable the
5440 mask bits here manually because we have run out of `target_flags'
5441 bits. We really need to redesign this mask business. */
5443 enable_mask_for_builtins ((struct builtin_description
*) bdesc_2arg
,
5444 ARRAY_SIZE (bdesc_2arg
),
5447 enable_mask_for_builtins ((struct builtin_description
*) bdesc_1arg
,
5448 ARRAY_SIZE (bdesc_1arg
),
5450 SPE_BUILTIN_EVSUBFUSIAAW
);
5451 enable_mask_for_builtins ((struct builtin_description
*) bdesc_spe_predicates
,
5452 ARRAY_SIZE (bdesc_spe_predicates
),
5453 SPE_BUILTIN_EVCMPEQ
,
5454 SPE_BUILTIN_EVFSTSTLT
);
5455 enable_mask_for_builtins ((struct builtin_description
*) bdesc_spe_evsel
,
5456 ARRAY_SIZE (bdesc_spe_evsel
),
5457 SPE_BUILTIN_EVSEL_CMPGTS
,
5458 SPE_BUILTIN_EVSEL_FSTSTEQ
);
5460 /* Initialize irregular SPE builtins. */
5462 def_builtin (target_flags
, "__builtin_spe_mtspefscr", void_ftype_int
, SPE_BUILTIN_MTSPEFSCR
);
5463 def_builtin (target_flags
, "__builtin_spe_mfspefscr", int_ftype_void
, SPE_BUILTIN_MFSPEFSCR
);
5464 def_builtin (target_flags
, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDDX
);
5465 def_builtin (target_flags
, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDHX
);
5466 def_builtin (target_flags
, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDWX
);
5467 def_builtin (target_flags
, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWHEX
);
5468 def_builtin (target_flags
, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWHOX
);
5469 def_builtin (target_flags
, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWWEX
);
5470 def_builtin (target_flags
, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWWOX
);
5471 def_builtin (target_flags
, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDD
);
5472 def_builtin (target_flags
, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDH
);
5473 def_builtin (target_flags
, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDW
);
5474 def_builtin (target_flags
, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWHE
);
5475 def_builtin (target_flags
, "__builtin_spe_evstwho", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWHO
);
5476 def_builtin (target_flags
, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWWE
);
5477 def_builtin (target_flags
, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWWO
);
5480 def_builtin (target_flags
, "__builtin_spe_evlddx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDDX
);
5481 def_builtin (target_flags
, "__builtin_spe_evldwx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDWX
);
5482 def_builtin (target_flags
, "__builtin_spe_evldhx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDHX
);
5483 def_builtin (target_flags
, "__builtin_spe_evlwhex", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHEX
);
5484 def_builtin (target_flags
, "__builtin_spe_evlwhoux", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOUX
);
5485 def_builtin (target_flags
, "__builtin_spe_evlwhosx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOSX
);
5486 def_builtin (target_flags
, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWWSPLATX
);
5487 def_builtin (target_flags
, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHSPLATX
);
5488 def_builtin (target_flags
, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHESPLATX
);
5489 def_builtin (target_flags
, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOUSPLATX
);
5490 def_builtin (target_flags
, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOSSPLATX
);
5491 def_builtin (target_flags
, "__builtin_spe_evldd", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDD
);
5492 def_builtin (target_flags
, "__builtin_spe_evldw", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDW
);
5493 def_builtin (target_flags
, "__builtin_spe_evldh", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDH
);
5494 def_builtin (target_flags
, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHESPLAT
);
5495 def_builtin (target_flags
, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOSSPLAT
);
5496 def_builtin (target_flags
, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOUSPLAT
);
5497 def_builtin (target_flags
, "__builtin_spe_evlwhe", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHE
);
5498 def_builtin (target_flags
, "__builtin_spe_evlwhos", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOS
);
5499 def_builtin (target_flags
, "__builtin_spe_evlwhou", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOU
);
5500 def_builtin (target_flags
, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHSPLAT
);
5501 def_builtin (target_flags
, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWWSPLAT
);
5504 d
= (struct builtin_description
*) bdesc_spe_predicates
;
5505 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_predicates
); ++i
, d
++)
5509 switch (insn_data
[d
->icode
].operand
[1].mode
)
5512 type
= int_ftype_int_v2si_v2si
;
5515 type
= int_ftype_int_v2sf_v2sf
;
5521 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
5524 /* Evsel predicates. */
5525 d
= (struct builtin_description
*) bdesc_spe_evsel
;
5526 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_evsel
); ++i
, d
++)
5530 switch (insn_data
[d
->icode
].operand
[1].mode
)
5533 type
= v2si_ftype_4_v2si
;
5536 type
= v2sf_ftype_4_v2sf
;
5542 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
5547 altivec_init_builtins ()
5549 struct builtin_description
*d
;
5550 struct builtin_description_predicates
*dp
;
5552 tree pfloat_type_node
= build_pointer_type (float_type_node
);
5553 tree pint_type_node
= build_pointer_type (integer_type_node
);
5554 tree pshort_type_node
= build_pointer_type (short_integer_type_node
);
5555 tree pchar_type_node
= build_pointer_type (char_type_node
);
5557 tree pvoid_type_node
= build_pointer_type (void_type_node
);
5559 tree pcfloat_type_node
= build_pointer_type (build_qualified_type (float_type_node
, TYPE_QUAL_CONST
));
5560 tree pcint_type_node
= build_pointer_type (build_qualified_type (integer_type_node
, TYPE_QUAL_CONST
));
5561 tree pcshort_type_node
= build_pointer_type (build_qualified_type (short_integer_type_node
, TYPE_QUAL_CONST
));
5562 tree pcchar_type_node
= build_pointer_type (build_qualified_type (char_type_node
, TYPE_QUAL_CONST
));
5564 tree pcvoid_type_node
= build_pointer_type (build_qualified_type (void_type_node
, TYPE_QUAL_CONST
));
5566 tree int_ftype_int_v4si_v4si
5567 = build_function_type_list (integer_type_node
,
5568 integer_type_node
, V4SI_type_node
,
5569 V4SI_type_node
, NULL_TREE
);
5570 tree v4sf_ftype_pcfloat
5571 = build_function_type_list (V4SF_type_node
, pcfloat_type_node
, NULL_TREE
);
5572 tree void_ftype_pfloat_v4sf
5573 = build_function_type_list (void_type_node
,
5574 pfloat_type_node
, V4SF_type_node
, NULL_TREE
);
5575 tree v4si_ftype_pcint
5576 = build_function_type_list (V4SI_type_node
, pcint_type_node
, NULL_TREE
);
5577 tree void_ftype_pint_v4si
5578 = build_function_type_list (void_type_node
,
5579 pint_type_node
, V4SI_type_node
, NULL_TREE
);
5580 tree v8hi_ftype_pcshort
5581 = build_function_type_list (V8HI_type_node
, pcshort_type_node
, NULL_TREE
);
5582 tree void_ftype_pshort_v8hi
5583 = build_function_type_list (void_type_node
,
5584 pshort_type_node
, V8HI_type_node
, NULL_TREE
);
5585 tree v16qi_ftype_pcchar
5586 = build_function_type_list (V16QI_type_node
, pcchar_type_node
, NULL_TREE
);
5587 tree void_ftype_pchar_v16qi
5588 = build_function_type_list (void_type_node
,
5589 pchar_type_node
, V16QI_type_node
, NULL_TREE
);
5590 tree void_ftype_v4si
5591 = build_function_type_list (void_type_node
, V4SI_type_node
, NULL_TREE
);
5592 tree v8hi_ftype_void
5593 = build_function_type (V8HI_type_node
, void_list_node
);
5594 tree void_ftype_void
5595 = build_function_type (void_type_node
, void_list_node
);
5597 = build_function_type_list (void_type_node
, char_type_node
, NULL_TREE
);
5599 tree v16qi_ftype_int_pcvoid
5600 = build_function_type_list (V16QI_type_node
,
5601 integer_type_node
, pcvoid_type_node
, NULL_TREE
);
5602 tree v8hi_ftype_int_pcvoid
5603 = build_function_type_list (V8HI_type_node
,
5604 integer_type_node
, pcvoid_type_node
, NULL_TREE
);
5605 tree v4si_ftype_int_pcvoid
5606 = build_function_type_list (V4SI_type_node
,
5607 integer_type_node
, pcvoid_type_node
, NULL_TREE
);
5609 tree void_ftype_v4si_int_pvoid
5610 = build_function_type_list (void_type_node
,
5611 V4SI_type_node
, integer_type_node
,
5612 pvoid_type_node
, NULL_TREE
);
5613 tree void_ftype_v16qi_int_pvoid
5614 = build_function_type_list (void_type_node
,
5615 V16QI_type_node
, integer_type_node
,
5616 pvoid_type_node
, NULL_TREE
);
5617 tree void_ftype_v8hi_int_pvoid
5618 = build_function_type_list (void_type_node
,
5619 V8HI_type_node
, integer_type_node
,
5620 pvoid_type_node
, NULL_TREE
);
5621 tree int_ftype_int_v8hi_v8hi
5622 = build_function_type_list (integer_type_node
,
5623 integer_type_node
, V8HI_type_node
,
5624 V8HI_type_node
, NULL_TREE
);
5625 tree int_ftype_int_v16qi_v16qi
5626 = build_function_type_list (integer_type_node
,
5627 integer_type_node
, V16QI_type_node
,
5628 V16QI_type_node
, NULL_TREE
);
5629 tree int_ftype_int_v4sf_v4sf
5630 = build_function_type_list (integer_type_node
,
5631 integer_type_node
, V4SF_type_node
,
5632 V4SF_type_node
, NULL_TREE
);
5633 tree v4si_ftype_v4si
5634 = build_function_type_list (V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
5635 tree v8hi_ftype_v8hi
5636 = build_function_type_list (V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5637 tree v16qi_ftype_v16qi
5638 = build_function_type_list (V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5639 tree v4sf_ftype_v4sf
5640 = build_function_type_list (V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5641 tree void_ftype_pcvoid_int_char
5642 = build_function_type_list (void_type_node
,
5643 pcvoid_type_node
, integer_type_node
,
5644 char_type_node
, NULL_TREE
);
5646 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat
,
5647 ALTIVEC_BUILTIN_LD_INTERNAL_4sf
);
5648 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf
,
5649 ALTIVEC_BUILTIN_ST_INTERNAL_4sf
);
5650 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint
,
5651 ALTIVEC_BUILTIN_LD_INTERNAL_4si
);
5652 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si
,
5653 ALTIVEC_BUILTIN_ST_INTERNAL_4si
);
5654 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort
,
5655 ALTIVEC_BUILTIN_LD_INTERNAL_8hi
);
5656 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi
,
5657 ALTIVEC_BUILTIN_ST_INTERNAL_8hi
);
5658 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar
,
5659 ALTIVEC_BUILTIN_LD_INTERNAL_16qi
);
5660 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi
,
5661 ALTIVEC_BUILTIN_ST_INTERNAL_16qi
);
5662 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_mtvscr", void_ftype_v4si
, ALTIVEC_BUILTIN_MTVSCR
);
5663 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_mfvscr", v8hi_ftype_void
, ALTIVEC_BUILTIN_MFVSCR
);
5664 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_dssall", void_ftype_void
, ALTIVEC_BUILTIN_DSSALL
);
5665 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_dss", void_ftype_qi
, ALTIVEC_BUILTIN_DSS
);
5666 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvsl", v16qi_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVSL
);
5667 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvsr", v16qi_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVSR
);
5668 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvebx", v16qi_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVEBX
);
5669 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvehx", v8hi_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVEHX
);
5670 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvewx", v4si_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVEWX
);
5671 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvxl", v4si_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVXL
);
5672 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvx", v4si_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVX
);
5673 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVX
);
5674 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVEWX
);
5675 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVXL
);
5676 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid
, ALTIVEC_BUILTIN_STVEBX
);
5677 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid
, ALTIVEC_BUILTIN_STVEHX
);
5679 /* Add the DST variants. */
5680 d
= (struct builtin_description
*) bdesc_dst
;
5681 for (i
= 0; i
< ARRAY_SIZE (bdesc_dst
); i
++, d
++)
5682 def_builtin (d
->mask
, d
->name
, void_ftype_pcvoid_int_char
, d
->code
);
5684 /* Initialize the predicates. */
5685 dp
= (struct builtin_description_predicates
*) bdesc_altivec_preds
;
5686 for (i
= 0; i
< ARRAY_SIZE (bdesc_altivec_preds
); i
++, dp
++)
5688 enum machine_mode mode1
;
5691 mode1
= insn_data
[dp
->icode
].operand
[1].mode
;
5696 type
= int_ftype_int_v4si_v4si
;
5699 type
= int_ftype_int_v8hi_v8hi
;
5702 type
= int_ftype_int_v16qi_v16qi
;
5705 type
= int_ftype_int_v4sf_v4sf
;
5711 def_builtin (dp
->mask
, dp
->name
, type
, dp
->code
);
5714 /* Initialize the abs* operators. */
5715 d
= (struct builtin_description
*) bdesc_abs
;
5716 for (i
= 0; i
< ARRAY_SIZE (bdesc_abs
); i
++, d
++)
5718 enum machine_mode mode0
;
5721 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
5726 type
= v4si_ftype_v4si
;
5729 type
= v8hi_ftype_v8hi
;
5732 type
= v16qi_ftype_v16qi
;
5735 type
= v4sf_ftype_v4sf
;
5741 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
5746 rs6000_common_init_builtins ()
5748 struct builtin_description
*d
;
5751 tree v4sf_ftype_v4sf_v4sf_v16qi
5752 = build_function_type_list (V4SF_type_node
,
5753 V4SF_type_node
, V4SF_type_node
,
5754 V16QI_type_node
, NULL_TREE
);
5755 tree v4si_ftype_v4si_v4si_v16qi
5756 = build_function_type_list (V4SI_type_node
,
5757 V4SI_type_node
, V4SI_type_node
,
5758 V16QI_type_node
, NULL_TREE
);
5759 tree v8hi_ftype_v8hi_v8hi_v16qi
5760 = build_function_type_list (V8HI_type_node
,
5761 V8HI_type_node
, V8HI_type_node
,
5762 V16QI_type_node
, NULL_TREE
);
5763 tree v16qi_ftype_v16qi_v16qi_v16qi
5764 = build_function_type_list (V16QI_type_node
,
5765 V16QI_type_node
, V16QI_type_node
,
5766 V16QI_type_node
, NULL_TREE
);
5767 tree v4si_ftype_char
5768 = build_function_type_list (V4SI_type_node
, char_type_node
, NULL_TREE
);
5769 tree v8hi_ftype_char
5770 = build_function_type_list (V8HI_type_node
, char_type_node
, NULL_TREE
);
5771 tree v16qi_ftype_char
5772 = build_function_type_list (V16QI_type_node
, char_type_node
, NULL_TREE
);
5773 tree v8hi_ftype_v16qi
5774 = build_function_type_list (V8HI_type_node
, V16QI_type_node
, NULL_TREE
);
5775 tree v4sf_ftype_v4sf
5776 = build_function_type_list (V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5778 tree v2si_ftype_v2si_v2si
5779 = build_function_type_list (V2SI_type_node
,
5780 V2SI_type_node
, V2SI_type_node
, NULL_TREE
);
5782 tree v2sf_ftype_v2sf_v2sf
5783 = build_function_type_list (V2SF_type_node
,
5784 V2SF_type_node
, V2SF_type_node
, NULL_TREE
);
5786 tree v2si_ftype_int_int
5787 = build_function_type_list (V2SI_type_node
,
5788 integer_type_node
, integer_type_node
,
5791 tree v2si_ftype_v2si
5792 = build_function_type_list (V2SI_type_node
, V2SI_type_node
, NULL_TREE
);
5794 tree v2sf_ftype_v2sf
5795 = build_function_type_list (V2SF_type_node
,
5796 V2SF_type_node
, NULL_TREE
);
5798 tree v2sf_ftype_v2si
5799 = build_function_type_list (V2SF_type_node
,
5800 V2SI_type_node
, NULL_TREE
);
5802 tree v2si_ftype_v2sf
5803 = build_function_type_list (V2SI_type_node
,
5804 V2SF_type_node
, NULL_TREE
);
5806 tree v2si_ftype_v2si_char
5807 = build_function_type_list (V2SI_type_node
,
5808 V2SI_type_node
, char_type_node
, NULL_TREE
);
5810 tree v2si_ftype_int_char
5811 = build_function_type_list (V2SI_type_node
,
5812 integer_type_node
, char_type_node
, NULL_TREE
);
5814 tree v2si_ftype_char
5815 = build_function_type_list (V2SI_type_node
, char_type_node
, NULL_TREE
);
5817 tree int_ftype_int_int
5818 = build_function_type_list (integer_type_node
,
5819 integer_type_node
, integer_type_node
,
5822 tree v4si_ftype_v4si_v4si
5823 = build_function_type_list (V4SI_type_node
,
5824 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
5825 tree v4sf_ftype_v4si_char
5826 = build_function_type_list (V4SF_type_node
,
5827 V4SI_type_node
, char_type_node
, NULL_TREE
);
5828 tree v4si_ftype_v4sf_char
5829 = build_function_type_list (V4SI_type_node
,
5830 V4SF_type_node
, char_type_node
, NULL_TREE
);
5831 tree v4si_ftype_v4si_char
5832 = build_function_type_list (V4SI_type_node
,
5833 V4SI_type_node
, char_type_node
, NULL_TREE
);
5834 tree v8hi_ftype_v8hi_char
5835 = build_function_type_list (V8HI_type_node
,
5836 V8HI_type_node
, char_type_node
, NULL_TREE
);
5837 tree v16qi_ftype_v16qi_char
5838 = build_function_type_list (V16QI_type_node
,
5839 V16QI_type_node
, char_type_node
, NULL_TREE
);
5840 tree v16qi_ftype_v16qi_v16qi_char
5841 = build_function_type_list (V16QI_type_node
,
5842 V16QI_type_node
, V16QI_type_node
,
5843 char_type_node
, NULL_TREE
);
5844 tree v8hi_ftype_v8hi_v8hi_char
5845 = build_function_type_list (V8HI_type_node
,
5846 V8HI_type_node
, V8HI_type_node
,
5847 char_type_node
, NULL_TREE
);
5848 tree v4si_ftype_v4si_v4si_char
5849 = build_function_type_list (V4SI_type_node
,
5850 V4SI_type_node
, V4SI_type_node
,
5851 char_type_node
, NULL_TREE
);
5852 tree v4sf_ftype_v4sf_v4sf_char
5853 = build_function_type_list (V4SF_type_node
,
5854 V4SF_type_node
, V4SF_type_node
,
5855 char_type_node
, NULL_TREE
);
5856 tree v4sf_ftype_v4sf_v4sf
5857 = build_function_type_list (V4SF_type_node
,
5858 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5859 tree v4sf_ftype_v4sf_v4sf_v4si
5860 = build_function_type_list (V4SF_type_node
,
5861 V4SF_type_node
, V4SF_type_node
,
5862 V4SI_type_node
, NULL_TREE
);
5863 tree v4sf_ftype_v4sf_v4sf_v4sf
5864 = build_function_type_list (V4SF_type_node
,
5865 V4SF_type_node
, V4SF_type_node
,
5866 V4SF_type_node
, NULL_TREE
);
5867 tree v4si_ftype_v4si_v4si_v4si
5868 = build_function_type_list (V4SI_type_node
,
5869 V4SI_type_node
, V4SI_type_node
,
5870 V4SI_type_node
, NULL_TREE
);
5871 tree v8hi_ftype_v8hi_v8hi
5872 = build_function_type_list (V8HI_type_node
,
5873 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5874 tree v8hi_ftype_v8hi_v8hi_v8hi
5875 = build_function_type_list (V8HI_type_node
,
5876 V8HI_type_node
, V8HI_type_node
,
5877 V8HI_type_node
, NULL_TREE
);
5878 tree v4si_ftype_v8hi_v8hi_v4si
5879 = build_function_type_list (V4SI_type_node
,
5880 V8HI_type_node
, V8HI_type_node
,
5881 V4SI_type_node
, NULL_TREE
);
5882 tree v4si_ftype_v16qi_v16qi_v4si
5883 = build_function_type_list (V4SI_type_node
,
5884 V16QI_type_node
, V16QI_type_node
,
5885 V4SI_type_node
, NULL_TREE
);
5886 tree v16qi_ftype_v16qi_v16qi
5887 = build_function_type_list (V16QI_type_node
,
5888 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5889 tree v4si_ftype_v4sf_v4sf
5890 = build_function_type_list (V4SI_type_node
,
5891 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5892 tree v8hi_ftype_v16qi_v16qi
5893 = build_function_type_list (V8HI_type_node
,
5894 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5895 tree v4si_ftype_v8hi_v8hi
5896 = build_function_type_list (V4SI_type_node
,
5897 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5898 tree v8hi_ftype_v4si_v4si
5899 = build_function_type_list (V8HI_type_node
,
5900 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
5901 tree v16qi_ftype_v8hi_v8hi
5902 = build_function_type_list (V16QI_type_node
,
5903 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5904 tree v4si_ftype_v16qi_v4si
5905 = build_function_type_list (V4SI_type_node
,
5906 V16QI_type_node
, V4SI_type_node
, NULL_TREE
);
5907 tree v4si_ftype_v16qi_v16qi
5908 = build_function_type_list (V4SI_type_node
,
5909 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5910 tree v4si_ftype_v8hi_v4si
5911 = build_function_type_list (V4SI_type_node
,
5912 V8HI_type_node
, V4SI_type_node
, NULL_TREE
);
5913 tree v4si_ftype_v8hi
5914 = build_function_type_list (V4SI_type_node
, V8HI_type_node
, NULL_TREE
);
5915 tree int_ftype_v4si_v4si
5916 = build_function_type_list (integer_type_node
,
5917 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
5918 tree int_ftype_v4sf_v4sf
5919 = build_function_type_list (integer_type_node
,
5920 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5921 tree int_ftype_v16qi_v16qi
5922 = build_function_type_list (integer_type_node
,
5923 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5924 tree int_ftype_v8hi_v8hi
5925 = build_function_type_list (integer_type_node
,
5926 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5928 /* Add the simple ternary operators. */
5929 d
= (struct builtin_description
*) bdesc_3arg
;
5930 for (i
= 0; i
< ARRAY_SIZE (bdesc_3arg
); i
++, d
++)
5933 enum machine_mode mode0
, mode1
, mode2
, mode3
;
5936 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
5939 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
5940 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
5941 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
5942 mode3
= insn_data
[d
->icode
].operand
[3].mode
;
5944 /* When all four are of the same mode. */
5945 if (mode0
== mode1
&& mode1
== mode2
&& mode2
== mode3
)
5950 type
= v4si_ftype_v4si_v4si_v4si
;
5953 type
= v4sf_ftype_v4sf_v4sf_v4sf
;
5956 type
= v8hi_ftype_v8hi_v8hi_v8hi
;
5959 type
= v16qi_ftype_v16qi_v16qi_v16qi
;
5965 else if (mode0
== mode1
&& mode1
== mode2
&& mode3
== V16QImode
)
5970 type
= v4si_ftype_v4si_v4si_v16qi
;
5973 type
= v4sf_ftype_v4sf_v4sf_v16qi
;
5976 type
= v8hi_ftype_v8hi_v8hi_v16qi
;
5979 type
= v16qi_ftype_v16qi_v16qi_v16qi
;
5985 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V16QImode
5986 && mode3
== V4SImode
)
5987 type
= v4si_ftype_v16qi_v16qi_v4si
;
5988 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V8HImode
5989 && mode3
== V4SImode
)
5990 type
= v4si_ftype_v8hi_v8hi_v4si
;
5991 else if (mode0
== V4SFmode
&& mode1
== V4SFmode
&& mode2
== V4SFmode
5992 && mode3
== V4SImode
)
5993 type
= v4sf_ftype_v4sf_v4sf_v4si
;
5995 /* vchar, vchar, vchar, 4 bit literal. */
5996 else if (mode0
== V16QImode
&& mode1
== mode0
&& mode2
== mode0
5998 type
= v16qi_ftype_v16qi_v16qi_char
;
6000 /* vshort, vshort, vshort, 4 bit literal. */
6001 else if (mode0
== V8HImode
&& mode1
== mode0
&& mode2
== mode0
6003 type
= v8hi_ftype_v8hi_v8hi_char
;
6005 /* vint, vint, vint, 4 bit literal. */
6006 else if (mode0
== V4SImode
&& mode1
== mode0
&& mode2
== mode0
6008 type
= v4si_ftype_v4si_v4si_char
;
6010 /* vfloat, vfloat, vfloat, 4 bit literal. */
6011 else if (mode0
== V4SFmode
&& mode1
== mode0
&& mode2
== mode0
6013 type
= v4sf_ftype_v4sf_v4sf_char
;
6018 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
6021 /* Add the simple binary operators. */
6022 d
= (struct builtin_description
*) bdesc_2arg
;
6023 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
6025 enum machine_mode mode0
, mode1
, mode2
;
6028 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
6031 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
6032 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
6033 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
6035 /* When all three operands are of the same mode. */
6036 if (mode0
== mode1
&& mode1
== mode2
)
6041 type
= v4sf_ftype_v4sf_v4sf
;
6044 type
= v4si_ftype_v4si_v4si
;
6047 type
= v16qi_ftype_v16qi_v16qi
;
6050 type
= v8hi_ftype_v8hi_v8hi
;
6053 type
= v2si_ftype_v2si_v2si
;
6056 type
= v2sf_ftype_v2sf_v2sf
;
6059 type
= int_ftype_int_int
;
6066 /* A few other combos we really don't want to do manually. */
6068 /* vint, vfloat, vfloat. */
6069 else if (mode0
== V4SImode
&& mode1
== V4SFmode
&& mode2
== V4SFmode
)
6070 type
= v4si_ftype_v4sf_v4sf
;
6072 /* vshort, vchar, vchar. */
6073 else if (mode0
== V8HImode
&& mode1
== V16QImode
&& mode2
== V16QImode
)
6074 type
= v8hi_ftype_v16qi_v16qi
;
6076 /* vint, vshort, vshort. */
6077 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V8HImode
)
6078 type
= v4si_ftype_v8hi_v8hi
;
6080 /* vshort, vint, vint. */
6081 else if (mode0
== V8HImode
&& mode1
== V4SImode
&& mode2
== V4SImode
)
6082 type
= v8hi_ftype_v4si_v4si
;
6084 /* vchar, vshort, vshort. */
6085 else if (mode0
== V16QImode
&& mode1
== V8HImode
&& mode2
== V8HImode
)
6086 type
= v16qi_ftype_v8hi_v8hi
;
6088 /* vint, vchar, vint. */
6089 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V4SImode
)
6090 type
= v4si_ftype_v16qi_v4si
;
6092 /* vint, vchar, vchar. */
6093 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V16QImode
)
6094 type
= v4si_ftype_v16qi_v16qi
;
6096 /* vint, vshort, vint. */
6097 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V4SImode
)
6098 type
= v4si_ftype_v8hi_v4si
;
6100 /* vint, vint, 5 bit literal. */
6101 else if (mode0
== V4SImode
&& mode1
== V4SImode
&& mode2
== QImode
)
6102 type
= v4si_ftype_v4si_char
;
6104 /* vshort, vshort, 5 bit literal. */
6105 else if (mode0
== V8HImode
&& mode1
== V8HImode
&& mode2
== QImode
)
6106 type
= v8hi_ftype_v8hi_char
;
6108 /* vchar, vchar, 5 bit literal. */
6109 else if (mode0
== V16QImode
&& mode1
== V16QImode
&& mode2
== QImode
)
6110 type
= v16qi_ftype_v16qi_char
;
6112 /* vfloat, vint, 5 bit literal. */
6113 else if (mode0
== V4SFmode
&& mode1
== V4SImode
&& mode2
== QImode
)
6114 type
= v4sf_ftype_v4si_char
;
6116 /* vint, vfloat, 5 bit literal. */
6117 else if (mode0
== V4SImode
&& mode1
== V4SFmode
&& mode2
== QImode
)
6118 type
= v4si_ftype_v4sf_char
;
6120 else if (mode0
== V2SImode
&& mode1
== SImode
&& mode2
== SImode
)
6121 type
= v2si_ftype_int_int
;
6123 else if (mode0
== V2SImode
&& mode1
== V2SImode
&& mode2
== QImode
)
6124 type
= v2si_ftype_v2si_char
;
6126 else if (mode0
== V2SImode
&& mode1
== SImode
&& mode2
== QImode
)
6127 type
= v2si_ftype_int_char
;
6130 else if (mode0
== SImode
)
6135 type
= int_ftype_v4si_v4si
;
6138 type
= int_ftype_v4sf_v4sf
;
6141 type
= int_ftype_v16qi_v16qi
;
6144 type
= int_ftype_v8hi_v8hi
;
6154 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
6157 /* Add the simple unary operators. */
6158 d
= (struct builtin_description
*) bdesc_1arg
;
6159 for (i
= 0; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
6161 enum machine_mode mode0
, mode1
;
6164 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
6167 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
6168 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
6170 if (mode0
== V4SImode
&& mode1
== QImode
)
6171 type
= v4si_ftype_char
;
6172 else if (mode0
== V8HImode
&& mode1
== QImode
)
6173 type
= v8hi_ftype_char
;
6174 else if (mode0
== V16QImode
&& mode1
== QImode
)
6175 type
= v16qi_ftype_char
;
6176 else if (mode0
== V4SFmode
&& mode1
== V4SFmode
)
6177 type
= v4sf_ftype_v4sf
;
6178 else if (mode0
== V8HImode
&& mode1
== V16QImode
)
6179 type
= v8hi_ftype_v16qi
;
6180 else if (mode0
== V4SImode
&& mode1
== V8HImode
)
6181 type
= v4si_ftype_v8hi
;
6182 else if (mode0
== V2SImode
&& mode1
== V2SImode
)
6183 type
= v2si_ftype_v2si
;
6184 else if (mode0
== V2SFmode
&& mode1
== V2SFmode
)
6185 type
= v2sf_ftype_v2sf
;
6186 else if (mode0
== V2SFmode
&& mode1
== V2SImode
)
6187 type
= v2sf_ftype_v2si
;
6188 else if (mode0
== V2SImode
&& mode1
== V2SFmode
)
6189 type
= v2si_ftype_v2sf
;
6190 else if (mode0
== V2SImode
&& mode1
== QImode
)
6191 type
= v2si_ftype_char
;
6195 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
6200 /* Expand a block move operation, and return 1 if successful. Return 0
6201 if we should let the compiler generate normal code.
6203 operands[0] is the destination
6204 operands[1] is the source
6205 operands[2] is the length
6206 operands[3] is the alignment */
6208 #define MAX_MOVE_REG 4
/* Expand a fixed-size block copy (per the comment above: operands[0..3]
   are destination, source, length, alignment).  Returns nonzero when this
   expander handles the move itself.
   NOTE(review): this region was garbled in extraction and several original
   lines are missing; the comments below annotate only the visible logic.  */
6211 expand_block_move (operands
)
6214 rtx orig_dest
= operands
[0];
6215 rtx orig_src
= operands
[1];
6216 rtx bytes_rtx
= operands
[2];
6217 rtx align_rtx
= operands
[3];
/* constp: nonzero when the length is a compile-time constant.  */
6218 int constp
= (GET_CODE (bytes_rtx
) == CONST_INT
);
6224 /* If this is not a fixed size move, just call memcpy */
6228 /* If this is not a fixed size alignment, abort */
6229 if (GET_CODE (align_rtx
) != CONST_INT
)
6231 align
= INTVAL (align_rtx
);
6233 /* Anything to move? */
6234 bytes
= INTVAL (bytes_rtx
);
6238 /* store_one_arg depends on expand_block_move to handle at least the size of
6239 reg_parm_stack_space. */
/* Give up on moves larger than this expander is expected to handle.  */
6240 if (bytes
> (TARGET_POWERPC64
? 64 : 32))
/* Strategy 1: the target has load/store-string (lsi/stsi) instructions;
   copy in chunks of up to 32 bytes, choosing the widest pattern whose
   scratch registers are available.  */
6243 if (TARGET_STRING
) /* string instructions are available */
6245 for (offset
= 0; bytes
> 0; offset
+= move_bytes
, bytes
-= move_bytes
)
6248 rtx (*movstrsi
) PARAMS ((rtx
, rtx
, rtx
, rtx
));
6249 rtx (*mov
) PARAMS ((rtx
, rtx
));
6251 enum machine_mode mode
= BLKmode
;
6254 if (bytes
> 24 /* move up to 32 bytes at a time */
6262 && ! fixed_regs
[12])
6264 move_bytes
= (bytes
> 32) ? 32 : bytes
;
6265 gen_func
.movstrsi
= gen_movstrsi_8reg
;
6267 else if (bytes
> 16 /* move up to 24 bytes at a time */
6273 && ! fixed_regs
[10])
6275 move_bytes
= (bytes
> 24) ? 24 : bytes
;
6276 gen_func
.movstrsi
= gen_movstrsi_6reg
;
6278 else if (bytes
> 8 /* move up to 16 bytes at a time */
6284 move_bytes
= (bytes
> 16) ? 16 : bytes
;
6285 gen_func
.movstrsi
= gen_movstrsi_4reg
;
6287 else if (bytes
>= 8 && TARGET_POWERPC64
6288 /* 64-bit loads and stores require word-aligned
6290 && (align
>= 8 || (! STRICT_ALIGNMENT
&& align
>= 4)))
6294 gen_func
.mov
= gen_movdi
;
6296 else if (bytes
> 4 && !TARGET_POWERPC64
)
6297 { /* move up to 8 bytes at a time */
6298 move_bytes
= (bytes
> 8) ? 8 : bytes
;
6299 gen_func
.movstrsi
= gen_movstrsi_2reg
;
6301 else if (bytes
>= 4 && (align
>= 4 || ! STRICT_ALIGNMENT
))
6302 { /* move 4 bytes */
6305 gen_func
.mov
= gen_movsi
;
6307 else if (bytes
== 2 && (align
>= 2 || ! STRICT_ALIGNMENT
))
6308 { /* move 2 bytes */
6311 gen_func
.mov
= gen_movhi
;
6313 else if (bytes
== 1) /* move 1 byte */
6317 gen_func
.mov
= gen_movqi
;
6320 { /* move up to 4 bytes at a time */
6321 move_bytes
= (bytes
> 4) ? 4 : bytes
;
6322 gen_func
.movstrsi
= gen_movstrsi_1reg
;
/* Form the (possibly BLK-mode) memory references for this chunk.  */
6325 src
= adjust_address (orig_src
, mode
, offset
);
6326 dest
= adjust_address (orig_dest
, mode
, offset
);
6328 if (mode
== BLKmode
)
6330 /* Move the address into scratch registers. The movstrsi
6331 patterns require zero offset. */
6332 if (!REG_P (XEXP (src
, 0)))
6334 rtx src_reg
= copy_addr_to_reg (XEXP (src
, 0));
6335 src
= replace_equiv_address (src
, src_reg
);
6337 set_mem_size (src
, GEN_INT (move_bytes
));
6339 if (!REG_P (XEXP (dest
, 0)))
6341 rtx dest_reg
= copy_addr_to_reg (XEXP (dest
, 0));
6342 dest
= replace_equiv_address (dest
, dest_reg
);
6344 set_mem_size (dest
, GEN_INT (move_bytes
));
/* The length operand of the string patterns uses only the low 5 bits.  */
6346 emit_insn ((*gen_func
.movstrsi
) (dest
, src
,
6347 GEN_INT (move_bytes
& 31),
/* Non-BLK chunk: copy through a fresh temporary register.  */
6352 rtx tmp_reg
= gen_reg_rtx (mode
);
6354 emit_insn ((*gen_func
.mov
) (tmp_reg
, src
));
6355 emit_insn ((*gen_func
.mov
) (dest
, tmp_reg
));
/* Strategy 2: no string instructions; emit individual load/store pairs,
   batching the stores so loads can be scheduled ahead of them.  */
6360 else /* string instructions not available */
6362 rtx stores
[MAX_MOVE_REG
];
6366 for (offset
= 0; bytes
> 0; offset
+= move_bytes
, bytes
-= move_bytes
)
6368 rtx (*gen_mov_func
) PARAMS ((rtx
, rtx
));
6369 enum machine_mode mode
;
6370 rtx src
, dest
, tmp_reg
;
6372 /* Generate the appropriate load and store, saving the stores
6374 if (bytes
>= 8 && TARGET_POWERPC64
6375 /* 64-bit loads and stores require word-aligned
6377 && (align
>= 8 || (! STRICT_ALIGNMENT
&& align
>= 4)))
6381 gen_mov_func
= gen_movdi
;
6383 else if (bytes
>= 4 && (align
>= 4 || ! STRICT_ALIGNMENT
))
6387 gen_mov_func
= gen_movsi
;
6389 else if (bytes
>= 2 && (align
>= 2 || ! STRICT_ALIGNMENT
))
6393 gen_mov_func
= gen_movhi
;
6399 gen_mov_func
= gen_movqi
;
6402 src
= adjust_address (orig_src
, mode
, offset
);
6403 dest
= adjust_address (orig_dest
, mode
, offset
);
6404 tmp_reg
= gen_reg_rtx (mode
);
/* Emit the load now; queue the matching store.  */
6406 emit_insn ((*gen_mov_func
) (tmp_reg
, src
));
6407 stores
[num_reg
++] = (*gen_mov_func
) (dest
, tmp_reg
);
/* Flush the batched stores once MAX_MOVE_REG are pending.  */
6409 if (num_reg
>= MAX_MOVE_REG
)
6411 for (i
= 0; i
< num_reg
; i
++)
6412 emit_insn (stores
[i
]);
/* Flush any stores left over after the loop.  */
6417 for (i
= 0; i
< num_reg
; i
++)
6418 emit_insn (stores
[i
]);
6425 /* Return 1 if OP is a load multiple operation. It is known to be a
6426 PARALLEL and the first section will be tested. */
/* Predicate: recognize a PARALLEL implementing a load-multiple sequence.
   Element 0 fixes the first destination register and base address; every
   later element must be (set (reg:SI dest+i) (mem:SI (plus base (i*4)))).  */
6429 load_multiple_operation (op
, mode
)
6431 enum machine_mode mode ATTRIBUTE_UNUSED
;
6433 int count
= XVECLEN (op
, 0);
6434 unsigned int dest_regno
;
6438 /* Perform a quick check so we don't blow up below. */
6440 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6441 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
6442 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != MEM
)
/* Element 0 establishes the register/address pattern to match.  */
6445 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
6446 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0);
6448 for (i
= 1; i
< count
; i
++)
6450 rtx elt
= XVECEXP (op
, 0, i
);
/* Reject element i unless it loads reg dest_regno+i from base + 4*i.  */
6452 if (GET_CODE (elt
) != SET
6453 || GET_CODE (SET_DEST (elt
)) != REG
6454 || GET_MODE (SET_DEST (elt
)) != SImode
6455 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
6456 || GET_CODE (SET_SRC (elt
)) != MEM
6457 || GET_MODE (SET_SRC (elt
)) != SImode
6458 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != PLUS
6459 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt
), 0), 0), src_addr
)
6460 || GET_CODE (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != CONST_INT
6461 || INTVAL (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != i
* 4)
6468 /* Similar, but tests for store multiple. Here, the second vector element
6469 is a CLOBBER. It will be tested later. */
/* Predicate: recognize a PARALLEL implementing a store-multiple sequence.
   Mirror image of load_multiple_operation; note count excludes the second
   vector element (a CLOBBER, tested elsewhere), so element i+1 is checked.  */
6472 store_multiple_operation (op
, mode
)
6474 enum machine_mode mode ATTRIBUTE_UNUSED
;
6476 int count
= XVECLEN (op
, 0) - 1;
6477 unsigned int src_regno
;
6481 /* Perform a quick check so we don't blow up below. */
6483 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6484 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != MEM
6485 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != REG
)
/* Element 0 establishes the first source register and base address.  */
6488 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
6489 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, 0)), 0);
6491 for (i
= 1; i
< count
; i
++)
/* i + 1 skips over the CLOBBER at vector index 1.  */
6493 rtx elt
= XVECEXP (op
, 0, i
+ 1);
6495 if (GET_CODE (elt
) != SET
6496 || GET_CODE (SET_SRC (elt
)) != REG
6497 || GET_MODE (SET_SRC (elt
)) != SImode
6498 || REGNO (SET_SRC (elt
)) != src_regno
+ i
6499 || GET_CODE (SET_DEST (elt
)) != MEM
6500 || GET_MODE (SET_DEST (elt
)) != SImode
6501 || GET_CODE (XEXP (SET_DEST (elt
), 0)) != PLUS
6502 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt
), 0), 0), dest_addr
)
6503 || GET_CODE (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != CONST_INT
6504 || INTVAL (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != i
* 4)
6511 /* Return a string to perform a load_multiple operation.
6512 operands[0] is the vector.
6513 operands[1] is the source address.
6514 operands[2] is the first destination register. */
/* Return the assembler template for a load-multiple (see comment above:
   operands[0] = vector, [1] = source address, [2] = first dest register).
   The special cases below handle the address register being overwritten
   by one of the loaded registers.  */
6517 rs6000_output_load_multiple (operands
)
6520 /* We have to handle the case where the pseudo used to contain the address
6521 is assigned to one of the output registers. */
6523 int words
= XVECLEN (operands
[0], 0);
/* Single word: a plain lwz suffices.  */
6526 if (XVECLEN (operands
[0], 0) == 1)
6527 return "{l|lwz} %2,0(%1)";
/* Scan for the destination register that overlaps the address.  */
6529 for (i
= 0; i
< words
; i
++)
6530 if (refers_to_regno_p (REGNO (operands
[2]) + i
,
6531 REGNO (operands
[2]) + i
+ 1, operands
[1], 0))
/* Overlap in the first register: lswi all-but-last, load the last word
   (which holds the address) separately, last.  */
6535 xop
[0] = GEN_INT (4 * (words
-1));
6536 xop
[1] = operands
[1];
6537 xop
[2] = operands
[2];
6538 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop
);
/* Overlap in a later register: bump the address past the first word,
   lswi the rest, then fetch the skipped word from -4.  */
6543 xop
[0] = GEN_INT (4 * (words
-1));
6544 xop
[1] = operands
[1];
6545 xop
[2] = gen_rtx_REG (SImode
, REGNO (operands
[2]) + 1);
6546 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop
);
/* General overlap case: emit one lwz per word, loading the word that
   clobbers the address register last.  */
6551 for (j
= 0; j
< words
; j
++)
6554 xop
[0] = GEN_INT (j
* 4);
6555 xop
[1] = operands
[1];
6556 xop
[2] = gen_rtx_REG (SImode
, REGNO (operands
[2]) + j
);
6557 output_asm_insn ("{l|lwz} %2,%0(%1)", xop
);
6559 xop
[0] = GEN_INT (i
* 4);
6560 xop
[1] = operands
[1];
6561 output_asm_insn ("{l|lwz} %1,%0(%1)", xop
);
/* No overlap: a single load-string does the whole transfer.  */
6566 return "{lsi|lswi} %2,%1,%N0";
6569 /* Return 1 for a parallel vrsave operation. */
/* Predicate for a VRSAVE update PARALLEL: element 0 must be a SET whose
   source is an UNSPEC_VOLATILE, with VRSAVE on one side; remaining
   elements may only be SET or CLOBBER.  */
6572 vrsave_operation (op
, mode
)
6574 enum machine_mode mode ATTRIBUTE_UNUSED
;
6576 int count
= XVECLEN (op
, 0);
6577 unsigned int dest_regno
, src_regno
;
6581 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6582 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
6583 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != UNSPEC_VOLATILE
)
6586 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
6587 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
/* VRSAVE must appear as either the destination or the source.  */
6589 if (dest_regno
!= VRSAVE_REGNO
6590 && src_regno
!= VRSAVE_REGNO
)
6593 for (i
= 1; i
< count
; i
++)
6595 rtx elt
= XVECEXP (op
, 0, i
);
6597 if (GET_CODE (elt
) != CLOBBER
6598 && GET_CODE (elt
) != SET
)
6605 /* Return 1 for an PARALLEL suitable for mtcrf. */
/* Predicate for an mtcrf PARALLEL: every element must SET one CR field
   (CCmode CR register) from a two-operand UNSPEC of the same SImode GPR
   and the CONST_INT mask selecting that field.  */
6608 mtcrf_operation (op
, mode
)
6610 enum machine_mode mode ATTRIBUTE_UNUSED
;
6612 int count
= XVECLEN (op
, 0);
6616 /* Perform a quick check so we don't blow up below. */
6618 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6619 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != UNSPEC
6620 || XVECLEN (SET_SRC (XVECEXP (op
, 0, 0)), 0) != 2)
/* All elements must use this same source GPR.  */
6622 src_reg
= XVECEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0, 0);
6624 if (GET_CODE (src_reg
) != REG
6625 || GET_MODE (src_reg
) != SImode
6626 || ! INT_REGNO_P (REGNO (src_reg
)))
6629 for (i
= 0; i
< count
; i
++)
6631 rtx exp
= XVECEXP (op
, 0, i
);
6635 if (GET_CODE (exp
) != SET
6636 || GET_CODE (SET_DEST (exp
)) != REG
6637 || GET_MODE (SET_DEST (exp
)) != CCmode
6638 || ! CR_REGNO_P (REGNO (SET_DEST (exp
))))
6640 unspec
= SET_SRC (exp
);
/* The mask bit position is determined by the CR field number.  */
6641 maskval
= 1 << (MAX_CR_REGNO
- REGNO (SET_DEST (exp
)));
/* XINT(unspec,1) != 20: 20 is the unspec number used for mtcrf here.  */
6643 if (GET_CODE (unspec
) != UNSPEC
6644 || XINT (unspec
, 1) != 20
6645 || XVECLEN (unspec
, 0) != 2
6646 || XVECEXP (unspec
, 0, 0) != src_reg
6647 || GET_CODE (XVECEXP (unspec
, 0, 1)) != CONST_INT
6648 || INTVAL (XVECEXP (unspec
, 0, 1)) != maskval
)
6654 /* Return 1 for an PARALLEL suitable for lmw. */
/* Predicate for an lmw PARALLEL: loads registers dest_regno..31 (count must
   equal 32 - dest_regno) from consecutive SImode words at base + 4*i.  */
6657 lmw_operation (op
, mode
)
6659 enum machine_mode mode ATTRIBUTE_UNUSED
;
6661 int count
= XVECLEN (op
, 0);
6662 unsigned int dest_regno
;
6664 unsigned int base_regno
;
6665 HOST_WIDE_INT offset
;
6668 /* Perform a quick check so we don't blow up below. */
6670 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6671 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
6672 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != MEM
)
6675 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
6676 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0);
/* lmw always loads through r31, so the count is fixed by dest_regno.  */
6679 || count
!= 32 - (int) dest_regno
)
/* Decompose the base address: either (reg) with offset 0 ...  */
6682 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr
, 0))
6685 base_regno
= REGNO (src_addr
);
6686 if (base_regno
== 0)
/* ... or (plus (reg) (const_int)).  */
6689 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, src_addr
, 0))
6691 offset
= INTVAL (XEXP (src_addr
, 1));
6692 base_regno
= REGNO (XEXP (src_addr
, 0));
6697 for (i
= 0; i
< count
; i
++)
6699 rtx elt
= XVECEXP (op
, 0, i
);
6702 HOST_WIDE_INT newoffset
;
6704 if (GET_CODE (elt
) != SET
6705 || GET_CODE (SET_DEST (elt
)) != REG
6706 || GET_MODE (SET_DEST (elt
)) != SImode
6707 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
6708 || GET_CODE (SET_SRC (elt
)) != MEM
6709 || GET_MODE (SET_SRC (elt
)) != SImode
)
/* Each element's address must decompose the same way as the base.  */
6711 newaddr
= XEXP (SET_SRC (elt
), 0);
6712 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr
, 0))
6717 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, newaddr
, 0))
6719 addr_reg
= XEXP (newaddr
, 0);
6720 newoffset
= INTVAL (XEXP (newaddr
, 1));
/* Same base register, stride of exactly 4 bytes per element.  */
6724 if (REGNO (addr_reg
) != base_regno
6725 || newoffset
!= offset
+ 4 * i
)
6732 /* Return 1 for an PARALLEL suitable for stmw. */
/* Predicate for an stmw PARALLEL: stores registers src_regno..31 (count
   must equal 32 - src_regno) to consecutive SImode words at base + 4*i.
   Mirror image of lmw_operation above.  */
6735 stmw_operation (op
, mode
)
6737 enum machine_mode mode ATTRIBUTE_UNUSED
;
6739 int count
= XVECLEN (op
, 0);
6740 unsigned int src_regno
;
6742 unsigned int base_regno
;
6743 HOST_WIDE_INT offset
;
6746 /* Perform a quick check so we don't blow up below. */
6748 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6749 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != MEM
6750 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != REG
)
6753 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
6754 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, 0)), 0);
/* stmw always stores through r31, so the count is fixed by src_regno.  */
6757 || count
!= 32 - (int) src_regno
)
/* Decompose the base address: either (reg) with offset 0 ...  */
6760 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr
, 0))
6763 base_regno
= REGNO (dest_addr
)
;
6764 if (base_regno
== 0)
/* ... or (plus (reg) (const_int)).  */
6767 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, dest_addr
, 0))
6769 offset
= INTVAL (XEXP (dest_addr
, 1));
6770 base_regno
= REGNO (XEXP (dest_addr
, 0));
6775 for (i
= 0; i
< count
; i
++)
6777 rtx elt
= XVECEXP (op
, 0, i
);
6780 HOST_WIDE_INT newoffset
;
6782 if (GET_CODE (elt
) != SET
6783 || GET_CODE (SET_SRC (elt
)) != REG
6784 || GET_MODE (SET_SRC (elt
)) != SImode
6785 || REGNO (SET_SRC (elt
)) != src_regno
+ i
6786 || GET_CODE (SET_DEST (elt
)) != MEM
6787 || GET_MODE (SET_DEST (elt
)) != SImode
)
6789 newaddr
= XEXP (SET_DEST (elt
), 0);
6790 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr
, 0))
6795 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, newaddr
, 0))
6797 addr_reg
= XEXP (newaddr
, 0);
6798 newoffset
= INTVAL (XEXP (newaddr
, 1));
/* Same base register, stride of exactly 4 bytes per element.  */
6802 if (REGNO (addr_reg
) != base_regno
6803 || newoffset
!= offset
+ 4 * i
)
6810 /* A validation routine: say whether CODE, a condition code, and MODE
6811 match. The other alternatives either don't make sense or should
6812 never be generated. */
/* Sanity-check that comparison CODE is compatible with CC mode MODE:
   rejects signed codes on CCUNSmode, unsigned codes off CCUNSmode,
   FP-only codes off CCFPmode, and EQ/NE-only info in CCEQmode.  */
6815 validate_condition_mode (code
, mode
)
6817 enum machine_mode mode
;
6819 if (GET_RTX_CLASS (code
) != '<'
6820 || GET_MODE_CLASS (mode
) != MODE_CC
)
6823 /* These don't make sense. */
/* Signed comparisons against an unsigned CC mode.  */
6824 if ((code
== GT
|| code
== LT
|| code
== GE
|| code
== LE
)
6825 && mode
== CCUNSmode
)
/* Unsigned comparisons demand the unsigned CC mode.  */
6828 if ((code
== GTU
|| code
== LTU
|| code
== GEU
|| code
== LEU
)
6829 && mode
!= CCUNSmode
)
/* Ordered/unordered codes are floating-point only.  */
6832 if (mode
!= CCFPmode
6833 && (code
== ORDERED
|| code
== UNORDERED
6834 || code
== UNEQ
|| code
== LTGT
6835 || code
== UNGT
|| code
== UNLT
6836 || code
== UNGE
|| code
== UNLE
))
6839 /* These should never be generated except for
6840 flag_unsafe_math_optimizations and flag_finite_math_only. */
6841 if (mode
== CCFPmode
6842 && ! flag_unsafe_math_optimizations
6843 && ! flag_finite_math_only
6844 && (code
== LE
|| code
== GE
6845 || code
== UNEQ
|| code
== LTGT
6846 || code
== UNGT
|| code
== UNLT
))
6849 /* These are invalid; the information is not there. */
6850 if (mode
== CCEQmode
6851 && code
!= EQ
&& code
!= NE
)
6855 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
6856 We only check the opcode against the mode of the CC value here. */
/* Predicate: OP is a comparison whose first operand is in a CC mode, hence
   usable in a branch insn.  Only the opcode vs. CC mode is checked here.  */
6859 branch_comparison_operator (op
, mode
)
6861 enum machine_mode mode ATTRIBUTE_UNUSED
;
6863 enum rtx_code code
= GET_CODE (op
);
6864 enum machine_mode cc_mode
;
6866 if (GET_RTX_CLASS (code
) != '<')
/* The comparison must read a condition-code register.  */
6869 cc_mode
= GET_MODE (XEXP (op
, 0));
6870 if (GET_MODE_CLASS (cc_mode
) != MODE_CC
)
/* Cross-check opcode/mode consistency before accepting.  */
6873 validate_condition_mode (code
, cc_mode
);
6878 /* Return 1 if OP is a comparison operation that is valid for a branch
6879 insn and which is true if the corresponding bit in the CC register
/* Predicate: OP is a branch comparison that is true when the tested CR bit
   is set (EQ/LT/GT/LTU/GTU/UNORDERED, plus NE on SPE soft-float).  */
6883 branch_positive_comparison_operator (op
, mode
)
6885 enum machine_mode mode
;
/* Must first qualify as a branch comparison at all.  */
6889 if (! branch_comparison_operator (op
, mode
))
6892 code
= GET_CODE (op
);
6893 return (code
== EQ
|| code
== LT
|| code
== GT
6894 || (TARGET_SPE
&& TARGET_HARD_FLOAT
&& !TARGET_FPRS
&& code
== NE
)
6895 || code
== LTU
|| code
== GTU
6896 || code
== UNORDERED
);
6899 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
6900 We check the opcode against the mode of the CC value and disallow EQ or
6901 NE comparisons for integers. */
/* Predicate: OP is a comparison valid for an scc insn — like
   branch_comparison_operator, but additionally checks OP's own mode and
   rejects integer (non-CCFPmode) NE per the comment above.  */
6904 scc_comparison_operator (op
, mode
)
6906 enum machine_mode mode
;
6908 enum rtx_code code
= GET_CODE (op
);
6909 enum machine_mode cc_mode
;
6911 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
6914 if (GET_RTX_CLASS (code
) != '<')
/* The comparison must read a condition-code register.  */
6917 cc_mode
= GET_MODE (XEXP (op
, 0));
6918 if (GET_MODE_CLASS (cc_mode
) != MODE_CC
)
6921 validate_condition_mode (code
, cc_mode
);
/* Integer NE is disallowed for scc.  */
6923 if (code
== NE
&& cc_mode
!= CCFPmode
)
6930 trap_comparison_operator (op
, mode
)
6932 enum machine_mode mode
;
6934 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
6936 return GET_RTX_CLASS (GET_CODE (op
)) == '<';
6940 boolean_operator (op
, mode
)
6942 enum machine_mode mode ATTRIBUTE_UNUSED
;
6944 enum rtx_code code
= GET_CODE (op
);
6945 return (code
== AND
|| code
== IOR
|| code
== XOR
);
6949 boolean_or_operator (op
, mode
)
6951 enum machine_mode mode ATTRIBUTE_UNUSED
;
6953 enum rtx_code code
= GET_CODE (op
);
6954 return (code
== IOR
|| code
== XOR
);
6958 min_max_operator (op
, mode
)
6960 enum machine_mode mode ATTRIBUTE_UNUSED
;
6962 enum rtx_code code
= GET_CODE (op
);
6963 return (code
== SMIN
|| code
== SMAX
|| code
== UMIN
|| code
== UMAX
);
6966 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
6967 mask required to convert the result of a rotate insn into a shift
6968 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
6971 includes_lshift_p (shiftop
, andop
)
6975 unsigned HOST_WIDE_INT shift_mask
= ~(unsigned HOST_WIDE_INT
) 0;
6977 shift_mask
<<= INTVAL (shiftop
);
6979 return (INTVAL (andop
) & 0xffffffff & ~shift_mask
) == 0;
6982 /* Similar, but for right shift. */
6985 includes_rshift_p (shiftop
, andop
)
6989 unsigned HOST_WIDE_INT shift_mask
= ~(unsigned HOST_WIDE_INT
) 0;
6991 shift_mask
>>= INTVAL (shiftop
);
6993 return (INTVAL (andop
) & 0xffffffff & ~shift_mask
) == 0;
6996 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
6997 to perform a left shift. It must have exactly SHIFTOP least
6998 significant 0's, then one or more 1's, then zero or more 0's. */
/* Return 1 if ANDOP is a mask usable with rldic to perform a left shift by
   SHIFTOP (per the comment above: exactly SHIFTOP low 0's, then 1's, then
   0's).  Handles both CONST_INT and (for 32-bit hosts) CONST_DOUBLE masks.
   NOTE(review): several computation lines were lost in extraction; the
   comments below annotate only the visible logic.  */
7001 includes_rldic_lshift_p (shiftop
, andop
)
7005 if (GET_CODE (andop
) == CONST_INT
)
7007 HOST_WIDE_INT c
, lsb
, shift_mask
;
/* All-zero and all-one masks never match the required shape.  */
7010 if (c
== 0 || c
== ~0)
7014 shift_mask
<<= INTVAL (shiftop
);
7016 /* Find the least significant one bit. */
7019 /* It must coincide with the LSB of the shift mask. */
7020 if (-lsb
!= shift_mask
)
7023 /* Invert to look for the next transition (if any). */
7026 /* Remove the low group of ones (originally low group of zeros). */
7029 /* Again find the lsb, and check we have all 1's above. */
/* CONST_DOUBLE case: the 64-bit mask is split into low/high words on
   hosts where HOST_WIDE_INT is narrower than 64 bits.  */
7033 else if (GET_CODE (andop
) == CONST_DOUBLE
7034 && (GET_MODE (andop
) == VOIDmode
|| GET_MODE (andop
) == DImode
))
7036 HOST_WIDE_INT low
, high
, lsb
;
7037 HOST_WIDE_INT shift_mask_low
, shift_mask_high
;
7039 low
= CONST_DOUBLE_LOW (andop
);
7040 if (HOST_BITS_PER_WIDE_INT
< 64)
7041 high
= CONST_DOUBLE_HIGH (andop
);
/* Reject the degenerate all-zero / all-one 64-bit masks.  */
7043 if ((low
== 0 && (HOST_BITS_PER_WIDE_INT
>= 64 || high
== 0))
7044 || (low
== ~0 && (HOST_BITS_PER_WIDE_INT
>= 64 || high
== ~0)))
/* Mask lives entirely in the high word.  */
7047 if (HOST_BITS_PER_WIDE_INT
< 64 && low
== 0)
7049 shift_mask_high
= ~0;
7050 if (INTVAL (shiftop
) > 32)
7051 shift_mask_high
<<= INTVAL (shiftop
) - 32;
7055 if (-lsb
!= shift_mask_high
|| INTVAL (shiftop
) < 32)
7062 return high
== -lsb
;
/* Otherwise check the low word against the shift mask.  */
7065 shift_mask_low
= ~0;
7066 shift_mask_low
<<= INTVAL (shiftop
);
7070 if (-lsb
!= shift_mask_low
)
7073 if (HOST_BITS_PER_WIDE_INT
< 64)
7078 if (HOST_BITS_PER_WIDE_INT
< 64 && low
== 0)
7081 return high
== -lsb
;
7085 return low
== -lsb
&& (HOST_BITS_PER_WIDE_INT
>= 64 || high
== ~0);
7091 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
7092 to perform a left shift. It must have SHIFTOP or more least
7093 significant 0's, with the remainder of the word 1's. */
/* Return 1 if ANDOP is a mask usable with rldicr to perform a left shift
   by SHIFTOP (per the comment above: at least SHIFTOP low 0's, then all
   1's).  Handles CONST_INT and (for 32-bit hosts) CONST_DOUBLE masks.
   NOTE(review): several computation lines were lost in extraction; the
   comments below annotate only the visible logic.  */
7096 includes_rldicr_lshift_p (shiftop
, andop
)
7100 if (GET_CODE (andop
) == CONST_INT
)
7102 HOST_WIDE_INT c
, lsb
, shift_mask
;
7105 shift_mask
<<= INTVAL (shiftop
);
7108 /* Find the least significant one bit. */
7111 /* It must be covered by the shift mask.
7112 This test also rejects c == 0. */
7113 if ((lsb
& shift_mask
) == 0)
7116 /* Check we have all 1's above the transition, and reject all 1's. */
7117 return c
== -lsb
&& lsb
!= 1;
/* CONST_DOUBLE case: mask split into low/high words on 32-bit hosts.  */
7119 else if (GET_CODE (andop
) == CONST_DOUBLE
7120 && (GET_MODE (andop
) == VOIDmode
|| GET_MODE (andop
) == DImode
))
7122 HOST_WIDE_INT low
, lsb
, shift_mask_low
;
7124 low
= CONST_DOUBLE_LOW (andop
);
7126 if (HOST_BITS_PER_WIDE_INT
< 64)
7128 HOST_WIDE_INT high
, shift_mask_high
;
7130 high
= CONST_DOUBLE_HIGH (andop
);
/* Mask transition falls in the high word.  */
7134 shift_mask_high
= ~0;
7135 if (INTVAL (shiftop
) > 32)
7136 shift_mask_high
<<= INTVAL (shiftop
) - 32;
7140 if ((lsb
& shift_mask_high
) == 0)
7143 return high
== -lsb
;
/* Otherwise the transition is in the low word.  */
7149 shift_mask_low
= ~0;
7150 shift_mask_low
<<= INTVAL (shiftop
);
7154 if ((lsb
& shift_mask_low
) == 0)
7157 return low
== -lsb
&& lsb
!= 1;
7163 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
7164 for lfq and stfq insns.
7166 Note reg1 and reg2 *must* be hard registers. To be sure we will
7167 abort if we are passed pseudo registers. */
7170 registers_ok_for_quad_peep (reg1
, reg2
)
7173 /* We might have been passed a SUBREG. */
7174 if (GET_CODE (reg1
) != REG
|| GET_CODE (reg2
) != REG
)
7177 return (REGNO (reg1
) == REGNO (reg2
) - 1);
7180 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
7181 addr1 and addr2 must be in consecutive memory locations
7182 (addr2 == addr1 + 8). */
/* Return 1 if ADDR1/ADDR2 address consecutive doublewords (addr2 ==
   addr1 + 8) in the forms required by the lfq/stfq peepholes: addr1 is
   (reg) or (plus reg const), addr2 is (plus reg const) on the same reg.  */
7185 addrs_ok_for_quad_peep (addr1
, addr2
)
7192 /* Extract an offset (if used) from the first addr. */
7193 if (GET_CODE (addr1
) == PLUS
)
7195 /* If not a REG, return zero. */
7196 if (GET_CODE (XEXP (addr1
, 0)) != REG
)
7200 reg1
= REGNO (XEXP (addr1
, 0));
7201 /* The offset must be constant! */
7202 if (GET_CODE (XEXP (addr1
, 1)) != CONST_INT
)
7204 offset1
= INTVAL (XEXP (addr1
, 1));
7207 else if (GET_CODE (addr1
) != REG
)
7211 reg1
= REGNO (addr1
);
7212 /* This was a simple (mem (reg)) expression. Offset is 0. */
7216 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
7217 if (GET_CODE (addr2
) != PLUS
)
7220 if (GET_CODE (XEXP (addr2
, 0)) != REG
7221 || GET_CODE (XEXP (addr2
, 1)) != CONST_INT
)
/* Both addresses must use the same base register.  */
7224 if (reg1
!= REGNO (XEXP (addr2
, 0)))
7227 /* The offset for the second addr must be 8 more than the first addr. */
7228 if (INTVAL (XEXP (addr2
, 1)) != offset1
+ 8)
7231 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7236 /* Return the register class of a scratch register needed to copy IN into
7237 or out of a register in CLASS in MODE. If it can be done directly,
7238 NO_REGS is returned. */
/* Return the register class of a scratch register needed to copy IN into
   or out of CLASS in MODE, or NO_REGS (per the comment above) when the
   copy can be done directly.  */
7241 secondary_reload_class (class, mode
, in
)
7242 enum reg_class
class;
7243 enum machine_mode mode ATTRIBUTE_UNUSED
;
/* ELF and Darwin-PIC targets need a BASE_REGS intermediate for symbolic
   operands, as explained in the original comment just below.  */
7248 if (TARGET_ELF
|| (DEFAULT_ABI
== ABI_DARWIN
7250 && MACHOPIC_INDIRECT
7254 /* We cannot copy a symbolic operand directly into anything
7255 other than BASE_REGS for TARGET_ELF. So indicate that a
7256 register from BASE_REGS is needed as an intermediate
7259 On Darwin, pic addresses require a load from memory, which
7260 needs a base register. */
7261 if (class != BASE_REGS
7262 && (GET_CODE (in
) == SYMBOL_REF
7263 || GET_CODE (in
) == HIGH
7264 || GET_CODE (in
) == LABEL_REF
7265 || GET_CODE (in
) == CONST
))
/* Resolve IN to a hard register number where possible; pseudos are
   mapped through true_regnum.  */
7269 if (GET_CODE (in
) == REG
)
7272 if (regno
>= FIRST_PSEUDO_REGISTER
)
7274 regno
= true_regnum (in
);
7275 if (regno
>= FIRST_PSEUDO_REGISTER
)
7279 else if (GET_CODE (in
) == SUBREG
)
7281 regno
= true_regnum (in
);
7282 if (regno
>= FIRST_PSEUDO_REGISTER
)
7288 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
7290 if (class == GENERAL_REGS
|| class == BASE_REGS
7291 || (regno
>= 0 && INT_REGNO_P (regno
)))
7294 /* Constants, memory, and FP registers can go into FP registers. */
7295 if ((regno
== -1 || FP_REGNO_P (regno
))
7296 && (class == FLOAT_REGS
|| class == NON_SPECIAL_REGS
))
7299 /* Memory, and AltiVec registers can go into AltiVec registers. */
7300 if ((regno
== -1 || ALTIVEC_REGNO_P (regno
))
7301 && class == ALTIVEC_REGS
)
7304 /* We can copy among the CR registers. */
7305 if ((class == CR_REGS
|| class == CR0_REGS
)
7306 && regno
>= 0 && CR_REGNO_P (regno
))
7309 /* Otherwise, we need GENERAL_REGS. */
7310 return GENERAL_REGS
;
7313 /* Given a comparison operation, return the bit number in CCR to test. We
7314 know this is a valid comparison.
7316 SCC_P is 1 if this is for an scc. That means that %D will have been
7317 used instead of %C, so the bits will be in different places.
7319 Return -1 if OP isn't a valid comparison for some reason. */
7326 enum rtx_code code
= GET_CODE (op
);
7327 enum machine_mode cc_mode
;
7332 if (GET_RTX_CLASS (code
) != '<')
7337 if (GET_CODE (reg
) != REG
7338 || ! CR_REGNO_P (REGNO (reg
)))
7341 cc_mode
= GET_MODE (reg
);
7342 cc_regnum
= REGNO (reg
);
7343 base_bit
= 4 * (cc_regnum
- CR0_REGNO
);
7345 validate_condition_mode (code
, cc_mode
);
7350 if (TARGET_SPE
&& TARGET_HARD_FLOAT
&& cc_mode
== CCFPmode
)
7351 return base_bit
+ 1;
7352 return scc_p
? base_bit
+ 3 : base_bit
+ 2;
7354 if (TARGET_SPE
&& TARGET_HARD_FLOAT
&& cc_mode
== CCFPmode
)
7355 return base_bit
+ 1;
7356 return base_bit
+ 2;
7357 case GT
: case GTU
: case UNLE
:
7358 return base_bit
+ 1;
7359 case LT
: case LTU
: case UNGE
:
7361 case ORDERED
: case UNORDERED
:
7362 return base_bit
+ 3;
7365 /* If scc, we will have done a cror to put the bit in the
7366 unordered position. So test that bit. For integer, this is ! LT
7367 unless this is an scc insn. */
7368 return scc_p
? base_bit
+ 3 : base_bit
;
7371 return scc_p
? base_bit
+ 3 : base_bit
+ 1;
7378 /* Return the GOT register. */
7381 rs6000_got_register (value
)
7382 rtx value ATTRIBUTE_UNUSED
;
7384 /* The second flow pass currently (June 1999) can't update
7385 regs_ever_live without disturbing other parts of the compiler, so
7386 update it here to make the prolog/epilogue code happy. */
7387 if (no_new_pseudos
&& ! regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
])
7388 regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
] = 1;
7390 current_function_uses_pic_offset_table
= 1;
7392 return pic_offset_table_rtx
;
7395 /* Function to init struct machine_function.
7396 This will be called, via a pointer variable,
7397 from push_function_context. */
7399 static struct machine_function
*
7400 rs6000_init_machine_status ()
7402 return ggc_alloc_cleared (sizeof (machine_function
));
7405 /* These macros test for integers and extract the low-order bits. */
7407 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
7408 && GET_MODE (X) == VOIDmode)
7410 #define INT_LOWPART(X) \
7411 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
7418 unsigned long val
= INT_LOWPART (op
);
7420 /* If the high bit is zero, the value is the first 1 bit we find
7422 if ((val
& 0x80000000) == 0)
7424 if ((val
& 0xffffffff) == 0)
7428 while (((val
<<= 1) & 0x80000000) == 0)
7433 /* If the high bit is set and the low bit is not, or the mask is all
7434 1's, the value is zero. */
7435 if ((val
& 1) == 0 || (val
& 0xffffffff) == 0xffffffff)
7438 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7441 while (((val
>>= 1) & 1) != 0)
7452 unsigned long val
= INT_LOWPART (op
);
7454 /* If the low bit is zero, the value is the first 1 bit we find from
7458 if ((val
& 0xffffffff) == 0)
7462 while (((val
>>= 1) & 1) == 0)
7468 /* If the low bit is set and the high bit is not, or the mask is all
7469 1's, the value is 31. */
7470 if ((val
& 0x80000000) == 0 || (val
& 0xffffffff) == 0xffffffff)
7473 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7476 while (((val
<<= 1) & 0x80000000) != 0)
7482 /* Print an operand. Recognize special options, documented below. */
7485 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
7486 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
7488 #define SMALL_DATA_RELOC "sda21"
7489 #define SMALL_DATA_REG 0
7493 print_operand (file
, x
, code
)
7500 unsigned HOST_WIDE_INT uval
;
7505 /* Write out an instruction after the call which may be replaced
7506 with glue code by the loader. This depends on the AIX version. */
7507 asm_fprintf (file
, RS6000_CALL_GLUE
);
7510 /* %a is output_address. */
7513 /* If X is a constant integer whose low-order 5 bits are zero,
7514 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
7515 in the AIX assembler where "sri" with a zero shift count
7516 writes a trash instruction. */
7517 if (GET_CODE (x
) == CONST_INT
&& (INTVAL (x
) & 31) == 0)
7524 /* If constant, low-order 16 bits of constant, unsigned.
7525 Otherwise, write normally. */
7527 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 0xffff);
7529 print_operand (file
, x
, 0);
7533 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
7534 for 64-bit mask direction. */
7535 putc (((INT_LOWPART(x
) & 1) == 0 ? 'r' : 'l'), file
);
7538 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
7542 /* There used to be a comment for 'C' reading "This is an
7543 optional cror needed for certain floating-point
7544 comparisons. Otherwise write nothing." */
7546 /* Similar, except that this is for an scc, so we must be able to
7547 encode the test in a single bit that is one. We do the above
7548 for any LE, GE, GEU, or LEU and invert the bit for NE. */
7549 if (GET_CODE (x
) == LE
|| GET_CODE (x
) == GE
7550 || GET_CODE (x
) == LEU
|| GET_CODE (x
) == GEU
)
7552 int base_bit
= 4 * (REGNO (XEXP (x
, 0)) - CR0_REGNO
);
7554 fprintf (file
, "cror %d,%d,%d\n\t", base_bit
+ 3,
7556 base_bit
+ (GET_CODE (x
) == GE
|| GET_CODE (x
) == GEU
));
7559 else if (GET_CODE (x
) == NE
)
7561 int base_bit
= 4 * (REGNO (XEXP (x
, 0)) - CR0_REGNO
);
7563 fprintf (file
, "crnor %d,%d,%d\n\t", base_bit
+ 3,
7564 base_bit
+ 2, base_bit
+ 2);
7566 else if (TARGET_SPE
&& TARGET_HARD_FLOAT
7567 && GET_CODE (x
) == EQ
7568 && GET_MODE (XEXP (x
, 0)) == CCFPmode
)
7570 int base_bit
= 4 * (REGNO (XEXP (x
, 0)) - CR0_REGNO
);
7572 fprintf (file
, "crnor %d,%d,%d\n\t", base_bit
+ 1,
7573 base_bit
+ 1, base_bit
+ 1);
7578 /* X is a CR register. Print the number of the EQ bit of the CR */
7579 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7580 output_operand_lossage ("invalid %%E value");
7582 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
) + 2);
7586 /* X is a CR register. Print the shift count needed to move it
7587 to the high-order four bits. */
7588 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7589 output_operand_lossage ("invalid %%f value");
7591 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
));
7595 /* Similar, but print the count for the rotate in the opposite
7597 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7598 output_operand_lossage ("invalid %%F value");
7600 fprintf (file
, "%d", 32 - 4 * (REGNO (x
) - CR0_REGNO
));
7604 /* X is a constant integer. If it is negative, print "m",
7605 otherwise print "z". This is to make an aze or ame insn. */
7606 if (GET_CODE (x
) != CONST_INT
)
7607 output_operand_lossage ("invalid %%G value");
7608 else if (INTVAL (x
) >= 0)
7615 /* If constant, output low-order five bits. Otherwise, write
7618 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 31);
7620 print_operand (file
, x
, 0);
7624 /* If constant, output low-order six bits. Otherwise, write
7627 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 63);
7629 print_operand (file
, x
, 0);
7633 /* Print `i' if this is a constant, else nothing. */
7639 /* Write the bit number in CCR for jump. */
7642 output_operand_lossage ("invalid %%j code");
7644 fprintf (file
, "%d", i
);
7648 /* Similar, but add one for shift count in rlinm for scc and pass
7649 scc flag to `ccr_bit'. */
7652 output_operand_lossage ("invalid %%J code");
7654 /* If we want bit 31, write a shift count of zero, not 32. */
7655 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
7659 /* X must be a constant. Write the 1's complement of the
7662 output_operand_lossage ("invalid %%k value");
7664 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ~ INT_LOWPART (x
));
7668 /* X must be a symbolic constant on ELF. Write an
7669 expression suitable for an 'addi' that adds in the low 16
7671 if (GET_CODE (x
) != CONST
)
7673 print_operand_address (file
, x
);
7678 if (GET_CODE (XEXP (x
, 0)) != PLUS
7679 || (GET_CODE (XEXP (XEXP (x
, 0), 0)) != SYMBOL_REF
7680 && GET_CODE (XEXP (XEXP (x
, 0), 0)) != LABEL_REF
)
7681 || GET_CODE (XEXP (XEXP (x
, 0), 1)) != CONST_INT
)
7682 output_operand_lossage ("invalid %%K value");
7683 print_operand_address (file
, XEXP (XEXP (x
, 0), 0));
7685 /* For GNU as, there must be a non-alphanumeric character
7686 between 'l' and the number. The '-' is added by
7687 print_operand() already. */
7688 if (INTVAL (XEXP (XEXP (x
, 0), 1)) >= 0)
7690 print_operand (file
, XEXP (XEXP (x
, 0), 1), 0);
7694 /* %l is output_asm_label. */
7697 /* Write second word of DImode or DFmode reference. Works on register
7698 or non-indexed memory only. */
7699 if (GET_CODE (x
) == REG
)
7700 fprintf (file
, "%s", reg_names
[REGNO (x
) + 1]);
7701 else if (GET_CODE (x
) == MEM
)
7703 /* Handle possible auto-increment. Since it is pre-increment and
7704 we have already done it, we can just use an offset of word. */
7705 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
7706 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
7707 output_address (plus_constant (XEXP (XEXP (x
, 0), 0),
7710 output_address (XEXP (adjust_address_nv (x
, SImode
,
7714 if (small_data_operand (x
, GET_MODE (x
)))
7715 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
7716 reg_names
[SMALL_DATA_REG
]);
7721 /* MB value for a mask operand. */
7722 if (! mask_operand (x
, SImode
))
7723 output_operand_lossage ("invalid %%m value");
7725 fprintf (file
, "%d", extract_MB (x
));
7729 /* ME value for a mask operand. */
7730 if (! mask_operand (x
, SImode
))
7731 output_operand_lossage ("invalid %%M value");
7733 fprintf (file
, "%d", extract_ME (x
));
7736 /* %n outputs the negative of its operand. */
7739 /* Write the number of elements in the vector times 4. */
7740 if (GET_CODE (x
) != PARALLEL
)
7741 output_operand_lossage ("invalid %%N value");
7743 fprintf (file
, "%d", XVECLEN (x
, 0) * 4);
7747 /* Similar, but subtract 1 first. */
7748 if (GET_CODE (x
) != PARALLEL
)
7749 output_operand_lossage ("invalid %%O value");
7751 fprintf (file
, "%d", (XVECLEN (x
, 0) - 1) * 4);
7755 /* X is a CONST_INT that is a power of two. Output the logarithm. */
7757 || INT_LOWPART (x
) < 0
7758 || (i
= exact_log2 (INT_LOWPART (x
))) < 0)
7759 output_operand_lossage ("invalid %%p value");
7761 fprintf (file
, "%d", i
);
7765 /* The operand must be an indirect memory reference. The result
7766 is the register number. */
7767 if (GET_CODE (x
) != MEM
|| GET_CODE (XEXP (x
, 0)) != REG
7768 || REGNO (XEXP (x
, 0)) >= 32)
7769 output_operand_lossage ("invalid %%P value");
7771 fprintf (file
, "%d", REGNO (XEXP (x
, 0)));
7775 /* This outputs the logical code corresponding to a boolean
7776 expression. The expression may have one or both operands
7777 negated (if one, only the first one). For condition register
7778 logical operations, it will also treat the negated
7779 CR codes as NOTs, but not handle NOTs of them. */
7781 const char *const *t
= 0;
7783 enum rtx_code code
= GET_CODE (x
);
7784 static const char * const tbl
[3][3] = {
7785 { "and", "andc", "nor" },
7786 { "or", "orc", "nand" },
7787 { "xor", "eqv", "xor" } };
7791 else if (code
== IOR
)
7793 else if (code
== XOR
)
7796 output_operand_lossage ("invalid %%q value");
7798 if (GET_CODE (XEXP (x
, 0)) != NOT
)
7802 if (GET_CODE (XEXP (x
, 1)) == NOT
)
7813 /* X is a CR register. Print the mask for `mtcrf'. */
7814 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7815 output_operand_lossage ("invalid %%R value");
7817 fprintf (file
, "%d", 128 >> (REGNO (x
) - CR0_REGNO
));
7821 /* Low 5 bits of 32 - value */
7823 output_operand_lossage ("invalid %%s value");
7825 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, (32 - INT_LOWPART (x
)) & 31);
7829 /* PowerPC64 mask position. All 0's is excluded.
7830 CONST_INT 32-bit mask is considered sign-extended so any
7831 transition must occur within the CONST_INT, not on the boundary. */
7832 if (! mask64_operand (x
, DImode
))
7833 output_operand_lossage ("invalid %%S value");
7835 uval
= INT_LOWPART (x
);
7837 if (uval
& 1) /* Clear Left */
7839 #if HOST_BITS_PER_WIDE_INT > 64
7840 uval
&= ((unsigned HOST_WIDE_INT
) 1 << 64) - 1;
7844 else /* Clear Right */
7847 #if HOST_BITS_PER_WIDE_INT > 64
7848 uval
&= ((unsigned HOST_WIDE_INT
) 1 << 64) - 1;
7856 fprintf (file
, "%d", i
);
7860 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
7861 if (GET_CODE (x
) != REG
|| GET_MODE (x
) != CCmode
)
7864 /* Bit 3 is OV bit. */
7865 i
= 4 * (REGNO (x
) - CR0_REGNO
) + 3;
7867 /* If we want bit 31, write a shift count of zero, not 32. */
7868 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
7872 /* Print the symbolic name of a branch target register. */
7873 if (GET_CODE (x
) != REG
|| (REGNO (x
) != LINK_REGISTER_REGNUM
7874 && REGNO (x
) != COUNT_REGISTER_REGNUM
))
7875 output_operand_lossage ("invalid %%T value");
7876 else if (REGNO (x
) == LINK_REGISTER_REGNUM
)
7877 fputs (TARGET_NEW_MNEMONICS
? "lr" : "r", file
);
7879 fputs ("ctr", file
);
7883 /* High-order 16 bits of constant for use in unsigned operand. */
7885 output_operand_lossage ("invalid %%u value");
7887 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
7888 (INT_LOWPART (x
) >> 16) & 0xffff);
7892 /* High-order 16 bits of constant for use in signed operand. */
7894 output_operand_lossage ("invalid %%v value");
7896 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
7897 (INT_LOWPART (x
) >> 16) & 0xffff);
7901 /* Print `u' if this has an auto-increment or auto-decrement. */
7902 if (GET_CODE (x
) == MEM
7903 && (GET_CODE (XEXP (x
, 0)) == PRE_INC
7904 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
))
7909 /* Print the trap code for this operand. */
7910 switch (GET_CODE (x
))
7913 fputs ("eq", file
); /* 4 */
7916 fputs ("ne", file
); /* 24 */
7919 fputs ("lt", file
); /* 16 */
7922 fputs ("le", file
); /* 20 */
7925 fputs ("gt", file
); /* 8 */
7928 fputs ("ge", file
); /* 12 */
7931 fputs ("llt", file
); /* 2 */
7934 fputs ("lle", file
); /* 6 */
7937 fputs ("lgt", file
); /* 1 */
7940 fputs ("lge", file
); /* 5 */
7948 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
7951 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
,
7952 ((INT_LOWPART (x
) & 0xffff) ^ 0x8000) - 0x8000);
7954 print_operand (file
, x
, 0);
7958 /* MB value for a PowerPC64 rldic operand. */
7959 val
= (GET_CODE (x
) == CONST_INT
7960 ? INTVAL (x
) : CONST_DOUBLE_HIGH (x
));
7965 for (i
= 0; i
< HOST_BITS_PER_WIDE_INT
; i
++)
7966 if ((val
<<= 1) < 0)
7969 #if HOST_BITS_PER_WIDE_INT == 32
7970 if (GET_CODE (x
) == CONST_INT
&& i
>= 0)
7971 i
+= 32; /* zero-extend high-part was all 0's */
7972 else if (GET_CODE (x
) == CONST_DOUBLE
&& i
== 32)
7974 val
= CONST_DOUBLE_LOW (x
);
7981 for ( ; i
< 64; i
++)
7982 if ((val
<<= 1) < 0)
7987 fprintf (file
, "%d", i
+ 1);
7991 if (GET_CODE (x
) == MEM
7992 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x
, 0), 0))
7997 /* Like 'L', for third word of TImode */
7998 if (GET_CODE (x
) == REG
)
7999 fprintf (file
, "%s", reg_names
[REGNO (x
) + 2]);
8000 else if (GET_CODE (x
) == MEM
)
8002 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
8003 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
8004 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 8));
8006 output_address (XEXP (adjust_address_nv (x
, SImode
, 8), 0));
8007 if (small_data_operand (x
, GET_MODE (x
)))
8008 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
8009 reg_names
[SMALL_DATA_REG
]);
8014 /* X is a SYMBOL_REF. Write out the name preceded by a
8015 period and without any trailing data in brackets. Used for function
8016 names. If we are configured for System V (or the embedded ABI) on
8017 the PowerPC, do not emit the period, since those systems do not use
8018 TOCs and the like. */
8019 if (GET_CODE (x
) != SYMBOL_REF
)
8022 if (XSTR (x
, 0)[0] != '.')
8024 switch (DEFAULT_ABI
)
8034 case ABI_AIX_NODESC
:
8040 RS6000_OUTPUT_BASENAME (file
, XSTR (x
, 0));
8042 assemble_name (file
, XSTR (x
, 0));
8047 /* Like 'L', for last word of TImode. */
8048 if (GET_CODE (x
) == REG
)
8049 fprintf (file
, "%s", reg_names
[REGNO (x
) + 3]);
8050 else if (GET_CODE (x
) == MEM
)
8052 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
8053 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
8054 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 12));
8056 output_address (XEXP (adjust_address_nv (x
, SImode
, 12), 0));
8057 if (small_data_operand (x
, GET_MODE (x
)))
8058 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
8059 reg_names
[SMALL_DATA_REG
]);
8063 /* Print AltiVec or SPE memory operand. */
8068 if (GET_CODE (x
) != MEM
)
8076 if (GET_CODE (tmp
) == REG
)
8078 fprintf (file
, "0(%s)", reg_names
[REGNO (tmp
)]);
8081 /* Handle [reg+UIMM]. */
8082 else if (GET_CODE (tmp
) == PLUS
&&
8083 GET_CODE (XEXP (tmp
, 1)) == CONST_INT
)
8087 if (GET_CODE (XEXP (tmp
, 0)) != REG
)
8090 x
= INTVAL (XEXP (tmp
, 1));
8091 fprintf (file
, "%d(%s)", x
, reg_names
[REGNO (XEXP (tmp
, 0))]);
8095 /* Fall through. Must be [reg+reg]. */
8097 if (GET_CODE (tmp
) == REG
)
8098 fprintf (file
, "0,%s", reg_names
[REGNO (tmp
)]);
8099 else if (GET_CODE (tmp
) == PLUS
&& GET_CODE (XEXP (tmp
, 1)) == REG
)
8101 if (REGNO (XEXP (tmp
, 0)) == 0)
8102 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (tmp
, 1)) ],
8103 reg_names
[ REGNO (XEXP (tmp
, 0)) ]);
8105 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (tmp
, 0)) ],
8106 reg_names
[ REGNO (XEXP (tmp
, 1)) ]);
8114 if (GET_CODE (x
) == REG
)
8115 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
8116 else if (GET_CODE (x
) == MEM
)
8118 /* We need to handle PRE_INC and PRE_DEC here, since we need to
8119 know the width from the mode. */
8120 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
)
8121 fprintf (file
, "%d(%s)", GET_MODE_SIZE (GET_MODE (x
)),
8122 reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))]);
8123 else if (GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
8124 fprintf (file
, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x
)),
8125 reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))]);
8127 output_address (XEXP (x
, 0));
8130 output_addr_const (file
, x
);
8134 output_operand_lossage ("invalid %%xn code");
8138 /* Print the address of an operand. */
8141 print_operand_address (file
, x
)
8145 if (GET_CODE (x
) == REG
)
8146 fprintf (file
, "0(%s)", reg_names
[ REGNO (x
) ]);
8147 else if (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == CONST
8148 || GET_CODE (x
) == LABEL_REF
)
8150 output_addr_const (file
, x
);
8151 if (small_data_operand (x
, GET_MODE (x
)))
8152 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
8153 reg_names
[SMALL_DATA_REG
]);
8154 else if (TARGET_TOC
)
8157 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == REG
)
8159 if (REGNO (XEXP (x
, 0)) == 0)
8160 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (x
, 1)) ],
8161 reg_names
[ REGNO (XEXP (x
, 0)) ]);
8163 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (x
, 0)) ],
8164 reg_names
[ REGNO (XEXP (x
, 1)) ]);
8166 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
)
8168 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (XEXP (x
, 1)));
8169 fprintf (file
, "(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
8172 else if (GET_CODE (x
) == LO_SUM
&& GET_CODE (XEXP (x
, 0)) == REG
8173 && CONSTANT_P (XEXP (x
, 1)))
8175 output_addr_const (file
, XEXP (x
, 1));
8176 fprintf (file
, "@l(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
8180 else if (GET_CODE (x
) == LO_SUM
&& GET_CODE (XEXP (x
, 0)) == REG
8181 && CONSTANT_P (XEXP (x
, 1)))
8183 fprintf (file
, "lo16(");
8184 output_addr_const (file
, XEXP (x
, 1));
8185 fprintf (file
, ")(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
8188 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x
))
8190 if (TARGET_AIX
&& (!TARGET_ELF
|| !TARGET_MINIMAL_TOC
))
8192 rtx contains_minus
= XEXP (x
, 1);
8196 /* Find the (minus (sym) (toc)) buried in X, and temporarily
8197 turn it into (sym) for output_addr_const. */
8198 while (GET_CODE (XEXP (contains_minus
, 0)) != MINUS
)
8199 contains_minus
= XEXP (contains_minus
, 0);
8201 minus
= XEXP (contains_minus
, 0);
8202 symref
= XEXP (minus
, 0);
8203 XEXP (contains_minus
, 0) = symref
;
8208 name
= XSTR (symref
, 0);
8209 newname
= alloca (strlen (name
) + sizeof ("@toc"));
8210 strcpy (newname
, name
);
8211 strcat (newname
, "@toc");
8212 XSTR (symref
, 0) = newname
;
8214 output_addr_const (file
, XEXP (x
, 1));
8216 XSTR (symref
, 0) = name
;
8217 XEXP (contains_minus
, 0) = minus
;
8220 output_addr_const (file
, XEXP (x
, 1));
8222 fprintf (file
, "(%s)", reg_names
[REGNO (XEXP (x
, 0))]);
8228 /* Target hook for assembling integer objects. The PowerPC version has
8229 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
8230 is defined. It also needs to handle DI-mode objects on 64-bit
8234 rs6000_assemble_integer (x
, size
, aligned_p
)
8239 #ifdef RELOCATABLE_NEEDS_FIXUP
8240 /* Special handling for SI values. */
8241 if (size
== 4 && aligned_p
)
8243 extern int in_toc_section
PARAMS ((void));
8244 static int recurse
= 0;
8246 /* For -mrelocatable, we mark all addresses that need to be fixed up
8247 in the .fixup section. */
8248 if (TARGET_RELOCATABLE
8249 && !in_toc_section ()
8250 && !in_text_section ()
8252 && GET_CODE (x
) != CONST_INT
8253 && GET_CODE (x
) != CONST_DOUBLE
8259 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCP", fixuplabelno
);
8261 ASM_OUTPUT_LABEL (asm_out_file
, buf
);
8262 fprintf (asm_out_file
, "\t.long\t(");
8263 output_addr_const (asm_out_file
, x
);
8264 fprintf (asm_out_file
, ")@fixup\n");
8265 fprintf (asm_out_file
, "\t.section\t\".fixup\",\"aw\"\n");
8266 ASM_OUTPUT_ALIGN (asm_out_file
, 2);
8267 fprintf (asm_out_file
, "\t.long\t");
8268 assemble_name (asm_out_file
, buf
);
8269 fprintf (asm_out_file
, "\n\t.previous\n");
8273 /* Remove initial .'s to turn a -mcall-aixdesc function
8274 address into the address of the descriptor, not the function
8276 else if (GET_CODE (x
) == SYMBOL_REF
8277 && XSTR (x
, 0)[0] == '.'
8278 && DEFAULT_ABI
== ABI_AIX
)
8280 const char *name
= XSTR (x
, 0);
8281 while (*name
== '.')
8284 fprintf (asm_out_file
, "\t.long\t%s\n", name
);
8288 #endif /* RELOCATABLE_NEEDS_FIXUP */
8289 return default_assemble_integer (x
, size
, aligned_p
);
#ifdef HAVE_GAS_HIDDEN
/* Emit an assembler directive to set symbol visibility for DECL to
   VISIBILITY_TYPE.  */

static void
rs6000_assemble_visibility (decl, vis)
     tree decl;
     int vis;
{
  /* Functions need to have their entry point symbol visibility set as
     well as their descriptor symbol visibility.  */
  if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
    {
      /* Index 0 (VISIBILITY_DEFAULT) is unused -- no directive is
	 emitted for default visibility.  */
      static const char * const visibility_types[] = {
	NULL, "internal", "hidden", "protected"
      };

      const char *name, *type;

      name = ((* targetm.strip_name_encoding)
	      (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
      type = visibility_types[vis];

      /* Entry point symbol, then the `.'-prefixed descriptor symbol.  */
      fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
      fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
    }
  else
    default_assemble_visibility (decl, vis);
}
#endif
8324 rs6000_reverse_condition (mode
, code
)
8325 enum machine_mode mode
;
8328 /* Reversal of FP compares takes care -- an ordered compare
8329 becomes an unordered compare and vice versa. */
8330 if (mode
== CCFPmode
&& !flag_unsafe_math_optimizations
)
8331 return reverse_condition_maybe_unordered (code
);
8333 return reverse_condition (code
);
8336 /* Generate a compare for CODE. Return a brand-new rtx that
8337 represents the result of the compare. */
8340 rs6000_generate_compare (code
)
8343 enum machine_mode comp_mode
;
8346 if (rs6000_compare_fp_p
)
8347 comp_mode
= CCFPmode
;
8348 else if (code
== GTU
|| code
== LTU
8349 || code
== GEU
|| code
== LEU
)
8350 comp_mode
= CCUNSmode
;
8354 /* First, the compare. */
8355 compare_result
= gen_reg_rtx (comp_mode
);
8357 /* SPE FP compare instructions on the GPRs. Yuck! */
8358 if ((TARGET_SPE
&& TARGET_HARD_FLOAT
) && rs6000_compare_fp_p
)
8360 rtx cmp
, or1
, or2
, or_result
, compare_result2
;
8368 cmp
= flag_unsafe_math_optimizations
8369 ? gen_tstsfeq_gpr (compare_result
, rs6000_compare_op0
,
8371 : gen_cmpsfeq_gpr (compare_result
, rs6000_compare_op0
,
8372 rs6000_compare_op1
);
8380 cmp
= flag_unsafe_math_optimizations
8381 ? gen_tstsfgt_gpr (compare_result
, rs6000_compare_op0
,
8383 : gen_cmpsfgt_gpr (compare_result
, rs6000_compare_op0
,
8384 rs6000_compare_op1
);
8392 cmp
= flag_unsafe_math_optimizations
8393 ? gen_tstsflt_gpr (compare_result
, rs6000_compare_op0
,
8395 : gen_cmpsflt_gpr (compare_result
, rs6000_compare_op0
,
8396 rs6000_compare_op1
);
8402 /* Synthesize LE and GE from LT/GT || EQ. */
8403 if (code
== LE
|| code
== GE
|| code
== LEU
|| code
== GEU
)
8405 /* Synthesize GE/LE frome GT/LT || EQ. */
8411 case LE
: code
= LT
; break;
8412 case GE
: code
= GT
; break;
8413 case LEU
: code
= LT
; break;
8414 case GEU
: code
= GT
; break;
8418 or1
= gen_reg_rtx (SImode
);
8419 or2
= gen_reg_rtx (SImode
);
8420 or_result
= gen_reg_rtx (CCEQmode
);
8421 compare_result2
= gen_reg_rtx (CCFPmode
);
8424 cmp
= flag_unsafe_math_optimizations
8425 ? gen_tstsfeq_gpr (compare_result2
, rs6000_compare_op0
,
8427 : gen_cmpsfeq_gpr (compare_result2
, rs6000_compare_op0
,
8428 rs6000_compare_op1
);
8431 /* The MC8540 FP compare instructions set the CR bits
8432 differently than other PPC compare instructions. For
8433 that matter, there is no generic test instruction, but a
8434 testgt, testlt, and testeq. For a true condition, bit 2
8435 is set (x1xx) in the CR. Following the traditional CR
8441 ... bit 2 would be a GT CR alias, so later on we
8442 look in the GT bits for the branch instructions.
8443 However, we must be careful to emit correct RTL in
8444 the meantime, so optimizations don't get confused. */
8446 or1
= gen_rtx (NE
, SImode
, compare_result
, const0_rtx
);
8447 or2
= gen_rtx (NE
, SImode
, compare_result2
, const0_rtx
);
8449 /* OR them together. */
8450 cmp
= gen_rtx_SET (VOIDmode
, or_result
,
8451 gen_rtx_COMPARE (CCEQmode
,
8452 gen_rtx_IOR (SImode
, or1
, or2
),
8454 compare_result
= or_result
;
8459 /* We only care about 1 bit (x1xx), so map everything to NE to
8460 maintain rtl sanity. We'll get to the right bit (x1xx) at
8461 code output time. */
8462 if (code
== NE
|| code
== LTGT
)
8463 /* Do the inverse here because we have no cmpne
8464 instruction. We use the cmpeq instruction and expect
8465 to get a 0 instead. */
8474 emit_insn (gen_rtx_SET (VOIDmode
, compare_result
,
8475 gen_rtx_COMPARE (comp_mode
,
8477 rs6000_compare_op1
)));
8479 /* Some kinds of FP comparisons need an OR operation;
8480 except for flag_unsafe_math_optimizations we don't bother. */
8481 if (rs6000_compare_fp_p
8482 && ! flag_unsafe_math_optimizations
8483 && ! (TARGET_HARD_FLOAT
&& TARGET_SPE
)
8484 && (code
== LE
|| code
== GE
8485 || code
== UNEQ
|| code
== LTGT
8486 || code
== UNGT
|| code
== UNLT
))
8488 enum rtx_code or1
, or2
;
8489 rtx or1_rtx
, or2_rtx
, compare2_rtx
;
8490 rtx or_result
= gen_reg_rtx (CCEQmode
);
8494 case LE
: or1
= LT
; or2
= EQ
; break;
8495 case GE
: or1
= GT
; or2
= EQ
; break;
8496 case UNEQ
: or1
= UNORDERED
; or2
= EQ
; break;
8497 case LTGT
: or1
= LT
; or2
= GT
; break;
8498 case UNGT
: or1
= UNORDERED
; or2
= GT
; break;
8499 case UNLT
: or1
= UNORDERED
; or2
= LT
; break;
8502 validate_condition_mode (or1
, comp_mode
);
8503 validate_condition_mode (or2
, comp_mode
);
8504 or1_rtx
= gen_rtx (or1
, SImode
, compare_result
, const0_rtx
);
8505 or2_rtx
= gen_rtx (or2
, SImode
, compare_result
, const0_rtx
);
8506 compare2_rtx
= gen_rtx_COMPARE (CCEQmode
,
8507 gen_rtx_IOR (SImode
, or1_rtx
, or2_rtx
),
8509 emit_insn (gen_rtx_SET (VOIDmode
, or_result
, compare2_rtx
));
8511 compare_result
= or_result
;
8515 validate_condition_mode (code
, GET_MODE (compare_result
));
8517 return gen_rtx (code
, VOIDmode
, compare_result
, const0_rtx
);
8521 /* Emit the RTL for an sCOND pattern. */
8524 rs6000_emit_sCOND (code
, result
)
8529 enum machine_mode op_mode
;
8531 condition_rtx
= rs6000_generate_compare (code
);
8533 op_mode
= GET_MODE (rs6000_compare_op0
);
8534 if (op_mode
== VOIDmode
)
8535 op_mode
= GET_MODE (rs6000_compare_op1
);
8537 if (TARGET_POWERPC64
&& (op_mode
== DImode
|| rs6000_compare_fp_p
))
8539 PUT_MODE (condition_rtx
, DImode
);
8540 convert_move (result
, condition_rtx
, 0);
8544 PUT_MODE (condition_rtx
, SImode
);
8545 emit_insn (gen_rtx_SET (VOIDmode
, result
, condition_rtx
));
8549 /* Emit a branch of kind CODE to location LOC. */
8552 rs6000_emit_cbranch (code
, loc
)
8556 rtx condition_rtx
, loc_ref
;
8558 condition_rtx
= rs6000_generate_compare (code
);
8559 loc_ref
= gen_rtx_LABEL_REF (VOIDmode
, loc
);
8560 emit_jump_insn (gen_rtx_SET (VOIDmode
, pc_rtx
,
8561 gen_rtx_IF_THEN_ELSE (VOIDmode
, condition_rtx
,
8565 /* Return the string to output a conditional branch to LABEL, which is
8566 the operand number of the label, or -1 if the branch is really a
8569 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
8570 condition code register and its mode specifies what kind of
8573 REVERSED is nonzero if we should reverse the sense of the comparison.
8575 INSN is the insn. */
8578 output_cbranch (op
, label
, reversed
, insn
)
8584 static char string
[64];
8585 enum rtx_code code
= GET_CODE (op
);
8586 rtx cc_reg
= XEXP (op
, 0);
8587 enum machine_mode mode
= GET_MODE (cc_reg
);
8588 int cc_regno
= REGNO (cc_reg
) - CR0_REGNO
;
8589 int need_longbranch
= label
!= NULL
&& get_attr_length (insn
) == 8;
8590 int really_reversed
= reversed
^ need_longbranch
;
8596 validate_condition_mode (code
, mode
);
8598 /* Work out which way this really branches. We could use
8599 reverse_condition_maybe_unordered here always but this
8600 makes the resulting assembler clearer. */
8601 if (really_reversed
)
8603 /* Reversal of FP compares takes care -- an ordered compare
8604 becomes an unordered compare and vice versa. */
8605 if (mode
== CCFPmode
)
8606 code
= reverse_condition_maybe_unordered (code
);
8608 code
= reverse_condition (code
);
8611 if ((TARGET_SPE
&& TARGET_HARD_FLOAT
) && mode
== CCFPmode
)
8613 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
8616 /* Opposite of GT. */
8618 else if (code
== NE
)
8626 /* Not all of these are actually distinct opcodes, but
8627 we distinguish them for clarity of the resulting assembler. */
8629 ccode
= "ne"; break;
8631 ccode
= "eq"; break;
8633 ccode
= "ge"; break;
8634 case GT
: case GTU
: case UNGT
:
8635 ccode
= "gt"; break;
8637 ccode
= "le"; break;
8638 case LT
: case LTU
: case UNLT
:
8639 ccode
= "lt"; break;
8640 case UNORDERED
: ccode
= "un"; break;
8641 case ORDERED
: ccode
= "nu"; break;
8642 case UNGE
: ccode
= "nl"; break;
8643 case UNLE
: ccode
= "ng"; break;
8648 /* Maybe we have a guess as to how likely the branch is.
8649 The old mnemonics don't have a way to specify this information. */
8651 note
= find_reg_note (insn
, REG_BR_PROB
, NULL_RTX
);
8652 if (note
!= NULL_RTX
)
8654 /* PROB is the difference from 50%. */
8655 int prob
= INTVAL (XEXP (note
, 0)) - REG_BR_PROB_BASE
/ 2;
8656 bool always_hint
= rs6000_cpu
!= PROCESSOR_POWER4
;
8658 /* Only hint for highly probable/improbable branches on newer
8659 cpus as static prediction overrides processor dynamic
8660 prediction. For older cpus we may as well always hint, but
8661 assume not taken for branches that are very close to 50% as a
8662 mispredicted taken branch is more expensive than a
8663 mispredicted not-taken branch. */
8665 || abs (prob
) > REG_BR_PROB_BASE
/ 100 * 48)
8667 if (abs (prob
) > REG_BR_PROB_BASE
/ 20
8668 && ((prob
> 0) ^ need_longbranch
))
8676 s
+= sprintf (s
, "{b%sr|b%slr%s} ", ccode
, ccode
, pred
);
8678 s
+= sprintf (s
, "{b%s|b%s%s} ", ccode
, ccode
, pred
);
8680 /* We need to escape any '%' characters in the reg_names string.
8681 Assume they'd only be the first character... */
8682 if (reg_names
[cc_regno
+ CR0_REGNO
][0] == '%')
8684 s
+= sprintf (s
, "%s", reg_names
[cc_regno
+ CR0_REGNO
]);
8688 /* If the branch distance was too far, we may have to use an
8689 unconditional branch to go the distance. */
8690 if (need_longbranch
)
8691 s
+= sprintf (s
, ",$+8\n\tb %s", label
);
8693 s
+= sprintf (s
, ",%s", label
);
8699 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
8700 operands of the last comparison is nonzero/true, FALSE_COND if it
8701 is zero/false. Return 0 if the hardware has no such operation. */
8704 rs6000_emit_cmove (dest
, op
, true_cond
, false_cond
)
8710 enum rtx_code code
= GET_CODE (op
);
8711 rtx op0
= rs6000_compare_op0
;
8712 rtx op1
= rs6000_compare_op1
;
8714 enum machine_mode compare_mode
= GET_MODE (op0
);
8715 enum machine_mode result_mode
= GET_MODE (dest
);
8718 /* These modes should always match. */
8719 if (GET_MODE (op1
) != compare_mode
8720 /* In the isel case however, we can use a compare immediate, so
8721 op1 may be a small constant. */
8722 && (!TARGET_ISEL
|| !short_cint_operand (op1
, VOIDmode
)))
8724 if (GET_MODE (true_cond
) != result_mode
)
8726 if (GET_MODE (false_cond
) != result_mode
)
8729 /* First, work out if the hardware can do this at all, or
8730 if it's too slow... */
8731 if (! rs6000_compare_fp_p
)
8734 return rs6000_emit_int_cmove (dest
, op
, true_cond
, false_cond
);
8738 /* Eliminate half of the comparisons by switching operands, this
8739 makes the remaining code simpler. */
8740 if (code
== UNLT
|| code
== UNGT
|| code
== UNORDERED
|| code
== NE
8741 || code
== LTGT
|| code
== LT
)
8743 code
= reverse_condition_maybe_unordered (code
);
8745 true_cond
= false_cond
;
8749 /* UNEQ and LTGT take four instructions for a comparison with zero,
8750 it'll probably be faster to use a branch here too. */
8754 if (GET_CODE (op1
) == CONST_DOUBLE
)
8755 REAL_VALUE_FROM_CONST_DOUBLE (c1
, op1
);
8757 /* We're going to try to implement comparisons by performing
8758 a subtract, then comparing against zero. Unfortunately,
8759 Inf - Inf is NaN which is not zero, and so if we don't
8760 know that the operand is finite and the comparison
8761 would treat EQ different to UNORDERED, we can't do it. */
8762 if (! flag_unsafe_math_optimizations
8763 && code
!= GT
&& code
!= UNGE
8764 && (GET_CODE (op1
) != CONST_DOUBLE
|| real_isinf (&c1
))
8765 /* Constructs of the form (a OP b ? a : b) are safe. */
8766 && ((! rtx_equal_p (op0
, false_cond
) && ! rtx_equal_p (op1
, false_cond
))
8767 || (! rtx_equal_p (op0
, true_cond
)
8768 && ! rtx_equal_p (op1
, true_cond
))))
8770 /* At this point we know we can use fsel. */
8772 /* Reduce the comparison to a comparison against zero. */
8773 temp
= gen_reg_rtx (compare_mode
);
8774 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8775 gen_rtx_MINUS (compare_mode
, op0
, op1
)));
8777 op1
= CONST0_RTX (compare_mode
);
8779 /* If we don't care about NaNs we can reduce some of the comparisons
8780 down to faster ones. */
8781 if (flag_unsafe_math_optimizations
)
8787 true_cond
= false_cond
;
8800 /* Now, reduce everything down to a GE. */
8807 temp
= gen_reg_rtx (compare_mode
);
8808 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
8813 temp
= gen_reg_rtx (compare_mode
);
8814 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_ABS (compare_mode
, op0
)));
8819 temp
= gen_reg_rtx (compare_mode
);
8820 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8821 gen_rtx_NEG (compare_mode
,
8822 gen_rtx_ABS (compare_mode
, op0
))));
8827 temp
= gen_reg_rtx (result_mode
);
8828 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8829 gen_rtx_IF_THEN_ELSE (result_mode
,
8830 gen_rtx_GE (VOIDmode
,
8832 true_cond
, false_cond
)));
8834 true_cond
= false_cond
;
8836 temp
= gen_reg_rtx (compare_mode
);
8837 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
8842 temp
= gen_reg_rtx (result_mode
);
8843 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8844 gen_rtx_IF_THEN_ELSE (result_mode
,
8845 gen_rtx_GE (VOIDmode
,
8847 true_cond
, false_cond
)));
8849 false_cond
= true_cond
;
8851 temp
= gen_reg_rtx (compare_mode
);
8852 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
8860 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
8861 gen_rtx_IF_THEN_ELSE (result_mode
,
8862 gen_rtx_GE (VOIDmode
,
8864 true_cond
, false_cond
)));
8868 /* Same as above, but for ints (isel). */
8871 rs6000_emit_int_cmove (dest
, op
, true_cond
, false_cond
)
8877 rtx condition_rtx
, cr
;
8879 /* All isel implementations thus far are 32-bits. */
8880 if (GET_MODE (rs6000_compare_op0
) != SImode
)
8883 /* We still have to do the compare, because isel doesn't do a
8884 compare, it just looks at the CRx bits set by a previous compare
8886 condition_rtx
= rs6000_generate_compare (GET_CODE (op
));
8887 cr
= XEXP (condition_rtx
, 0);
8889 if (GET_MODE (cr
) == CCmode
)
8890 emit_insn (gen_isel_signed (dest
, condition_rtx
,
8891 true_cond
, false_cond
, cr
));
8893 emit_insn (gen_isel_unsigned (dest
, condition_rtx
,
8894 true_cond
, false_cond
, cr
));
8900 output_isel (operands
)
8905 code
= GET_CODE (operands
[1]);
8906 if (code
== GE
|| code
== GEU
|| code
== LE
|| code
== LEU
|| code
== NE
)
8908 PUT_CODE (operands
[1], reverse_condition (code
));
8909 return "isel %0,%3,%2,%j1";
8912 return "isel %0,%2,%3,%j1";
8916 rs6000_emit_minmax (dest
, code
, op0
, op1
)
8922 enum machine_mode mode
= GET_MODE (op0
);
8926 if (code
== SMAX
|| code
== SMIN
)
8931 if (code
== SMAX
|| code
== UMAX
)
8932 target
= emit_conditional_move (dest
, c
, op0
, op1
, mode
,
8935 target
= emit_conditional_move (dest
, c
, op0
, op1
, mode
,
8937 if (target
== NULL_RTX
)
8940 emit_move_insn (dest
, target
);
8943 /* This page contains routines that are used to determine what the
8944 function prologue and epilogue code will do and write them out. */
8946 /* Return the first fixed-point register that is required to be
8947 saved. 32 if none. */
8950 first_reg_to_save ()
8954 /* Find lowest numbered live register. */
8955 for (first_reg
= 13; first_reg
<= 31; first_reg
++)
8956 if (regs_ever_live
[first_reg
]
8957 && (! call_used_regs
[first_reg
]
8958 || (first_reg
== RS6000_PIC_OFFSET_TABLE_REGNUM
8959 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
!= 0)
8960 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
)))))
8965 && current_function_uses_pic_offset_table
8966 && first_reg
> RS6000_PIC_OFFSET_TABLE_REGNUM
)
8967 return RS6000_PIC_OFFSET_TABLE_REGNUM
;
8973 /* Similar, for FP regs. */
8976 first_fp_reg_to_save ()
8980 /* Find lowest numbered live register. */
8981 for (first_reg
= 14 + 32; first_reg
<= 63; first_reg
++)
8982 if (regs_ever_live
[first_reg
])
8988 /* Similar, for AltiVec regs. */
8991 first_altivec_reg_to_save ()
8995 /* Stack frame remains as is unless we are in AltiVec ABI. */
8996 if (! TARGET_ALTIVEC_ABI
)
8997 return LAST_ALTIVEC_REGNO
+ 1;
8999 /* Find lowest numbered live register. */
9000 for (i
= FIRST_ALTIVEC_REGNO
+ 20; i
<= LAST_ALTIVEC_REGNO
; ++i
)
9001 if (regs_ever_live
[i
])
9007 /* Return a 32-bit mask of the AltiVec registers we need to set in
9008 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
9009 the 32-bit word is 0. */
9012 compute_vrsave_mask ()
9014 unsigned int i
, mask
= 0;
9016 /* First, find out if we use _any_ altivec registers. */
9017 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
9018 if (regs_ever_live
[i
])
9019 mask
|= ALTIVEC_REG_BIT (i
);
9024 /* Next, add all registers that are call-clobbered. We do this
9025 because post-reload register optimizers such as regrename_optimize
9026 may choose to use them. They never change the register class
9027 chosen by reload, so cannot create new uses of altivec registers
9028 if there were none before, so the early exit above is safe. */
9029 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
9030 altivec registers not saved in the mask, which might well make the
9031 adjustments below more effective in eliding the save/restore of
9032 VRSAVE in small functions. */
9033 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
9034 if (call_used_regs
[i
])
9035 mask
|= ALTIVEC_REG_BIT (i
);
9037 /* Next, remove the argument registers from the set. These must
9038 be in the VRSAVE mask set by the caller, so we don't need to add
9039 them in again. More importantly, the mask we compute here is
9040 used to generate CLOBBERs in the set_vrsave insn, and we do not
9041 wish the argument registers to die. */
9042 for (i
= cfun
->args_info
.vregno
; i
>= ALTIVEC_ARG_MIN_REG
; --i
)
9043 mask
&= ~ALTIVEC_REG_BIT (i
);
9045 /* Similarly, remove the return value from the set. */
9048 diddle_return_value (is_altivec_return_reg
, &yes
);
9050 mask
&= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN
);
9057 is_altivec_return_reg (reg
, xyes
)
9061 bool *yes
= (bool *) xyes
;
9062 if (REGNO (reg
) == ALTIVEC_ARG_RETURN
)
9067 /* Calculate the stack information for the current function. This is
9068 complicated by having two separate calling sequences, the AIX calling
9069 sequence and the V.4 calling sequence.
9071 AIX (and Darwin/Mac OS X) stack frames look like:
9073 SP----> +---------------------------------------+
9074 | back chain to caller | 0 0
9075 +---------------------------------------+
9076 | saved CR | 4 8 (8-11)
9077 +---------------------------------------+
9079 +---------------------------------------+
9080 | reserved for compilers | 12 24
9081 +---------------------------------------+
9082 | reserved for binders | 16 32
9083 +---------------------------------------+
9084 | saved TOC pointer | 20 40
9085 +---------------------------------------+
9086 | Parameter save area (P) | 24 48
9087 +---------------------------------------+
9088 | Alloca space (A) | 24+P etc.
9089 +---------------------------------------+
9090 | Local variable space (L) | 24+P+A
9091 +---------------------------------------+
9092 | Float/int conversion temporary (X) | 24+P+A+L
9093 +---------------------------------------+
9094 | Save area for AltiVec registers (W) | 24+P+A+L+X
9095 +---------------------------------------+
9096 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
9097 +---------------------------------------+
9098 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
9099 +---------------------------------------+
9100 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
9101 +---------------------------------------+
9102 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
9103 +---------------------------------------+
9104 old SP->| back chain to caller's caller |
9105 +---------------------------------------+
9107 The required alignment for AIX configurations is two words (i.e., 8
9111 V.4 stack frames look like:
9113 SP----> +---------------------------------------+
9114 | back chain to caller | 0
9115 +---------------------------------------+
9116 | caller's saved LR | 4
9117 +---------------------------------------+
9118 | Parameter save area (P) | 8
9119 +---------------------------------------+
9120 | Alloca space (A) | 8+P
9121 +---------------------------------------+
9122 | Varargs save area (V) | 8+P+A
9123 +---------------------------------------+
9124 | Local variable space (L) | 8+P+A+V
9125 +---------------------------------------+
9126 | Float/int conversion temporary (X) | 8+P+A+V+L
9127 +---------------------------------------+
9128 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
9129 +---------------------------------------+
9130 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
9131 +---------------------------------------+
9132 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
9133 +---------------------------------------+
9134 | SPE: area for 64-bit GP registers |
9135 +---------------------------------------+
9136 | SPE alignment padding |
9137 +---------------------------------------+
9138 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
9139 +---------------------------------------+
9140 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
9141 +---------------------------------------+
9142 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
9143 +---------------------------------------+
9144 old SP->| back chain to caller's caller |
9145 +---------------------------------------+
9147 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
9148 given. (But note below and in sysv4.h that we require only 8 and
9149 may round up the size of our stack frame anyways. The historical
9150 reason is early versions of powerpc-linux which didn't properly
9151 align the stack at program startup. A happy side-effect is that
9152 -mno-eabi libraries can be used with -meabi programs.)
9154 The EABI configuration defaults to the V.4 layout, unless
9155 -mcall-aix is used, in which case the AIX layout is used. However,
9156 the stack alignment requirements may differ. If -mno-eabi is not
9157 given, the required stack alignment is 8 bytes; if -mno-eabi is
9158 given, the required alignment is 16 bytes. (But see V.4 comment
9161 #ifndef ABI_STACK_BOUNDARY
9162 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
9166 rs6000_stack_info ()
9168 static rs6000_stack_t info
, zero_info
;
9169 rs6000_stack_t
*info_ptr
= &info
;
9170 int reg_size
= TARGET_POWERPC64
? 8 : 4;
9174 /* Zero all fields portably. */
9179 /* Cache value so we don't rescan instruction chain over and over. */
9180 if (cfun
->machine
->insn_chain_scanned_p
== 0)
9182 cfun
->machine
->insn_chain_scanned_p
= 1;
9183 info_ptr
->spe_64bit_regs_used
= (int) spe_func_has_64bit_regs_p ();
9187 /* Select which calling sequence. */
9188 info_ptr
->abi
= DEFAULT_ABI
;
9190 /* Calculate which registers need to be saved & save area size. */
9191 info_ptr
->first_gp_reg_save
= first_reg_to_save ();
9192 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
9193 even if it currently looks like we won't. */
9194 if (((TARGET_TOC
&& TARGET_MINIMAL_TOC
)
9195 || (flag_pic
== 1 && DEFAULT_ABI
== ABI_V4
)
9196 || (flag_pic
&& DEFAULT_ABI
== ABI_DARWIN
))
9197 && info_ptr
->first_gp_reg_save
> RS6000_PIC_OFFSET_TABLE_REGNUM
)
9198 info_ptr
->gp_size
= reg_size
* (32 - RS6000_PIC_OFFSET_TABLE_REGNUM
);
9200 info_ptr
->gp_size
= reg_size
* (32 - info_ptr
->first_gp_reg_save
);
9202 /* For the SPE, we have an additional upper 32-bits on each GPR.
9203 Ideally we should save the entire 64-bits only when the upper
9204 half is used in SIMD instructions. Since we only record
9205 registers live (not the size they are used in), this proves
9206 difficult because we'd have to traverse the instruction chain at
9207 the right time, taking reload into account. This is a real pain,
9208 so we opt to save the GPRs in 64-bits always if but one register
9209 gets used in 64-bits. Otherwise, all the registers in the frame
9210 get saved in 32-bits.
9212 So... since when we save all GPRs (except the SP) in 64-bits, the
9213 traditional GP save area will be empty. */
9214 if (TARGET_SPE_ABI
&& info_ptr
->spe_64bit_regs_used
!= 0)
9215 info_ptr
->gp_size
= 0;
9217 info_ptr
->first_fp_reg_save
= first_fp_reg_to_save ();
9218 info_ptr
->fp_size
= 8 * (64 - info_ptr
->first_fp_reg_save
);
9220 info_ptr
->first_altivec_reg_save
= first_altivec_reg_to_save ();
9221 info_ptr
->altivec_size
= 16 * (LAST_ALTIVEC_REGNO
+ 1
9222 - info_ptr
->first_altivec_reg_save
);
9224 /* Does this function call anything? */
9225 info_ptr
->calls_p
= (! current_function_is_leaf
9226 || cfun
->machine
->ra_needs_full_frame
);
9228 /* Determine if we need to save the link register. */
9229 if (rs6000_ra_ever_killed ()
9230 || (DEFAULT_ABI
== ABI_AIX
9231 && current_function_profile
9232 && !TARGET_PROFILE_KERNEL
)
9233 #ifdef TARGET_RELOCATABLE
9234 || (TARGET_RELOCATABLE
&& (get_pool_size () != 0))
9236 || (info_ptr
->first_fp_reg_save
!= 64
9237 && !FP_SAVE_INLINE (info_ptr
->first_fp_reg_save
))
9238 || info_ptr
->first_altivec_reg_save
<= LAST_ALTIVEC_REGNO
9239 || (DEFAULT_ABI
== ABI_V4
&& current_function_calls_alloca
)
9240 || (DEFAULT_ABI
== ABI_DARWIN
9242 && current_function_uses_pic_offset_table
)
9243 || info_ptr
->calls_p
)
9245 info_ptr
->lr_save_p
= 1;
9246 regs_ever_live
[LINK_REGISTER_REGNUM
] = 1;
9249 /* Determine if we need to save the condition code registers. */
9250 if (regs_ever_live
[CR2_REGNO
]
9251 || regs_ever_live
[CR3_REGNO
]
9252 || regs_ever_live
[CR4_REGNO
])
9254 info_ptr
->cr_save_p
= 1;
9255 if (DEFAULT_ABI
== ABI_V4
)
9256 info_ptr
->cr_size
= reg_size
;
9259 /* If the current function calls __builtin_eh_return, then we need
9260 to allocate stack space for registers that will hold data for
9261 the exception handler. */
9262 if (current_function_calls_eh_return
)
9265 for (i
= 0; EH_RETURN_DATA_REGNO (i
) != INVALID_REGNUM
; ++i
)
9268 /* SPE saves EH registers in 64-bits. */
9269 ehrd_size
= i
* (TARGET_SPE_ABI
9270 && info_ptr
->spe_64bit_regs_used
!= 0
9271 ? UNITS_PER_SPE_WORD
: UNITS_PER_WORD
);
9276 /* Determine various sizes. */
9277 info_ptr
->reg_size
= reg_size
;
9278 info_ptr
->fixed_size
= RS6000_SAVE_AREA
;
9279 info_ptr
->varargs_size
= RS6000_VARARGS_AREA
;
9280 info_ptr
->vars_size
= RS6000_ALIGN (get_frame_size (), 8);
9281 info_ptr
->parm_size
= RS6000_ALIGN (current_function_outgoing_args_size
,
9284 if (TARGET_SPE_ABI
&& info_ptr
->spe_64bit_regs_used
!= 0)
9285 info_ptr
->spe_gp_size
= 8 * (32 - info_ptr
->first_gp_reg_save
);
9287 info_ptr
->spe_gp_size
= 0;
9289 if (TARGET_ALTIVEC_ABI
&& TARGET_ALTIVEC_VRSAVE
)
9291 info_ptr
->vrsave_mask
= compute_vrsave_mask ();
9292 info_ptr
->vrsave_size
= info_ptr
->vrsave_mask
? 4 : 0;
9296 info_ptr
->vrsave_mask
= 0;
9297 info_ptr
->vrsave_size
= 0;
9300 /* Calculate the offsets. */
9301 switch (DEFAULT_ABI
)
9308 case ABI_AIX_NODESC
:
9310 info_ptr
->fp_save_offset
= - info_ptr
->fp_size
;
9311 info_ptr
->gp_save_offset
= info_ptr
->fp_save_offset
- info_ptr
->gp_size
;
9313 if (TARGET_ALTIVEC_ABI
)
9315 info_ptr
->vrsave_save_offset
9316 = info_ptr
->gp_save_offset
- info_ptr
->vrsave_size
;
9318 /* Align stack so vector save area is on a quadword boundary. */
9319 if (info_ptr
->altivec_size
!= 0)
9320 info_ptr
->altivec_padding_size
9321 = 16 - (-info_ptr
->vrsave_save_offset
% 16);
9323 info_ptr
->altivec_padding_size
= 0;
9325 info_ptr
->altivec_save_offset
9326 = info_ptr
->vrsave_save_offset
9327 - info_ptr
->altivec_padding_size
9328 - info_ptr
->altivec_size
;
9330 /* Adjust for AltiVec case. */
9331 info_ptr
->ehrd_offset
= info_ptr
->altivec_save_offset
- ehrd_size
;
9334 info_ptr
->ehrd_offset
= info_ptr
->gp_save_offset
- ehrd_size
;
9335 info_ptr
->cr_save_offset
= reg_size
; /* first word when 64-bit. */
9336 info_ptr
->lr_save_offset
= 2*reg_size
;
9340 info_ptr
->fp_save_offset
= - info_ptr
->fp_size
;
9341 info_ptr
->gp_save_offset
= info_ptr
->fp_save_offset
- info_ptr
->gp_size
;
9342 info_ptr
->cr_save_offset
= info_ptr
->gp_save_offset
- info_ptr
->cr_size
;
9344 if (TARGET_SPE_ABI
&& info_ptr
->spe_64bit_regs_used
!= 0)
9346 /* Align stack so SPE GPR save area is aligned on a
9347 double-word boundary. */
9348 if (info_ptr
->spe_gp_size
!= 0)
9349 info_ptr
->spe_padding_size
9350 = 8 - (-info_ptr
->cr_save_offset
% 8);
9352 info_ptr
->spe_padding_size
= 0;
9354 info_ptr
->spe_gp_save_offset
9355 = info_ptr
->cr_save_offset
9356 - info_ptr
->spe_padding_size
9357 - info_ptr
->spe_gp_size
;
9359 /* Adjust for SPE case. */
9360 info_ptr
->toc_save_offset
9361 = info_ptr
->spe_gp_save_offset
- info_ptr
->toc_size
;
9363 else if (TARGET_ALTIVEC_ABI
)
9365 info_ptr
->vrsave_save_offset
9366 = info_ptr
->cr_save_offset
- info_ptr
->vrsave_size
;
9368 /* Align stack so vector save area is on a quadword boundary. */
9369 if (info_ptr
->altivec_size
!= 0)
9370 info_ptr
->altivec_padding_size
9371 = 16 - (-info_ptr
->vrsave_save_offset
% 16);
9373 info_ptr
->altivec_padding_size
= 0;
9375 info_ptr
->altivec_save_offset
9376 = info_ptr
->vrsave_save_offset
9377 - info_ptr
->altivec_padding_size
9378 - info_ptr
->altivec_size
;
9380 /* Adjust for AltiVec case. */
9381 info_ptr
->toc_save_offset
9382 = info_ptr
->altivec_save_offset
- info_ptr
->toc_size
;
9385 info_ptr
->toc_save_offset
= info_ptr
->cr_save_offset
- info_ptr
->toc_size
;
9386 info_ptr
->ehrd_offset
= info_ptr
->toc_save_offset
- ehrd_size
;
9387 info_ptr
->lr_save_offset
= reg_size
;
9391 info_ptr
->save_size
= RS6000_ALIGN (info_ptr
->fp_size
9393 + info_ptr
->altivec_size
9394 + info_ptr
->altivec_padding_size
9395 + info_ptr
->vrsave_size
9396 + info_ptr
->spe_gp_size
9397 + info_ptr
->spe_padding_size
9401 + info_ptr
->vrsave_size
9402 + info_ptr
->toc_size
,
9403 (TARGET_ALTIVEC_ABI
|| ABI_DARWIN
)
9406 total_raw_size
= (info_ptr
->vars_size
9407 + info_ptr
->parm_size
9408 + info_ptr
->save_size
9409 + info_ptr
->varargs_size
9410 + info_ptr
->fixed_size
);
9412 info_ptr
->total_size
=
9413 RS6000_ALIGN (total_raw_size
, ABI_STACK_BOUNDARY
/ BITS_PER_UNIT
);
9415 /* Determine if we need to allocate any stack frame:
9417 For AIX we need to push the stack if a frame pointer is needed
9418 (because the stack might be dynamically adjusted), if we are
9419 debugging, if we make calls, or if the sum of fp_save, gp_save,
9420 and local variables are more than the space needed to save all
9421 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
9422 + 18*8 = 288 (GPR13 reserved).
9424 For V.4 we don't have the stack cushion that AIX uses, but assume
9425 that the debugger can handle stackless frames. */
9427 if (info_ptr
->calls_p
)
9428 info_ptr
->push_p
= 1;
9430 else if (DEFAULT_ABI
== ABI_V4
)
9431 info_ptr
->push_p
= total_raw_size
> info_ptr
->fixed_size
;
9433 else if (frame_pointer_needed
)
9434 info_ptr
->push_p
= 1;
9436 else if (TARGET_XCOFF
&& write_symbols
!= NO_DEBUG
)
9437 info_ptr
->push_p
= 1;
9441 = total_raw_size
- info_ptr
->fixed_size
> (TARGET_32BIT
? 220 : 288);
9443 /* Zero offsets if we're not saving those registers. */
9444 if (info_ptr
->fp_size
== 0)
9445 info_ptr
->fp_save_offset
= 0;
9447 if (info_ptr
->gp_size
== 0)
9448 info_ptr
->gp_save_offset
= 0;
9450 if (! TARGET_ALTIVEC_ABI
|| info_ptr
->altivec_size
== 0)
9451 info_ptr
->altivec_save_offset
= 0;
9453 if (! TARGET_ALTIVEC_ABI
|| info_ptr
->vrsave_mask
== 0)
9454 info_ptr
->vrsave_save_offset
= 0;
9456 if (! TARGET_SPE_ABI
9457 || info_ptr
->spe_64bit_regs_used
== 0
9458 || info_ptr
->spe_gp_size
== 0)
9459 info_ptr
->spe_gp_save_offset
= 0;
9461 if (! info_ptr
->lr_save_p
)
9462 info_ptr
->lr_save_offset
= 0;
9464 if (! info_ptr
->cr_save_p
)
9465 info_ptr
->cr_save_offset
= 0;
9467 if (! info_ptr
->toc_save_p
)
9468 info_ptr
->toc_save_offset
= 0;
9473 /* Return true if the current function uses any GPRs in 64-bit SIMD
9477 spe_func_has_64bit_regs_p ()
9481 /* Functions that save and restore all the call-saved registers will
9482 need to save/restore the registers in 64-bits. */
9483 if (current_function_calls_eh_return
9484 || current_function_calls_setjmp
9485 || current_function_has_nonlocal_goto
)
9488 insns
= get_insns ();
9490 for (insn
= NEXT_INSN (insns
); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
9497 if (GET_CODE (i
) == SET
9498 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i
))))
9507 debug_stack_info (info
)
9508 rs6000_stack_t
*info
;
9510 const char *abi_string
;
9513 info
= rs6000_stack_info ();
9515 fprintf (stderr
, "\nStack information for function %s:\n",
9516 ((current_function_decl
&& DECL_NAME (current_function_decl
))
9517 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl
))
9522 default: abi_string
= "Unknown"; break;
9523 case ABI_NONE
: abi_string
= "NONE"; break;
9525 case ABI_AIX_NODESC
: abi_string
= "AIX"; break;
9526 case ABI_DARWIN
: abi_string
= "Darwin"; break;
9527 case ABI_V4
: abi_string
= "V.4"; break;
9530 fprintf (stderr
, "\tABI = %5s\n", abi_string
);
9532 if (TARGET_ALTIVEC_ABI
)
9533 fprintf (stderr
, "\tALTIVEC ABI extensions enabled.\n");
9536 fprintf (stderr
, "\tSPE ABI extensions enabled.\n");
9538 if (info
->first_gp_reg_save
!= 32)
9539 fprintf (stderr
, "\tfirst_gp_reg_save = %5d\n", info
->first_gp_reg_save
);
9541 if (info
->first_fp_reg_save
!= 64)
9542 fprintf (stderr
, "\tfirst_fp_reg_save = %5d\n", info
->first_fp_reg_save
);
9544 if (info
->first_altivec_reg_save
<= LAST_ALTIVEC_REGNO
)
9545 fprintf (stderr
, "\tfirst_altivec_reg_save = %5d\n",
9546 info
->first_altivec_reg_save
);
9548 if (info
->lr_save_p
)
9549 fprintf (stderr
, "\tlr_save_p = %5d\n", info
->lr_save_p
);
9551 if (info
->cr_save_p
)
9552 fprintf (stderr
, "\tcr_save_p = %5d\n", info
->cr_save_p
);
9554 if (info
->toc_save_p
)
9555 fprintf (stderr
, "\ttoc_save_p = %5d\n", info
->toc_save_p
);
9557 if (info
->vrsave_mask
)
9558 fprintf (stderr
, "\tvrsave_mask = 0x%x\n", info
->vrsave_mask
);
9561 fprintf (stderr
, "\tpush_p = %5d\n", info
->push_p
);
9564 fprintf (stderr
, "\tcalls_p = %5d\n", info
->calls_p
);
9566 if (info
->gp_save_offset
)
9567 fprintf (stderr
, "\tgp_save_offset = %5d\n", info
->gp_save_offset
);
9569 if (info
->fp_save_offset
)
9570 fprintf (stderr
, "\tfp_save_offset = %5d\n", info
->fp_save_offset
);
9572 if (info
->altivec_save_offset
)
9573 fprintf (stderr
, "\taltivec_save_offset = %5d\n",
9574 info
->altivec_save_offset
);
9576 if (info
->spe_gp_save_offset
)
9577 fprintf (stderr
, "\tspe_gp_save_offset = %5d\n",
9578 info
->spe_gp_save_offset
);
9580 if (info
->vrsave_save_offset
)
9581 fprintf (stderr
, "\tvrsave_save_offset = %5d\n",
9582 info
->vrsave_save_offset
);
9584 if (info
->lr_save_offset
)
9585 fprintf (stderr
, "\tlr_save_offset = %5d\n", info
->lr_save_offset
);
9587 if (info
->cr_save_offset
)
9588 fprintf (stderr
, "\tcr_save_offset = %5d\n", info
->cr_save_offset
);
9590 if (info
->toc_save_offset
)
9591 fprintf (stderr
, "\ttoc_save_offset = %5d\n", info
->toc_save_offset
);
9593 if (info
->varargs_save_offset
)
9594 fprintf (stderr
, "\tvarargs_save_offset = %5d\n", info
->varargs_save_offset
);
9596 if (info
->total_size
)
9597 fprintf (stderr
, "\ttotal_size = %5d\n", info
->total_size
);
9599 if (info
->varargs_size
)
9600 fprintf (stderr
, "\tvarargs_size = %5d\n", info
->varargs_size
);
9602 if (info
->vars_size
)
9603 fprintf (stderr
, "\tvars_size = %5d\n", info
->vars_size
);
9605 if (info
->parm_size
)
9606 fprintf (stderr
, "\tparm_size = %5d\n", info
->parm_size
);
9608 if (info
->fixed_size
)
9609 fprintf (stderr
, "\tfixed_size = %5d\n", info
->fixed_size
);
9612 fprintf (stderr
, "\tgp_size = %5d\n", info
->gp_size
);
9614 if (info
->spe_gp_size
)
9615 fprintf (stderr
, "\tspe_gp_size = %5d\n", info
->spe_gp_size
);
9618 fprintf (stderr
, "\tfp_size = %5d\n", info
->fp_size
);
9620 if (info
->altivec_size
)
9621 fprintf (stderr
, "\taltivec_size = %5d\n", info
->altivec_size
);
9623 if (info
->vrsave_size
)
9624 fprintf (stderr
, "\tvrsave_size = %5d\n", info
->vrsave_size
);
9626 if (info
->altivec_padding_size
)
9627 fprintf (stderr
, "\taltivec_padding_size= %5d\n",
9628 info
->altivec_padding_size
);
9630 if (info
->spe_padding_size
)
9631 fprintf (stderr
, "\tspe_padding_size = %5d\n",
9632 info
->spe_padding_size
);
9635 fprintf (stderr
, "\tlr_size = %5d\n", info
->lr_size
);
9638 fprintf (stderr
, "\tcr_size = %5d\n", info
->cr_size
);
9641 fprintf (stderr
, "\ttoc_size = %5d\n", info
->toc_size
);
9643 if (info
->save_size
)
9644 fprintf (stderr
, "\tsave_size = %5d\n", info
->save_size
);
9646 if (info
->reg_size
!= 4)
9647 fprintf (stderr
, "\treg_size = %5d\n", info
->reg_size
);
9649 fprintf (stderr
, "\n");
9653 rs6000_return_addr (count
, frame
)
9657 /* Currently we don't optimize very well between prolog and body
9658 code and for PIC code the code can be actually quite bad, so
9659 don't try to be too clever here. */
9660 if (count
!= 0 || (DEFAULT_ABI
!= ABI_AIX
&& flag_pic
))
9662 cfun
->machine
->ra_needs_full_frame
= 1;
9669 plus_constant (copy_to_reg
9670 (gen_rtx_MEM (Pmode
,
9671 memory_address (Pmode
, frame
))),
9672 RETURN_ADDRESS_OFFSET
)));
9675 return get_hard_reg_initial_val (Pmode
, LINK_REGISTER_REGNUM
);
9678 /* Say whether a function is a candidate for sibcall handling or not.
9679 We do not allow indirect calls to be optimized into sibling calls.
9680 Also, we can't do it if there are any vector parameters; there's
9681 nowhere to put the VRsave code so it works; note that functions with
9682 vector parameters are required to have a prototype, so the argument
9683 type info must be available here. (The tail recursion case can work
9684 with vector parameters, but there's no way to distinguish here.) */
9686 rs6000_function_ok_for_sibcall (decl
, exp
)
9688 tree exp ATTRIBUTE_UNUSED
;
9693 if (TARGET_ALTIVEC_VRSAVE
)
9695 for (type
= TYPE_ARG_TYPES (TREE_TYPE (decl
));
9696 type
; type
= TREE_CHAIN (type
))
9698 if (TREE_CODE (TREE_VALUE (type
)) == VECTOR_TYPE
)
9702 if (DEFAULT_ABI
== ABI_DARWIN
9703 || (*targetm
.binds_local_p
) (decl
))
9705 tree attr_list
= TYPE_ATTRIBUTES (TREE_TYPE (decl
));
9707 if (!lookup_attribute ("longcall", attr_list
)
9708 || lookup_attribute ("shortcall", attr_list
))
9716 rs6000_ra_ever_killed ()
9722 /* Irritatingly, there are two kinds of thunks -- those created with
9723 TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
9724 through the regular part of the compiler. This is a very hacky
9725 way to tell them apart. */
9726 if (current_function_is_thunk
&& !no_new_pseudos
)
9729 /* regs_ever_live has LR marked as used if any sibcalls are present,
9730 but this should not force saving and restoring in the
9731 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
9732 clobbers LR, so that is inappropriate. */
9734 /* Also, the prologue can generate a store into LR that
9735 doesn't really count, like this:
9738 bcl to set PIC register
9742 When we're called from the epilogue, we need to avoid counting
9745 push_topmost_sequence ();
9747 pop_topmost_sequence ();
9748 reg
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
9750 for (insn
= NEXT_INSN (top
); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
9754 if (FIND_REG_INC_NOTE (insn
, reg
))
9756 else if (GET_CODE (insn
) == CALL_INSN
9757 && !SIBLING_CALL_P (insn
))
9759 else if (set_of (reg
, insn
) != NULL_RTX
9760 && !prologue_epilogue_contains (insn
))
9767 /* Add a REG_MAYBE_DEAD note to the insn. */
9769 rs6000_maybe_dead (insn
)
9772 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
,
9777 /* Emit instructions needed to load the TOC register.
9778 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9779 a constant pool; or for SVR4 -fpic. */
9782 rs6000_emit_load_toc_table (fromprolog
)
9786 dest
= gen_rtx_REG (Pmode
, RS6000_PIC_OFFSET_TABLE_REGNUM
);
9788 if (TARGET_ELF
&& DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1)
9790 rtx temp
= (fromprolog
9791 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
9792 : gen_reg_rtx (Pmode
));
9793 insn
= emit_insn (gen_load_toc_v4_pic_si (temp
));
9795 rs6000_maybe_dead (insn
);
9796 insn
= emit_move_insn (dest
, temp
);
9798 rs6000_maybe_dead (insn
);
9800 else if (TARGET_ELF
&& DEFAULT_ABI
!= ABI_AIX
&& flag_pic
== 2)
9803 rtx tempLR
= (fromprolog
9804 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
9805 : gen_reg_rtx (Pmode
));
9806 rtx temp0
= (fromprolog
9807 ? gen_rtx_REG (Pmode
, 0)
9808 : gen_reg_rtx (Pmode
));
9811 /* possibly create the toc section */
9812 if (! toc_initialized
)
9815 function_section (current_function_decl
);
9822 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCF", rs6000_pic_labelno
);
9823 symF
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9825 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCL", rs6000_pic_labelno
);
9826 symL
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9828 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR
,
9830 rs6000_maybe_dead (emit_move_insn (dest
, tempLR
));
9831 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0
, dest
,
9838 static int reload_toc_labelno
= 0;
9840 tocsym
= gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
);
9842 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCG", reload_toc_labelno
++);
9843 symF
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9845 emit_insn (gen_load_toc_v4_PIC_1b (tempLR
, symF
, tocsym
));
9846 emit_move_insn (dest
, tempLR
);
9847 emit_move_insn (temp0
, gen_rtx_MEM (Pmode
, dest
));
9849 insn
= emit_insn (gen_addsi3 (dest
, temp0
, dest
));
9851 rs6000_maybe_dead (insn
);
9853 else if (TARGET_ELF
&& !TARGET_AIX
&& flag_pic
== 0 && TARGET_MINIMAL_TOC
)
9855 /* This is for AIX code running in non-PIC ELF32. */
9858 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCTOC", 1);
9859 realsym
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9861 insn
= emit_insn (gen_elf_high (dest
, realsym
));
9863 rs6000_maybe_dead (insn
);
9864 insn
= emit_insn (gen_elf_low (dest
, dest
, realsym
));
9866 rs6000_maybe_dead (insn
);
9868 else if (DEFAULT_ABI
== ABI_AIX
)
9871 insn
= emit_insn (gen_load_toc_aix_si (dest
));
9873 insn
= emit_insn (gen_load_toc_aix_di (dest
));
9875 rs6000_maybe_dead (insn
);
/* Return the alias set used for TOC references, allocating it lazily
   on the first call.  */

int
get_TOC_alias_set ()
{
  static int set = -1;
  if (set == -1)
    set = new_alias_set ();
  return set;
}
9890 /* This retuns nonzero if the current function uses the TOC. This is
9891 determined by the presence of (unspec ... 7), which is generated by
9892 the various load_toc_* patterns. */
9899 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
9902 rtx pat
= PATTERN (insn
);
9905 if (GET_CODE (pat
) == PARALLEL
)
9906 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
9907 if (GET_CODE (XVECEXP (PATTERN (insn
), 0, i
)) == UNSPEC
9908 && XINT (XVECEXP (PATTERN (insn
), 0, i
), 1) == 7)
9915 create_TOC_reference (symbol
)
9918 return gen_rtx_PLUS (Pmode
,
9919 gen_rtx_REG (Pmode
, TOC_REGISTER
),
9920 gen_rtx_CONST (Pmode
,
9921 gen_rtx_MINUS (Pmode
, symbol
,
9922 gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
))));
9926 /* __throw will restore its own return address to be the same as the
9927 return address of the function that the throw is being made to.
9928 This is unfortunate, because we want to check the original
9929 return address to see if we need to restore the TOC.
9930 So we have to squirrel it away here.
9931 This is used only in compiling __throw and __rethrow.
9933 Most of this code should be removed by CSE. */
9934 static rtx insn_after_throw
;
9936 /* This does the saving... */
9938 rs6000_aix_emit_builtin_unwind_init ()
9941 rtx stack_top
= gen_reg_rtx (Pmode
);
9942 rtx opcode_addr
= gen_reg_rtx (Pmode
);
9944 insn_after_throw
= gen_reg_rtx (SImode
);
9946 mem
= gen_rtx_MEM (Pmode
, hard_frame_pointer_rtx
);
9947 emit_move_insn (stack_top
, mem
);
9949 mem
= gen_rtx_MEM (Pmode
,
9950 gen_rtx_PLUS (Pmode
, stack_top
,
9951 GEN_INT (2 * GET_MODE_SIZE (Pmode
))));
9952 emit_move_insn (opcode_addr
, mem
);
9953 emit_move_insn (insn_after_throw
, gen_rtx_MEM (SImode
, opcode_addr
));
9956 /* Emit insns to _restore_ the TOC register, at runtime (specifically
9957 in _eh.o). Only used on AIX.
9959 The idea is that on AIX, function calls look like this:
9960 bl somefunction-trampoline
9964 somefunction-trampoline:
9966 ... load function address in the count register ...
9968 or like this, if the linker determines that this is not a cross-module call
9969 and so the TOC need not be restored:
9972 or like this, if the compiler could determine that this is not a
9975 now, the tricky bit here is that register 2 is saved and restored
9976 by the _linker_, so we can't readily generate debugging information
9977 for it. So we need to go back up the call chain looking at the
9978 insns at return addresses to see which calls saved the TOC register
9979 and so see where it gets restored from.
9981 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
9982 just before the actual epilogue.
9984 On the bright side, this incurs no space or time overhead unless an
9985 exception is thrown, except for the extra code in libgcc.a.
9987 The parameter STACKSIZE is a register containing (at runtime)
9988 the amount to be popped off the stack in addition to the stack frame
9989 of this routine (which will be __throw or __rethrow, and so is
9990 guaranteed to have a stack frame). */
9993 rs6000_emit_eh_toc_restore (stacksize
)
9997 rtx bottom_of_stack
= gen_reg_rtx (Pmode
);
9998 rtx tocompare
= gen_reg_rtx (SImode
);
9999 rtx opcode
= gen_reg_rtx (SImode
);
10000 rtx opcode_addr
= gen_reg_rtx (Pmode
);
10002 rtx loop_start
= gen_label_rtx ();
10003 rtx no_toc_restore_needed
= gen_label_rtx ();
10004 rtx loop_exit
= gen_label_rtx ();
10006 mem
= gen_rtx_MEM (Pmode
, hard_frame_pointer_rtx
);
10007 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10008 emit_move_insn (bottom_of_stack
, mem
);
10010 top_of_stack
= expand_binop (Pmode
, add_optab
,
10011 bottom_of_stack
, stacksize
,
10012 NULL_RTX
, 1, OPTAB_WIDEN
);
10014 emit_move_insn (tocompare
, gen_int_mode (TARGET_32BIT
? 0x80410014
10015 : 0xE8410028, SImode
));
10017 if (insn_after_throw
== NULL_RTX
)
10019 emit_move_insn (opcode
, insn_after_throw
);
10021 emit_note (NULL
, NOTE_INSN_LOOP_BEG
);
10022 emit_label (loop_start
);
10024 do_compare_rtx_and_jump (opcode
, tocompare
, NE
, 1,
10025 SImode
, NULL_RTX
, NULL_RTX
,
10026 no_toc_restore_needed
);
10028 mem
= gen_rtx_MEM (Pmode
,
10029 gen_rtx_PLUS (Pmode
, bottom_of_stack
,
10030 GEN_INT (5 * GET_MODE_SIZE (Pmode
))));
10031 emit_move_insn (gen_rtx_REG (Pmode
, 2), mem
);
10033 emit_label (no_toc_restore_needed
);
10034 do_compare_rtx_and_jump (top_of_stack
, bottom_of_stack
, EQ
, 1,
10035 Pmode
, NULL_RTX
, NULL_RTX
,
10038 mem
= gen_rtx_MEM (Pmode
, bottom_of_stack
);
10039 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10040 emit_move_insn (bottom_of_stack
, mem
);
10042 mem
= gen_rtx_MEM (Pmode
,
10043 gen_rtx_PLUS (Pmode
, bottom_of_stack
,
10044 GEN_INT (2 * GET_MODE_SIZE (Pmode
))));
10045 emit_move_insn (opcode_addr
, mem
);
10046 emit_move_insn (opcode
, gen_rtx_MEM (SImode
, opcode_addr
));
10048 emit_note (NULL
, NOTE_INSN_LOOP_CONT
);
10049 emit_jump (loop_start
);
10050 emit_note (NULL
, NOTE_INSN_LOOP_END
);
10051 emit_label (loop_exit
);
10053 #endif /* TARGET_AIX */
10055 /* This ties together stack memory (MEM with an alias set of
10056 rs6000_sr_alias_set) and the change to the stack pointer. */
10059 rs6000_emit_stack_tie ()
10061 rtx mem
= gen_rtx_MEM (BLKmode
, gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
));
10063 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10064 emit_insn (gen_stack_tie (mem
));
10067 /* Emit the correct code for allocating stack space, as insns.
10068 If COPY_R12, make sure a copy of the old frame is left in r12.
10069 The generated code may use hard register 0 as a temporary. */
10072 rs6000_emit_allocate_stack (size
, copy_r12
)
10073 HOST_WIDE_INT size
;
10077 rtx stack_reg
= gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
10078 rtx tmp_reg
= gen_rtx_REG (Pmode
, 0);
10079 rtx todec
= GEN_INT (-size
);
10081 if (current_function_limit_stack
)
10083 if (REG_P (stack_limit_rtx
)
10084 && REGNO (stack_limit_rtx
) > 1
10085 && REGNO (stack_limit_rtx
) <= 31)
10087 emit_insn (TARGET_32BIT
10088 ? gen_addsi3 (tmp_reg
,
10091 : gen_adddi3 (tmp_reg
,
10095 emit_insn (gen_cond_trap (LTU
, stack_reg
, tmp_reg
,
10098 else if (GET_CODE (stack_limit_rtx
) == SYMBOL_REF
10100 && DEFAULT_ABI
== ABI_V4
)
10102 rtx toload
= gen_rtx_CONST (VOIDmode
,
10103 gen_rtx_PLUS (Pmode
,
10107 emit_insn (gen_elf_high (tmp_reg
, toload
));
10108 emit_insn (gen_elf_low (tmp_reg
, tmp_reg
, toload
));
10109 emit_insn (gen_cond_trap (LTU
, stack_reg
, tmp_reg
,
10113 warning ("stack limit expression is not supported");
10116 if (copy_r12
|| ! TARGET_UPDATE
)
10117 emit_move_insn (gen_rtx_REG (Pmode
, 12), stack_reg
);
10123 /* Need a note here so that try_split doesn't get confused. */
10124 if (get_last_insn() == NULL_RTX
)
10125 emit_note (0, NOTE_INSN_DELETED
);
10126 insn
= emit_move_insn (tmp_reg
, todec
);
10127 try_split (PATTERN (insn
), insn
, 0);
10131 insn
= emit_insn (TARGET_32BIT
10132 ? gen_movsi_update (stack_reg
, stack_reg
,
10134 : gen_movdi_update (stack_reg
, stack_reg
,
10135 todec
, stack_reg
));
10139 insn
= emit_insn (TARGET_32BIT
10140 ? gen_addsi3 (stack_reg
, stack_reg
, todec
)
10141 : gen_adddi3 (stack_reg
, stack_reg
, todec
));
10142 emit_move_insn (gen_rtx_MEM (Pmode
, stack_reg
),
10143 gen_rtx_REG (Pmode
, 12));
10146 RTX_FRAME_RELATED_P (insn
) = 1;
10148 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
10149 gen_rtx_SET (VOIDmode
, stack_reg
,
10150 gen_rtx_PLUS (Pmode
, stack_reg
,
10155 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
10156 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
10157 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
10158 deduce these equivalences by itself so it wasn't necessary to hold
10159 its hand so much. */
10162 rs6000_frame_related (insn
, reg
, val
, reg2
, rreg
)
10171 /* copy_rtx will not make unique copies of registers, so we need to
10172 ensure we don't have unwanted sharing here. */
10174 reg
= gen_raw_REG (GET_MODE (reg
), REGNO (reg
));
10177 reg
= gen_raw_REG (GET_MODE (reg
), REGNO (reg
));
10179 real
= copy_rtx (PATTERN (insn
));
10181 if (reg2
!= NULL_RTX
)
10182 real
= replace_rtx (real
, reg2
, rreg
);
10184 real
= replace_rtx (real
, reg
,
10185 gen_rtx_PLUS (Pmode
, gen_rtx_REG (Pmode
,
10186 STACK_POINTER_REGNUM
),
10189 /* We expect that 'real' is either a SET or a PARALLEL containing
10190 SETs (and possibly other stuff). In a PARALLEL, all the SETs
10191 are important so they all have to be marked RTX_FRAME_RELATED_P. */
10193 if (GET_CODE (real
) == SET
)
10197 temp
= simplify_rtx (SET_SRC (set
));
10199 SET_SRC (set
) = temp
;
10200 temp
= simplify_rtx (SET_DEST (set
));
10202 SET_DEST (set
) = temp
;
10203 if (GET_CODE (SET_DEST (set
)) == MEM
)
10205 temp
= simplify_rtx (XEXP (SET_DEST (set
), 0));
10207 XEXP (SET_DEST (set
), 0) = temp
;
10210 else if (GET_CODE (real
) == PARALLEL
)
10213 for (i
= 0; i
< XVECLEN (real
, 0); i
++)
10214 if (GET_CODE (XVECEXP (real
, 0, i
)) == SET
)
10216 rtx set
= XVECEXP (real
, 0, i
);
10218 temp
= simplify_rtx (SET_SRC (set
));
10220 SET_SRC (set
) = temp
;
10221 temp
= simplify_rtx (SET_DEST (set
));
10223 SET_DEST (set
) = temp
;
10224 if (GET_CODE (SET_DEST (set
)) == MEM
)
10226 temp
= simplify_rtx (XEXP (SET_DEST (set
), 0));
10228 XEXP (SET_DEST (set
), 0) = temp
;
10230 RTX_FRAME_RELATED_P (set
) = 1;
10237 real
= spe_synthesize_frame_save (real
);
10239 RTX_FRAME_RELATED_P (insn
) = 1;
10240 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
10245 /* Given an SPE frame note, return a PARALLEL of SETs with the
10246 original note, plus a synthetic register save. */
10249 spe_synthesize_frame_save (real
)
10252 rtx synth
, offset
, reg
, real2
;
10254 if (GET_CODE (real
) != SET
10255 || GET_MODE (SET_SRC (real
)) != V2SImode
)
10258 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
10259 frame related note. The parallel contains a set of the register
10260 being saved, and another set to a synthetic register (n+1200).
10261 This is so we can differentiate between 64-bit and 32-bit saves.
10262 Words cannot describe this nastiness. */
10264 if (GET_CODE (SET_DEST (real
)) != MEM
10265 || GET_CODE (XEXP (SET_DEST (real
), 0)) != PLUS
10266 || GET_CODE (SET_SRC (real
)) != REG
)
10270 (set (mem (plus (reg x) (const y)))
10273 (set (mem (plus (reg x) (const y+4)))
10277 real2
= copy_rtx (real
);
10278 PUT_MODE (SET_DEST (real2
), SImode
);
10279 reg
= SET_SRC (real2
);
10280 real2
= replace_rtx (real2
, reg
, gen_rtx_REG (SImode
, REGNO (reg
)));
10281 synth
= copy_rtx (real2
);
10283 if (BYTES_BIG_ENDIAN
)
10285 offset
= XEXP (XEXP (SET_DEST (real2
), 0), 1);
10286 real2
= replace_rtx (real2
, offset
, GEN_INT (INTVAL (offset
) + 4));
10289 reg
= SET_SRC (synth
);
10291 synth
= replace_rtx (synth
, reg
,
10292 gen_rtx_REG (SImode
, REGNO (reg
) + 1200));
10294 offset
= XEXP (XEXP (SET_DEST (synth
), 0), 1);
10295 synth
= replace_rtx (synth
, offset
,
10296 GEN_INT (INTVAL (offset
)
10297 + (BYTES_BIG_ENDIAN
? 0 : 4)));
10299 RTX_FRAME_RELATED_P (synth
) = 1;
10300 RTX_FRAME_RELATED_P (real2
) = 1;
10301 if (BYTES_BIG_ENDIAN
)
10302 real
= gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, synth
, real2
));
10304 real
= gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, real2
, synth
));
10309 /* Returns an insn that has a vrsave set operation with the
10310 appropriate CLOBBERs. */
10313 generate_set_vrsave (reg
, info
, epiloguep
)
10315 rs6000_stack_t
*info
;
10319 rtx insn
, clobs
[TOTAL_ALTIVEC_REGS
+ 1];
10320 rtx vrsave
= gen_rtx_REG (SImode
, VRSAVE_REGNO
);
10323 = gen_rtx_SET (VOIDmode
,
10325 gen_rtx_UNSPEC_VOLATILE (SImode
,
10326 gen_rtvec (2, reg
, vrsave
),
10331 /* We need to clobber the registers in the mask so the scheduler
10332 does not move sets to VRSAVE before sets of AltiVec registers.
10334 However, if the function receives nonlocal gotos, reload will set
10335 all call saved registers live. We will end up with:
10337 (set (reg 999) (mem))
10338 (parallel [ (set (reg vrsave) (unspec blah))
10339 (clobber (reg 999))])
10341 The clobber will cause the store into reg 999 to be dead, and
10342 flow will attempt to delete an epilogue insn. In this case, we
10343 need an unspec use/set of the register. */
10345 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
10346 if (info
->vrsave_mask
!= 0 && ALTIVEC_REG_BIT (i
) != 0)
10348 if (!epiloguep
|| call_used_regs
[i
])
10349 clobs
[nclobs
++] = gen_rtx_CLOBBER (VOIDmode
,
10350 gen_rtx_REG (V4SImode
, i
));
10353 rtx reg
= gen_rtx_REG (V4SImode
, i
);
10356 = gen_rtx_SET (VOIDmode
,
10358 gen_rtx_UNSPEC (V4SImode
,
10359 gen_rtvec (1, reg
), 27));
10363 insn
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (nclobs
));
10365 for (i
= 0; i
< nclobs
; ++i
)
10366 XVECEXP (insn
, 0, i
) = clobs
[i
];
10371 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
10372 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
10375 emit_frame_save (frame_reg
, frame_ptr
, mode
, regno
, offset
, total_size
)
10378 enum machine_mode mode
;
10379 unsigned int regno
;
10383 rtx reg
, offset_rtx
, insn
, mem
, addr
, int_rtx
;
10384 rtx replacea
, replaceb
;
10386 int_rtx
= GEN_INT (offset
);
10388 /* Some cases that need register indexed addressing. */
10389 if ((TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
10391 && SPE_VECTOR_MODE (mode
)
10392 && !SPE_CONST_OFFSET_OK (offset
)))
10394 /* Whomever calls us must make sure r11 is available in the
10395 flow path of instructions in the prologue. */
10396 offset_rtx
= gen_rtx_REG (Pmode
, 11);
10397 emit_move_insn (offset_rtx
, int_rtx
);
10399 replacea
= offset_rtx
;
10400 replaceb
= int_rtx
;
10404 offset_rtx
= int_rtx
;
10405 replacea
= NULL_RTX
;
10406 replaceb
= NULL_RTX
;
10409 reg
= gen_rtx_REG (mode
, regno
);
10410 addr
= gen_rtx_PLUS (Pmode
, frame_reg
, offset_rtx
);
10411 mem
= gen_rtx_MEM (mode
, addr
);
10412 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10414 insn
= emit_move_insn (mem
, reg
);
10416 rs6000_frame_related (insn
, frame_ptr
, total_size
, replacea
, replaceb
);
10419 /* Emit an offset memory reference suitable for a frame store, while
10420 converting to a valid addressing mode. */
10423 gen_frame_mem_offset (mode
, reg
, offset
)
10424 enum machine_mode mode
;
10428 rtx int_rtx
, offset_rtx
;
10430 int_rtx
= GEN_INT (offset
);
10432 if (TARGET_SPE_ABI
&& SPE_VECTOR_MODE (mode
))
10434 offset_rtx
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
10435 emit_move_insn (offset_rtx
, int_rtx
);
10438 offset_rtx
= int_rtx
;
10440 return gen_rtx_MEM (mode
, gen_rtx_PLUS (Pmode
, reg
, offset_rtx
));
10443 /* Emit function prologue as insns. */
10446 rs6000_emit_prologue ()
10448 rs6000_stack_t
*info
= rs6000_stack_info ();
10449 enum machine_mode reg_mode
= TARGET_POWERPC64
? DImode
: SImode
;
10450 int reg_size
= TARGET_POWERPC64
? 8 : 4;
10451 rtx sp_reg_rtx
= gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
10452 rtx frame_ptr_rtx
= gen_rtx_REG (Pmode
, 12);
10453 rtx frame_reg_rtx
= sp_reg_rtx
;
10454 rtx cr_save_rtx
= NULL
;
10456 int saving_FPRs_inline
;
10457 int using_store_multiple
;
10458 HOST_WIDE_INT sp_offset
= 0;
10460 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
10462 reg_mode
= V2SImode
;
10466 using_store_multiple
= (TARGET_MULTIPLE
&& ! TARGET_POWERPC64
10467 && (!TARGET_SPE_ABI
10468 || info
->spe_64bit_regs_used
== 0)
10469 && info
->first_gp_reg_save
< 31);
10470 saving_FPRs_inline
= (info
->first_fp_reg_save
== 64
10471 || FP_SAVE_INLINE (info
->first_fp_reg_save
));
10473 /* For V.4, update stack before we do any saving and set back pointer. */
10474 if (info
->push_p
&& DEFAULT_ABI
== ABI_V4
)
10476 if (info
->total_size
< 32767)
10477 sp_offset
= info
->total_size
;
10479 frame_reg_rtx
= frame_ptr_rtx
;
10480 rs6000_emit_allocate_stack (info
->total_size
,
10481 (frame_reg_rtx
!= sp_reg_rtx
10482 && (info
->cr_save_p
10484 || info
->first_fp_reg_save
< 64
10485 || info
->first_gp_reg_save
< 32
10487 if (frame_reg_rtx
!= sp_reg_rtx
)
10488 rs6000_emit_stack_tie ();
10491 /* Save AltiVec registers if needed. */
10492 if (TARGET_ALTIVEC_ABI
&& info
->altivec_size
!= 0)
10496 /* There should be a non inline version of this, for when we
10497 are saving lots of vector registers. */
10498 for (i
= info
->first_altivec_reg_save
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
10499 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
10501 rtx areg
, savereg
, mem
;
10504 offset
= info
->altivec_save_offset
+ sp_offset
10505 + 16 * (i
- info
->first_altivec_reg_save
);
10507 savereg
= gen_rtx_REG (V4SImode
, i
);
10509 areg
= gen_rtx_REG (Pmode
, 0);
10510 emit_move_insn (areg
, GEN_INT (offset
));
10512 /* AltiVec addressing mode is [reg+reg]. */
10513 mem
= gen_rtx_MEM (V4SImode
,
10514 gen_rtx_PLUS (Pmode
, frame_reg_rtx
, areg
));
10516 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10518 insn
= emit_move_insn (mem
, savereg
);
10520 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10521 areg
, GEN_INT (offset
));
10525 /* VRSAVE is a bit vector representing which AltiVec registers
10526 are used. The OS uses this to determine which vector
10527 registers to save on a context switch. We need to save
10528 VRSAVE on the stack frame, add whatever AltiVec registers we
10529 used in this function, and do the corresponding magic in the
10532 if (TARGET_ALTIVEC
&& info
->vrsave_mask
!= 0)
10534 rtx reg
, mem
, vrsave
;
10537 /* Get VRSAVE onto a GPR. */
10538 reg
= gen_rtx_REG (SImode
, 12);
10539 vrsave
= gen_rtx_REG (SImode
, VRSAVE_REGNO
);
10541 emit_insn (gen_get_vrsave_internal (reg
));
10543 emit_insn (gen_rtx_SET (VOIDmode
, reg
, vrsave
));
10546 offset
= info
->vrsave_save_offset
+ sp_offset
;
10548 = gen_rtx_MEM (SImode
,
10549 gen_rtx_PLUS (Pmode
, frame_reg_rtx
, GEN_INT (offset
)));
10550 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10551 insn
= emit_move_insn (mem
, reg
);
10553 /* Include the registers in the mask. */
10554 emit_insn (gen_iorsi3 (reg
, reg
, GEN_INT ((int) info
->vrsave_mask
)));
10556 insn
= emit_insn (generate_set_vrsave (reg
, info
, 0));
10559 /* If we use the link register, get it into r0. */
10560 if (info
->lr_save_p
)
10561 emit_move_insn (gen_rtx_REG (Pmode
, 0),
10562 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
10564 /* If we need to save CR, put it into r12. */
10565 if (info
->cr_save_p
&& frame_reg_rtx
!= frame_ptr_rtx
)
10567 cr_save_rtx
= gen_rtx_REG (SImode
, 12);
10568 emit_insn (gen_movesi_from_cr (cr_save_rtx
));
10571 /* Do any required saving of fpr's. If only one or two to save, do
10572 it ourselves. Otherwise, call function. */
10573 if (saving_FPRs_inline
)
10576 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
10577 if ((regs_ever_live
[info
->first_fp_reg_save
+i
]
10578 && ! call_used_regs
[info
->first_fp_reg_save
+i
]))
10579 emit_frame_save (frame_reg_rtx
, frame_ptr_rtx
, DFmode
,
10580 info
->first_fp_reg_save
+ i
,
10581 info
->fp_save_offset
+ sp_offset
+ 8 * i
,
10584 else if (info
->first_fp_reg_save
!= 64)
10588 const char *alloc_rname
;
10590 p
= rtvec_alloc (2 + 64 - info
->first_fp_reg_save
);
10592 RTVEC_ELT (p
, 0) = gen_rtx_CLOBBER (VOIDmode
,
10593 gen_rtx_REG (Pmode
,
10594 LINK_REGISTER_REGNUM
));
10595 sprintf (rname
, "%s%d%s", SAVE_FP_PREFIX
,
10596 info
->first_fp_reg_save
- 32, SAVE_FP_SUFFIX
);
10597 alloc_rname
= ggc_strdup (rname
);
10598 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
,
10599 gen_rtx_SYMBOL_REF (Pmode
,
10601 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
10603 rtx addr
, reg
, mem
;
10604 reg
= gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
);
10605 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10606 GEN_INT (info
->fp_save_offset
10607 + sp_offset
+ 8*i
));
10608 mem
= gen_rtx_MEM (DFmode
, addr
);
10609 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10611 RTVEC_ELT (p
, i
+ 2) = gen_rtx_SET (VOIDmode
, mem
, reg
);
10613 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
10614 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10615 NULL_RTX
, NULL_RTX
);
10618 /* Save GPRs. This is done as a PARALLEL if we are using
10619 the store-multiple instructions. */
10620 if (using_store_multiple
)
10624 p
= rtvec_alloc (32 - info
->first_gp_reg_save
);
10625 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
10627 rtx addr
, reg
, mem
;
10628 reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
10629 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10630 GEN_INT (info
->gp_save_offset
10633 mem
= gen_rtx_MEM (reg_mode
, addr
);
10634 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10636 RTVEC_ELT (p
, i
) = gen_rtx_SET (VOIDmode
, mem
, reg
);
10638 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
10639 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10640 NULL_RTX
, NULL_RTX
);
10645 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
10646 if ((regs_ever_live
[info
->first_gp_reg_save
+i
]
10647 && ! call_used_regs
[info
->first_gp_reg_save
+i
])
10648 || (i
+info
->first_gp_reg_save
== RS6000_PIC_OFFSET_TABLE_REGNUM
10649 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
!= 0)
10650 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))))
10652 rtx addr
, reg
, mem
;
10653 reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
10655 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
10657 int offset
= info
->spe_gp_save_offset
+ sp_offset
+ 8 * i
;
10660 if (!SPE_CONST_OFFSET_OK (offset
))
10662 b
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
10663 emit_move_insn (b
, GEN_INT (offset
));
10666 b
= GEN_INT (offset
);
10668 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, b
);
10669 mem
= gen_rtx_MEM (V2SImode
, addr
);
10670 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10671 insn
= emit_move_insn (mem
, reg
);
10673 if (GET_CODE (b
) == CONST_INT
)
10674 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10675 NULL_RTX
, NULL_RTX
);
10677 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10678 b
, GEN_INT (offset
));
10682 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10683 GEN_INT (info
->gp_save_offset
10686 mem
= gen_rtx_MEM (reg_mode
, addr
);
10687 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10689 insn
= emit_move_insn (mem
, reg
);
10690 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10691 NULL_RTX
, NULL_RTX
);
10696 /* ??? There's no need to emit actual instructions here, but it's the
10697 easiest way to get the frame unwind information emitted. */
10698 if (current_function_calls_eh_return
)
10700 unsigned int i
, regno
;
10704 regno
= EH_RETURN_DATA_REGNO (i
);
10705 if (regno
== INVALID_REGNUM
)
10708 emit_frame_save (frame_reg_rtx
, frame_ptr_rtx
, reg_mode
, regno
,
10709 info
->ehrd_offset
+ sp_offset
10710 + reg_size
* (int) i
,
10715 /* Save lr if we used it. */
10716 if (info
->lr_save_p
)
10718 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10719 GEN_INT (info
->lr_save_offset
+ sp_offset
));
10720 rtx reg
= gen_rtx_REG (Pmode
, 0);
10721 rtx mem
= gen_rtx_MEM (Pmode
, addr
);
10722 /* This should not be of rs6000_sr_alias_set, because of
10723 __builtin_return_address. */
10725 insn
= emit_move_insn (mem
, reg
);
10726 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10727 reg
, gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
10730 /* Save CR if we use any that must be preserved. */
10731 if (info
->cr_save_p
)
10733 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10734 GEN_INT (info
->cr_save_offset
+ sp_offset
));
10735 rtx mem
= gen_rtx_MEM (SImode
, addr
);
10737 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10739 /* If r12 was used to hold the original sp, copy cr into r0 now
10741 if (REGNO (frame_reg_rtx
) == 12)
10743 cr_save_rtx
= gen_rtx_REG (SImode
, 0);
10744 emit_insn (gen_movesi_from_cr (cr_save_rtx
));
10746 insn
= emit_move_insn (mem
, cr_save_rtx
);
10748 /* Now, there's no way that dwarf2out_frame_debug_expr is going
10749 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
10750 OK. All we have to do is specify that _one_ condition code
10751 register is saved in this stack slot. The thrower's epilogue
10752 will then restore all the call-saved registers.
10753 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
10754 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10755 cr_save_rtx
, gen_rtx_REG (SImode
, CR2_REGNO
));
10758 /* Update stack and set back pointer unless this is V.4,
10759 for which it was done previously. */
10760 if (info
->push_p
&& DEFAULT_ABI
!= ABI_V4
)
10761 rs6000_emit_allocate_stack (info
->total_size
, FALSE
);
10763 /* Set frame pointer, if needed. */
10764 if (frame_pointer_needed
)
10766 insn
= emit_move_insn (gen_rtx_REG (Pmode
, FRAME_POINTER_REGNUM
),
10768 RTX_FRAME_RELATED_P (insn
) = 1;
10771 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
10772 if ((TARGET_TOC
&& TARGET_MINIMAL_TOC
&& get_pool_size () != 0)
10773 || (DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1
10774 && regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
]))
10776 /* If emit_load_toc_table will use the link register, we need to save
10777 it. We use R11 for this purpose because emit_load_toc_table
10778 can use register 0. This allows us to use a plain 'blr' to return
10779 from the procedure more often. */
10780 int save_LR_around_toc_setup
= (TARGET_ELF
10781 && DEFAULT_ABI
!= ABI_AIX
10783 && ! info
->lr_save_p
10784 && EXIT_BLOCK_PTR
->pred
!= NULL
);
10785 if (save_LR_around_toc_setup
)
10786 emit_move_insn (gen_rtx_REG (Pmode
, 11),
10787 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
10789 rs6000_emit_load_toc_table (TRUE
);
10791 if (save_LR_around_toc_setup
)
10792 emit_move_insn (gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
),
10793 gen_rtx_REG (Pmode
, 11));
10797 if (DEFAULT_ABI
== ABI_DARWIN
10798 && flag_pic
&& current_function_uses_pic_offset_table
)
10800 rtx dest
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
10801 const char *picbase
= machopic_function_base_name ();
10802 rtx src
= gen_rtx_SYMBOL_REF (Pmode
, ggc_alloc_string (picbase
, -1));
10804 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest
, src
)));
10806 rs6000_maybe_dead (
10807 emit_move_insn (gen_rtx_REG (Pmode
, RS6000_PIC_OFFSET_TABLE_REGNUM
),
10808 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)));
10813 /* Write function prologue. */
10816 rs6000_output_function_prologue (file
, size
)
10818 HOST_WIDE_INT size ATTRIBUTE_UNUSED
;
10820 rs6000_stack_t
*info
= rs6000_stack_info ();
10822 if (TARGET_DEBUG_STACK
)
10823 debug_stack_info (info
);
10825 /* Write .extern for any function we will call to save and restore
10827 if (info
->first_fp_reg_save
< 64
10828 && !FP_SAVE_INLINE (info
->first_fp_reg_save
))
10829 fprintf (file
, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
10830 SAVE_FP_PREFIX
, info
->first_fp_reg_save
- 32, SAVE_FP_SUFFIX
,
10831 RESTORE_FP_PREFIX
, info
->first_fp_reg_save
- 32,
10832 RESTORE_FP_SUFFIX
);
10834 /* Write .extern for AIX common mode routines, if needed. */
10835 if (! TARGET_POWER
&& ! TARGET_POWERPC
&& ! common_mode_defined
)
10837 fputs ("\t.extern __mulh\n", file
);
10838 fputs ("\t.extern __mull\n", file
);
10839 fputs ("\t.extern __divss\n", file
);
10840 fputs ("\t.extern __divus\n", file
);
10841 fputs ("\t.extern __quoss\n", file
);
10842 fputs ("\t.extern __quous\n", file
);
10843 common_mode_defined
= 1;
10846 if (! HAVE_prologue
)
10850 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
10851 the "toplevel" insn chain. */
10852 emit_note (0, NOTE_INSN_DELETED
);
10853 rs6000_emit_prologue ();
10854 emit_note (0, NOTE_INSN_DELETED
);
10856 /* Expand INSN_ADDRESSES so final() doesn't crash. */
10860 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
10862 INSN_ADDRESSES_NEW (insn
, addr
);
10867 if (TARGET_DEBUG_STACK
)
10868 debug_rtx_list (get_insns (), 100);
10869 final (get_insns (), file
, FALSE
, FALSE
);
10873 rs6000_pic_labelno
++;
10876 /* Emit function epilogue as insns.
10878 At present, dwarf2out_frame_debug_expr doesn't understand
10879 register restores, so we don't bother setting RTX_FRAME_RELATED_P
10880 anywhere in the epilogue. Most of the insns below would in any case
10881 need special notes to explain where r11 is in relation to the stack. */
10884 rs6000_emit_epilogue (sibcall
)
10887 rs6000_stack_t
*info
;
10888 int restoring_FPRs_inline
;
10889 int using_load_multiple
;
10890 int using_mfcr_multiple
;
10891 int use_backchain_to_restore_sp
;
10893 rtx sp_reg_rtx
= gen_rtx_REG (Pmode
, 1);
10894 rtx frame_reg_rtx
= sp_reg_rtx
;
10895 enum machine_mode reg_mode
= TARGET_POWERPC64
? DImode
: SImode
;
10896 int reg_size
= TARGET_POWERPC64
? 8 : 4;
10899 info
= rs6000_stack_info ();
10901 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
10903 reg_mode
= V2SImode
;
10907 using_load_multiple
= (TARGET_MULTIPLE
&& ! TARGET_POWERPC64
10908 && (!TARGET_SPE_ABI
10909 || info
->spe_64bit_regs_used
== 0)
10910 && info
->first_gp_reg_save
< 31);
10911 restoring_FPRs_inline
= (sibcall
10912 || current_function_calls_eh_return
10913 || info
->first_fp_reg_save
== 64
10914 || FP_SAVE_INLINE (info
->first_fp_reg_save
));
10915 use_backchain_to_restore_sp
= (frame_pointer_needed
10916 || current_function_calls_alloca
10917 || info
->total_size
> 32767);
10918 using_mfcr_multiple
= (rs6000_cpu
== PROCESSOR_PPC601
10919 || rs6000_cpu
== PROCESSOR_PPC603
10920 || rs6000_cpu
== PROCESSOR_PPC750
10923 /* If we have a frame pointer, a call to alloca, or a large stack
10924 frame, restore the old stack pointer using the backchain. Otherwise,
10925 we know what size to update it with. */
10926 if (use_backchain_to_restore_sp
)
10928 /* Under V.4, don't reset the stack pointer until after we're done
10929 loading the saved registers. */
10930 if (DEFAULT_ABI
== ABI_V4
)
10931 frame_reg_rtx
= gen_rtx_REG (Pmode
, 11);
10933 emit_move_insn (frame_reg_rtx
,
10934 gen_rtx_MEM (Pmode
, sp_reg_rtx
));
10937 else if (info
->push_p
)
10939 if (DEFAULT_ABI
== ABI_V4
)
10940 sp_offset
= info
->total_size
;
10943 emit_insn (TARGET_32BIT
10944 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
,
10945 GEN_INT (info
->total_size
))
10946 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
,
10947 GEN_INT (info
->total_size
)));
10951 /* Restore AltiVec registers if needed. */
10952 if (TARGET_ALTIVEC_ABI
&& info
->altivec_size
!= 0)
10956 for (i
= info
->first_altivec_reg_save
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
10957 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
10959 rtx addr
, areg
, mem
;
10961 areg
= gen_rtx_REG (Pmode
, 0);
10963 (areg
, GEN_INT (info
->altivec_save_offset
10965 + 16 * (i
- info
->first_altivec_reg_save
)));
10967 /* AltiVec addressing mode is [reg+reg]. */
10968 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, areg
);
10969 mem
= gen_rtx_MEM (V4SImode
, addr
);
10970 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10972 emit_move_insn (gen_rtx_REG (V4SImode
, i
), mem
);
10976 /* Restore VRSAVE if needed. */
10977 if (TARGET_ALTIVEC_ABI
&& info
->vrsave_mask
!= 0)
10979 rtx addr
, mem
, reg
;
10981 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10982 GEN_INT (info
->vrsave_save_offset
+ sp_offset
));
10983 mem
= gen_rtx_MEM (SImode
, addr
);
10984 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10985 reg
= gen_rtx_REG (SImode
, 12);
10986 emit_move_insn (reg
, mem
);
10988 emit_insn (generate_set_vrsave (reg
, info
, 1));
10991 /* Get the old lr if we saved it. */
10992 if (info
->lr_save_p
)
10994 rtx mem
= gen_frame_mem_offset (Pmode
, frame_reg_rtx
,
10995 info
->lr_save_offset
+ sp_offset
);
10997 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10999 emit_move_insn (gen_rtx_REG (Pmode
, 0), mem
);
11002 /* Get the old cr if we saved it. */
11003 if (info
->cr_save_p
)
11005 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11006 GEN_INT (info
->cr_save_offset
+ sp_offset
));
11007 rtx mem
= gen_rtx_MEM (SImode
, addr
);
11009 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11011 emit_move_insn (gen_rtx_REG (SImode
, 12), mem
);
11014 /* Set LR here to try to overlap restores below. */
11015 if (info
->lr_save_p
)
11016 emit_move_insn (gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
),
11017 gen_rtx_REG (Pmode
, 0));
11019 /* Load exception handler data registers, if needed. */
11020 if (current_function_calls_eh_return
)
11022 unsigned int i
, regno
;
11028 regno
= EH_RETURN_DATA_REGNO (i
);
11029 if (regno
== INVALID_REGNUM
)
11032 mem
= gen_frame_mem_offset (reg_mode
, frame_reg_rtx
,
11033 info
->ehrd_offset
+ sp_offset
11034 + reg_size
* (int) i
);
11035 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11037 emit_move_insn (gen_rtx_REG (reg_mode
, regno
), mem
);
11041 /* Restore GPRs. This is done as a PARALLEL if we are using
11042 the load-multiple instructions. */
11043 if (using_load_multiple
)
11046 p
= rtvec_alloc (32 - info
->first_gp_reg_save
);
11047 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
11049 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11050 GEN_INT (info
->gp_save_offset
11053 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
11055 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11058 gen_rtx_SET (VOIDmode
,
11059 gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
),
11062 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
11065 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
11066 if ((regs_ever_live
[info
->first_gp_reg_save
+i
]
11067 && ! call_used_regs
[info
->first_gp_reg_save
+i
])
11068 || (i
+info
->first_gp_reg_save
== RS6000_PIC_OFFSET_TABLE_REGNUM
11069 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
!= 0)
11070 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))))
11072 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11073 GEN_INT (info
->gp_save_offset
11076 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
11078 /* Restore 64-bit quantities for SPE. */
11079 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
11081 int offset
= info
->spe_gp_save_offset
+ sp_offset
+ 8 * i
;
11084 if (!SPE_CONST_OFFSET_OK (offset
))
11086 b
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
11087 emit_move_insn (b
, GEN_INT (offset
));
11090 b
= GEN_INT (offset
);
11092 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, b
);
11093 mem
= gen_rtx_MEM (V2SImode
, addr
);
11096 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11098 emit_move_insn (gen_rtx_REG (reg_mode
,
11099 info
->first_gp_reg_save
+ i
), mem
);
11102 /* Restore fpr's if we need to do it without calling a function. */
11103 if (restoring_FPRs_inline
)
11104 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
11105 if ((regs_ever_live
[info
->first_fp_reg_save
+i
]
11106 && ! call_used_regs
[info
->first_fp_reg_save
+i
]))
11109 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11110 GEN_INT (info
->fp_save_offset
11113 mem
= gen_rtx_MEM (DFmode
, addr
);
11114 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11116 emit_move_insn (gen_rtx_REG (DFmode
,
11117 info
->first_fp_reg_save
+ i
),
11121 /* If we saved cr, restore it here. Just those that were used. */
11122 if (info
->cr_save_p
)
11124 rtx r12_rtx
= gen_rtx_REG (SImode
, 12);
11127 if (using_mfcr_multiple
)
11129 for (i
= 0; i
< 8; i
++)
11130 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
11136 if (using_mfcr_multiple
&& count
> 1)
11141 p
= rtvec_alloc (count
);
11144 for (i
= 0; i
< 8; i
++)
11145 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
11147 rtvec r
= rtvec_alloc (2);
11148 RTVEC_ELT (r
, 0) = r12_rtx
;
11149 RTVEC_ELT (r
, 1) = GEN_INT (1 << (7-i
));
11150 RTVEC_ELT (p
, ndx
) =
11151 gen_rtx_SET (VOIDmode
, gen_rtx_REG (CCmode
, CR0_REGNO
+i
),
11152 gen_rtx_UNSPEC (CCmode
, r
, 20));
11155 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
11160 for (i
= 0; i
< 8; i
++)
11161 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
11163 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode
,
11169 /* If this is V.4, unwind the stack pointer after all of the loads
11170 have been done. We need to emit a block here so that sched
11171 doesn't decide to move the sp change before the register restores
11172 (which may not have any obvious dependency on the stack). This
11173 doesn't hurt performance, because there is no scheduling that can
11174 be done after this point. */
11175 if (DEFAULT_ABI
== ABI_V4
)
11177 if (frame_reg_rtx
!= sp_reg_rtx
)
11178 rs6000_emit_stack_tie ();
11180 if (use_backchain_to_restore_sp
)
11182 emit_move_insn (sp_reg_rtx
, frame_reg_rtx
);
11184 else if (sp_offset
!= 0)
11186 emit_insn (TARGET_32BIT
11187 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
,
11188 GEN_INT (sp_offset
))
11189 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
,
11190 GEN_INT (sp_offset
)));
11194 if (current_function_calls_eh_return
)
11196 rtx sa
= EH_RETURN_STACKADJ_RTX
;
11197 emit_insn (TARGET_32BIT
11198 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
, sa
)
11199 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
, sa
));
11205 if (! restoring_FPRs_inline
)
11206 p
= rtvec_alloc (3 + 64 - info
->first_fp_reg_save
);
11208 p
= rtvec_alloc (2);
11210 RTVEC_ELT (p
, 0) = gen_rtx_RETURN (VOIDmode
);
11211 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
,
11212 gen_rtx_REG (Pmode
,
11213 LINK_REGISTER_REGNUM
));
11215 /* If we have to restore more than two FP registers, branch to the
11216 restore function. It will return to our caller. */
11217 if (! restoring_FPRs_inline
)
11221 const char *alloc_rname
;
11223 sprintf (rname
, "%s%d%s", RESTORE_FP_PREFIX
,
11224 info
->first_fp_reg_save
- 32, RESTORE_FP_SUFFIX
);
11225 alloc_rname
= ggc_strdup (rname
);
11226 RTVEC_ELT (p
, 2) = gen_rtx_USE (VOIDmode
,
11227 gen_rtx_SYMBOL_REF (Pmode
,
11230 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
11233 addr
= gen_rtx_PLUS (Pmode
, sp_reg_rtx
,
11234 GEN_INT (info
->fp_save_offset
+ 8*i
));
11235 mem
= gen_rtx_MEM (DFmode
, addr
);
11236 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11238 RTVEC_ELT (p
, i
+3) =
11239 gen_rtx_SET (VOIDmode
,
11240 gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
),
11245 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
11249 /* Write function epilogue. */
11252 rs6000_output_function_epilogue (file
, size
)
11254 HOST_WIDE_INT size ATTRIBUTE_UNUSED
;
11256 rs6000_stack_t
*info
= rs6000_stack_info ();
11258 if (! HAVE_epilogue
)
11260 rtx insn
= get_last_insn ();
11261 /* If the last insn was a BARRIER, we don't have to write anything except
11262 the trace table. */
11263 if (GET_CODE (insn
) == NOTE
)
11264 insn
= prev_nonnote_insn (insn
);
11265 if (insn
== 0 || GET_CODE (insn
) != BARRIER
)
11267 /* This is slightly ugly, but at least we don't have two
11268 copies of the epilogue-emitting code. */
11271 /* A NOTE_INSN_DELETED is supposed to be at the start
11272 and end of the "toplevel" insn chain. */
11273 emit_note (0, NOTE_INSN_DELETED
);
11274 rs6000_emit_epilogue (FALSE
);
11275 emit_note (0, NOTE_INSN_DELETED
);
11277 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11281 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
11283 INSN_ADDRESSES_NEW (insn
, addr
);
11288 if (TARGET_DEBUG_STACK
)
11289 debug_rtx_list (get_insns (), 100);
11290 final (get_insns (), file
, FALSE
, FALSE
);
11295 /* Output a traceback table here. See /usr/include/sys/debug.h for info
11298 We don't output a traceback table if -finhibit-size-directive was
11299 used. The documentation for -finhibit-size-directive reads
11300 ``don't output a @code{.size} assembler directive, or anything
11301 else that would cause trouble if the function is split in the
11302 middle, and the two halves are placed at locations far apart in
11303 memory.'' The traceback table has this property, since it
11304 includes the offset from the start of the function to the
11305 traceback table itself.
11307 System V.4 Powerpc's (and the embedded ABI derived from it) use a
11308 different traceback table. */
11309 if (DEFAULT_ABI
== ABI_AIX
&& ! flag_inhibit_size_directive
11310 && rs6000_traceback
!= traceback_none
)
11312 const char *fname
= NULL
;
11313 const char *language_string
= lang_hooks
.name
;
11314 int fixed_parms
= 0, float_parms
= 0, parm_info
= 0;
11316 int optional_tbtab
;
11318 if (rs6000_traceback
== traceback_full
)
11319 optional_tbtab
= 1;
11320 else if (rs6000_traceback
== traceback_part
)
11321 optional_tbtab
= 0;
11323 optional_tbtab
= !optimize_size
&& !TARGET_ELF
;
11325 if (optional_tbtab
)
11327 fname
= XSTR (XEXP (DECL_RTL (current_function_decl
), 0), 0);
11328 while (*fname
== '.') /* V.4 encodes . in the name */
11331 /* Need label immediately before tbtab, so we can compute
11332 its offset from the function start. */
11333 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LT");
11334 ASM_OUTPUT_LABEL (file
, fname
);
11337 /* The .tbtab pseudo-op can only be used for the first eight
11338 expressions, since it can't handle the possibly variable
11339 length fields that follow. However, if you omit the optional
11340 fields, the assembler outputs zeros for all optional fields
11341 anyways, giving each variable length field is minimum length
11342 (as defined in sys/debug.h). Thus we can not use the .tbtab
11343 pseudo-op at all. */
11345 /* An all-zero word flags the start of the tbtab, for debuggers
11346 that have to find it by searching forward from the entry
11347 point or from the current pc. */
11348 fputs ("\t.long 0\n", file
);
11350 /* Tbtab format type. Use format type 0. */
11351 fputs ("\t.byte 0,", file
);
11353 /* Language type. Unfortunately, there doesn't seem to be any
11354 official way to get this info, so we use language_string. C
11355 is 0. C++ is 9. No number defined for Obj-C, so use the
11356 value for C for now. There is no official value for Java,
11357 although IBM appears to be using 13. There is no official value
11358 for Chill, so we've chosen 44 pseudo-randomly. */
11359 if (! strcmp (language_string
, "GNU C")
11360 || ! strcmp (language_string
, "GNU Objective-C"))
11362 else if (! strcmp (language_string
, "GNU F77"))
11364 else if (! strcmp (language_string
, "GNU Ada"))
11366 else if (! strcmp (language_string
, "GNU Pascal"))
11368 else if (! strcmp (language_string
, "GNU C++"))
11370 else if (! strcmp (language_string
, "GNU Java"))
11372 else if (! strcmp (language_string
, "GNU CHILL"))
11376 fprintf (file
, "%d,", i
);
11378 /* 8 single bit fields: global linkage (not set for C extern linkage,
11379 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
11380 from start of procedure stored in tbtab, internal function, function
11381 has controlled storage, function has no toc, function uses fp,
11382 function logs/aborts fp operations. */
11383 /* Assume that fp operations are used if any fp reg must be saved. */
11384 fprintf (file
, "%d,",
11385 (optional_tbtab
<< 5) | ((info
->first_fp_reg_save
!= 64) << 1));
11387 /* 6 bitfields: function is interrupt handler, name present in
11388 proc table, function calls alloca, on condition directives
11389 (controls stack walks, 3 bits), saves condition reg, saves
11391 /* The `function calls alloca' bit seems to be set whenever reg 31 is
11392 set up as a frame pointer, even when there is no alloca call. */
11393 fprintf (file
, "%d,",
11394 ((optional_tbtab
<< 6)
11395 | ((optional_tbtab
& frame_pointer_needed
) << 5)
11396 | (info
->cr_save_p
<< 1)
11397 | (info
->lr_save_p
)));
11399 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
11401 fprintf (file
, "%d,",
11402 (info
->push_p
<< 7) | (64 - info
->first_fp_reg_save
));
11404 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
11405 fprintf (file
, "%d,", (32 - first_reg_to_save ()));
11407 if (optional_tbtab
)
11409 /* Compute the parameter info from the function decl argument
11412 int next_parm_info_bit
= 31;
11414 for (decl
= DECL_ARGUMENTS (current_function_decl
);
11415 decl
; decl
= TREE_CHAIN (decl
))
11417 rtx parameter
= DECL_INCOMING_RTL (decl
);
11418 enum machine_mode mode
= GET_MODE (parameter
);
11420 if (GET_CODE (parameter
) == REG
)
11422 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
)
11428 if (mode
== SFmode
)
11430 else if (mode
== DFmode
|| mode
== TFmode
)
11435 /* If only one bit will fit, don't or in this entry. */
11436 if (next_parm_info_bit
> 0)
11437 parm_info
|= (bits
<< (next_parm_info_bit
- 1));
11438 next_parm_info_bit
-= 2;
11442 fixed_parms
+= ((GET_MODE_SIZE (mode
)
11443 + (UNITS_PER_WORD
- 1))
11445 next_parm_info_bit
-= 1;
11451 /* Number of fixed point parameters. */
11452 /* This is actually the number of words of fixed point parameters; thus
11453 an 8 byte struct counts as 2; and thus the maximum value is 8. */
11454 fprintf (file
, "%d,", fixed_parms
);
11456 /* 2 bitfields: number of floating point parameters (7 bits), parameters
11458 /* This is actually the number of fp registers that hold parameters;
11459 and thus the maximum value is 13. */
11460 /* Set parameters on stack bit if parameters are not in their original
11461 registers, regardless of whether they are on the stack? Xlc
11462 seems to set the bit when not optimizing. */
11463 fprintf (file
, "%d\n", ((float_parms
<< 1) | (! optimize
)));
11465 if (! optional_tbtab
)
11468 /* Optional fields follow. Some are variable length. */
11470 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
11471 11 double float. */
11472 /* There is an entry for each parameter in a register, in the order that
11473 they occur in the parameter list. Any intervening arguments on the
11474 stack are ignored. If the list overflows a long (max possible length
11475 34 bits) then completely leave off all elements that don't fit. */
11476 /* Only emit this long if there was at least one parameter. */
11477 if (fixed_parms
|| float_parms
)
11478 fprintf (file
, "\t.long %d\n", parm_info
);
11480 /* Offset from start of code to tb table. */
11481 fputs ("\t.long ", file
);
11482 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LT");
11484 RS6000_OUTPUT_BASENAME (file
, fname
);
11486 assemble_name (file
, fname
);
11488 fputs ("-.", file
);
11490 RS6000_OUTPUT_BASENAME (file
, fname
);
11492 assemble_name (file
, fname
);
11496 /* Interrupt handler mask. */
11497 /* Omit this long, since we never set the interrupt handler bit
11500 /* Number of CTL (controlled storage) anchors. */
11501 /* Omit this long, since the has_ctl bit is never set above. */
11503 /* Displacement into stack of each CTL anchor. */
11504 /* Omit this list of longs, because there are no CTL anchors. */
11506 /* Length of function name. */
11509 fprintf (file
, "\t.short %d\n", (int) strlen (fname
));
11511 /* Function name. */
11512 assemble_string (fname
, strlen (fname
));
11514 /* Register for alloca automatic storage; this is always reg 31.
11515 Only emit this if the alloca bit was set above. */
11516 if (frame_pointer_needed
)
11517 fputs ("\t.byte 31\n", file
);
11519 fputs ("\t.align 2\n", file
);
11523 /* A C compound statement that outputs the assembler code for a thunk
11524 function, used to implement C++ virtual function calls with
11525 multiple inheritance. The thunk acts as a wrapper around a virtual
11526 function, adjusting the implicit object parameter before handing
11527 control off to the real function.
11529 First, emit code to add the integer DELTA to the location that
11530 contains the incoming first argument. Assume that this argument
11531 contains a pointer, and is the one used to pass the `this' pointer
11532 in C++. This is the incoming argument *before* the function
11533 prologue, e.g. `%o0' on a sparc. The addition must preserve the
11534 values of all other incoming arguments.
11536 After the addition, emit code to jump to FUNCTION, which is a
11537 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
11538 not touch the return address. Hence returning from FUNCTION will
11539 return to whoever called the current `thunk'.
11541 The effect must be as if FUNCTION had been called directly with the
11542 adjusted first argument. This macro is responsible for emitting
11543 all of the code for a thunk function; output_function_prologue()
11544 and output_function_epilogue() are not invoked.
11546 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
11547 been extracted from it.) It might possibly be useful on some
11548 targets, but probably not.
11550 If you do not define this macro, the target-independent code in the
11551 C++ frontend will generate a less efficient heavyweight thunk that
11552 calls FUNCTION instead of jumping to it. The generic approach does
11553 not support varargs. */
11556 rs6000_output_mi_thunk (file
, thunk_fndecl
, delta
, vcall_offset
, function
)
11558 tree thunk_fndecl ATTRIBUTE_UNUSED
;
11559 HOST_WIDE_INT delta
;
11560 HOST_WIDE_INT vcall_offset
;
11563 rtx
this, insn
, funexp
;
11565 reload_completed
= 1;
11566 no_new_pseudos
= 1;
11568 /* Mark the end of the (empty) prologue. */
11569 emit_note (NULL
, NOTE_INSN_PROLOGUE_END
);
11571 /* Find the "this" pointer. If the function returns a structure,
11572 the structure return pointer is in r3. */
11573 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function
))))
11574 this = gen_rtx_REG (Pmode
, 4);
11576 this = gen_rtx_REG (Pmode
, 3);
11578 /* Apply the constant offset, if required. */
11581 rtx delta_rtx
= GEN_INT (delta
);
11582 emit_insn (TARGET_32BIT
11583 ? gen_addsi3 (this, this, delta_rtx
)
11584 : gen_adddi3 (this, this, delta_rtx
));
11587 /* Apply the offset from the vtable, if required. */
11590 rtx vcall_offset_rtx
= GEN_INT (vcall_offset
);
11591 rtx tmp
= gen_rtx_REG (Pmode
, 12);
11593 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, this));
11594 emit_insn (TARGET_32BIT
11595 ? gen_addsi3 (tmp
, tmp
, vcall_offset_rtx
)
11596 : gen_adddi3 (tmp
, tmp
, vcall_offset_rtx
));
11597 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, tmp
));
11598 emit_insn (TARGET_32BIT
11599 ? gen_addsi3 (this, this, tmp
)
11600 : gen_adddi3 (this, this, tmp
));
11603 /* Generate a tail call to the target function. */
11604 if (!TREE_USED (function
))
11606 assemble_external (function
);
11607 TREE_USED (function
) = 1;
11609 funexp
= XEXP (DECL_RTL (function
), 0);
11611 SYMBOL_REF_FLAG (funexp
) = 0;
11612 if (current_file_function_operand (funexp
, VOIDmode
)
11613 && (! lookup_attribute ("longcall",
11614 TYPE_ATTRIBUTES (TREE_TYPE (function
)))
11615 || lookup_attribute ("shortcall",
11616 TYPE_ATTRIBUTES (TREE_TYPE (function
)))))
11617 SYMBOL_REF_FLAG (funexp
) = 1;
11619 funexp
= gen_rtx_MEM (FUNCTION_MODE
, funexp
);
11622 if (MACHOPIC_INDIRECT
)
11623 funexp
= machopic_indirect_call_target (funexp
);
11626 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
11627 generate sibcall RTL explicitly to avoid constraint abort. */
11628 insn
= emit_call_insn (
11629 gen_rtx_PARALLEL (VOIDmode
,
11631 gen_rtx_CALL (VOIDmode
,
11632 funexp
, const0_rtx
),
11633 gen_rtx_USE (VOIDmode
, const0_rtx
),
11634 gen_rtx_USE (VOIDmode
,
11635 gen_rtx_REG (SImode
,
11636 LINK_REGISTER_REGNUM
)),
11637 gen_rtx_RETURN (VOIDmode
))));
11638 SIBLING_CALL_P (insn
) = 1;
11641 /* Run just enough of rest_of_compilation to get the insns emitted.
11642 There's not really enough bulk here to make other passes such as
11643 instruction scheduling worth while. Note that use_thunk calls
11644 assemble_start_function and assemble_end_function. */
11645 insn
= get_insns ();
11646 shorten_branches (insn
);
11647 final_start_function (insn
, file
, 1);
11648 final (insn
, file
, 1, 0);
11649 final_end_function ();
11651 reload_completed
= 0;
11652 no_new_pseudos
= 0;
/* A quick summary of the various types of 'constant-pool tables'
   used by the rs6000 back end:

   Target       Flags           Name            One table per
   AIX          (none)          AIX TOC         object file
   AIX          -mfull-toc      AIX TOC         object file
   AIX          -mminimal-toc   AIX minimal TOC translation unit
   SVR4/EABI    (none)          SVR4 SDATA      object file
   SVR4/EABI    -fpic           SVR4 pic        object file
   SVR4/EABI    -fPIC           SVR4 PIC        translation unit
   SVR4/EABI    -mrelocatable   EABI TOC        function
   SVR4/EABI    -maix           AIX TOC         object file
   SVR4/EABI    -maix -mminimal-toc
                                AIX minimal TOC translation unit

   Name                 Reg.    Set by  entries       contains:
                                        made by  addrs? fp?    sum?

   AIX TOC              2       crt0    as       Y      option option
   AIX minimal TOC      30      prolog  gcc      Y      Y      option
   SVR4 SDATA           13      crt0    gcc      N      Y      N
   SVR4 pic             30      prolog  ld       Y      not yet N
   SVR4 PIC             30      prolog  gcc      Y      option option
   EABI TOC             30      prolog  gcc      Y      option option

*/
11682 /* Hash functions for the hash table. */
11685 rs6000_hash_constant (k
)
11688 enum rtx_code code
= GET_CODE (k
);
11689 enum machine_mode mode
= GET_MODE (k
);
11690 unsigned result
= (code
<< 3) ^ mode
;
11691 const char *format
;
11694 format
= GET_RTX_FORMAT (code
);
11695 flen
= strlen (format
);
11701 return result
* 1231 + (unsigned) INSN_UID (XEXP (k
, 0));
11704 if (mode
!= VOIDmode
)
11705 return real_hash (CONST_DOUBLE_REAL_VALUE (k
)) * result
;
11717 for (; fidx
< flen
; fidx
++)
11718 switch (format
[fidx
])
11723 const char *str
= XSTR (k
, fidx
);
11724 len
= strlen (str
);
11725 result
= result
* 613 + len
;
11726 for (i
= 0; i
< len
; i
++)
11727 result
= result
* 613 + (unsigned) str
[i
];
11732 result
= result
* 1231 + rs6000_hash_constant (XEXP (k
, fidx
));
11736 result
= result
* 613 + (unsigned) XINT (k
, fidx
);
11739 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT
))
11740 result
= result
* 613 + (unsigned) XWINT (k
, fidx
);
11744 for (i
= 0; i
< sizeof(HOST_WIDE_INT
)/sizeof(unsigned); i
++)
11745 result
= result
* 613 + (unsigned) (XWINT (k
, fidx
)
11757 toc_hash_function (hash_entry
)
11758 const void * hash_entry
;
11760 const struct toc_hash_struct
*thc
=
11761 (const struct toc_hash_struct
*) hash_entry
;
11762 return rs6000_hash_constant (thc
->key
) ^ thc
->key_mode
;
11765 /* Compare H1 and H2 for equivalence. */
11768 toc_hash_eq (h1
, h2
)
11772 rtx r1
= ((const struct toc_hash_struct
*) h1
)->key
;
11773 rtx r2
= ((const struct toc_hash_struct
*) h2
)->key
;
11775 if (((const struct toc_hash_struct
*) h1
)->key_mode
11776 != ((const struct toc_hash_struct
*) h2
)->key_mode
)
11779 return rtx_equal_p (r1
, r2
);
11782 /* These are the names given by the C++ front-end to vtables, and
11783 vtable-like objects. Ideally, this logic should not be here;
11784 instead, there should be some programmatic way of inquiring as
11785 to whether or not an object is a vtable. */
11787 #define VTABLE_NAME_P(NAME) \
11788 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
11789 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
11790 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
11791 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
11794 rs6000_output_symbol_ref (file
, x
)
11798 /* Currently C++ toc references to vtables can be emitted before it
11799 is decided whether the vtable is public or private. If this is
11800 the case, then the linker will eventually complain that there is
11801 a reference to an unknown section. Thus, for vtables only,
11802 we emit the TOC reference to reference the symbol and not the
11804 const char *name
= XSTR (x
, 0);
11806 if (VTABLE_NAME_P (name
))
11808 RS6000_OUTPUT_BASENAME (file
, name
);
11811 assemble_name (file
, name
);
11814 /* Output a TOC entry. We derive the entry name from what is being
11818 output_toc (file
, x
, labelno
, mode
)
11822 enum machine_mode mode
;
11825 const char *name
= buf
;
11826 const char *real_name
;
11833 /* When the linker won't eliminate them, don't output duplicate
11834 TOC entries (this happens on AIX if there is any kind of TOC,
11835 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
11837 if (TARGET_TOC
&& GET_CODE (x
) != LABEL_REF
)
11839 struct toc_hash_struct
*h
;
11842 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
11843 time because GGC is not initialised at that point. */
11844 if (toc_hash_table
== NULL
)
11845 toc_hash_table
= htab_create_ggc (1021, toc_hash_function
,
11846 toc_hash_eq
, NULL
);
11848 h
= ggc_alloc (sizeof (*h
));
11850 h
->key_mode
= mode
;
11851 h
->labelno
= labelno
;
11853 found
= htab_find_slot (toc_hash_table
, h
, 1);
11854 if (*found
== NULL
)
11856 else /* This is indeed a duplicate.
11857 Set this label equal to that label. */
11859 fputs ("\t.set ", file
);
11860 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LC");
11861 fprintf (file
, "%d,", labelno
);
11862 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LC");
11863 fprintf (file
, "%d\n", ((*(const struct toc_hash_struct
**)
11869 /* If we're going to put a double constant in the TOC, make sure it's
11870 aligned properly when strict alignment is on. */
11871 if (GET_CODE (x
) == CONST_DOUBLE
11872 && STRICT_ALIGNMENT
11873 && GET_MODE_BITSIZE (mode
) >= 64
11874 && ! (TARGET_NO_FP_IN_TOC
&& ! TARGET_MINIMAL_TOC
)) {
11875 ASM_OUTPUT_ALIGN (file
, 3);
11878 (*targetm
.asm_out
.internal_label
) (file
, "LC", labelno
);
11880 /* Handle FP constants specially. Note that if we have a minimal
11881 TOC, things we put here aren't actually in the TOC, so we can allow
11883 if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == TFmode
)
11885 REAL_VALUE_TYPE rv
;
11888 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
11889 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv
, k
);
11893 if (TARGET_MINIMAL_TOC
)
11894 fputs (DOUBLE_INT_ASM_OP
, file
);
11896 fprintf (file
, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
11897 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
11898 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
11899 fprintf (file
, "0x%lx%08lx,0x%lx%08lx\n",
11900 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
11901 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
11906 if (TARGET_MINIMAL_TOC
)
11907 fputs ("\t.long ", file
);
11909 fprintf (file
, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
11910 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
11911 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
11912 fprintf (file
, "0x%lx,0x%lx,0x%lx,0x%lx\n",
11913 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
11914 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
11918 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == DFmode
)
11920 REAL_VALUE_TYPE rv
;
11923 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
11924 REAL_VALUE_TO_TARGET_DOUBLE (rv
, k
);
11928 if (TARGET_MINIMAL_TOC
)
11929 fputs (DOUBLE_INT_ASM_OP
, file
);
11931 fprintf (file
, "\t.tc FD_%lx_%lx[TC],",
11932 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
11933 fprintf (file
, "0x%lx%08lx\n",
11934 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
11939 if (TARGET_MINIMAL_TOC
)
11940 fputs ("\t.long ", file
);
11942 fprintf (file
, "\t.tc FD_%lx_%lx[TC],",
11943 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
11944 fprintf (file
, "0x%lx,0x%lx\n",
11945 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
11949 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == SFmode
)
11951 REAL_VALUE_TYPE rv
;
11954 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
11955 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
11959 if (TARGET_MINIMAL_TOC
)
11960 fputs (DOUBLE_INT_ASM_OP
, file
);
11962 fprintf (file
, "\t.tc FS_%lx[TC],", l
& 0xffffffff);
11963 fprintf (file
, "0x%lx00000000\n", l
& 0xffffffff);
11968 if (TARGET_MINIMAL_TOC
)
11969 fputs ("\t.long ", file
);
11971 fprintf (file
, "\t.tc FS_%lx[TC],", l
& 0xffffffff);
11972 fprintf (file
, "0x%lx\n", l
& 0xffffffff);
11976 else if (GET_MODE (x
) == VOIDmode
11977 && (GET_CODE (x
) == CONST_INT
|| GET_CODE (x
) == CONST_DOUBLE
))
11979 unsigned HOST_WIDE_INT low
;
11980 HOST_WIDE_INT high
;
11982 if (GET_CODE (x
) == CONST_DOUBLE
)
11984 low
= CONST_DOUBLE_LOW (x
);
11985 high
= CONST_DOUBLE_HIGH (x
);
11988 #if HOST_BITS_PER_WIDE_INT == 32
11991 high
= (low
& 0x80000000) ? ~0 : 0;
11995 low
= INTVAL (x
) & 0xffffffff;
11996 high
= (HOST_WIDE_INT
) INTVAL (x
) >> 32;
12000 /* TOC entries are always Pmode-sized, but since this
12001 is a bigendian machine then if we're putting smaller
12002 integer constants in the TOC we have to pad them.
12003 (This is still a win over putting the constants in
12004 a separate constant pool, because then we'd have
12005 to have both a TOC entry _and_ the actual constant.)
12007 For a 32-bit target, CONST_INT values are loaded and shifted
12008 entirely within `low' and can be stored in one TOC entry. */
12010 if (TARGET_64BIT
&& POINTER_SIZE
< GET_MODE_BITSIZE (mode
))
12011 abort ();/* It would be easy to make this work, but it doesn't now. */
12013 if (POINTER_SIZE
> GET_MODE_BITSIZE (mode
))
12015 #if HOST_BITS_PER_WIDE_INT == 32
12016 lshift_double (low
, high
, POINTER_SIZE
- GET_MODE_BITSIZE (mode
),
12017 POINTER_SIZE
, &low
, &high
, 0);
12020 low
<<= POINTER_SIZE
- GET_MODE_BITSIZE (mode
);
12021 high
= (HOST_WIDE_INT
) low
>> 32;
12028 if (TARGET_MINIMAL_TOC
)
12029 fputs (DOUBLE_INT_ASM_OP
, file
);
12031 fprintf (file
, "\t.tc ID_%lx_%lx[TC],",
12032 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
12033 fprintf (file
, "0x%lx%08lx\n",
12034 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
12039 if (POINTER_SIZE
< GET_MODE_BITSIZE (mode
))
12041 if (TARGET_MINIMAL_TOC
)
12042 fputs ("\t.long ", file
);
12044 fprintf (file
, "\t.tc ID_%lx_%lx[TC],",
12045 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
12046 fprintf (file
, "0x%lx,0x%lx\n",
12047 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
12051 if (TARGET_MINIMAL_TOC
)
12052 fputs ("\t.long ", file
);
12054 fprintf (file
, "\t.tc IS_%lx[TC],", (long) low
& 0xffffffff);
12055 fprintf (file
, "0x%lx\n", (long) low
& 0xffffffff);
12061 if (GET_CODE (x
) == CONST
)
12063 if (GET_CODE (XEXP (x
, 0)) != PLUS
)
12066 base
= XEXP (XEXP (x
, 0), 0);
12067 offset
= INTVAL (XEXP (XEXP (x
, 0), 1));
12070 if (GET_CODE (base
) == SYMBOL_REF
)
12071 name
= XSTR (base
, 0);
12072 else if (GET_CODE (base
) == LABEL_REF
)
12073 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (XEXP (base
, 0)));
12074 else if (GET_CODE (base
) == CODE_LABEL
)
12075 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (base
));
12079 real_name
= (*targetm
.strip_name_encoding
) (name
);
12080 if (TARGET_MINIMAL_TOC
)
12081 fputs (TARGET_32BIT
? "\t.long " : DOUBLE_INT_ASM_OP
, file
);
12084 fprintf (file
, "\t.tc %s", real_name
);
12087 fprintf (file
, ".N%d", - offset
);
12089 fprintf (file
, ".P%d", offset
);
12091 fputs ("[TC],", file
);
12094 /* Currently C++ toc references to vtables can be emitted before it
12095 is decided whether the vtable is public or private. If this is
12096 the case, then the linker will eventually complain that there is
12097 a TOC reference to an unknown section. Thus, for vtables only,
12098 we emit the TOC reference to reference the symbol and not the
12100 if (VTABLE_NAME_P (name
))
12102 RS6000_OUTPUT_BASENAME (file
, name
);
12104 fprintf (file
, "%d", offset
);
12105 else if (offset
> 0)
12106 fprintf (file
, "+%d", offset
);
12109 output_addr_const (file
, x
);
12113 /* Output an assembler pseudo-op to write an ASCII string of N characters
12114 starting at P to FILE.
12116 On the RS/6000, we have to do this using the .byte operation and
12117 write out special characters outside the quoted string.
12118 Also, the assembler is broken; very long strings are truncated,
12119 so we must artificially break them up early. */
12122 output_ascii (file
, p
, n
)
12128 int i
, count_string
;
12129 const char *for_string
= "\t.byte \"";
12130 const char *for_decimal
= "\t.byte ";
12131 const char *to_close
= NULL
;
12134 for (i
= 0; i
< n
; i
++)
12137 if (c
>= ' ' && c
< 0177)
12140 fputs (for_string
, file
);
12143 /* Write two quotes to get one. */
12151 for_decimal
= "\"\n\t.byte ";
12155 if (count_string
>= 512)
12157 fputs (to_close
, file
);
12159 for_string
= "\t.byte \"";
12160 for_decimal
= "\t.byte ";
12168 fputs (for_decimal
, file
);
12169 fprintf (file
, "%d", c
);
12171 for_string
= "\n\t.byte \"";
12172 for_decimal
= ", ";
12178 /* Now close the string if we have written one. Then end the line. */
12180 fputs (to_close
, file
);
12183 /* Generate a unique section name for FILENAME for a section type
12184 represented by SECTION_DESC. Output goes into BUF.
12186 SECTION_DESC can be any string, as long as it is different for each
12187 possible section type.
12189 We name the section in the same manner as xlc. The name begins with an
12190 underscore followed by the filename (after stripping any leading directory
12191 names) with the last period replaced by the string SECTION_DESC. If
12192 FILENAME does not contain a period, SECTION_DESC is appended to the end of
12196 rs6000_gen_section_name (buf
, filename
, section_desc
)
12198 const char *filename
;
12199 const char *section_desc
;
12201 const char *q
, *after_last_slash
, *last_period
= 0;
12205 after_last_slash
= filename
;
12206 for (q
= filename
; *q
; q
++)
12209 after_last_slash
= q
+ 1;
12210 else if (*q
== '.')
12214 len
= strlen (after_last_slash
) + strlen (section_desc
) + 2;
12215 *buf
= (char *) xmalloc (len
);
12220 for (q
= after_last_slash
; *q
; q
++)
12222 if (q
== last_period
)
12224 strcpy (p
, section_desc
);
12225 p
+= strlen (section_desc
);
12229 else if (ISALNUM (*q
))
12233 if (last_period
== 0)
12234 strcpy (p
, section_desc
);
12239 /* Emit profile function. */
12242 output_profile_hook (labelno
)
12243 int labelno ATTRIBUTE_UNUSED
;
12245 if (TARGET_PROFILE_KERNEL
)
12248 if (DEFAULT_ABI
== ABI_AIX
)
12250 #ifdef NO_PROFILE_COUNTERS
12251 emit_library_call (init_one_libfunc (RS6000_MCOUNT
), 0, VOIDmode
, 0);
12254 const char *label_name
;
12257 ASM_GENERATE_INTERNAL_LABEL (buf
, "LP", labelno
);
12258 label_name
= (*targetm
.strip_name_encoding
) (ggc_strdup (buf
));
12259 fun
= gen_rtx_SYMBOL_REF (Pmode
, label_name
);
12261 emit_library_call (init_one_libfunc (RS6000_MCOUNT
), 0, VOIDmode
, 1,
12265 else if (DEFAULT_ABI
== ABI_DARWIN
)
12267 const char *mcount_name
= RS6000_MCOUNT
;
12268 int caller_addr_regno
= LINK_REGISTER_REGNUM
;
12270 /* Be conservative and always set this, at least for now. */
12271 current_function_uses_pic_offset_table
= 1;
12274 /* For PIC code, set up a stub and collect the caller's address
12275 from r0, which is where the prologue puts it. */
12276 if (MACHOPIC_INDIRECT
)
12278 mcount_name
= machopic_stub_name (mcount_name
);
12279 if (current_function_uses_pic_offset_table
)
12280 caller_addr_regno
= 0;
12283 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, mcount_name
),
12285 gen_rtx_REG (Pmode
, caller_addr_regno
), Pmode
);
12289 /* Write function profiler code. */
12292 output_function_profiler (file
, labelno
)
12299 switch (DEFAULT_ABI
)
12306 /* Fall through. */
12308 case ABI_AIX_NODESC
:
12311 warning ("no profiling of 64-bit code for this ABI");
12314 ASM_GENERATE_INTERNAL_LABEL (buf
, "LP", labelno
);
12315 fprintf (file
, "\tmflr %s\n", reg_names
[0]);
12318 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file
);
12319 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12320 reg_names
[0], save_lr
, reg_names
[1]);
12321 asm_fprintf (file
, "\tmflr %s\n", reg_names
[12]);
12322 asm_fprintf (file
, "\t{l|lwz} %s,", reg_names
[0]);
12323 assemble_name (file
, buf
);
12324 asm_fprintf (file
, "@got(%s)\n", reg_names
[12]);
12326 else if (flag_pic
> 1)
12328 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12329 reg_names
[0], save_lr
, reg_names
[1]);
12330 /* Now, we need to get the address of the label. */
12331 fputs ("\tbl 1f\n\t.long ", file
);
12332 assemble_name (file
, buf
);
12333 fputs ("-.\n1:", file
);
12334 asm_fprintf (file
, "\tmflr %s\n", reg_names
[11]);
12335 asm_fprintf (file
, "\t{l|lwz} %s,0(%s)\n",
12336 reg_names
[0], reg_names
[11]);
12337 asm_fprintf (file
, "\t{cax|add} %s,%s,%s\n",
12338 reg_names
[0], reg_names
[0], reg_names
[11]);
12342 asm_fprintf (file
, "\t{liu|lis} %s,", reg_names
[12]);
12343 assemble_name (file
, buf
);
12344 fputs ("@ha\n", file
);
12345 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12346 reg_names
[0], save_lr
, reg_names
[1]);
12347 asm_fprintf (file
, "\t{cal|la} %s,", reg_names
[0]);
12348 assemble_name (file
, buf
);
12349 asm_fprintf (file
, "@l(%s)\n", reg_names
[12]);
12352 if (current_function_needs_context
&& DEFAULT_ABI
== ABI_AIX_NODESC
)
12354 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12355 reg_names
[STATIC_CHAIN_REGNUM
],
12357 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
12358 asm_fprintf (file
, "\t{l|lwz} %s,%d(%s)\n",
12359 reg_names
[STATIC_CHAIN_REGNUM
],
12363 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
12364 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
12369 if (!TARGET_PROFILE_KERNEL
)
12371 /* Don't do anything, done in output_profile_hook (). */
12378 asm_fprintf (file
, "\tmflr %s\n", reg_names
[0]);
12379 asm_fprintf (file
, "\tstd %s,16(%s)\n", reg_names
[0], reg_names
[1]);
12381 if (current_function_needs_context
)
12383 asm_fprintf (file
, "\tstd %s,24(%s)\n",
12384 reg_names
[STATIC_CHAIN_REGNUM
], reg_names
[1]);
12385 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
12386 asm_fprintf (file
, "\tld %s,24(%s)\n",
12387 reg_names
[STATIC_CHAIN_REGNUM
], reg_names
[1]);
12390 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
12398 rs6000_use_dfa_pipeline_interface ()
12403 /* Power4 load update and store update instructions are cracked into a
12404 load or store and an integer insn which are executed in the same cycle.
12405 Branches have their own dispatch slot which does not count against the
12406 GCC issue rate, but it changes the program flow so there are no other
12407 instructions to issue in this cycle. */
12410 rs6000_variable_issue (stream
, verbose
, insn
, more
)
12411 FILE *stream ATTRIBUTE_UNUSED
;
12412 int verbose ATTRIBUTE_UNUSED
;
12416 if (GET_CODE (PATTERN (insn
)) == USE
12417 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
12420 if (rs6000_cpu
== PROCESSOR_POWER4
)
12422 enum attr_type type
= get_attr_type (insn
);
12423 if (type
== TYPE_LOAD_EXT_U
|| type
== TYPE_LOAD_EXT_UX
12424 || type
== TYPE_LOAD_UX
|| type
== TYPE_STORE_UX
12425 || type
== TYPE_FPLOAD_UX
|| type
== TYPE_FPSTORE_UX
)
12427 else if (type
== TYPE_LOAD_U
|| type
== TYPE_STORE_U
12428 || type
== TYPE_FPLOAD_U
|| type
== TYPE_FPSTORE_U
12429 || type
== TYPE_LOAD_EXT
|| type
== TYPE_DELAYED_CR
)
12430 return more
> 2 ? more
- 2 : 0;
12436 /* Adjust the cost of a scheduling dependency. Return the new cost of
12437 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
12440 rs6000_adjust_cost (insn
, link
, dep_insn
, cost
)
12443 rtx dep_insn ATTRIBUTE_UNUSED
;
12446 if (! recog_memoized (insn
))
12449 if (REG_NOTE_KIND (link
) != 0)
12452 if (REG_NOTE_KIND (link
) == 0)
12454 /* Data dependency; DEP_INSN writes a register that INSN reads
12455 some cycles later. */
12456 switch (get_attr_type (insn
))
12459 /* Tell the first scheduling pass about the latency between
12460 a mtctr and bctr (and mtlr and br/blr). The first
12461 scheduling pass will not know about this latency since
12462 the mtctr instruction, which has the latency associated
12463 to it, will be generated by reload. */
12464 return TARGET_POWER
? 5 : 4;
12466 /* Leave some extra cycles between a compare and its
12467 dependent branch, to inhibit expensive mispredicts. */
12468 if ((rs6000_cpu_attr
== CPU_PPC603
12469 || rs6000_cpu_attr
== CPU_PPC604
12470 || rs6000_cpu_attr
== CPU_PPC604E
12471 || rs6000_cpu_attr
== CPU_PPC620
12472 || rs6000_cpu_attr
== CPU_PPC630
12473 || rs6000_cpu_attr
== CPU_PPC750
12474 || rs6000_cpu_attr
== CPU_PPC7400
12475 || rs6000_cpu_attr
== CPU_PPC7450
12476 || rs6000_cpu_attr
== CPU_POWER4
)
12477 && recog_memoized (dep_insn
)
12478 && (INSN_CODE (dep_insn
) >= 0)
12479 && (get_attr_type (dep_insn
) == TYPE_CMP
12480 || get_attr_type (dep_insn
) == TYPE_COMPARE
12481 || get_attr_type (dep_insn
) == TYPE_DELAYED_COMPARE
12482 || get_attr_type (dep_insn
) == TYPE_FPCOMPARE
12483 || get_attr_type (dep_insn
) == TYPE_CR_LOGICAL
12484 || get_attr_type (dep_insn
) == TYPE_DELAYED_CR
))
12489 /* Fall out to return default cost. */
12495 /* A C statement (sans semicolon) to update the integer scheduling
12496 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
12497 INSN earlier, increase the priority to execute INSN later. Do not
12498 define this macro if you do not need to adjust the scheduling
12499 priorities of insns. */
12502 rs6000_adjust_priority (insn
, priority
)
12503 rtx insn ATTRIBUTE_UNUSED
;
12506 /* On machines (like the 750) which have asymmetric integer units,
12507 where one integer unit can do multiply and divides and the other
12508 can't, reduce the priority of multiply/divide so it is scheduled
12509 before other integer operations. */
12512 if (! INSN_P (insn
))
12515 if (GET_CODE (PATTERN (insn
)) == USE
)
12518 switch (rs6000_cpu_attr
) {
12520 switch (get_attr_type (insn
))
12527 fprintf (stderr
, "priority was %#x (%d) before adjustment\n",
12528 priority
, priority
);
12529 if (priority
>= 0 && priority
< 0x01000000)
12539 /* Return how many instructions the machine can issue per cycle. */
12542 rs6000_issue_rate ()
12544 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
12545 if (!reload_completed
)
12548 switch (rs6000_cpu_attr
) {
12549 case CPU_RIOS1
: /* ? */
12551 case CPU_PPC601
: /* ? */
12572 /* Length in units of the trampoline for entering a nested function. */
12575 rs6000_trampoline_size ()
12579 switch (DEFAULT_ABI
)
12585 ret
= (TARGET_32BIT
) ? 12 : 24;
12590 case ABI_AIX_NODESC
:
12591 ret
= (TARGET_32BIT
) ? 40 : 48;
12598 /* Emit RTL insns to initialize the variable parts of a trampoline.
12599 FNADDR is an RTX for the address of the function's pure code.
12600 CXT is an RTX for the static chain value for the function. */
12603 rs6000_initialize_trampoline (addr
, fnaddr
, cxt
)
12608 enum machine_mode pmode
= Pmode
;
12609 int regsize
= (TARGET_32BIT
) ? 4 : 8;
12610 rtx ctx_reg
= force_reg (pmode
, cxt
);
12612 switch (DEFAULT_ABI
)
12617 /* Macros to shorten the code expansions below. */
/* NOTE(review): helper macros for the trampoline-initialization code
   below.  Both expand in terms of the enclosing function's local
   `pmode' variable (set from Pmode above), so they are only
   meaningful inside rs6000_initialize_trampoline where they are
   #defined.  MEM_DEREF forms a memory reference at ADDR; MEM_PLUS
   forms one at ADDR + OFFSET via plus_constant.  */
12618 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
12619 #define MEM_PLUS(addr,offset) \
12620 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
12622 /* Under AIX, just build the 3 word function descriptor */
12625 rtx fn_reg
= gen_reg_rtx (pmode
);
12626 rtx toc_reg
= gen_reg_rtx (pmode
);
12627 emit_move_insn (fn_reg
, MEM_DEREF (fnaddr
));
12628 emit_move_insn (toc_reg
, MEM_PLUS (fnaddr
, regsize
));
12629 emit_move_insn (MEM_DEREF (addr
), fn_reg
);
12630 emit_move_insn (MEM_PLUS (addr
, regsize
), toc_reg
);
12631 emit_move_insn (MEM_PLUS (addr
, 2*regsize
), ctx_reg
);
12635 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
12638 case ABI_AIX_NODESC
:
12639 emit_library_call (gen_rtx_SYMBOL_REF (SImode
, "__trampoline_setup"),
12640 FALSE
, VOIDmode
, 4,
12642 GEN_INT (rs6000_trampoline_size ()), SImode
,
12652 /* Table of valid machine attributes. */
12654 const struct attribute_spec rs6000_attribute_table
[] =
12656 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
12657 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute
},
12658 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute
},
12659 { NULL
, 0, 0, false, false, false, NULL
}
12662 /* Handle a "longcall" or "shortcall" attribute; arguments as in
12663 struct attribute_spec.handler. */
12666 rs6000_handle_longcall_attribute (node
, name
, args
, flags
, no_add_attrs
)
12669 tree args ATTRIBUTE_UNUSED
;
12670 int flags ATTRIBUTE_UNUSED
;
12671 bool *no_add_attrs
;
12673 if (TREE_CODE (*node
) != FUNCTION_TYPE
12674 && TREE_CODE (*node
) != FIELD_DECL
12675 && TREE_CODE (*node
) != TYPE_DECL
)
12677 warning ("`%s' attribute only applies to functions",
12678 IDENTIFIER_POINTER (name
));
12679 *no_add_attrs
= true;
12685 /* Set longcall attributes on all functions declared when
12686 rs6000_default_long_calls is true. */
12688 rs6000_set_default_type_attributes (type
)
12691 if (rs6000_default_long_calls
12692 && (TREE_CODE (type
) == FUNCTION_TYPE
12693 || TREE_CODE (type
) == METHOD_TYPE
))
12694 TYPE_ATTRIBUTES (type
) = tree_cons (get_identifier ("longcall"),
12696 TYPE_ATTRIBUTES (type
));
12699 /* Return a reference suitable for calling a function with the
12700 longcall attribute. */
12703 rs6000_longcall_ref (call_ref
)
12706 const char *call_name
;
12709 if (GET_CODE (call_ref
) != SYMBOL_REF
)
12712 /* System V adds '.' to the internal name, so skip them. */
12713 call_name
= XSTR (call_ref
, 0);
12714 if (*call_name
== '.')
12716 while (*call_name
== '.')
12719 node
= get_identifier (call_name
);
12720 call_ref
= gen_rtx_SYMBOL_REF (VOIDmode
, IDENTIFIER_POINTER (node
));
12723 return force_reg (Pmode
, call_ref
);
12727 #ifdef USING_ELFOS_H
12729 /* A C statement or statements to switch to the appropriate section
12730 for output of RTX in mode MODE. You can assume that RTX is some
12731 kind of constant in RTL. The argument MODE is redundant except in
12732 the case of a `const_int' rtx. Select the section by calling
12733 `text_section' or one of the alternatives for other sections.
12735 Do not define this macro if you put all constants in the read-only
12739 rs6000_elf_select_rtx_section (mode
, x
, align
)
12740 enum machine_mode mode
;
12742 unsigned HOST_WIDE_INT align
;
12744 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x
, mode
))
12747 default_elf_select_rtx_section (mode
, x
, align
);
12750 /* A C statement or statements to switch to the appropriate
12751 section for output of DECL. DECL is either a `VAR_DECL' node
12752 or a constant of some sort. RELOC indicates whether forming
12753 the initial value of DECL requires link-time relocations. */
12756 rs6000_elf_select_section (decl
, reloc
, align
)
12759 unsigned HOST_WIDE_INT align
;
12761 /* Pretend that we're always building for a shared library when
12762 ABI_AIX, because otherwise we end up with dynamic relocations
12763 in read-only sections. This happens for function pointers,
12764 references to vtables in typeinfo, and probably other cases. */
12765 default_elf_select_section_1 (decl
, reloc
, align
,
12766 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
12769 /* A C statement to build up a unique section name, expressed as a
12770 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
12771 RELOC indicates whether the initial value of EXP requires
12772 link-time relocations. If you do not define this macro, GCC will use
12773 the symbol name prefixed by `.' as the section name. Note - this
12774 macro can now be called for uninitialized data items as well as
12775 initialized data and functions. */
12778 rs6000_elf_unique_section (decl
, reloc
)
12782 /* As above, pretend that we're always building for a shared library
12783 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
12784 default_unique_section_1 (decl
, reloc
,
12785 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
12788 /* If we are referencing a function that is static or is known to be
12789 in this file, make the SYMBOL_REF special. We can use this to indicate
12790 that we can branch to this function without emitting a no-op after the
12791 call. For real AIX calling sequences, we also replace the
12792 function name with the real name (1 or 2 leading .'s), rather than
12793 the function descriptor name. This saves a lot of overriding code
12794 to read the prefixes. */
12797 rs6000_elf_encode_section_info (decl
, first
)
12804 if (TREE_CODE (decl
) == FUNCTION_DECL
)
12806 rtx sym_ref
= XEXP (DECL_RTL (decl
), 0);
12807 if ((*targetm
.binds_local_p
) (decl
))
12808 SYMBOL_REF_FLAG (sym_ref
) = 1;
12810 if (!TARGET_AIX
&& DEFAULT_ABI
== ABI_AIX
)
12812 size_t len1
= (DEFAULT_ABI
== ABI_AIX
) ? 1 : 2;
12813 size_t len2
= strlen (XSTR (sym_ref
, 0));
12814 char *str
= alloca (len1
+ len2
+ 1);
12817 memcpy (str
+ len1
, XSTR (sym_ref
, 0), len2
+ 1);
12819 XSTR (sym_ref
, 0) = ggc_alloc_string (str
, len1
+ len2
);
12822 else if (rs6000_sdata
!= SDATA_NONE
12823 && DEFAULT_ABI
== ABI_V4
12824 && TREE_CODE (decl
) == VAR_DECL
)
12826 rtx sym_ref
= XEXP (DECL_RTL (decl
), 0);
12827 int size
= int_size_in_bytes (TREE_TYPE (decl
));
12828 tree section_name
= DECL_SECTION_NAME (decl
);
12829 const char *name
= (char *)0;
12832 if ((*targetm
.binds_local_p
) (decl
))
12833 SYMBOL_REF_FLAG (sym_ref
) = 1;
12837 if (TREE_CODE (section_name
) == STRING_CST
)
12839 name
= TREE_STRING_POINTER (section_name
);
12840 len
= TREE_STRING_LENGTH (section_name
);
12847 ? ((len
== sizeof (".sdata") - 1
12848 && strcmp (name
, ".sdata") == 0)
12849 || (len
== sizeof (".sdata2") - 1
12850 && strcmp (name
, ".sdata2") == 0)
12851 || (len
== sizeof (".sbss") - 1
12852 && strcmp (name
, ".sbss") == 0)
12853 || (len
== sizeof (".sbss2") - 1
12854 && strcmp (name
, ".sbss2") == 0)
12855 || (len
== sizeof (".PPC.EMB.sdata0") - 1
12856 && strcmp (name
, ".PPC.EMB.sdata0") == 0)
12857 || (len
== sizeof (".PPC.EMB.sbss0") - 1
12858 && strcmp (name
, ".PPC.EMB.sbss0") == 0))
12859 : (size
> 0 && size
<= g_switch_value
))
12861 size_t len
= strlen (XSTR (sym_ref
, 0));
12862 char *str
= alloca (len
+ 2);
12865 memcpy (str
+ 1, XSTR (sym_ref
, 0), len
+ 1);
12866 XSTR (sym_ref
, 0) = ggc_alloc_string (str
, len
+ 1);
12871 static const char *
12872 rs6000_elf_strip_name_encoding (str
)
12875 while (*str
== '*' || *str
== '@')
12881 rs6000_elf_in_small_data_p (decl
)
12884 if (rs6000_sdata
== SDATA_NONE
)
12887 if (TREE_CODE (decl
) == VAR_DECL
&& DECL_SECTION_NAME (decl
))
12889 const char *section
= TREE_STRING_POINTER (DECL_SECTION_NAME (decl
));
12890 if (strcmp (section
, ".sdata") == 0
12891 || strcmp (section
, ".sdata2") == 0
12892 || strcmp (section
, ".sbss") == 0)
12897 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (decl
));
12900 && size
<= g_switch_value
12901 && (rs6000_sdata
!= SDATA_DATA
|| TREE_PUBLIC (decl
)))
12908 #endif /* USING_ELFOS_H */
12911 /* Return a REG that occurs in ADDR with coefficient 1.
12912 ADDR can be effectively incremented by incrementing REG.
12914 r0 is special and we must not select it as an address
12915 register by this routine since our caller will try to
12916 increment the returned register via an "la" instruction. */
12919 find_addr_reg (addr
)
12922 while (GET_CODE (addr
) == PLUS
)
12924 if (GET_CODE (XEXP (addr
, 0)) == REG
12925 && REGNO (XEXP (addr
, 0)) != 0)
12926 addr
= XEXP (addr
, 0);
12927 else if (GET_CODE (XEXP (addr
, 1)) == REG
12928 && REGNO (XEXP (addr
, 1)) != 0)
12929 addr
= XEXP (addr
, 1);
12930 else if (CONSTANT_P (XEXP (addr
, 0)))
12931 addr
= XEXP (addr
, 1);
12932 else if (CONSTANT_P (XEXP (addr
, 1)))
12933 addr
= XEXP (addr
, 0);
12937 if (GET_CODE (addr
) == REG
&& REGNO (addr
) != 0)
12943 rs6000_fatal_bad_address (op
)
12946 fatal_insn ("bad address", op
);
12952 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
12953 reference and a constant. */
12956 symbolic_operand (op
)
12959 switch (GET_CODE (op
))
12966 return (GET_CODE (op
) == SYMBOL_REF
||
12967 (GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
12968 || GET_CODE (XEXP (op
, 0)) == LABEL_REF
)
12969 && GET_CODE (XEXP (op
, 1)) == CONST_INT
);
12976 #ifdef RS6000_LONG_BRANCH
12978 static tree stub_list
= 0;
12980 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
12981 procedure calls to the linked list. */
12984 add_compiler_stub (label_name
, function_name
, line_number
)
12986 tree function_name
;
12989 tree stub
= build_tree_list (function_name
, label_name
);
12990 TREE_TYPE (stub
) = build_int_2 (line_number
, 0);
12991 TREE_CHAIN (stub
) = stub_list
;
/* Accessor macros for entries of the compiler-stub list.  Each entry
   is a tree_list node built by add_compiler_stub above:
   TREE_VALUE holds the stub's label name, TREE_PURPOSE the target
   function's name, and TREE_TYPE a build_int_2 of the source line
   number (hence TREE_INT_CST_LOW to read it back).  */
12995 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
12996 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
12997 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
12999 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
13000 handling procedure calls from the linked list and initializes the
13004 output_compiler_stub ()
13007 char label_buf
[256];
13011 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
13013 fprintf (asm_out_file
,
13014 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub
)));
13016 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13017 if (write_symbols
== DBX_DEBUG
|| write_symbols
== XCOFF_DEBUG
)
13018 fprintf (asm_out_file
, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub
));
13019 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13021 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
))[0] == '*')
13023 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
))+1);
13026 label_buf
[0] = '_';
13027 strcpy (label_buf
+1,
13028 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
)));
13031 strcpy (tmp_buf
, "lis r12,hi16(");
13032 strcat (tmp_buf
, label_buf
);
13033 strcat (tmp_buf
, ")\n\tori r12,r12,lo16(");
13034 strcat (tmp_buf
, label_buf
);
13035 strcat (tmp_buf
, ")\n\tmtctr r12\n\tbctr");
13036 output_asm_insn (tmp_buf
, 0);
13038 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13039 if (write_symbols
== DBX_DEBUG
|| write_symbols
== XCOFF_DEBUG
)
13040 fprintf(asm_out_file
, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub
));
13041 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13047 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
13048 already there or not. */
13051 no_previous_def (function_name
)
13052 tree function_name
;
13055 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
13056 if (function_name
== STUB_FUNCTION_NAME (stub
))
13061 /* GET_PREV_LABEL gets the label name from the previous definition of
13065 get_prev_label (function_name
)
13066 tree function_name
;
13069 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
13070 if (function_name
== STUB_FUNCTION_NAME (stub
))
13071 return STUB_LABEL_NAME (stub
);
13075 /* INSN is either a function call or a millicode call. It may have an
13076 unconditional jump in its delay slot.
13078 CALL_DEST is the routine we are calling. */
13081 output_call (insn
, call_dest
, operand_number
)
13084 int operand_number
;
13086 static char buf
[256];
13087 if (GET_CODE (call_dest
) == SYMBOL_REF
&& TARGET_LONG_BRANCH
&& !flag_pic
)
13090 tree funname
= get_identifier (XSTR (call_dest
, 0));
13092 if (no_previous_def (funname
))
13094 int line_number
= 0;
13095 rtx label_rtx
= gen_label_rtx ();
13096 char *label_buf
, temp_buf
[256];
13097 ASM_GENERATE_INTERNAL_LABEL (temp_buf
, "L",
13098 CODE_LABEL_NUMBER (label_rtx
));
13099 label_buf
= temp_buf
[0] == '*' ? temp_buf
+ 1 : temp_buf
;
13100 labelname
= get_identifier (label_buf
);
13101 for (; insn
&& GET_CODE (insn
) != NOTE
; insn
= PREV_INSN (insn
));
13103 line_number
= NOTE_LINE_NUMBER (insn
);
13104 add_compiler_stub (labelname
, funname
, line_number
);
13107 labelname
= get_prev_label (funname
);
13109 sprintf (buf
, "jbsr %%z%d,%.246s",
13110 operand_number
, IDENTIFIER_POINTER (labelname
));
13115 sprintf (buf
, "bl %%z%d", operand_number
);
13120 #endif /* RS6000_LONG_BRANCH */
13122 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
13124 const char *const symbol_ = (SYMBOL); \
13125 char *buffer_ = (BUF); \
13126 if (symbol_[0] == '"') \
13128 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
13130 else if (name_needs_quotes(symbol_)) \
13132 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
13136 sprintf(buffer_, "L%d$%s", (N), symbol_); \
13141 /* Generate PIC and indirect symbol stubs. */
13144 machopic_output_stub (file
, symb
, stub
)
13146 const char *symb
, *stub
;
13148 unsigned int length
;
13149 char *symbol_name
, *lazy_ptr_name
;
13150 char *local_label_0
;
13151 static int label
= 0;
13153 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
13154 symb
= (*targetm
.strip_name_encoding
) (symb
);
13158 length
= strlen (symb
);
13159 symbol_name
= alloca (length
+ 32);
13160 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name
, symb
, length
);
13162 lazy_ptr_name
= alloca (length
+ 32);
13163 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name
, symb
, length
);
13165 local_label_0
= alloca (length
+ 32);
13166 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0
, symb
, length
, 0);
13169 machopic_picsymbol_stub1_section ();
13171 machopic_symbol_stub1_section ();
13172 fprintf (file
, "\t.align 2\n");
13174 fprintf (file
, "%s:\n", stub
);
13175 fprintf (file
, "\t.indirect_symbol %s\n", symbol_name
);
13179 fprintf (file
, "\tmflr r0\n");
13180 fprintf (file
, "\tbcl 20,31,%s\n", local_label_0
);
13181 fprintf (file
, "%s:\n\tmflr r11\n", local_label_0
);
13182 fprintf (file
, "\taddis r11,r11,ha16(%s-%s)\n",
13183 lazy_ptr_name
, local_label_0
);
13184 fprintf (file
, "\tmtlr r0\n");
13185 fprintf (file
, "\tlwzu r12,lo16(%s-%s)(r11)\n",
13186 lazy_ptr_name
, local_label_0
);
13187 fprintf (file
, "\tmtctr r12\n");
13188 fprintf (file
, "\tbctr\n");
13192 fprintf (file
, "\tlis r11,ha16(%s)\n", lazy_ptr_name
);
13193 fprintf (file
, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name
);
13194 fprintf (file
, "\tmtctr r12\n");
13195 fprintf (file
, "\tbctr\n");
13198 machopic_lazy_symbol_ptr_section ();
13199 fprintf (file
, "%s:\n", lazy_ptr_name
);
13200 fprintf (file
, "\t.indirect_symbol %s\n", symbol_name
);
13201 fprintf (file
, "\t.long dyld_stub_binding_helper\n");
13204 /* Legitimize PIC addresses. If the address is already
13205 position-independent, we return ORIG. Newly generated
13206 position-independent addresses go into a reg. This is REG if non
13207 zero, otherwise we allocate register(s) as necessary. */
/* Nonzero iff the CONST_INT rtx X fits in a signed 16-bit immediate,
   i.e. -0x8000 <= INTVAL (X) < 0x8000 (checked branch-free by biasing
   into unsigned range).  */
13209 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
13212 rs6000_machopic_legitimize_pic_address (orig
, mode
, reg
)
13214 enum machine_mode mode
;
13219 if (reg
== NULL
&& ! reload_in_progress
&& ! reload_completed
)
13220 reg
= gen_reg_rtx (Pmode
);
13222 if (GET_CODE (orig
) == CONST
)
13224 if (GET_CODE (XEXP (orig
, 0)) == PLUS
13225 && XEXP (XEXP (orig
, 0), 0) == pic_offset_table_rtx
)
13228 if (GET_CODE (XEXP (orig
, 0)) == PLUS
)
13231 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig
, 0), 0),
13234 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig
, 0), 1),
13240 if (GET_CODE (offset
) == CONST_INT
)
13242 if (SMALL_INT (offset
))
13243 return plus_constant (base
, INTVAL (offset
));
13244 else if (! reload_in_progress
&& ! reload_completed
)
13245 offset
= force_reg (Pmode
, offset
);
13248 rtx mem
= force_const_mem (Pmode
, orig
);
13249 return machopic_legitimize_pic_address (mem
, Pmode
, reg
);
13252 return gen_rtx (PLUS
, Pmode
, base
, offset
);
13255 /* Fall back on generic machopic code. */
13256 return machopic_legitimize_pic_address (orig
, mode
, reg
);
13259 /* This is just a placeholder to make linking work without having to
13260 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
13261 ever needed for Darwin (not too likely!) this would have to get a
13262 real definition. */
13269 #endif /* TARGET_MACHO */
13272 static unsigned int
13273 rs6000_elf_section_type_flags (decl
, name
, reloc
)
13279 = default_section_type_flags_1 (decl
, name
, reloc
,
13280 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
13282 if (TARGET_RELOCATABLE
)
13283 flags
|= SECTION_WRITE
;
13288 /* Record an element in the table of global constructors. SYMBOL is
13289 a SYMBOL_REF of the function to be called; PRIORITY is a number
13290 between 0 and MAX_INIT_PRIORITY.
13292 This differs from default_named_section_asm_out_constructor in
13293 that we have special handling for -mrelocatable. */
13296 rs6000_elf_asm_out_constructor (symbol
, priority
)
13300 const char *section
= ".ctors";
13303 if (priority
!= DEFAULT_INIT_PRIORITY
)
13305 sprintf (buf
, ".ctors.%.5u",
13306 /* Invert the numbering so the linker puts us in the proper
13307 order; constructors are run from right to left, and the
13308 linker sorts in increasing order. */
13309 MAX_INIT_PRIORITY
- priority
);
13313 named_section_flags (section
, SECTION_WRITE
);
13314 assemble_align (POINTER_SIZE
);
13316 if (TARGET_RELOCATABLE
)
13318 fputs ("\t.long (", asm_out_file
);
13319 output_addr_const (asm_out_file
, symbol
);
13320 fputs (")@fixup\n", asm_out_file
);
13323 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
13327 rs6000_elf_asm_out_destructor (symbol
, priority
)
13331 const char *section
= ".dtors";
13334 if (priority
!= DEFAULT_INIT_PRIORITY
)
13336 sprintf (buf
, ".dtors.%.5u",
13337 /* Invert the numbering so the linker puts us in the proper
13338 order; constructors are run from right to left, and the
13339 linker sorts in increasing order. */
13340 MAX_INIT_PRIORITY
- priority
);
13344 named_section_flags (section
, SECTION_WRITE
);
13345 assemble_align (POINTER_SIZE
);
13347 if (TARGET_RELOCATABLE
)
13349 fputs ("\t.long (", asm_out_file
);
13350 output_addr_const (asm_out_file
, symbol
);
13351 fputs (")@fixup\n", asm_out_file
);
13354 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
13360 rs6000_xcoff_asm_globalize_label (stream
, name
)
13364 fputs (GLOBAL_ASM_OP
, stream
);
13365 RS6000_OUTPUT_BASENAME (stream
, name
);
13366 putc ('\n', stream
);
13370 rs6000_xcoff_asm_named_section (name
, flags
)
13372 unsigned int flags
;
13375 static const char * const suffix
[3] = { "PR", "RO", "RW" };
13377 if (flags
& SECTION_CODE
)
13379 else if (flags
& SECTION_WRITE
)
13384 fprintf (asm_out_file
, "\t.csect %s%s[%s],%u\n",
13385 (flags
& SECTION_CODE
) ? "." : "",
13386 name
, suffix
[smclass
], flags
& SECTION_ENTSIZE
);
13390 rs6000_xcoff_select_section (decl
, reloc
, align
)
13393 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
;
13395 if (decl_readonly_section_1 (decl
, reloc
, 1))
13397 if (TREE_PUBLIC (decl
))
13398 read_only_data_section ();
13400 read_only_private_data_section ();
13404 if (TREE_PUBLIC (decl
))
13407 private_data_section ();
13412 rs6000_xcoff_unique_section (decl
, reloc
)
13414 int reloc ATTRIBUTE_UNUSED
;
13418 /* Use select_section for private and uninitialized data. */
13419 if (!TREE_PUBLIC (decl
)
13420 || DECL_COMMON (decl
)
13421 || DECL_INITIAL (decl
) == NULL_TREE
13422 || DECL_INITIAL (decl
) == error_mark_node
13423 || (flag_zero_initialized_in_bss
13424 && initializer_zerop (DECL_INITIAL (decl
))))
13427 name
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
));
13428 name
= (*targetm
.strip_name_encoding
) (name
);
13429 DECL_SECTION_NAME (decl
) = build_string (strlen (name
), name
);
13432 /* Select section for constant in constant pool.
13434 On RS/6000, all constants are in the private read-only data area.
13435 However, if this is being placed in the TOC it must be output as a
13439 rs6000_xcoff_select_rtx_section (mode
, x
, align
)
13440 enum machine_mode mode
;
13442 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
;
13444 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x
, mode
))
13447 read_only_private_data_section ();
/* Remove any trailing [DS] or the like from the symbol name.  */

static const char *
rs6000_xcoff_strip_name_encoding (name)
     const char *name;
{
  size_t len;

  /* NOTE(review): the leading '*' skip and the fall-through return
     were lost in extraction and restored here — confirm against the
     original file.  */
  if (*name == '*')
    name++;
  len = strlen (name);
  /* A ']' suffix is always a 4-character "[XX]" qualifier; drop it.  */
  if (name[len - 1] == ']')
    return ggc_alloc_string (name, len - 4);
  else
    return name;
}
13466 /* Section attributes. AIX is always PIC. */
13468 static unsigned int
13469 rs6000_xcoff_section_type_flags (decl
, name
, reloc
)
13474 unsigned int align
;
13475 unsigned int flags
= default_section_type_flags_1 (decl
, name
, reloc
, 1);
13477 /* Align to at least UNIT size. */
13478 if (flags
& SECTION_CODE
)
13479 align
= MIN_UNITS_PER_WORD
;
13481 /* Increase alignment of large objects if not already stricter. */
13482 align
= MAX ((DECL_ALIGN (decl
) / BITS_PER_UNIT
),
13483 int_size_in_bytes (TREE_TYPE (decl
)) > MIN_UNITS_PER_WORD
13484 ? UNITS_PER_FP_WORD
: MIN_UNITS_PER_WORD
);
13486 return flags
| (exact_log2 (align
) & SECTION_ENTSIZE
);
13490 rs6000_xcoff_encode_section_info (decl
, first
)
13492 int first ATTRIBUTE_UNUSED
;
13494 if (TREE_CODE (decl
) == FUNCTION_DECL
13495 && (*targetm
.binds_local_p
) (decl
))
13496 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl
), 0)) = 1;
13498 #endif /* TARGET_XCOFF */
13501 /* Cross-module name binding. Darwin does not support overriding
13502 functions at dynamic-link time. */
13505 rs6000_binds_local_p (decl
)
13508 return default_binds_local_p_1 (decl
, 0);
13512 /* Compute a (partial) cost for rtx X. Return true if the complete
13513 cost has been computed, and false if subexpressions should be
13514 scanned. In either case, *TOTAL contains the cost result. */
13517 rs6000_rtx_costs (x
, code
, outer_code
, total
)
13519 int code
, outer_code ATTRIBUTE_UNUSED
;
13524 /* On the RS/6000, if it is valid in the insn, it is free.
13525 So this always returns 0. */
13536 *total
= ((GET_CODE (XEXP (x
, 1)) == CONST_INT
13537 && ((unsigned HOST_WIDE_INT
) (INTVAL (XEXP (x
, 1))
13538 + 0x8000) >= 0x10000)
13539 && ((INTVAL (XEXP (x
, 1)) & 0xffff) != 0))
13540 ? COSTS_N_INSNS (2)
13541 : COSTS_N_INSNS (1));
13547 *total
= ((GET_CODE (XEXP (x
, 1)) == CONST_INT
13548 && (INTVAL (XEXP (x
, 1)) & (~ (HOST_WIDE_INT
) 0xffff)) != 0
13549 && ((INTVAL (XEXP (x
, 1)) & 0xffff) != 0))
13550 ? COSTS_N_INSNS (2)
13551 : COSTS_N_INSNS (1));
13557 *total
= COSTS_N_INSNS (2);
13560 switch (rs6000_cpu
)
13562 case PROCESSOR_RIOS1
:
13563 case PROCESSOR_PPC405
:
13564 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
13565 ? COSTS_N_INSNS (5)
13566 : (INTVAL (XEXP (x
, 1)) >= -256
13567 && INTVAL (XEXP (x
, 1)) <= 255)
13568 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
13571 case PROCESSOR_RS64A
:
13572 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
13573 ? GET_MODE (XEXP (x
, 1)) != DImode
13574 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
13575 : (INTVAL (XEXP (x
, 1)) >= -256
13576 && INTVAL (XEXP (x
, 1)) <= 255)
13577 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
13580 case PROCESSOR_RIOS2
:
13581 case PROCESSOR_MPCCORE
:
13582 case PROCESSOR_PPC604e
:
13583 *total
= COSTS_N_INSNS (2);
13586 case PROCESSOR_PPC601
:
13587 *total
= COSTS_N_INSNS (5);
13590 case PROCESSOR_PPC603
:
13591 case PROCESSOR_PPC7400
:
13592 case PROCESSOR_PPC750
:
13593 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
13594 ? COSTS_N_INSNS (5)
13595 : (INTVAL (XEXP (x
, 1)) >= -256
13596 && INTVAL (XEXP (x
, 1)) <= 255)
13597 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
13600 case PROCESSOR_PPC7450
:
13601 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
13602 ? COSTS_N_INSNS (4)
13603 : COSTS_N_INSNS (3));
13606 case PROCESSOR_PPC403
:
13607 case PROCESSOR_PPC604
:
13608 case PROCESSOR_PPC8540
:
13609 *total
= COSTS_N_INSNS (4);
13612 case PROCESSOR_PPC620
:
13613 case PROCESSOR_PPC630
:
13614 case PROCESSOR_POWER4
:
13615 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
13616 ? GET_MODE (XEXP (x
, 1)) != DImode
13617 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
13618 : (INTVAL (XEXP (x
, 1)) >= -256
13619 && INTVAL (XEXP (x
, 1)) <= 255)
13620 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
13629 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
13630 && exact_log2 (INTVAL (XEXP (x
, 1))) >= 0)
13632 *total
= COSTS_N_INSNS (2);
13639 switch (rs6000_cpu
)
13641 case PROCESSOR_RIOS1
:
13642 *total
= COSTS_N_INSNS (19);
13645 case PROCESSOR_RIOS2
:
13646 *total
= COSTS_N_INSNS (13);
13649 case PROCESSOR_RS64A
:
13650 *total
= (GET_MODE (XEXP (x
, 1)) != DImode
13651 ? COSTS_N_INSNS (65)
13652 : COSTS_N_INSNS (67));
13655 case PROCESSOR_MPCCORE
:
13656 *total
= COSTS_N_INSNS (6);
13659 case PROCESSOR_PPC403
:
13660 *total
= COSTS_N_INSNS (33);
13663 case PROCESSOR_PPC405
:
13664 *total
= COSTS_N_INSNS (35);
13667 case PROCESSOR_PPC601
:
13668 *total
= COSTS_N_INSNS (36);
13671 case PROCESSOR_PPC603
:
13672 *total
= COSTS_N_INSNS (37);
13675 case PROCESSOR_PPC604
:
13676 case PROCESSOR_PPC604e
:
13677 *total
= COSTS_N_INSNS (20);
13680 case PROCESSOR_PPC620
:
13681 case PROCESSOR_PPC630
:
13682 case PROCESSOR_POWER4
:
13683 *total
= (GET_MODE (XEXP (x
, 1)) != DImode
13684 ? COSTS_N_INSNS (21)
13685 : COSTS_N_INSNS (37));
13688 case PROCESSOR_PPC750
:
13689 case PROCESSOR_PPC8540
:
13690 case PROCESSOR_PPC7400
:
13691 *total
= COSTS_N_INSNS (19);
13694 case PROCESSOR_PPC7450
:
13695 *total
= COSTS_N_INSNS (23);
13703 *total
= COSTS_N_INSNS (4);
13707 /* MEM should be slightly more expensive than (plus (reg) (const)) */
13716 /* A C expression returning the cost of moving data from a register of class
13717 CLASS1 to one of CLASS2. */
13720 rs6000_register_move_cost (mode
, from
, to
)
13721 enum machine_mode mode
;
13722 enum reg_class from
, to
;
13724 /* Moves from/to GENERAL_REGS. */
13725 if (reg_classes_intersect_p (to
, GENERAL_REGS
)
13726 || reg_classes_intersect_p (from
, GENERAL_REGS
))
13728 if (! reg_classes_intersect_p (to
, GENERAL_REGS
))
13731 if (from
== FLOAT_REGS
|| from
== ALTIVEC_REGS
)
13732 return (rs6000_memory_move_cost (mode
, from
, 0)
13733 + rs6000_memory_move_cost (mode
, GENERAL_REGS
, 0));
13735 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift...*/
13736 else if (from
== CR_REGS
)
13740 /* A move will cost one instruction per GPR moved. */
13741 return 2 * HARD_REGNO_NREGS (0, mode
);
13744 /* Moving between two similar registers is just one instruction. */
13745 else if (reg_classes_intersect_p (to
, from
))
13746 return mode
== TFmode
? 4 : 2;
13748 /* Everything else has to go through GENERAL_REGS. */
13750 return (rs6000_register_move_cost (mode
, GENERAL_REGS
, to
)
13751 + rs6000_register_move_cost (mode
, from
, GENERAL_REGS
));
13754 /* A C expressions returning the cost of moving data of MODE from a register to
13758 rs6000_memory_move_cost (mode
, class, in
)
13759 enum machine_mode mode
;
13760 enum reg_class
class;
13761 int in ATTRIBUTE_UNUSED
;
13763 if (reg_classes_intersect_p (class, GENERAL_REGS
))
13764 return 4 * HARD_REGNO_NREGS (0, mode
);
13765 else if (reg_classes_intersect_p (class, FLOAT_REGS
))
13766 return 4 * HARD_REGNO_NREGS (32, mode
);
13767 else if (reg_classes_intersect_p (class, ALTIVEC_REGS
))
13768 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO
, mode
);
13770 return 4 + rs6000_register_move_cost (mode
, class, GENERAL_REGS
);
13773 /* Return true if TYPE is of type __ev64_opaque__. */
13776 is_ev64_opaque_type (type
)
13780 && TREE_CODE (type
) == VECTOR_TYPE
13781 && TYPE_NAME (type
)
13782 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
13783 && DECL_NAME (TYPE_NAME (type
))
13784 && strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type
))),
13785 "__ev64_opaque__") == 0);
13789 rs6000_dwarf_register_span (reg
)
13794 if (!TARGET_SPE
|| !SPE_VECTOR_MODE (GET_MODE (reg
)))
13797 regno
= REGNO (reg
);
13799 /* The duality of the SPE register size wreaks all kinds of havoc.
13800 This is a way of distinguishing r0 in 32-bits from r0 in
13803 gen_rtx_PARALLEL (VOIDmode
,
13806 gen_rtx_REG (SImode
, regno
+ 1200),
13807 gen_rtx_REG (SImode
, regno
))
13809 gen_rtx_REG (SImode
, regno
),
13810 gen_rtx_REG (SImode
, regno
+ 1200)));
13813 #include "gt-rs6000.h"