1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

/* A vector constant is "easy" if every element is the same and fits in
   a signed 5-bit field (-16..15), i.e. can be loaded with a single
   vspltis* instruction.  */
#define EASY_VECTOR_15(n, x, y) ((n) >= -16 && (n) <= 15 \
				 && easy_vector_same (x, y))

/* Likewise for splat values 16..30: an even value in that range can be
   synthesized as an easy constant added to itself (vspltis + vadd).
   NOTE(review): the "!((n) & 1)" line was reconstructed; the original
   listing lost it -- verify against upstream.  */
#define EASY_VECTOR_15_ADD_SELF(n, x, y) ((n) >= 0x10 && (n) <= 0x1e \
					  && !((n) & 1)		   \
					  && easy_vector_same (x, y))

/* Plain min/max helpers.  Beware: as macros they evaluate their
   arguments more than once, so do not pass expressions with side
   effects.  */
#define min(A,B)	((A) < (B) ? (A) : (B))
#define max(A,B)	((A) > (B) ? (A) : (B))
70 enum processor_type rs6000_cpu
;
71 struct rs6000_cpu_select rs6000_select
[3] =
73 /* switch name, tune arch */
74 { (const char *)0, "--with-cpu=", 1, 1 },
75 { (const char *)0, "-mcpu=", 1, 1 },
76 { (const char *)0, "-mtune=", 1, 0 },
79 /* Size of long double */
80 const char *rs6000_long_double_size_string
;
81 int rs6000_long_double_type_size
;
83 /* Whether -mabi=altivec has appeared */
84 int rs6000_altivec_abi
;
86 /* Whether VRSAVE instructions should be generated. */
87 int rs6000_altivec_vrsave
;
89 /* String from -mvrsave= option. */
90 const char *rs6000_altivec_vrsave_string
;
92 /* Nonzero if we want SPE ABI extensions. */
95 /* Whether isel instructions should be generated. */
98 /* Whether SPE simd instructions should be generated. */
101 /* Nonzero if floating point operations are done in the GPRs. */
102 int rs6000_float_gprs
= 0;
104 /* String from -mfloat-gprs=. */
105 const char *rs6000_float_gprs_string
;
107 /* String from -misel=. */
108 const char *rs6000_isel_string
;
110 /* String from -mspe=. */
111 const char *rs6000_spe_string
;
113 /* Set to nonzero once AIX common-mode calls have been defined. */
114 static GTY(()) int common_mode_defined
;
116 /* Save information from a "cmpxx" operation until the branch or scc is
118 rtx rs6000_compare_op0
, rs6000_compare_op1
;
119 int rs6000_compare_fp_p
;
121 /* Label number of label created for -mrelocatable, to call to so we can
122 get the address of the GOT section */
123 int rs6000_pic_labelno
;
126 /* Which abi to adhere to */
127 const char *rs6000_abi_name
= RS6000_ABI_NAME
;
129 /* Semantics of the small data area */
130 enum rs6000_sdata_type rs6000_sdata
= SDATA_DATA
;
132 /* Which small data model to use */
133 const char *rs6000_sdata_name
= (char *)0;
135 /* Counter for labels which are to be placed in .fixup. */
136 int fixuplabelno
= 0;
139 /* ABI enumeration available for subtarget to use. */
140 enum rs6000_abi rs6000_current_abi
;
142 /* ABI string from -mabi= option. */
143 const char *rs6000_abi_string
;
146 const char *rs6000_debug_name
;
147 int rs6000_debug_stack
; /* debug stack applications */
148 int rs6000_debug_arg
; /* debug argument handling */
150 /* A copy of V2SI_type_node to be used as an opaque type. */
151 static GTY(()) tree opaque_V2SI_type_node
;
153 /* Same, but for V2SF. */
154 static GTY(()) tree opaque_V2SF_type_node
;
156 const char *rs6000_traceback_name
;
158 traceback_default
= 0,
164 /* Flag to say the TOC is initialized */
166 char toc_label_name
[10];
168 /* Alias set for saves and restores from the rs6000 stack. */
169 static int rs6000_sr_alias_set
;
171 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
172 The only place that looks at this is rs6000_set_default_type_attributes;
173 everywhere else should rely on the presence or absence of a longcall
174 attribute on the function declaration. */
175 int rs6000_default_long_calls
;
176 const char *rs6000_longcall_switch
;
178 struct builtin_description
180 /* mask is not const because we're going to alter it below. This
181 nonsense will go away when we rewrite the -march infrastructure
182 to give us more target flag bits. */
184 const enum insn_code icode
;
185 const char *const name
;
186 const enum rs6000_builtins code
;
189 static bool rs6000_function_ok_for_sibcall
PARAMS ((tree
, tree
));
190 static int num_insns_constant_wide
PARAMS ((HOST_WIDE_INT
));
191 static void validate_condition_mode
192 PARAMS ((enum rtx_code
, enum machine_mode
));
193 static rtx rs6000_generate_compare
PARAMS ((enum rtx_code
));
194 static void rs6000_maybe_dead
PARAMS ((rtx
));
195 static void rs6000_emit_stack_tie
PARAMS ((void));
196 static void rs6000_frame_related
PARAMS ((rtx
, rtx
, HOST_WIDE_INT
, rtx
, rtx
));
197 static rtx spe_synthesize_frame_save
PARAMS ((rtx
));
198 static bool spe_func_has_64bit_regs_p
PARAMS ((void));
199 static void emit_frame_save
PARAMS ((rtx
, rtx
, enum machine_mode
,
200 unsigned int, int, int));
201 static rtx gen_frame_mem_offset
PARAMS ((enum machine_mode
, rtx
, int));
202 static void rs6000_emit_allocate_stack
PARAMS ((HOST_WIDE_INT
, int));
203 static unsigned rs6000_hash_constant
PARAMS ((rtx
));
204 static unsigned toc_hash_function
PARAMS ((const void *));
205 static int toc_hash_eq
PARAMS ((const void *, const void *));
206 static int constant_pool_expr_1
PARAMS ((rtx
, int *, int *));
207 static struct machine_function
* rs6000_init_machine_status
PARAMS ((void));
208 static bool rs6000_assemble_integer
PARAMS ((rtx
, unsigned int, int));
209 #ifdef HAVE_GAS_HIDDEN
210 static void rs6000_assemble_visibility
PARAMS ((tree
, int));
212 static int rs6000_ra_ever_killed
PARAMS ((void));
213 static tree rs6000_handle_longcall_attribute
PARAMS ((tree
*, tree
, tree
, int, bool *));
214 const struct attribute_spec rs6000_attribute_table
[];
215 static void rs6000_set_default_type_attributes
PARAMS ((tree
));
216 static void rs6000_output_function_prologue
PARAMS ((FILE *, HOST_WIDE_INT
));
217 static void rs6000_output_function_epilogue
PARAMS ((FILE *, HOST_WIDE_INT
));
218 static void rs6000_output_mi_thunk
PARAMS ((FILE *, tree
, HOST_WIDE_INT
,
219 HOST_WIDE_INT
, tree
));
220 static rtx rs6000_emit_set_long_const
PARAMS ((rtx
,
221 HOST_WIDE_INT
, HOST_WIDE_INT
));
223 static unsigned int rs6000_elf_section_type_flags
PARAMS ((tree
, const char *,
225 static void rs6000_elf_asm_out_constructor
PARAMS ((rtx
, int));
226 static void rs6000_elf_asm_out_destructor
PARAMS ((rtx
, int));
227 static void rs6000_elf_select_section
PARAMS ((tree
, int,
228 unsigned HOST_WIDE_INT
));
229 static void rs6000_elf_unique_section
PARAMS ((tree
, int));
230 static void rs6000_elf_select_rtx_section
PARAMS ((enum machine_mode
, rtx
,
231 unsigned HOST_WIDE_INT
));
232 static void rs6000_elf_encode_section_info
PARAMS ((tree
, rtx
, int))
234 static bool rs6000_elf_in_small_data_p
PARAMS ((tree
));
237 static void rs6000_xcoff_asm_globalize_label
PARAMS ((FILE *, const char *));
238 static void rs6000_xcoff_asm_named_section
PARAMS ((const char *, unsigned int));
239 static void rs6000_xcoff_select_section
PARAMS ((tree
, int,
240 unsigned HOST_WIDE_INT
));
241 static void rs6000_xcoff_unique_section
PARAMS ((tree
, int));
242 static void rs6000_xcoff_select_rtx_section
PARAMS ((enum machine_mode
, rtx
,
243 unsigned HOST_WIDE_INT
));
244 static const char * rs6000_xcoff_strip_name_encoding
PARAMS ((const char *));
245 static unsigned int rs6000_xcoff_section_type_flags
PARAMS ((tree
, const char *, int));
248 static bool rs6000_binds_local_p
PARAMS ((tree
));
250 static int rs6000_use_dfa_pipeline_interface
PARAMS ((void));
251 static int rs6000_variable_issue
PARAMS ((FILE *, int, rtx
, int));
252 static bool rs6000_rtx_costs
PARAMS ((rtx
, int, int, int *));
253 static int rs6000_adjust_cost
PARAMS ((rtx
, rtx
, rtx
, int));
254 static int rs6000_adjust_priority
PARAMS ((rtx
, int));
255 static int rs6000_issue_rate
PARAMS ((void));
256 static int rs6000_use_sched_lookahead
PARAMS ((void));
258 static void rs6000_init_builtins
PARAMS ((void));
259 static rtx rs6000_expand_unop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
260 static rtx rs6000_expand_binop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
261 static rtx rs6000_expand_ternop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
262 static rtx rs6000_expand_builtin
PARAMS ((tree
, rtx
, rtx
, enum machine_mode
, int));
263 static void altivec_init_builtins
PARAMS ((void));
264 static void rs6000_common_init_builtins
PARAMS ((void));
266 static void enable_mask_for_builtins
PARAMS ((struct builtin_description
*,
267 int, enum rs6000_builtins
,
268 enum rs6000_builtins
));
269 static void spe_init_builtins
PARAMS ((void));
270 static rtx spe_expand_builtin
PARAMS ((tree
, rtx
, bool *));
271 static rtx spe_expand_predicate_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
272 static rtx spe_expand_evsel_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
273 static int rs6000_emit_int_cmove
PARAMS ((rtx
, rtx
, rtx
, rtx
));
275 static rtx altivec_expand_builtin
PARAMS ((tree
, rtx
, bool *));
276 static rtx altivec_expand_ld_builtin
PARAMS ((tree
, rtx
, bool *));
277 static rtx altivec_expand_st_builtin
PARAMS ((tree
, rtx
, bool *));
278 static rtx altivec_expand_dst_builtin
PARAMS ((tree
, rtx
, bool *));
279 static rtx altivec_expand_abs_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
280 static rtx altivec_expand_predicate_builtin
PARAMS ((enum insn_code
, const char *, tree
, rtx
));
281 static rtx altivec_expand_stv_builtin
PARAMS ((enum insn_code
, tree
));
282 static void rs6000_parse_abi_options
PARAMS ((void));
283 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
284 static int first_altivec_reg_to_save
PARAMS ((void));
285 static unsigned int compute_vrsave_mask
PARAMS ((void));
286 static void is_altivec_return_reg
PARAMS ((rtx
, void *));
287 static rtx generate_set_vrsave
PARAMS ((rtx
, rs6000_stack_t
*, int));
288 int easy_vector_constant
PARAMS ((rtx
, enum machine_mode
));
289 static int easy_vector_same
PARAMS ((rtx
, enum machine_mode
));
290 static bool is_ev64_opaque_type
PARAMS ((tree
));
291 static rtx rs6000_dwarf_register_span
PARAMS ((rtx
));
293 /* Hash table stuff for keeping track of TOC entries. */
295 struct toc_hash_struct
GTY(())
297 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
298 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
300 enum machine_mode key_mode
;
304 static GTY ((param_is (struct toc_hash_struct
))) htab_t toc_hash_table
;
/* Default register names.  Order follows the hard register numbering:
   32 GPRs, 32 FPRs, mq/lr/ctr/ap, 8 CR fields, xer, 32 AltiVec
   registers, vrsave/vscr, and the SPE accumulator registers.
   NOTE(review): the array braces and the entries after the CR fields
   were lost from this listing and reconstructed -- verify the tail
   ("xer" through "spefscr") against upstream.  */
char rs6000_reg_names[][8] =
{
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "mq", "lr", "ctr","ap",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave", "vscr",
      /* SPE registers.  */
      "spe_acc", "spefscr"
};
#ifdef TARGET_REGNAMES
/* Alternate register names with explicit %r/%f/%cr/%v prefixes, copied
   over rs6000_reg_names when the user asks for them.  Same ordering as
   rs6000_reg_names.
   NOTE(review): the array braces, the "xer" entry and the tail entries
   were lost from this listing and reconstructed -- verify against
   upstream.  */
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
  /* AltiVec registers.  */
   "%v0",   "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6",  "%v7",
   "%v8",   "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16",  "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24",  "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr"
};
#endif
355 #ifndef MASK_STRICT_ALIGN
356 #define MASK_STRICT_ALIGN 0
358 #ifndef TARGET_PROFILE_KERNEL
359 #define TARGET_PROFILE_KERNEL 0
362 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
363 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
365 /* Initialize the GCC target structure. */
366 #undef TARGET_ATTRIBUTE_TABLE
367 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
368 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
369 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
371 #undef TARGET_ASM_ALIGNED_DI_OP
372 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
374 /* Default unaligned ops are only provided for ELF. Find the ops needed
375 for non-ELF systems. */
376 #ifndef OBJECT_FORMAT_ELF
378 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
380 #undef TARGET_ASM_UNALIGNED_HI_OP
381 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
382 #undef TARGET_ASM_UNALIGNED_SI_OP
383 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
384 #undef TARGET_ASM_UNALIGNED_DI_OP
385 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
388 #undef TARGET_ASM_UNALIGNED_HI_OP
389 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
390 #undef TARGET_ASM_UNALIGNED_SI_OP
391 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
395 /* This hook deals with fixups for relocatable code and DI-mode objects
397 #undef TARGET_ASM_INTEGER
398 #define TARGET_ASM_INTEGER rs6000_assemble_integer
400 #ifdef HAVE_GAS_HIDDEN
401 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
402 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
405 #undef TARGET_ASM_FUNCTION_PROLOGUE
406 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
407 #undef TARGET_ASM_FUNCTION_EPILOGUE
408 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
410 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
411 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
412 #undef TARGET_SCHED_VARIABLE_ISSUE
413 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
415 #undef TARGET_SCHED_ISSUE_RATE
416 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
417 #undef TARGET_SCHED_ADJUST_COST
418 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
419 #undef TARGET_SCHED_ADJUST_PRIORITY
420 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
422 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
423 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
425 #undef TARGET_INIT_BUILTINS
426 #define TARGET_INIT_BUILTINS rs6000_init_builtins
428 #undef TARGET_EXPAND_BUILTIN
429 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
432 #undef TARGET_BINDS_LOCAL_P
433 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
436 #undef TARGET_ASM_OUTPUT_MI_THUNK
437 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
439 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
440 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
442 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
443 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
445 #undef TARGET_RTX_COSTS
446 #define TARGET_RTX_COSTS rs6000_rtx_costs
447 #undef TARGET_ADDRESS_COST
448 #define TARGET_ADDRESS_COST hook_int_rtx_0
450 #undef TARGET_VECTOR_OPAQUE_P
451 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
453 #undef TARGET_DWARF_REGISTER_SPAN
454 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
456 struct gcc_target targetm
= TARGET_INITIALIZER
;
458 /* Override command line options. Mostly we process the processor
459 type and sometimes adjust other TARGET_ options. */
462 rs6000_override_options (default_cpu
)
463 const char *default_cpu
;
466 struct rs6000_cpu_select
*ptr
;
468 /* Simplify the entries below by making a mask for any POWER
469 variant and any PowerPC variant. */
471 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
472 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
473 | MASK_PPC_GFXOPT | MASK_POWERPC64)
474 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
478 const char *const name
; /* Canonical processor name. */
479 const enum processor_type processor
; /* Processor type enum value. */
480 const int target_enable
; /* Target flags to enable. */
481 const int target_disable
; /* Target flags to disable. */
482 } const processor_target_table
[]
483 = {{"common", PROCESSOR_COMMON
, MASK_NEW_MNEMONICS
,
484 POWER_MASKS
| POWERPC_MASKS
},
485 {"power", PROCESSOR_POWER
,
486 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
487 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
488 {"power2", PROCESSOR_POWER
,
489 MASK_POWER
| MASK_POWER2
| MASK_MULTIPLE
| MASK_STRING
,
490 POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
491 {"power3", PROCESSOR_PPC630
,
492 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
494 {"power4", PROCESSOR_POWER4
,
495 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
497 {"powerpc", PROCESSOR_POWERPC
,
498 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
499 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
500 {"powerpc64", PROCESSOR_POWERPC64
,
501 MASK_POWERPC
| MASK_POWERPC64
| MASK_NEW_MNEMONICS
,
502 POWER_MASKS
| POWERPC_OPT_MASKS
},
503 {"rios", PROCESSOR_RIOS1
,
504 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
505 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
506 {"rios1", PROCESSOR_RIOS1
,
507 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
508 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
509 {"rsc", PROCESSOR_PPC601
,
510 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
511 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
512 {"rsc1", PROCESSOR_PPC601
,
513 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
514 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
515 {"rios2", PROCESSOR_RIOS2
,
516 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
| MASK_POWER2
,
517 POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
518 {"rs64a", PROCESSOR_RS64A
,
519 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
520 POWER_MASKS
| POWERPC_OPT_MASKS
},
521 {"401", PROCESSOR_PPC403
,
522 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
523 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
524 {"403", PROCESSOR_PPC403
,
525 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
| MASK_STRICT_ALIGN
,
526 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
527 {"405", PROCESSOR_PPC405
,
528 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
529 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
530 {"405f", PROCESSOR_PPC405
,
531 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
532 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
533 {"505", PROCESSOR_MPCCORE
,
534 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
535 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
536 {"601", PROCESSOR_PPC601
,
537 MASK_POWER
| MASK_POWERPC
| MASK_NEW_MNEMONICS
| MASK_MULTIPLE
| MASK_STRING
,
538 MASK_POWER2
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
539 {"602", PROCESSOR_PPC603
,
540 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
541 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
542 {"603", PROCESSOR_PPC603
,
543 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
544 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
545 {"603e", PROCESSOR_PPC603
,
546 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
547 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
548 {"ec603e", PROCESSOR_PPC603
,
549 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
550 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
551 {"604", PROCESSOR_PPC604
,
552 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
553 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
554 {"604e", PROCESSOR_PPC604e
,
555 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
556 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
557 {"620", PROCESSOR_PPC620
,
558 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
560 {"630", PROCESSOR_PPC630
,
561 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
563 {"740", PROCESSOR_PPC750
,
564 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
565 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
566 {"750", PROCESSOR_PPC750
,
567 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
568 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
569 {"7400", PROCESSOR_PPC7400
,
570 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
571 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
572 {"7450", PROCESSOR_PPC7450
,
573 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
574 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
575 {"8540", PROCESSOR_PPC8540
,
576 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
577 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
578 {"801", PROCESSOR_MPCCORE
,
579 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
580 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
581 {"821", PROCESSOR_MPCCORE
,
582 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
583 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
584 {"823", PROCESSOR_MPCCORE
,
585 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
586 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
587 {"860", PROCESSOR_MPCCORE
,
588 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
589 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
}};
591 const size_t ptt_size
= ARRAY_SIZE (processor_target_table
);
593 /* Save current -mmultiple/-mno-multiple status. */
594 int multiple
= TARGET_MULTIPLE
;
595 /* Save current -mstring/-mno-string status. */
596 int string
= TARGET_STRING
;
598 /* Identify the processor type. */
599 rs6000_select
[0].string
= default_cpu
;
600 rs6000_cpu
= TARGET_POWERPC64
? PROCESSOR_DEFAULT64
: PROCESSOR_DEFAULT
;
602 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
604 ptr
= &rs6000_select
[i
];
605 if (ptr
->string
!= (char *)0 && ptr
->string
[0] != '\0')
607 for (j
= 0; j
< ptt_size
; j
++)
608 if (! strcmp (ptr
->string
, processor_target_table
[j
].name
))
611 rs6000_cpu
= processor_target_table
[j
].processor
;
615 target_flags
|= processor_target_table
[j
].target_enable
;
616 target_flags
&= ~processor_target_table
[j
].target_disable
;
622 error ("bad value (%s) for %s switch", ptr
->string
, ptr
->name
);
629 /* If we are optimizing big endian systems for space, use the load/store
630 multiple and string instructions. */
631 if (BYTES_BIG_ENDIAN
&& optimize_size
)
632 target_flags
|= MASK_MULTIPLE
| MASK_STRING
;
634 /* If -mmultiple or -mno-multiple was explicitly used, don't
635 override with the processor default */
636 if ((target_flags_explicit
& MASK_MULTIPLE
) != 0)
637 target_flags
= (target_flags
& ~MASK_MULTIPLE
) | multiple
;
639 /* If -mstring or -mno-string was explicitly used, don't override
640 with the processor default. */
641 if ((target_flags_explicit
& MASK_STRING
) != 0)
642 target_flags
= (target_flags
& ~MASK_STRING
) | string
;
644 /* Don't allow -mmultiple or -mstring on little endian systems
645 unless the cpu is a 750, because the hardware doesn't support the
646 instructions used in little endian mode, and causes an alignment
647 trap. The 750 does not cause an alignment trap (except when the
648 target is unaligned). */
650 if (!BYTES_BIG_ENDIAN
&& rs6000_cpu
!= PROCESSOR_PPC750
)
654 target_flags
&= ~MASK_MULTIPLE
;
655 if ((target_flags_explicit
& MASK_MULTIPLE
) != 0)
656 warning ("-mmultiple is not supported on little endian systems");
661 target_flags
&= ~MASK_STRING
;
662 if ((target_flags_explicit
& MASK_STRING
) != 0)
663 warning ("-mstring is not supported on little endian systems");
667 /* Set debug flags */
668 if (rs6000_debug_name
)
670 if (! strcmp (rs6000_debug_name
, "all"))
671 rs6000_debug_stack
= rs6000_debug_arg
= 1;
672 else if (! strcmp (rs6000_debug_name
, "stack"))
673 rs6000_debug_stack
= 1;
674 else if (! strcmp (rs6000_debug_name
, "arg"))
675 rs6000_debug_arg
= 1;
677 error ("unknown -mdebug-%s switch", rs6000_debug_name
);
680 if (rs6000_traceback_name
)
682 if (! strncmp (rs6000_traceback_name
, "full", 4))
683 rs6000_traceback
= traceback_full
;
684 else if (! strncmp (rs6000_traceback_name
, "part", 4))
685 rs6000_traceback
= traceback_part
;
686 else if (! strncmp (rs6000_traceback_name
, "no", 2))
687 rs6000_traceback
= traceback_none
;
689 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
690 rs6000_traceback_name
);
693 /* Set size of long double */
694 rs6000_long_double_type_size
= 64;
695 if (rs6000_long_double_size_string
)
698 int size
= strtol (rs6000_long_double_size_string
, &tail
, 10);
699 if (*tail
!= '\0' || (size
!= 64 && size
!= 128))
700 error ("Unknown switch -mlong-double-%s",
701 rs6000_long_double_size_string
);
703 rs6000_long_double_type_size
= size
;
706 /* Handle -mabi= options. */
707 rs6000_parse_abi_options ();
709 /* Handle generic -mFOO=YES/NO options. */
710 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string
,
711 &rs6000_altivec_vrsave
);
712 rs6000_parse_yes_no_option ("isel", rs6000_isel_string
,
714 rs6000_parse_yes_no_option ("spe", rs6000_spe_string
, &rs6000_spe
);
715 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string
,
718 #ifdef SUBTARGET_OVERRIDE_OPTIONS
719 SUBTARGET_OVERRIDE_OPTIONS
;
721 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
722 SUBSUBTARGET_OVERRIDE_OPTIONS
;
727 /* The e500 does not have string instructions, and we set
728 MASK_STRING above when optimizing for size. */
729 if ((target_flags
& MASK_STRING
) != 0)
730 target_flags
= target_flags
& ~MASK_STRING
;
732 /* No SPE means 64-bit long doubles, even if an E500. */
733 if (rs6000_spe_string
!= 0
734 && !strcmp (rs6000_spe_string
, "no"))
735 rs6000_long_double_type_size
= 64;
737 else if (rs6000_select
[1].string
!= NULL
)
739 /* For the powerpc-eabispe configuration, we set all these by
740 default, so let's unset them if we manually set another
741 CPU that is not the E500. */
742 if (rs6000_abi_string
== 0)
744 if (rs6000_spe_string
== 0)
746 if (rs6000_float_gprs_string
== 0)
747 rs6000_float_gprs
= 0;
748 if (rs6000_isel_string
== 0)
750 if (rs6000_long_double_size_string
== 0)
751 rs6000_long_double_type_size
= 64;
754 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
755 using TARGET_OPTIONS to handle a toggle switch, but we're out of
756 bits in target_flags so TARGET_SWITCHES cannot be used.
757 Assumption here is that rs6000_longcall_switch points into the
758 text of the complete option, rather than being a copy, so we can
759 scan back for the presence or absence of the no- modifier. */
760 if (rs6000_longcall_switch
)
762 const char *base
= rs6000_longcall_switch
;
763 while (base
[-1] != 'm') base
--;
765 if (*rs6000_longcall_switch
!= '\0')
766 error ("invalid option `%s'", base
);
767 rs6000_default_long_calls
= (base
[0] != 'n');
770 #ifdef TARGET_REGNAMES
771 /* If the user desires alternate register names, copy in the
772 alternate names now. */
774 memcpy (rs6000_reg_names
, alt_reg_names
, sizeof (rs6000_reg_names
));
777 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
778 If -maix-struct-return or -msvr4-struct-return was explicitly
779 used, don't override with the ABI default. */
780 if ((target_flags_explicit
& MASK_AIX_STRUCT_RET
) == 0)
782 if (DEFAULT_ABI
== ABI_V4
&& !DRAFT_V4_STRUCT_RET
)
783 target_flags
= (target_flags
& ~MASK_AIX_STRUCT_RET
);
785 target_flags
|= MASK_AIX_STRUCT_RET
;
788 if (TARGET_LONG_DOUBLE_128
789 && (DEFAULT_ABI
== ABI_AIX
|| DEFAULT_ABI
== ABI_DARWIN
))
790 real_format_for_mode
[TFmode
- QFmode
] = &ibm_extended_format
;
792 /* Allocate an alias set for register saves & restores from stack. */
793 rs6000_sr_alias_set
= new_alias_set ();
796 ASM_GENERATE_INTERNAL_LABEL (toc_label_name
, "LCTOC", 1);
798 /* We can only guarantee the availability of DI pseudo-ops when
799 assembling for 64-bit targets. */
802 targetm
.asm_out
.aligned_op
.di
= NULL
;
803 targetm
.asm_out
.unaligned_op
.di
= NULL
;
806 /* Set maximum branch target alignment at two instructions, eight bytes. */
807 align_jumps_max_skip
= 8;
808 align_loops_max_skip
= 8;
810 /* Arrange to save and restore machine status around nested functions. */
811 init_machine_status
= rs6000_init_machine_status
;
/* Handle generic options of the form -mfoo=yes/no.
   NAME is the option name.
   VALUE is the option value.
   FLAG is the pointer to the flag where to store a 1 or 0, depending on
   whether the option value is 'yes' or 'no' respectively.  A null VALUE
   (option not given) leaves *FLAG untouched; any other value is an
   error.  */
static void
rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
{
  if (value == 0)
    return;
  else if (!strcmp (value, "yes"))
    *flag = 1;
  else if (!strcmp (value, "no"))
    *flag = 0;
  else
    error ("unknown -m%s= option specified: '%s'", name, value);
}
832 /* Handle -mabi= options. */
834 rs6000_parse_abi_options ()
836 if (rs6000_abi_string
== 0)
838 else if (! strcmp (rs6000_abi_string
, "altivec"))
839 rs6000_altivec_abi
= 1;
840 else if (! strcmp (rs6000_abi_string
, "no-altivec"))
841 rs6000_altivec_abi
= 0;
842 else if (! strcmp (rs6000_abi_string
, "spe"))
846 error ("not configured for ABI: '%s'", rs6000_abi_string
);
849 else if (! strcmp (rs6000_abi_string
, "no-spe"))
852 error ("unknown ABI specified: '%s'", rs6000_abi_string
);
856 optimization_options (level
, size
)
857 int level ATTRIBUTE_UNUSED
;
858 int size ATTRIBUTE_UNUSED
;
862 /* Do anything needed at the start of the asm file. */
865 rs6000_file_start (file
, default_cpu
)
867 const char *default_cpu
;
871 const char *start
= buffer
;
872 struct rs6000_cpu_select
*ptr
;
874 if (flag_verbose_asm
)
876 sprintf (buffer
, "\n%s rs6000/powerpc options:", ASM_COMMENT_START
);
877 rs6000_select
[0].string
= default_cpu
;
879 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
881 ptr
= &rs6000_select
[i
];
882 if (ptr
->string
!= (char *)0 && ptr
->string
[0] != '\0')
884 fprintf (file
, "%s %s%s", start
, ptr
->name
, ptr
->string
);
890 switch (rs6000_sdata
)
892 case SDATA_NONE
: fprintf (file
, "%s -msdata=none", start
); start
= ""; break;
893 case SDATA_DATA
: fprintf (file
, "%s -msdata=data", start
); start
= ""; break;
894 case SDATA_SYSV
: fprintf (file
, "%s -msdata=sysv", start
); start
= ""; break;
895 case SDATA_EABI
: fprintf (file
, "%s -msdata=eabi", start
); start
= ""; break;
898 if (rs6000_sdata
&& g_switch_value
)
900 fprintf (file
, "%s -G %d", start
, g_switch_value
);
910 /* Return nonzero if this function is known to have a null epilogue. */
915 if (reload_completed
)
917 rs6000_stack_t
*info
= rs6000_stack_info ();
919 if (info
->first_gp_reg_save
== 32
920 && info
->first_fp_reg_save
== 64
921 && info
->first_altivec_reg_save
== LAST_ALTIVEC_REGNO
+ 1
924 && info
->vrsave_mask
== 0
932 /* Returns 1 always. */
935 any_operand (op
, mode
)
936 rtx op ATTRIBUTE_UNUSED
;
937 enum machine_mode mode ATTRIBUTE_UNUSED
;
942 /* Returns 1 if op is the count register. */
944 count_register_operand (op
, mode
)
946 enum machine_mode mode ATTRIBUTE_UNUSED
;
948 if (GET_CODE (op
) != REG
)
951 if (REGNO (op
) == COUNT_REGISTER_REGNUM
)
954 if (REGNO (op
) > FIRST_PSEUDO_REGISTER
)
960 /* Returns 1 if op is an altivec register. */
962 altivec_register_operand (op
, mode
)
964 enum machine_mode mode ATTRIBUTE_UNUSED
;
967 return (register_operand (op
, mode
)
968 && (GET_CODE (op
) != REG
969 || REGNO (op
) > FIRST_PSEUDO_REGISTER
970 || ALTIVEC_REGNO_P (REGNO (op
))));
974 xer_operand (op
, mode
)
976 enum machine_mode mode ATTRIBUTE_UNUSED
;
978 if (GET_CODE (op
) != REG
)
981 if (XER_REGNO_P (REGNO (op
)))
987 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
988 by such constants completes more quickly. */
991 s8bit_cint_operand (op
, mode
)
993 enum machine_mode mode ATTRIBUTE_UNUSED
;
995 return ( GET_CODE (op
) == CONST_INT
996 && (INTVAL (op
) >= -128 && INTVAL (op
) <= 127));
999 /* Return 1 if OP is a constant that can fit in a D field. */
1002 short_cint_operand (op
, mode
)
1004 enum machine_mode mode ATTRIBUTE_UNUSED
;
1006 return (GET_CODE (op
) == CONST_INT
1007 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I'));
1010 /* Similar for an unsigned D field. */
1013 u_short_cint_operand (op
, mode
)
1015 enum machine_mode mode ATTRIBUTE_UNUSED
;
1017 return (GET_CODE (op
) == CONST_INT
1018 && CONST_OK_FOR_LETTER_P (INTVAL (op
) & GET_MODE_MASK (mode
), 'K'));
1021 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1024 non_short_cint_operand (op
, mode
)
1026 enum machine_mode mode ATTRIBUTE_UNUSED
;
1028 return (GET_CODE (op
) == CONST_INT
1029 && (unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x8000) >= 0x10000);
1032 /* Returns 1 if OP is a CONST_INT that is a positive value
1033 and an exact power of 2. */
1036 exact_log2_cint_operand (op
, mode
)
1038 enum machine_mode mode ATTRIBUTE_UNUSED
;
1040 return (GET_CODE (op
) == CONST_INT
1042 && exact_log2 (INTVAL (op
)) >= 0);
1045 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1049 gpc_reg_operand (op
, mode
)
1051 enum machine_mode mode
;
1053 return (register_operand (op
, mode
)
1054 && (GET_CODE (op
) != REG
1055 || (REGNO (op
) >= ARG_POINTER_REGNUM
1056 && !XER_REGNO_P (REGNO (op
)))
1057 || REGNO (op
) < MQ_REGNO
));
1060 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1064 cc_reg_operand (op
, mode
)
1066 enum machine_mode mode
;
1068 return (register_operand (op
, mode
)
1069 && (GET_CODE (op
) != REG
1070 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
1071 || CR_REGNO_P (REGNO (op
))));
1074 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1075 CR field that isn't CR0. */
1078 cc_reg_not_cr0_operand (op
, mode
)
1080 enum machine_mode mode
;
1082 return (register_operand (op
, mode
)
1083 && (GET_CODE (op
) != REG
1084 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
1085 || CR_REGNO_NOT_CR0_P (REGNO (op
))));
1088 /* Returns 1 if OP is either a constant integer valid for a D-field or
1089 a non-special register. If a register, it must be in the proper
1090 mode unless MODE is VOIDmode. */
1093 reg_or_short_operand (op
, mode
)
1095 enum machine_mode mode
;
1097 return short_cint_operand (op
, mode
) || gpc_reg_operand (op
, mode
);
1100 /* Similar, except check if the negation of the constant would be
1101 valid for a D-field. */
1104 reg_or_neg_short_operand (op
, mode
)
1106 enum machine_mode mode
;
1108 if (GET_CODE (op
) == CONST_INT
)
1109 return CONST_OK_FOR_LETTER_P (INTVAL (op
), 'P');
1111 return gpc_reg_operand (op
, mode
);
1114 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1115 a non-special register. If a register, it must be in the proper
1116 mode unless MODE is VOIDmode. */
1119 reg_or_aligned_short_operand (op
, mode
)
1121 enum machine_mode mode
;
1123 if (gpc_reg_operand (op
, mode
))
1125 else if (short_cint_operand (op
, mode
) && !(INTVAL (op
) & 3))
1132 /* Return 1 if the operand is either a register or an integer whose
1133 high-order 16 bits are zero. */
1136 reg_or_u_short_operand (op
, mode
)
1138 enum machine_mode mode
;
1140 return u_short_cint_operand (op
, mode
) || gpc_reg_operand (op
, mode
);
1143 /* Return 1 is the operand is either a non-special register or ANY
1144 constant integer. */
1147 reg_or_cint_operand (op
, mode
)
1149 enum machine_mode mode
;
1151 return (GET_CODE (op
) == CONST_INT
|| gpc_reg_operand (op
, mode
));
1154 /* Return 1 is the operand is either a non-special register or ANY
1155 32-bit signed constant integer. */
1158 reg_or_arith_cint_operand (op
, mode
)
1160 enum machine_mode mode
;
1162 return (gpc_reg_operand (op
, mode
)
1163 || (GET_CODE (op
) == CONST_INT
1164 #if HOST_BITS_PER_WIDE_INT != 32
1165 && ((unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x80000000)
1166 < (unsigned HOST_WIDE_INT
) 0x100000000ll
)
1171 /* Return 1 is the operand is either a non-special register or a 32-bit
1172 signed constant integer valid for 64-bit addition. */
1175 reg_or_add_cint64_operand (op
, mode
)
1177 enum machine_mode mode
;
1179 return (gpc_reg_operand (op
, mode
)
1180 || (GET_CODE (op
) == CONST_INT
1181 #if HOST_BITS_PER_WIDE_INT == 32
1182 && INTVAL (op
) < 0x7fff8000
1184 && ((unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x80008000)
1190 /* Return 1 is the operand is either a non-special register or a 32-bit
1191 signed constant integer valid for 64-bit subtraction. */
1194 reg_or_sub_cint64_operand (op
, mode
)
1196 enum machine_mode mode
;
1198 return (gpc_reg_operand (op
, mode
)
1199 || (GET_CODE (op
) == CONST_INT
1200 #if HOST_BITS_PER_WIDE_INT == 32
1201 && (- INTVAL (op
)) < 0x7fff8000
1203 && ((unsigned HOST_WIDE_INT
) ((- INTVAL (op
)) + 0x80008000)
1209 /* Return 1 is the operand is either a non-special register or ANY
1210 32-bit unsigned constant integer. */
1213 reg_or_logical_cint_operand (op
, mode
)
1215 enum machine_mode mode
;
1217 if (GET_CODE (op
) == CONST_INT
)
1219 if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
)
1221 if (GET_MODE_BITSIZE (mode
) <= 32)
1224 if (INTVAL (op
) < 0)
1228 return ((INTVAL (op
) & GET_MODE_MASK (mode
)
1229 & (~ (unsigned HOST_WIDE_INT
) 0xffffffff)) == 0);
1231 else if (GET_CODE (op
) == CONST_DOUBLE
)
1233 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
1237 return CONST_DOUBLE_HIGH (op
) == 0;
1240 return gpc_reg_operand (op
, mode
);
1243 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1246 got_operand (op
, mode
)
1248 enum machine_mode mode ATTRIBUTE_UNUSED
;
1250 return (GET_CODE (op
) == SYMBOL_REF
1251 || GET_CODE (op
) == CONST
1252 || GET_CODE (op
) == LABEL_REF
);
1255 /* Return 1 if the operand is a simple references that can be loaded via
1256 the GOT (labels involving addition aren't allowed). */
1259 got_no_const_operand (op
, mode
)
1261 enum machine_mode mode ATTRIBUTE_UNUSED
;
1263 return (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
);
1266 /* Return the number of instructions it takes to form a constant in an
1267 integer register. */
1270 num_insns_constant_wide (value
)
1271 HOST_WIDE_INT value
;
1273 /* signed constant loadable with {cal|addi} */
1274 if (CONST_OK_FOR_LETTER_P (value
, 'I'))
1277 /* constant loadable with {cau|addis} */
1278 else if (CONST_OK_FOR_LETTER_P (value
, 'L'))
1281 #if HOST_BITS_PER_WIDE_INT == 64
1282 else if (TARGET_POWERPC64
)
1284 HOST_WIDE_INT low
= ((value
& 0xffffffff) ^ 0x80000000) - 0x80000000;
1285 HOST_WIDE_INT high
= value
>> 31;
1287 if (high
== 0 || high
== -1)
1293 return num_insns_constant_wide (high
) + 1;
1295 return (num_insns_constant_wide (high
)
1296 + num_insns_constant_wide (low
) + 1);
1305 num_insns_constant (op
, mode
)
1307 enum machine_mode mode
;
1309 if (GET_CODE (op
) == CONST_INT
)
1311 #if HOST_BITS_PER_WIDE_INT == 64
1312 if ((INTVAL (op
) >> 31) != 0 && (INTVAL (op
) >> 31) != -1
1313 && mask64_operand (op
, mode
))
1317 return num_insns_constant_wide (INTVAL (op
));
1320 else if (GET_CODE (op
) == CONST_DOUBLE
&& mode
== SFmode
)
1325 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1326 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
1327 return num_insns_constant_wide ((HOST_WIDE_INT
) l
);
1330 else if (GET_CODE (op
) == CONST_DOUBLE
)
1336 int endian
= (WORDS_BIG_ENDIAN
== 0);
1338 if (mode
== VOIDmode
|| mode
== DImode
)
1340 high
= CONST_DOUBLE_HIGH (op
);
1341 low
= CONST_DOUBLE_LOW (op
);
1345 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1346 REAL_VALUE_TO_TARGET_DOUBLE (rv
, l
);
1348 low
= l
[1 - endian
];
1352 return (num_insns_constant_wide (low
)
1353 + num_insns_constant_wide (high
));
1357 if (high
== 0 && low
>= 0)
1358 return num_insns_constant_wide (low
);
1360 else if (high
== -1 && low
< 0)
1361 return num_insns_constant_wide (low
);
1363 else if (mask64_operand (op
, mode
))
1367 return num_insns_constant_wide (high
) + 1;
1370 return (num_insns_constant_wide (high
)
1371 + num_insns_constant_wide (low
) + 1);
1379 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1380 register with one instruction per word. We only do this if we can
1381 safely read CONST_DOUBLE_{LOW,HIGH}. */
1384 easy_fp_constant (op
, mode
)
1386 enum machine_mode mode
;
1388 if (GET_CODE (op
) != CONST_DOUBLE
1389 || GET_MODE (op
) != mode
1390 || (GET_MODE_CLASS (mode
) != MODE_FLOAT
&& mode
!= DImode
))
1393 /* Consider all constants with -msoft-float to be easy. */
1394 if ((TARGET_SOFT_FLOAT
|| !TARGET_FPRS
)
1398 /* If we are using V.4 style PIC, consider all constants to be hard. */
1399 if (flag_pic
&& DEFAULT_ABI
== ABI_V4
)
1402 #ifdef TARGET_RELOCATABLE
1403 /* Similarly if we are using -mrelocatable, consider all constants
1405 if (TARGET_RELOCATABLE
)
1414 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1415 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv
, k
);
1417 return (num_insns_constant_wide ((HOST_WIDE_INT
) k
[0]) == 1
1418 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[1]) == 1
1419 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[2]) == 1
1420 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[3]) == 1);
1423 else if (mode
== DFmode
)
1428 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1429 REAL_VALUE_TO_TARGET_DOUBLE (rv
, k
);
1431 return (num_insns_constant_wide ((HOST_WIDE_INT
) k
[0]) == 1
1432 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[1]) == 1);
1435 else if (mode
== SFmode
)
1440 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1441 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
1443 return num_insns_constant_wide (l
) == 1;
1446 else if (mode
== DImode
)
1447 return ((TARGET_POWERPC64
1448 && GET_CODE (op
) == CONST_DOUBLE
&& CONST_DOUBLE_LOW (op
) == 0)
1449 || (num_insns_constant (op
, DImode
) <= 2));
1451 else if (mode
== SImode
)
1457 /* Return non zero if all elements of a vector have the same value. */
1460 easy_vector_same (op
, mode
)
1462 enum machine_mode mode ATTRIBUTE_UNUSED
;
1466 units
= CONST_VECTOR_NUNITS (op
);
1468 cst
= INTVAL (CONST_VECTOR_ELT (op
, 0));
1469 for (i
= 1; i
< units
; ++i
)
1470 if (INTVAL (CONST_VECTOR_ELT (op
, i
)) != cst
)
1477 /* Return 1 if the operand is a CONST_INT and can be put into a
1478 register without using memory. */
1481 easy_vector_constant (op
, mode
)
1483 enum machine_mode mode
;
1487 if (GET_CODE (op
) != CONST_VECTOR
1492 if (zero_constant (op
, mode
)
1493 && ((TARGET_ALTIVEC
&& ALTIVEC_VECTOR_MODE (mode
))
1494 || (TARGET_SPE
&& SPE_VECTOR_MODE (mode
))))
1497 if (GET_MODE_CLASS (mode
) != MODE_VECTOR_INT
)
1500 cst
= INTVAL (CONST_VECTOR_ELT (op
, 0));
1501 cst2
= INTVAL (CONST_VECTOR_ELT (op
, 1));
1503 /* Limit SPE vectors to 15 bits signed. These we can generate with:
1505 evmergelo r0, r0, r0
1508 I don't know how efficient it would be to allow bigger constants,
1509 considering we'll have an extra 'ori' for every 'li'. I doubt 5
1510 instructions is better than a 64-bit memory load, but I don't
1511 have the e500 timing specs. */
1512 if (TARGET_SPE
&& mode
== V2SImode
1513 && cst
>= -0x7fff && cst
<= 0x7fff
1514 && cst2
>= -0x7fff && cst
<= 0x7fff)
1517 if (TARGET_ALTIVEC
&& EASY_VECTOR_15 (cst
, op
, mode
))
1520 if (TARGET_ALTIVEC
&& EASY_VECTOR_15_ADD_SELF (cst
, op
, mode
))
1526 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
1529 easy_vector_constant_add_self (op
, mode
)
1531 enum machine_mode mode
;
1535 if (!easy_vector_constant (op
, mode
))
1538 cst
= INTVAL (CONST_VECTOR_ELT (op
, 0));
1540 return TARGET_ALTIVEC
&& EASY_VECTOR_15_ADD_SELF (cst
, op
, mode
);
1544 output_vec_const_move (operands
)
1548 enum machine_mode mode
;
1554 cst
= INTVAL (CONST_VECTOR_ELT (vec
, 0));
1555 cst2
= INTVAL (CONST_VECTOR_ELT (vec
, 1));
1556 mode
= GET_MODE (dest
);
1560 if (zero_constant (vec
, mode
))
1561 return "vxor %0,%0,%0";
1562 else if (EASY_VECTOR_15 (cst
, vec
, mode
))
1564 operands
[1] = GEN_INT (cst
);
1568 return "vspltisw %0,%1";
1570 return "vspltish %0,%1";
1572 return "vspltisb %0,%1";
1577 else if (EASY_VECTOR_15_ADD_SELF (cst
, vec
, mode
))
1585 /* Vector constant 0 is handled as a splitter of V2SI, and in the
1586 pattern of V1DI, V4HI, and V2SF.
1588 FIXME: We should probabl return # and add post reload
1589 splitters for these, but this way is so easy ;-).
1591 operands
[1] = GEN_INT (cst
);
1592 operands
[2] = GEN_INT (cst2
);
1594 return "li %0,%1\n\tevmergelo %0,%0,%0";
1596 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
1602 /* Return 1 if the operand is the constant 0. This works for scalars
1603 as well as vectors. */
1605 zero_constant (op
, mode
)
1607 enum machine_mode mode
;
1609 return op
== CONST0_RTX (mode
);
1612 /* Return 1 if the operand is 0.0. */
1614 zero_fp_constant (op
, mode
)
1616 enum machine_mode mode
;
1618 return GET_MODE_CLASS (mode
) == MODE_FLOAT
&& op
== CONST0_RTX (mode
);
1621 /* Return 1 if the operand is in volatile memory. Note that during
1622 the RTL generation phase, memory_operand does not return TRUE for
1623 volatile memory references. So this function allows us to
1624 recognize volatile references where its safe. */
1627 volatile_mem_operand (op
, mode
)
1629 enum machine_mode mode
;
1631 if (GET_CODE (op
) != MEM
)
1634 if (!MEM_VOLATILE_P (op
))
1637 if (mode
!= GET_MODE (op
))
1640 if (reload_completed
)
1641 return memory_operand (op
, mode
);
1643 if (reload_in_progress
)
1644 return strict_memory_address_p (mode
, XEXP (op
, 0));
1646 return memory_address_p (mode
, XEXP (op
, 0));
1649 /* Return 1 if the operand is an offsettable memory operand. */
1652 offsettable_mem_operand (op
, mode
)
1654 enum machine_mode mode
;
1656 return ((GET_CODE (op
) == MEM
)
1657 && offsettable_address_p (reload_completed
|| reload_in_progress
,
1658 mode
, XEXP (op
, 0)));
1661 /* Return 1 if the operand is either an easy FP constant (see above) or
1665 mem_or_easy_const_operand (op
, mode
)
1667 enum machine_mode mode
;
1669 return memory_operand (op
, mode
) || easy_fp_constant (op
, mode
);
1672 /* Return 1 if the operand is either a non-special register or an item
1673 that can be used as the operand of a `mode' add insn. */
1676 add_operand (op
, mode
)
1678 enum machine_mode mode
;
1680 if (GET_CODE (op
) == CONST_INT
)
1681 return (CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I')
1682 || CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'));
1684 return gpc_reg_operand (op
, mode
);
1687 /* Return 1 if OP is a constant but not a valid add_operand. */
1690 non_add_cint_operand (op
, mode
)
1692 enum machine_mode mode ATTRIBUTE_UNUSED
;
1694 return (GET_CODE (op
) == CONST_INT
1695 && !CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I')
1696 && !CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'));
1699 /* Return 1 if the operand is a non-special register or a constant that
1700 can be used as the operand of an OR or XOR insn on the RS/6000. */
1703 logical_operand (op
, mode
)
1705 enum machine_mode mode
;
1707 HOST_WIDE_INT opl
, oph
;
1709 if (gpc_reg_operand (op
, mode
))
1712 if (GET_CODE (op
) == CONST_INT
)
1714 opl
= INTVAL (op
) & GET_MODE_MASK (mode
);
1716 #if HOST_BITS_PER_WIDE_INT <= 32
1717 if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
&& opl
< 0)
1721 else if (GET_CODE (op
) == CONST_DOUBLE
)
1723 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1726 opl
= CONST_DOUBLE_LOW (op
);
1727 oph
= CONST_DOUBLE_HIGH (op
);
1734 return ((opl
& ~ (unsigned HOST_WIDE_INT
) 0xffff) == 0
1735 || (opl
& ~ (unsigned HOST_WIDE_INT
) 0xffff0000) == 0);
1738 /* Return 1 if C is a constant that is not a logical operand (as
1739 above), but could be split into one. */
1742 non_logical_cint_operand (op
, mode
)
1744 enum machine_mode mode
;
1746 return ((GET_CODE (op
) == CONST_INT
|| GET_CODE (op
) == CONST_DOUBLE
)
1747 && ! logical_operand (op
, mode
)
1748 && reg_or_logical_cint_operand (op
, mode
));
1751 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1752 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1753 Reject all ones and all zeros, since these should have been optimized
1754 away and confuse the making of MB and ME. */
1757 mask_operand (op
, mode
)
1759 enum machine_mode mode ATTRIBUTE_UNUSED
;
1761 HOST_WIDE_INT c
, lsb
;
1763 if (GET_CODE (op
) != CONST_INT
)
1768 /* Fail in 64-bit mode if the mask wraps around because the upper
1769 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1770 if (TARGET_POWERPC64
&& (c
& 0x80000001) == 0x80000001)
1773 /* We don't change the number of transitions by inverting,
1774 so make sure we start with the LS bit zero. */
1778 /* Reject all zeros or all ones. */
1782 /* Find the first transition. */
1785 /* Invert to look for a second transition. */
1788 /* Erase first transition. */
1791 /* Find the second transition (if any). */
1794 /* Match if all the bits above are 1's (or c is zero). */
1798 /* Return 1 for the PowerPC64 rlwinm corner case. */
1801 mask_operand_wrap (op
, mode
)
1803 enum machine_mode mode ATTRIBUTE_UNUSED
;
1805 HOST_WIDE_INT c
, lsb
;
1807 if (GET_CODE (op
) != CONST_INT
)
1812 if ((c
& 0x80000001) != 0x80000001)
1826 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1827 It is if there are no more than one 1->0 or 0->1 transitions.
1828 Reject all zeros, since zero should have been optimized away and
1829 confuses the making of MB and ME. */
1832 mask64_operand (op
, mode
)
1834 enum machine_mode mode ATTRIBUTE_UNUSED
;
1836 if (GET_CODE (op
) == CONST_INT
)
1838 HOST_WIDE_INT c
, lsb
;
1842 /* Reject all zeros. */
1846 /* We don't change the number of transitions by inverting,
1847 so make sure we start with the LS bit zero. */
1851 /* Find the transition, and check that all bits above are 1's. */
1854 /* Match if all the bits above are 1's (or c is zero). */
1860 /* Like mask64_operand, but allow up to three transitions. This
1861 predicate is used by insn patterns that generate two rldicl or
1862 rldicr machine insns. */
1865 mask64_2_operand (op
, mode
)
1867 enum machine_mode mode ATTRIBUTE_UNUSED
;
1869 if (GET_CODE (op
) == CONST_INT
)
1871 HOST_WIDE_INT c
, lsb
;
1875 /* Disallow all zeros. */
1879 /* We don't change the number of transitions by inverting,
1880 so make sure we start with the LS bit zero. */
1884 /* Find the first transition. */
1887 /* Invert to look for a second transition. */
1890 /* Erase first transition. */
1893 /* Find the second transition. */
1896 /* Invert to look for a third transition. */
1899 /* Erase second transition. */
1902 /* Find the third transition (if any). */
1905 /* Match if all the bits above are 1's (or c is zero). */
1911 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
1912 implement ANDing by the mask IN. */
1914 build_mask64_2_operands (in
, out
)
1918 #if HOST_BITS_PER_WIDE_INT >= 64
1919 unsigned HOST_WIDE_INT c
, lsb
, m1
, m2
;
1922 if (GET_CODE (in
) != CONST_INT
)
1928 /* Assume c initially something like 0x00fff000000fffff. The idea
1929 is to rotate the word so that the middle ^^^^^^ group of zeros
1930 is at the MS end and can be cleared with an rldicl mask. We then
1931 rotate back and clear off the MS ^^ group of zeros with a
1933 c
= ~c
; /* c == 0xff000ffffff00000 */
1934 lsb
= c
& -c
; /* lsb == 0x0000000000100000 */
1935 m1
= -lsb
; /* m1 == 0xfffffffffff00000 */
1936 c
= ~c
; /* c == 0x00fff000000fffff */
1937 c
&= -lsb
; /* c == 0x00fff00000000000 */
1938 lsb
= c
& -c
; /* lsb == 0x0000100000000000 */
1939 c
= ~c
; /* c == 0xff000fffffffffff */
1940 c
&= -lsb
; /* c == 0xff00000000000000 */
1942 while ((lsb
>>= 1) != 0)
1943 shift
++; /* shift == 44 on exit from loop */
1944 m1
<<= 64 - shift
; /* m1 == 0xffffff0000000000 */
1945 m1
= ~m1
; /* m1 == 0x000000ffffffffff */
1946 m2
= ~c
; /* m2 == 0x00ffffffffffffff */
1950 /* Assume c initially something like 0xff000f0000000000. The idea
1951 is to rotate the word so that the ^^^ middle group of zeros
1952 is at the LS end and can be cleared with an rldicr mask. We then
1953 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
1955 lsb
= c
& -c
; /* lsb == 0x0000010000000000 */
1956 m2
= -lsb
; /* m2 == 0xffffff0000000000 */
1957 c
= ~c
; /* c == 0x00fff0ffffffffff */
1958 c
&= -lsb
; /* c == 0x00fff00000000000 */
1959 lsb
= c
& -c
; /* lsb == 0x0000100000000000 */
1960 c
= ~c
; /* c == 0xff000fffffffffff */
1961 c
&= -lsb
; /* c == 0xff00000000000000 */
1963 while ((lsb
>>= 1) != 0)
1964 shift
++; /* shift == 44 on exit from loop */
1965 m1
= ~c
; /* m1 == 0x00ffffffffffffff */
1966 m1
>>= shift
; /* m1 == 0x0000000000000fff */
1967 m1
= ~m1
; /* m1 == 0xfffffffffffff000 */
1970 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
1971 masks will be all 1's. We are guaranteed more than one transition. */
1972 out
[0] = GEN_INT (64 - shift
);
1973 out
[1] = GEN_INT (m1
);
1974 out
[2] = GEN_INT (shift
);
1975 out
[3] = GEN_INT (m2
);
1983 /* Return 1 if the operand is either a non-special register or a constant
1984 that can be used as the operand of a PowerPC64 logical AND insn. */
1987 and64_operand (op
, mode
)
1989 enum machine_mode mode
;
1991 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
1992 return (gpc_reg_operand (op
, mode
) || mask64_operand (op
, mode
));
1994 return (logical_operand (op
, mode
) || mask64_operand (op
, mode
));
1997 /* Like the above, but also match constants that can be implemented
1998 with two rldicl or rldicr insns. */
2001 and64_2_operand (op
, mode
)
2003 enum machine_mode mode
;
2005 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
2006 return gpc_reg_operand (op
, mode
) || mask64_2_operand (op
, mode
);
2008 return logical_operand (op
, mode
) || mask64_2_operand (op
, mode
);
2011 /* Return 1 if the operand is either a non-special register or a
2012 constant that can be used as the operand of an RS/6000 logical AND insn. */
2015 and_operand (op
, mode
)
2017 enum machine_mode mode
;
2019 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
2020 return (gpc_reg_operand (op
, mode
) || mask_operand (op
, mode
));
2022 return (logical_operand (op
, mode
) || mask_operand (op
, mode
));
2025 /* Return 1 if the operand is a general register or memory operand. */
2028 reg_or_mem_operand (op
, mode
)
2030 enum machine_mode mode
;
2032 return (gpc_reg_operand (op
, mode
)
2033 || memory_operand (op
, mode
)
2034 || volatile_mem_operand (op
, mode
));
2037 /* Return 1 if the operand is a general register or memory operand without
2038 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2042 lwa_operand (op
, mode
)
2044 enum machine_mode mode
;
2048 if (reload_completed
&& GET_CODE (inner
) == SUBREG
)
2049 inner
= SUBREG_REG (inner
);
2051 return gpc_reg_operand (inner
, mode
)
2052 || (memory_operand (inner
, mode
)
2053 && GET_CODE (XEXP (inner
, 0)) != PRE_INC
2054 && GET_CODE (XEXP (inner
, 0)) != PRE_DEC
2055 && (GET_CODE (XEXP (inner
, 0)) != PLUS
2056 || GET_CODE (XEXP (XEXP (inner
, 0), 1)) != CONST_INT
2057 || INTVAL (XEXP (XEXP (inner
, 0), 1)) % 4 == 0));
2060 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2063 symbol_ref_operand (op
, mode
)
2065 enum machine_mode mode
;
2067 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
2070 return (GET_CODE (op
) == SYMBOL_REF
);
2073 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2074 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2077 call_operand (op
, mode
)
2079 enum machine_mode mode
;
2081 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
2084 return (GET_CODE (op
) == SYMBOL_REF
2085 || (GET_CODE (op
) == REG
2086 && (REGNO (op
) == LINK_REGISTER_REGNUM
2087 || REGNO (op
) == COUNT_REGISTER_REGNUM
2088 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
)));
2091 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2095 current_file_function_operand (op
, mode
)
2097 enum machine_mode mode ATTRIBUTE_UNUSED
;
2099 if (GET_CODE (op
) == SYMBOL_REF
2100 && (SYMBOL_REF_LOCAL_P (op
)
2101 || (op
== XEXP (DECL_RTL (current_function_decl
), 0))))
2103 #ifdef ENABLE_CHECKING
2104 if (!SYMBOL_REF_FUNCTION_P (op
))
2112 /* Return 1 if this operand is a valid input for a move insn. */
2115 input_operand (op
, mode
)
2117 enum machine_mode mode
;
2119 /* Memory is always valid. */
2120 if (memory_operand (op
, mode
))
2123 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
2124 if (GET_CODE (op
) == CONSTANT_P_RTX
)
2127 /* For floating-point, easy constants are valid. */
2128 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
2130 && easy_fp_constant (op
, mode
))
2133 /* Allow any integer constant. */
2134 if (GET_MODE_CLASS (mode
) == MODE_INT
2135 && (GET_CODE (op
) == CONST_INT
2136 || GET_CODE (op
) == CONST_DOUBLE
))
2139 /* Allow easy vector constants. */
2140 if (GET_CODE (op
) == CONST_VECTOR
2141 && easy_vector_constant (op
, mode
))
2144 /* For floating-point or multi-word mode, the only remaining valid type
2146 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
2147 || GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2148 return register_operand (op
, mode
);
2150 /* The only cases left are integral modes one word or smaller (we
2151 do not get called for MODE_CC values). These can be in any
2153 if (register_operand (op
, mode
))
2156 /* A SYMBOL_REF referring to the TOC is valid. */
2157 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op
))
2160 /* A constant pool expression (relative to the TOC) is valid */
2161 if (TOC_RELATIVE_EXPR_P (op
))
2164 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2166 if (DEFAULT_ABI
== ABI_V4
2167 && (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == CONST
)
2168 && small_data_operand (op
, Pmode
))
2174 /* Return 1 for an operand in small memory on V.4/eabi. */
2177 small_data_operand (op
, mode
)
2178 rtx op ATTRIBUTE_UNUSED
;
2179 enum machine_mode mode ATTRIBUTE_UNUSED
;
2184 if (rs6000_sdata
== SDATA_NONE
|| rs6000_sdata
== SDATA_DATA
)
2187 if (DEFAULT_ABI
!= ABI_V4
)
2190 if (GET_CODE (op
) == SYMBOL_REF
)
2193 else if (GET_CODE (op
) != CONST
2194 || GET_CODE (XEXP (op
, 0)) != PLUS
2195 || GET_CODE (XEXP (XEXP (op
, 0), 0)) != SYMBOL_REF
2196 || GET_CODE (XEXP (XEXP (op
, 0), 1)) != CONST_INT
)
2201 rtx sum
= XEXP (op
, 0);
2202 HOST_WIDE_INT summand
;
2204 /* We have to be careful here, because it is the referenced address
2205 that must be 32k from _SDA_BASE_, not just the symbol. */
2206 summand
= INTVAL (XEXP (sum
, 1));
2207 if (summand
< 0 || summand
> g_switch_value
)
2210 sym_ref
= XEXP (sum
, 0);
2213 return SYMBOL_REF_SMALL_V4_P (sym_ref
);
2220 constant_pool_expr_1 (op
, have_sym
, have_toc
)
2225 switch (GET_CODE(op
))
2228 if (CONSTANT_POOL_ADDRESS_P (op
))
2230 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op
), Pmode
))
2238 else if (! strcmp (XSTR (op
, 0), toc_label_name
))
2247 return (constant_pool_expr_1 (XEXP (op
, 0), have_sym
, have_toc
)
2248 && constant_pool_expr_1 (XEXP (op
, 1), have_sym
, have_toc
));
2250 return constant_pool_expr_1 (XEXP (op
, 0), have_sym
, have_toc
);
2259 constant_pool_expr_p (op
)
2264 return constant_pool_expr_1 (op
, &have_sym
, &have_toc
) && have_sym
;
2268 toc_relative_expr_p (op
)
2273 return constant_pool_expr_1 (op
, &have_sym
, &have_toc
) && have_toc
;
2276 /* Try machine-dependent ways of modifying an illegitimate address
2277 to be legitimate. If we find one, return the new, valid address.
2278 This is used from only one place: `memory_address' in explow.c.
2280 OLDX is the address as it was before break_out_memory_refs was
2281 called. In some cases it is useful to look at this to decide what
2284 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2286 It is always safe for this function to do nothing. It exists to
2287 recognize opportunities to optimize the output.
2289 On RS/6000, first check for the sum of a register with a constant
2290 integer that is out of range. If so, generate code to add the
2291 constant with the low-order 16 bits masked to the register and force
2292 this result into another register (this can be done with `cau').
2293 Then generate an address of REG+(CONST&0xffff), allowing for the
2294 possibility of bit 16 being a one.
2296 Then check for the sum of a register and something not constant, try to
2297 load the other things into a register and return the sum. */
2299 rs6000_legitimize_address (x
, oldx
, mode
)
2301 rtx oldx ATTRIBUTE_UNUSED
;
2302 enum machine_mode mode
;
2304 if (GET_CODE (x
) == PLUS
2305 && GET_CODE (XEXP (x
, 0)) == REG
2306 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2307 && (unsigned HOST_WIDE_INT
) (INTVAL (XEXP (x
, 1)) + 0x8000) >= 0x10000)
2309 HOST_WIDE_INT high_int
, low_int
;
2311 low_int
= ((INTVAL (XEXP (x
, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2312 high_int
= INTVAL (XEXP (x
, 1)) - low_int
;
2313 sum
= force_operand (gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
2314 GEN_INT (high_int
)), 0);
2315 return gen_rtx_PLUS (Pmode
, sum
, GEN_INT (low_int
));
2317 else if (GET_CODE (x
) == PLUS
2318 && GET_CODE (XEXP (x
, 0)) == REG
2319 && GET_CODE (XEXP (x
, 1)) != CONST_INT
2320 && GET_MODE_NUNITS (mode
) == 1
2321 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
)
2323 || (mode
!= DFmode
&& mode
!= TFmode
))
2324 && (TARGET_POWERPC64
|| mode
!= DImode
)
2327 return gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
2328 force_reg (Pmode
, force_operand (XEXP (x
, 1), 0)));
2330 else if (ALTIVEC_VECTOR_MODE (mode
))
2334 /* Make sure both operands are registers. */
2335 if (GET_CODE (x
) == PLUS
)
2336 return gen_rtx_PLUS (Pmode
, force_reg (Pmode
, XEXP (x
, 0)),
2337 force_reg (Pmode
, XEXP (x
, 1)));
2339 reg
= force_reg (Pmode
, x
);
2342 else if (SPE_VECTOR_MODE (mode
))
2344 /* We accept [reg + reg] and [reg + OFFSET]. */
2346 if (GET_CODE (x
) == PLUS
)
2348 rtx op1
= XEXP (x
, 0);
2349 rtx op2
= XEXP (x
, 1);
2351 op1
= force_reg (Pmode
, op1
);
2353 if (GET_CODE (op2
) != REG
2354 && (GET_CODE (op2
) != CONST_INT
2355 || !SPE_CONST_OFFSET_OK (INTVAL (op2
))))
2356 op2
= force_reg (Pmode
, op2
);
2358 return gen_rtx_PLUS (Pmode
, op1
, op2
);
2361 return force_reg (Pmode
, x
);
2367 && GET_CODE (x
) != CONST_INT
2368 && GET_CODE (x
) != CONST_DOUBLE
2370 && GET_MODE_NUNITS (mode
) == 1
2371 && (GET_MODE_BITSIZE (mode
) <= 32
2372 || ((TARGET_HARD_FLOAT
&& TARGET_FPRS
) && mode
== DFmode
)))
2374 rtx reg
= gen_reg_rtx (Pmode
);
2375 emit_insn (gen_elf_high (reg
, (x
)));
2376 return gen_rtx_LO_SUM (Pmode
, reg
, (x
));
2378 else if (TARGET_MACHO
&& TARGET_32BIT
&& TARGET_NO_TOC
2381 && ! MACHO_DYNAMIC_NO_PIC_P
2383 && GET_CODE (x
) != CONST_INT
2384 && GET_CODE (x
) != CONST_DOUBLE
2386 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
) || mode
!= DFmode
)
2390 rtx reg
= gen_reg_rtx (Pmode
);
2391 emit_insn (gen_macho_high (reg
, (x
)));
2392 return gen_rtx_LO_SUM (Pmode
, reg
, (x
));
2395 && CONSTANT_POOL_EXPR_P (x
)
2396 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x
), Pmode
))
2398 return create_TOC_reference (x
);
2404 /* The convention appears to be to define this wherever it is used.
2405 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2406 is now used here. */
2407 #ifndef REG_MODE_OK_FOR_BASE_P
2408 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2411 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2412 replace the input X, or the original X if no replacement is called for.
2413 The output parameter *WIN is 1 if the calling macro should goto WIN,
2416 For RS/6000, we wish to handle large displacements off a base
2417 register by splitting the addend across an addiu/addis and the mem insn.
2418 This cuts number of extra insns needed from 3 to 1.
2420 On Darwin, we use this to generate code for floating point constants.
2421 A movsf_low is generated so we wind up with 2 instructions rather than 3.
2422 The Darwin code is inside #if TARGET_MACHO because only then is
2423 machopic_function_base_name() defined. */
2425 rs6000_legitimize_reload_address (x
, mode
, opnum
, type
, ind_levels
, win
)
2427 enum machine_mode mode
;
2430 int ind_levels ATTRIBUTE_UNUSED
;
2433 /* We must recognize output that we have already generated ourselves. */
2434 if (GET_CODE (x
) == PLUS
2435 && GET_CODE (XEXP (x
, 0)) == PLUS
2436 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == REG
2437 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
2438 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2440 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2441 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
2442 opnum
, (enum reload_type
)type
);
2448 if (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
2449 && GET_CODE (x
) == LO_SUM
2450 && GET_CODE (XEXP (x
, 0)) == PLUS
2451 && XEXP (XEXP (x
, 0), 0) == pic_offset_table_rtx
2452 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == HIGH
2453 && GET_CODE (XEXP (XEXP (XEXP (x
, 0), 1), 0)) == CONST
2454 && XEXP (XEXP (XEXP (x
, 0), 1), 0) == XEXP (x
, 1)
2455 && GET_CODE (XEXP (XEXP (x
, 1), 0)) == MINUS
2456 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 0)) == SYMBOL_REF
2457 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 1)) == SYMBOL_REF
)
2459 /* Result of previous invocation of this function on Darwin
2460 floating point constant. */
2461 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2462 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
2463 opnum
, (enum reload_type
)type
);
2468 if (GET_CODE (x
) == PLUS
2469 && GET_CODE (XEXP (x
, 0)) == REG
2470 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
2471 && REG_MODE_OK_FOR_BASE_P (XEXP (x
, 0), mode
)
2472 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2473 && !SPE_VECTOR_MODE (mode
)
2474 && !ALTIVEC_VECTOR_MODE (mode
))
2476 HOST_WIDE_INT val
= INTVAL (XEXP (x
, 1));
2477 HOST_WIDE_INT low
= ((val
& 0xffff) ^ 0x8000) - 0x8000;
2479 = (((val
- low
) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2481 /* Check for 32-bit overflow. */
2482 if (high
+ low
!= val
)
2488 /* Reload the high part into a base reg; leave the low part
2489 in the mem directly. */
2491 x
= gen_rtx_PLUS (GET_MODE (x
),
2492 gen_rtx_PLUS (GET_MODE (x
), XEXP (x
, 0),
2496 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2497 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
2498 opnum
, (enum reload_type
)type
);
2503 if (GET_CODE (x
) == SYMBOL_REF
2504 && DEFAULT_ABI
== ABI_DARWIN
2505 && !ALTIVEC_VECTOR_MODE (mode
)
2508 /* Darwin load of floating point constant. */
2509 rtx offset
= gen_rtx (CONST
, Pmode
,
2510 gen_rtx (MINUS
, Pmode
, x
,
2511 gen_rtx (SYMBOL_REF
, Pmode
,
2512 machopic_function_base_name ())));
2513 x
= gen_rtx (LO_SUM
, GET_MODE (x
),
2514 gen_rtx (PLUS
, Pmode
, pic_offset_table_rtx
,
2515 gen_rtx (HIGH
, Pmode
, offset
)), offset
);
2516 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2517 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
2518 opnum
, (enum reload_type
)type
);
2522 if (GET_CODE (x
) == SYMBOL_REF
2523 && DEFAULT_ABI
== ABI_DARWIN
2524 && !ALTIVEC_VECTOR_MODE (mode
)
2525 && MACHO_DYNAMIC_NO_PIC_P
)
2527 /* Darwin load of floating point constant. */
2528 x
= gen_rtx (LO_SUM
, GET_MODE (x
),
2529 gen_rtx (HIGH
, Pmode
, x
), x
);
2530 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2531 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
2532 opnum
, (enum reload_type
)type
);
2538 && CONSTANT_POOL_EXPR_P (x
)
2539 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x
), mode
))
2541 (x
) = create_TOC_reference (x
);
2549 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2550 that is a valid memory address for an instruction.
2551 The MODE argument is the machine mode for the MEM expression
2552 that wants to use this address.
2554 On the RS/6000, there are four valid addresses: a SYMBOL_REF that
2555 refers to a constant pool entry of an address (or the sum of it
2556 plus a constant), a short (16-bit signed) constant plus a register,
2557 the sum of two registers, or a register indirect, possibly with an
2558 auto-increment. For DFmode and DImode with a constant plus register,
2559 we must ensure that both words are addressable or PowerPC64 with offset
2562 For modes spanning multiple registers (DFmode in 32-bit GPRs,
2563 32-bit DImode, TImode), indexed addressing cannot be used because
2564 adjacent memory cells are accessed by adding word-sized offsets
2565 during assembly output. */
2567 rs6000_legitimate_address (mode
, x
, reg_ok_strict
)
2568 enum machine_mode mode
;
2572 if (LEGITIMATE_INDIRECT_ADDRESS_P (x
, reg_ok_strict
))
2574 if ((GET_CODE (x
) == PRE_INC
|| GET_CODE (x
) == PRE_DEC
)
2575 && !ALTIVEC_VECTOR_MODE (mode
)
2576 && !SPE_VECTOR_MODE (mode
)
2578 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x
, 0), reg_ok_strict
))
2580 if (LEGITIMATE_SMALL_DATA_P (mode
, x
))
2582 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x
))
2584 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2586 && GET_CODE (x
) == PLUS
2587 && GET_CODE (XEXP (x
, 0)) == REG
2588 && XEXP (x
, 0) == virtual_stack_vars_rtx
2589 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2591 if (LEGITIMATE_OFFSET_ADDRESS_P (mode
, x
, reg_ok_strict
))
2594 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
)
2596 || (mode
!= DFmode
&& mode
!= TFmode
))
2597 && (TARGET_POWERPC64
|| mode
!= DImode
)
2598 && LEGITIMATE_INDEXED_ADDRESS_P (x
, reg_ok_strict
))
2600 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode
, x
, reg_ok_strict
))
2605 /* Try to output insns to set TARGET equal to the constant C if it can
2606 be done in less than N insns. Do all computations in MODE.
2607 Returns the place where the output has been placed if it can be
2608 done and the insns have been emitted. If it would take more than N
2609 insns, zero is returned and no insns are emitted. */
2612 rs6000_emit_set_const (dest
, mode
, source
, n
)
2614 enum machine_mode mode
;
2615 int n ATTRIBUTE_UNUSED
;
2617 rtx result
, insn
, set
;
2618 HOST_WIDE_INT c0
, c1
;
2620 if (mode
== QImode
|| mode
== HImode
)
2623 dest
= gen_reg_rtx (mode
);
2624 emit_insn (gen_rtx_SET (VOIDmode
, dest
, source
));
2627 else if (mode
== SImode
)
2629 result
= no_new_pseudos
? dest
: gen_reg_rtx (SImode
);
2631 emit_insn (gen_rtx_SET (VOIDmode
, result
,
2632 GEN_INT (INTVAL (source
)
2633 & (~ (HOST_WIDE_INT
) 0xffff))));
2634 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
2635 gen_rtx_IOR (SImode
, result
,
2636 GEN_INT (INTVAL (source
) & 0xffff))));
2639 else if (mode
== DImode
)
2641 if (GET_CODE (source
) == CONST_INT
)
2643 c0
= INTVAL (source
);
2646 else if (GET_CODE (source
) == CONST_DOUBLE
)
2648 #if HOST_BITS_PER_WIDE_INT >= 64
2649 c0
= CONST_DOUBLE_LOW (source
);
2652 c0
= CONST_DOUBLE_LOW (source
);
2653 c1
= CONST_DOUBLE_HIGH (source
);
2659 result
= rs6000_emit_set_long_const (dest
, c0
, c1
);
2664 insn
= get_last_insn ();
2665 set
= single_set (insn
);
2666 if (! CONSTANT_P (SET_SRC (set
)))
2667 set_unique_reg_note (insn
, REG_EQUAL
, source
);
2672 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2673 fall back to a straight forward decomposition. We do this to avoid
2674 exponential run times encountered when looking for longer sequences
2675 with rs6000_emit_set_const. */
2677 rs6000_emit_set_long_const (dest
, c1
, c2
)
2679 HOST_WIDE_INT c1
, c2
;
2681 if (!TARGET_POWERPC64
)
2683 rtx operand1
, operand2
;
2685 operand1
= operand_subword_force (dest
, WORDS_BIG_ENDIAN
== 0,
2687 operand2
= operand_subword_force (dest
, WORDS_BIG_ENDIAN
!= 0,
2689 emit_move_insn (operand1
, GEN_INT (c1
));
2690 emit_move_insn (operand2
, GEN_INT (c2
));
2694 HOST_WIDE_INT ud1
, ud2
, ud3
, ud4
;
2697 ud2
= (c1
& 0xffff0000) >> 16;
2698 #if HOST_BITS_PER_WIDE_INT >= 64
2702 ud4
= (c2
& 0xffff0000) >> 16;
2704 if ((ud4
== 0xffff && ud3
== 0xffff && ud2
== 0xffff && (ud1
& 0x8000))
2705 || (ud4
== 0 && ud3
== 0 && ud2
== 0 && ! (ud1
& 0x8000)))
2708 emit_move_insn (dest
, GEN_INT (((ud1
^ 0x8000) - 0x8000)));
2710 emit_move_insn (dest
, GEN_INT (ud1
));
2713 else if ((ud4
== 0xffff && ud3
== 0xffff && (ud2
& 0x8000))
2714 || (ud4
== 0 && ud3
== 0 && ! (ud2
& 0x8000)))
2717 emit_move_insn (dest
, GEN_INT (((ud2
<< 16) ^ 0x80000000)
2720 emit_move_insn (dest
, GEN_INT (ud2
<< 16));
2722 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
2724 else if ((ud4
== 0xffff && (ud3
& 0x8000))
2725 || (ud4
== 0 && ! (ud3
& 0x8000)))
2728 emit_move_insn (dest
, GEN_INT (((ud3
<< 16) ^ 0x80000000)
2731 emit_move_insn (dest
, GEN_INT (ud3
<< 16));
2734 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud2
)));
2735 emit_move_insn (dest
, gen_rtx_ASHIFT (DImode
, dest
, GEN_INT (16)));
2737 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
2742 emit_move_insn (dest
, GEN_INT (((ud4
<< 16) ^ 0x80000000)
2745 emit_move_insn (dest
, GEN_INT (ud4
<< 16));
2748 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud3
)));
2750 emit_move_insn (dest
, gen_rtx_ASHIFT (DImode
, dest
, GEN_INT (32)));
2752 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
,
2753 GEN_INT (ud2
<< 16)));
2755 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
2761 /* Emit a move from SOURCE to DEST in mode MODE. */
2763 rs6000_emit_move (dest
, source
, mode
)
2766 enum machine_mode mode
;
2770 operands
[1] = source
;
2772 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
2773 if (GET_CODE (operands
[1]) == CONST_DOUBLE
2774 && ! FLOAT_MODE_P (mode
)
2775 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
2777 /* FIXME. This should never happen. */
2778 /* Since it seems that it does, do the safe thing and convert
2780 operands
[1] = gen_int_mode (CONST_DOUBLE_LOW (operands
[1]), mode
);
2782 if (GET_CODE (operands
[1]) == CONST_DOUBLE
2783 && ! FLOAT_MODE_P (mode
)
2784 && ((CONST_DOUBLE_HIGH (operands
[1]) == 0
2785 && CONST_DOUBLE_LOW (operands
[1]) >= 0)
2786 || (CONST_DOUBLE_HIGH (operands
[1]) == -1
2787 && CONST_DOUBLE_LOW (operands
[1]) < 0)))
2790 /* Check if GCC is setting up a block move that will end up using FP
2791 registers as temporaries. We must make sure this is acceptable. */
2792 if (GET_CODE (operands
[0]) == MEM
2793 && GET_CODE (operands
[1]) == MEM
2795 && (SLOW_UNALIGNED_ACCESS (DImode
, MEM_ALIGN (operands
[0]))
2796 || SLOW_UNALIGNED_ACCESS (DImode
, MEM_ALIGN (operands
[1])))
2797 && ! (SLOW_UNALIGNED_ACCESS (SImode
, (MEM_ALIGN (operands
[0]) > 32
2798 ? 32 : MEM_ALIGN (operands
[0])))
2799 || SLOW_UNALIGNED_ACCESS (SImode
, (MEM_ALIGN (operands
[1]) > 32
2801 : MEM_ALIGN (operands
[1]))))
2802 && ! MEM_VOLATILE_P (operands
[0])
2803 && ! MEM_VOLATILE_P (operands
[1]))
2805 emit_move_insn (adjust_address (operands
[0], SImode
, 0),
2806 adjust_address (operands
[1], SImode
, 0));
2807 emit_move_insn (adjust_address (operands
[0], SImode
, 4),
2808 adjust_address (operands
[1], SImode
, 4));
2812 if (!no_new_pseudos
)
2814 if (GET_CODE (operands
[1]) == MEM
&& optimize
> 0
2815 && (mode
== QImode
|| mode
== HImode
|| mode
== SImode
)
2816 && GET_MODE_SIZE (mode
) < GET_MODE_SIZE (word_mode
))
2818 rtx reg
= gen_reg_rtx (word_mode
);
2820 emit_insn (gen_rtx_SET (word_mode
, reg
,
2821 gen_rtx_ZERO_EXTEND (word_mode
,
2823 operands
[1] = gen_lowpart (mode
, reg
);
2825 if (GET_CODE (operands
[0]) != REG
)
2826 operands
[1] = force_reg (mode
, operands
[1]);
2829 if (mode
== SFmode
&& ! TARGET_POWERPC
2830 && TARGET_HARD_FLOAT
&& TARGET_FPRS
2831 && GET_CODE (operands
[0]) == MEM
)
2835 if (reload_in_progress
|| reload_completed
)
2836 regnum
= true_regnum (operands
[1]);
2837 else if (GET_CODE (operands
[1]) == REG
)
2838 regnum
= REGNO (operands
[1]);
2842 /* If operands[1] is a register, on POWER it may have
2843 double-precision data in it, so truncate it to single
2845 if (FP_REGNO_P (regnum
) || regnum
>= FIRST_PSEUDO_REGISTER
)
2848 newreg
= (no_new_pseudos
? operands
[1] : gen_reg_rtx (mode
));
2849 emit_insn (gen_aux_truncdfsf2 (newreg
, operands
[1]));
2850 operands
[1] = newreg
;
2854 /* Handle the case where reload calls us with an invalid address. */
2855 if (reload_in_progress
&& mode
== Pmode
2856 && (! general_operand (operands
[1], mode
)
2857 || ! nonimmediate_operand (operands
[0], mode
)))
2860 /* Handle the case of CONSTANT_P_RTX. */
2861 if (GET_CODE (operands
[1]) == CONSTANT_P_RTX
)
2864 /* FIXME: In the long term, this switch statement should go away
2865 and be replaced by a sequence of tests based on things like
2871 if (CONSTANT_P (operands
[1])
2872 && GET_CODE (operands
[1]) != CONST_INT
)
2873 operands
[1] = force_const_mem (mode
, operands
[1]);
2879 if (CONSTANT_P (operands
[1])
2880 && ! easy_fp_constant (operands
[1], mode
))
2881 operands
[1] = force_const_mem (mode
, operands
[1]);
2892 if (CONSTANT_P (operands
[1])
2893 && !easy_vector_constant (operands
[1], mode
))
2894 operands
[1] = force_const_mem (mode
, operands
[1]);
2899 /* Use default pattern for address of ELF small data */
2902 && DEFAULT_ABI
== ABI_V4
2903 && (GET_CODE (operands
[1]) == SYMBOL_REF
2904 || GET_CODE (operands
[1]) == CONST
)
2905 && small_data_operand (operands
[1], mode
))
2907 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2911 if (DEFAULT_ABI
== ABI_V4
2912 && mode
== Pmode
&& mode
== SImode
2913 && flag_pic
== 1 && got_operand (operands
[1], mode
))
2915 emit_insn (gen_movsi_got (operands
[0], operands
[1]));
2919 if ((TARGET_ELF
|| DEFAULT_ABI
== ABI_DARWIN
)
2923 && CONSTANT_P (operands
[1])
2924 && GET_CODE (operands
[1]) != HIGH
2925 && GET_CODE (operands
[1]) != CONST_INT
)
2927 rtx target
= (no_new_pseudos
? operands
[0] : gen_reg_rtx (mode
));
2929 /* If this is a function address on -mcall-aixdesc,
2930 convert it to the address of the descriptor. */
2931 if (DEFAULT_ABI
== ABI_AIX
2932 && GET_CODE (operands
[1]) == SYMBOL_REF
2933 && XSTR (operands
[1], 0)[0] == '.')
2935 const char *name
= XSTR (operands
[1], 0);
2937 while (*name
== '.')
2939 new_ref
= gen_rtx_SYMBOL_REF (Pmode
, name
);
2940 CONSTANT_POOL_ADDRESS_P (new_ref
)
2941 = CONSTANT_POOL_ADDRESS_P (operands
[1]);
2942 SYMBOL_REF_FLAGS (new_ref
) = SYMBOL_REF_FLAGS (operands
[1]);
2943 SYMBOL_REF_USED (new_ref
) = SYMBOL_REF_USED (operands
[1]);
2944 SYMBOL_REF_DECL (new_ref
) = SYMBOL_REF_DECL (operands
[1]);
2945 operands
[1] = new_ref
;
2948 if (DEFAULT_ABI
== ABI_DARWIN
)
2951 if (MACHO_DYNAMIC_NO_PIC_P
)
2953 /* Take care of any required data indirection. */
2954 operands
[1] = rs6000_machopic_legitimize_pic_address (
2955 operands
[1], mode
, operands
[0]);
2956 if (operands
[0] != operands
[1])
2957 emit_insn (gen_rtx_SET (VOIDmode
,
2958 operands
[0], operands
[1]));
2962 emit_insn (gen_macho_high (target
, operands
[1]));
2963 emit_insn (gen_macho_low (operands
[0], target
, operands
[1]));
2967 emit_insn (gen_elf_high (target
, operands
[1]));
2968 emit_insn (gen_elf_low (operands
[0], target
, operands
[1]));
2972 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2973 and we have put it in the TOC, we just need to make a TOC-relative
2976 && GET_CODE (operands
[1]) == SYMBOL_REF
2977 && CONSTANT_POOL_EXPR_P (operands
[1])
2978 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands
[1]),
2979 get_pool_mode (operands
[1])))
2981 operands
[1] = create_TOC_reference (operands
[1]);
2983 else if (mode
== Pmode
2984 && CONSTANT_P (operands
[1])
2985 && ((GET_CODE (operands
[1]) != CONST_INT
2986 && ! easy_fp_constant (operands
[1], mode
))
2987 || (GET_CODE (operands
[1]) == CONST_INT
2988 && num_insns_constant (operands
[1], mode
) > 2)
2989 || (GET_CODE (operands
[0]) == REG
2990 && FP_REGNO_P (REGNO (operands
[0]))))
2991 && GET_CODE (operands
[1]) != HIGH
2992 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands
[1])
2993 && ! TOC_RELATIVE_EXPR_P (operands
[1]))
2995 /* Emit a USE operation so that the constant isn't deleted if
2996 expensive optimizations are turned on because nobody
2997 references it. This should only be done for operands that
2998 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2999 This should not be done for operands that contain LABEL_REFs.
3000 For now, we just handle the obvious case. */
3001 if (GET_CODE (operands
[1]) != LABEL_REF
)
3002 emit_insn (gen_rtx_USE (VOIDmode
, operands
[1]));
3005 /* Darwin uses a special PIC legitimizer. */
3006 if (DEFAULT_ABI
== ABI_DARWIN
&& MACHOPIC_INDIRECT
)
3009 rs6000_machopic_legitimize_pic_address (operands
[1], mode
,
3011 if (operands
[0] != operands
[1])
3012 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
3017 /* If we are to limit the number of things we put in the TOC and
3018 this is a symbol plus a constant we can add in one insn,
3019 just put the symbol in the TOC and add the constant. Don't do
3020 this if reload is in progress. */
3021 if (GET_CODE (operands
[1]) == CONST
3022 && TARGET_NO_SUM_IN_TOC
&& ! reload_in_progress
3023 && GET_CODE (XEXP (operands
[1], 0)) == PLUS
3024 && add_operand (XEXP (XEXP (operands
[1], 0), 1), mode
)
3025 && (GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == LABEL_REF
3026 || GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == SYMBOL_REF
)
3027 && ! side_effects_p (operands
[0]))
3030 force_const_mem (mode
, XEXP (XEXP (operands
[1], 0), 0));
3031 rtx other
= XEXP (XEXP (operands
[1], 0), 1);
3033 sym
= force_reg (mode
, sym
);
3035 emit_insn (gen_addsi3 (operands
[0], sym
, other
));
3037 emit_insn (gen_adddi3 (operands
[0], sym
, other
));
3041 operands
[1] = force_const_mem (mode
, operands
[1]);
3044 && CONSTANT_POOL_EXPR_P (XEXP (operands
[1], 0))
3045 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3046 get_pool_constant (XEXP (operands
[1], 0)),
3047 get_pool_mode (XEXP (operands
[1], 0))))
3050 = gen_rtx_MEM (mode
,
3051 create_TOC_reference (XEXP (operands
[1], 0)));
3052 set_mem_alias_set (operands
[1], get_TOC_alias_set ());
3053 RTX_UNCHANGING_P (operands
[1]) = 1;
3059 if (GET_CODE (operands
[0]) == MEM
3060 && GET_CODE (XEXP (operands
[0], 0)) != REG
3061 && ! reload_in_progress
)
3063 = replace_equiv_address (operands
[0],
3064 copy_addr_to_reg (XEXP (operands
[0], 0)));
3066 if (GET_CODE (operands
[1]) == MEM
3067 && GET_CODE (XEXP (operands
[1], 0)) != REG
3068 && ! reload_in_progress
)
3070 = replace_equiv_address (operands
[1],
3071 copy_addr_to_reg (XEXP (operands
[1], 0)));
3074 emit_insn (gen_rtx_PARALLEL (VOIDmode
,
3076 gen_rtx_SET (VOIDmode
,
3077 operands
[0], operands
[1]),
3078 gen_rtx_CLOBBER (VOIDmode
,
3079 gen_rtx_SCRATCH (SImode
)))));
3088 /* Above, we may have called force_const_mem which may have returned
3089 an invalid address. If we can, fix this up; otherwise, reload will
3090 have to deal with it. */
3091 if (GET_CODE (operands
[1]) == MEM
&& ! reload_in_progress
)
3092 operands
[1] = validize_mem (operands
[1]);
3095 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
3098 /* Initialize a variable CUM of type CUMULATIVE_ARGS
3099 for a call to a function whose data type is FNTYPE.
3100 For a library call, FNTYPE is 0.
3102 For incoming args we set the number of arguments in the prototype large
3103 so we never return a PARALLEL. */
3106 init_cumulative_args (cum
, fntype
, libname
, incoming
)
3107 CUMULATIVE_ARGS
*cum
;
3109 rtx libname ATTRIBUTE_UNUSED
;
3112 static CUMULATIVE_ARGS zero_cumulative
;
3114 *cum
= zero_cumulative
;
3116 cum
->fregno
= FP_ARG_MIN_REG
;
3117 cum
->vregno
= ALTIVEC_ARG_MIN_REG
;
3118 cum
->prototype
= (fntype
&& TYPE_ARG_TYPES (fntype
));
3119 cum
->call_cookie
= CALL_NORMAL
;
3120 cum
->sysv_gregno
= GP_ARG_MIN_REG
;
3123 cum
->nargs_prototype
= 1000; /* don't return a PARALLEL */
3125 else if (cum
->prototype
)
3126 cum
->nargs_prototype
= (list_length (TYPE_ARG_TYPES (fntype
)) - 1
3127 + (TYPE_MODE (TREE_TYPE (fntype
)) == BLKmode
3128 || RETURN_IN_MEMORY (TREE_TYPE (fntype
))));
3131 cum
->nargs_prototype
= 0;
3133 cum
->orig_nargs
= cum
->nargs_prototype
;
3135 /* Check for a longcall attribute. */
3137 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype
))
3138 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype
)))
3139 cum
->call_cookie
= CALL_LONG
;
3141 if (TARGET_DEBUG_ARG
)
3143 fprintf (stderr
, "\ninit_cumulative_args:");
3146 tree ret_type
= TREE_TYPE (fntype
);
3147 fprintf (stderr
, " ret code = %s,",
3148 tree_code_name
[ (int)TREE_CODE (ret_type
) ]);
3151 if (cum
->call_cookie
& CALL_LONG
)
3152 fprintf (stderr
, " longcall,");
3154 fprintf (stderr
, " proto = %d, nargs = %d\n",
3155 cum
->prototype
, cum
->nargs_prototype
);
3159 /* If defined, a C expression which determines whether, and in which
3160 direction, to pad out an argument with extra space. The value
3161 should be of type `enum direction': either `upward' to pad above
3162 the argument, `downward' to pad below, or `none' to inhibit
3165 For the AIX ABI structs are always stored left shifted in their
3169 function_arg_padding (mode
, type
)
3170 enum machine_mode mode
;
3173 if (type
!= 0 && AGGREGATE_TYPE_P (type
))
3176 /* This is the default definition. */
3177 return (! BYTES_BIG_ENDIAN
3180 ? (type
&& TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
3181 && int_size_in_bytes (type
) < (PARM_BOUNDARY
/ BITS_PER_UNIT
))
3182 : GET_MODE_BITSIZE (mode
) < PARM_BOUNDARY
)
3183 ? downward
: upward
));
3186 /* If defined, a C expression that gives the alignment boundary, in bits,
3187 of an argument with the specified mode and type. If it is not defined,
3188 PARM_BOUNDARY is used for all arguments.
3190 V.4 wants long longs to be double word aligned. */
3193 function_arg_boundary (mode
, type
)
3194 enum machine_mode mode
;
3195 tree type ATTRIBUTE_UNUSED
;
3197 if (DEFAULT_ABI
== ABI_V4
&& (mode
== DImode
|| mode
== DFmode
))
3199 else if (SPE_VECTOR_MODE (mode
))
3201 else if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
3204 return PARM_BOUNDARY
;
3207 /* Update the data in CUM to advance over an argument
3208 of mode MODE and data type TYPE.
3209 (TYPE is null for libcalls where that information may not be available.) */
3212 function_arg_advance (cum
, mode
, type
, named
)
3213 CUMULATIVE_ARGS
*cum
;
3214 enum machine_mode mode
;
3218 cum
->nargs_prototype
--;
3220 if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
3222 if (cum
->vregno
<= ALTIVEC_ARG_MAX_REG
&& cum
->nargs_prototype
>= 0)
3225 cum
->words
+= RS6000_ARG_SIZE (mode
, type
);
3227 else if (TARGET_SPE_ABI
&& TARGET_SPE
&& SPE_VECTOR_MODE (mode
)
3228 && named
&& cum
->sysv_gregno
<= GP_ARG_MAX_REG
)
3230 else if (DEFAULT_ABI
== ABI_V4
)
3232 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
3233 && (mode
== SFmode
|| mode
== DFmode
))
3235 if (cum
->fregno
<= FP_ARG_V4_MAX_REG
)
3240 cum
->words
+= cum
->words
& 1;
3241 cum
->words
+= RS6000_ARG_SIZE (mode
, type
);
3247 int gregno
= cum
->sysv_gregno
;
3249 /* Aggregates and IEEE quad get passed by reference. */
3250 if ((type
&& AGGREGATE_TYPE_P (type
))
3254 n_words
= RS6000_ARG_SIZE (mode
, type
);
3256 /* Long long and SPE vectors are put in odd registers. */
3257 if (n_words
== 2 && (gregno
& 1) == 0)
3260 /* Long long and SPE vectors are not split between registers
3262 if (gregno
+ n_words
- 1 > GP_ARG_MAX_REG
)
3264 /* Long long is aligned on the stack. */
3266 cum
->words
+= cum
->words
& 1;
3267 cum
->words
+= n_words
;
3270 /* Note: continuing to accumulate gregno past when we've started
3271 spilling to the stack indicates the fact that we've started
3272 spilling to the stack to expand_builtin_saveregs. */
3273 cum
->sysv_gregno
= gregno
+ n_words
;
3276 if (TARGET_DEBUG_ARG
)
3278 fprintf (stderr
, "function_adv: words = %2d, fregno = %2d, ",
3279 cum
->words
, cum
->fregno
);
3280 fprintf (stderr
, "gregno = %2d, nargs = %4d, proto = %d, ",
3281 cum
->sysv_gregno
, cum
->nargs_prototype
, cum
->prototype
);
3282 fprintf (stderr
, "mode = %4s, named = %d\n",
3283 GET_MODE_NAME (mode
), named
);
3288 int align
= (TARGET_32BIT
&& (cum
->words
& 1) != 0
3289 && function_arg_boundary (mode
, type
) == 64) ? 1 : 0;
3291 cum
->words
+= align
+ RS6000_ARG_SIZE (mode
, type
);
3293 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
3294 && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3295 cum
->fregno
+= (mode
== TFmode
? 2 : 1);
3297 if (TARGET_DEBUG_ARG
)
3299 fprintf (stderr
, "function_adv: words = %2d, fregno = %2d, ",
3300 cum
->words
, cum
->fregno
);
3301 fprintf (stderr
, "nargs = %4d, proto = %d, mode = %4s, ",
3302 cum
->nargs_prototype
, cum
->prototype
, GET_MODE_NAME (mode
));
3303 fprintf (stderr
, "named = %d, align = %d\n", named
, align
);
3308 /* Determine where to put an argument to a function.
3309 Value is zero to push the argument on the stack,
3310 or a hard register in which to store the argument.
3312 MODE is the argument's machine mode.
3313 TYPE is the data type of the argument (as a tree).
3314 This is null for libcalls where that information may
3316 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3317 the preceding args and about the function being called.
3318 NAMED is nonzero if this argument is a named parameter
3319 (otherwise it is an extra parameter matching an ellipsis).
3321 On RS/6000 the first eight words of non-FP are normally in registers
3322 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3323 Under V.4, the first 8 FP args are in registers.
3325 If this is floating-point and no prototype is specified, we use
3326 both an FP and integer register (or possibly FP reg and stack). Library
3327 functions (when TYPE is zero) always have the proper types for args,
3328 so we can pass the FP value just in one register. emit_library_function
3329 doesn't support PARALLEL anyway. */
3332 function_arg (cum
, mode
, type
, named
)
3333 CUMULATIVE_ARGS
*cum
;
3334 enum machine_mode mode
;
3338 enum rs6000_abi abi
= DEFAULT_ABI
;
3340 /* Return a marker to indicate whether CR1 needs to set or clear the
3341 bit that V.4 uses to say fp args were passed in registers.
3342 Assume that we don't need the marker for software floating point,
3343 or compiler generated library calls. */
3344 if (mode
== VOIDmode
)
3347 && cum
->nargs_prototype
< 0
3348 && type
&& (cum
->prototype
|| TARGET_NO_PROTOTYPE
))
3350 /* For the SPE, we need to crxor CR6 always. */
3352 return GEN_INT (cum
->call_cookie
| CALL_V4_SET_FP_ARGS
);
3353 else if (TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3354 return GEN_INT (cum
->call_cookie
3355 | ((cum
->fregno
== FP_ARG_MIN_REG
)
3356 ? CALL_V4_SET_FP_ARGS
3357 : CALL_V4_CLEAR_FP_ARGS
));
3360 return GEN_INT (cum
->call_cookie
);
3363 if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
3365 if (named
&& cum
->vregno
<= ALTIVEC_ARG_MAX_REG
)
3366 return gen_rtx_REG (mode
, cum
->vregno
);
3370 else if (TARGET_SPE_ABI
&& TARGET_SPE
&& SPE_VECTOR_MODE (mode
) && named
)
3372 if (cum
->sysv_gregno
<= GP_ARG_MAX_REG
)
3373 return gen_rtx_REG (mode
, cum
->sysv_gregno
);
3377 else if (abi
== ABI_V4
)
3379 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
3380 && (mode
== SFmode
|| mode
== DFmode
))
3382 if (cum
->fregno
<= FP_ARG_V4_MAX_REG
)
3383 return gen_rtx_REG (mode
, cum
->fregno
);
3390 int gregno
= cum
->sysv_gregno
;
3392 /* Aggregates and IEEE quad get passed by reference. */
3393 if ((type
&& AGGREGATE_TYPE_P (type
))
3397 n_words
= RS6000_ARG_SIZE (mode
, type
);
3399 /* Long long and SPE vectors are put in odd registers. */
3400 if (n_words
== 2 && (gregno
& 1) == 0)
3403 /* Long long and SPE vectors are not split between registers
3405 if (gregno
+ n_words
- 1 <= GP_ARG_MAX_REG
)
3407 /* SPE vectors in ... get split into 2 registers. */
3408 if (TARGET_SPE
&& TARGET_SPE_ABI
3409 && SPE_VECTOR_MODE (mode
) && !named
)
3412 enum machine_mode m
= SImode
;
3414 r1
= gen_rtx_REG (m
, gregno
);
3415 r1
= gen_rtx_EXPR_LIST (m
, r1
, const0_rtx
);
3416 r2
= gen_rtx_REG (m
, gregno
+ 1);
3417 r2
= gen_rtx_EXPR_LIST (m
, r2
, GEN_INT (4));
3418 return gen_rtx_PARALLEL (mode
, gen_rtvec (2, r1
, r2
));
3420 return gen_rtx_REG (mode
, gregno
);
3428 int align
= (TARGET_32BIT
&& (cum
->words
& 1) != 0
3429 && function_arg_boundary (mode
, type
) == 64) ? 1 : 0;
3430 int align_words
= cum
->words
+ align
;
3432 if (type
&& TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
3435 if (USE_FP_FOR_ARG_P (*cum
, mode
, type
))
3438 || ((cum
->nargs_prototype
> 0)
3439 /* IBM AIX extended its linkage convention definition always
3440 to require FP args after register save area hole on the
3442 && (DEFAULT_ABI
!= ABI_AIX
3444 || (align_words
< GP_ARG_NUM_REG
))))
3445 return gen_rtx_REG (mode
, cum
->fregno
);
3447 return gen_rtx_PARALLEL (mode
,
3449 gen_rtx_EXPR_LIST (VOIDmode
,
3450 ((align_words
>= GP_ARG_NUM_REG
)
3453 + RS6000_ARG_SIZE (mode
, type
)
3455 /* If this is partially on the stack, then
3456 we only include the portion actually
3457 in registers here. */
3458 ? gen_rtx_REG (SImode
,
3459 GP_ARG_MIN_REG
+ align_words
)
3460 : gen_rtx_REG (mode
,
3461 GP_ARG_MIN_REG
+ align_words
))),
3463 gen_rtx_EXPR_LIST (VOIDmode
,
3464 gen_rtx_REG (mode
, cum
->fregno
),
3467 else if (align_words
< GP_ARG_NUM_REG
)
3468 return gen_rtx_REG (mode
, GP_ARG_MIN_REG
+ align_words
);
3474 /* For an arg passed partly in registers and partly in memory,
3475 this is the number of registers used.
3476 For args passed entirely in registers or entirely in memory, zero. */
3479 function_arg_partial_nregs (cum
, mode
, type
, named
)
3480 CUMULATIVE_ARGS
*cum
;
3481 enum machine_mode mode
;
3483 int named ATTRIBUTE_UNUSED
;
3485 if (DEFAULT_ABI
== ABI_V4
)
3488 if (USE_FP_FOR_ARG_P (*cum
, mode
, type
)
3489 || USE_ALTIVEC_FOR_ARG_P (*cum
, mode
, type
))
3491 if (cum
->nargs_prototype
>= 0)
3495 if (cum
->words
< GP_ARG_NUM_REG
3496 && GP_ARG_NUM_REG
< (cum
->words
+ RS6000_ARG_SIZE (mode
, type
)))
3498 int ret
= GP_ARG_NUM_REG
- cum
->words
;
3499 if (ret
&& TARGET_DEBUG_ARG
)
3500 fprintf (stderr
, "function_arg_partial_nregs: %d\n", ret
);
3508 /* A C expression that indicates when an argument must be passed by
3509 reference. If nonzero for an argument, a copy of that argument is
3510 made in memory and a pointer to the argument is passed instead of
3511 the argument itself. The pointer is passed in whatever way is
3512 appropriate for passing a pointer to that type.
3514 Under V.4, structures and unions are passed by reference.
3516 As an extension to all ABIs, variable sized types are passed by
3520 function_arg_pass_by_reference (cum
, mode
, type
, named
)
3521 CUMULATIVE_ARGS
*cum ATTRIBUTE_UNUSED
;
3522 enum machine_mode mode ATTRIBUTE_UNUSED
;
3524 int named ATTRIBUTE_UNUSED
;
3526 if (DEFAULT_ABI
== ABI_V4
3527 && ((type
&& AGGREGATE_TYPE_P (type
))
3530 if (TARGET_DEBUG_ARG
)
3531 fprintf (stderr
, "function_arg_pass_by_reference: aggregate\n");
3535 return type
&& int_size_in_bytes (type
) <= 0;
3538 /* Perform any needed actions needed for a function that is receiving a
3539 variable number of arguments.
3543 MODE and TYPE are the mode and type of the current parameter.
3545 PRETEND_SIZE is a variable that should be set to the amount of stack
3546 that must be pushed by the prolog to pretend that our caller pushed
3549 Normally, this macro will push all remaining incoming registers on the
3550 stack and set PRETEND_SIZE to the length of the registers pushed. */
3553 setup_incoming_varargs (cum
, mode
, type
, pretend_size
, no_rtl
)
3554 CUMULATIVE_ARGS
*cum
;
3555 enum machine_mode mode
;
3557 int *pretend_size ATTRIBUTE_UNUSED
;
3561 CUMULATIVE_ARGS next_cum
;
3562 int reg_size
= TARGET_32BIT
? 4 : 8;
3563 rtx save_area
= NULL_RTX
, mem
;
3564 int first_reg_offset
, set
;
3568 fntype
= TREE_TYPE (current_function_decl
);
3569 stdarg_p
= (TYPE_ARG_TYPES (fntype
) != 0
3570 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
3571 != void_type_node
));
3573 /* For varargs, we do not want to skip the dummy va_dcl argument.
3574 For stdargs, we do want to skip the last named argument. */
3577 function_arg_advance (&next_cum
, mode
, type
, 1);
3579 if (DEFAULT_ABI
== ABI_V4
)
3581 /* Indicate to allocate space on the stack for varargs save area. */
3582 cfun
->machine
->sysv_varargs_p
= 1;
3584 save_area
= plus_constant (virtual_stack_vars_rtx
,
3585 - RS6000_VARARGS_SIZE
);
3587 first_reg_offset
= next_cum
.sysv_gregno
- GP_ARG_MIN_REG
;
3591 first_reg_offset
= next_cum
.words
;
3592 save_area
= virtual_incoming_args_rtx
;
3593 cfun
->machine
->sysv_varargs_p
= 0;
3595 if (MUST_PASS_IN_STACK (mode
, type
))
3596 first_reg_offset
+= RS6000_ARG_SIZE (TYPE_MODE (type
), type
);
3599 set
= get_varargs_alias_set ();
3600 if (! no_rtl
&& first_reg_offset
< GP_ARG_NUM_REG
)
3602 mem
= gen_rtx_MEM (BLKmode
,
3603 plus_constant (save_area
,
3604 first_reg_offset
* reg_size
)),
3605 set_mem_alias_set (mem
, set
);
3606 set_mem_align (mem
, BITS_PER_WORD
);
3609 (GP_ARG_MIN_REG
+ first_reg_offset
, mem
,
3610 GP_ARG_NUM_REG
- first_reg_offset
,
3611 (GP_ARG_NUM_REG
- first_reg_offset
) * UNITS_PER_WORD
);
3614 /* Save FP registers if needed. */
3615 if (DEFAULT_ABI
== ABI_V4
3616 && TARGET_HARD_FLOAT
&& TARGET_FPRS
3618 && next_cum
.fregno
<= FP_ARG_V4_MAX_REG
)
3620 int fregno
= next_cum
.fregno
;
3621 rtx cr1
= gen_rtx_REG (CCmode
, CR1_REGNO
);
3622 rtx lab
= gen_label_rtx ();
3623 int off
= (GP_ARG_NUM_REG
* reg_size
) + ((fregno
- FP_ARG_MIN_REG
) * 8);
3625 emit_jump_insn (gen_rtx_SET (VOIDmode
,
3627 gen_rtx_IF_THEN_ELSE (VOIDmode
,
3628 gen_rtx_NE (VOIDmode
, cr1
,
3630 gen_rtx_LABEL_REF (VOIDmode
, lab
),
3633 while (fregno
<= FP_ARG_V4_MAX_REG
)
3635 mem
= gen_rtx_MEM (DFmode
, plus_constant (save_area
, off
));
3636 set_mem_alias_set (mem
, set
);
3637 emit_move_insn (mem
, gen_rtx_REG (DFmode
, fregno
));
3646 /* Create the va_list data type. */
3649 rs6000_build_va_list ()
3651 tree f_gpr
, f_fpr
, f_ovf
, f_sav
, record
, type_decl
;
3653 /* For AIX, prefer 'char *' because that's what the system
3654 header files like. */
3655 if (DEFAULT_ABI
!= ABI_V4
)
3656 return build_pointer_type (char_type_node
);
3658 record
= (*lang_hooks
.types
.make_type
) (RECORD_TYPE
);
3659 type_decl
= build_decl (TYPE_DECL
, get_identifier ("__va_list_tag"), record
);
3661 f_gpr
= build_decl (FIELD_DECL
, get_identifier ("gpr"),
3662 unsigned_char_type_node
);
3663 f_fpr
= build_decl (FIELD_DECL
, get_identifier ("fpr"),
3664 unsigned_char_type_node
);
3665 f_ovf
= build_decl (FIELD_DECL
, get_identifier ("overflow_arg_area"),
3667 f_sav
= build_decl (FIELD_DECL
, get_identifier ("reg_save_area"),
3670 DECL_FIELD_CONTEXT (f_gpr
) = record
;
3671 DECL_FIELD_CONTEXT (f_fpr
) = record
;
3672 DECL_FIELD_CONTEXT (f_ovf
) = record
;
3673 DECL_FIELD_CONTEXT (f_sav
) = record
;
3675 TREE_CHAIN (record
) = type_decl
;
3676 TYPE_NAME (record
) = type_decl
;
3677 TYPE_FIELDS (record
) = f_gpr
;
3678 TREE_CHAIN (f_gpr
) = f_fpr
;
3679 TREE_CHAIN (f_fpr
) = f_ovf
;
3680 TREE_CHAIN (f_ovf
) = f_sav
;
3682 layout_type (record
);
3684 /* The correct type is an array type of one element. */
3685 return build_array_type (record
, build_index_type (size_zero_node
));
3688 /* Implement va_start. */
3691 rs6000_va_start (valist
, nextarg
)
3695 HOST_WIDE_INT words
, n_gpr
, n_fpr
;
3696 tree f_gpr
, f_fpr
, f_ovf
, f_sav
;
3697 tree gpr
, fpr
, ovf
, sav
, t
;
3699 /* Only SVR4 needs something special. */
3700 if (DEFAULT_ABI
!= ABI_V4
)
3702 std_expand_builtin_va_start (valist
, nextarg
);
3706 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
3707 f_fpr
= TREE_CHAIN (f_gpr
);
3708 f_ovf
= TREE_CHAIN (f_fpr
);
3709 f_sav
= TREE_CHAIN (f_ovf
);
3711 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
3712 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
);
3713 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
);
3714 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
);
3715 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
);
3717 /* Count number of gp and fp argument registers used. */
3718 words
= current_function_args_info
.words
;
3719 n_gpr
= current_function_args_info
.sysv_gregno
- GP_ARG_MIN_REG
;
3720 n_fpr
= current_function_args_info
.fregno
- FP_ARG_MIN_REG
;
3722 if (TARGET_DEBUG_ARG
)
3724 fputs ("va_start: words = ", stderr
);
3725 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, words
);
3726 fputs (", n_gpr = ", stderr
);
3727 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, n_gpr
);
3728 fputs (", n_fpr = ", stderr
);
3729 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, n_fpr
);
3730 putc ('\n', stderr
);
3733 t
= build (MODIFY_EXPR
, TREE_TYPE (gpr
), gpr
, build_int_2 (n_gpr
, 0));
3734 TREE_SIDE_EFFECTS (t
) = 1;
3735 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3737 t
= build (MODIFY_EXPR
, TREE_TYPE (fpr
), fpr
, build_int_2 (n_fpr
, 0));
3738 TREE_SIDE_EFFECTS (t
) = 1;
3739 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3741 /* Find the overflow area. */
3742 t
= make_tree (TREE_TYPE (ovf
), virtual_incoming_args_rtx
);
3744 t
= build (PLUS_EXPR
, TREE_TYPE (ovf
), t
,
3745 build_int_2 (words
* UNITS_PER_WORD
, 0));
3746 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
3747 TREE_SIDE_EFFECTS (t
) = 1;
3748 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3750 /* Find the register save area. */
3751 t
= make_tree (TREE_TYPE (sav
), virtual_stack_vars_rtx
);
3752 t
= build (PLUS_EXPR
, TREE_TYPE (sav
), t
,
3753 build_int_2 (-RS6000_VARARGS_SIZE
, -1));
3754 t
= build (MODIFY_EXPR
, TREE_TYPE (sav
), sav
, t
);
3755 TREE_SIDE_EFFECTS (t
) = 1;
3756 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3759 /* Implement va_arg. */
3762 rs6000_va_arg (valist
, type
)
3765 tree f_gpr
, f_fpr
, f_ovf
, f_sav
;
3766 tree gpr
, fpr
, ovf
, sav
, reg
, t
, u
;
3767 int indirect_p
, size
, rsize
, n_reg
, sav_ofs
, sav_scale
;
3768 rtx lab_false
, lab_over
, addr_rtx
, r
;
3770 if (DEFAULT_ABI
!= ABI_V4
)
3772 /* Variable sized types are passed by reference. */
3773 if (int_size_in_bytes (type
) <= 0)
3775 u
= build_pointer_type (type
);
3777 /* Args grow upward. */
3778 t
= build (POSTINCREMENT_EXPR
, TREE_TYPE (valist
), valist
,
3779 build_int_2 (POINTER_SIZE
/ BITS_PER_UNIT
, 0));
3780 TREE_SIDE_EFFECTS (t
) = 1;
3782 t
= build1 (NOP_EXPR
, build_pointer_type (u
), t
);
3783 TREE_SIDE_EFFECTS (t
) = 1;
3785 t
= build1 (INDIRECT_REF
, u
, t
);
3786 TREE_SIDE_EFFECTS (t
) = 1;
3788 return expand_expr (t
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
3791 return std_expand_builtin_va_arg (valist
, type
);
3794 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
3795 f_fpr
= TREE_CHAIN (f_gpr
);
3796 f_ovf
= TREE_CHAIN (f_fpr
);
3797 f_sav
= TREE_CHAIN (f_ovf
);
3799 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
3800 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
);
3801 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
);
3802 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
);
3803 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
);
3805 size
= int_size_in_bytes (type
);
3806 rsize
= (size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
3808 if (AGGREGATE_TYPE_P (type
) || TYPE_MODE (type
) == TFmode
)
3810 /* Aggregates and long doubles are passed by reference. */
3816 size
= UNITS_PER_WORD
;
3819 else if (FLOAT_TYPE_P (type
) && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3821 /* FP args go in FP registers, if present. */
3830 /* Otherwise into GP registers. */
3838 /* Pull the value out of the saved registers ... */
3840 lab_false
= gen_label_rtx ();
3841 lab_over
= gen_label_rtx ();
3842 addr_rtx
= gen_reg_rtx (Pmode
);
3844 /* AltiVec vectors never go in registers. */
3845 if (!TARGET_ALTIVEC
|| TREE_CODE (type
) != VECTOR_TYPE
)
3847 TREE_THIS_VOLATILE (reg
) = 1;
3848 emit_cmp_and_jump_insns
3849 (expand_expr (reg
, NULL_RTX
, QImode
, EXPAND_NORMAL
),
3850 GEN_INT (8 - n_reg
+ 1), GE
, const1_rtx
, QImode
, 1,
3853 /* Long long is aligned in the registers. */
3856 u
= build (BIT_AND_EXPR
, TREE_TYPE (reg
), reg
,
3857 build_int_2 (n_reg
- 1, 0));
3858 u
= build (PLUS_EXPR
, TREE_TYPE (reg
), reg
, u
);
3859 u
= build (MODIFY_EXPR
, TREE_TYPE (reg
), reg
, u
);
3860 TREE_SIDE_EFFECTS (u
) = 1;
3861 expand_expr (u
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3865 t
= build (PLUS_EXPR
, ptr_type_node
, sav
, build_int_2 (sav_ofs
, 0));
3869 u
= build (POSTINCREMENT_EXPR
, TREE_TYPE (reg
), reg
,
3870 build_int_2 (n_reg
, 0));
3871 TREE_SIDE_EFFECTS (u
) = 1;
3873 u
= build1 (CONVERT_EXPR
, integer_type_node
, u
);
3874 TREE_SIDE_EFFECTS (u
) = 1;
3876 u
= build (MULT_EXPR
, integer_type_node
, u
, build_int_2 (sav_scale
, 0));
3877 TREE_SIDE_EFFECTS (u
) = 1;
3879 t
= build (PLUS_EXPR
, ptr_type_node
, t
, u
);
3880 TREE_SIDE_EFFECTS (t
) = 1;
3882 r
= expand_expr (t
, addr_rtx
, Pmode
, EXPAND_NORMAL
);
3884 emit_move_insn (addr_rtx
, r
);
3886 emit_jump_insn (gen_jump (lab_over
));
3890 emit_label (lab_false
);
3892 /* ... otherwise out of the overflow area. */
3894 /* Make sure we don't find reg 7 for the next int arg.
3896 All AltiVec vectors go in the overflow area. So in the AltiVec
3897 case we need to get the vectors from the overflow area, but
3898 remember where the GPRs and FPRs are. */
3899 if (n_reg
> 1 && (TREE_CODE (type
) != VECTOR_TYPE
3900 || !TARGET_ALTIVEC
))
3902 t
= build (MODIFY_EXPR
, TREE_TYPE (reg
), reg
, build_int_2 (8, 0));
3903 TREE_SIDE_EFFECTS (t
) = 1;
3904 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3907 /* Care for on-stack alignment if needed. */
3914 /* AltiVec vectors are 16 byte aligned. */
3915 if (TARGET_ALTIVEC
&& TREE_CODE (type
) == VECTOR_TYPE
)
3920 t
= build (PLUS_EXPR
, TREE_TYPE (ovf
), ovf
, build_int_2 (align
, 0));
3921 t
= build (BIT_AND_EXPR
, TREE_TYPE (t
), t
, build_int_2 (-align
-1, -1));
3925 r
= expand_expr (t
, addr_rtx
, Pmode
, EXPAND_NORMAL
);
3927 emit_move_insn (addr_rtx
, r
);
3929 t
= build (PLUS_EXPR
, TREE_TYPE (t
), t
, build_int_2 (size
, 0));
3930 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
3931 TREE_SIDE_EFFECTS (t
) = 1;
3932 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3934 emit_label (lab_over
);
3938 r
= gen_rtx_MEM (Pmode
, addr_rtx
);
3939 set_mem_alias_set (r
, get_varargs_alias_set ());
3940 emit_move_insn (addr_rtx
, r
);
/* Register the builtin NAME (with type TYPE and insn CODE) only when
   the target flags in MASK are enabled.  Wrapped in do/while (0) so it
   behaves as a single statement.  */
#define def_builtin(MASK, NAME, TYPE, CODE)			\
do {								\
  if ((MASK) & target_flags)					\
    builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
		      NULL, NULL_TREE);				\
} while (0)
3955 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3957 static const struct builtin_description bdesc_3arg
[] =
3959 { MASK_ALTIVEC
, CODE_FOR_altivec_vmaddfp
, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP
},
3960 { MASK_ALTIVEC
, CODE_FOR_altivec_vmhaddshs
, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS
},
3961 { MASK_ALTIVEC
, CODE_FOR_altivec_vmhraddshs
, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS
},
3962 { MASK_ALTIVEC
, CODE_FOR_altivec_vmladduhm
, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM
},
3963 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumubm
, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM
},
3964 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsummbm
, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM
},
3965 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumuhm
, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM
},
3966 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumshm
, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM
},
3967 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumuhs
, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS
},
3968 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumshs
, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS
},
3969 { MASK_ALTIVEC
, CODE_FOR_altivec_vnmsubfp
, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP
},
3970 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_4sf
, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF
},
3971 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_4si
, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI
},
3972 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_8hi
, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI
},
3973 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_16qi
, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI
},
3974 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_4sf
, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF
},
3975 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_4si
, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI
},
3976 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_8hi
, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI
},
3977 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_16qi
, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI
},
3978 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_16qi
, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI
},
3979 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_8hi
, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI
},
3980 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_4si
, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI
},
3981 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_4sf
, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF
},
3984 /* DST operations: void foo (void *, const int, const char). */
3986 static const struct builtin_description bdesc_dst
[] =
3988 { MASK_ALTIVEC
, CODE_FOR_altivec_dst
, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST
},
3989 { MASK_ALTIVEC
, CODE_FOR_altivec_dstt
, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT
},
3990 { MASK_ALTIVEC
, CODE_FOR_altivec_dstst
, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST
},
3991 { MASK_ALTIVEC
, CODE_FOR_altivec_dststt
, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT
}
3994 /* Simple binary operations: VECc = foo (VECa, VECb). */
3996 static struct builtin_description bdesc_2arg
[] =
3998 { MASK_ALTIVEC
, CODE_FOR_addv16qi3
, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM
},
3999 { MASK_ALTIVEC
, CODE_FOR_addv8hi3
, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM
},
4000 { MASK_ALTIVEC
, CODE_FOR_addv4si3
, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM
},
4001 { MASK_ALTIVEC
, CODE_FOR_addv4sf3
, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP
},
4002 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddcuw
, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW
},
4003 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddubs
, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS
},
4004 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddsbs
, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS
},
4005 { MASK_ALTIVEC
, CODE_FOR_altivec_vadduhs
, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS
},
4006 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddshs
, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS
},
4007 { MASK_ALTIVEC
, CODE_FOR_altivec_vadduws
, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS
},
4008 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddsws
, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS
},
4009 { MASK_ALTIVEC
, CODE_FOR_andv4si3
, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND
},
4010 { MASK_ALTIVEC
, CODE_FOR_altivec_vandc
, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC
},
4011 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgub
, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB
},
4012 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsb
, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB
},
4013 { MASK_ALTIVEC
, CODE_FOR_altivec_vavguh
, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH
},
4014 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsh
, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH
},
4015 { MASK_ALTIVEC
, CODE_FOR_altivec_vavguw
, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW
},
4016 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsw
, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW
},
4017 { MASK_ALTIVEC
, CODE_FOR_altivec_vcfux
, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX
},
4018 { MASK_ALTIVEC
, CODE_FOR_altivec_vcfsx
, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX
},
4019 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpbfp
, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP
},
4020 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequb
, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB
},
4021 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequh
, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH
},
4022 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequw
, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW
},
4023 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpeqfp
, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP
},
4024 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgefp
, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP
},
4025 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtub
, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB
},
4026 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsb
, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB
},
4027 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtuh
, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH
},
4028 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsh
, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH
},
4029 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtuw
, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW
},
4030 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsw
, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW
},
4031 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtfp
, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP
},
4032 { MASK_ALTIVEC
, CODE_FOR_altivec_vctsxs
, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS
},
4033 { MASK_ALTIVEC
, CODE_FOR_altivec_vctuxs
, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS
},
4034 { MASK_ALTIVEC
, CODE_FOR_umaxv16qi3
, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB
},
4035 { MASK_ALTIVEC
, CODE_FOR_smaxv16qi3
, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB
},
4036 { MASK_ALTIVEC
, CODE_FOR_umaxv8hi3
, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH
},
4037 { MASK_ALTIVEC
, CODE_FOR_smaxv8hi3
, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH
},
4038 { MASK_ALTIVEC
, CODE_FOR_umaxv4si3
, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW
},
4039 { MASK_ALTIVEC
, CODE_FOR_smaxv4si3
, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW
},
4040 { MASK_ALTIVEC
, CODE_FOR_smaxv4sf3
, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP
},
4041 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghb
, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB
},
4042 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghh
, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH
},
4043 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghw
, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW
},
4044 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglb
, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB
},
4045 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglh
, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH
},
4046 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglw
, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW
},
4047 { MASK_ALTIVEC
, CODE_FOR_uminv16qi3
, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB
},
4048 { MASK_ALTIVEC
, CODE_FOR_sminv16qi3
, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB
},
4049 { MASK_ALTIVEC
, CODE_FOR_uminv8hi3
, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH
},
4050 { MASK_ALTIVEC
, CODE_FOR_sminv8hi3
, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH
},
4051 { MASK_ALTIVEC
, CODE_FOR_uminv4si3
, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW
},
4052 { MASK_ALTIVEC
, CODE_FOR_sminv4si3
, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW
},
4053 { MASK_ALTIVEC
, CODE_FOR_sminv4sf3
, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP
},
4054 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuleub
, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB
},
4055 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulesb
, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB
},
4056 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuleuh
, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH
},
4057 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulesh
, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH
},
4058 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuloub
, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB
},
4059 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulosb
, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB
},
4060 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulouh
, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH
},
4061 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulosh
, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH
},
4062 { MASK_ALTIVEC
, CODE_FOR_altivec_vnor
, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR
},
4063 { MASK_ALTIVEC
, CODE_FOR_iorv4si3
, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR
},
4064 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhum
, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM
},
4065 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwum
, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM
},
4066 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkpx
, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX
},
4067 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhss
, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS
},
4068 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkshss
, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS
},
4069 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwss
, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS
},
4070 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkswss
, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS
},
4071 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhus
, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS
},
4072 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkshus
, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS
},
4073 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwus
, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS
},
4074 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkswus
, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS
},
4075 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlb
, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB
},
4076 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlh
, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH
},
4077 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlw
, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW
},
4078 { MASK_ALTIVEC
, CODE_FOR_altivec_vslb
, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB
},
4079 { MASK_ALTIVEC
, CODE_FOR_altivec_vslh
, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH
},
4080 { MASK_ALTIVEC
, CODE_FOR_altivec_vslw
, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW
},
4081 { MASK_ALTIVEC
, CODE_FOR_altivec_vsl
, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL
},
4082 { MASK_ALTIVEC
, CODE_FOR_altivec_vslo
, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO
},
4083 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltb
, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB
},
4084 { MASK_ALTIVEC
, CODE_FOR_altivec_vsplth
, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH
},
4085 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltw
, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW
},
4086 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrb
, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB
},
4087 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrh
, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH
},
4088 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrw
, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW
},
4089 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrab
, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB
},
4090 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrah
, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH
},
4091 { MASK_ALTIVEC
, CODE_FOR_altivec_vsraw
, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW
},
4092 { MASK_ALTIVEC
, CODE_FOR_altivec_vsr
, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR
},
4093 { MASK_ALTIVEC
, CODE_FOR_altivec_vsro
, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO
},
4094 { MASK_ALTIVEC
, CODE_FOR_subv16qi3
, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM
},
4095 { MASK_ALTIVEC
, CODE_FOR_subv8hi3
, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM
},
4096 { MASK_ALTIVEC
, CODE_FOR_subv4si3
, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM
},
4097 { MASK_ALTIVEC
, CODE_FOR_subv4sf3
, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP
},
4098 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubcuw
, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW
},
4099 { MASK_ALTIVEC
, CODE_FOR_altivec_vsububs
, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS
},
4100 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubsbs
, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS
},
4101 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubuhs
, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS
},
4102 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubshs
, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS
},
4103 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubuws
, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS
},
4104 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubsws
, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS
},
4105 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4ubs
, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS
},
4106 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4sbs
, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS
},
4107 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4shs
, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS
},
4108 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum2sws
, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS
},
4109 { MASK_ALTIVEC
, CODE_FOR_altivec_vsumsws
, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS
},
4110 { MASK_ALTIVEC
, CODE_FOR_xorv4si3
, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR
},
4112 /* Place holder, leave as first spe builtin. */
4113 { 0, CODE_FOR_spe_evaddw
, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW
},
4114 { 0, CODE_FOR_spe_evand
, "__builtin_spe_evand", SPE_BUILTIN_EVAND
},
4115 { 0, CODE_FOR_spe_evandc
, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC
},
4116 { 0, CODE_FOR_spe_evdivws
, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS
},
4117 { 0, CODE_FOR_spe_evdivwu
, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU
},
4118 { 0, CODE_FOR_spe_eveqv
, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV
},
4119 { 0, CODE_FOR_spe_evfsadd
, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD
},
4120 { 0, CODE_FOR_spe_evfsdiv
, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV
},
4121 { 0, CODE_FOR_spe_evfsmul
, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL
},
4122 { 0, CODE_FOR_spe_evfssub
, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB
},
4123 { 0, CODE_FOR_spe_evmergehi
, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI
},
4124 { 0, CODE_FOR_spe_evmergehilo
, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO
},
4125 { 0, CODE_FOR_spe_evmergelo
, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO
},
4126 { 0, CODE_FOR_spe_evmergelohi
, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI
},
4127 { 0, CODE_FOR_spe_evmhegsmfaa
, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA
},
4128 { 0, CODE_FOR_spe_evmhegsmfan
, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN
},
4129 { 0, CODE_FOR_spe_evmhegsmiaa
, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA
},
4130 { 0, CODE_FOR_spe_evmhegsmian
, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN
},
4131 { 0, CODE_FOR_spe_evmhegumiaa
, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA
},
4132 { 0, CODE_FOR_spe_evmhegumian
, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN
},
4133 { 0, CODE_FOR_spe_evmhesmf
, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF
},
4134 { 0, CODE_FOR_spe_evmhesmfa
, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA
},
4135 { 0, CODE_FOR_spe_evmhesmfaaw
, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW
},
4136 { 0, CODE_FOR_spe_evmhesmfanw
, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW
},
4137 { 0, CODE_FOR_spe_evmhesmi
, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI
},
4138 { 0, CODE_FOR_spe_evmhesmia
, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA
},
4139 { 0, CODE_FOR_spe_evmhesmiaaw
, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW
},
4140 { 0, CODE_FOR_spe_evmhesmianw
, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW
},
4141 { 0, CODE_FOR_spe_evmhessf
, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF
},
4142 { 0, CODE_FOR_spe_evmhessfa
, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA
},
4143 { 0, CODE_FOR_spe_evmhessfaaw
, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW
},
4144 { 0, CODE_FOR_spe_evmhessfanw
, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW
},
4145 { 0, CODE_FOR_spe_evmhessiaaw
, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW
},
4146 { 0, CODE_FOR_spe_evmhessianw
, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW
},
4147 { 0, CODE_FOR_spe_evmheumi
, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI
},
4148 { 0, CODE_FOR_spe_evmheumia
, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA
},
4149 { 0, CODE_FOR_spe_evmheumiaaw
, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW
},
4150 { 0, CODE_FOR_spe_evmheumianw
, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW
},
4151 { 0, CODE_FOR_spe_evmheusiaaw
, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW
},
4152 { 0, CODE_FOR_spe_evmheusianw
, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW
},
4153 { 0, CODE_FOR_spe_evmhogsmfaa
, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA
},
4154 { 0, CODE_FOR_spe_evmhogsmfan
, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN
},
4155 { 0, CODE_FOR_spe_evmhogsmiaa
, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA
},
4156 { 0, CODE_FOR_spe_evmhogsmian
, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN
},
4157 { 0, CODE_FOR_spe_evmhogumiaa
, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA
},
4158 { 0, CODE_FOR_spe_evmhogumian
, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN
},
4159 { 0, CODE_FOR_spe_evmhosmf
, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF
},
4160 { 0, CODE_FOR_spe_evmhosmfa
, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA
},
4161 { 0, CODE_FOR_spe_evmhosmfaaw
, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW
},
4162 { 0, CODE_FOR_spe_evmhosmfanw
, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW
},
4163 { 0, CODE_FOR_spe_evmhosmi
, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI
},
4164 { 0, CODE_FOR_spe_evmhosmia
, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA
},
4165 { 0, CODE_FOR_spe_evmhosmiaaw
, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW
},
4166 { 0, CODE_FOR_spe_evmhosmianw
, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW
},
4167 { 0, CODE_FOR_spe_evmhossf
, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF
},
4168 { 0, CODE_FOR_spe_evmhossfa
, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA
},
4169 { 0, CODE_FOR_spe_evmhossfaaw
, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW
},
4170 { 0, CODE_FOR_spe_evmhossfanw
, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW
},
4171 { 0, CODE_FOR_spe_evmhossiaaw
, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW
},
4172 { 0, CODE_FOR_spe_evmhossianw
, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW
},
4173 { 0, CODE_FOR_spe_evmhoumi
, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI
},
4174 { 0, CODE_FOR_spe_evmhoumia
, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA
},
4175 { 0, CODE_FOR_spe_evmhoumiaaw
, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW
},
4176 { 0, CODE_FOR_spe_evmhoumianw
, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW
},
4177 { 0, CODE_FOR_spe_evmhousiaaw
, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW
},
4178 { 0, CODE_FOR_spe_evmhousianw
, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW
},
4179 { 0, CODE_FOR_spe_evmwhsmf
, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF
},
4180 { 0, CODE_FOR_spe_evmwhsmfa
, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA
},
4181 { 0, CODE_FOR_spe_evmwhsmi
, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI
},
4182 { 0, CODE_FOR_spe_evmwhsmia
, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA
},
4183 { 0, CODE_FOR_spe_evmwhssf
, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF
},
4184 { 0, CODE_FOR_spe_evmwhssfa
, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA
},
4185 { 0, CODE_FOR_spe_evmwhumi
, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI
},
4186 { 0, CODE_FOR_spe_evmwhumia
, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA
},
4187 { 0, CODE_FOR_spe_evmwlsmiaaw
, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW
},
4188 { 0, CODE_FOR_spe_evmwlsmianw
, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW
},
4189 { 0, CODE_FOR_spe_evmwlssiaaw
, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW
},
4190 { 0, CODE_FOR_spe_evmwlssianw
, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW
},
4191 { 0, CODE_FOR_spe_evmwlumi
, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI
},
4192 { 0, CODE_FOR_spe_evmwlumia
, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA
},
4193 { 0, CODE_FOR_spe_evmwlumiaaw
, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW
},
4194 { 0, CODE_FOR_spe_evmwlumianw
, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW
},
4195 { 0, CODE_FOR_spe_evmwlusiaaw
, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW
},
4196 { 0, CODE_FOR_spe_evmwlusianw
, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW
},
4197 { 0, CODE_FOR_spe_evmwsmf
, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF
},
4198 { 0, CODE_FOR_spe_evmwsmfa
, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA
},
4199 { 0, CODE_FOR_spe_evmwsmfaa
, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA
},
4200 { 0, CODE_FOR_spe_evmwsmfan
, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN
},
4201 { 0, CODE_FOR_spe_evmwsmi
, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI
},
4202 { 0, CODE_FOR_spe_evmwsmia
, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA
},
4203 { 0, CODE_FOR_spe_evmwsmiaa
, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA
},
4204 { 0, CODE_FOR_spe_evmwsmian
, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN
},
4205 { 0, CODE_FOR_spe_evmwssf
, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF
},
4206 { 0, CODE_FOR_spe_evmwssfa
, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA
},
4207 { 0, CODE_FOR_spe_evmwssfaa
, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA
},
4208 { 0, CODE_FOR_spe_evmwssfan
, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN
},
4209 { 0, CODE_FOR_spe_evmwumi
, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI
},
4210 { 0, CODE_FOR_spe_evmwumia
, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA
},
4211 { 0, CODE_FOR_spe_evmwumiaa
, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA
},
4212 { 0, CODE_FOR_spe_evmwumian
, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN
},
4213 { 0, CODE_FOR_spe_evnand
, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND
},
4214 { 0, CODE_FOR_spe_evnor
, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR
},
4215 { 0, CODE_FOR_spe_evor
, "__builtin_spe_evor", SPE_BUILTIN_EVOR
},
4216 { 0, CODE_FOR_spe_evorc
, "__builtin_spe_evorc", SPE_BUILTIN_EVORC
},
4217 { 0, CODE_FOR_spe_evrlw
, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW
},
4218 { 0, CODE_FOR_spe_evslw
, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW
},
4219 { 0, CODE_FOR_spe_evsrws
, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS
},
4220 { 0, CODE_FOR_spe_evsrwu
, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU
},
4221 { 0, CODE_FOR_spe_evsubfw
, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW
},
4223 /* SPE binary operations expecting a 5-bit unsigned literal. */
4224 { 0, CODE_FOR_spe_evaddiw
, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW
},
4226 { 0, CODE_FOR_spe_evrlwi
, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI
},
4227 { 0, CODE_FOR_spe_evslwi
, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI
},
4228 { 0, CODE_FOR_spe_evsrwis
, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS
},
4229 { 0, CODE_FOR_spe_evsrwiu
, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU
},
4230 { 0, CODE_FOR_spe_evsubifw
, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW
},
4231 { 0, CODE_FOR_spe_evmwhssfaa
, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA
},
4232 { 0, CODE_FOR_spe_evmwhssmaa
, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA
},
4233 { 0, CODE_FOR_spe_evmwhsmfaa
, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA
},
4234 { 0, CODE_FOR_spe_evmwhsmiaa
, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA
},
4235 { 0, CODE_FOR_spe_evmwhusiaa
, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA
},
4236 { 0, CODE_FOR_spe_evmwhumiaa
, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA
},
4237 { 0, CODE_FOR_spe_evmwhssfan
, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN
},
4238 { 0, CODE_FOR_spe_evmwhssian
, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN
},
4239 { 0, CODE_FOR_spe_evmwhsmfan
, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN
},
4240 { 0, CODE_FOR_spe_evmwhsmian
, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN
},
4241 { 0, CODE_FOR_spe_evmwhusian
, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN
},
4242 { 0, CODE_FOR_spe_evmwhumian
, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN
},
4243 { 0, CODE_FOR_spe_evmwhgssfaa
, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA
},
4244 { 0, CODE_FOR_spe_evmwhgsmfaa
, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA
},
4245 { 0, CODE_FOR_spe_evmwhgsmiaa
, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA
},
4246 { 0, CODE_FOR_spe_evmwhgumiaa
, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA
},
4247 { 0, CODE_FOR_spe_evmwhgssfan
, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN
},
4248 { 0, CODE_FOR_spe_evmwhgsmfan
, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN
},
4249 { 0, CODE_FOR_spe_evmwhgsmian
, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN
},
4250 { 0, CODE_FOR_spe_evmwhgumian
, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN
},
4251 { 0, CODE_FOR_spe_brinc
, "__builtin_spe_brinc", SPE_BUILTIN_BRINC
},
4253 /* Place-holder. Leave as last binary SPE builtin. */
4254 { 0, CODE_FOR_xorv2si3
, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR
},
4257 /* AltiVec predicates. */
4259 struct builtin_description_predicates
4261 const unsigned int mask
;
4262 const enum insn_code icode
;
4264 const char *const name
;
4265 const enum rs6000_builtins code
;
4268 static const struct builtin_description_predicates bdesc_altivec_preds
[] =
4270 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P
},
4271 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P
},
4272 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P
},
4273 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P
},
4274 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P
},
4275 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P
},
4276 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P
},
4277 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P
},
4278 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P
},
4279 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P
},
4280 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P
},
4281 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P
},
4282 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P
}
4285 /* SPE predicates. */
4286 static struct builtin_description bdesc_spe_predicates
[] =
4288 /* Place-holder. Leave as first. */
4289 { 0, CODE_FOR_spe_evcmpeq
, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ
},
4290 { 0, CODE_FOR_spe_evcmpgts
, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS
},
4291 { 0, CODE_FOR_spe_evcmpgtu
, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU
},
4292 { 0, CODE_FOR_spe_evcmplts
, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS
},
4293 { 0, CODE_FOR_spe_evcmpltu
, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU
},
4294 { 0, CODE_FOR_spe_evfscmpeq
, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ
},
4295 { 0, CODE_FOR_spe_evfscmpgt
, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT
},
4296 { 0, CODE_FOR_spe_evfscmplt
, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT
},
4297 { 0, CODE_FOR_spe_evfststeq
, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ
},
4298 { 0, CODE_FOR_spe_evfststgt
, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT
},
4299 /* Place-holder. Leave as last. */
4300 { 0, CODE_FOR_spe_evfststlt
, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT
},
4303 /* SPE evsel predicates. */
4304 static struct builtin_description bdesc_spe_evsel
[] =
4306 /* Place-holder. Leave as first. */
4307 { 0, CODE_FOR_spe_evcmpgts
, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS
},
4308 { 0, CODE_FOR_spe_evcmpgtu
, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU
},
4309 { 0, CODE_FOR_spe_evcmplts
, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS
},
4310 { 0, CODE_FOR_spe_evcmpltu
, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU
},
4311 { 0, CODE_FOR_spe_evcmpeq
, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ
},
4312 { 0, CODE_FOR_spe_evfscmpgt
, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT
},
4313 { 0, CODE_FOR_spe_evfscmplt
, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT
},
4314 { 0, CODE_FOR_spe_evfscmpeq
, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ
},
4315 { 0, CODE_FOR_spe_evfststgt
, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT
},
4316 { 0, CODE_FOR_spe_evfststlt
, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT
},
4317 /* Place-holder. Leave as last. */
4318 { 0, CODE_FOR_spe_evfststeq
, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ
},
4321 /* ABS* operations. */
4323 static const struct builtin_description bdesc_abs
[] =
4325 { MASK_ALTIVEC
, CODE_FOR_absv4si2
, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI
},
4326 { MASK_ALTIVEC
, CODE_FOR_absv8hi2
, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI
},
4327 { MASK_ALTIVEC
, CODE_FOR_absv4sf2
, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF
},
4328 { MASK_ALTIVEC
, CODE_FOR_absv16qi2
, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI
},
4329 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v4si
, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI
},
4330 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v8hi
, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI
},
4331 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v16qi
, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI
}
4334 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
4337 static struct builtin_description bdesc_1arg
[] =
4339 { MASK_ALTIVEC
, CODE_FOR_altivec_vexptefp
, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP
},
4340 { MASK_ALTIVEC
, CODE_FOR_altivec_vlogefp
, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP
},
4341 { MASK_ALTIVEC
, CODE_FOR_altivec_vrefp
, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP
},
4342 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfim
, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM
},
4343 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfin
, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN
},
4344 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfip
, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP
},
4345 { MASK_ALTIVEC
, CODE_FOR_ftruncv4sf2
, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ
},
4346 { MASK_ALTIVEC
, CODE_FOR_altivec_vrsqrtefp
, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP
},
4347 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltisb
, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB
},
4348 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltish
, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH
},
4349 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltisw
, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW
},
4350 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhsb
, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB
},
4351 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhpx
, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX
},
4352 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhsh
, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH
},
4353 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklsb
, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB
},
4354 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklpx
, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX
},
4355 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklsh
, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH
},
4357 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
4358 end with SPE_BUILTIN_EVSUBFUSIAAW. */
4359 { 0, CODE_FOR_spe_evabs
, "__builtin_spe_evabs", SPE_BUILTIN_EVABS
},
4360 { 0, CODE_FOR_spe_evaddsmiaaw
, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW
},
4361 { 0, CODE_FOR_spe_evaddssiaaw
, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW
},
4362 { 0, CODE_FOR_spe_evaddumiaaw
, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW
},
4363 { 0, CODE_FOR_spe_evaddusiaaw
, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW
},
4364 { 0, CODE_FOR_spe_evcntlsw
, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW
},
4365 { 0, CODE_FOR_spe_evcntlzw
, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW
},
4366 { 0, CODE_FOR_spe_evextsb
, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB
},
4367 { 0, CODE_FOR_spe_evextsh
, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH
},
4368 { 0, CODE_FOR_spe_evfsabs
, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS
},
4369 { 0, CODE_FOR_spe_evfscfsf
, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF
},
4370 { 0, CODE_FOR_spe_evfscfsi
, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI
},
4371 { 0, CODE_FOR_spe_evfscfuf
, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF
},
4372 { 0, CODE_FOR_spe_evfscfui
, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI
},
4373 { 0, CODE_FOR_spe_evfsctsf
, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF
},
4374 { 0, CODE_FOR_spe_evfsctsi
, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI
},
4375 { 0, CODE_FOR_spe_evfsctsiz
, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ
},
4376 { 0, CODE_FOR_spe_evfsctuf
, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF
},
4377 { 0, CODE_FOR_spe_evfsctui
, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI
},
4378 { 0, CODE_FOR_spe_evfsctuiz
, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ
},
4379 { 0, CODE_FOR_spe_evfsnabs
, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS
},
4380 { 0, CODE_FOR_spe_evfsneg
, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG
},
4381 { 0, CODE_FOR_spe_evmra
, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA
},
4382 { 0, CODE_FOR_spe_evneg
, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG
},
4383 { 0, CODE_FOR_spe_evrndw
, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW
},
4384 { 0, CODE_FOR_spe_evsubfsmiaaw
, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW
},
4385 { 0, CODE_FOR_spe_evsubfssiaaw
, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW
},
4386 { 0, CODE_FOR_spe_evsubfumiaaw
, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW
},
4387 { 0, CODE_FOR_spe_evsplatfi
, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI
},
4388 { 0, CODE_FOR_spe_evsplati
, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI
},
4390 /* Place-holder. Leave as last unary SPE builtin. */
4391 { 0, CODE_FOR_spe_evsubfusiaaw
, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW
},
4395 rs6000_expand_unop_builtin (icode
, arglist
, target
)
4396 enum insn_code icode
;
4401 tree arg0
= TREE_VALUE (arglist
);
4402 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4403 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4404 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4406 if (icode
== CODE_FOR_nothing
)
4407 /* Builtin not supported on this processor. */
4410 /* If we got invalid arguments bail out before generating bad rtl. */
4411 if (arg0
== error_mark_node
)
4414 if (icode
== CODE_FOR_altivec_vspltisb
4415 || icode
== CODE_FOR_altivec_vspltish
4416 || icode
== CODE_FOR_altivec_vspltisw
4417 || icode
== CODE_FOR_spe_evsplatfi
4418 || icode
== CODE_FOR_spe_evsplati
)
4420 /* Only allow 5-bit *signed* literals. */
4421 if (GET_CODE (op0
) != CONST_INT
4422 || INTVAL (op0
) > 0x1f
4423 || INTVAL (op0
) < -0x1f)
4425 error ("argument 1 must be a 5-bit signed literal");
4431 || GET_MODE (target
) != tmode
4432 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4433 target
= gen_reg_rtx (tmode
);
4435 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4436 op0
= copy_to_mode_reg (mode0
, op0
);
4438 pat
= GEN_FCN (icode
) (target
, op0
);
4447 altivec_expand_abs_builtin (icode
, arglist
, target
)
4448 enum insn_code icode
;
4452 rtx pat
, scratch1
, scratch2
;
4453 tree arg0
= TREE_VALUE (arglist
);
4454 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4455 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4456 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4458 /* If we have invalid arguments, bail out before generating bad rtl. */
4459 if (arg0
== error_mark_node
)
4463 || GET_MODE (target
) != tmode
4464 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4465 target
= gen_reg_rtx (tmode
);
4467 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4468 op0
= copy_to_mode_reg (mode0
, op0
);
4470 scratch1
= gen_reg_rtx (mode0
);
4471 scratch2
= gen_reg_rtx (mode0
);
4473 pat
= GEN_FCN (icode
) (target
, op0
, scratch1
, scratch2
);
4482 rs6000_expand_binop_builtin (icode
, arglist
, target
)
4483 enum insn_code icode
;
4488 tree arg0
= TREE_VALUE (arglist
);
4489 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4490 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4491 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4492 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4493 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4494 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
4496 if (icode
== CODE_FOR_nothing
)
4497 /* Builtin not supported on this processor. */
4500 /* If we got invalid arguments bail out before generating bad rtl. */
4501 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
4504 if (icode
== CODE_FOR_altivec_vcfux
4505 || icode
== CODE_FOR_altivec_vcfsx
4506 || icode
== CODE_FOR_altivec_vctsxs
4507 || icode
== CODE_FOR_altivec_vctuxs
4508 || icode
== CODE_FOR_altivec_vspltb
4509 || icode
== CODE_FOR_altivec_vsplth
4510 || icode
== CODE_FOR_altivec_vspltw
4511 || icode
== CODE_FOR_spe_evaddiw
4512 || icode
== CODE_FOR_spe_evldd
4513 || icode
== CODE_FOR_spe_evldh
4514 || icode
== CODE_FOR_spe_evldw
4515 || icode
== CODE_FOR_spe_evlhhesplat
4516 || icode
== CODE_FOR_spe_evlhhossplat
4517 || icode
== CODE_FOR_spe_evlhhousplat
4518 || icode
== CODE_FOR_spe_evlwhe
4519 || icode
== CODE_FOR_spe_evlwhos
4520 || icode
== CODE_FOR_spe_evlwhou
4521 || icode
== CODE_FOR_spe_evlwhsplat
4522 || icode
== CODE_FOR_spe_evlwwsplat
4523 || icode
== CODE_FOR_spe_evrlwi
4524 || icode
== CODE_FOR_spe_evslwi
4525 || icode
== CODE_FOR_spe_evsrwis
4526 || icode
== CODE_FOR_spe_evsrwiu
)
4528 /* Only allow 5-bit unsigned literals. */
4529 if (TREE_CODE (arg1
) != INTEGER_CST
4530 || TREE_INT_CST_LOW (arg1
) & ~0x1f)
4532 error ("argument 2 must be a 5-bit unsigned literal");
4538 || GET_MODE (target
) != tmode
4539 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4540 target
= gen_reg_rtx (tmode
);
4542 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4543 op0
= copy_to_mode_reg (mode0
, op0
);
4544 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
4545 op1
= copy_to_mode_reg (mode1
, op1
);
4547 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
4556 altivec_expand_predicate_builtin (icode
, opcode
, arglist
, target
)
4557 enum insn_code icode
;
4563 tree cr6_form
= TREE_VALUE (arglist
);
4564 tree arg0
= TREE_VALUE (TREE_CHAIN (arglist
));
4565 tree arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4566 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4567 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4568 enum machine_mode tmode
= SImode
;
4569 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4570 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
4573 if (TREE_CODE (cr6_form
) != INTEGER_CST
)
4575 error ("argument 1 of __builtin_altivec_predicate must be a constant");
4579 cr6_form_int
= TREE_INT_CST_LOW (cr6_form
);
4584 /* If we have invalid arguments, bail out before generating bad rtl. */
4585 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
4589 || GET_MODE (target
) != tmode
4590 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4591 target
= gen_reg_rtx (tmode
);
4593 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4594 op0
= copy_to_mode_reg (mode0
, op0
);
4595 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
4596 op1
= copy_to_mode_reg (mode1
, op1
);
4598 scratch
= gen_reg_rtx (mode0
);
4600 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
,
4601 gen_rtx (SYMBOL_REF
, Pmode
, opcode
));
4606 /* The vec_any* and vec_all* predicates use the same opcodes for two
4607 different operations, but the bits in CR6 will be different
4608 depending on what information we want. So we have to play tricks
4609 with CR6 to get the right bits out.
4611 If you think this is disgusting, look at the specs for the
4612 AltiVec predicates. */
4614 switch (cr6_form_int
)
4617 emit_insn (gen_cr6_test_for_zero (target
));
4620 emit_insn (gen_cr6_test_for_zero_reverse (target
));
4623 emit_insn (gen_cr6_test_for_lt (target
));
4626 emit_insn (gen_cr6_test_for_lt_reverse (target
));
4629 error ("argument 1 of __builtin_altivec_predicate is out of range");
4637 altivec_expand_stv_builtin (icode
, arglist
)
4638 enum insn_code icode
;
4641 tree arg0
= TREE_VALUE (arglist
);
4642 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4643 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4644 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4645 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4646 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
4648 enum machine_mode mode0
= insn_data
[icode
].operand
[0].mode
;
4649 enum machine_mode mode1
= insn_data
[icode
].operand
[1].mode
;
4650 enum machine_mode mode2
= insn_data
[icode
].operand
[2].mode
;
4652 /* Invalid arguments. Bail before doing anything stoopid! */
4653 if (arg0
== error_mark_node
4654 || arg1
== error_mark_node
4655 || arg2
== error_mark_node
)
4658 if (! (*insn_data
[icode
].operand
[2].predicate
) (op0
, mode2
))
4659 op0
= copy_to_mode_reg (mode2
, op0
);
4660 if (! (*insn_data
[icode
].operand
[0].predicate
) (op1
, mode0
))
4661 op1
= copy_to_mode_reg (mode0
, op1
);
4662 if (! (*insn_data
[icode
].operand
[1].predicate
) (op2
, mode1
))
4663 op2
= copy_to_mode_reg (mode1
, op2
);
4665 pat
= GEN_FCN (icode
) (op1
, op2
, op0
);
4672 rs6000_expand_ternop_builtin (icode
, arglist
, target
)
4673 enum insn_code icode
;
4678 tree arg0
= TREE_VALUE (arglist
);
4679 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4680 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4681 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4682 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4683 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
4684 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4685 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4686 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
4687 enum machine_mode mode2
= insn_data
[icode
].operand
[3].mode
;
4689 if (icode
== CODE_FOR_nothing
)
4690 /* Builtin not supported on this processor. */
4693 /* If we got invalid arguments bail out before generating bad rtl. */
4694 if (arg0
== error_mark_node
4695 || arg1
== error_mark_node
4696 || arg2
== error_mark_node
)
4699 if (icode
== CODE_FOR_altivec_vsldoi_4sf
4700 || icode
== CODE_FOR_altivec_vsldoi_4si
4701 || icode
== CODE_FOR_altivec_vsldoi_8hi
4702 || icode
== CODE_FOR_altivec_vsldoi_16qi
)
4704 /* Only allow 4-bit unsigned literals. */
4705 if (TREE_CODE (arg2
) != INTEGER_CST
4706 || TREE_INT_CST_LOW (arg2
) & ~0xf)
4708 error ("argument 3 must be a 4-bit unsigned literal");
4714 || GET_MODE (target
) != tmode
4715 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4716 target
= gen_reg_rtx (tmode
);
4718 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4719 op0
= copy_to_mode_reg (mode0
, op0
);
4720 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
4721 op1
= copy_to_mode_reg (mode1
, op1
);
4722 if (! (*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
))
4723 op2
= copy_to_mode_reg (mode2
, op2
);
4725 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
);
4733 /* Expand the lvx builtins. */
4735 altivec_expand_ld_builtin (exp
, target
, expandedp
)
4740 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4741 tree arglist
= TREE_OPERAND (exp
, 1);
4742 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4744 enum machine_mode tmode
, mode0
;
4746 enum insn_code icode
;
4750 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi
:
4751 icode
= CODE_FOR_altivec_lvx_16qi
;
4753 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi
:
4754 icode
= CODE_FOR_altivec_lvx_8hi
;
4756 case ALTIVEC_BUILTIN_LD_INTERNAL_4si
:
4757 icode
= CODE_FOR_altivec_lvx_4si
;
4759 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf
:
4760 icode
= CODE_FOR_altivec_lvx_4sf
;
4769 arg0
= TREE_VALUE (arglist
);
4770 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4771 tmode
= insn_data
[icode
].operand
[0].mode
;
4772 mode0
= insn_data
[icode
].operand
[1].mode
;
4775 || GET_MODE (target
) != tmode
4776 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4777 target
= gen_reg_rtx (tmode
);
4779 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4780 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
4782 pat
= GEN_FCN (icode
) (target
, op0
);
4789 /* Expand the stvx builtins. */
4791 altivec_expand_st_builtin (exp
, target
, expandedp
)
4793 rtx target ATTRIBUTE_UNUSED
;
4796 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4797 tree arglist
= TREE_OPERAND (exp
, 1);
4798 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4800 enum machine_mode mode0
, mode1
;
4802 enum insn_code icode
;
4806 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi
:
4807 icode
= CODE_FOR_altivec_stvx_16qi
;
4809 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi
:
4810 icode
= CODE_FOR_altivec_stvx_8hi
;
4812 case ALTIVEC_BUILTIN_ST_INTERNAL_4si
:
4813 icode
= CODE_FOR_altivec_stvx_4si
;
4815 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf
:
4816 icode
= CODE_FOR_altivec_stvx_4sf
;
4823 arg0
= TREE_VALUE (arglist
);
4824 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4825 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4826 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4827 mode0
= insn_data
[icode
].operand
[0].mode
;
4828 mode1
= insn_data
[icode
].operand
[1].mode
;
4830 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
4831 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
4832 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
4833 op1
= copy_to_mode_reg (mode1
, op1
);
4835 pat
= GEN_FCN (icode
) (op0
, op1
);
4843 /* Expand the dst builtins. */
4845 altivec_expand_dst_builtin (exp
, target
, expandedp
)
4847 rtx target ATTRIBUTE_UNUSED
;
4850 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4851 tree arglist
= TREE_OPERAND (exp
, 1);
4852 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4853 tree arg0
, arg1
, arg2
;
4854 enum machine_mode mode0
, mode1
, mode2
;
4855 rtx pat
, op0
, op1
, op2
;
4856 struct builtin_description
*d
;
4861 /* Handle DST variants. */
4862 d
= (struct builtin_description
*) bdesc_dst
;
4863 for (i
= 0; i
< ARRAY_SIZE (bdesc_dst
); i
++, d
++)
4864 if (d
->code
== fcode
)
4866 arg0
= TREE_VALUE (arglist
);
4867 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4868 arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4869 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4870 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4871 op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
4872 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
4873 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
4874 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
4876 /* Invalid arguments, bail out before generating bad rtl. */
4877 if (arg0
== error_mark_node
4878 || arg1
== error_mark_node
4879 || arg2
== error_mark_node
)
4882 if (TREE_CODE (arg2
) != INTEGER_CST
4883 || TREE_INT_CST_LOW (arg2
) & ~0x3)
4885 error ("argument to `%s' must be a 2-bit unsigned literal", d
->name
);
4889 if (! (*insn_data
[d
->icode
].operand
[0].predicate
) (op0
, mode0
))
4890 op0
= copy_to_mode_reg (mode0
, op0
);
4891 if (! (*insn_data
[d
->icode
].operand
[1].predicate
) (op1
, mode1
))
4892 op1
= copy_to_mode_reg (mode1
, op1
);
4894 pat
= GEN_FCN (d
->icode
) (op0
, op1
, op2
);
4905 /* Expand the builtin in EXP and store the result in TARGET. Store
4906 true in *EXPANDEDP if we found a builtin to expand. */
4908 altivec_expand_builtin (exp
, target
, expandedp
)
4913 struct builtin_description
*d
;
4914 struct builtin_description_predicates
*dp
;
4916 enum insn_code icode
;
4917 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4918 tree arglist
= TREE_OPERAND (exp
, 1);
4921 enum machine_mode tmode
, mode0
;
4922 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4924 target
= altivec_expand_ld_builtin (exp
, target
, expandedp
);
4928 target
= altivec_expand_st_builtin (exp
, target
, expandedp
);
4932 target
= altivec_expand_dst_builtin (exp
, target
, expandedp
);
4940 case ALTIVEC_BUILTIN_STVX
:
4941 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx
, arglist
);
4942 case ALTIVEC_BUILTIN_STVEBX
:
4943 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx
, arglist
);
4944 case ALTIVEC_BUILTIN_STVEHX
:
4945 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx
, arglist
);
4946 case ALTIVEC_BUILTIN_STVEWX
:
4947 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx
, arglist
);
4948 case ALTIVEC_BUILTIN_STVXL
:
4949 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl
, arglist
);
4951 case ALTIVEC_BUILTIN_MFVSCR
:
4952 icode
= CODE_FOR_altivec_mfvscr
;
4953 tmode
= insn_data
[icode
].operand
[0].mode
;
4956 || GET_MODE (target
) != tmode
4957 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4958 target
= gen_reg_rtx (tmode
);
4960 pat
= GEN_FCN (icode
) (target
);
4966 case ALTIVEC_BUILTIN_MTVSCR
:
4967 icode
= CODE_FOR_altivec_mtvscr
;
4968 arg0
= TREE_VALUE (arglist
);
4969 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4970 mode0
= insn_data
[icode
].operand
[0].mode
;
4972 /* If we got invalid arguments bail out before generating bad rtl. */
4973 if (arg0
== error_mark_node
)
4976 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
4977 op0
= copy_to_mode_reg (mode0
, op0
);
4979 pat
= GEN_FCN (icode
) (op0
);
4984 case ALTIVEC_BUILTIN_DSSALL
:
4985 emit_insn (gen_altivec_dssall ());
4988 case ALTIVEC_BUILTIN_DSS
:
4989 icode
= CODE_FOR_altivec_dss
;
4990 arg0
= TREE_VALUE (arglist
);
4991 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4992 mode0
= insn_data
[icode
].operand
[0].mode
;
4994 /* If we got invalid arguments bail out before generating bad rtl. */
4995 if (arg0
== error_mark_node
)
4998 if (TREE_CODE (arg0
) != INTEGER_CST
4999 || TREE_INT_CST_LOW (arg0
) & ~0x3)
5001 error ("argument to dss must be a 2-bit unsigned literal");
5005 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
5006 op0
= copy_to_mode_reg (mode0
, op0
);
5008 emit_insn (gen_altivec_dss (op0
));
5012 /* Expand abs* operations. */
5013 d
= (struct builtin_description
*) bdesc_abs
;
5014 for (i
= 0; i
< ARRAY_SIZE (bdesc_abs
); i
++, d
++)
5015 if (d
->code
== fcode
)
5016 return altivec_expand_abs_builtin (d
->icode
, arglist
, target
);
5018 /* Expand the AltiVec predicates. */
5019 dp
= (struct builtin_description_predicates
*) bdesc_altivec_preds
;
5020 for (i
= 0; i
< ARRAY_SIZE (bdesc_altivec_preds
); i
++, dp
++)
5021 if (dp
->code
== fcode
)
5022 return altivec_expand_predicate_builtin (dp
->icode
, dp
->opcode
, arglist
, target
);
5024 /* LV* are funky. We initialized them differently. */
5027 case ALTIVEC_BUILTIN_LVSL
:
5028 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl
,
5030 case ALTIVEC_BUILTIN_LVSR
:
5031 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr
,
5033 case ALTIVEC_BUILTIN_LVEBX
:
5034 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx
,
5036 case ALTIVEC_BUILTIN_LVEHX
:
5037 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx
,
5039 case ALTIVEC_BUILTIN_LVEWX
:
5040 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx
,
5042 case ALTIVEC_BUILTIN_LVXL
:
5043 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl
,
5045 case ALTIVEC_BUILTIN_LVX
:
5046 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx
,
/* Binops that need to be initialized manually, but can be expanded
   automagically by rs6000_expand_binop_builtin.  */
/* NOTE(review): extraction artifact -- the initializer's opening brace
   and trailing "};" were dropped from this view; confirm against the
   original file.  Each entry is { mask, icode, name, builtin-code };
   the mask field is 0 here because these builtins are enabled later by
   enable_mask_for_builtins (see spe_init_builtins).  */
static struct builtin_description bdesc_2arg_spe[] =
  /* Indexed (register + register) SPE vector loads.  */
  { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
  { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
  { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
  { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
  { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
  { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
  { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
  { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
  { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
  { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
  { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
  /* Offset (register + immediate) SPE vector loads.  */
  { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
  { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
  { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
  { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
  { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
  { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
  { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
  { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
  { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
  { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
  { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.

   This expands the SPE builtins that are not simple unary and binary
   ...  (remainder of this comment was lost in extraction)  */
/* NOTE(review): this view of the function is incomplete -- the return
   type, braces, the switch statement heads, break/return statements
   and the *expandedp assignments appear to have been dropped by the
   extraction.  Confirm every control-flow detail against the original
   file before changing anything here.  */
spe_expand_builtin (exp, target, expandedp)
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  enum insn_code icode;
  enum machine_mode tmode, mode0;
  struct builtin_description *d;

  /* Syntax check for a 5-bit unsigned immediate.  */
    case SPE_BUILTIN_EVSTDD:
    case SPE_BUILTIN_EVSTDH:
    case SPE_BUILTIN_EVSTDW:
    case SPE_BUILTIN_EVSTWHE:
    case SPE_BUILTIN_EVSTWHO:
    case SPE_BUILTIN_EVSTWWE:
    case SPE_BUILTIN_EVSTWWO:
      /* The offset operand (third element of the arg list) must be a
	 compile-time constant that fits in 5 bits.  */
      arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      if (TREE_CODE (arg1) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg1) & ~0x1f)
	  error ("argument 2 must be a 5-bit unsigned literal");

  /* Table-driven expansion of the simple binary SPE builtins.  */
  d = (struct builtin_description *) bdesc_2arg_spe;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
    if (d->code == fcode)
      return rs6000_expand_binop_builtin (d->icode, arglist, target);

  /* SPE predicate builtins.  */
  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_predicate_builtin (d->icode, arglist, target);

  /* SPE evsel (element select) builtins.  */
  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_evsel_builtin (d->icode, arglist, target);

    /* The SPE stores reuse the AltiVec stv expander: same (value,
       pointer, offset) argument shape.  */
    case SPE_BUILTIN_EVSTDDX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
    case SPE_BUILTIN_EVSTDHX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
    case SPE_BUILTIN_EVSTDWX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
    case SPE_BUILTIN_EVSTWHEX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
    case SPE_BUILTIN_EVSTWHOX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
    case SPE_BUILTIN_EVSTWWEX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
    case SPE_BUILTIN_EVSTWWOX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
    case SPE_BUILTIN_EVSTDD:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
    case SPE_BUILTIN_EVSTDH:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
    case SPE_BUILTIN_EVSTDW:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
    case SPE_BUILTIN_EVSTWHE:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
    case SPE_BUILTIN_EVSTWHO:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
    case SPE_BUILTIN_EVSTWWE:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
    case SPE_BUILTIN_EVSTWWO:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);

    /* Read the SPEFSCR; no operands, result goes in TARGET (a fresh
       register is allocated if TARGET is unusable).  */
    case SPE_BUILTIN_MFSPEFSCR:
      icode = CODE_FOR_spe_mfspefscr;
      tmode = insn_data[icode].operand[0].mode;
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);
      pat = GEN_FCN (icode) (target);

    /* Write the SPEFSCR from the single argument.  */
    case SPE_BUILTIN_MTSPEFSCR:
      icode = CODE_FOR_spe_mtspefscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;
      /* Bail out before generating bad rtl on invalid arguments.  */
      if (arg0 == error_mark_node)
      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);
      pat = GEN_FCN (icode) (op0);
/* Expand an SPE predicate builtin: emit one compare whose result lands
   in a CC register, then extract the requested CR bit into TARGET as
   an SImode 0/1 value.  The first argument selects which of the four
   variants (all/any/upper/lower) is wanted.  */
/* NOTE(review): incomplete view -- the return type, braces, the
   "switch (form_int)" head, the case labels for the any/upper/lower
   variants and several return statements were dropped by the
   extraction; confirm against the original file.  */
spe_expand_predicate_builtin (icode, arglist, target)
     enum insn_code icode;
  rtx pat, scratch, tmp;
  tree form = TREE_VALUE (arglist);			/* variant selector */
  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  /* The variant selector must be a compile-time constant.  */
  if (TREE_CODE (form) != INTEGER_CST)
      error ("argument 1 of __builtin_spe_predicate must be a constant");
  form_int = TREE_INT_CST_LOW (form);

  /* Bail out before generating bad rtl on invalid arguments.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)

  /* The result is an SImode boolean; get a usable target.  */
      || GET_MODE (target) != SImode
      || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
    target = gen_reg_rtx (SImode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  /* One compare into a CC scratch; the variants differ only in which
     CR bit they read afterwards.  */
  scratch = gen_reg_rtx (CCmode);
  pat = GEN_FCN (icode) (scratch, op0, op1);

  /* There are 4 variants for each predicate: _any_, _all_, _upper_,
     _lower_.  We use one compare, but look in different bits of the
     CR for each variant.

     There are 2 elements in each SPE simd type (upper/lower).  The CR
     bits are set as follows:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     U     |   L    | (U | L) | (U & L)

     So, for an "all" relationship, BIT 3 would be set.
     For an "any" relationship, BIT 2 would be set.  Etc.

     Following traditional nomenclature, these bits map to:

     BIT0  | BIT 1  | BIT 2   | BIT 3

     Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.  */

      /* All variant.  OV bit.  */
      /* We need to get to the OV bit, which is the ORDERED bit.  We
	 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
	 that's ugly and will trigger a validate_condition_mode abort.
	 So let's just use another pattern.  */
      emit_insn (gen_move_from_CR_ov_bit (target, scratch));
      /* Any variant.  EQ bit.  */
      /* Upper variant.  LT bit.  */
      /* Lower variant.  GT bit.  */
      error ("argument 1 of __builtin_spe_predicate is out of range");

  /* Materialize the selected CR bit as an SImode 0/1 in TARGET.  */
  tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
  emit_move_insn (target, tmp);
/* The evsel builtins look like this:

     e = __builtin_spe_evsel_OP (a, b, c, d);

     e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
     e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];  */
/* NOTE(review): incomplete view -- return type, braces, the target ==
   0 check and the early returns after error checks were dropped by the
   extraction; confirm against the original file.  Also note the
   operand[1] predicate is (apparently) reused to validate op1..op3 --
   verify that is intentional in the original.  */
spe_expand_evsel_builtin (icode, arglist, target)
     enum insn_code icode;
  /* Four arguments: a, b are compared; c, d are selected from.  */
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
  rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  /* Bail out before generating bad rtl on invalid arguments.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node
      || arg2 == error_mark_node || arg3 == error_mark_node)

      || GET_MODE (target) != mode0
      || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
    target = gen_reg_rtx (mode0);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode0, op1);
  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
    op2 = copy_to_mode_reg (mode0, op2);
  if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
    op3 = copy_to_mode_reg (mode0, op3);

  /* Generate the compare.  */
  scratch = gen_reg_rtx (CCmode);
  pat = GEN_FCN (icode) (scratch, op0, op1);

  /* Select between op2/op3 per element, keyed off the compare result.
     V2SImode uses the integer evsel pattern, otherwise the FP one.  */
  if (mode0 == V2SImode)
    emit_insn (gen_spe_evsel (target, op2, op3, scratch));
    emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */
/* NOTE(review): incomplete view -- the return type, braces, the
   TARGET_ALTIVEC/TARGET_SPE guards around the two delegating calls,
   the "if (success) return ret;" checks and the final fallthrough
   (abort/return) were dropped by the extraction; confirm against the
   original file.  */
rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
     rtx subtarget ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     int ignore ATTRIBUTE_UNUSED;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  struct builtin_description *d;

  /* First try the target-family-specific expanders; each reports via
     its success flag whether it recognized the builtin.  */
      ret = altivec_expand_builtin (exp, target, &success);
      ret = spe_expand_builtin (exp, target, &success);

  if (TARGET_ALTIVEC || TARGET_SPE)
      /* Handle simple unary operations.  */
      d = (struct builtin_description *) bdesc_1arg;
      for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
	if (d->code == fcode)
	  return rs6000_expand_unop_builtin (d->icode, arglist, target);

      /* Handle simple binary operations.  */
      d = (struct builtin_description *) bdesc_2arg;
      for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
	if (d->code == fcode)
	  return rs6000_expand_binop_builtin (d->icode, arglist, target);

      /* Handle simple ternary operations.  */
      d = (struct builtin_description *) bdesc_3arg;
      for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
	if (d->code == fcode)
	  return rs6000_expand_ternop_builtin (d->icode, arglist, target);
/* Register all rs6000 builtins with the front end.  The opaque V2SI/
   V2SF copies give the SPE types a distinct identity from the generic
   vector types.  */
/* NOTE(review): incomplete view -- the return type, braces, and
   (apparently) the TARGET_SPE / TARGET_ALTIVEC guards in front of the
   two family-specific init calls were dropped by the extraction;
   confirm against the original file.  */
rs6000_init_builtins ()
  opaque_V2SI_type_node = copy_node (V2SI_type_node);
  opaque_V2SF_type_node = copy_node (V2SF_type_node);
    spe_init_builtins ();
    altivec_init_builtins ();
  if (TARGET_ALTIVEC || TARGET_SPE)
    rs6000_common_init_builtins ();
/* Search through a set of builtins and enable the mask bits.
   DESC is an array of builtins.
   SIZE is the total number of builtins.
   START is the builtin enum at which to start.
   END is the builtin enum at which to end.  */
/* NOTE(review): incomplete view -- the return type, braces, "break"
   statements terminating both loops, and the "size" parameter
   declaration were dropped by the extraction; confirm against the
   original file.  */
enable_mask_for_builtins (desc, size, start, end)
     struct builtin_description *desc;
     enum rs6000_builtins start, end;
  /* Locate the START entry...  */
  for (i = 0; i < size; ++i)
    if (desc[i].code == start)
  /* ...then enable every entry through END.  */
  for (; i < size; ++i)
      /* Flip all the bits on.  */
      desc[i].mask = target_flags;
      if (desc[i].code == end)
/* Build the function types needed by the SPE builtins and register the
   irregular ones (loads, stores, SPEFSCR access, predicates, evsel).
   The simple unary/binary SPE builtins are initialized in
   rs6000_common_init_builtins; their mask bits are enabled here via
   enable_mask_for_builtins because target_flags has run out of bits.  */
/* NOTE(review): incomplete view -- the return type, braces, several
   trailing "endlink)...)" argument lines of the build_function_type
   chains, some declarator lines (e.g. the names of the void(int) and
   int(void) types), the final arguments of the first two
   enable_mask_for_builtins calls, and the default cases of the two
   switches were dropped by the extraction; confirm against the
   original file.  */
spe_init_builtins ()
  tree endlink = void_list_node;
  tree puint_type_node = build_pointer_type (unsigned_type_node);
  tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
  tree pv2si_type_node = build_pointer_type (opaque_V2SI_type_node);
  struct builtin_description *d;

  /* V2SI (V2SI, V2SI, V2SI, V2SI) -- used by the integer evsel forms.  */
  tree v2si_ftype_4_v2si
    = build_function_type
    (opaque_V2SI_type_node,
     tree_cons (NULL_TREE, opaque_V2SI_type_node,
		tree_cons (NULL_TREE, opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      tree_cons (NULL_TREE, opaque_V2SI_type_node,

  /* V2SF (V2SF, V2SF, V2SF, V2SF) -- used by the FP evsel forms.  */
  tree v2sf_ftype_4_v2sf
    = build_function_type
    (opaque_V2SF_type_node,
     tree_cons (NULL_TREE, opaque_V2SF_type_node,
		tree_cons (NULL_TREE, opaque_V2SF_type_node,
			   tree_cons (NULL_TREE, opaque_V2SF_type_node,
				      tree_cons (NULL_TREE, opaque_V2SF_type_node,

  /* int (int, V2SI, V2SI) -- integer predicate signature.  */
  tree int_ftype_int_v2si_v2si
    = build_function_type
     tree_cons (NULL_TREE, integer_type_node,
		tree_cons (NULL_TREE, opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,

  /* int (int, V2SF, V2SF) -- FP predicate signature.  */
  tree int_ftype_int_v2sf_v2sf
    = build_function_type
     tree_cons (NULL_TREE, integer_type_node,
		tree_cons (NULL_TREE, opaque_V2SF_type_node,
			   tree_cons (NULL_TREE, opaque_V2SF_type_node,

  /* Store signatures: (value, pointer, index-or-immediate).  */
  tree void_ftype_v2si_puint_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      tree_cons (NULL_TREE, puint_type_node,
						 tree_cons (NULL_TREE,

  tree void_ftype_v2si_puint_char
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      tree_cons (NULL_TREE, puint_type_node,
						 tree_cons (NULL_TREE,

  tree void_ftype_v2si_pv2si_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      tree_cons (NULL_TREE, pv2si_type_node,
						 tree_cons (NULL_TREE,

  tree void_ftype_v2si_pv2si_char
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      tree_cons (NULL_TREE, pv2si_type_node,
						 tree_cons (NULL_TREE,

  /* void (int) -- NOTE(review): declarator line dropped; presumably
     "tree void_ftype_int".  */
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, integer_type_node, endlink));

  /* int (void) -- NOTE(review): declarator line dropped; presumably
     "tree int_ftype_void".  */
    = build_function_type (integer_type_node,
			   tree_cons (NULL_TREE, void_type_node, endlink));

  /* Load signatures: (pointer, index-or-immediate).  */
  tree v2si_ftype_pv2si_int
    = build_function_type (opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, pv2si_type_node,
				      tree_cons (NULL_TREE, integer_type_node,

  tree v2si_ftype_puint_int
    = build_function_type (opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, puint_type_node,
				      tree_cons (NULL_TREE, integer_type_node,

  tree v2si_ftype_pushort_int
    = build_function_type (opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, pushort_type_node,
				      tree_cons (NULL_TREE, integer_type_node,

  /* The initialization of the simple binary and unary builtins is
     done in rs6000_common_init_builtins, but we have to enable the
     mask bits here manually because we have run out of `target_flags'
     bits.  We really need to redesign this mask business.  */
  enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
			    ARRAY_SIZE (bdesc_2arg),
  enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
			    ARRAY_SIZE (bdesc_1arg),
			    SPE_BUILTIN_EVSUBFUSIAAW);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
			    ARRAY_SIZE (bdesc_spe_predicates),
			    SPE_BUILTIN_EVCMPEQ,
			    SPE_BUILTIN_EVFSTSTLT);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
			    ARRAY_SIZE (bdesc_spe_evsel),
			    SPE_BUILTIN_EVSEL_CMPGTS,
			    SPE_BUILTIN_EVSEL_FSTSTEQ);

  /* Initialize irregular SPE builtins.  */
  def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
  def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
  def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
  def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
  def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
  def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
  def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
  def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
  def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
  def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
  def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
  def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
  def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
  def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
  def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
  def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);

  def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
  def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
  def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
  def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
  def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
  def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
  def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
  def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
  def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
  def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
  def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
  def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
  def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
  def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
  def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);

  /* Register each SPE predicate, choosing the int or FP signature from
     the insn's first input operand mode.  */
  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
      switch (insn_data[d->icode].operand[1].mode)
	  type = int_ftype_int_v2si_v2si;
	  type = int_ftype_int_v2sf_v2sf;
      def_builtin (d->mask, d->name, type, d->code);

  /* Evsel predicates.  */
  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
      switch (insn_data[d->icode].operand[1].mode)
	  type = v2si_ftype_4_v2si;
	  type = v2sf_ftype_4_v2sf;
      def_builtin (d->mask, d->name, type, d->code);
/* Build the function types needed by the AltiVec builtins and register
   the irregular ones (internal loads/stores, VSCR access, data-stream
   ops, lvs*/lve*/stv* element accessors), plus the table-driven dst
   variants, predicates, and abs* operators.  */
/* NOTE(review): incomplete view -- the return type, braces, the switch
   heads and case labels inside the predicate and abs loops, "break"
   statements, and some declarator lines (e.g. the name of the
   void(char) type used by __builtin_altivec_dss, presumably
   "tree void_ftype_qi") were dropped by the extraction; confirm
   against the original file.  */
altivec_init_builtins ()
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  tree pfloat_type_node = build_pointer_type (float_type_node);
  tree pint_type_node = build_pointer_type (integer_type_node);
  tree pshort_type_node = build_pointer_type (short_integer_type_node);
  tree pchar_type_node = build_pointer_type (char_type_node);
  tree pvoid_type_node = build_pointer_type (void_type_node);
  /* Const-qualified pointer types for the load builtins.  */
  tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
  tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
  tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
  tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
  tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));

  tree int_ftype_int_v4si_v4si
    = build_function_type_list (integer_type_node,
				integer_type_node, V4SI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_pcfloat
    = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
  tree void_ftype_pfloat_v4sf
    = build_function_type_list (void_type_node,
				pfloat_type_node, V4SF_type_node, NULL_TREE);
  tree v4si_ftype_pcint
    = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
  tree void_ftype_pint_v4si
    = build_function_type_list (void_type_node,
				pint_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_pcshort
    = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
  tree void_ftype_pshort_v8hi
    = build_function_type_list (void_type_node,
				pshort_type_node, V8HI_type_node, NULL_TREE);
  tree v16qi_ftype_pcchar
    = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
  tree void_ftype_pchar_v16qi
    = build_function_type_list (void_type_node,
				pchar_type_node, V16QI_type_node, NULL_TREE);
  tree void_ftype_v4si
    = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_void
    = build_function_type (V8HI_type_node, void_list_node);
  tree void_ftype_void
    = build_function_type (void_type_node, void_list_node);
  /* void (char) -- NOTE(review): declarator line dropped; the dss
     builtin below names it void_ftype_qi.  */
    = build_function_type_list (void_type_node, char_type_node, NULL_TREE);

  tree v16qi_ftype_int_pcvoid
    = build_function_type_list (V16QI_type_node,
				integer_type_node, pcvoid_type_node, NULL_TREE);
  tree v8hi_ftype_int_pcvoid
    = build_function_type_list (V8HI_type_node,
				integer_type_node, pcvoid_type_node, NULL_TREE);
  tree v4si_ftype_int_pcvoid
    = build_function_type_list (V4SI_type_node,
				integer_type_node, pcvoid_type_node, NULL_TREE);

  tree void_ftype_v4si_int_pvoid
    = build_function_type_list (void_type_node,
				V4SI_type_node, integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree void_ftype_v16qi_int_pvoid
    = build_function_type_list (void_type_node,
				V16QI_type_node, integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree void_ftype_v8hi_int_pvoid
    = build_function_type_list (void_type_node,
				V8HI_type_node, integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree int_ftype_int_v8hi_v8hi
    = build_function_type_list (integer_type_node,
				integer_type_node, V8HI_type_node,
				V8HI_type_node, NULL_TREE);
  tree int_ftype_int_v16qi_v16qi
    = build_function_type_list (integer_type_node,
				integer_type_node, V16QI_type_node,
				V16QI_type_node, NULL_TREE);
  tree int_ftype_int_v4sf_v4sf
    = build_function_type_list (integer_type_node,
				integer_type_node, V4SF_type_node,
				V4SF_type_node, NULL_TREE);
  tree v4si_ftype_v4si
    = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi
    = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi
    = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf
    = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree void_ftype_pcvoid_int_char
    = build_function_type_list (void_type_node,
				pcvoid_type_node, integer_type_node,
				char_type_node, NULL_TREE);

  /* Internal full-vector loads/stores used for generic vector code.  */
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
	       ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
	       ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
	       ALTIVEC_BUILTIN_LD_INTERNAL_4si);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
	       ALTIVEC_BUILTIN_ST_INTERNAL_4si);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
	       ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
	       ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
	       ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
	       ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
  /* VSCR access and data-stream control.  */
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
  /* Permute-control and element loads/stores.  */
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEHX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVXL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);

  /* Add the DST variants.  */
  d = (struct builtin_description *) bdesc_dst;
  for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
    def_builtin (d->mask, d->name, void_ftype_pcvoid_int_char, d->code);

  /* Initialize the predicates.  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
      enum machine_mode mode1;
      /* The signature follows the insn's first input operand mode.  */
      mode1 = insn_data[dp->icode].operand[1].mode;
	  type = int_ftype_int_v4si_v4si;
	  type = int_ftype_int_v8hi_v8hi;
	  type = int_ftype_int_v16qi_v16qi;
	  type = int_ftype_int_v4sf_v4sf;
      def_builtin (dp->mask, dp->name, type, dp->code);

  /* Initialize the abs* operators.  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
      enum machine_mode mode0;
      /* The signature follows the insn's output operand mode.  */
      mode0 = insn_data[d->icode].operand[0].mode;
	  type = v4si_ftype_v4si;
	  type = v8hi_ftype_v8hi;
	  type = v16qi_ftype_v16qi;
	  type = v4sf_ftype_v4sf;
      def_builtin (d->mask, d->name, type, d->code);
5894 rs6000_common_init_builtins ()
5896 struct builtin_description
*d
;
5899 tree v4sf_ftype_v4sf_v4sf_v16qi
5900 = build_function_type_list (V4SF_type_node
,
5901 V4SF_type_node
, V4SF_type_node
,
5902 V16QI_type_node
, NULL_TREE
);
5903 tree v4si_ftype_v4si_v4si_v16qi
5904 = build_function_type_list (V4SI_type_node
,
5905 V4SI_type_node
, V4SI_type_node
,
5906 V16QI_type_node
, NULL_TREE
);
5907 tree v8hi_ftype_v8hi_v8hi_v16qi
5908 = build_function_type_list (V8HI_type_node
,
5909 V8HI_type_node
, V8HI_type_node
,
5910 V16QI_type_node
, NULL_TREE
);
5911 tree v16qi_ftype_v16qi_v16qi_v16qi
5912 = build_function_type_list (V16QI_type_node
,
5913 V16QI_type_node
, V16QI_type_node
,
5914 V16QI_type_node
, NULL_TREE
);
5915 tree v4si_ftype_char
5916 = build_function_type_list (V4SI_type_node
, char_type_node
, NULL_TREE
);
5917 tree v8hi_ftype_char
5918 = build_function_type_list (V8HI_type_node
, char_type_node
, NULL_TREE
);
5919 tree v16qi_ftype_char
5920 = build_function_type_list (V16QI_type_node
, char_type_node
, NULL_TREE
);
5921 tree v8hi_ftype_v16qi
5922 = build_function_type_list (V8HI_type_node
, V16QI_type_node
, NULL_TREE
);
5923 tree v4sf_ftype_v4sf
5924 = build_function_type_list (V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5926 tree v2si_ftype_v2si_v2si
5927 = build_function_type_list (opaque_V2SI_type_node
,
5928 opaque_V2SI_type_node
,
5929 opaque_V2SI_type_node
, NULL_TREE
);
5931 tree v2sf_ftype_v2sf_v2sf
5932 = build_function_type_list (opaque_V2SF_type_node
,
5933 opaque_V2SF_type_node
,
5934 opaque_V2SF_type_node
, NULL_TREE
);
5936 tree v2si_ftype_int_int
5937 = build_function_type_list (opaque_V2SI_type_node
,
5938 integer_type_node
, integer_type_node
,
5941 tree v2si_ftype_v2si
5942 = build_function_type_list (opaque_V2SI_type_node
,
5943 opaque_V2SI_type_node
, NULL_TREE
);
5945 tree v2sf_ftype_v2sf
5946 = build_function_type_list (opaque_V2SF_type_node
,
5947 opaque_V2SF_type_node
, NULL_TREE
);
5949 tree v2sf_ftype_v2si
5950 = build_function_type_list (opaque_V2SF_type_node
,
5951 opaque_V2SI_type_node
, NULL_TREE
);
5953 tree v2si_ftype_v2sf
5954 = build_function_type_list (opaque_V2SI_type_node
,
5955 opaque_V2SF_type_node
, NULL_TREE
);
5957 tree v2si_ftype_v2si_char
5958 = build_function_type_list (opaque_V2SI_type_node
,
5959 opaque_V2SI_type_node
,
5960 char_type_node
, NULL_TREE
);
5962 tree v2si_ftype_int_char
5963 = build_function_type_list (opaque_V2SI_type_node
,
5964 integer_type_node
, char_type_node
, NULL_TREE
);
5966 tree v2si_ftype_char
5967 = build_function_type_list (opaque_V2SI_type_node
,
5968 char_type_node
, NULL_TREE
);
5970 tree int_ftype_int_int
5971 = build_function_type_list (integer_type_node
,
5972 integer_type_node
, integer_type_node
,
5975 tree v4si_ftype_v4si_v4si
5976 = build_function_type_list (V4SI_type_node
,
5977 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
5978 tree v4sf_ftype_v4si_char
5979 = build_function_type_list (V4SF_type_node
,
5980 V4SI_type_node
, char_type_node
, NULL_TREE
);
5981 tree v4si_ftype_v4sf_char
5982 = build_function_type_list (V4SI_type_node
,
5983 V4SF_type_node
, char_type_node
, NULL_TREE
);
5984 tree v4si_ftype_v4si_char
5985 = build_function_type_list (V4SI_type_node
,
5986 V4SI_type_node
, char_type_node
, NULL_TREE
);
5987 tree v8hi_ftype_v8hi_char
5988 = build_function_type_list (V8HI_type_node
,
5989 V8HI_type_node
, char_type_node
, NULL_TREE
);
5990 tree v16qi_ftype_v16qi_char
5991 = build_function_type_list (V16QI_type_node
,
5992 V16QI_type_node
, char_type_node
, NULL_TREE
);
5993 tree v16qi_ftype_v16qi_v16qi_char
5994 = build_function_type_list (V16QI_type_node
,
5995 V16QI_type_node
, V16QI_type_node
,
5996 char_type_node
, NULL_TREE
);
5997 tree v8hi_ftype_v8hi_v8hi_char
5998 = build_function_type_list (V8HI_type_node
,
5999 V8HI_type_node
, V8HI_type_node
,
6000 char_type_node
, NULL_TREE
);
6001 tree v4si_ftype_v4si_v4si_char
6002 = build_function_type_list (V4SI_type_node
,
6003 V4SI_type_node
, V4SI_type_node
,
6004 char_type_node
, NULL_TREE
);
6005 tree v4sf_ftype_v4sf_v4sf_char
6006 = build_function_type_list (V4SF_type_node
,
6007 V4SF_type_node
, V4SF_type_node
,
6008 char_type_node
, NULL_TREE
);
6009 tree v4sf_ftype_v4sf_v4sf
6010 = build_function_type_list (V4SF_type_node
,
6011 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
6012 tree v4sf_ftype_v4sf_v4sf_v4si
6013 = build_function_type_list (V4SF_type_node
,
6014 V4SF_type_node
, V4SF_type_node
,
6015 V4SI_type_node
, NULL_TREE
);
6016 tree v4sf_ftype_v4sf_v4sf_v4sf
6017 = build_function_type_list (V4SF_type_node
,
6018 V4SF_type_node
, V4SF_type_node
,
6019 V4SF_type_node
, NULL_TREE
);
6020 tree v4si_ftype_v4si_v4si_v4si
6021 = build_function_type_list (V4SI_type_node
,
6022 V4SI_type_node
, V4SI_type_node
,
6023 V4SI_type_node
, NULL_TREE
);
6024 tree v8hi_ftype_v8hi_v8hi
6025 = build_function_type_list (V8HI_type_node
,
6026 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
6027 tree v8hi_ftype_v8hi_v8hi_v8hi
6028 = build_function_type_list (V8HI_type_node
,
6029 V8HI_type_node
, V8HI_type_node
,
6030 V8HI_type_node
, NULL_TREE
);
6031 tree v4si_ftype_v8hi_v8hi_v4si
6032 = build_function_type_list (V4SI_type_node
,
6033 V8HI_type_node
, V8HI_type_node
,
6034 V4SI_type_node
, NULL_TREE
);
6035 tree v4si_ftype_v16qi_v16qi_v4si
6036 = build_function_type_list (V4SI_type_node
,
6037 V16QI_type_node
, V16QI_type_node
,
6038 V4SI_type_node
, NULL_TREE
);
6039 tree v16qi_ftype_v16qi_v16qi
6040 = build_function_type_list (V16QI_type_node
,
6041 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
6042 tree v4si_ftype_v4sf_v4sf
6043 = build_function_type_list (V4SI_type_node
,
6044 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
6045 tree v8hi_ftype_v16qi_v16qi
6046 = build_function_type_list (V8HI_type_node
,
6047 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
6048 tree v4si_ftype_v8hi_v8hi
6049 = build_function_type_list (V4SI_type_node
,
6050 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
6051 tree v8hi_ftype_v4si_v4si
6052 = build_function_type_list (V8HI_type_node
,
6053 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
6054 tree v16qi_ftype_v8hi_v8hi
6055 = build_function_type_list (V16QI_type_node
,
6056 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
6057 tree v4si_ftype_v16qi_v4si
6058 = build_function_type_list (V4SI_type_node
,
6059 V16QI_type_node
, V4SI_type_node
, NULL_TREE
);
6060 tree v4si_ftype_v16qi_v16qi
6061 = build_function_type_list (V4SI_type_node
,
6062 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
6063 tree v4si_ftype_v8hi_v4si
6064 = build_function_type_list (V4SI_type_node
,
6065 V8HI_type_node
, V4SI_type_node
, NULL_TREE
);
6066 tree v4si_ftype_v8hi
6067 = build_function_type_list (V4SI_type_node
, V8HI_type_node
, NULL_TREE
);
6068 tree int_ftype_v4si_v4si
6069 = build_function_type_list (integer_type_node
,
6070 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
6071 tree int_ftype_v4sf_v4sf
6072 = build_function_type_list (integer_type_node
,
6073 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
6074 tree int_ftype_v16qi_v16qi
6075 = build_function_type_list (integer_type_node
,
6076 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
6077 tree int_ftype_v8hi_v8hi
6078 = build_function_type_list (integer_type_node
,
6079 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
6081 /* Add the simple ternary operators. */
6082 d
= (struct builtin_description
*) bdesc_3arg
;
6083 for (i
= 0; i
< ARRAY_SIZE (bdesc_3arg
); i
++, d
++)
6086 enum machine_mode mode0
, mode1
, mode2
, mode3
;
6089 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
6092 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
6093 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
6094 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
6095 mode3
= insn_data
[d
->icode
].operand
[3].mode
;
6097 /* When all four are of the same mode. */
6098 if (mode0
== mode1
&& mode1
== mode2
&& mode2
== mode3
)
6103 type
= v4si_ftype_v4si_v4si_v4si
;
6106 type
= v4sf_ftype_v4sf_v4sf_v4sf
;
6109 type
= v8hi_ftype_v8hi_v8hi_v8hi
;
6112 type
= v16qi_ftype_v16qi_v16qi_v16qi
;
6118 else if (mode0
== mode1
&& mode1
== mode2
&& mode3
== V16QImode
)
6123 type
= v4si_ftype_v4si_v4si_v16qi
;
6126 type
= v4sf_ftype_v4sf_v4sf_v16qi
;
6129 type
= v8hi_ftype_v8hi_v8hi_v16qi
;
6132 type
= v16qi_ftype_v16qi_v16qi_v16qi
;
6138 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V16QImode
6139 && mode3
== V4SImode
)
6140 type
= v4si_ftype_v16qi_v16qi_v4si
;
6141 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V8HImode
6142 && mode3
== V4SImode
)
6143 type
= v4si_ftype_v8hi_v8hi_v4si
;
6144 else if (mode0
== V4SFmode
&& mode1
== V4SFmode
&& mode2
== V4SFmode
6145 && mode3
== V4SImode
)
6146 type
= v4sf_ftype_v4sf_v4sf_v4si
;
6148 /* vchar, vchar, vchar, 4 bit literal. */
6149 else if (mode0
== V16QImode
&& mode1
== mode0
&& mode2
== mode0
6151 type
= v16qi_ftype_v16qi_v16qi_char
;
6153 /* vshort, vshort, vshort, 4 bit literal. */
6154 else if (mode0
== V8HImode
&& mode1
== mode0
&& mode2
== mode0
6156 type
= v8hi_ftype_v8hi_v8hi_char
;
6158 /* vint, vint, vint, 4 bit literal. */
6159 else if (mode0
== V4SImode
&& mode1
== mode0
&& mode2
== mode0
6161 type
= v4si_ftype_v4si_v4si_char
;
6163 /* vfloat, vfloat, vfloat, 4 bit literal. */
6164 else if (mode0
== V4SFmode
&& mode1
== mode0
&& mode2
== mode0
6166 type
= v4sf_ftype_v4sf_v4sf_char
;
6171 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
6174 /* Add the simple binary operators. */
6175 d
= (struct builtin_description
*) bdesc_2arg
;
6176 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
6178 enum machine_mode mode0
, mode1
, mode2
;
6181 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
6184 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
6185 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
6186 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
6188 /* When all three operands are of the same mode. */
6189 if (mode0
== mode1
&& mode1
== mode2
)
6194 type
= v4sf_ftype_v4sf_v4sf
;
6197 type
= v4si_ftype_v4si_v4si
;
6200 type
= v16qi_ftype_v16qi_v16qi
;
6203 type
= v8hi_ftype_v8hi_v8hi
;
6206 type
= v2si_ftype_v2si_v2si
;
6209 type
= v2sf_ftype_v2sf_v2sf
;
6212 type
= int_ftype_int_int
;
6219 /* A few other combos we really don't want to do manually. */
6221 /* vint, vfloat, vfloat. */
6222 else if (mode0
== V4SImode
&& mode1
== V4SFmode
&& mode2
== V4SFmode
)
6223 type
= v4si_ftype_v4sf_v4sf
;
6225 /* vshort, vchar, vchar. */
6226 else if (mode0
== V8HImode
&& mode1
== V16QImode
&& mode2
== V16QImode
)
6227 type
= v8hi_ftype_v16qi_v16qi
;
6229 /* vint, vshort, vshort. */
6230 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V8HImode
)
6231 type
= v4si_ftype_v8hi_v8hi
;
6233 /* vshort, vint, vint. */
6234 else if (mode0
== V8HImode
&& mode1
== V4SImode
&& mode2
== V4SImode
)
6235 type
= v8hi_ftype_v4si_v4si
;
6237 /* vchar, vshort, vshort. */
6238 else if (mode0
== V16QImode
&& mode1
== V8HImode
&& mode2
== V8HImode
)
6239 type
= v16qi_ftype_v8hi_v8hi
;
6241 /* vint, vchar, vint. */
6242 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V4SImode
)
6243 type
= v4si_ftype_v16qi_v4si
;
6245 /* vint, vchar, vchar. */
6246 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V16QImode
)
6247 type
= v4si_ftype_v16qi_v16qi
;
6249 /* vint, vshort, vint. */
6250 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V4SImode
)
6251 type
= v4si_ftype_v8hi_v4si
;
6253 /* vint, vint, 5 bit literal. */
6254 else if (mode0
== V4SImode
&& mode1
== V4SImode
&& mode2
== QImode
)
6255 type
= v4si_ftype_v4si_char
;
6257 /* vshort, vshort, 5 bit literal. */
6258 else if (mode0
== V8HImode
&& mode1
== V8HImode
&& mode2
== QImode
)
6259 type
= v8hi_ftype_v8hi_char
;
6261 /* vchar, vchar, 5 bit literal. */
6262 else if (mode0
== V16QImode
&& mode1
== V16QImode
&& mode2
== QImode
)
6263 type
= v16qi_ftype_v16qi_char
;
6265 /* vfloat, vint, 5 bit literal. */
6266 else if (mode0
== V4SFmode
&& mode1
== V4SImode
&& mode2
== QImode
)
6267 type
= v4sf_ftype_v4si_char
;
6269 /* vint, vfloat, 5 bit literal. */
6270 else if (mode0
== V4SImode
&& mode1
== V4SFmode
&& mode2
== QImode
)
6271 type
= v4si_ftype_v4sf_char
;
6273 else if (mode0
== V2SImode
&& mode1
== SImode
&& mode2
== SImode
)
6274 type
= v2si_ftype_int_int
;
6276 else if (mode0
== V2SImode
&& mode1
== V2SImode
&& mode2
== QImode
)
6277 type
= v2si_ftype_v2si_char
;
6279 else if (mode0
== V2SImode
&& mode1
== SImode
&& mode2
== QImode
)
6280 type
= v2si_ftype_int_char
;
6283 else if (mode0
== SImode
)
6288 type
= int_ftype_v4si_v4si
;
6291 type
= int_ftype_v4sf_v4sf
;
6294 type
= int_ftype_v16qi_v16qi
;
6297 type
= int_ftype_v8hi_v8hi
;
6307 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
6310 /* Add the simple unary operators. */
6311 d
= (struct builtin_description
*) bdesc_1arg
;
6312 for (i
= 0; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
6314 enum machine_mode mode0
, mode1
;
6317 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
6320 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
6321 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
6323 if (mode0
== V4SImode
&& mode1
== QImode
)
6324 type
= v4si_ftype_char
;
6325 else if (mode0
== V8HImode
&& mode1
== QImode
)
6326 type
= v8hi_ftype_char
;
6327 else if (mode0
== V16QImode
&& mode1
== QImode
)
6328 type
= v16qi_ftype_char
;
6329 else if (mode0
== V4SFmode
&& mode1
== V4SFmode
)
6330 type
= v4sf_ftype_v4sf
;
6331 else if (mode0
== V8HImode
&& mode1
== V16QImode
)
6332 type
= v8hi_ftype_v16qi
;
6333 else if (mode0
== V4SImode
&& mode1
== V8HImode
)
6334 type
= v4si_ftype_v8hi
;
6335 else if (mode0
== V2SImode
&& mode1
== V2SImode
)
6336 type
= v2si_ftype_v2si
;
6337 else if (mode0
== V2SFmode
&& mode1
== V2SFmode
)
6338 type
= v2sf_ftype_v2sf
;
6339 else if (mode0
== V2SFmode
&& mode1
== V2SImode
)
6340 type
= v2sf_ftype_v2si
;
6341 else if (mode0
== V2SImode
&& mode1
== V2SFmode
)
6342 type
= v2si_ftype_v2sf
;
6343 else if (mode0
== V2SImode
&& mode1
== QImode
)
6344 type
= v2si_ftype_char
;
6348 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
6353 /* Expand a block move operation, and return 1 if successful. Return 0
6354 if we should let the compiler generate normal code.
6356 operands[0] is the destination
6357 operands[1] is the source
6358 operands[2] is the length
6359 operands[3] is the alignment */
6361 #define MAX_MOVE_REG 4
6364 expand_block_move (operands
)
6367 rtx orig_dest
= operands
[0];
6368 rtx orig_src
= operands
[1];
6369 rtx bytes_rtx
= operands
[2];
6370 rtx align_rtx
= operands
[3];
6371 int constp
= (GET_CODE (bytes_rtx
) == CONST_INT
);
6377 /* If this is not a fixed size move, just call memcpy */
6381 /* If this is not a fixed size alignment, abort */
6382 if (GET_CODE (align_rtx
) != CONST_INT
)
6384 align
= INTVAL (align_rtx
);
6386 /* Anything to move? */
6387 bytes
= INTVAL (bytes_rtx
);
6391 /* store_one_arg depends on expand_block_move to handle at least the size of
6392 reg_parm_stack_space. */
6393 if (bytes
> (TARGET_POWERPC64
? 64 : 32))
6396 if (TARGET_STRING
) /* string instructions are available */
6398 for (offset
= 0; bytes
> 0; offset
+= move_bytes
, bytes
-= move_bytes
)
6401 rtx (*movstrsi
) PARAMS ((rtx
, rtx
, rtx
, rtx
));
6402 rtx (*mov
) PARAMS ((rtx
, rtx
));
6404 enum machine_mode mode
= BLKmode
;
6407 if (bytes
> 24 /* move up to 32 bytes at a time */
6415 && ! fixed_regs
[12])
6417 move_bytes
= (bytes
> 32) ? 32 : bytes
;
6418 gen_func
.movstrsi
= gen_movstrsi_8reg
;
6420 else if (bytes
> 16 /* move up to 24 bytes at a time */
6426 && ! fixed_regs
[10])
6428 move_bytes
= (bytes
> 24) ? 24 : bytes
;
6429 gen_func
.movstrsi
= gen_movstrsi_6reg
;
6431 else if (bytes
> 8 /* move up to 16 bytes at a time */
6437 move_bytes
= (bytes
> 16) ? 16 : bytes
;
6438 gen_func
.movstrsi
= gen_movstrsi_4reg
;
6440 else if (bytes
>= 8 && TARGET_POWERPC64
6441 /* 64-bit loads and stores require word-aligned
6443 && (align
>= 8 || (! STRICT_ALIGNMENT
&& align
>= 4)))
6447 gen_func
.mov
= gen_movdi
;
6449 else if (bytes
> 4 && !TARGET_POWERPC64
)
6450 { /* move up to 8 bytes at a time */
6451 move_bytes
= (bytes
> 8) ? 8 : bytes
;
6452 gen_func
.movstrsi
= gen_movstrsi_2reg
;
6454 else if (bytes
>= 4 && (align
>= 4 || ! STRICT_ALIGNMENT
))
6455 { /* move 4 bytes */
6458 gen_func
.mov
= gen_movsi
;
6460 else if (bytes
== 2 && (align
>= 2 || ! STRICT_ALIGNMENT
))
6461 { /* move 2 bytes */
6464 gen_func
.mov
= gen_movhi
;
6466 else if (bytes
== 1) /* move 1 byte */
6470 gen_func
.mov
= gen_movqi
;
6473 { /* move up to 4 bytes at a time */
6474 move_bytes
= (bytes
> 4) ? 4 : bytes
;
6475 gen_func
.movstrsi
= gen_movstrsi_1reg
;
6478 src
= adjust_address (orig_src
, mode
, offset
);
6479 dest
= adjust_address (orig_dest
, mode
, offset
);
6481 if (mode
== BLKmode
)
6483 /* Move the address into scratch registers. The movstrsi
6484 patterns require zero offset. */
6485 if (!REG_P (XEXP (src
, 0)))
6487 rtx src_reg
= copy_addr_to_reg (XEXP (src
, 0));
6488 src
= replace_equiv_address (src
, src_reg
);
6490 set_mem_size (src
, GEN_INT (move_bytes
));
6492 if (!REG_P (XEXP (dest
, 0)))
6494 rtx dest_reg
= copy_addr_to_reg (XEXP (dest
, 0));
6495 dest
= replace_equiv_address (dest
, dest_reg
);
6497 set_mem_size (dest
, GEN_INT (move_bytes
));
6499 emit_insn ((*gen_func
.movstrsi
) (dest
, src
,
6500 GEN_INT (move_bytes
& 31),
6505 rtx tmp_reg
= gen_reg_rtx (mode
);
6507 emit_insn ((*gen_func
.mov
) (tmp_reg
, src
));
6508 emit_insn ((*gen_func
.mov
) (dest
, tmp_reg
));
6513 else /* string instructions not available */
6515 rtx stores
[MAX_MOVE_REG
];
6519 for (offset
= 0; bytes
> 0; offset
+= move_bytes
, bytes
-= move_bytes
)
6521 rtx (*gen_mov_func
) PARAMS ((rtx
, rtx
));
6522 enum machine_mode mode
;
6523 rtx src
, dest
, tmp_reg
;
6525 /* Generate the appropriate load and store, saving the stores
6527 if (bytes
>= 8 && TARGET_POWERPC64
6528 /* 64-bit loads and stores require word-aligned
6530 && (align
>= 8 || (! STRICT_ALIGNMENT
&& align
>= 4)))
6534 gen_mov_func
= gen_movdi
;
6536 else if (bytes
>= 4 && (align
>= 4 || ! STRICT_ALIGNMENT
))
6540 gen_mov_func
= gen_movsi
;
6542 else if (bytes
>= 2 && (align
>= 2 || ! STRICT_ALIGNMENT
))
6546 gen_mov_func
= gen_movhi
;
6552 gen_mov_func
= gen_movqi
;
6555 src
= adjust_address (orig_src
, mode
, offset
);
6556 dest
= adjust_address (orig_dest
, mode
, offset
);
6557 tmp_reg
= gen_reg_rtx (mode
);
6559 emit_insn ((*gen_mov_func
) (tmp_reg
, src
));
6560 stores
[num_reg
++] = (*gen_mov_func
) (dest
, tmp_reg
);
6562 if (num_reg
>= MAX_MOVE_REG
)
6564 for (i
= 0; i
< num_reg
; i
++)
6565 emit_insn (stores
[i
]);
6570 for (i
= 0; i
< num_reg
; i
++)
6571 emit_insn (stores
[i
]);
6578 /* Return 1 if OP is a load multiple operation. It is known to be a
6579 PARALLEL and the first section will be tested. */
6582 load_multiple_operation (op
, mode
)
6584 enum machine_mode mode ATTRIBUTE_UNUSED
;
6586 int count
= XVECLEN (op
, 0);
6587 unsigned int dest_regno
;
6591 /* Perform a quick check so we don't blow up below. */
6593 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6594 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
6595 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != MEM
)
6598 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
6599 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0);
6601 for (i
= 1; i
< count
; i
++)
6603 rtx elt
= XVECEXP (op
, 0, i
);
6605 if (GET_CODE (elt
) != SET
6606 || GET_CODE (SET_DEST (elt
)) != REG
6607 || GET_MODE (SET_DEST (elt
)) != SImode
6608 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
6609 || GET_CODE (SET_SRC (elt
)) != MEM
6610 || GET_MODE (SET_SRC (elt
)) != SImode
6611 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != PLUS
6612 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt
), 0), 0), src_addr
)
6613 || GET_CODE (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != CONST_INT
6614 || INTVAL (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != i
* 4)
6621 /* Similar, but tests for store multiple. Here, the second vector element
6622 is a CLOBBER. It will be tested later. */
6625 store_multiple_operation (op
, mode
)
6627 enum machine_mode mode ATTRIBUTE_UNUSED
;
6629 int count
= XVECLEN (op
, 0) - 1;
6630 unsigned int src_regno
;
6634 /* Perform a quick check so we don't blow up below. */
6636 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6637 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != MEM
6638 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != REG
)
6641 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
6642 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, 0)), 0);
6644 for (i
= 1; i
< count
; i
++)
6646 rtx elt
= XVECEXP (op
, 0, i
+ 1);
6648 if (GET_CODE (elt
) != SET
6649 || GET_CODE (SET_SRC (elt
)) != REG
6650 || GET_MODE (SET_SRC (elt
)) != SImode
6651 || REGNO (SET_SRC (elt
)) != src_regno
+ i
6652 || GET_CODE (SET_DEST (elt
)) != MEM
6653 || GET_MODE (SET_DEST (elt
)) != SImode
6654 || GET_CODE (XEXP (SET_DEST (elt
), 0)) != PLUS
6655 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt
), 0), 0), dest_addr
)
6656 || GET_CODE (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != CONST_INT
6657 || INTVAL (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != i
* 4)
6664 /* Return a string to perform a load_multiple operation.
6665 operands[0] is the vector.
6666 operands[1] is the source address.
6667 operands[2] is the first destination register. */
6670 rs6000_output_load_multiple (operands
)
6673 /* We have to handle the case where the pseudo used to contain the address
6674 is assigned to one of the output registers. */
6676 int words
= XVECLEN (operands
[0], 0);
6679 if (XVECLEN (operands
[0], 0) == 1)
6680 return "{l|lwz} %2,0(%1)";
6682 for (i
= 0; i
< words
; i
++)
6683 if (refers_to_regno_p (REGNO (operands
[2]) + i
,
6684 REGNO (operands
[2]) + i
+ 1, operands
[1], 0))
6688 xop
[0] = GEN_INT (4 * (words
-1));
6689 xop
[1] = operands
[1];
6690 xop
[2] = operands
[2];
6691 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop
);
6696 xop
[0] = GEN_INT (4 * (words
-1));
6697 xop
[1] = operands
[1];
6698 xop
[2] = gen_rtx_REG (SImode
, REGNO (operands
[2]) + 1);
6699 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop
);
6704 for (j
= 0; j
< words
; j
++)
6707 xop
[0] = GEN_INT (j
* 4);
6708 xop
[1] = operands
[1];
6709 xop
[2] = gen_rtx_REG (SImode
, REGNO (operands
[2]) + j
);
6710 output_asm_insn ("{l|lwz} %2,%0(%1)", xop
);
6712 xop
[0] = GEN_INT (i
* 4);
6713 xop
[1] = operands
[1];
6714 output_asm_insn ("{l|lwz} %1,%0(%1)", xop
);
6719 return "{lsi|lswi} %2,%1,%N0";
6722 /* Return 1 for a parallel vrsave operation. */
6725 vrsave_operation (op
, mode
)
6727 enum machine_mode mode ATTRIBUTE_UNUSED
;
6729 int count
= XVECLEN (op
, 0);
6730 unsigned int dest_regno
, src_regno
;
6734 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6735 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
6736 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != UNSPEC_VOLATILE
)
6739 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
6740 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
6742 if (dest_regno
!= VRSAVE_REGNO
6743 && src_regno
!= VRSAVE_REGNO
)
6746 for (i
= 1; i
< count
; i
++)
6748 rtx elt
= XVECEXP (op
, 0, i
);
6750 if (GET_CODE (elt
) != CLOBBER
6751 && GET_CODE (elt
) != SET
)
6758 /* Return 1 for an PARALLEL suitable for mtcrf. */
6761 mtcrf_operation (op
, mode
)
6763 enum machine_mode mode ATTRIBUTE_UNUSED
;
6765 int count
= XVECLEN (op
, 0);
6769 /* Perform a quick check so we don't blow up below. */
6771 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6772 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != UNSPEC
6773 || XVECLEN (SET_SRC (XVECEXP (op
, 0, 0)), 0) != 2)
6775 src_reg
= XVECEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0, 0);
6777 if (GET_CODE (src_reg
) != REG
6778 || GET_MODE (src_reg
) != SImode
6779 || ! INT_REGNO_P (REGNO (src_reg
)))
6782 for (i
= 0; i
< count
; i
++)
6784 rtx exp
= XVECEXP (op
, 0, i
);
6788 if (GET_CODE (exp
) != SET
6789 || GET_CODE (SET_DEST (exp
)) != REG
6790 || GET_MODE (SET_DEST (exp
)) != CCmode
6791 || ! CR_REGNO_P (REGNO (SET_DEST (exp
))))
6793 unspec
= SET_SRC (exp
);
6794 maskval
= 1 << (MAX_CR_REGNO
- REGNO (SET_DEST (exp
)));
6796 if (GET_CODE (unspec
) != UNSPEC
6797 || XINT (unspec
, 1) != UNSPEC_MOVESI_TO_CR
6798 || XVECLEN (unspec
, 0) != 2
6799 || XVECEXP (unspec
, 0, 0) != src_reg
6800 || GET_CODE (XVECEXP (unspec
, 0, 1)) != CONST_INT
6801 || INTVAL (XVECEXP (unspec
, 0, 1)) != maskval
)
6807 /* Return 1 for an PARALLEL suitable for lmw. */
6810 lmw_operation (op
, mode
)
6812 enum machine_mode mode ATTRIBUTE_UNUSED
;
6814 int count
= XVECLEN (op
, 0);
6815 unsigned int dest_regno
;
6817 unsigned int base_regno
;
6818 HOST_WIDE_INT offset
;
6821 /* Perform a quick check so we don't blow up below. */
6823 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6824 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
6825 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != MEM
)
6828 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
6829 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0);
6832 || count
!= 32 - (int) dest_regno
)
6835 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr
, 0))
6838 base_regno
= REGNO (src_addr
);
6839 if (base_regno
== 0)
6842 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, src_addr
, 0))
6844 offset
= INTVAL (XEXP (src_addr
, 1));
6845 base_regno
= REGNO (XEXP (src_addr
, 0));
6850 for (i
= 0; i
< count
; i
++)
6852 rtx elt
= XVECEXP (op
, 0, i
);
6855 HOST_WIDE_INT newoffset
;
6857 if (GET_CODE (elt
) != SET
6858 || GET_CODE (SET_DEST (elt
)) != REG
6859 || GET_MODE (SET_DEST (elt
)) != SImode
6860 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
6861 || GET_CODE (SET_SRC (elt
)) != MEM
6862 || GET_MODE (SET_SRC (elt
)) != SImode
)
6864 newaddr
= XEXP (SET_SRC (elt
), 0);
6865 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr
, 0))
6870 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, newaddr
, 0))
6872 addr_reg
= XEXP (newaddr
, 0);
6873 newoffset
= INTVAL (XEXP (newaddr
, 1));
6877 if (REGNO (addr_reg
) != base_regno
6878 || newoffset
!= offset
+ 4 * i
)
6885 /* Return 1 for an PARALLEL suitable for stmw. */
6888 stmw_operation (op
, mode
)
6890 enum machine_mode mode ATTRIBUTE_UNUSED
;
6892 int count
= XVECLEN (op
, 0);
6893 unsigned int src_regno
;
6895 unsigned int base_regno
;
6896 HOST_WIDE_INT offset
;
6899 /* Perform a quick check so we don't blow up below. */
6901 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6902 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != MEM
6903 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != REG
)
6906 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
6907 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, 0)), 0);
6910 || count
!= 32 - (int) src_regno
)
6913 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr
, 0))
6916 base_regno
= REGNO (dest_addr
);
6917 if (base_regno
== 0)
6920 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, dest_addr
, 0))
6922 offset
= INTVAL (XEXP (dest_addr
, 1));
6923 base_regno
= REGNO (XEXP (dest_addr
, 0));
6928 for (i
= 0; i
< count
; i
++)
6930 rtx elt
= XVECEXP (op
, 0, i
);
6933 HOST_WIDE_INT newoffset
;
6935 if (GET_CODE (elt
) != SET
6936 || GET_CODE (SET_SRC (elt
)) != REG
6937 || GET_MODE (SET_SRC (elt
)) != SImode
6938 || REGNO (SET_SRC (elt
)) != src_regno
+ i
6939 || GET_CODE (SET_DEST (elt
)) != MEM
6940 || GET_MODE (SET_DEST (elt
)) != SImode
)
6942 newaddr
= XEXP (SET_DEST (elt
), 0);
6943 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr
, 0))
6948 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, newaddr
, 0))
6950 addr_reg
= XEXP (newaddr
, 0);
6951 newoffset
= INTVAL (XEXP (newaddr
, 1));
6955 if (REGNO (addr_reg
) != base_regno
6956 || newoffset
!= offset
+ 4 * i
)
6963 /* A validation routine: say whether CODE, a condition code, and MODE
6964 match. The other alternatives either don't make sense or should
6965 never be generated. */
6968 validate_condition_mode (code
, mode
)
6970 enum machine_mode mode
;
6972 if (GET_RTX_CLASS (code
) != '<'
6973 || GET_MODE_CLASS (mode
) != MODE_CC
)
6976 /* These don't make sense. */
6977 if ((code
== GT
|| code
== LT
|| code
== GE
|| code
== LE
)
6978 && mode
== CCUNSmode
)
6981 if ((code
== GTU
|| code
== LTU
|| code
== GEU
|| code
== LEU
)
6982 && mode
!= CCUNSmode
)
6985 if (mode
!= CCFPmode
6986 && (code
== ORDERED
|| code
== UNORDERED
6987 || code
== UNEQ
|| code
== LTGT
6988 || code
== UNGT
|| code
== UNLT
6989 || code
== UNGE
|| code
== UNLE
))
6992 /* These should never be generated except for
6993 flag_unsafe_math_optimizations and flag_finite_math_only. */
6994 if (mode
== CCFPmode
6995 && ! flag_unsafe_math_optimizations
6996 && ! flag_finite_math_only
6997 && (code
== LE
|| code
== GE
6998 || code
== UNEQ
|| code
== LTGT
6999 || code
== UNGT
|| code
== UNLT
))
7002 /* These are invalid; the information is not there. */
7003 if (mode
== CCEQmode
7004 && code
!= EQ
&& code
!= NE
)
7008 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
7009 We only check the opcode against the mode of the CC value here. */
7012 branch_comparison_operator (op
, mode
)
7014 enum machine_mode mode ATTRIBUTE_UNUSED
;
7016 enum rtx_code code
= GET_CODE (op
);
7017 enum machine_mode cc_mode
;
7019 if (GET_RTX_CLASS (code
) != '<')
7022 cc_mode
= GET_MODE (XEXP (op
, 0));
7023 if (GET_MODE_CLASS (cc_mode
) != MODE_CC
)
7026 validate_condition_mode (code
, cc_mode
);
7031 /* Return 1 if OP is a comparison operation that is valid for a branch
7032 insn and which is true if the corresponding bit in the CC register
7036 branch_positive_comparison_operator (op
, mode
)
7038 enum machine_mode mode
;
7042 if (! branch_comparison_operator (op
, mode
))
7045 code
= GET_CODE (op
);
7046 return (code
== EQ
|| code
== LT
|| code
== GT
7047 || (TARGET_E500
&& TARGET_HARD_FLOAT
&& !TARGET_FPRS
&& code
== NE
)
7048 || code
== LTU
|| code
== GTU
7049 || code
== UNORDERED
);
7052 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
7053 We check the opcode against the mode of the CC value and disallow EQ or
7054 NE comparisons for integers. */
7057 scc_comparison_operator (op
, mode
)
7059 enum machine_mode mode
;
7061 enum rtx_code code
= GET_CODE (op
);
7062 enum machine_mode cc_mode
;
7064 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
7067 if (GET_RTX_CLASS (code
) != '<')
7070 cc_mode
= GET_MODE (XEXP (op
, 0));
7071 if (GET_MODE_CLASS (cc_mode
) != MODE_CC
)
7074 validate_condition_mode (code
, cc_mode
);
7076 if (code
== NE
&& cc_mode
!= CCFPmode
)
7083 trap_comparison_operator (op
, mode
)
7085 enum machine_mode mode
;
7087 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
7089 return GET_RTX_CLASS (GET_CODE (op
)) == '<';
7093 boolean_operator (op
, mode
)
7095 enum machine_mode mode ATTRIBUTE_UNUSED
;
7097 enum rtx_code code
= GET_CODE (op
);
7098 return (code
== AND
|| code
== IOR
|| code
== XOR
);
7102 boolean_or_operator (op
, mode
)
7104 enum machine_mode mode ATTRIBUTE_UNUSED
;
7106 enum rtx_code code
= GET_CODE (op
);
7107 return (code
== IOR
|| code
== XOR
);
7111 min_max_operator (op
, mode
)
7113 enum machine_mode mode ATTRIBUTE_UNUSED
;
7115 enum rtx_code code
= GET_CODE (op
);
7116 return (code
== SMIN
|| code
== SMAX
|| code
== UMIN
|| code
== UMAX
);
7119 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
7120 mask required to convert the result of a rotate insn into a shift
7121 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
7124 includes_lshift_p (shiftop
, andop
)
7128 unsigned HOST_WIDE_INT shift_mask
= ~(unsigned HOST_WIDE_INT
) 0;
7130 shift_mask
<<= INTVAL (shiftop
);
7132 return (INTVAL (andop
) & 0xffffffff & ~shift_mask
) == 0;
7135 /* Similar, but for right shift. */
7138 includes_rshift_p (shiftop
, andop
)
7142 unsigned HOST_WIDE_INT shift_mask
= ~(unsigned HOST_WIDE_INT
) 0;
7144 shift_mask
>>= INTVAL (shiftop
);
7146 return (INTVAL (andop
) & 0xffffffff & ~shift_mask
) == 0;
7149 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
7150 to perform a left shift. It must have exactly SHIFTOP least
7151 significant 0's, then one or more 1's, then zero or more 0's. */
7154 includes_rldic_lshift_p (shiftop
, andop
)
7158 if (GET_CODE (andop
) == CONST_INT
)
7160 HOST_WIDE_INT c
, lsb
, shift_mask
;
7163 if (c
== 0 || c
== ~0)
7167 shift_mask
<<= INTVAL (shiftop
);
7169 /* Find the least significant one bit. */
7172 /* It must coincide with the LSB of the shift mask. */
7173 if (-lsb
!= shift_mask
)
7176 /* Invert to look for the next transition (if any). */
7179 /* Remove the low group of ones (originally low group of zeros). */
7182 /* Again find the lsb, and check we have all 1's above. */
7186 else if (GET_CODE (andop
) == CONST_DOUBLE
7187 && (GET_MODE (andop
) == VOIDmode
|| GET_MODE (andop
) == DImode
))
7189 HOST_WIDE_INT low
, high
, lsb
;
7190 HOST_WIDE_INT shift_mask_low
, shift_mask_high
;
7192 low
= CONST_DOUBLE_LOW (andop
);
7193 if (HOST_BITS_PER_WIDE_INT
< 64)
7194 high
= CONST_DOUBLE_HIGH (andop
);
7196 if ((low
== 0 && (HOST_BITS_PER_WIDE_INT
>= 64 || high
== 0))
7197 || (low
== ~0 && (HOST_BITS_PER_WIDE_INT
>= 64 || high
== ~0)))
7200 if (HOST_BITS_PER_WIDE_INT
< 64 && low
== 0)
7202 shift_mask_high
= ~0;
7203 if (INTVAL (shiftop
) > 32)
7204 shift_mask_high
<<= INTVAL (shiftop
) - 32;
7208 if (-lsb
!= shift_mask_high
|| INTVAL (shiftop
) < 32)
7215 return high
== -lsb
;
7218 shift_mask_low
= ~0;
7219 shift_mask_low
<<= INTVAL (shiftop
);
7223 if (-lsb
!= shift_mask_low
)
7226 if (HOST_BITS_PER_WIDE_INT
< 64)
7231 if (HOST_BITS_PER_WIDE_INT
< 64 && low
== 0)
7234 return high
== -lsb
;
7238 return low
== -lsb
&& (HOST_BITS_PER_WIDE_INT
>= 64 || high
== ~0);
7244 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
7245 to perform a left shift. It must have SHIFTOP or more least
7246 signifigant 0's, with the remainder of the word 1's. */
7249 includes_rldicr_lshift_p (shiftop
, andop
)
7253 if (GET_CODE (andop
) == CONST_INT
)
7255 HOST_WIDE_INT c
, lsb
, shift_mask
;
7258 shift_mask
<<= INTVAL (shiftop
);
7261 /* Find the least signifigant one bit. */
7264 /* It must be covered by the shift mask.
7265 This test also rejects c == 0. */
7266 if ((lsb
& shift_mask
) == 0)
7269 /* Check we have all 1's above the transition, and reject all 1's. */
7270 return c
== -lsb
&& lsb
!= 1;
7272 else if (GET_CODE (andop
) == CONST_DOUBLE
7273 && (GET_MODE (andop
) == VOIDmode
|| GET_MODE (andop
) == DImode
))
7275 HOST_WIDE_INT low
, lsb
, shift_mask_low
;
7277 low
= CONST_DOUBLE_LOW (andop
);
7279 if (HOST_BITS_PER_WIDE_INT
< 64)
7281 HOST_WIDE_INT high
, shift_mask_high
;
7283 high
= CONST_DOUBLE_HIGH (andop
);
7287 shift_mask_high
= ~0;
7288 if (INTVAL (shiftop
) > 32)
7289 shift_mask_high
<<= INTVAL (shiftop
) - 32;
7293 if ((lsb
& shift_mask_high
) == 0)
7296 return high
== -lsb
;
7302 shift_mask_low
= ~0;
7303 shift_mask_low
<<= INTVAL (shiftop
);
7307 if ((lsb
& shift_mask_low
) == 0)
7310 return low
== -lsb
&& lsb
!= 1;
7316 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
7317 for lfq and stfq insns.
7319 Note reg1 and reg2 *must* be hard registers. To be sure we will
7320 abort if we are passed pseudo registers. */
7323 registers_ok_for_quad_peep (reg1
, reg2
)
7326 /* We might have been passed a SUBREG. */
7327 if (GET_CODE (reg1
) != REG
|| GET_CODE (reg2
) != REG
)
7330 return (REGNO (reg1
) == REGNO (reg2
) - 1);
7333 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
7334 addr1 and addr2 must be in consecutive memory locations
7335 (addr2 == addr1 + 8). */
7338 addrs_ok_for_quad_peep (addr1
, addr2
)
7345 /* Extract an offset (if used) from the first addr. */
7346 if (GET_CODE (addr1
) == PLUS
)
7348 /* If not a REG, return zero. */
7349 if (GET_CODE (XEXP (addr1
, 0)) != REG
)
7353 reg1
= REGNO (XEXP (addr1
, 0));
7354 /* The offset must be constant! */
7355 if (GET_CODE (XEXP (addr1
, 1)) != CONST_INT
)
7357 offset1
= INTVAL (XEXP (addr1
, 1));
7360 else if (GET_CODE (addr1
) != REG
)
7364 reg1
= REGNO (addr1
);
7365 /* This was a simple (mem (reg)) expression. Offset is 0. */
7369 /* Make sure the second address is a (mem (plus (reg) (const_int)))
7370 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
7371 register as addr1. */
7372 if (offset1
== -8 && GET_CODE (addr2
) == REG
&& reg1
== REGNO (addr2
))
7374 if (GET_CODE (addr2
) != PLUS
)
7377 if (GET_CODE (XEXP (addr2
, 0)) != REG
7378 || GET_CODE (XEXP (addr2
, 1)) != CONST_INT
)
7381 if (reg1
!= REGNO (XEXP (addr2
, 0)))
7384 /* The offset for the second addr must be 8 more than the first addr. */
7385 if (INTVAL (XEXP (addr2
, 1)) != offset1
+ 8)
7388 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7393 /* Return the register class of a scratch register needed to copy IN into
7394 or out of a register in CLASS in MODE. If it can be done directly,
7395 NO_REGS is returned. */
7398 secondary_reload_class (class, mode
, in
)
7399 enum reg_class
class;
7400 enum machine_mode mode ATTRIBUTE_UNUSED
;
7405 if (TARGET_ELF
|| (DEFAULT_ABI
== ABI_DARWIN
7407 && MACHOPIC_INDIRECT
7411 /* We cannot copy a symbolic operand directly into anything
7412 other than BASE_REGS for TARGET_ELF. So indicate that a
7413 register from BASE_REGS is needed as an intermediate
7416 On Darwin, pic addresses require a load from memory, which
7417 needs a base register. */
7418 if (class != BASE_REGS
7419 && (GET_CODE (in
) == SYMBOL_REF
7420 || GET_CODE (in
) == HIGH
7421 || GET_CODE (in
) == LABEL_REF
7422 || GET_CODE (in
) == CONST
))
7426 if (GET_CODE (in
) == REG
)
7429 if (regno
>= FIRST_PSEUDO_REGISTER
)
7431 regno
= true_regnum (in
);
7432 if (regno
>= FIRST_PSEUDO_REGISTER
)
7436 else if (GET_CODE (in
) == SUBREG
)
7438 regno
= true_regnum (in
);
7439 if (regno
>= FIRST_PSEUDO_REGISTER
)
7445 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
7447 if (class == GENERAL_REGS
|| class == BASE_REGS
7448 || (regno
>= 0 && INT_REGNO_P (regno
)))
7451 /* Constants, memory, and FP registers can go into FP registers. */
7452 if ((regno
== -1 || FP_REGNO_P (regno
))
7453 && (class == FLOAT_REGS
|| class == NON_SPECIAL_REGS
))
7456 /* Memory, and AltiVec registers can go into AltiVec registers. */
7457 if ((regno
== -1 || ALTIVEC_REGNO_P (regno
))
7458 && class == ALTIVEC_REGS
)
7461 /* We can copy among the CR registers. */
7462 if ((class == CR_REGS
|| class == CR0_REGS
)
7463 && regno
>= 0 && CR_REGNO_P (regno
))
7466 /* Otherwise, we need GENERAL_REGS. */
7467 return GENERAL_REGS
;
7470 /* Given a comparison operation, return the bit number in CCR to test. We
7471 know this is a valid comparison.
7473 SCC_P is 1 if this is for an scc. That means that %D will have been
7474 used instead of %C, so the bits will be in different places.
7476 Return -1 if OP isn't a valid comparison for some reason. */
7483 enum rtx_code code
= GET_CODE (op
);
7484 enum machine_mode cc_mode
;
7489 if (GET_RTX_CLASS (code
) != '<')
7494 if (GET_CODE (reg
) != REG
7495 || ! CR_REGNO_P (REGNO (reg
)))
7498 cc_mode
= GET_MODE (reg
);
7499 cc_regnum
= REGNO (reg
);
7500 base_bit
= 4 * (cc_regnum
- CR0_REGNO
);
7502 validate_condition_mode (code
, cc_mode
);
7507 if (TARGET_E500
&& !TARGET_FPRS
7508 && TARGET_HARD_FLOAT
&& cc_mode
== CCFPmode
)
7509 return base_bit
+ 1;
7510 return scc_p
? base_bit
+ 3 : base_bit
+ 2;
7512 if (TARGET_E500
&& !TARGET_FPRS
7513 && TARGET_HARD_FLOAT
&& cc_mode
== CCFPmode
)
7514 return base_bit
+ 1;
7515 return base_bit
+ 2;
7516 case GT
: case GTU
: case UNLE
:
7517 return base_bit
+ 1;
7518 case LT
: case LTU
: case UNGE
:
7520 case ORDERED
: case UNORDERED
:
7521 return base_bit
+ 3;
7524 /* If scc, we will have done a cror to put the bit in the
7525 unordered position. So test that bit. For integer, this is ! LT
7526 unless this is an scc insn. */
7527 return scc_p
? base_bit
+ 3 : base_bit
;
7530 return scc_p
? base_bit
+ 3 : base_bit
+ 1;
7537 /* Return the GOT register. */
7540 rs6000_got_register (value
)
7541 rtx value ATTRIBUTE_UNUSED
;
7543 /* The second flow pass currently (June 1999) can't update
7544 regs_ever_live without disturbing other parts of the compiler, so
7545 update it here to make the prolog/epilogue code happy. */
7546 if (no_new_pseudos
&& ! regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
])
7547 regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
] = 1;
7549 current_function_uses_pic_offset_table
= 1;
7551 return pic_offset_table_rtx
;
7554 /* Function to init struct machine_function.
7555 This will be called, via a pointer variable,
7556 from push_function_context. */
7558 static struct machine_function
*
7559 rs6000_init_machine_status ()
7561 return ggc_alloc_cleared (sizeof (machine_function
));
7564 /* These macros test for integers and extract the low-order bits. */
7566 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
7567 && GET_MODE (X) == VOIDmode)
7569 #define INT_LOWPART(X) \
7570 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
7577 unsigned long val
= INT_LOWPART (op
);
7579 /* If the high bit is zero, the value is the first 1 bit we find
7581 if ((val
& 0x80000000) == 0)
7583 if ((val
& 0xffffffff) == 0)
7587 while (((val
<<= 1) & 0x80000000) == 0)
7592 /* If the high bit is set and the low bit is not, or the mask is all
7593 1's, the value is zero. */
7594 if ((val
& 1) == 0 || (val
& 0xffffffff) == 0xffffffff)
7597 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7600 while (((val
>>= 1) & 1) != 0)
7611 unsigned long val
= INT_LOWPART (op
);
7613 /* If the low bit is zero, the value is the first 1 bit we find from
7617 if ((val
& 0xffffffff) == 0)
7621 while (((val
>>= 1) & 1) == 0)
7627 /* If the low bit is set and the high bit is not, or the mask is all
7628 1's, the value is 31. */
7629 if ((val
& 0x80000000) == 0 || (val
& 0xffffffff) == 0xffffffff)
7632 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7635 while (((val
<<= 1) & 0x80000000) != 0)
7641 /* Print an operand. Recognize special options, documented below. */
7644 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
7645 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
7647 #define SMALL_DATA_RELOC "sda21"
7648 #define SMALL_DATA_REG 0
7652 print_operand (file
, x
, code
)
7659 unsigned HOST_WIDE_INT uval
;
7664 /* Write out an instruction after the call which may be replaced
7665 with glue code by the loader. This depends on the AIX version. */
7666 asm_fprintf (file
, RS6000_CALL_GLUE
);
7669 /* %a is output_address. */
7672 /* If X is a constant integer whose low-order 5 bits are zero,
7673 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
7674 in the AIX assembler where "sri" with a zero shift count
7675 writes a trash instruction. */
7676 if (GET_CODE (x
) == CONST_INT
&& (INTVAL (x
) & 31) == 0)
7683 /* If constant, low-order 16 bits of constant, unsigned.
7684 Otherwise, write normally. */
7686 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 0xffff);
7688 print_operand (file
, x
, 0);
7692 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
7693 for 64-bit mask direction. */
7694 putc (((INT_LOWPART(x
) & 1) == 0 ? 'r' : 'l'), file
);
7697 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
7701 /* There used to be a comment for 'C' reading "This is an
7702 optional cror needed for certain floating-point
7703 comparisons. Otherwise write nothing." */
7705 /* Similar, except that this is for an scc, so we must be able to
7706 encode the test in a single bit that is one. We do the above
7707 for any LE, GE, GEU, or LEU and invert the bit for NE. */
7708 if (GET_CODE (x
) == LE
|| GET_CODE (x
) == GE
7709 || GET_CODE (x
) == LEU
|| GET_CODE (x
) == GEU
)
7711 int base_bit
= 4 * (REGNO (XEXP (x
, 0)) - CR0_REGNO
);
7713 fprintf (file
, "cror %d,%d,%d\n\t", base_bit
+ 3,
7715 base_bit
+ (GET_CODE (x
) == GE
|| GET_CODE (x
) == GEU
));
7718 else if (GET_CODE (x
) == NE
)
7720 int base_bit
= 4 * (REGNO (XEXP (x
, 0)) - CR0_REGNO
);
7722 fprintf (file
, "crnor %d,%d,%d\n\t", base_bit
+ 3,
7723 base_bit
+ 2, base_bit
+ 2);
7725 else if (TARGET_E500
&& !TARGET_FPRS
&& TARGET_HARD_FLOAT
7726 && GET_CODE (x
) == EQ
7727 && GET_MODE (XEXP (x
, 0)) == CCFPmode
)
7729 int base_bit
= 4 * (REGNO (XEXP (x
, 0)) - CR0_REGNO
);
7731 fprintf (file
, "crnor %d,%d,%d\n\t", base_bit
+ 1,
7732 base_bit
+ 1, base_bit
+ 1);
7737 /* X is a CR register. Print the number of the EQ bit of the CR */
7738 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7739 output_operand_lossage ("invalid %%E value");
7741 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
) + 2);
7745 /* X is a CR register. Print the shift count needed to move it
7746 to the high-order four bits. */
7747 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7748 output_operand_lossage ("invalid %%f value");
7750 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
));
7754 /* Similar, but print the count for the rotate in the opposite
7756 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7757 output_operand_lossage ("invalid %%F value");
7759 fprintf (file
, "%d", 32 - 4 * (REGNO (x
) - CR0_REGNO
));
7763 /* X is a constant integer. If it is negative, print "m",
7764 otherwise print "z". This is to make an aze or ame insn. */
7765 if (GET_CODE (x
) != CONST_INT
)
7766 output_operand_lossage ("invalid %%G value");
7767 else if (INTVAL (x
) >= 0)
7774 /* If constant, output low-order five bits. Otherwise, write
7777 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 31);
7779 print_operand (file
, x
, 0);
7783 /* If constant, output low-order six bits. Otherwise, write
7786 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 63);
7788 print_operand (file
, x
, 0);
7792 /* Print `i' if this is a constant, else nothing. */
7798 /* Write the bit number in CCR for jump. */
7801 output_operand_lossage ("invalid %%j code");
7803 fprintf (file
, "%d", i
);
7807 /* Similar, but add one for shift count in rlinm for scc and pass
7808 scc flag to `ccr_bit'. */
7811 output_operand_lossage ("invalid %%J code");
7813 /* If we want bit 31, write a shift count of zero, not 32. */
7814 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
7818 /* X must be a constant. Write the 1's complement of the
7821 output_operand_lossage ("invalid %%k value");
7823 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ~ INT_LOWPART (x
));
7827 /* X must be a symbolic constant on ELF. Write an
7828 expression suitable for an 'addi' that adds in the low 16
7830 if (GET_CODE (x
) != CONST
)
7832 print_operand_address (file
, x
);
7837 if (GET_CODE (XEXP (x
, 0)) != PLUS
7838 || (GET_CODE (XEXP (XEXP (x
, 0), 0)) != SYMBOL_REF
7839 && GET_CODE (XEXP (XEXP (x
, 0), 0)) != LABEL_REF
)
7840 || GET_CODE (XEXP (XEXP (x
, 0), 1)) != CONST_INT
)
7841 output_operand_lossage ("invalid %%K value");
7842 print_operand_address (file
, XEXP (XEXP (x
, 0), 0));
7844 /* For GNU as, there must be a non-alphanumeric character
7845 between 'l' and the number. The '-' is added by
7846 print_operand() already. */
7847 if (INTVAL (XEXP (XEXP (x
, 0), 1)) >= 0)
7849 print_operand (file
, XEXP (XEXP (x
, 0), 1), 0);
7853 /* %l is output_asm_label. */
7856 /* Write second word of DImode or DFmode reference. Works on register
7857 or non-indexed memory only. */
7858 if (GET_CODE (x
) == REG
)
7859 fprintf (file
, "%s", reg_names
[REGNO (x
) + 1]);
7860 else if (GET_CODE (x
) == MEM
)
7862 /* Handle possible auto-increment. Since it is pre-increment and
7863 we have already done it, we can just use an offset of word. */
7864 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
7865 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
7866 output_address (plus_constant (XEXP (XEXP (x
, 0), 0),
7869 output_address (XEXP (adjust_address_nv (x
, SImode
,
7873 if (small_data_operand (x
, GET_MODE (x
)))
7874 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
7875 reg_names
[SMALL_DATA_REG
]);
7880 /* MB value for a mask operand. */
7881 if (! mask_operand (x
, SImode
))
7882 output_operand_lossage ("invalid %%m value");
7884 fprintf (file
, "%d", extract_MB (x
));
7888 /* ME value for a mask operand. */
7889 if (! mask_operand (x
, SImode
))
7890 output_operand_lossage ("invalid %%M value");
7892 fprintf (file
, "%d", extract_ME (x
));
7895 /* %n outputs the negative of its operand. */
7898 /* Write the number of elements in the vector times 4. */
7899 if (GET_CODE (x
) != PARALLEL
)
7900 output_operand_lossage ("invalid %%N value");
7902 fprintf (file
, "%d", XVECLEN (x
, 0) * 4);
7906 /* Similar, but subtract 1 first. */
7907 if (GET_CODE (x
) != PARALLEL
)
7908 output_operand_lossage ("invalid %%O value");
7910 fprintf (file
, "%d", (XVECLEN (x
, 0) - 1) * 4);
7914 /* X is a CONST_INT that is a power of two. Output the logarithm. */
7916 || INT_LOWPART (x
) < 0
7917 || (i
= exact_log2 (INT_LOWPART (x
))) < 0)
7918 output_operand_lossage ("invalid %%p value");
7920 fprintf (file
, "%d", i
);
7924 /* The operand must be an indirect memory reference. The result
7925 is the register number. */
7926 if (GET_CODE (x
) != MEM
|| GET_CODE (XEXP (x
, 0)) != REG
7927 || REGNO (XEXP (x
, 0)) >= 32)
7928 output_operand_lossage ("invalid %%P value");
7930 fprintf (file
, "%d", REGNO (XEXP (x
, 0)));
7934 /* This outputs the logical code corresponding to a boolean
7935 expression. The expression may have one or both operands
7936 negated (if one, only the first one). For condition register
7937 logical operations, it will also treat the negated
7938 CR codes as NOTs, but not handle NOTs of them. */
7940 const char *const *t
= 0;
7942 enum rtx_code code
= GET_CODE (x
);
7943 static const char * const tbl
[3][3] = {
7944 { "and", "andc", "nor" },
7945 { "or", "orc", "nand" },
7946 { "xor", "eqv", "xor" } };
7950 else if (code
== IOR
)
7952 else if (code
== XOR
)
7955 output_operand_lossage ("invalid %%q value");
7957 if (GET_CODE (XEXP (x
, 0)) != NOT
)
7961 if (GET_CODE (XEXP (x
, 1)) == NOT
)
7972 /* X is a CR register. Print the mask for `mtcrf'. */
7973 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7974 output_operand_lossage ("invalid %%R value");
7976 fprintf (file
, "%d", 128 >> (REGNO (x
) - CR0_REGNO
));
7980 /* Low 5 bits of 32 - value */
7982 output_operand_lossage ("invalid %%s value");
7984 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, (32 - INT_LOWPART (x
)) & 31);
7988 /* PowerPC64 mask position. All 0's is excluded.
7989 CONST_INT 32-bit mask is considered sign-extended so any
7990 transition must occur within the CONST_INT, not on the boundary. */
7991 if (! mask64_operand (x
, DImode
))
7992 output_operand_lossage ("invalid %%S value");
7994 uval
= INT_LOWPART (x
);
7996 if (uval
& 1) /* Clear Left */
7998 #if HOST_BITS_PER_WIDE_INT > 64
7999 uval
&= ((unsigned HOST_WIDE_INT
) 1 << 64) - 1;
8003 else /* Clear Right */
8006 #if HOST_BITS_PER_WIDE_INT > 64
8007 uval
&= ((unsigned HOST_WIDE_INT
) 1 << 64) - 1;
8015 fprintf (file
, "%d", i
);
8019 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
8020 if (GET_CODE (x
) != REG
|| GET_MODE (x
) != CCmode
)
8023 /* Bit 3 is OV bit. */
8024 i
= 4 * (REGNO (x
) - CR0_REGNO
) + 3;
8026 /* If we want bit 31, write a shift count of zero, not 32. */
8027 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
8031 /* Print the symbolic name of a branch target register. */
8032 if (GET_CODE (x
) != REG
|| (REGNO (x
) != LINK_REGISTER_REGNUM
8033 && REGNO (x
) != COUNT_REGISTER_REGNUM
))
8034 output_operand_lossage ("invalid %%T value");
8035 else if (REGNO (x
) == LINK_REGISTER_REGNUM
)
8036 fputs (TARGET_NEW_MNEMONICS
? "lr" : "r", file
);
8038 fputs ("ctr", file
);
8042 /* High-order 16 bits of constant for use in unsigned operand. */
8044 output_operand_lossage ("invalid %%u value");
8046 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
8047 (INT_LOWPART (x
) >> 16) & 0xffff);
8051 /* High-order 16 bits of constant for use in signed operand. */
8053 output_operand_lossage ("invalid %%v value");
8055 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
8056 (INT_LOWPART (x
) >> 16) & 0xffff);
8060 /* Print `u' if this has an auto-increment or auto-decrement. */
8061 if (GET_CODE (x
) == MEM
8062 && (GET_CODE (XEXP (x
, 0)) == PRE_INC
8063 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
))
8068 /* Print the trap code for this operand. */
8069 switch (GET_CODE (x
))
8072 fputs ("eq", file
); /* 4 */
8075 fputs ("ne", file
); /* 24 */
8078 fputs ("lt", file
); /* 16 */
8081 fputs ("le", file
); /* 20 */
8084 fputs ("gt", file
); /* 8 */
8087 fputs ("ge", file
); /* 12 */
8090 fputs ("llt", file
); /* 2 */
8093 fputs ("lle", file
); /* 6 */
8096 fputs ("lgt", file
); /* 1 */
8099 fputs ("lge", file
); /* 5 */
8107 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
8110 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
,
8111 ((INT_LOWPART (x
) & 0xffff) ^ 0x8000) - 0x8000);
8113 print_operand (file
, x
, 0);
8117 /* MB value for a PowerPC64 rldic operand. */
8118 val
= (GET_CODE (x
) == CONST_INT
8119 ? INTVAL (x
) : CONST_DOUBLE_HIGH (x
));
8124 for (i
= 0; i
< HOST_BITS_PER_WIDE_INT
; i
++)
8125 if ((val
<<= 1) < 0)
8128 #if HOST_BITS_PER_WIDE_INT == 32
8129 if (GET_CODE (x
) == CONST_INT
&& i
>= 0)
8130 i
+= 32; /* zero-extend high-part was all 0's */
8131 else if (GET_CODE (x
) == CONST_DOUBLE
&& i
== 32)
8133 val
= CONST_DOUBLE_LOW (x
);
8140 for ( ; i
< 64; i
++)
8141 if ((val
<<= 1) < 0)
8146 fprintf (file
, "%d", i
+ 1);
8150 if (GET_CODE (x
) == MEM
8151 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x
, 0), 0))
8156 /* Like 'L', for third word of TImode */
8157 if (GET_CODE (x
) == REG
)
8158 fprintf (file
, "%s", reg_names
[REGNO (x
) + 2]);
8159 else if (GET_CODE (x
) == MEM
)
8161 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
8162 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
8163 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 8));
8165 output_address (XEXP (adjust_address_nv (x
, SImode
, 8), 0));
8166 if (small_data_operand (x
, GET_MODE (x
)))
8167 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
8168 reg_names
[SMALL_DATA_REG
]);
8173 /* X is a SYMBOL_REF. Write out the name preceded by a
8174 period and without any trailing data in brackets. Used for function
8175 names. If we are configured for System V (or the embedded ABI) on
8176 the PowerPC, do not emit the period, since those systems do not use
8177 TOCs and the like. */
8178 if (GET_CODE (x
) != SYMBOL_REF
)
8181 if (XSTR (x
, 0)[0] != '.')
8183 switch (DEFAULT_ABI
)
8198 RS6000_OUTPUT_BASENAME (file
, XSTR (x
, 0));
8200 assemble_name (file
, XSTR (x
, 0));
8205 /* Like 'L', for last word of TImode. */
8206 if (GET_CODE (x
) == REG
)
8207 fprintf (file
, "%s", reg_names
[REGNO (x
) + 3]);
8208 else if (GET_CODE (x
) == MEM
)
8210 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
8211 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
8212 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 12));
8214 output_address (XEXP (adjust_address_nv (x
, SImode
, 12), 0));
8215 if (small_data_operand (x
, GET_MODE (x
)))
8216 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
8217 reg_names
[SMALL_DATA_REG
]);
8221 /* Print AltiVec or SPE memory operand. */
8226 if (GET_CODE (x
) != MEM
)
8234 if (GET_CODE (tmp
) == REG
)
8236 fprintf (file
, "0(%s)", reg_names
[REGNO (tmp
)]);
8239 /* Handle [reg+UIMM]. */
8240 else if (GET_CODE (tmp
) == PLUS
&&
8241 GET_CODE (XEXP (tmp
, 1)) == CONST_INT
)
8245 if (GET_CODE (XEXP (tmp
, 0)) != REG
)
8248 x
= INTVAL (XEXP (tmp
, 1));
8249 fprintf (file
, "%d(%s)", x
, reg_names
[REGNO (XEXP (tmp
, 0))]);
8253 /* Fall through. Must be [reg+reg]. */
8255 if (GET_CODE (tmp
) == REG
)
8256 fprintf (file
, "0,%s", reg_names
[REGNO (tmp
)]);
8257 else if (GET_CODE (tmp
) == PLUS
&& GET_CODE (XEXP (tmp
, 1)) == REG
)
8259 if (REGNO (XEXP (tmp
, 0)) == 0)
8260 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (tmp
, 1)) ],
8261 reg_names
[ REGNO (XEXP (tmp
, 0)) ]);
8263 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (tmp
, 0)) ],
8264 reg_names
[ REGNO (XEXP (tmp
, 1)) ]);
8272 if (GET_CODE (x
) == REG
)
8273 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
8274 else if (GET_CODE (x
) == MEM
)
8276 /* We need to handle PRE_INC and PRE_DEC here, since we need to
8277 know the width from the mode. */
8278 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
)
8279 fprintf (file
, "%d(%s)", GET_MODE_SIZE (GET_MODE (x
)),
8280 reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))]);
8281 else if (GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
8282 fprintf (file
, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x
)),
8283 reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))]);
8285 output_address (XEXP (x
, 0));
8288 output_addr_const (file
, x
);
8292 output_operand_lossage ("invalid %%xn code");
8296 /* Print the address of an operand. */
8299 print_operand_address (file
, x
)
8303 if (GET_CODE (x
) == REG
)
8304 fprintf (file
, "0(%s)", reg_names
[ REGNO (x
) ]);
8305 else if (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == CONST
8306 || GET_CODE (x
) == LABEL_REF
)
8308 output_addr_const (file
, x
);
8309 if (small_data_operand (x
, GET_MODE (x
)))
8310 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
8311 reg_names
[SMALL_DATA_REG
]);
8312 else if (TARGET_TOC
)
8315 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == REG
)
8317 if (REGNO (XEXP (x
, 0)) == 0)
8318 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (x
, 1)) ],
8319 reg_names
[ REGNO (XEXP (x
, 0)) ]);
8321 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (x
, 0)) ],
8322 reg_names
[ REGNO (XEXP (x
, 1)) ]);
8324 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
)
8326 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (XEXP (x
, 1)));
8327 fprintf (file
, "(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
8330 else if (GET_CODE (x
) == LO_SUM
&& GET_CODE (XEXP (x
, 0)) == REG
8331 && CONSTANT_P (XEXP (x
, 1)))
8333 output_addr_const (file
, XEXP (x
, 1));
8334 fprintf (file
, "@l(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
8338 else if (GET_CODE (x
) == LO_SUM
&& GET_CODE (XEXP (x
, 0)) == REG
8339 && CONSTANT_P (XEXP (x
, 1)))
8341 fprintf (file
, "lo16(");
8342 output_addr_const (file
, XEXP (x
, 1));
8343 fprintf (file
, ")(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
8346 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x
))
8348 if (TARGET_AIX
&& (!TARGET_ELF
|| !TARGET_MINIMAL_TOC
))
8350 rtx contains_minus
= XEXP (x
, 1);
8354 /* Find the (minus (sym) (toc)) buried in X, and temporarily
8355 turn it into (sym) for output_addr_const. */
8356 while (GET_CODE (XEXP (contains_minus
, 0)) != MINUS
)
8357 contains_minus
= XEXP (contains_minus
, 0);
8359 minus
= XEXP (contains_minus
, 0);
8360 symref
= XEXP (minus
, 0);
8361 XEXP (contains_minus
, 0) = symref
;
8366 name
= XSTR (symref
, 0);
8367 newname
= alloca (strlen (name
) + sizeof ("@toc"));
8368 strcpy (newname
, name
);
8369 strcat (newname
, "@toc");
8370 XSTR (symref
, 0) = newname
;
8372 output_addr_const (file
, XEXP (x
, 1));
8374 XSTR (symref
, 0) = name
;
8375 XEXP (contains_minus
, 0) = minus
;
8378 output_addr_const (file
, XEXP (x
, 1));
8380 fprintf (file
, "(%s)", reg_names
[REGNO (XEXP (x
, 0))]);
8386 /* Target hook for assembling integer objects. The PowerPC version has
8387 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
8388 is defined. It also needs to handle DI-mode objects on 64-bit
8392 rs6000_assemble_integer (x
, size
, aligned_p
)
8397 #ifdef RELOCATABLE_NEEDS_FIXUP
8398 /* Special handling for SI values. */
8399 if (size
== 4 && aligned_p
)
8401 extern int in_toc_section
PARAMS ((void));
8402 static int recurse
= 0;
8404 /* For -mrelocatable, we mark all addresses that need to be fixed up
8405 in the .fixup section. */
8406 if (TARGET_RELOCATABLE
8407 && !in_toc_section ()
8408 && !in_text_section ()
8410 && GET_CODE (x
) != CONST_INT
8411 && GET_CODE (x
) != CONST_DOUBLE
8417 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCP", fixuplabelno
);
8419 ASM_OUTPUT_LABEL (asm_out_file
, buf
);
8420 fprintf (asm_out_file
, "\t.long\t(");
8421 output_addr_const (asm_out_file
, x
);
8422 fprintf (asm_out_file
, ")@fixup\n");
8423 fprintf (asm_out_file
, "\t.section\t\".fixup\",\"aw\"\n");
8424 ASM_OUTPUT_ALIGN (asm_out_file
, 2);
8425 fprintf (asm_out_file
, "\t.long\t");
8426 assemble_name (asm_out_file
, buf
);
8427 fprintf (asm_out_file
, "\n\t.previous\n");
8431 /* Remove initial .'s to turn a -mcall-aixdesc function
8432 address into the address of the descriptor, not the function
8434 else if (GET_CODE (x
) == SYMBOL_REF
8435 && XSTR (x
, 0)[0] == '.'
8436 && DEFAULT_ABI
== ABI_AIX
)
8438 const char *name
= XSTR (x
, 0);
8439 while (*name
== '.')
8442 fprintf (asm_out_file
, "\t.long\t%s\n", name
);
8446 #endif /* RELOCATABLE_NEEDS_FIXUP */
8447 return default_assemble_integer (x
, size
, aligned_p
);
#ifdef HAVE_GAS_HIDDEN
/* Emit an assembler directive to set symbol visibility for DECL to
   VISIBILITY_TYPE.  */

static void
rs6000_assemble_visibility (decl, vis)
     tree decl;
     int vis;
{
  /* Functions need to have their entry point symbol visibility set as
     well as their descriptor symbol visibility.  */
  if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
    {
      /* Indexed by the VISIBILITY_* enumeration; VISIBILITY_DEFAULT
	 (0) needs no directive.  */
      static const char * const visibility_types[] = {
	NULL, "internal", "hidden", "protected"
      };

      const char *name, *type;

      name = ((* targetm.strip_name_encoding)
	      (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
      type = visibility_types[vis];

      /* Descriptor symbol, then the dot-prefixed entry point.  */
      fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
      fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
    }
  else
    default_assemble_visibility (decl, vis);
}
#endif
8482 rs6000_reverse_condition (mode
, code
)
8483 enum machine_mode mode
;
8486 /* Reversal of FP compares takes care -- an ordered compare
8487 becomes an unordered compare and vice versa. */
8488 if (mode
== CCFPmode
&& !flag_unsafe_math_optimizations
)
8489 return reverse_condition_maybe_unordered (code
);
8491 return reverse_condition (code
);
8494 /* Generate a compare for CODE. Return a brand-new rtx that
8495 represents the result of the compare. */
8498 rs6000_generate_compare (code
)
8501 enum machine_mode comp_mode
;
8504 if (rs6000_compare_fp_p
)
8505 comp_mode
= CCFPmode
;
8506 else if (code
== GTU
|| code
== LTU
8507 || code
== GEU
|| code
== LEU
)
8508 comp_mode
= CCUNSmode
;
8512 /* First, the compare. */
8513 compare_result
= gen_reg_rtx (comp_mode
);
8515 /* SPE FP compare instructions on the GPRs. Yuck! */
8516 if ((TARGET_E500
&& !TARGET_FPRS
&& TARGET_HARD_FLOAT
)
8517 && rs6000_compare_fp_p
)
8519 rtx cmp
, or1
, or2
, or_result
, compare_result2
;
8527 cmp
= flag_unsafe_math_optimizations
8528 ? gen_tstsfeq_gpr (compare_result
, rs6000_compare_op0
,
8530 : gen_cmpsfeq_gpr (compare_result
, rs6000_compare_op0
,
8531 rs6000_compare_op1
);
8539 cmp
= flag_unsafe_math_optimizations
8540 ? gen_tstsfgt_gpr (compare_result
, rs6000_compare_op0
,
8542 : gen_cmpsfgt_gpr (compare_result
, rs6000_compare_op0
,
8543 rs6000_compare_op1
);
8551 cmp
= flag_unsafe_math_optimizations
8552 ? gen_tstsflt_gpr (compare_result
, rs6000_compare_op0
,
8554 : gen_cmpsflt_gpr (compare_result
, rs6000_compare_op0
,
8555 rs6000_compare_op1
);
8561 /* Synthesize LE and GE from LT/GT || EQ. */
8562 if (code
== LE
|| code
== GE
|| code
== LEU
|| code
== GEU
)
8564 /* Synthesize GE/LE frome GT/LT || EQ. */
8570 case LE
: code
= LT
; break;
8571 case GE
: code
= GT
; break;
8572 case LEU
: code
= LT
; break;
8573 case GEU
: code
= GT
; break;
8577 or1
= gen_reg_rtx (SImode
);
8578 or2
= gen_reg_rtx (SImode
);
8579 or_result
= gen_reg_rtx (CCEQmode
);
8580 compare_result2
= gen_reg_rtx (CCFPmode
);
8583 cmp
= flag_unsafe_math_optimizations
8584 ? gen_tstsfeq_gpr (compare_result2
, rs6000_compare_op0
,
8586 : gen_cmpsfeq_gpr (compare_result2
, rs6000_compare_op0
,
8587 rs6000_compare_op1
);
8590 /* The MC8540 FP compare instructions set the CR bits
8591 differently than other PPC compare instructions. For
8592 that matter, there is no generic test instruction, but a
8593 testgt, testlt, and testeq. For a true condition, bit 2
8594 is set (x1xx) in the CR. Following the traditional CR
8600 ... bit 2 would be a GT CR alias, so later on we
8601 look in the GT bits for the branch instructions.
8602 However, we must be careful to emit correct RTL in
8603 the meantime, so optimizations don't get confused. */
8605 or1
= gen_rtx (NE
, SImode
, compare_result
, const0_rtx
);
8606 or2
= gen_rtx (NE
, SImode
, compare_result2
, const0_rtx
);
8608 /* OR them together. */
8609 cmp
= gen_rtx_SET (VOIDmode
, or_result
,
8610 gen_rtx_COMPARE (CCEQmode
,
8611 gen_rtx_IOR (SImode
, or1
, or2
),
8613 compare_result
= or_result
;
8618 /* We only care about 1 bit (x1xx), so map everything to NE to
8619 maintain rtl sanity. We'll get to the right bit (x1xx) at
8620 code output time. */
8621 if (code
== NE
|| code
== LTGT
)
8622 /* Do the inverse here because we have no cmpne
8623 instruction. We use the cmpeq instruction and expect
8624 to get a 0 instead. */
8633 emit_insn (gen_rtx_SET (VOIDmode
, compare_result
,
8634 gen_rtx_COMPARE (comp_mode
,
8636 rs6000_compare_op1
)));
8638 /* Some kinds of FP comparisons need an OR operation;
8639 except for flag_unsafe_math_optimizations we don't bother. */
8640 if (rs6000_compare_fp_p
8641 && ! flag_unsafe_math_optimizations
8642 && ! (TARGET_HARD_FLOAT
&& TARGET_E500
&& !TARGET_FPRS
)
8643 && (code
== LE
|| code
== GE
8644 || code
== UNEQ
|| code
== LTGT
8645 || code
== UNGT
|| code
== UNLT
))
8647 enum rtx_code or1
, or2
;
8648 rtx or1_rtx
, or2_rtx
, compare2_rtx
;
8649 rtx or_result
= gen_reg_rtx (CCEQmode
);
8653 case LE
: or1
= LT
; or2
= EQ
; break;
8654 case GE
: or1
= GT
; or2
= EQ
; break;
8655 case UNEQ
: or1
= UNORDERED
; or2
= EQ
; break;
8656 case LTGT
: or1
= LT
; or2
= GT
; break;
8657 case UNGT
: or1
= UNORDERED
; or2
= GT
; break;
8658 case UNLT
: or1
= UNORDERED
; or2
= LT
; break;
8661 validate_condition_mode (or1
, comp_mode
);
8662 validate_condition_mode (or2
, comp_mode
);
8663 or1_rtx
= gen_rtx (or1
, SImode
, compare_result
, const0_rtx
);
8664 or2_rtx
= gen_rtx (or2
, SImode
, compare_result
, const0_rtx
);
8665 compare2_rtx
= gen_rtx_COMPARE (CCEQmode
,
8666 gen_rtx_IOR (SImode
, or1_rtx
, or2_rtx
),
8668 emit_insn (gen_rtx_SET (VOIDmode
, or_result
, compare2_rtx
));
8670 compare_result
= or_result
;
8674 validate_condition_mode (code
, GET_MODE (compare_result
));
8676 return gen_rtx (code
, VOIDmode
, compare_result
, const0_rtx
);
8680 /* Emit the RTL for an sCOND pattern. */
8683 rs6000_emit_sCOND (code
, result
)
8688 enum machine_mode op_mode
;
8690 condition_rtx
= rs6000_generate_compare (code
);
8692 op_mode
= GET_MODE (rs6000_compare_op0
);
8693 if (op_mode
== VOIDmode
)
8694 op_mode
= GET_MODE (rs6000_compare_op1
);
8696 if (TARGET_POWERPC64
&& (op_mode
== DImode
|| rs6000_compare_fp_p
))
8698 PUT_MODE (condition_rtx
, DImode
);
8699 convert_move (result
, condition_rtx
, 0);
8703 PUT_MODE (condition_rtx
, SImode
);
8704 emit_insn (gen_rtx_SET (VOIDmode
, result
, condition_rtx
));
8708 /* Emit a branch of kind CODE to location LOC. */
8711 rs6000_emit_cbranch (code
, loc
)
8715 rtx condition_rtx
, loc_ref
;
8717 condition_rtx
= rs6000_generate_compare (code
);
8718 loc_ref
= gen_rtx_LABEL_REF (VOIDmode
, loc
);
8719 emit_jump_insn (gen_rtx_SET (VOIDmode
, pc_rtx
,
8720 gen_rtx_IF_THEN_ELSE (VOIDmode
, condition_rtx
,
8724 /* Return the string to output a conditional branch to LABEL, which is
8725 the operand number of the label, or -1 if the branch is really a
8728 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
8729 condition code register and its mode specifies what kind of
8732 REVERSED is nonzero if we should reverse the sense of the comparison.
8734 INSN is the insn. */
8737 output_cbranch (op
, label
, reversed
, insn
)
8743 static char string
[64];
8744 enum rtx_code code
= GET_CODE (op
);
8745 rtx cc_reg
= XEXP (op
, 0);
8746 enum machine_mode mode
= GET_MODE (cc_reg
);
8747 int cc_regno
= REGNO (cc_reg
) - CR0_REGNO
;
8748 int need_longbranch
= label
!= NULL
&& get_attr_length (insn
) == 8;
8749 int really_reversed
= reversed
^ need_longbranch
;
8755 validate_condition_mode (code
, mode
);
8757 /* Work out which way this really branches. We could use
8758 reverse_condition_maybe_unordered here always but this
8759 makes the resulting assembler clearer. */
8760 if (really_reversed
)
8762 /* Reversal of FP compares takes care -- an ordered compare
8763 becomes an unordered compare and vice versa. */
8764 if (mode
== CCFPmode
)
8765 code
= reverse_condition_maybe_unordered (code
);
8767 code
= reverse_condition (code
);
8770 if ((TARGET_E500
&& !TARGET_FPRS
&& TARGET_HARD_FLOAT
) && mode
== CCFPmode
)
8772 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
8775 /* Opposite of GT. */
8777 else if (code
== NE
)
8785 /* Not all of these are actually distinct opcodes, but
8786 we distinguish them for clarity of the resulting assembler. */
8788 ccode
= "ne"; break;
8790 ccode
= "eq"; break;
8792 ccode
= "ge"; break;
8793 case GT
: case GTU
: case UNGT
:
8794 ccode
= "gt"; break;
8796 ccode
= "le"; break;
8797 case LT
: case LTU
: case UNLT
:
8798 ccode
= "lt"; break;
8799 case UNORDERED
: ccode
= "un"; break;
8800 case ORDERED
: ccode
= "nu"; break;
8801 case UNGE
: ccode
= "nl"; break;
8802 case UNLE
: ccode
= "ng"; break;
8807 /* Maybe we have a guess as to how likely the branch is.
8808 The old mnemonics don't have a way to specify this information. */
8810 note
= find_reg_note (insn
, REG_BR_PROB
, NULL_RTX
);
8811 if (note
!= NULL_RTX
)
8813 /* PROB is the difference from 50%. */
8814 int prob
= INTVAL (XEXP (note
, 0)) - REG_BR_PROB_BASE
/ 2;
8815 bool always_hint
= rs6000_cpu
!= PROCESSOR_POWER4
;
8817 /* Only hint for highly probable/improbable branches on newer
8818 cpus as static prediction overrides processor dynamic
8819 prediction. For older cpus we may as well always hint, but
8820 assume not taken for branches that are very close to 50% as a
8821 mispredicted taken branch is more expensive than a
8822 mispredicted not-taken branch. */
8824 || abs (prob
) > REG_BR_PROB_BASE
/ 100 * 48)
8826 if (abs (prob
) > REG_BR_PROB_BASE
/ 20
8827 && ((prob
> 0) ^ need_longbranch
))
8835 s
+= sprintf (s
, "{b%sr|b%slr%s} ", ccode
, ccode
, pred
);
8837 s
+= sprintf (s
, "{b%s|b%s%s} ", ccode
, ccode
, pred
);
8839 /* We need to escape any '%' characters in the reg_names string.
8840 Assume they'd only be the first character... */
8841 if (reg_names
[cc_regno
+ CR0_REGNO
][0] == '%')
8843 s
+= sprintf (s
, "%s", reg_names
[cc_regno
+ CR0_REGNO
]);
8847 /* If the branch distance was too far, we may have to use an
8848 unconditional branch to go the distance. */
8849 if (need_longbranch
)
8850 s
+= sprintf (s
, ",$+8\n\tb %s", label
);
8852 s
+= sprintf (s
, ",%s", label
);
8858 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
8859 operands of the last comparison is nonzero/true, FALSE_COND if it
8860 is zero/false. Return 0 if the hardware has no such operation. */
8863 rs6000_emit_cmove (dest
, op
, true_cond
, false_cond
)
8869 enum rtx_code code
= GET_CODE (op
);
8870 rtx op0
= rs6000_compare_op0
;
8871 rtx op1
= rs6000_compare_op1
;
8873 enum machine_mode compare_mode
= GET_MODE (op0
);
8874 enum machine_mode result_mode
= GET_MODE (dest
);
8877 /* These modes should always match. */
8878 if (GET_MODE (op1
) != compare_mode
8879 /* In the isel case however, we can use a compare immediate, so
8880 op1 may be a small constant. */
8881 && (!TARGET_ISEL
|| !short_cint_operand (op1
, VOIDmode
)))
8883 if (GET_MODE (true_cond
) != result_mode
)
8885 if (GET_MODE (false_cond
) != result_mode
)
8888 /* First, work out if the hardware can do this at all, or
8889 if it's too slow... */
8890 if (! rs6000_compare_fp_p
)
8893 return rs6000_emit_int_cmove (dest
, op
, true_cond
, false_cond
);
8897 /* Eliminate half of the comparisons by switching operands, this
8898 makes the remaining code simpler. */
8899 if (code
== UNLT
|| code
== UNGT
|| code
== UNORDERED
|| code
== NE
8900 || code
== LTGT
|| code
== LT
)
8902 code
= reverse_condition_maybe_unordered (code
);
8904 true_cond
= false_cond
;
8908 /* UNEQ and LTGT take four instructions for a comparison with zero,
8909 it'll probably be faster to use a branch here too. */
8913 if (GET_CODE (op1
) == CONST_DOUBLE
)
8914 REAL_VALUE_FROM_CONST_DOUBLE (c1
, op1
);
8916 /* We're going to try to implement comparisons by performing
8917 a subtract, then comparing against zero. Unfortunately,
8918 Inf - Inf is NaN which is not zero, and so if we don't
8919 know that the operand is finite and the comparison
8920 would treat EQ different to UNORDERED, we can't do it. */
8921 if (! flag_unsafe_math_optimizations
8922 && code
!= GT
&& code
!= UNGE
8923 && (GET_CODE (op1
) != CONST_DOUBLE
|| real_isinf (&c1
))
8924 /* Constructs of the form (a OP b ? a : b) are safe. */
8925 && ((! rtx_equal_p (op0
, false_cond
) && ! rtx_equal_p (op1
, false_cond
))
8926 || (! rtx_equal_p (op0
, true_cond
)
8927 && ! rtx_equal_p (op1
, true_cond
))))
8929 /* At this point we know we can use fsel. */
8931 /* Reduce the comparison to a comparison against zero. */
8932 temp
= gen_reg_rtx (compare_mode
);
8933 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8934 gen_rtx_MINUS (compare_mode
, op0
, op1
)));
8936 op1
= CONST0_RTX (compare_mode
);
8938 /* If we don't care about NaNs we can reduce some of the comparisons
8939 down to faster ones. */
8940 if (flag_unsafe_math_optimizations
)
8946 true_cond
= false_cond
;
8959 /* Now, reduce everything down to a GE. */
8966 temp
= gen_reg_rtx (compare_mode
);
8967 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
8972 temp
= gen_reg_rtx (compare_mode
);
8973 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_ABS (compare_mode
, op0
)));
8978 temp
= gen_reg_rtx (compare_mode
);
8979 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8980 gen_rtx_NEG (compare_mode
,
8981 gen_rtx_ABS (compare_mode
, op0
))));
8986 temp
= gen_reg_rtx (result_mode
);
8987 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8988 gen_rtx_IF_THEN_ELSE (result_mode
,
8989 gen_rtx_GE (VOIDmode
,
8991 true_cond
, false_cond
)));
8993 true_cond
= false_cond
;
8995 temp
= gen_reg_rtx (compare_mode
);
8996 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
9001 temp
= gen_reg_rtx (result_mode
);
9002 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
9003 gen_rtx_IF_THEN_ELSE (result_mode
,
9004 gen_rtx_GE (VOIDmode
,
9006 true_cond
, false_cond
)));
9008 false_cond
= true_cond
;
9010 temp
= gen_reg_rtx (compare_mode
);
9011 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
9019 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
9020 gen_rtx_IF_THEN_ELSE (result_mode
,
9021 gen_rtx_GE (VOIDmode
,
9023 true_cond
, false_cond
)));
9027 /* Same as above, but for ints (isel). */
9030 rs6000_emit_int_cmove (dest
, op
, true_cond
, false_cond
)
9036 rtx condition_rtx
, cr
;
9038 /* All isel implementations thus far are 32-bits. */
9039 if (GET_MODE (rs6000_compare_op0
) != SImode
)
9042 /* We still have to do the compare, because isel doesn't do a
9043 compare, it just looks at the CRx bits set by a previous compare
9045 condition_rtx
= rs6000_generate_compare (GET_CODE (op
));
9046 cr
= XEXP (condition_rtx
, 0);
9048 if (GET_MODE (cr
) == CCmode
)
9049 emit_insn (gen_isel_signed (dest
, condition_rtx
,
9050 true_cond
, false_cond
, cr
));
9052 emit_insn (gen_isel_unsigned (dest
, condition_rtx
,
9053 true_cond
, false_cond
, cr
));
9059 output_isel (operands
)
9064 code
= GET_CODE (operands
[1]);
9065 if (code
== GE
|| code
== GEU
|| code
== LE
|| code
== LEU
|| code
== NE
)
9067 PUT_CODE (operands
[1], reverse_condition (code
));
9068 return "isel %0,%3,%2,%j1";
9071 return "isel %0,%2,%3,%j1";
9075 rs6000_emit_minmax (dest
, code
, op0
, op1
)
9081 enum machine_mode mode
= GET_MODE (op0
);
9085 if (code
== SMAX
|| code
== SMIN
)
9090 if (code
== SMAX
|| code
== UMAX
)
9091 target
= emit_conditional_move (dest
, c
, op0
, op1
, mode
,
9094 target
= emit_conditional_move (dest
, c
, op0
, op1
, mode
,
9096 if (target
== NULL_RTX
)
9099 emit_move_insn (dest
, target
);
9102 /* This page contains routines that are used to determine what the
9103 function prologue and epilogue code will do and write them out. */
9105 /* Return the first fixed-point register that is required to be
9106 saved. 32 if none. */
9109 first_reg_to_save ()
9113 /* Find lowest numbered live register. */
9114 for (first_reg
= 13; first_reg
<= 31; first_reg
++)
9115 if (regs_ever_live
[first_reg
]
9116 && (! call_used_regs
[first_reg
]
9117 || (first_reg
== RS6000_PIC_OFFSET_TABLE_REGNUM
9118 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
!= 0)
9119 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
)))))
9124 && current_function_uses_pic_offset_table
9125 && first_reg
> RS6000_PIC_OFFSET_TABLE_REGNUM
)
9126 return RS6000_PIC_OFFSET_TABLE_REGNUM
;
9132 /* Similar, for FP regs. */
9135 first_fp_reg_to_save ()
9139 /* Find lowest numbered live register. */
9140 for (first_reg
= 14 + 32; first_reg
<= 63; first_reg
++)
9141 if (regs_ever_live
[first_reg
])
9147 /* Similar, for AltiVec regs. */
9150 first_altivec_reg_to_save ()
9154 /* Stack frame remains as is unless we are in AltiVec ABI. */
9155 if (! TARGET_ALTIVEC_ABI
)
9156 return LAST_ALTIVEC_REGNO
+ 1;
9158 /* Find lowest numbered live register. */
9159 for (i
= FIRST_ALTIVEC_REGNO
+ 20; i
<= LAST_ALTIVEC_REGNO
; ++i
)
9160 if (regs_ever_live
[i
])
9166 /* Return a 32-bit mask of the AltiVec registers we need to set in
9167 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
9168 the 32-bit word is 0. */
9171 compute_vrsave_mask ()
9173 unsigned int i
, mask
= 0;
9175 /* First, find out if we use _any_ altivec registers. */
9176 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
9177 if (regs_ever_live
[i
])
9178 mask
|= ALTIVEC_REG_BIT (i
);
9183 /* Next, remove the argument registers from the set. These must
9184 be in the VRSAVE mask set by the caller, so we don't need to add
9185 them in again. More importantly, the mask we compute here is
9186 used to generate CLOBBERs in the set_vrsave insn, and we do not
9187 wish the argument registers to die. */
9188 for (i
= cfun
->args_info
.vregno
; i
>= ALTIVEC_ARG_MIN_REG
; --i
)
9189 mask
&= ~ALTIVEC_REG_BIT (i
);
9191 /* Similarly, remove the return value from the set. */
9194 diddle_return_value (is_altivec_return_reg
, &yes
);
9196 mask
&= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN
);
9203 is_altivec_return_reg (reg
, xyes
)
9207 bool *yes
= (bool *) xyes
;
9208 if (REGNO (reg
) == ALTIVEC_ARG_RETURN
)
9213 /* Calculate the stack information for the current function. This is
9214 complicated by having two separate calling sequences, the AIX calling
9215 sequence and the V.4 calling sequence.
9217 AIX (and Darwin/Mac OS X) stack frames look like:
9219 SP----> +---------------------------------------+
9220 | back chain to caller | 0 0
9221 +---------------------------------------+
9222 | saved CR | 4 8 (8-11)
9223 +---------------------------------------+
9225 +---------------------------------------+
9226 | reserved for compilers | 12 24
9227 +---------------------------------------+
9228 | reserved for binders | 16 32
9229 +---------------------------------------+
9230 | saved TOC pointer | 20 40
9231 +---------------------------------------+
9232 | Parameter save area (P) | 24 48
9233 +---------------------------------------+
9234 | Alloca space (A) | 24+P etc.
9235 +---------------------------------------+
9236 | Local variable space (L) | 24+P+A
9237 +---------------------------------------+
9238 | Float/int conversion temporary (X) | 24+P+A+L
9239 +---------------------------------------+
9240 | Save area for AltiVec registers (W) | 24+P+A+L+X
9241 +---------------------------------------+
9242 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
9243 +---------------------------------------+
9244 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
9245 +---------------------------------------+
9246 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
9247 +---------------------------------------+
9248 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
9249 +---------------------------------------+
9250 old SP->| back chain to caller's caller |
9251 +---------------------------------------+
9253 The required alignment for AIX configurations is two words (i.e., 8
9257 V.4 stack frames look like:
9259 SP----> +---------------------------------------+
9260 | back chain to caller | 0
9261 +---------------------------------------+
9262 | caller's saved LR | 4
9263 +---------------------------------------+
9264 | Parameter save area (P) | 8
9265 +---------------------------------------+
9266 | Alloca space (A) | 8+P
9267 +---------------------------------------+
9268 | Varargs save area (V) | 8+P+A
9269 +---------------------------------------+
9270 | Local variable space (L) | 8+P+A+V
9271 +---------------------------------------+
9272 | Float/int conversion temporary (X) | 8+P+A+V+L
9273 +---------------------------------------+
9274 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
9275 +---------------------------------------+
9276 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
9277 +---------------------------------------+
9278 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
9279 +---------------------------------------+
9280 | SPE: area for 64-bit GP registers |
9281 +---------------------------------------+
9282 | SPE alignment padding |
9283 +---------------------------------------+
9284 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
9285 +---------------------------------------+
9286 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
9287 +---------------------------------------+
9288 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
9289 +---------------------------------------+
9290 old SP->| back chain to caller's caller |
9291 +---------------------------------------+
9293 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
9294 given. (But note below and in sysv4.h that we require only 8 and
9295 may round up the size of our stack frame anyways. The historical
9296 reason is early versions of powerpc-linux which didn't properly
9297 align the stack at program startup. A happy side-effect is that
9298 -mno-eabi libraries can be used with -meabi programs.)
9300 The EABI configuration defaults to the V.4 layout. However,
9301 the stack alignment requirements may differ. If -mno-eabi is not
9302 given, the required stack alignment is 8 bytes; if -mno-eabi is
9303 given, the required alignment is 16 bytes. (But see V.4 comment
9306 #ifndef ABI_STACK_BOUNDARY
9307 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
9311 rs6000_stack_info ()
9313 static rs6000_stack_t info
, zero_info
;
9314 rs6000_stack_t
*info_ptr
= &info
;
9315 int reg_size
= TARGET_POWERPC64
? 8 : 4;
9319 /* Zero all fields portably. */
9324 /* Cache value so we don't rescan instruction chain over and over. */
9325 if (cfun
->machine
->insn_chain_scanned_p
== 0)
9327 cfun
->machine
->insn_chain_scanned_p
= 1;
9328 info_ptr
->spe_64bit_regs_used
= (int) spe_func_has_64bit_regs_p ();
9332 /* Select which calling sequence. */
9333 info_ptr
->abi
= DEFAULT_ABI
;
9335 /* Calculate which registers need to be saved & save area size. */
9336 info_ptr
->first_gp_reg_save
= first_reg_to_save ();
9337 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
9338 even if it currently looks like we won't. */
9339 if (((TARGET_TOC
&& TARGET_MINIMAL_TOC
)
9340 || (flag_pic
== 1 && DEFAULT_ABI
== ABI_V4
)
9341 || (flag_pic
&& DEFAULT_ABI
== ABI_DARWIN
))
9342 && info_ptr
->first_gp_reg_save
> RS6000_PIC_OFFSET_TABLE_REGNUM
)
9343 info_ptr
->gp_size
= reg_size
* (32 - RS6000_PIC_OFFSET_TABLE_REGNUM
);
9345 info_ptr
->gp_size
= reg_size
* (32 - info_ptr
->first_gp_reg_save
);
9347 /* For the SPE, we have an additional upper 32-bits on each GPR.
9348 Ideally we should save the entire 64-bits only when the upper
9349 half is used in SIMD instructions. Since we only record
9350 registers live (not the size they are used in), this proves
9351 difficult because we'd have to traverse the instruction chain at
9352 the right time, taking reload into account. This is a real pain,
9353 so we opt to save the GPRs in 64-bits always if but one register
9354 gets used in 64-bits. Otherwise, all the registers in the frame
9355 get saved in 32-bits.
9357 So... since when we save all GPRs (except the SP) in 64-bits, the
9358 traditional GP save area will be empty. */
9359 if (TARGET_SPE_ABI
&& info_ptr
->spe_64bit_regs_used
!= 0)
9360 info_ptr
->gp_size
= 0;
9362 info_ptr
->first_fp_reg_save
= first_fp_reg_to_save ();
9363 info_ptr
->fp_size
= 8 * (64 - info_ptr
->first_fp_reg_save
);
9365 info_ptr
->first_altivec_reg_save
= first_altivec_reg_to_save ();
9366 info_ptr
->altivec_size
= 16 * (LAST_ALTIVEC_REGNO
+ 1
9367 - info_ptr
->first_altivec_reg_save
);
9369 /* Does this function call anything? */
9370 info_ptr
->calls_p
= (! current_function_is_leaf
9371 || cfun
->machine
->ra_needs_full_frame
);
9373 /* Determine if we need to save the link register. */
9374 if (rs6000_ra_ever_killed ()
9375 || (DEFAULT_ABI
== ABI_AIX
9376 && current_function_profile
9377 && !TARGET_PROFILE_KERNEL
)
9378 #ifdef TARGET_RELOCATABLE
9379 || (TARGET_RELOCATABLE
&& (get_pool_size () != 0))
9381 || (info_ptr
->first_fp_reg_save
!= 64
9382 && !FP_SAVE_INLINE (info_ptr
->first_fp_reg_save
))
9383 || info_ptr
->first_altivec_reg_save
<= LAST_ALTIVEC_REGNO
9384 || (DEFAULT_ABI
== ABI_V4
&& current_function_calls_alloca
)
9385 || (DEFAULT_ABI
== ABI_DARWIN
9387 && current_function_uses_pic_offset_table
)
9388 || info_ptr
->calls_p
)
9390 info_ptr
->lr_save_p
= 1;
9391 regs_ever_live
[LINK_REGISTER_REGNUM
] = 1;
9394 /* Determine if we need to save the condition code registers. */
9395 if (regs_ever_live
[CR2_REGNO
]
9396 || regs_ever_live
[CR3_REGNO
]
9397 || regs_ever_live
[CR4_REGNO
])
9399 info_ptr
->cr_save_p
= 1;
9400 if (DEFAULT_ABI
== ABI_V4
)
9401 info_ptr
->cr_size
= reg_size
;
9404 /* If the current function calls __builtin_eh_return, then we need
9405 to allocate stack space for registers that will hold data for
9406 the exception handler. */
9407 if (current_function_calls_eh_return
)
9410 for (i
= 0; EH_RETURN_DATA_REGNO (i
) != INVALID_REGNUM
; ++i
)
9413 /* SPE saves EH registers in 64-bits. */
9414 ehrd_size
= i
* (TARGET_SPE_ABI
9415 && info_ptr
->spe_64bit_regs_used
!= 0
9416 ? UNITS_PER_SPE_WORD
: UNITS_PER_WORD
);
9421 /* Determine various sizes. */
9422 info_ptr
->reg_size
= reg_size
;
9423 info_ptr
->fixed_size
= RS6000_SAVE_AREA
;
9424 info_ptr
->varargs_size
= RS6000_VARARGS_AREA
;
9425 info_ptr
->vars_size
= RS6000_ALIGN (get_frame_size (), 8);
9426 info_ptr
->parm_size
= RS6000_ALIGN (current_function_outgoing_args_size
,
9429 if (TARGET_SPE_ABI
&& info_ptr
->spe_64bit_regs_used
!= 0)
9430 info_ptr
->spe_gp_size
= 8 * (32 - info_ptr
->first_gp_reg_save
);
9432 info_ptr
->spe_gp_size
= 0;
9434 if (TARGET_ALTIVEC_ABI
&& TARGET_ALTIVEC_VRSAVE
)
9436 info_ptr
->vrsave_mask
= compute_vrsave_mask ();
9437 info_ptr
->vrsave_size
= info_ptr
->vrsave_mask
? 4 : 0;
9441 info_ptr
->vrsave_mask
= 0;
9442 info_ptr
->vrsave_size
= 0;
9445 /* Calculate the offsets. */
9446 switch (DEFAULT_ABI
)
9454 info_ptr
->fp_save_offset
= - info_ptr
->fp_size
;
9455 info_ptr
->gp_save_offset
= info_ptr
->fp_save_offset
- info_ptr
->gp_size
;
9457 if (TARGET_ALTIVEC_ABI
)
9459 info_ptr
->vrsave_save_offset
9460 = info_ptr
->gp_save_offset
- info_ptr
->vrsave_size
;
9462 /* Align stack so vector save area is on a quadword boundary. */
9463 if (info_ptr
->altivec_size
!= 0)
9464 info_ptr
->altivec_padding_size
9465 = 16 - (-info_ptr
->vrsave_save_offset
% 16);
9467 info_ptr
->altivec_padding_size
= 0;
9469 info_ptr
->altivec_save_offset
9470 = info_ptr
->vrsave_save_offset
9471 - info_ptr
->altivec_padding_size
9472 - info_ptr
->altivec_size
;
9474 /* Adjust for AltiVec case. */
9475 info_ptr
->ehrd_offset
= info_ptr
->altivec_save_offset
- ehrd_size
;
9478 info_ptr
->ehrd_offset
= info_ptr
->gp_save_offset
- ehrd_size
;
9479 info_ptr
->cr_save_offset
= reg_size
; /* first word when 64-bit. */
9480 info_ptr
->lr_save_offset
= 2*reg_size
;
9484 info_ptr
->fp_save_offset
= - info_ptr
->fp_size
;
9485 info_ptr
->gp_save_offset
= info_ptr
->fp_save_offset
- info_ptr
->gp_size
;
9486 info_ptr
->cr_save_offset
= info_ptr
->gp_save_offset
- info_ptr
->cr_size
;
9488 if (TARGET_SPE_ABI
&& info_ptr
->spe_64bit_regs_used
!= 0)
9490 /* Align stack so SPE GPR save area is aligned on a
9491 double-word boundary. */
9492 if (info_ptr
->spe_gp_size
!= 0)
9493 info_ptr
->spe_padding_size
9494 = 8 - (-info_ptr
->cr_save_offset
% 8);
9496 info_ptr
->spe_padding_size
= 0;
9498 info_ptr
->spe_gp_save_offset
9499 = info_ptr
->cr_save_offset
9500 - info_ptr
->spe_padding_size
9501 - info_ptr
->spe_gp_size
;
9503 /* Adjust for SPE case. */
9504 info_ptr
->toc_save_offset
9505 = info_ptr
->spe_gp_save_offset
- info_ptr
->toc_size
;
9507 else if (TARGET_ALTIVEC_ABI
)
9509 info_ptr
->vrsave_save_offset
9510 = info_ptr
->cr_save_offset
- info_ptr
->vrsave_size
;
9512 /* Align stack so vector save area is on a quadword boundary. */
9513 if (info_ptr
->altivec_size
!= 0)
9514 info_ptr
->altivec_padding_size
9515 = 16 - (-info_ptr
->vrsave_save_offset
% 16);
9517 info_ptr
->altivec_padding_size
= 0;
9519 info_ptr
->altivec_save_offset
9520 = info_ptr
->vrsave_save_offset
9521 - info_ptr
->altivec_padding_size
9522 - info_ptr
->altivec_size
;
9524 /* Adjust for AltiVec case. */
9525 info_ptr
->toc_save_offset
9526 = info_ptr
->altivec_save_offset
- info_ptr
->toc_size
;
9529 info_ptr
->toc_save_offset
= info_ptr
->cr_save_offset
- info_ptr
->toc_size
;
9530 info_ptr
->ehrd_offset
= info_ptr
->toc_save_offset
- ehrd_size
;
9531 info_ptr
->lr_save_offset
= reg_size
;
9535 info_ptr
->save_size
= RS6000_ALIGN (info_ptr
->fp_size
9537 + info_ptr
->altivec_size
9538 + info_ptr
->altivec_padding_size
9539 + info_ptr
->vrsave_size
9540 + info_ptr
->spe_gp_size
9541 + info_ptr
->spe_padding_size
9545 + info_ptr
->vrsave_size
9546 + info_ptr
->toc_size
,
9547 (TARGET_ALTIVEC_ABI
|| ABI_DARWIN
)
9550 total_raw_size
= (info_ptr
->vars_size
9551 + info_ptr
->parm_size
9552 + info_ptr
->save_size
9553 + info_ptr
->varargs_size
9554 + info_ptr
->fixed_size
);
9556 info_ptr
->total_size
=
9557 RS6000_ALIGN (total_raw_size
, ABI_STACK_BOUNDARY
/ BITS_PER_UNIT
);
9559 /* Determine if we need to allocate any stack frame:
9561 For AIX we need to push the stack if a frame pointer is needed
9562 (because the stack might be dynamically adjusted), if we are
9563 debugging, if we make calls, or if the sum of fp_save, gp_save,
9564 and local variables are more than the space needed to save all
9565 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
9566 + 18*8 = 288 (GPR13 reserved).
9568 For V.4 we don't have the stack cushion that AIX uses, but assume
9569 that the debugger can handle stackless frames. */
9571 if (info_ptr
->calls_p
)
9572 info_ptr
->push_p
= 1;
9574 else if (DEFAULT_ABI
== ABI_V4
)
9575 info_ptr
->push_p
= total_raw_size
> info_ptr
->fixed_size
;
9577 else if (frame_pointer_needed
)
9578 info_ptr
->push_p
= 1;
9580 else if (TARGET_XCOFF
&& write_symbols
!= NO_DEBUG
)
9581 info_ptr
->push_p
= 1;
9585 = total_raw_size
- info_ptr
->fixed_size
> (TARGET_32BIT
? 220 : 288);
9587 /* Zero offsets if we're not saving those registers. */
9588 if (info_ptr
->fp_size
== 0)
9589 info_ptr
->fp_save_offset
= 0;
9591 if (info_ptr
->gp_size
== 0)
9592 info_ptr
->gp_save_offset
= 0;
9594 if (! TARGET_ALTIVEC_ABI
|| info_ptr
->altivec_size
== 0)
9595 info_ptr
->altivec_save_offset
= 0;
9597 if (! TARGET_ALTIVEC_ABI
|| info_ptr
->vrsave_mask
== 0)
9598 info_ptr
->vrsave_save_offset
= 0;
9600 if (! TARGET_SPE_ABI
9601 || info_ptr
->spe_64bit_regs_used
== 0
9602 || info_ptr
->spe_gp_size
== 0)
9603 info_ptr
->spe_gp_save_offset
= 0;
9605 if (! info_ptr
->lr_save_p
)
9606 info_ptr
->lr_save_offset
= 0;
9608 if (! info_ptr
->cr_save_p
)
9609 info_ptr
->cr_save_offset
= 0;
9611 if (! info_ptr
->toc_save_p
)
9612 info_ptr
->toc_save_offset
= 0;
9617 /* Return true if the current function uses any GPRs in 64-bit SIMD
9621 spe_func_has_64bit_regs_p ()
9625 /* Functions that save and restore all the call-saved registers will
9626 need to save/restore the registers in 64-bits. */
9627 if (current_function_calls_eh_return
9628 || current_function_calls_setjmp
9629 || current_function_has_nonlocal_goto
)
9632 insns
= get_insns ();
9634 for (insn
= NEXT_INSN (insns
); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
9641 if (GET_CODE (i
) == SET
9642 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i
))))
9651 debug_stack_info (info
)
9652 rs6000_stack_t
*info
;
9654 const char *abi_string
;
9657 info
= rs6000_stack_info ();
9659 fprintf (stderr
, "\nStack information for function %s:\n",
9660 ((current_function_decl
&& DECL_NAME (current_function_decl
))
9661 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl
))
9666 default: abi_string
= "Unknown"; break;
9667 case ABI_NONE
: abi_string
= "NONE"; break;
9668 case ABI_AIX
: abi_string
= "AIX"; break;
9669 case ABI_DARWIN
: abi_string
= "Darwin"; break;
9670 case ABI_V4
: abi_string
= "V.4"; break;
9673 fprintf (stderr
, "\tABI = %5s\n", abi_string
);
9675 if (TARGET_ALTIVEC_ABI
)
9676 fprintf (stderr
, "\tALTIVEC ABI extensions enabled.\n");
9679 fprintf (stderr
, "\tSPE ABI extensions enabled.\n");
9681 if (info
->first_gp_reg_save
!= 32)
9682 fprintf (stderr
, "\tfirst_gp_reg_save = %5d\n", info
->first_gp_reg_save
);
9684 if (info
->first_fp_reg_save
!= 64)
9685 fprintf (stderr
, "\tfirst_fp_reg_save = %5d\n", info
->first_fp_reg_save
);
9687 if (info
->first_altivec_reg_save
<= LAST_ALTIVEC_REGNO
)
9688 fprintf (stderr
, "\tfirst_altivec_reg_save = %5d\n",
9689 info
->first_altivec_reg_save
);
9691 if (info
->lr_save_p
)
9692 fprintf (stderr
, "\tlr_save_p = %5d\n", info
->lr_save_p
);
9694 if (info
->cr_save_p
)
9695 fprintf (stderr
, "\tcr_save_p = %5d\n", info
->cr_save_p
);
9697 if (info
->toc_save_p
)
9698 fprintf (stderr
, "\ttoc_save_p = %5d\n", info
->toc_save_p
);
9700 if (info
->vrsave_mask
)
9701 fprintf (stderr
, "\tvrsave_mask = 0x%x\n", info
->vrsave_mask
);
9704 fprintf (stderr
, "\tpush_p = %5d\n", info
->push_p
);
9707 fprintf (stderr
, "\tcalls_p = %5d\n", info
->calls_p
);
9709 if (info
->gp_save_offset
)
9710 fprintf (stderr
, "\tgp_save_offset = %5d\n", info
->gp_save_offset
);
9712 if (info
->fp_save_offset
)
9713 fprintf (stderr
, "\tfp_save_offset = %5d\n", info
->fp_save_offset
);
9715 if (info
->altivec_save_offset
)
9716 fprintf (stderr
, "\taltivec_save_offset = %5d\n",
9717 info
->altivec_save_offset
);
9719 if (info
->spe_gp_save_offset
)
9720 fprintf (stderr
, "\tspe_gp_save_offset = %5d\n",
9721 info
->spe_gp_save_offset
);
9723 if (info
->vrsave_save_offset
)
9724 fprintf (stderr
, "\tvrsave_save_offset = %5d\n",
9725 info
->vrsave_save_offset
);
9727 if (info
->lr_save_offset
)
9728 fprintf (stderr
, "\tlr_save_offset = %5d\n", info
->lr_save_offset
);
9730 if (info
->cr_save_offset
)
9731 fprintf (stderr
, "\tcr_save_offset = %5d\n", info
->cr_save_offset
);
9733 if (info
->toc_save_offset
)
9734 fprintf (stderr
, "\ttoc_save_offset = %5d\n", info
->toc_save_offset
);
9736 if (info
->varargs_save_offset
)
9737 fprintf (stderr
, "\tvarargs_save_offset = %5d\n", info
->varargs_save_offset
);
9739 if (info
->total_size
)
9740 fprintf (stderr
, "\ttotal_size = %5d\n", info
->total_size
);
9742 if (info
->varargs_size
)
9743 fprintf (stderr
, "\tvarargs_size = %5d\n", info
->varargs_size
);
9745 if (info
->vars_size
)
9746 fprintf (stderr
, "\tvars_size = %5d\n", info
->vars_size
);
9748 if (info
->parm_size
)
9749 fprintf (stderr
, "\tparm_size = %5d\n", info
->parm_size
);
9751 if (info
->fixed_size
)
9752 fprintf (stderr
, "\tfixed_size = %5d\n", info
->fixed_size
);
9755 fprintf (stderr
, "\tgp_size = %5d\n", info
->gp_size
);
9757 if (info
->spe_gp_size
)
9758 fprintf (stderr
, "\tspe_gp_size = %5d\n", info
->spe_gp_size
);
9761 fprintf (stderr
, "\tfp_size = %5d\n", info
->fp_size
);
9763 if (info
->altivec_size
)
9764 fprintf (stderr
, "\taltivec_size = %5d\n", info
->altivec_size
);
9766 if (info
->vrsave_size
)
9767 fprintf (stderr
, "\tvrsave_size = %5d\n", info
->vrsave_size
);
9769 if (info
->altivec_padding_size
)
9770 fprintf (stderr
, "\taltivec_padding_size= %5d\n",
9771 info
->altivec_padding_size
);
9773 if (info
->spe_padding_size
)
9774 fprintf (stderr
, "\tspe_padding_size = %5d\n",
9775 info
->spe_padding_size
);
9778 fprintf (stderr
, "\tlr_size = %5d\n", info
->lr_size
);
9781 fprintf (stderr
, "\tcr_size = %5d\n", info
->cr_size
);
9784 fprintf (stderr
, "\ttoc_size = %5d\n", info
->toc_size
);
9786 if (info
->save_size
)
9787 fprintf (stderr
, "\tsave_size = %5d\n", info
->save_size
);
9789 if (info
->reg_size
!= 4)
9790 fprintf (stderr
, "\treg_size = %5d\n", info
->reg_size
);
9792 fprintf (stderr
, "\n");
9796 rs6000_return_addr (count
, frame
)
9800 /* Currently we don't optimize very well between prolog and body
9801 code and for PIC code the code can be actually quite bad, so
9802 don't try to be too clever here. */
9803 if (count
!= 0 || (DEFAULT_ABI
!= ABI_AIX
&& flag_pic
))
9805 cfun
->machine
->ra_needs_full_frame
= 1;
9812 plus_constant (copy_to_reg
9813 (gen_rtx_MEM (Pmode
,
9814 memory_address (Pmode
, frame
))),
9815 RETURN_ADDRESS_OFFSET
)));
9818 return get_hard_reg_initial_val (Pmode
, LINK_REGISTER_REGNUM
);
9821 /* Say whether a function is a candidate for sibcall handling or not.
9822 We do not allow indirect calls to be optimized into sibling calls.
9823 Also, we can't do it if there are any vector parameters; there's
9824 nowhere to put the VRsave code so it works; note that functions with
9825 vector parameters are required to have a prototype, so the argument
9826 type info must be available here. (The tail recursion case can work
9827 with vector parameters, but there's no way to distinguish here.) */
9829 rs6000_function_ok_for_sibcall (decl
, exp
)
9831 tree exp ATTRIBUTE_UNUSED
;
9836 if (TARGET_ALTIVEC_VRSAVE
)
9838 for (type
= TYPE_ARG_TYPES (TREE_TYPE (decl
));
9839 type
; type
= TREE_CHAIN (type
))
9841 if (TREE_CODE (TREE_VALUE (type
)) == VECTOR_TYPE
)
9845 if (DEFAULT_ABI
== ABI_DARWIN
9846 || (*targetm
.binds_local_p
) (decl
))
9848 tree attr_list
= TYPE_ATTRIBUTES (TREE_TYPE (decl
));
9850 if (!lookup_attribute ("longcall", attr_list
)
9851 || lookup_attribute ("shortcall", attr_list
))
9859 rs6000_ra_ever_killed ()
9865 /* Irritatingly, there are two kinds of thunks -- those created with
9866 TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
9867 through the regular part of the compiler. This is a very hacky
9868 way to tell them apart. */
9869 if (current_function_is_thunk
&& !no_new_pseudos
)
9872 /* regs_ever_live has LR marked as used if any sibcalls are present,
9873 but this should not force saving and restoring in the
9874 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
9875 clobbers LR, so that is inappropriate. */
9877 /* Also, the prologue can generate a store into LR that
9878 doesn't really count, like this:
9881 bcl to set PIC register
9885 When we're called from the epilogue, we need to avoid counting
9888 push_topmost_sequence ();
9890 pop_topmost_sequence ();
9891 reg
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
9893 for (insn
= NEXT_INSN (top
); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
9897 if (FIND_REG_INC_NOTE (insn
, reg
))
9899 else if (GET_CODE (insn
) == CALL_INSN
9900 && !SIBLING_CALL_P (insn
))
9902 else if (set_of (reg
, insn
) != NULL_RTX
9903 && !prologue_epilogue_contains (insn
))
9910 /* Add a REG_MAYBE_DEAD note to the insn. */
9912 rs6000_maybe_dead (insn
)
9915 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
,
9920 /* Emit instructions needed to load the TOC register.
9921 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9922 a constant pool; or for SVR4 -fpic. */
9925 rs6000_emit_load_toc_table (fromprolog
)
9929 dest
= gen_rtx_REG (Pmode
, RS6000_PIC_OFFSET_TABLE_REGNUM
);
9931 if (TARGET_ELF
&& DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1)
9933 rtx temp
= (fromprolog
9934 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
9935 : gen_reg_rtx (Pmode
));
9936 insn
= emit_insn (gen_load_toc_v4_pic_si (temp
));
9938 rs6000_maybe_dead (insn
);
9939 insn
= emit_move_insn (dest
, temp
);
9941 rs6000_maybe_dead (insn
);
9943 else if (TARGET_ELF
&& DEFAULT_ABI
!= ABI_AIX
&& flag_pic
== 2)
9946 rtx tempLR
= (fromprolog
9947 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
9948 : gen_reg_rtx (Pmode
));
9949 rtx temp0
= (fromprolog
9950 ? gen_rtx_REG (Pmode
, 0)
9951 : gen_reg_rtx (Pmode
));
9954 /* possibly create the toc section */
9955 if (! toc_initialized
)
9958 function_section (current_function_decl
);
9965 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCF", rs6000_pic_labelno
);
9966 symF
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9968 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCL", rs6000_pic_labelno
);
9969 symL
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9971 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR
,
9973 rs6000_maybe_dead (emit_move_insn (dest
, tempLR
));
9974 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0
, dest
,
9981 static int reload_toc_labelno
= 0;
9983 tocsym
= gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
);
9985 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCG", reload_toc_labelno
++);
9986 symF
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9988 emit_insn (gen_load_toc_v4_PIC_1b (tempLR
, symF
, tocsym
));
9989 emit_move_insn (dest
, tempLR
);
9990 emit_move_insn (temp0
, gen_rtx_MEM (Pmode
, dest
));
9992 insn
= emit_insn (gen_addsi3 (dest
, temp0
, dest
));
9994 rs6000_maybe_dead (insn
);
9996 else if (TARGET_ELF
&& !TARGET_AIX
&& flag_pic
== 0 && TARGET_MINIMAL_TOC
)
9998 /* This is for AIX code running in non-PIC ELF32. */
10001 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCTOC", 1);
10002 realsym
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
10004 insn
= emit_insn (gen_elf_high (dest
, realsym
));
10006 rs6000_maybe_dead (insn
);
10007 insn
= emit_insn (gen_elf_low (dest
, dest
, realsym
));
10009 rs6000_maybe_dead (insn
);
10011 else if (DEFAULT_ABI
== ABI_AIX
)
10014 insn
= emit_insn (gen_load_toc_aix_si (dest
));
10016 insn
= emit_insn (gen_load_toc_aix_di (dest
));
10018 rs6000_maybe_dead (insn
);
10025 get_TOC_alias_set ()
10027 static int set
= -1;
10029 set
= new_alias_set ();
10033 /* This retuns nonzero if the current function uses the TOC. This is
10034 determined by the presence of (unspec ... UNSPEC_TOC), which is
10035 generated by the various load_toc_* patterns. */
10042 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
10045 rtx pat
= PATTERN (insn
);
10048 if (GET_CODE (pat
) == PARALLEL
)
10049 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
10050 if (GET_CODE (XVECEXP (PATTERN (insn
), 0, i
)) == UNSPEC
10051 && XINT (XVECEXP (PATTERN (insn
), 0, i
), 1) == UNSPEC_TOC
)
10058 create_TOC_reference (symbol
)
10061 return gen_rtx_PLUS (Pmode
,
10062 gen_rtx_REG (Pmode
, TOC_REGISTER
),
10063 gen_rtx_CONST (Pmode
,
10064 gen_rtx_MINUS (Pmode
, symbol
,
10065 gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
))));
10069 /* __throw will restore its own return address to be the same as the
10070 return address of the function that the throw is being made to.
10071 This is unfortunate, because we want to check the original
10072 return address to see if we need to restore the TOC.
10073 So we have to squirrel it away here.
10074 This is used only in compiling __throw and __rethrow.
10076 Most of this code should be removed by CSE. */
10077 static rtx insn_after_throw
;
10079 /* This does the saving... */
10081 rs6000_aix_emit_builtin_unwind_init ()
10084 rtx stack_top
= gen_reg_rtx (Pmode
);
10085 rtx opcode_addr
= gen_reg_rtx (Pmode
);
10087 insn_after_throw
= gen_reg_rtx (SImode
);
10089 mem
= gen_rtx_MEM (Pmode
, hard_frame_pointer_rtx
);
10090 emit_move_insn (stack_top
, mem
);
10092 mem
= gen_rtx_MEM (Pmode
,
10093 gen_rtx_PLUS (Pmode
, stack_top
,
10094 GEN_INT (2 * GET_MODE_SIZE (Pmode
))));
10095 emit_move_insn (opcode_addr
, mem
);
10096 emit_move_insn (insn_after_throw
, gen_rtx_MEM (SImode
, opcode_addr
));
10099 /* Emit insns to _restore_ the TOC register, at runtime (specifically
10100 in _eh.o). Only used on AIX.
10102 The idea is that on AIX, function calls look like this:
10103 bl somefunction-trampoline
10107 somefunction-trampoline:
10109 ... load function address in the count register ...
10111 or like this, if the linker determines that this is not a cross-module call
10112 and so the TOC need not be restored:
10115 or like this, if the compiler could determine that this is not a
10118 now, the tricky bit here is that register 2 is saved and restored
10119 by the _linker_, so we can't readily generate debugging information
10120 for it. So we need to go back up the call chain looking at the
10121 insns at return addresses to see which calls saved the TOC register
10122 and so see where it gets restored from.
10124 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
10125 just before the actual epilogue.
10127 On the bright side, this incurs no space or time overhead unless an
10128 exception is thrown, except for the extra code in libgcc.a.
10130 The parameter STACKSIZE is a register containing (at runtime)
10131 the amount to be popped off the stack in addition to the stack frame
10132 of this routine (which will be __throw or __rethrow, and so is
10133 guaranteed to have a stack frame). */
10136 rs6000_emit_eh_toc_restore (stacksize
)
10140 rtx bottom_of_stack
= gen_reg_rtx (Pmode
);
10141 rtx tocompare
= gen_reg_rtx (SImode
);
10142 rtx opcode
= gen_reg_rtx (SImode
);
10143 rtx opcode_addr
= gen_reg_rtx (Pmode
);
10145 rtx loop_start
= gen_label_rtx ();
10146 rtx no_toc_restore_needed
= gen_label_rtx ();
10147 rtx loop_exit
= gen_label_rtx ();
10149 mem
= gen_rtx_MEM (Pmode
, hard_frame_pointer_rtx
);
10150 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10151 emit_move_insn (bottom_of_stack
, mem
);
10153 top_of_stack
= expand_binop (Pmode
, add_optab
,
10154 bottom_of_stack
, stacksize
,
10155 NULL_RTX
, 1, OPTAB_WIDEN
);
10157 emit_move_insn (tocompare
, gen_int_mode (TARGET_32BIT
? 0x80410014
10158 : 0xE8410028, SImode
));
10160 if (insn_after_throw
== NULL_RTX
)
10162 emit_move_insn (opcode
, insn_after_throw
);
10164 emit_note (NULL
, NOTE_INSN_LOOP_BEG
);
10165 emit_label (loop_start
);
10167 do_compare_rtx_and_jump (opcode
, tocompare
, NE
, 1,
10168 SImode
, NULL_RTX
, NULL_RTX
,
10169 no_toc_restore_needed
);
10171 mem
= gen_rtx_MEM (Pmode
,
10172 gen_rtx_PLUS (Pmode
, bottom_of_stack
,
10173 GEN_INT (5 * GET_MODE_SIZE (Pmode
))));
10174 emit_move_insn (gen_rtx_REG (Pmode
, 2), mem
);
10176 emit_label (no_toc_restore_needed
);
10177 do_compare_rtx_and_jump (top_of_stack
, bottom_of_stack
, EQ
, 1,
10178 Pmode
, NULL_RTX
, NULL_RTX
,
10181 mem
= gen_rtx_MEM (Pmode
, bottom_of_stack
);
10182 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10183 emit_move_insn (bottom_of_stack
, mem
);
10185 mem
= gen_rtx_MEM (Pmode
,
10186 gen_rtx_PLUS (Pmode
, bottom_of_stack
,
10187 GEN_INT (2 * GET_MODE_SIZE (Pmode
))));
10188 emit_move_insn (opcode_addr
, mem
);
10189 emit_move_insn (opcode
, gen_rtx_MEM (SImode
, opcode_addr
));
10191 emit_note (NULL
, NOTE_INSN_LOOP_CONT
);
10192 emit_jump (loop_start
);
10193 emit_note (NULL
, NOTE_INSN_LOOP_END
);
10194 emit_label (loop_exit
);
10196 #endif /* TARGET_AIX */
10198 /* This ties together stack memory (MEM with an alias set of
10199 rs6000_sr_alias_set) and the change to the stack pointer. */
10202 rs6000_emit_stack_tie ()
10204 rtx mem
= gen_rtx_MEM (BLKmode
, gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
));
10206 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10207 emit_insn (gen_stack_tie (mem
));
10210 /* Emit the correct code for allocating stack space, as insns.
10211 If COPY_R12, make sure a copy of the old frame is left in r12.
10212 The generated code may use hard register 0 as a temporary. */
10215 rs6000_emit_allocate_stack (size
, copy_r12
)
10216 HOST_WIDE_INT size
;
10220 rtx stack_reg
= gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
10221 rtx tmp_reg
= gen_rtx_REG (Pmode
, 0);
10222 rtx todec
= GEN_INT (-size
);
10224 if (current_function_limit_stack
)
10226 if (REG_P (stack_limit_rtx
)
10227 && REGNO (stack_limit_rtx
) > 1
10228 && REGNO (stack_limit_rtx
) <= 31)
10230 emit_insn (TARGET_32BIT
10231 ? gen_addsi3 (tmp_reg
,
10234 : gen_adddi3 (tmp_reg
,
10238 emit_insn (gen_cond_trap (LTU
, stack_reg
, tmp_reg
,
10241 else if (GET_CODE (stack_limit_rtx
) == SYMBOL_REF
10243 && DEFAULT_ABI
== ABI_V4
)
10245 rtx toload
= gen_rtx_CONST (VOIDmode
,
10246 gen_rtx_PLUS (Pmode
,
10250 emit_insn (gen_elf_high (tmp_reg
, toload
));
10251 emit_insn (gen_elf_low (tmp_reg
, tmp_reg
, toload
));
10252 emit_insn (gen_cond_trap (LTU
, stack_reg
, tmp_reg
,
10256 warning ("stack limit expression is not supported");
10259 if (copy_r12
|| ! TARGET_UPDATE
)
10260 emit_move_insn (gen_rtx_REG (Pmode
, 12), stack_reg
);
10266 /* Need a note here so that try_split doesn't get confused. */
10267 if (get_last_insn() == NULL_RTX
)
10268 emit_note (0, NOTE_INSN_DELETED
);
10269 insn
= emit_move_insn (tmp_reg
, todec
);
10270 try_split (PATTERN (insn
), insn
, 0);
10274 insn
= emit_insn (TARGET_32BIT
10275 ? gen_movsi_update (stack_reg
, stack_reg
,
10277 : gen_movdi_update (stack_reg
, stack_reg
,
10278 todec
, stack_reg
));
10282 insn
= emit_insn (TARGET_32BIT
10283 ? gen_addsi3 (stack_reg
, stack_reg
, todec
)
10284 : gen_adddi3 (stack_reg
, stack_reg
, todec
));
10285 emit_move_insn (gen_rtx_MEM (Pmode
, stack_reg
),
10286 gen_rtx_REG (Pmode
, 12));
10289 RTX_FRAME_RELATED_P (insn
) = 1;
10291 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
10292 gen_rtx_SET (VOIDmode
, stack_reg
,
10293 gen_rtx_PLUS (Pmode
, stack_reg
,
10298 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
10299 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
10300 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
10301 deduce these equivalences by itself so it wasn't necessary to hold
10302 its hand so much. */
10305 rs6000_frame_related (insn
, reg
, val
, reg2
, rreg
)
10314 /* copy_rtx will not make unique copies of registers, so we need to
10315 ensure we don't have unwanted sharing here. */
10317 reg
= gen_raw_REG (GET_MODE (reg
), REGNO (reg
));
10320 reg
= gen_raw_REG (GET_MODE (reg
), REGNO (reg
));
10322 real
= copy_rtx (PATTERN (insn
));
10324 if (reg2
!= NULL_RTX
)
10325 real
= replace_rtx (real
, reg2
, rreg
);
10327 real
= replace_rtx (real
, reg
,
10328 gen_rtx_PLUS (Pmode
, gen_rtx_REG (Pmode
,
10329 STACK_POINTER_REGNUM
),
10332 /* We expect that 'real' is either a SET or a PARALLEL containing
10333 SETs (and possibly other stuff). In a PARALLEL, all the SETs
10334 are important so they all have to be marked RTX_FRAME_RELATED_P. */
10336 if (GET_CODE (real
) == SET
)
10340 temp
= simplify_rtx (SET_SRC (set
));
10342 SET_SRC (set
) = temp
;
10343 temp
= simplify_rtx (SET_DEST (set
));
10345 SET_DEST (set
) = temp
;
10346 if (GET_CODE (SET_DEST (set
)) == MEM
)
10348 temp
= simplify_rtx (XEXP (SET_DEST (set
), 0));
10350 XEXP (SET_DEST (set
), 0) = temp
;
10353 else if (GET_CODE (real
) == PARALLEL
)
10356 for (i
= 0; i
< XVECLEN (real
, 0); i
++)
10357 if (GET_CODE (XVECEXP (real
, 0, i
)) == SET
)
10359 rtx set
= XVECEXP (real
, 0, i
);
10361 temp
= simplify_rtx (SET_SRC (set
));
10363 SET_SRC (set
) = temp
;
10364 temp
= simplify_rtx (SET_DEST (set
));
10366 SET_DEST (set
) = temp
;
10367 if (GET_CODE (SET_DEST (set
)) == MEM
)
10369 temp
= simplify_rtx (XEXP (SET_DEST (set
), 0));
10371 XEXP (SET_DEST (set
), 0) = temp
;
10373 RTX_FRAME_RELATED_P (set
) = 1;
10380 real
= spe_synthesize_frame_save (real
);
10382 RTX_FRAME_RELATED_P (insn
) = 1;
10383 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
10388 /* Given an SPE frame note, return a PARALLEL of SETs with the
10389 original note, plus a synthetic register save. */
10392 spe_synthesize_frame_save (real
)
10395 rtx synth
, offset
, reg
, real2
;
10397 if (GET_CODE (real
) != SET
10398 || GET_MODE (SET_SRC (real
)) != V2SImode
)
10401 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
10402 frame related note. The parallel contains a set of the register
10403 being saved, and another set to a synthetic register (n+1200).
10404 This is so we can differentiate between 64-bit and 32-bit saves.
10405 Words cannot describe this nastiness. */
10407 if (GET_CODE (SET_DEST (real
)) != MEM
10408 || GET_CODE (XEXP (SET_DEST (real
), 0)) != PLUS
10409 || GET_CODE (SET_SRC (real
)) != REG
)
10413 (set (mem (plus (reg x) (const y)))
10416 (set (mem (plus (reg x) (const y+4)))
10420 real2
= copy_rtx (real
);
10421 PUT_MODE (SET_DEST (real2
), SImode
);
10422 reg
= SET_SRC (real2
);
10423 real2
= replace_rtx (real2
, reg
, gen_rtx_REG (SImode
, REGNO (reg
)));
10424 synth
= copy_rtx (real2
);
10426 if (BYTES_BIG_ENDIAN
)
10428 offset
= XEXP (XEXP (SET_DEST (real2
), 0), 1);
10429 real2
= replace_rtx (real2
, offset
, GEN_INT (INTVAL (offset
) + 4));
10432 reg
= SET_SRC (synth
);
10434 synth
= replace_rtx (synth
, reg
,
10435 gen_rtx_REG (SImode
, REGNO (reg
) + 1200));
10437 offset
= XEXP (XEXP (SET_DEST (synth
), 0), 1);
10438 synth
= replace_rtx (synth
, offset
,
10439 GEN_INT (INTVAL (offset
)
10440 + (BYTES_BIG_ENDIAN
? 0 : 4)));
10442 RTX_FRAME_RELATED_P (synth
) = 1;
10443 RTX_FRAME_RELATED_P (real2
) = 1;
10444 if (BYTES_BIG_ENDIAN
)
10445 real
= gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, synth
, real2
));
10447 real
= gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, real2
, synth
));
10452 /* Returns an insn that has a vrsave set operation with the
10453 appropriate CLOBBERs. */
10456 generate_set_vrsave (reg
, info
, epiloguep
)
10458 rs6000_stack_t
*info
;
10462 rtx insn
, clobs
[TOTAL_ALTIVEC_REGS
+ 1];
10463 rtx vrsave
= gen_rtx_REG (SImode
, VRSAVE_REGNO
);
10466 = gen_rtx_SET (VOIDmode
,
10468 gen_rtx_UNSPEC_VOLATILE (SImode
,
10469 gen_rtvec (2, reg
, vrsave
),
10474 /* We need to clobber the registers in the mask so the scheduler
10475 does not move sets to VRSAVE before sets of AltiVec registers.
10477 However, if the function receives nonlocal gotos, reload will set
10478 all call saved registers live. We will end up with:
10480 (set (reg 999) (mem))
10481 (parallel [ (set (reg vrsave) (unspec blah))
10482 (clobber (reg 999))])
10484 The clobber will cause the store into reg 999 to be dead, and
10485 flow will attempt to delete an epilogue insn. In this case, we
10486 need an unspec use/set of the register. */
10488 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
10489 if (info
->vrsave_mask
!= 0 && ALTIVEC_REG_BIT (i
) != 0)
10491 if (!epiloguep
|| call_used_regs
[i
])
10492 clobs
[nclobs
++] = gen_rtx_CLOBBER (VOIDmode
,
10493 gen_rtx_REG (V4SImode
, i
));
10496 rtx reg
= gen_rtx_REG (V4SImode
, i
);
10499 = gen_rtx_SET (VOIDmode
,
10501 gen_rtx_UNSPEC (V4SImode
,
10502 gen_rtvec (1, reg
), 27));
10506 insn
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (nclobs
));
10508 for (i
= 0; i
< nclobs
; ++i
)
10509 XVECEXP (insn
, 0, i
) = clobs
[i
];
10514 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
10515 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
10518 emit_frame_save (frame_reg
, frame_ptr
, mode
, regno
, offset
, total_size
)
10521 enum machine_mode mode
;
10522 unsigned int regno
;
10526 rtx reg
, offset_rtx
, insn
, mem
, addr
, int_rtx
;
10527 rtx replacea
, replaceb
;
10529 int_rtx
= GEN_INT (offset
);
10531 /* Some cases that need register indexed addressing. */
10532 if ((TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
10534 && SPE_VECTOR_MODE (mode
)
10535 && !SPE_CONST_OFFSET_OK (offset
)))
10537 /* Whomever calls us must make sure r11 is available in the
10538 flow path of instructions in the prologue. */
10539 offset_rtx
= gen_rtx_REG (Pmode
, 11);
10540 emit_move_insn (offset_rtx
, int_rtx
);
10542 replacea
= offset_rtx
;
10543 replaceb
= int_rtx
;
10547 offset_rtx
= int_rtx
;
10548 replacea
= NULL_RTX
;
10549 replaceb
= NULL_RTX
;
10552 reg
= gen_rtx_REG (mode
, regno
);
10553 addr
= gen_rtx_PLUS (Pmode
, frame_reg
, offset_rtx
);
10554 mem
= gen_rtx_MEM (mode
, addr
);
10555 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10557 insn
= emit_move_insn (mem
, reg
);
10559 rs6000_frame_related (insn
, frame_ptr
, total_size
, replacea
, replaceb
);
10562 /* Emit an offset memory reference suitable for a frame store, while
10563 converting to a valid addressing mode. */
10566 gen_frame_mem_offset (mode
, reg
, offset
)
10567 enum machine_mode mode
;
10571 rtx int_rtx
, offset_rtx
;
10573 int_rtx
= GEN_INT (offset
);
10575 if (TARGET_SPE_ABI
&& SPE_VECTOR_MODE (mode
))
10577 offset_rtx
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
10578 emit_move_insn (offset_rtx
, int_rtx
);
10581 offset_rtx
= int_rtx
;
10583 return gen_rtx_MEM (mode
, gen_rtx_PLUS (Pmode
, reg
, offset_rtx
));
10586 /* Emit function prologue as insns. */
10589 rs6000_emit_prologue ()
10591 rs6000_stack_t
*info
= rs6000_stack_info ();
10592 enum machine_mode reg_mode
= TARGET_POWERPC64
? DImode
: SImode
;
10593 int reg_size
= TARGET_POWERPC64
? 8 : 4;
10594 rtx sp_reg_rtx
= gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
10595 rtx frame_ptr_rtx
= gen_rtx_REG (Pmode
, 12);
10596 rtx frame_reg_rtx
= sp_reg_rtx
;
10597 rtx cr_save_rtx
= NULL
;
10599 int saving_FPRs_inline
;
10600 int using_store_multiple
;
10601 HOST_WIDE_INT sp_offset
= 0;
10603 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
10605 reg_mode
= V2SImode
;
10609 using_store_multiple
= (TARGET_MULTIPLE
&& ! TARGET_POWERPC64
10610 && (!TARGET_SPE_ABI
10611 || info
->spe_64bit_regs_used
== 0)
10612 && info
->first_gp_reg_save
< 31);
10613 saving_FPRs_inline
= (info
->first_fp_reg_save
== 64
10614 || FP_SAVE_INLINE (info
->first_fp_reg_save
));
10616 /* For V.4, update stack before we do any saving and set back pointer. */
10617 if (info
->push_p
&& DEFAULT_ABI
== ABI_V4
)
10619 if (info
->total_size
< 32767)
10620 sp_offset
= info
->total_size
;
10622 frame_reg_rtx
= frame_ptr_rtx
;
10623 rs6000_emit_allocate_stack (info
->total_size
,
10624 (frame_reg_rtx
!= sp_reg_rtx
10625 && (info
->cr_save_p
10627 || info
->first_fp_reg_save
< 64
10628 || info
->first_gp_reg_save
< 32
10630 if (frame_reg_rtx
!= sp_reg_rtx
)
10631 rs6000_emit_stack_tie ();
10634 /* Save AltiVec registers if needed. */
10635 if (TARGET_ALTIVEC_ABI
&& info
->altivec_size
!= 0)
10639 /* There should be a non inline version of this, for when we
10640 are saving lots of vector registers. */
10641 for (i
= info
->first_altivec_reg_save
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
10642 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
10644 rtx areg
, savereg
, mem
;
10647 offset
= info
->altivec_save_offset
+ sp_offset
10648 + 16 * (i
- info
->first_altivec_reg_save
);
10650 savereg
= gen_rtx_REG (V4SImode
, i
);
10652 areg
= gen_rtx_REG (Pmode
, 0);
10653 emit_move_insn (areg
, GEN_INT (offset
));
10655 /* AltiVec addressing mode is [reg+reg]. */
10656 mem
= gen_rtx_MEM (V4SImode
,
10657 gen_rtx_PLUS (Pmode
, frame_reg_rtx
, areg
));
10659 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10661 insn
= emit_move_insn (mem
, savereg
);
10663 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10664 areg
, GEN_INT (offset
));
10668 /* VRSAVE is a bit vector representing which AltiVec registers
10669 are used. The OS uses this to determine which vector
10670 registers to save on a context switch. We need to save
10671 VRSAVE on the stack frame, add whatever AltiVec registers we
10672 used in this function, and do the corresponding magic in the
10675 if (TARGET_ALTIVEC
&& info
->vrsave_mask
!= 0)
10677 rtx reg
, mem
, vrsave
;
10680 /* Get VRSAVE onto a GPR. */
10681 reg
= gen_rtx_REG (SImode
, 12);
10682 vrsave
= gen_rtx_REG (SImode
, VRSAVE_REGNO
);
10684 emit_insn (gen_get_vrsave_internal (reg
));
10686 emit_insn (gen_rtx_SET (VOIDmode
, reg
, vrsave
));
10689 offset
= info
->vrsave_save_offset
+ sp_offset
;
10691 = gen_rtx_MEM (SImode
,
10692 gen_rtx_PLUS (Pmode
, frame_reg_rtx
, GEN_INT (offset
)));
10693 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10694 insn
= emit_move_insn (mem
, reg
);
10696 /* Include the registers in the mask. */
10697 emit_insn (gen_iorsi3 (reg
, reg
, GEN_INT ((int) info
->vrsave_mask
)));
10699 insn
= emit_insn (generate_set_vrsave (reg
, info
, 0));
10702 /* If we use the link register, get it into r0. */
10703 if (info
->lr_save_p
)
10704 emit_move_insn (gen_rtx_REG (Pmode
, 0),
10705 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
10707 /* If we need to save CR, put it into r12. */
10708 if (info
->cr_save_p
&& frame_reg_rtx
!= frame_ptr_rtx
)
10710 cr_save_rtx
= gen_rtx_REG (SImode
, 12);
10711 emit_insn (gen_movesi_from_cr (cr_save_rtx
));
10714 /* Do any required saving of fpr's. If only one or two to save, do
10715 it ourselves. Otherwise, call function. */
10716 if (saving_FPRs_inline
)
10719 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
10720 if ((regs_ever_live
[info
->first_fp_reg_save
+i
]
10721 && ! call_used_regs
[info
->first_fp_reg_save
+i
]))
10722 emit_frame_save (frame_reg_rtx
, frame_ptr_rtx
, DFmode
,
10723 info
->first_fp_reg_save
+ i
,
10724 info
->fp_save_offset
+ sp_offset
+ 8 * i
,
10727 else if (info
->first_fp_reg_save
!= 64)
10731 const char *alloc_rname
;
10733 p
= rtvec_alloc (2 + 64 - info
->first_fp_reg_save
);
10735 RTVEC_ELT (p
, 0) = gen_rtx_CLOBBER (VOIDmode
,
10736 gen_rtx_REG (Pmode
,
10737 LINK_REGISTER_REGNUM
));
10738 sprintf (rname
, "%s%d%s", SAVE_FP_PREFIX
,
10739 info
->first_fp_reg_save
- 32, SAVE_FP_SUFFIX
);
10740 alloc_rname
= ggc_strdup (rname
);
10741 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
,
10742 gen_rtx_SYMBOL_REF (Pmode
,
10744 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
10746 rtx addr
, reg
, mem
;
10747 reg
= gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
);
10748 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10749 GEN_INT (info
->fp_save_offset
10750 + sp_offset
+ 8*i
));
10751 mem
= gen_rtx_MEM (DFmode
, addr
);
10752 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10754 RTVEC_ELT (p
, i
+ 2) = gen_rtx_SET (VOIDmode
, mem
, reg
);
10756 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
10757 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10758 NULL_RTX
, NULL_RTX
);
10761 /* Save GPRs. This is done as a PARALLEL if we are using
10762 the store-multiple instructions. */
10763 if (using_store_multiple
)
10767 p
= rtvec_alloc (32 - info
->first_gp_reg_save
);
10768 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
10770 rtx addr
, reg
, mem
;
10771 reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
10772 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10773 GEN_INT (info
->gp_save_offset
10776 mem
= gen_rtx_MEM (reg_mode
, addr
);
10777 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10779 RTVEC_ELT (p
, i
) = gen_rtx_SET (VOIDmode
, mem
, reg
);
10781 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
10782 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10783 NULL_RTX
, NULL_RTX
);
10788 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
10789 if ((regs_ever_live
[info
->first_gp_reg_save
+i
]
10790 && ! call_used_regs
[info
->first_gp_reg_save
+i
])
10791 || (i
+info
->first_gp_reg_save
== RS6000_PIC_OFFSET_TABLE_REGNUM
10792 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
!= 0)
10793 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))))
10795 rtx addr
, reg
, mem
;
10796 reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
10798 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
10800 int offset
= info
->spe_gp_save_offset
+ sp_offset
+ 8 * i
;
10803 if (!SPE_CONST_OFFSET_OK (offset
))
10805 b
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
10806 emit_move_insn (b
, GEN_INT (offset
));
10809 b
= GEN_INT (offset
);
10811 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, b
);
10812 mem
= gen_rtx_MEM (V2SImode
, addr
);
10813 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10814 insn
= emit_move_insn (mem
, reg
);
10816 if (GET_CODE (b
) == CONST_INT
)
10817 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10818 NULL_RTX
, NULL_RTX
);
10820 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10821 b
, GEN_INT (offset
));
10825 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10826 GEN_INT (info
->gp_save_offset
10829 mem
= gen_rtx_MEM (reg_mode
, addr
);
10830 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10832 insn
= emit_move_insn (mem
, reg
);
10833 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10834 NULL_RTX
, NULL_RTX
);
10839 /* ??? There's no need to emit actual instructions here, but it's the
10840 easiest way to get the frame unwind information emitted. */
10841 if (current_function_calls_eh_return
)
10843 unsigned int i
, regno
;
10847 regno
= EH_RETURN_DATA_REGNO (i
);
10848 if (regno
== INVALID_REGNUM
)
10851 emit_frame_save (frame_reg_rtx
, frame_ptr_rtx
, reg_mode
, regno
,
10852 info
->ehrd_offset
+ sp_offset
10853 + reg_size
* (int) i
,
10858 /* Save lr if we used it. */
10859 if (info
->lr_save_p
)
10861 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10862 GEN_INT (info
->lr_save_offset
+ sp_offset
));
10863 rtx reg
= gen_rtx_REG (Pmode
, 0);
10864 rtx mem
= gen_rtx_MEM (Pmode
, addr
);
10865 /* This should not be of rs6000_sr_alias_set, because of
10866 __builtin_return_address. */
10868 insn
= emit_move_insn (mem
, reg
);
10869 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10870 reg
, gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
10873 /* Save CR if we use any that must be preserved. */
10874 if (info
->cr_save_p
)
10876 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10877 GEN_INT (info
->cr_save_offset
+ sp_offset
));
10878 rtx mem
= gen_rtx_MEM (SImode
, addr
);
10880 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10882 /* If r12 was used to hold the original sp, copy cr into r0 now
10884 if (REGNO (frame_reg_rtx
) == 12)
10886 cr_save_rtx
= gen_rtx_REG (SImode
, 0);
10887 emit_insn (gen_movesi_from_cr (cr_save_rtx
));
10889 insn
= emit_move_insn (mem
, cr_save_rtx
);
10891 /* Now, there's no way that dwarf2out_frame_debug_expr is going
10892 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
10893 But that's OK. All we have to do is specify that _one_ condition
10894 code register is saved in this stack slot. The thrower's epilogue
10895 will then restore all the call-saved registers.
10896 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
10897 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10898 cr_save_rtx
, gen_rtx_REG (SImode
, CR2_REGNO
));
10901 /* Update stack and set back pointer unless this is V.4,
10902 for which it was done previously. */
10903 if (info
->push_p
&& DEFAULT_ABI
!= ABI_V4
)
10904 rs6000_emit_allocate_stack (info
->total_size
, FALSE
);
10906 /* Set frame pointer, if needed. */
10907 if (frame_pointer_needed
)
10909 insn
= emit_move_insn (gen_rtx_REG (Pmode
, FRAME_POINTER_REGNUM
),
10911 RTX_FRAME_RELATED_P (insn
) = 1;
10914 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
10915 if ((TARGET_TOC
&& TARGET_MINIMAL_TOC
&& get_pool_size () != 0)
10916 || (DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1
10917 && regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
]))
10919 /* If emit_load_toc_table will use the link register, we need to save
10920 it. We use R11 for this purpose because emit_load_toc_table
10921 can use register 0. This allows us to use a plain 'blr' to return
10922 from the procedure more often. */
10923 int save_LR_around_toc_setup
= (TARGET_ELF
10924 && DEFAULT_ABI
!= ABI_AIX
10926 && ! info
->lr_save_p
10927 && EXIT_BLOCK_PTR
->pred
!= NULL
);
10928 if (save_LR_around_toc_setup
)
10929 emit_move_insn (gen_rtx_REG (Pmode
, 11),
10930 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
10932 rs6000_emit_load_toc_table (TRUE
);
10934 if (save_LR_around_toc_setup
)
10935 emit_move_insn (gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
),
10936 gen_rtx_REG (Pmode
, 11));
10940 if (DEFAULT_ABI
== ABI_DARWIN
10941 && flag_pic
&& current_function_uses_pic_offset_table
)
10943 rtx dest
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
10944 const char *picbase
= machopic_function_base_name ();
10945 rtx src
= gen_rtx_SYMBOL_REF (Pmode
, picbase
);
10947 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest
, src
)));
10949 rs6000_maybe_dead (
10950 emit_move_insn (gen_rtx_REG (Pmode
, RS6000_PIC_OFFSET_TABLE_REGNUM
),
10951 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)));
/* NOTE(review): this region is a lossy extraction -- statements are split
   across rows, the embedded numbers are stale source line numbers, and some
   interior lines are missing.  Code left byte-identical; comments only.  */
/* Emit the textual (assembly) part of the function prologue to FILE:
   .extern directives for the out-of-line FP save/restore helpers and the
   AIX common-mode math routines, then (when no RTL prologue pattern
   exists) the prologue insns themselves via final().  */
10956 /* Write function prologue. */
10959 rs6000_output_function_prologue (file
, size
)
10961 HOST_WIDE_INT size ATTRIBUTE_UNUSED
;
10963 rs6000_stack_t
*info
= rs6000_stack_info ();
10965 if (TARGET_DEBUG_STACK
)
10966 debug_stack_info (info
);
10968 /* Write .extern for any function we will call to save and restore
10970 if (info
->first_fp_reg_save
< 64
10971 && !FP_SAVE_INLINE (info
->first_fp_reg_save
))
10972 fprintf (file
, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
10973 SAVE_FP_PREFIX
, info
->first_fp_reg_save
- 32, SAVE_FP_SUFFIX
,
10974 RESTORE_FP_PREFIX
, info
->first_fp_reg_save
- 32,
10975 RESTORE_FP_SUFFIX
);
10977 /* Write .extern for AIX common mode routines, if needed. */
10978 if (! TARGET_POWER
&& ! TARGET_POWERPC
&& ! common_mode_defined
)
10980 fputs ("\t.extern __mulh\n", file
);
10981 fputs ("\t.extern __mull\n", file
);
10982 fputs ("\t.extern __divss\n", file
);
10983 fputs ("\t.extern __divus\n", file
);
10984 fputs ("\t.extern __quoss\n", file
);
10985 fputs ("\t.extern __quous\n", file
);
/* common_mode_defined is a one-shot flag so the .externs are emitted at
   most once per object file.  */
10986 common_mode_defined
= 1;
/* No prologue insn pattern: build the prologue as RTL here and run
   final() over it so the text lands in the output now.  */
10989 if (! HAVE_prologue
)
10993 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
10994 the "toplevel" insn chain. */
10995 emit_note (0, NOTE_INSN_DELETED
);
10996 rs6000_emit_prologue ();
10997 emit_note (0, NOTE_INSN_DELETED
);
10999 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11003 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
11005 INSN_ADDRESSES_NEW (insn
, addr
);
11010 if (TARGET_DEBUG_STACK
)
11011 debug_rtx_list (get_insns (), 100);
11012 final (get_insns (), file
, FALSE
, FALSE
);
/* Each emitted prologue gets a distinct PIC label number.  */
11016 rs6000_pic_labelno
++;
/* NOTE(review): lossy extraction -- statements split across rows, stale
   line numbers embedded, some interior lines missing entirely.  Code left
   byte-identical; comments only added.
   Emits the epilogue as RTL: recovers the incoming stack pointer (via the
   backchain or a constant add), restores AltiVec regs, VRSAVE, LR, CR,
   GPRs (load-multiple or one-by-one, with SPE 64-bit handling), FPRs
   (inline or via the out-of-line _restfpr-style helper), reloads the
   EH-return data registers, and finally emits the return (or leaves it to
   a sibcall).  */
11019 /* Emit function epilogue as insns.
11021 At present, dwarf2out_frame_debug_expr doesn't understand
11022 register restores, so we don't bother setting RTX_FRAME_RELATED_P
11023 anywhere in the epilogue. Most of the insns below would in any case
11024 need special notes to explain where r11 is in relation to the stack. */
11027 rs6000_emit_epilogue (sibcall
)
11030 rs6000_stack_t
*info
;
11031 int restoring_FPRs_inline
;
11032 int using_load_multiple
;
11033 int using_mfcr_multiple
;
11034 int use_backchain_to_restore_sp
;
11036 rtx sp_reg_rtx
= gen_rtx_REG (Pmode
, 1);
11037 rtx frame_reg_rtx
= sp_reg_rtx
;
11038 enum machine_mode reg_mode
= TARGET_POWERPC64
? DImode
: SImode
;
11039 int reg_size
= TARGET_POWERPC64
? 8 : 4;
11042 info
= rs6000_stack_info ();
/* SPE 64-bit GPR saves are restored in V2SImode pairs.  */
11044 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
11046 reg_mode
= V2SImode
;
11050 using_load_multiple
= (TARGET_MULTIPLE
&& ! TARGET_POWERPC64
11051 && (!TARGET_SPE_ABI
11052 || info
->spe_64bit_regs_used
== 0)
11053 && info
->first_gp_reg_save
< 31);
11054 restoring_FPRs_inline
= (sibcall
11055 || current_function_calls_eh_return
11056 || info
->first_fp_reg_save
== 64
11057 || FP_SAVE_INLINE (info
->first_fp_reg_save
));
11058 use_backchain_to_restore_sp
= (frame_pointer_needed
11059 || current_function_calls_alloca
11060 || info
->total_size
> 32767);
/* On these processors a single mfcr feeding several mtcrf-style moves is
   preferred (see the multi-CR restore below).  */
11061 using_mfcr_multiple
= (rs6000_cpu
== PROCESSOR_PPC601
11062 || rs6000_cpu
== PROCESSOR_PPC603
11063 || rs6000_cpu
== PROCESSOR_PPC750
11066 /* If we have a frame pointer, a call to alloca, or a large stack
11067 frame, restore the old stack pointer using the backchain. Otherwise,
11068 we know what size to update it with. */
11069 if (use_backchain_to_restore_sp
)
11071 /* Under V.4, don't reset the stack pointer until after we're done
11072 loading the saved registers. */
11073 if (DEFAULT_ABI
== ABI_V4
)
11074 frame_reg_rtx
= gen_rtx_REG (Pmode
, 11);
11076 emit_move_insn (frame_reg_rtx
,
11077 gen_rtx_MEM (Pmode
, sp_reg_rtx
));
11080 else if (info
->push_p
)
11082 if (DEFAULT_ABI
== ABI_V4
)
11083 sp_offset
= info
->total_size
;
11086 emit_insn (TARGET_32BIT
11087 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
,
11088 GEN_INT (info
->total_size
))
11089 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
,
11090 GEN_INT (info
->total_size
)));
11094 /* Restore AltiVec registers if needed. */
11095 if (TARGET_ALTIVEC_ABI
&& info
->altivec_size
!= 0)
11099 for (i
= info
->first_altivec_reg_save
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
11100 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
11102 rtx addr
, areg
, mem
;
11104 areg
= gen_rtx_REG (Pmode
, 0);
11106 (areg
, GEN_INT (info
->altivec_save_offset
11108 + 16 * (i
- info
->first_altivec_reg_save
)));
11110 /* AltiVec addressing mode is [reg+reg]. */
11111 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, areg
);
11112 mem
= gen_rtx_MEM (V4SImode
, addr
);
11113 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11115 emit_move_insn (gen_rtx_REG (V4SImode
, i
), mem
);
11119 /* Restore VRSAVE if needed. */
11120 if (TARGET_ALTIVEC_ABI
&& info
->vrsave_mask
!= 0)
11122 rtx addr
, mem
, reg
;
11124 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11125 GEN_INT (info
->vrsave_save_offset
+ sp_offset
));
11126 mem
= gen_rtx_MEM (SImode
, addr
);
11127 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11128 reg
= gen_rtx_REG (SImode
, 12);
11129 emit_move_insn (reg
, mem
);
11131 emit_insn (generate_set_vrsave (reg
, info
, 1));
11134 /* Get the old lr if we saved it. */
11135 if (info
->lr_save_p
)
11137 rtx mem
= gen_frame_mem_offset (Pmode
, frame_reg_rtx
,
11138 info
->lr_save_offset
+ sp_offset
);
11140 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11142 emit_move_insn (gen_rtx_REG (Pmode
, 0), mem
);
11145 /* Get the old cr if we saved it. */
11146 if (info
->cr_save_p
)
11148 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11149 GEN_INT (info
->cr_save_offset
+ sp_offset
));
11150 rtx mem
= gen_rtx_MEM (SImode
, addr
);
11152 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11154 emit_move_insn (gen_rtx_REG (SImode
, 12), mem
);
11157 /* Set LR here to try to overlap restores below. */
11158 if (info
->lr_save_p
)
11159 emit_move_insn (gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
),
11160 gen_rtx_REG (Pmode
, 0));
11162 /* Load exception handler data registers, if needed. */
11163 if (current_function_calls_eh_return
)
11165 unsigned int i
, regno
;
11171 regno
= EH_RETURN_DATA_REGNO (i
);
11172 if (regno
== INVALID_REGNUM
)
11175 mem
= gen_frame_mem_offset (reg_mode
, frame_reg_rtx
,
11176 info
->ehrd_offset
+ sp_offset
11177 + reg_size
* (int) i
);
11178 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11180 emit_move_insn (gen_rtx_REG (reg_mode
, regno
), mem
);
11184 /* Restore GPRs. This is done as a PARALLEL if we are using
11185 the load-multiple instructions. */
11186 if (using_load_multiple
)
11189 p
= rtvec_alloc (32 - info
->first_gp_reg_save
);
11190 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
11192 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11193 GEN_INT (info
->gp_save_offset
11196 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
11198 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11201 gen_rtx_SET (VOIDmode
,
11202 gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
),
11205 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
/* Otherwise restore each live call-saved GPR individually; the PIC
   register is restored too when the ABI/pic mode requires it.  */
11208 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
11209 if ((regs_ever_live
[info
->first_gp_reg_save
+i
]
11210 && ! call_used_regs
[info
->first_gp_reg_save
+i
])
11211 || (i
+info
->first_gp_reg_save
== RS6000_PIC_OFFSET_TABLE_REGNUM
11212 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
!= 0)
11213 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))))
11215 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11216 GEN_INT (info
->gp_save_offset
11219 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
11221 /* Restore 64-bit quantities for SPE. */
11222 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
11224 int offset
= info
->spe_gp_save_offset
+ sp_offset
+ 8 * i
;
/* SPE loads only accept small constant offsets; spill the offset to a
   scratch register when it does not fit.  */
11227 if (!SPE_CONST_OFFSET_OK (offset
))
11229 b
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
11230 emit_move_insn (b
, GEN_INT (offset
));
11233 b
= GEN_INT (offset
);
11235 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, b
);
11236 mem
= gen_rtx_MEM (V2SImode
, addr
);
11239 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11241 emit_move_insn (gen_rtx_REG (reg_mode
,
11242 info
->first_gp_reg_save
+ i
), mem
);
11245 /* Restore fpr's if we need to do it without calling a function. */
11246 if (restoring_FPRs_inline
)
11247 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
11248 if ((regs_ever_live
[info
->first_fp_reg_save
+i
]
11249 && ! call_used_regs
[info
->first_fp_reg_save
+i
]))
11252 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11253 GEN_INT (info
->fp_save_offset
11256 mem
= gen_rtx_MEM (DFmode
, addr
);
11257 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11259 emit_move_insn (gen_rtx_REG (DFmode
,
11260 info
->first_fp_reg_save
+ i
),
11264 /* If we saved cr, restore it here. Just those that were used. */
11265 if (info
->cr_save_p
)
11267 rtx r12_rtx
= gen_rtx_REG (SImode
, 12);
11270 if (using_mfcr_multiple
)
11272 for (i
= 0; i
< 8; i
++)
11273 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
11279 if (using_mfcr_multiple
&& count
> 1)
11284 p
= rtvec_alloc (count
);
11287 for (i
= 0; i
< 8; i
++)
11288 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
11290 rtvec r
= rtvec_alloc (2);
11291 RTVEC_ELT (r
, 0) = r12_rtx
;
11292 RTVEC_ELT (r
, 1) = GEN_INT (1 << (7-i
));
11293 RTVEC_ELT (p
, ndx
) =
11294 gen_rtx_SET (VOIDmode
, gen_rtx_REG (CCmode
, CR0_REGNO
+i
),
11295 gen_rtx_UNSPEC (CCmode
, r
, UNSPEC_MOVESI_TO_CR
));
11298 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
11303 for (i
= 0; i
< 8; i
++)
11304 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
11306 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode
,
11312 /* If this is V.4, unwind the stack pointer after all of the loads
11313 have been done. We need to emit a block here so that sched
11314 doesn't decide to move the sp change before the register restores
11315 (which may not have any obvious dependency on the stack). This
11316 doesn't hurt performance, because there is no scheduling that can
11317 be done after this point. */
11318 if (DEFAULT_ABI
== ABI_V4
)
11320 if (frame_reg_rtx
!= sp_reg_rtx
)
11321 rs6000_emit_stack_tie ();
11323 if (use_backchain_to_restore_sp
)
11325 emit_move_insn (sp_reg_rtx
, frame_reg_rtx
);
11327 else if (sp_offset
!= 0)
11329 emit_insn (TARGET_32BIT
11330 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
,
11331 GEN_INT (sp_offset
))
11332 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
,
11333 GEN_INT (sp_offset
)));
/* eh_return: apply the dynamic stack adjustment computed by the EH
   machinery on top of the normal unwind.  */
11337 if (current_function_calls_eh_return
)
11339 rtx sa
= EH_RETURN_STACKADJ_RTX
;
11340 emit_insn (TARGET_32BIT
11341 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
, sa
)
11342 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
, sa
));
/* Build the return PARALLEL: the RETURN, a USE of LR, and -- when the
   FPRs are restored out of line -- a USE of the restore routine plus one
   SET per restored FPR.  */
11348 if (! restoring_FPRs_inline
)
11349 p
= rtvec_alloc (3 + 64 - info
->first_fp_reg_save
);
11351 p
= rtvec_alloc (2);
11353 RTVEC_ELT (p
, 0) = gen_rtx_RETURN (VOIDmode
);
11354 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
,
11355 gen_rtx_REG (Pmode
,
11356 LINK_REGISTER_REGNUM
));
11358 /* If we have to restore more than two FP registers, branch to the
11359 restore function. It will return to our caller. */
11360 if (! restoring_FPRs_inline
)
11364 const char *alloc_rname
;
11366 sprintf (rname
, "%s%d%s", RESTORE_FP_PREFIX
,
11367 info
->first_fp_reg_save
- 32, RESTORE_FP_SUFFIX
);
11368 alloc_rname
= ggc_strdup (rname
);
11369 RTVEC_ELT (p
, 2) = gen_rtx_USE (VOIDmode
,
11370 gen_rtx_SYMBOL_REF (Pmode
,
11373 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
11376 addr
= gen_rtx_PLUS (Pmode
, sp_reg_rtx
,
11377 GEN_INT (info
->fp_save_offset
+ 8*i
));
11378 mem
= gen_rtx_MEM (DFmode
, addr
);
11379 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11381 RTVEC_ELT (p
, i
+3) =
11382 gen_rtx_SET (VOIDmode
,
11383 gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
),
11388 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
/* NOTE(review): lossy extraction -- statements split across rows, stale
   line numbers embedded, some interior lines missing.  Code left
   byte-identical; comments only added.
   Writes the textual epilogue: emits the epilogue insns via final() when
   there is no RTL epilogue pattern, then on AIX emits the traceback table
   (format described in sys/debug.h) unless suppressed.  */
11392 /* Write function epilogue. */
11395 rs6000_output_function_epilogue (file
, size
)
11397 HOST_WIDE_INT size ATTRIBUTE_UNUSED
;
11399 rs6000_stack_t
*info
= rs6000_stack_info ();
11401 if (! HAVE_epilogue
)
11403 rtx insn
= get_last_insn ();
11404 /* If the last insn was a BARRIER, we don't have to write anything except
11405 the trace table. */
11406 if (GET_CODE (insn
) == NOTE
)
11407 insn
= prev_nonnote_insn (insn
);
11408 if (insn
== 0 || GET_CODE (insn
) != BARRIER
)
11410 /* This is slightly ugly, but at least we don't have two
11411 copies of the epilogue-emitting code. */
11414 /* A NOTE_INSN_DELETED is supposed to be at the start
11415 and end of the "toplevel" insn chain. */
11416 emit_note (0, NOTE_INSN_DELETED
);
11417 rs6000_emit_epilogue (FALSE
);
11418 emit_note (0, NOTE_INSN_DELETED
);
11420 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11424 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
11426 INSN_ADDRESSES_NEW (insn
, addr
);
11431 if (TARGET_DEBUG_STACK
)
11432 debug_rtx_list (get_insns (), 100);
11433 final (get_insns (), file
, FALSE
, FALSE
);
11438 /* Output a traceback table here. See /usr/include/sys/debug.h for info
11441 We don't output a traceback table if -finhibit-size-directive was
11442 used. The documentation for -finhibit-size-directive reads
11443 ``don't output a @code{.size} assembler directive, or anything
11444 else that would cause trouble if the function is split in the
11445 middle, and the two halves are placed at locations far apart in
11446 memory.'' The traceback table has this property, since it
11447 includes the offset from the start of the function to the
11448 traceback table itself.
11450 System V.4 Powerpc's (and the embedded ABI derived from it) use a
11451 different traceback table. */
11452 if (DEFAULT_ABI
== ABI_AIX
&& ! flag_inhibit_size_directive
11453 && rs6000_traceback
!= traceback_none
)
11455 const char *fname
= NULL
;
11456 const char *language_string
= lang_hooks
.name
;
11457 int fixed_parms
= 0, float_parms
= 0, parm_info
= 0;
11459 int optional_tbtab
;
/* optional_tbtab controls whether the variable-length optional fields
   are emitted: forced on by -mtraceback=full, off by =part, otherwise
   heuristic.  */
11461 if (rs6000_traceback
== traceback_full
)
11462 optional_tbtab
= 1;
11463 else if (rs6000_traceback
== traceback_part
)
11464 optional_tbtab
= 0;
11466 optional_tbtab
= !optimize_size
&& !TARGET_ELF
;
11468 if (optional_tbtab
)
11470 fname
= XSTR (XEXP (DECL_RTL (current_function_decl
), 0), 0);
11471 while (*fname
== '.') /* V.4 encodes . in the name */
11474 /* Need label immediately before tbtab, so we can compute
11475 its offset from the function start. */
11476 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LT");
11477 ASM_OUTPUT_LABEL (file
, fname
);
11480 /* The .tbtab pseudo-op can only be used for the first eight
11481 expressions, since it can't handle the possibly variable
11482 length fields that follow. However, if you omit the optional
11483 fields, the assembler outputs zeros for all optional fields
11484 anyways, giving each variable length field is minimum length
11485 (as defined in sys/debug.h). Thus we can not use the .tbtab
11486 pseudo-op at all. */
11488 /* An all-zero word flags the start of the tbtab, for debuggers
11489 that have to find it by searching forward from the entry
11490 point or from the current pc. */
11491 fputs ("\t.long 0\n", file
);
11493 /* Tbtab format type. Use format type 0. */
11494 fputs ("\t.byte 0,", file
);
11496 /* Language type. Unfortunately, there doesn't seem to be any
11497 official way to get this info, so we use language_string. C
11498 is 0. C++ is 9. No number defined for Obj-C, so use the
11499 value for C for now. There is no official value for Java,
11500 although IBM appears to be using 13. There is no official value
11501 for Chill, so we've chosen 44 pseudo-randomly. */
11502 if (! strcmp (language_string
, "GNU C")
11503 || ! strcmp (language_string
, "GNU Objective-C"))
11505 else if (! strcmp (language_string
, "GNU F77"))
11507 else if (! strcmp (language_string
, "GNU Ada"))
11509 else if (! strcmp (language_string
, "GNU Pascal"))
11511 else if (! strcmp (language_string
, "GNU C++"))
11513 else if (! strcmp (language_string
, "GNU Java"))
11515 else if (! strcmp (language_string
, "GNU CHILL"))
11519 fprintf (file
, "%d,", i
);
11521 /* 8 single bit fields: global linkage (not set for C extern linkage,
11522 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
11523 from start of procedure stored in tbtab, internal function, function
11524 has controlled storage, function has no toc, function uses fp,
11525 function logs/aborts fp operations. */
11526 /* Assume that fp operations are used if any fp reg must be saved. */
11527 fprintf (file
, "%d,",
11528 (optional_tbtab
<< 5) | ((info
->first_fp_reg_save
!= 64) << 1));
11530 /* 6 bitfields: function is interrupt handler, name present in
11531 proc table, function calls alloca, on condition directives
11532 (controls stack walks, 3 bits), saves condition reg, saves
11534 /* The `function calls alloca' bit seems to be set whenever reg 31 is
11535 set up as a frame pointer, even when there is no alloca call. */
11536 fprintf (file
, "%d,",
11537 ((optional_tbtab
<< 6)
11538 | ((optional_tbtab
& frame_pointer_needed
) << 5)
11539 | (info
->cr_save_p
<< 1)
11540 | (info
->lr_save_p
)));
11542 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
11544 fprintf (file
, "%d,",
11545 (info
->push_p
<< 7) | (64 - info
->first_fp_reg_save
));
11547 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
11548 fprintf (file
, "%d,", (32 - first_reg_to_save ()));
11550 if (optional_tbtab
)
11552 /* Compute the parameter info from the function decl argument
11555 int next_parm_info_bit
= 31;
11557 for (decl
= DECL_ARGUMENTS (current_function_decl
);
11558 decl
; decl
= TREE_CHAIN (decl
))
11560 rtx parameter
= DECL_INCOMING_RTL (decl
);
11561 enum machine_mode mode
= GET_MODE (parameter
);
11563 if (GET_CODE (parameter
) == REG
)
11565 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
)
11571 if (mode
== SFmode
)
11573 else if (mode
== DFmode
|| mode
== TFmode
)
11578 /* If only one bit will fit, don't or in this entry. */
11579 if (next_parm_info_bit
> 0)
11580 parm_info
|= (bits
<< (next_parm_info_bit
- 1));
11581 next_parm_info_bit
-= 2;
11585 fixed_parms
+= ((GET_MODE_SIZE (mode
)
11586 + (UNITS_PER_WORD
- 1))
11588 next_parm_info_bit
-= 1;
11594 /* Number of fixed point parameters. */
11595 /* This is actually the number of words of fixed point parameters; thus
11596 an 8 byte struct counts as 2; and thus the maximum value is 8. */
11597 fprintf (file
, "%d,", fixed_parms
);
11599 /* 2 bitfields: number of floating point parameters (7 bits), parameters
11601 /* This is actually the number of fp registers that hold parameters;
11602 and thus the maximum value is 13. */
11603 /* Set parameters on stack bit if parameters are not in their original
11604 registers, regardless of whether they are on the stack? Xlc
11605 seems to set the bit when not optimizing. */
11606 fprintf (file
, "%d\n", ((float_parms
<< 1) | (! optimize
)));
11608 if (! optional_tbtab
)
11611 /* Optional fields follow. Some are variable length. */
11613 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
11614 11 double float. */
11615 /* There is an entry for each parameter in a register, in the order that
11616 they occur in the parameter list. Any intervening arguments on the
11617 stack are ignored. If the list overflows a long (max possible length
11618 34 bits) then completely leave off all elements that don't fit. */
11619 /* Only emit this long if there was at least one parameter. */
11620 if (fixed_parms
|| float_parms
)
11621 fprintf (file
, "\t.long %d\n", parm_info
);
11623 /* Offset from start of code to tb table. */
11624 fputs ("\t.long ", file
);
11625 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LT");
11627 RS6000_OUTPUT_BASENAME (file
, fname
);
11629 assemble_name (file
, fname
);
11631 fputs ("-.", file
);
11633 RS6000_OUTPUT_BASENAME (file
, fname
);
11635 assemble_name (file
, fname
);
11639 /* Interrupt handler mask. */
11640 /* Omit this long, since we never set the interrupt handler bit
11643 /* Number of CTL (controlled storage) anchors. */
11644 /* Omit this long, since the has_ctl bit is never set above. */
11646 /* Displacement into stack of each CTL anchor. */
11647 /* Omit this list of longs, because there are no CTL anchors. */
11649 /* Length of function name. */
11652 fprintf (file
, "\t.short %d\n", (int) strlen (fname
));
11654 /* Function name. */
11655 assemble_string (fname
, strlen (fname
));
11657 /* Register for alloca automatic storage; this is always reg 31.
11658 Only emit this if the alloca bit was set above. */
11659 if (frame_pointer_needed
)
11660 fputs ("\t.byte 31\n", file
);
11662 fputs ("\t.align 2\n", file
);
/* NOTE(review): lossy extraction -- statements split across rows, stale
   line numbers embedded, some interior lines missing.  Code left
   byte-identical; comments only added.  */
11666 /* A C compound statement that outputs the assembler code for a thunk
11667 function, used to implement C++ virtual function calls with
11668 multiple inheritance. The thunk acts as a wrapper around a virtual
11669 function, adjusting the implicit object parameter before handing
11670 control off to the real function.
11672 First, emit code to add the integer DELTA to the location that
11673 contains the incoming first argument. Assume that this argument
11674 contains a pointer, and is the one used to pass the `this' pointer
11675 in C++. This is the incoming argument *before* the function
11676 prologue, e.g. `%o0' on a sparc. The addition must preserve the
11677 values of all other incoming arguments.
11679 After the addition, emit code to jump to FUNCTION, which is a
11680 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
11681 not touch the return address. Hence returning from FUNCTION will
11682 return to whoever called the current `thunk'.
11684 The effect must be as if FUNCTION had been called directly with the
11685 adjusted first argument. This macro is responsible for emitting
11686 all of the code for a thunk function; output_function_prologue()
11687 and output_function_epilogue() are not invoked.
11689 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
11690 been extracted from it.) It might possibly be useful on some
11691 targets, but probably not.
11693 If you do not define this macro, the target-independent code in the
11694 C++ frontend will generate a less efficient heavyweight thunk that
11695 calls FUNCTION instead of jumping to it. The generic approach does
11696 not support varargs. */
11699 rs6000_output_mi_thunk (file
, thunk_fndecl
, delta
, vcall_offset
, function
)
11701 tree thunk_fndecl ATTRIBUTE_UNUSED
;
11702 HOST_WIDE_INT delta
;
11703 HOST_WIDE_INT vcall_offset
;
11706 rtx
this, insn
, funexp
;
/* Thunks are generated post-reload with no new pseudos; restored at the
   end of the function.  */
11708 reload_completed
= 1;
11709 no_new_pseudos
= 1;
11711 /* Mark the end of the (empty) prologue. */
11712 emit_note (NULL
, NOTE_INSN_PROLOGUE_END
);
11714 /* Find the "this" pointer. If the function returns a structure,
11715 the structure return pointer is in r3. */
11716 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function
))))
11717 this = gen_rtx_REG (Pmode
, 4);
11719 this = gen_rtx_REG (Pmode
, 3);
11721 /* Apply the constant offset, if required. */
11724 rtx delta_rtx
= GEN_INT (delta
);
11725 emit_insn (TARGET_32BIT
11726 ? gen_addsi3 (this, this, delta_rtx
)
11727 : gen_adddi3 (this, this, delta_rtx
));
11730 /* Apply the offset from the vtable, if required. */
11733 rtx vcall_offset_rtx
= GEN_INT (vcall_offset
);
11734 rtx tmp
= gen_rtx_REG (Pmode
, 12);
/* tmp = *this (vptr); tmp = *(vptr + vcall_offset); this += tmp.  */
11736 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, this));
11737 emit_insn (TARGET_32BIT
11738 ? gen_addsi3 (tmp
, tmp
, vcall_offset_rtx
)
11739 : gen_adddi3 (tmp
, tmp
, vcall_offset_rtx
));
11740 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, tmp
));
11741 emit_insn (TARGET_32BIT
11742 ? gen_addsi3 (this, this, tmp
)
11743 : gen_adddi3 (this, this, tmp
));
11746 /* Generate a tail call to the target function. */
11747 if (!TREE_USED (function
))
11749 assemble_external (function
);
11750 TREE_USED (function
) = 1;
11752 funexp
= XEXP (DECL_RTL (function
), 0);
11753 SYMBOL_REF_FLAGS (funexp
) &= ~SYMBOL_FLAG_LOCAL
;
11754 funexp
= gen_rtx_MEM (FUNCTION_MODE
, funexp
);
11757 if (MACHOPIC_INDIRECT
)
11758 funexp
= machopic_indirect_call_target (funexp
);
11761 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
11762 generate sibcall RTL explicitly to avoid constraint abort. */
11763 insn
= emit_call_insn (
11764 gen_rtx_PARALLEL (VOIDmode
,
11766 gen_rtx_CALL (VOIDmode
,
11767 funexp
, const0_rtx
),
11768 gen_rtx_USE (VOIDmode
, const0_rtx
),
11769 gen_rtx_USE (VOIDmode
,
11770 gen_rtx_REG (SImode
,
11771 LINK_REGISTER_REGNUM
)),
11772 gen_rtx_RETURN (VOIDmode
))));
11773 SIBLING_CALL_P (insn
) = 1;
11776 /* Run just enough of rest_of_compilation to get the insns emitted.
11777 There's not really enough bulk here to make other passes such as
11778 instruction scheduling worth while. Note that use_thunk calls
11779 assemble_start_function and assemble_end_function. */
11780 insn
= get_insns ();
11781 shorten_branches (insn
);
11782 final_start_function (insn
, file
, 1);
11783 final (insn
, file
, 1, 0);
11784 final_end_function ();
11786 reload_completed
= 0;
11787 no_new_pseudos
= 0;
11790 /* A quick summary of the various types of 'constant-pool tables'
11793 Target Flags Name One table per
11794 AIX (none) AIX TOC object file
11795 AIX -mfull-toc AIX TOC object file
11796 AIX -mminimal-toc AIX minimal TOC translation unit
11797 SVR4/EABI (none) SVR4 SDATA object file
11798 SVR4/EABI -fpic SVR4 pic object file
11799 SVR4/EABI -fPIC SVR4 PIC translation unit
11800 SVR4/EABI -mrelocatable EABI TOC function
11801 SVR4/EABI -maix AIX TOC object file
11802 SVR4/EABI -maix -mminimal-toc
11803 AIX minimal TOC translation unit
11805 Name Reg. Set by entries contains:
11806 made by addrs? fp? sum?
11808 AIX TOC 2 crt0 as Y option option
11809 AIX minimal TOC 30 prolog gcc Y Y option
11810 SVR4 SDATA 13 crt0 gcc N Y N
11811 SVR4 pic 30 prolog ld Y not yet N
11812 SVR4 PIC 30 prolog gcc Y option option
11813 EABI TOC 30 prolog gcc Y option option
/* NOTE(review): lossy extraction -- statements split across rows, stale
   line numbers embedded, some interior lines missing.  Code left
   byte-identical; comments only added.
   Hash an rtx constant K for the TOC hash table: seeds with (code, mode),
   then folds in each operand per the rtx format string -- strings by
   character, sub-rtx operands recursively, ints and wide ints by value.  */
11817 /* Hash functions for the hash table. */
11820 rs6000_hash_constant (k
)
11823 enum rtx_code code
= GET_CODE (k
);
11824 enum machine_mode mode
= GET_MODE (k
);
11825 unsigned result
= (code
<< 3) ^ mode
;
11826 const char *format
;
11829 format
= GET_RTX_FORMAT (code
);
11830 flen
= strlen (format
);
/* Labels hash on the insn UID of their target.  */
11836 return result
* 1231 + (unsigned) INSN_UID (XEXP (k
, 0));
/* Non-VOIDmode CONST_DOUBLEs are real constants; hash the REAL_VALUE.  */
11839 if (mode
!= VOIDmode
)
11840 return real_hash (CONST_DOUBLE_REAL_VALUE (k
)) * result
;
11852 for (; fidx
< flen
; fidx
++)
11853 switch (format
[fidx
])
11858 const char *str
= XSTR (k
, fidx
);
11859 len
= strlen (str
);
11860 result
= result
* 613 + len
;
11861 for (i
= 0; i
< len
; i
++)
11862 result
= result
* 613 + (unsigned) str
[i
];
11867 result
= result
* 1231 + rs6000_hash_constant (XEXP (k
, fidx
));
11871 result
= result
* 613 + (unsigned) XINT (k
, fidx
);
/* Wide ints wider than `unsigned' are folded in word-sized chunks.  */
11874 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT
))
11875 result
= result
* 613 + (unsigned) XWINT (k
, fidx
);
11879 for (i
= 0; i
< sizeof(HOST_WIDE_INT
)/sizeof(unsigned); i
++)
11880 result
= result
* 613 + (unsigned) (XWINT (k
, fidx
)
/* NOTE(review): lossy extraction; code left byte-identical, comments only.
   htab hash callback for the TOC table: combine the hash of the entry's
   key rtx with its machine mode so equal rtxes in different modes land in
   different buckets.  */
11894 toc_hash_function (hash_entry
)
11895 const void * hash_entry
;
11897 const struct toc_hash_struct
*thc
=
11898 (const struct toc_hash_struct
*) hash_entry
;
11899 return rs6000_hash_constant (thc
->key
) ^ thc
->key_mode
;
/* NOTE(review): lossy extraction; code left byte-identical, comments only.
   htab equality callback: two TOC entries match only when both the key
   mode and the key rtx (via rtx_equal_p) agree.  */
11902 /* Compare H1 and H2 for equivalence. */
11905 toc_hash_eq (h1
, h2
)
11909 rtx r1
= ((const struct toc_hash_struct
*) h1
)->key
;
11910 rtx r2
= ((const struct toc_hash_struct
*) h2
)->key
;
/* Different modes can never be equal, regardless of the rtx.  */
11912 if (((const struct toc_hash_struct
*) h1
)->key_mode
11913 != ((const struct toc_hash_struct
*) h2
)->key_mode
)
11916 return rtx_equal_p (r1
, r2
);
/* NOTE(review): lossy extraction; code left byte-identical, comments only.
   VTABLE_NAME_P matches the C++ front-end's vtable-ish name prefixes
   (old-ABI "_vt." and Itanium-ABI mangled _ZTV/_ZTT/_ZTC); note it tests
   the surrounding variable `name', not its NAME argument -- so it is only
   usable where a `name' local is in scope, as below.  */
11919 /* These are the names given by the C++ front-end to vtables, and
11920 vtable-like objects. Ideally, this logic should not be here;
11921 instead, there should be some programmatic way of inquiring as
11922 to whether or not an object is a vtable. */
11924 #define VTABLE_NAME_P(NAME) \
11925 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
11926 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
11927 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
11928 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
/* Output a reference to symbol X: vtables are emitted by basename,
   everything else through assemble_name.  */
11931 rs6000_output_symbol_ref (file
, x
)
11935 /* Currently C++ toc references to vtables can be emitted before it
11936 is decided whether the vtable is public or private. If this is
11937 the case, then the linker will eventually complain that there is
11938 a reference to an unknown section. Thus, for vtables only,
11939 we emit the TOC reference to reference the symbol and not the
11941 const char *name
= XSTR (x
, 0);
11943 if (VTABLE_NAME_P (name
))
11945 RS6000_OUTPUT_BASENAME (file
, name
);
11948 assemble_name (file
, name
);
11951 /* Output a TOC entry. We derive the entry name from what is being
11955 output_toc (file
, x
, labelno
, mode
)
11959 enum machine_mode mode
;
11962 const char *name
= buf
;
11963 const char *real_name
;
11970 /* When the linker won't eliminate them, don't output duplicate
11971 TOC entries (this happens on AIX if there is any kind of TOC,
11972 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
11974 if (TARGET_TOC
&& GET_CODE (x
) != LABEL_REF
)
11976 struct toc_hash_struct
*h
;
11979 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
11980 time because GGC is not initialised at that point. */
11981 if (toc_hash_table
== NULL
)
11982 toc_hash_table
= htab_create_ggc (1021, toc_hash_function
,
11983 toc_hash_eq
, NULL
);
11985 h
= ggc_alloc (sizeof (*h
));
11987 h
->key_mode
= mode
;
11988 h
->labelno
= labelno
;
11990 found
= htab_find_slot (toc_hash_table
, h
, 1);
11991 if (*found
== NULL
)
11993 else /* This is indeed a duplicate.
11994 Set this label equal to that label. */
11996 fputs ("\t.set ", file
);
11997 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LC");
11998 fprintf (file
, "%d,", labelno
);
11999 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LC");
12000 fprintf (file
, "%d\n", ((*(const struct toc_hash_struct
**)
12006 /* If we're going to put a double constant in the TOC, make sure it's
12007 aligned properly when strict alignment is on. */
12008 if (GET_CODE (x
) == CONST_DOUBLE
12009 && STRICT_ALIGNMENT
12010 && GET_MODE_BITSIZE (mode
) >= 64
12011 && ! (TARGET_NO_FP_IN_TOC
&& ! TARGET_MINIMAL_TOC
)) {
12012 ASM_OUTPUT_ALIGN (file
, 3);
12015 (*targetm
.asm_out
.internal_label
) (file
, "LC", labelno
);
12017 /* Handle FP constants specially. Note that if we have a minimal
12018 TOC, things we put here aren't actually in the TOC, so we can allow
12020 if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == TFmode
)
12022 REAL_VALUE_TYPE rv
;
12025 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
12026 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv
, k
);
12030 if (TARGET_MINIMAL_TOC
)
12031 fputs (DOUBLE_INT_ASM_OP
, file
);
12033 fprintf (file
, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
12034 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
12035 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
12036 fprintf (file
, "0x%lx%08lx,0x%lx%08lx\n",
12037 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
12038 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
12043 if (TARGET_MINIMAL_TOC
)
12044 fputs ("\t.long ", file
);
12046 fprintf (file
, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
12047 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
12048 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
12049 fprintf (file
, "0x%lx,0x%lx,0x%lx,0x%lx\n",
12050 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
12051 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
12055 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == DFmode
)
12057 REAL_VALUE_TYPE rv
;
12060 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
12061 REAL_VALUE_TO_TARGET_DOUBLE (rv
, k
);
12065 if (TARGET_MINIMAL_TOC
)
12066 fputs (DOUBLE_INT_ASM_OP
, file
);
12068 fprintf (file
, "\t.tc FD_%lx_%lx[TC],",
12069 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
12070 fprintf (file
, "0x%lx%08lx\n",
12071 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
12076 if (TARGET_MINIMAL_TOC
)
12077 fputs ("\t.long ", file
);
12079 fprintf (file
, "\t.tc FD_%lx_%lx[TC],",
12080 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
12081 fprintf (file
, "0x%lx,0x%lx\n",
12082 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
12086 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == SFmode
)
12088 REAL_VALUE_TYPE rv
;
12091 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
12092 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
12096 if (TARGET_MINIMAL_TOC
)
12097 fputs (DOUBLE_INT_ASM_OP
, file
);
12099 fprintf (file
, "\t.tc FS_%lx[TC],", l
& 0xffffffff);
12100 fprintf (file
, "0x%lx00000000\n", l
& 0xffffffff);
12105 if (TARGET_MINIMAL_TOC
)
12106 fputs ("\t.long ", file
);
12108 fprintf (file
, "\t.tc FS_%lx[TC],", l
& 0xffffffff);
12109 fprintf (file
, "0x%lx\n", l
& 0xffffffff);
12113 else if (GET_MODE (x
) == VOIDmode
12114 && (GET_CODE (x
) == CONST_INT
|| GET_CODE (x
) == CONST_DOUBLE
))
12116 unsigned HOST_WIDE_INT low
;
12117 HOST_WIDE_INT high
;
12119 if (GET_CODE (x
) == CONST_DOUBLE
)
12121 low
= CONST_DOUBLE_LOW (x
);
12122 high
= CONST_DOUBLE_HIGH (x
);
12125 #if HOST_BITS_PER_WIDE_INT == 32
12128 high
= (low
& 0x80000000) ? ~0 : 0;
12132 low
= INTVAL (x
) & 0xffffffff;
12133 high
= (HOST_WIDE_INT
) INTVAL (x
) >> 32;
12137 /* TOC entries are always Pmode-sized, but since this
12138 is a bigendian machine then if we're putting smaller
12139 integer constants in the TOC we have to pad them.
12140 (This is still a win over putting the constants in
12141 a separate constant pool, because then we'd have
12142 to have both a TOC entry _and_ the actual constant.)
12144 For a 32-bit target, CONST_INT values are loaded and shifted
12145 entirely within `low' and can be stored in one TOC entry. */
12147 if (TARGET_64BIT
&& POINTER_SIZE
< GET_MODE_BITSIZE (mode
))
12148 abort ();/* It would be easy to make this work, but it doesn't now. */
12150 if (POINTER_SIZE
> GET_MODE_BITSIZE (mode
))
12152 #if HOST_BITS_PER_WIDE_INT == 32
12153 lshift_double (low
, high
, POINTER_SIZE
- GET_MODE_BITSIZE (mode
),
12154 POINTER_SIZE
, &low
, &high
, 0);
12157 low
<<= POINTER_SIZE
- GET_MODE_BITSIZE (mode
);
12158 high
= (HOST_WIDE_INT
) low
>> 32;
12165 if (TARGET_MINIMAL_TOC
)
12166 fputs (DOUBLE_INT_ASM_OP
, file
);
12168 fprintf (file
, "\t.tc ID_%lx_%lx[TC],",
12169 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
12170 fprintf (file
, "0x%lx%08lx\n",
12171 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
12176 if (POINTER_SIZE
< GET_MODE_BITSIZE (mode
))
12178 if (TARGET_MINIMAL_TOC
)
12179 fputs ("\t.long ", file
);
12181 fprintf (file
, "\t.tc ID_%lx_%lx[TC],",
12182 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
12183 fprintf (file
, "0x%lx,0x%lx\n",
12184 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
12188 if (TARGET_MINIMAL_TOC
)
12189 fputs ("\t.long ", file
);
12191 fprintf (file
, "\t.tc IS_%lx[TC],", (long) low
& 0xffffffff);
12192 fprintf (file
, "0x%lx\n", (long) low
& 0xffffffff);
12198 if (GET_CODE (x
) == CONST
)
12200 if (GET_CODE (XEXP (x
, 0)) != PLUS
)
12203 base
= XEXP (XEXP (x
, 0), 0);
12204 offset
= INTVAL (XEXP (XEXP (x
, 0), 1));
12207 if (GET_CODE (base
) == SYMBOL_REF
)
12208 name
= XSTR (base
, 0);
12209 else if (GET_CODE (base
) == LABEL_REF
)
12210 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (XEXP (base
, 0)));
12211 else if (GET_CODE (base
) == CODE_LABEL
)
12212 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (base
));
12216 real_name
= (*targetm
.strip_name_encoding
) (name
);
12217 if (TARGET_MINIMAL_TOC
)
12218 fputs (TARGET_32BIT
? "\t.long " : DOUBLE_INT_ASM_OP
, file
);
12221 fprintf (file
, "\t.tc %s", real_name
);
12224 fprintf (file
, ".N%d", - offset
);
12226 fprintf (file
, ".P%d", offset
);
12228 fputs ("[TC],", file
);
12231 /* Currently C++ toc references to vtables can be emitted before it
12232 is decided whether the vtable is public or private. If this is
12233 the case, then the linker will eventually complain that there is
12234 a TOC reference to an unknown section. Thus, for vtables only,
12235 we emit the TOC reference to reference the symbol and not the
12237 if (VTABLE_NAME_P (name
))
12239 RS6000_OUTPUT_BASENAME (file
, name
);
12241 fprintf (file
, "%d", offset
);
12242 else if (offset
> 0)
12243 fprintf (file
, "+%d", offset
);
12246 output_addr_const (file
, x
);
12250 /* Output an assembler pseudo-op to write an ASCII string of N characters
12251 starting at P to FILE.
12253 On the RS/6000, we have to do this using the .byte operation and
12254 write out special characters outside the quoted string.
12255 Also, the assembler is broken; very long strings are truncated,
12256 so we must artificially break them up early. */
12259 output_ascii (file
, p
, n
)
12265 int i
, count_string
;
12266 const char *for_string
= "\t.byte \"";
12267 const char *for_decimal
= "\t.byte ";
12268 const char *to_close
= NULL
;
12271 for (i
= 0; i
< n
; i
++)
12274 if (c
>= ' ' && c
< 0177)
12277 fputs (for_string
, file
);
12280 /* Write two quotes to get one. */
12288 for_decimal
= "\"\n\t.byte ";
12292 if (count_string
>= 512)
12294 fputs (to_close
, file
);
12296 for_string
= "\t.byte \"";
12297 for_decimal
= "\t.byte ";
12305 fputs (for_decimal
, file
);
12306 fprintf (file
, "%d", c
);
12308 for_string
= "\n\t.byte \"";
12309 for_decimal
= ", ";
12315 /* Now close the string if we have written one. Then end the line. */
12317 fputs (to_close
, file
);
12320 /* Generate a unique section name for FILENAME for a section type
12321 represented by SECTION_DESC. Output goes into BUF.
12323 SECTION_DESC can be any string, as long as it is different for each
12324 possible section type.
12326 We name the section in the same manner as xlc. The name begins with an
12327 underscore followed by the filename (after stripping any leading directory
12328 names) with the last period replaced by the string SECTION_DESC. If
12329 FILENAME does not contain a period, SECTION_DESC is appended to the end of
12333 rs6000_gen_section_name (buf
, filename
, section_desc
)
12335 const char *filename
;
12336 const char *section_desc
;
12338 const char *q
, *after_last_slash
, *last_period
= 0;
12342 after_last_slash
= filename
;
12343 for (q
= filename
; *q
; q
++)
12346 after_last_slash
= q
+ 1;
12347 else if (*q
== '.')
12351 len
= strlen (after_last_slash
) + strlen (section_desc
) + 2;
12352 *buf
= (char *) xmalloc (len
);
12357 for (q
= after_last_slash
; *q
; q
++)
12359 if (q
== last_period
)
12361 strcpy (p
, section_desc
);
12362 p
+= strlen (section_desc
);
12366 else if (ISALNUM (*q
))
12370 if (last_period
== 0)
12371 strcpy (p
, section_desc
);
12376 /* Emit profile function. */
12379 output_profile_hook (labelno
)
12380 int labelno ATTRIBUTE_UNUSED
;
12382 if (TARGET_PROFILE_KERNEL
)
12385 if (DEFAULT_ABI
== ABI_AIX
)
12387 #ifdef NO_PROFILE_COUNTERS
12388 emit_library_call (init_one_libfunc (RS6000_MCOUNT
), 0, VOIDmode
, 0);
12391 const char *label_name
;
12394 ASM_GENERATE_INTERNAL_LABEL (buf
, "LP", labelno
);
12395 label_name
= (*targetm
.strip_name_encoding
) (ggc_strdup (buf
));
12396 fun
= gen_rtx_SYMBOL_REF (Pmode
, label_name
);
12398 emit_library_call (init_one_libfunc (RS6000_MCOUNT
), 0, VOIDmode
, 1,
12402 else if (DEFAULT_ABI
== ABI_DARWIN
)
12404 const char *mcount_name
= RS6000_MCOUNT
;
12405 int caller_addr_regno
= LINK_REGISTER_REGNUM
;
12407 /* Be conservative and always set this, at least for now. */
12408 current_function_uses_pic_offset_table
= 1;
12411 /* For PIC code, set up a stub and collect the caller's address
12412 from r0, which is where the prologue puts it. */
12413 if (MACHOPIC_INDIRECT
)
12415 mcount_name
= machopic_stub_name (mcount_name
);
12416 if (current_function_uses_pic_offset_table
)
12417 caller_addr_regno
= 0;
12420 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, mcount_name
),
12422 gen_rtx_REG (Pmode
, caller_addr_regno
), Pmode
);
12426 /* Write function profiler code. */
12429 output_function_profiler (file
, labelno
)
12436 switch (DEFAULT_ABI
)
12445 warning ("no profiling of 64-bit code for this ABI");
12448 ASM_GENERATE_INTERNAL_LABEL (buf
, "LP", labelno
);
12449 fprintf (file
, "\tmflr %s\n", reg_names
[0]);
12452 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file
);
12453 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12454 reg_names
[0], save_lr
, reg_names
[1]);
12455 asm_fprintf (file
, "\tmflr %s\n", reg_names
[12]);
12456 asm_fprintf (file
, "\t{l|lwz} %s,", reg_names
[0]);
12457 assemble_name (file
, buf
);
12458 asm_fprintf (file
, "@got(%s)\n", reg_names
[12]);
12460 else if (flag_pic
> 1)
12462 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12463 reg_names
[0], save_lr
, reg_names
[1]);
12464 /* Now, we need to get the address of the label. */
12465 fputs ("\tbl 1f\n\t.long ", file
);
12466 assemble_name (file
, buf
);
12467 fputs ("-.\n1:", file
);
12468 asm_fprintf (file
, "\tmflr %s\n", reg_names
[11]);
12469 asm_fprintf (file
, "\t{l|lwz} %s,0(%s)\n",
12470 reg_names
[0], reg_names
[11]);
12471 asm_fprintf (file
, "\t{cax|add} %s,%s,%s\n",
12472 reg_names
[0], reg_names
[0], reg_names
[11]);
12476 asm_fprintf (file
, "\t{liu|lis} %s,", reg_names
[12]);
12477 assemble_name (file
, buf
);
12478 fputs ("@ha\n", file
);
12479 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12480 reg_names
[0], save_lr
, reg_names
[1]);
12481 asm_fprintf (file
, "\t{cal|la} %s,", reg_names
[0]);
12482 assemble_name (file
, buf
);
12483 asm_fprintf (file
, "@l(%s)\n", reg_names
[12]);
12486 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
12487 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
12492 if (!TARGET_PROFILE_KERNEL
)
12494 /* Don't do anything, done in output_profile_hook (). */
12501 asm_fprintf (file
, "\tmflr %s\n", reg_names
[0]);
12502 asm_fprintf (file
, "\tstd %s,16(%s)\n", reg_names
[0], reg_names
[1]);
12504 if (current_function_needs_context
)
12506 asm_fprintf (file
, "\tstd %s,24(%s)\n",
12507 reg_names
[STATIC_CHAIN_REGNUM
], reg_names
[1]);
12508 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
12509 asm_fprintf (file
, "\tld %s,24(%s)\n",
12510 reg_names
[STATIC_CHAIN_REGNUM
], reg_names
[1]);
12513 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
12521 rs6000_use_dfa_pipeline_interface ()
12526 /* Power4 load update and store update instructions are cracked into a
12527 load or store and an integer insn which are executed in the same cycle.
12528 Branches have their own dispatch slot which does not count against the
12529 GCC issue rate, but it changes the program flow so there are no other
12530 instructions to issue in this cycle. */
12533 rs6000_variable_issue (stream
, verbose
, insn
, more
)
12534 FILE *stream ATTRIBUTE_UNUSED
;
12535 int verbose ATTRIBUTE_UNUSED
;
12539 if (GET_CODE (PATTERN (insn
)) == USE
12540 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
12543 if (rs6000_cpu
== PROCESSOR_POWER4
)
12545 enum attr_type type
= get_attr_type (insn
);
12546 if (type
== TYPE_LOAD_EXT_U
|| type
== TYPE_LOAD_EXT_UX
12547 || type
== TYPE_LOAD_UX
|| type
== TYPE_STORE_UX
)
12549 else if (type
== TYPE_LOAD_U
|| type
== TYPE_STORE_U
12550 || type
== TYPE_FPLOAD_U
|| type
== TYPE_FPSTORE_U
12551 || type
== TYPE_FPLOAD_UX
|| type
== TYPE_FPSTORE_UX
12552 || type
== TYPE_LOAD_EXT
|| type
== TYPE_DELAYED_CR
12553 || type
== TYPE_COMPARE
|| type
== TYPE_DELAYED_COMPARE
12554 || type
== TYPE_IMUL_COMPARE
|| type
== TYPE_LMUL_COMPARE
12555 || type
== TYPE_IDIV
|| type
== TYPE_LDIV
)
12556 return more
> 2 ? more
- 2 : 0;
12562 /* Adjust the cost of a scheduling dependency. Return the new cost of
12563 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
12566 rs6000_adjust_cost (insn
, link
, dep_insn
, cost
)
12569 rtx dep_insn ATTRIBUTE_UNUSED
;
12572 if (! recog_memoized (insn
))
12575 if (REG_NOTE_KIND (link
) != 0)
12578 if (REG_NOTE_KIND (link
) == 0)
12580 /* Data dependency; DEP_INSN writes a register that INSN reads
12581 some cycles later. */
12582 switch (get_attr_type (insn
))
12585 /* Tell the first scheduling pass about the latency between
12586 a mtctr and bctr (and mtlr and br/blr). The first
12587 scheduling pass will not know about this latency since
12588 the mtctr instruction, which has the latency associated
12589 to it, will be generated by reload. */
12590 return TARGET_POWER
? 5 : 4;
12592 /* Leave some extra cycles between a compare and its
12593 dependent branch, to inhibit expensive mispredicts. */
12594 if ((rs6000_cpu_attr
== CPU_PPC603
12595 || rs6000_cpu_attr
== CPU_PPC604
12596 || rs6000_cpu_attr
== CPU_PPC604E
12597 || rs6000_cpu_attr
== CPU_PPC620
12598 || rs6000_cpu_attr
== CPU_PPC630
12599 || rs6000_cpu_attr
== CPU_PPC750
12600 || rs6000_cpu_attr
== CPU_PPC7400
12601 || rs6000_cpu_attr
== CPU_PPC7450
12602 || rs6000_cpu_attr
== CPU_POWER4
)
12603 && recog_memoized (dep_insn
)
12604 && (INSN_CODE (dep_insn
) >= 0)
12605 && (get_attr_type (dep_insn
) == TYPE_CMP
12606 || get_attr_type (dep_insn
) == TYPE_COMPARE
12607 || get_attr_type (dep_insn
) == TYPE_DELAYED_COMPARE
12608 || get_attr_type (dep_insn
) == TYPE_IMUL_COMPARE
12609 || get_attr_type (dep_insn
) == TYPE_LMUL_COMPARE
12610 || get_attr_type (dep_insn
) == TYPE_FPCOMPARE
12611 || get_attr_type (dep_insn
) == TYPE_CR_LOGICAL
12612 || get_attr_type (dep_insn
) == TYPE_DELAYED_CR
))
12617 /* Fall out to return default cost. */
12623 /* A C statement (sans semicolon) to update the integer scheduling
12624 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
12625 INSN earlier, increase the priority to execute INSN later. Do not
12626 define this macro if you do not need to adjust the scheduling
12627 priorities of insns. */
12630 rs6000_adjust_priority (insn
, priority
)
12631 rtx insn ATTRIBUTE_UNUSED
;
12634 /* On machines (like the 750) which have asymmetric integer units,
12635 where one integer unit can do multiply and divides and the other
12636 can't, reduce the priority of multiply/divide so it is scheduled
12637 before other integer operations. */
12640 if (! INSN_P (insn
))
12643 if (GET_CODE (PATTERN (insn
)) == USE
)
12646 switch (rs6000_cpu_attr
) {
12648 switch (get_attr_type (insn
))
12655 fprintf (stderr
, "priority was %#x (%d) before adjustment\n",
12656 priority
, priority
);
12657 if (priority
>= 0 && priority
< 0x01000000)
12667 /* Return how many instructions the machine can issue per cycle. */
12670 rs6000_issue_rate ()
12672 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
12673 if (!reload_completed
)
12676 switch (rs6000_cpu_attr
) {
12677 case CPU_RIOS1
: /* ? */
12679 case CPU_PPC601
: /* ? */
12700 /* Return how many instructions to look ahead for better insn
12704 rs6000_use_sched_lookahead ()
12706 if (rs6000_cpu_attr
== CPU_PPC8540
)
12712 /* Length in units of the trampoline for entering a nested function. */
12715 rs6000_trampoline_size ()
12719 switch (DEFAULT_ABI
)
12725 ret
= (TARGET_32BIT
) ? 12 : 24;
12730 ret
= (TARGET_32BIT
) ? 40 : 48;
12737 /* Emit RTL insns to initialize the variable parts of a trampoline.
12738 FNADDR is an RTX for the address of the function's pure code.
12739 CXT is an RTX for the static chain value for the function. */
12742 rs6000_initialize_trampoline (addr
, fnaddr
, cxt
)
12747 enum machine_mode pmode
= Pmode
;
12748 int regsize
= (TARGET_32BIT
) ? 4 : 8;
12749 rtx ctx_reg
= force_reg (pmode
, cxt
);
12751 switch (DEFAULT_ABI
)
12756 /* Macros to shorten the code expansions below. */
12757 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
12758 #define MEM_PLUS(addr,offset) \
12759 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
12761 /* Under AIX, just build the 3 word function descriptor */
12764 rtx fn_reg
= gen_reg_rtx (pmode
);
12765 rtx toc_reg
= gen_reg_rtx (pmode
);
12766 emit_move_insn (fn_reg
, MEM_DEREF (fnaddr
));
12767 emit_move_insn (toc_reg
, MEM_PLUS (fnaddr
, regsize
));
12768 emit_move_insn (MEM_DEREF (addr
), fn_reg
);
12769 emit_move_insn (MEM_PLUS (addr
, regsize
), toc_reg
);
12770 emit_move_insn (MEM_PLUS (addr
, 2*regsize
), ctx_reg
);
12774 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
12777 emit_library_call (gen_rtx_SYMBOL_REF (SImode
, "__trampoline_setup"),
12778 FALSE
, VOIDmode
, 4,
12780 GEN_INT (rs6000_trampoline_size ()), SImode
,
12790 /* Table of valid machine attributes. */
12792 const struct attribute_spec rs6000_attribute_table
[] =
12794 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
12795 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute
},
12796 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute
},
12797 { NULL
, 0, 0, false, false, false, NULL
}
12800 /* Handle a "longcall" or "shortcall" attribute; arguments as in
12801 struct attribute_spec.handler. */
12804 rs6000_handle_longcall_attribute (node
, name
, args
, flags
, no_add_attrs
)
12807 tree args ATTRIBUTE_UNUSED
;
12808 int flags ATTRIBUTE_UNUSED
;
12809 bool *no_add_attrs
;
12811 if (TREE_CODE (*node
) != FUNCTION_TYPE
12812 && TREE_CODE (*node
) != FIELD_DECL
12813 && TREE_CODE (*node
) != TYPE_DECL
)
12815 warning ("`%s' attribute only applies to functions",
12816 IDENTIFIER_POINTER (name
));
12817 *no_add_attrs
= true;
12823 /* Set longcall attributes on all functions declared when
12824 rs6000_default_long_calls is true. */
12826 rs6000_set_default_type_attributes (type
)
12829 if (rs6000_default_long_calls
12830 && (TREE_CODE (type
) == FUNCTION_TYPE
12831 || TREE_CODE (type
) == METHOD_TYPE
))
12832 TYPE_ATTRIBUTES (type
) = tree_cons (get_identifier ("longcall"),
12834 TYPE_ATTRIBUTES (type
));
12837 /* Return a reference suitable for calling a function with the
12838 longcall attribute. */
12841 rs6000_longcall_ref (call_ref
)
12844 const char *call_name
;
12847 if (GET_CODE (call_ref
) != SYMBOL_REF
)
12850 /* System V adds '.' to the internal name, so skip them. */
12851 call_name
= XSTR (call_ref
, 0);
12852 if (*call_name
== '.')
12854 while (*call_name
== '.')
12857 node
= get_identifier (call_name
);
12858 call_ref
= gen_rtx_SYMBOL_REF (VOIDmode
, IDENTIFIER_POINTER (node
));
12861 return force_reg (Pmode
, call_ref
);
12865 #ifdef USING_ELFOS_H
12867 /* A C statement or statements to switch to the appropriate section
12868 for output of RTX in mode MODE. You can assume that RTX is some
12869 kind of constant in RTL. The argument MODE is redundant except in
12870 the case of a `const_int' rtx. Select the section by calling
12871 `text_section' or one of the alternatives for other sections.
12873 Do not define this macro if you put all constants in the read-only
12877 rs6000_elf_select_rtx_section (mode
, x
, align
)
12878 enum machine_mode mode
;
12880 unsigned HOST_WIDE_INT align
;
12882 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x
, mode
))
12885 default_elf_select_rtx_section (mode
, x
, align
);
12888 /* A C statement or statements to switch to the appropriate
12889 section for output of DECL. DECL is either a `VAR_DECL' node
12890 or a constant of some sort. RELOC indicates whether forming
12891 the initial value of DECL requires link-time relocations. */
12894 rs6000_elf_select_section (decl
, reloc
, align
)
12897 unsigned HOST_WIDE_INT align
;
12899 /* Pretend that we're always building for a shared library when
12900 ABI_AIX, because otherwise we end up with dynamic relocations
12901 in read-only sections. This happens for function pointers,
12902 references to vtables in typeinfo, and probably other cases. */
12903 default_elf_select_section_1 (decl
, reloc
, align
,
12904 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
12907 /* A C statement to build up a unique section name, expressed as a
12908 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
12909 RELOC indicates whether the initial value of EXP requires
12910 link-time relocations. If you do not define this macro, GCC will use
12911 the symbol name prefixed by `.' as the section name. Note - this
12912 macro can now be called for uninitialized data items as well as
12913 initialized data and functions. */
12916 rs6000_elf_unique_section (decl
, reloc
)
12920 /* As above, pretend that we're always building for a shared library
12921 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
12922 default_unique_section_1 (decl
, reloc
,
12923 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
12926 /* For a SYMBOL_REF, set generic flags and then perform some
12927 target-specific processing.
12929 Set SYMBOL_FLAG_SMALL_V4 for an operand in small memory on V.4/eabi;
12930 this is different from the generic SYMBOL_FLAG_SMALL.
12932 When the AIX ABI is requested on a non-AIX system, replace the
12933 function name with the real name (with a leading .) rather than the
12934 function descriptor name. This saves a lot of overriding code to
12935 read the prefixes. */
12938 rs6000_elf_encode_section_info (decl
, rtl
, first
)
12943 default_encode_section_info (decl
, rtl
, first
);
12946 && TREE_CODE (decl
) == FUNCTION_DECL
12948 && DEFAULT_ABI
== ABI_AIX
)
12950 rtx sym_ref
= XEXP (rtl
, 0);
12951 size_t len
= strlen (XSTR (sym_ref
, 0));
12952 char *str
= alloca (len
+ 2);
12954 memcpy (str
+ 1, XSTR (sym_ref
, 0), len
+ 1);
12955 XSTR (sym_ref
, 0) = ggc_alloc_string (str
, len
+ 1);
12957 else if (rs6000_sdata
!= SDATA_NONE
12958 && DEFAULT_ABI
== ABI_V4
12959 && TREE_CODE (decl
) == VAR_DECL
)
12961 rtx sym_ref
= XEXP (rtl
, 0);
12962 int size
= int_size_in_bytes (TREE_TYPE (decl
));
12963 tree section_name
= DECL_SECTION_NAME (decl
);
12964 const char *name
= (char *)0;
12968 if (TREE_CODE (section_name
) == STRING_CST
)
12969 name
= TREE_STRING_POINTER (section_name
);
12975 ? (strcmp (name
, ".sdata") == 0
12976 || strcmp (name
, ".sdata2") == 0
12977 || strcmp (name
, ".sbss") == 0
12978 || strcmp (name
, ".sbss2") == 0
12979 || strcmp (name
, ".PPC.EMB.sdata0") == 0
12980 || strcmp (name
, ".PPC.EMB.sbss0") == 0)
12981 : (size
> 0 && size
<= g_switch_value
))
12982 SYMBOL_REF_FLAGS (sym_ref
) |= SYMBOL_FLAG_SMALL_V4
;
12987 rs6000_elf_in_small_data_p (decl
)
12990 if (rs6000_sdata
== SDATA_NONE
)
12993 if (TREE_CODE (decl
) == VAR_DECL
&& DECL_SECTION_NAME (decl
))
12995 const char *section
= TREE_STRING_POINTER (DECL_SECTION_NAME (decl
));
12996 if (strcmp (section
, ".sdata") == 0
12997 || strcmp (section
, ".sdata2") == 0
12998 || strcmp (section
, ".sbss") == 0)
13003 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (decl
));
13006 && size
<= g_switch_value
13007 && (rs6000_sdata
!= SDATA_DATA
|| TREE_PUBLIC (decl
)))
13014 #endif /* USING_ELFOS_H */
13017 /* Return a REG that occurs in ADDR with coefficient 1.
13018 ADDR can be effectively incremented by incrementing REG.
13020 r0 is special and we must not select it as an address
13021 register by this routine since our caller will try to
13022 increment the returned register via an "la" instruction. */
13025 find_addr_reg (addr
)
13028 while (GET_CODE (addr
) == PLUS
)
13030 if (GET_CODE (XEXP (addr
, 0)) == REG
13031 && REGNO (XEXP (addr
, 0)) != 0)
13032 addr
= XEXP (addr
, 0);
13033 else if (GET_CODE (XEXP (addr
, 1)) == REG
13034 && REGNO (XEXP (addr
, 1)) != 0)
13035 addr
= XEXP (addr
, 1);
13036 else if (CONSTANT_P (XEXP (addr
, 0)))
13037 addr
= XEXP (addr
, 1);
13038 else if (CONSTANT_P (XEXP (addr
, 1)))
13039 addr
= XEXP (addr
, 0);
13043 if (GET_CODE (addr
) == REG
&& REGNO (addr
) != 0)
13049 rs6000_fatal_bad_address (op
)
13052 fatal_insn ("bad address", op
);
13058 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
13059 reference and a constant. */
13062 symbolic_operand (op
)
13065 switch (GET_CODE (op
))
13072 return (GET_CODE (op
) == SYMBOL_REF
||
13073 (GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
13074 || GET_CODE (XEXP (op
, 0)) == LABEL_REF
)
13075 && GET_CODE (XEXP (op
, 1)) == CONST_INT
);
13082 #ifdef RS6000_LONG_BRANCH
13084 static tree stub_list
= 0;
13086 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
13087 procedure calls to the linked list. */
13090 add_compiler_stub (label_name
, function_name
, line_number
)
13092 tree function_name
;
13095 tree stub
= build_tree_list (function_name
, label_name
);
13096 TREE_TYPE (stub
) = build_int_2 (line_number
, 0);
13097 TREE_CHAIN (stub
) = stub_list
;
13101 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
13102 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
13103 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
13105 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
13106 handling procedure calls from the linked list and initializes the
13110 output_compiler_stub ()
13113 char label_buf
[256];
13117 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
13119 fprintf (asm_out_file
,
13120 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub
)));
13122 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13123 if (write_symbols
== DBX_DEBUG
|| write_symbols
== XCOFF_DEBUG
)
13124 fprintf (asm_out_file
, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub
));
13125 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13127 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
))[0] == '*')
13129 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
))+1);
13132 label_buf
[0] = '_';
13133 strcpy (label_buf
+1,
13134 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
)));
13137 strcpy (tmp_buf
, "lis r12,hi16(");
13138 strcat (tmp_buf
, label_buf
);
13139 strcat (tmp_buf
, ")\n\tori r12,r12,lo16(");
13140 strcat (tmp_buf
, label_buf
);
13141 strcat (tmp_buf
, ")\n\tmtctr r12\n\tbctr");
13142 output_asm_insn (tmp_buf
, 0);
13144 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13145 if (write_symbols
== DBX_DEBUG
|| write_symbols
== XCOFF_DEBUG
)
13146 fprintf(asm_out_file
, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub
));
13147 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13153 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
13154 already there or not. */
13157 no_previous_def (function_name
)
13158 tree function_name
;
13161 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
13162 if (function_name
== STUB_FUNCTION_NAME (stub
))
13167 /* GET_PREV_LABEL gets the label name from the previous definition of
13171 get_prev_label (function_name
)
13172 tree function_name
;
13175 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
13176 if (function_name
== STUB_FUNCTION_NAME (stub
))
13177 return STUB_LABEL_NAME (stub
);
13181 /* INSN is either a function call or a millicode call. It may have an
13182 unconditional jump in its delay slot.
13184 CALL_DEST is the routine we are calling. */
13187 output_call (insn
, call_dest
, operand_number
)
13190 int operand_number
;
13192 static char buf
[256];
13193 if (GET_CODE (call_dest
) == SYMBOL_REF
&& TARGET_LONG_BRANCH
&& !flag_pic
)
13196 tree funname
= get_identifier (XSTR (call_dest
, 0));
13198 if (no_previous_def (funname
))
13200 int line_number
= 0;
13201 rtx label_rtx
= gen_label_rtx ();
13202 char *label_buf
, temp_buf
[256];
13203 ASM_GENERATE_INTERNAL_LABEL (temp_buf
, "L",
13204 CODE_LABEL_NUMBER (label_rtx
));
13205 label_buf
= temp_buf
[0] == '*' ? temp_buf
+ 1 : temp_buf
;
13206 labelname
= get_identifier (label_buf
);
13207 for (; insn
&& GET_CODE (insn
) != NOTE
; insn
= PREV_INSN (insn
));
13209 line_number
= NOTE_LINE_NUMBER (insn
);
13210 add_compiler_stub (labelname
, funname
, line_number
);
13213 labelname
= get_prev_label (funname
);
13215 sprintf (buf
, "jbsr %%z%d,%.246s",
13216 operand_number
, IDENTIFIER_POINTER (labelname
));
13221 sprintf (buf
, "bl %%z%d", operand_number
);
13226 #endif /* RS6000_LONG_BRANCH */
13228 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
13230 const char *const symbol_ = (SYMBOL); \
13231 char *buffer_ = (BUF); \
13232 if (symbol_[0] == '"') \
13234 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
13236 else if (name_needs_quotes(symbol_)) \
13238 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
13242 sprintf(buffer_, "L%d$%s", (N), symbol_); \
13247 /* Generate PIC and indirect symbol stubs. */
13250 machopic_output_stub (file
, symb
, stub
)
13252 const char *symb
, *stub
;
13254 unsigned int length
;
13255 char *symbol_name
, *lazy_ptr_name
;
13256 char *local_label_0
;
13257 static int label
= 0;
13259 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
13260 symb
= (*targetm
.strip_name_encoding
) (symb
);
13264 length
= strlen (symb
);
13265 symbol_name
= alloca (length
+ 32);
13266 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name
, symb
, length
);
13268 lazy_ptr_name
= alloca (length
+ 32);
13269 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name
, symb
, length
);
13271 local_label_0
= alloca (length
+ 32);
13272 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0
, symb
, length
, 0);
13275 machopic_picsymbol_stub1_section ();
13277 machopic_symbol_stub1_section ();
13278 fprintf (file
, "\t.align 2\n");
13280 fprintf (file
, "%s:\n", stub
);
13281 fprintf (file
, "\t.indirect_symbol %s\n", symbol_name
);
13285 fprintf (file
, "\tmflr r0\n");
13286 fprintf (file
, "\tbcl 20,31,%s\n", local_label_0
);
13287 fprintf (file
, "%s:\n\tmflr r11\n", local_label_0
);
13288 fprintf (file
, "\taddis r11,r11,ha16(%s-%s)\n",
13289 lazy_ptr_name
, local_label_0
);
13290 fprintf (file
, "\tmtlr r0\n");
13291 fprintf (file
, "\tlwzu r12,lo16(%s-%s)(r11)\n",
13292 lazy_ptr_name
, local_label_0
);
13293 fprintf (file
, "\tmtctr r12\n");
13294 fprintf (file
, "\tbctr\n");
13298 fprintf (file
, "\tlis r11,ha16(%s)\n", lazy_ptr_name
);
13299 fprintf (file
, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name
);
13300 fprintf (file
, "\tmtctr r12\n");
13301 fprintf (file
, "\tbctr\n");
13304 machopic_lazy_symbol_ptr_section ();
13305 fprintf (file
, "%s:\n", lazy_ptr_name
);
13306 fprintf (file
, "\t.indirect_symbol %s\n", symbol_name
);
13307 fprintf (file
, "\t.long dyld_stub_binding_helper\n");
13310 /* Legitimize PIC addresses. If the address is already
13311 position-independent, we return ORIG. Newly generated
13312 position-independent addresses go into a reg. This is REG if non
13313 zero, otherwise we allocate register(s) as necessary. */
13315 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
13318 rs6000_machopic_legitimize_pic_address (orig
, mode
, reg
)
13320 enum machine_mode mode
;
13325 if (reg
== NULL
&& ! reload_in_progress
&& ! reload_completed
)
13326 reg
= gen_reg_rtx (Pmode
);
13328 if (GET_CODE (orig
) == CONST
)
13330 if (GET_CODE (XEXP (orig
, 0)) == PLUS
13331 && XEXP (XEXP (orig
, 0), 0) == pic_offset_table_rtx
)
13334 if (GET_CODE (XEXP (orig
, 0)) == PLUS
)
13337 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig
, 0), 0),
13340 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig
, 0), 1),
13346 if (GET_CODE (offset
) == CONST_INT
)
13348 if (SMALL_INT (offset
))
13349 return plus_constant (base
, INTVAL (offset
));
13350 else if (! reload_in_progress
&& ! reload_completed
)
13351 offset
= force_reg (Pmode
, offset
);
13354 rtx mem
= force_const_mem (Pmode
, orig
);
13355 return machopic_legitimize_pic_address (mem
, Pmode
, reg
);
13358 return gen_rtx (PLUS
, Pmode
, base
, offset
);
13361 /* Fall back on generic machopic code. */
13362 return machopic_legitimize_pic_address (orig
, mode
, reg
);
13365 /* This is just a placeholder to make linking work without having to
13366 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
13367 ever needed for Darwin (not too likely!) this would have to get a
13368 real definition. */
13375 #endif /* TARGET_MACHO */
13378 static unsigned int
13379 rs6000_elf_section_type_flags (decl
, name
, reloc
)
13385 = default_section_type_flags_1 (decl
, name
, reloc
,
13386 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
13388 if (TARGET_RELOCATABLE
)
13389 flags
|= SECTION_WRITE
;
13394 /* Record an element in the table of global constructors. SYMBOL is
13395 a SYMBOL_REF of the function to be called; PRIORITY is a number
13396 between 0 and MAX_INIT_PRIORITY.
13398 This differs from default_named_section_asm_out_constructor in
13399 that we have special handling for -mrelocatable. */
13402 rs6000_elf_asm_out_constructor (symbol
, priority
)
13406 const char *section
= ".ctors";
13409 if (priority
!= DEFAULT_INIT_PRIORITY
)
13411 sprintf (buf
, ".ctors.%.5u",
13412 /* Invert the numbering so the linker puts us in the proper
13413 order; constructors are run from right to left, and the
13414 linker sorts in increasing order. */
13415 MAX_INIT_PRIORITY
- priority
);
13419 named_section_flags (section
, SECTION_WRITE
);
13420 assemble_align (POINTER_SIZE
);
13422 if (TARGET_RELOCATABLE
)
13424 fputs ("\t.long (", asm_out_file
);
13425 output_addr_const (asm_out_file
, symbol
);
13426 fputs (")@fixup\n", asm_out_file
);
13429 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
13433 rs6000_elf_asm_out_destructor (symbol
, priority
)
13437 const char *section
= ".dtors";
13440 if (priority
!= DEFAULT_INIT_PRIORITY
)
13442 sprintf (buf
, ".dtors.%.5u",
13443 /* Invert the numbering so the linker puts us in the proper
13444 order; constructors are run from right to left, and the
13445 linker sorts in increasing order. */
13446 MAX_INIT_PRIORITY
- priority
);
13450 named_section_flags (section
, SECTION_WRITE
);
13451 assemble_align (POINTER_SIZE
);
13453 if (TARGET_RELOCATABLE
)
13455 fputs ("\t.long (", asm_out_file
);
13456 output_addr_const (asm_out_file
, symbol
);
13457 fputs (")@fixup\n", asm_out_file
);
13460 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
13466 rs6000_xcoff_asm_globalize_label (stream
, name
)
13470 fputs (GLOBAL_ASM_OP
, stream
);
13471 RS6000_OUTPUT_BASENAME (stream
, name
);
13472 putc ('\n', stream
);
13476 rs6000_xcoff_asm_named_section (name
, flags
)
13478 unsigned int flags
;
13481 static const char * const suffix
[3] = { "PR", "RO", "RW" };
13483 if (flags
& SECTION_CODE
)
13485 else if (flags
& SECTION_WRITE
)
13490 fprintf (asm_out_file
, "\t.csect %s%s[%s],%u\n",
13491 (flags
& SECTION_CODE
) ? "." : "",
13492 name
, suffix
[smclass
], flags
& SECTION_ENTSIZE
);
13496 rs6000_xcoff_select_section (decl
, reloc
, align
)
13499 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
;
13501 if (decl_readonly_section_1 (decl
, reloc
, 1))
13503 if (TREE_PUBLIC (decl
))
13504 read_only_data_section ();
13506 read_only_private_data_section ();
13510 if (TREE_PUBLIC (decl
))
13513 private_data_section ();
13518 rs6000_xcoff_unique_section (decl
, reloc
)
13520 int reloc ATTRIBUTE_UNUSED
;
13524 /* Use select_section for private and uninitialized data. */
13525 if (!TREE_PUBLIC (decl
)
13526 || DECL_COMMON (decl
)
13527 || DECL_INITIAL (decl
) == NULL_TREE
13528 || DECL_INITIAL (decl
) == error_mark_node
13529 || (flag_zero_initialized_in_bss
13530 && initializer_zerop (DECL_INITIAL (decl
))))
13533 name
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
));
13534 name
= (*targetm
.strip_name_encoding
) (name
);
13535 DECL_SECTION_NAME (decl
) = build_string (strlen (name
), name
);
13538 /* Select section for constant in constant pool.
13540 On RS/6000, all constants are in the private read-only data area.
13541 However, if this is being placed in the TOC it must be output as a
13545 rs6000_xcoff_select_rtx_section (mode
, x
, align
)
13546 enum machine_mode mode
;
13548 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
;
13550 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x
, mode
))
13553 read_only_private_data_section ();
/* Remove any trailing [DS] or the like from the symbol name.  */

static const char *
rs6000_xcoff_strip_name_encoding (name)
     const char *name;
{
  size_t len;

  /* Skip a leading '*', the generic "no further encoding" marker.  */
  if (*name == '*')
    name++;
  len = strlen (name);
  /* A trailing ']' means a 4-character XCOFF mapping-class suffix such
     as "[DS]"; return a copy without it.  */
  if (name[len - 1] == ']')
    return ggc_alloc_string (name, len - 4);
  else
    return name;
}
13572 /* Section attributes. AIX is always PIC. */
13574 static unsigned int
13575 rs6000_xcoff_section_type_flags (decl
, name
, reloc
)
13580 unsigned int align
;
13581 unsigned int flags
= default_section_type_flags_1 (decl
, name
, reloc
, 1);
13583 /* Align to at least UNIT size. */
13584 if (flags
& SECTION_CODE
)
13585 align
= MIN_UNITS_PER_WORD
;
13587 /* Increase alignment of large objects if not already stricter. */
13588 align
= MAX ((DECL_ALIGN (decl
) / BITS_PER_UNIT
),
13589 int_size_in_bytes (TREE_TYPE (decl
)) > MIN_UNITS_PER_WORD
13590 ? UNITS_PER_FP_WORD
: MIN_UNITS_PER_WORD
);
13592 return flags
| (exact_log2 (align
) & SECTION_ENTSIZE
);
13594 #endif /* TARGET_XCOFF */
13597 /* Cross-module name binding. Darwin does not support overriding
13598 functions at dynamic-link time. */
13601 rs6000_binds_local_p (decl
)
13604 return default_binds_local_p_1 (decl
, 0);
13608 /* Compute a (partial) cost for rtx X. Return true if the complete
13609 cost has been computed, and false if subexpressions should be
13610 scanned. In either case, *TOTAL contains the cost result. */
13613 rs6000_rtx_costs (x
, code
, outer_code
, total
)
13615 int code
, outer_code ATTRIBUTE_UNUSED
;
13620 /* On the RS/6000, if it is valid in the insn, it is free.
13621 So this always returns 0. */
13632 *total
= ((GET_CODE (XEXP (x
, 1)) == CONST_INT
13633 && ((unsigned HOST_WIDE_INT
) (INTVAL (XEXP (x
, 1))
13634 + 0x8000) >= 0x10000)
13635 && ((INTVAL (XEXP (x
, 1)) & 0xffff) != 0))
13636 ? COSTS_N_INSNS (2)
13637 : COSTS_N_INSNS (1));
13643 *total
= ((GET_CODE (XEXP (x
, 1)) == CONST_INT
13644 && (INTVAL (XEXP (x
, 1)) & (~ (HOST_WIDE_INT
) 0xffff)) != 0
13645 && ((INTVAL (XEXP (x
, 1)) & 0xffff) != 0))
13646 ? COSTS_N_INSNS (2)
13647 : COSTS_N_INSNS (1));
13653 *total
= COSTS_N_INSNS (2);
13656 switch (rs6000_cpu
)
13658 case PROCESSOR_RIOS1
:
13659 case PROCESSOR_PPC405
:
13660 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
13661 ? COSTS_N_INSNS (5)
13662 : (INTVAL (XEXP (x
, 1)) >= -256
13663 && INTVAL (XEXP (x
, 1)) <= 255)
13664 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
13667 case PROCESSOR_RS64A
:
13668 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
13669 ? GET_MODE (XEXP (x
, 1)) != DImode
13670 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
13671 : (INTVAL (XEXP (x
, 1)) >= -256
13672 && INTVAL (XEXP (x
, 1)) <= 255)
13673 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
13676 case PROCESSOR_RIOS2
:
13677 case PROCESSOR_MPCCORE
:
13678 case PROCESSOR_PPC604e
:
13679 *total
= COSTS_N_INSNS (2);
13682 case PROCESSOR_PPC601
:
13683 *total
= COSTS_N_INSNS (5);
13686 case PROCESSOR_PPC603
:
13687 case PROCESSOR_PPC7400
:
13688 case PROCESSOR_PPC750
:
13689 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
13690 ? COSTS_N_INSNS (5)
13691 : (INTVAL (XEXP (x
, 1)) >= -256
13692 && INTVAL (XEXP (x
, 1)) <= 255)
13693 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
13696 case PROCESSOR_PPC7450
:
13697 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
13698 ? COSTS_N_INSNS (4)
13699 : COSTS_N_INSNS (3));
13702 case PROCESSOR_PPC403
:
13703 case PROCESSOR_PPC604
:
13704 case PROCESSOR_PPC8540
:
13705 *total
= COSTS_N_INSNS (4);
13708 case PROCESSOR_PPC620
:
13709 case PROCESSOR_PPC630
:
13710 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
13711 ? GET_MODE (XEXP (x
, 1)) != DImode
13712 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
13713 : (INTVAL (XEXP (x
, 1)) >= -256
13714 && INTVAL (XEXP (x
, 1)) <= 255)
13715 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
13718 case PROCESSOR_POWER4
:
13719 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
13720 ? GET_MODE (XEXP (x
, 1)) != DImode
13721 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
13722 : COSTS_N_INSNS (2));
13731 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
13732 && exact_log2 (INTVAL (XEXP (x
, 1))) >= 0)
13734 *total
= COSTS_N_INSNS (2);
13741 switch (rs6000_cpu
)
13743 case PROCESSOR_RIOS1
:
13744 *total
= COSTS_N_INSNS (19);
13747 case PROCESSOR_RIOS2
:
13748 *total
= COSTS_N_INSNS (13);
13751 case PROCESSOR_RS64A
:
13752 *total
= (GET_MODE (XEXP (x
, 1)) != DImode
13753 ? COSTS_N_INSNS (65)
13754 : COSTS_N_INSNS (67));
13757 case PROCESSOR_MPCCORE
:
13758 *total
= COSTS_N_INSNS (6);
13761 case PROCESSOR_PPC403
:
13762 *total
= COSTS_N_INSNS (33);
13765 case PROCESSOR_PPC405
:
13766 *total
= COSTS_N_INSNS (35);
13769 case PROCESSOR_PPC601
:
13770 *total
= COSTS_N_INSNS (36);
13773 case PROCESSOR_PPC603
:
13774 *total
= COSTS_N_INSNS (37);
13777 case PROCESSOR_PPC604
:
13778 case PROCESSOR_PPC604e
:
13779 *total
= COSTS_N_INSNS (20);
13782 case PROCESSOR_PPC620
:
13783 case PROCESSOR_PPC630
:
13784 *total
= (GET_MODE (XEXP (x
, 1)) != DImode
13785 ? COSTS_N_INSNS (21)
13786 : COSTS_N_INSNS (37));
13789 case PROCESSOR_PPC750
:
13790 case PROCESSOR_PPC8540
:
13791 case PROCESSOR_PPC7400
:
13792 *total
= COSTS_N_INSNS (19);
13795 case PROCESSOR_PPC7450
:
13796 *total
= COSTS_N_INSNS (23);
13799 case PROCESSOR_POWER4
:
13800 *total
= (GET_MODE (XEXP (x
, 1)) != DImode
13801 ? COSTS_N_INSNS (18)
13802 : COSTS_N_INSNS (34));
13810 *total
= COSTS_N_INSNS (4);
13814 /* MEM should be slightly more expensive than (plus (reg) (const)) */
13823 /* A C expression returning the cost of moving data from a register of class
13824 CLASS1 to one of CLASS2. */
13827 rs6000_register_move_cost (mode
, from
, to
)
13828 enum machine_mode mode
;
13829 enum reg_class from
, to
;
13831 /* Moves from/to GENERAL_REGS. */
13832 if (reg_classes_intersect_p (to
, GENERAL_REGS
)
13833 || reg_classes_intersect_p (from
, GENERAL_REGS
))
13835 if (! reg_classes_intersect_p (to
, GENERAL_REGS
))
13838 if (from
== FLOAT_REGS
|| from
== ALTIVEC_REGS
)
13839 return (rs6000_memory_move_cost (mode
, from
, 0)
13840 + rs6000_memory_move_cost (mode
, GENERAL_REGS
, 0));
13842 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift...*/
13843 else if (from
== CR_REGS
)
13847 /* A move will cost one instruction per GPR moved. */
13848 return 2 * HARD_REGNO_NREGS (0, mode
);
13851 /* Moving between two similar registers is just one instruction. */
13852 else if (reg_classes_intersect_p (to
, from
))
13853 return mode
== TFmode
? 4 : 2;
13855 /* Everything else has to go through GENERAL_REGS. */
13857 return (rs6000_register_move_cost (mode
, GENERAL_REGS
, to
)
13858 + rs6000_register_move_cost (mode
, from
, GENERAL_REGS
));
13861 /* A C expressions returning the cost of moving data of MODE from a register to
13865 rs6000_memory_move_cost (mode
, class, in
)
13866 enum machine_mode mode
;
13867 enum reg_class
class;
13868 int in ATTRIBUTE_UNUSED
;
13870 if (reg_classes_intersect_p (class, GENERAL_REGS
))
13871 return 4 * HARD_REGNO_NREGS (0, mode
);
13872 else if (reg_classes_intersect_p (class, FLOAT_REGS
))
13873 return 4 * HARD_REGNO_NREGS (32, mode
);
13874 else if (reg_classes_intersect_p (class, ALTIVEC_REGS
))
13875 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO
, mode
);
13877 return 4 + rs6000_register_move_cost (mode
, class, GENERAL_REGS
);
13880 /* Return true if TYPE is of type __ev64_opaque__. */
13883 is_ev64_opaque_type (type
)
13887 && (type
== opaque_V2SI_type_node
13888 || type
== opaque_V2SF_type_node
13889 || (TREE_CODE (type
) == VECTOR_TYPE
13890 && TYPE_NAME (type
)
13891 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
13892 && DECL_NAME (TYPE_NAME (type
))
13893 && strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type
))),
13894 "__ev64_opaque__") == 0)));
13898 rs6000_dwarf_register_span (reg
)
13903 if (!TARGET_SPE
|| !SPE_VECTOR_MODE (GET_MODE (reg
)))
13906 regno
= REGNO (reg
);
13908 /* The duality of the SPE register size wreaks all kinds of havoc.
13909 This is a way of distinguishing r0 in 32-bits from r0 in
13912 gen_rtx_PARALLEL (VOIDmode
,
13915 gen_rtx_REG (SImode
, regno
+ 1200),
13916 gen_rtx_REG (SImode
, regno
))
13918 gen_rtx_REG (SImode
, regno
),
13919 gen_rtx_REG (SImode
, regno
+ 1200)));
13922 #include "gt-rs6000.h"