1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
54 #ifndef TARGET_NO_PROTOTYPE
55 #define TARGET_NO_PROTOTYPE 0
/* Function-like minimum/maximum macros.  NOTE(review): each argument is
   evaluated twice, so operands with side effects (e.g. i++) are unsafe.  */
58 #define min(A,B) ((A) < (B) ? (A) : (B))
59 #define max(A,B) ((A) > (B) ? (A) : (B))
63 enum processor_type rs6000_cpu
;
64 struct rs6000_cpu_select rs6000_select
[3] =
66 /* switch name, tune arch */
67 { (const char *)0, "--with-cpu=", 1, 1 },
68 { (const char *)0, "-mcpu=", 1, 1 },
69 { (const char *)0, "-mtune=", 1, 0 },
72 /* Size of long double */
73 const char *rs6000_long_double_size_string
;
74 int rs6000_long_double_type_size
;
76 /* Whether -mabi=altivec has appeared */
77 int rs6000_altivec_abi
;
79 /* Whether VRSAVE instructions should be generated. */
80 int rs6000_altivec_vrsave
;
82 /* String from -mvrsave= option. */
83 const char *rs6000_altivec_vrsave_string
;
85 /* Nonzero if we want SPE ABI extensions. */
88 /* Whether isel instructions should be generated. */
91 /* Nonzero if we have FPRs. */
94 /* String from -misel=. */
95 const char *rs6000_isel_string
;
97 /* Set to nonzero once AIX common-mode calls have been defined. */
98 static GTY(()) int common_mode_defined
;
100 /* Private copy of original value of flag_pic for ABI_AIX. */
101 static int rs6000_flag_pic
;
103 /* Save information from a "cmpxx" operation until the branch or scc is
105 rtx rs6000_compare_op0
, rs6000_compare_op1
;
106 int rs6000_compare_fp_p
;
108 /* Label number of label created for -mrelocatable, to call to so we can
109 get the address of the GOT section */
110 int rs6000_pic_labelno
;
113 /* Which abi to adhere to */
114 const char *rs6000_abi_name
= RS6000_ABI_NAME
;
116 /* Semantics of the small data area */
117 enum rs6000_sdata_type rs6000_sdata
= SDATA_DATA
;
119 /* Which small data model to use */
120 const char *rs6000_sdata_name
= (char *)0;
122 /* Counter for labels which are to be placed in .fixup. */
123 int fixuplabelno
= 0;
126 /* ABI enumeration available for subtarget to use. */
127 enum rs6000_abi rs6000_current_abi
;
129 /* ABI string from -mabi= option. */
130 const char *rs6000_abi_string
;
133 const char *rs6000_debug_name
;
134 int rs6000_debug_stack
; /* debug stack applications */
135 int rs6000_debug_arg
; /* debug argument handling */
137 const char *rs6000_traceback_name
;
139 traceback_default
= 0,
145 /* Flag to say the TOC is initialized */
147 char toc_label_name
[10];
149 /* Alias set for saves and restores from the rs6000 stack. */
150 static int rs6000_sr_alias_set
;
152 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
153 The only place that looks at this is rs6000_set_default_type_attributes;
154 everywhere else should rely on the presence or absence of a longcall
155 attribute on the function declaration. */
156 int rs6000_default_long_calls
;
157 const char *rs6000_longcall_switch
;
159 struct builtin_description
161 /* mask is not const because we're going to alter it below. This
162 nonsense will go away when we rewrite the -march infrastructure
163 to give us more target flag bits. */
165 const enum insn_code icode
;
166 const char *const name
;
167 const enum rs6000_builtins code
;
170 static bool rs6000_function_ok_for_sibcall
PARAMS ((tree
, tree
));
171 static int num_insns_constant_wide
PARAMS ((HOST_WIDE_INT
));
172 static void validate_condition_mode
173 PARAMS ((enum rtx_code
, enum machine_mode
));
174 static rtx rs6000_generate_compare
PARAMS ((enum rtx_code
));
175 static void rs6000_maybe_dead
PARAMS ((rtx
));
176 static void rs6000_emit_stack_tie
PARAMS ((void));
177 static void rs6000_frame_related
PARAMS ((rtx
, rtx
, HOST_WIDE_INT
, rtx
, rtx
));
178 static void emit_frame_save
PARAMS ((rtx
, rtx
, enum machine_mode
,
179 unsigned int, int, int));
180 static rtx gen_frame_mem_offset
PARAMS ((enum machine_mode
, rtx
, int));
181 static void rs6000_emit_allocate_stack
PARAMS ((HOST_WIDE_INT
, int));
182 static unsigned rs6000_hash_constant
PARAMS ((rtx
));
183 static unsigned toc_hash_function
PARAMS ((const void *));
184 static int toc_hash_eq
PARAMS ((const void *, const void *));
185 static int constant_pool_expr_1
PARAMS ((rtx
, int *, int *));
186 static struct machine_function
* rs6000_init_machine_status
PARAMS ((void));
187 static bool rs6000_assemble_integer
PARAMS ((rtx
, unsigned int, int));
188 #ifdef HAVE_GAS_HIDDEN
189 static void rs6000_assemble_visibility
PARAMS ((tree
, int));
191 static int rs6000_ra_ever_killed
PARAMS ((void));
192 static tree rs6000_handle_longcall_attribute
PARAMS ((tree
*, tree
, tree
, int, bool *));
193 const struct attribute_spec rs6000_attribute_table
[];
194 static void rs6000_set_default_type_attributes
PARAMS ((tree
));
195 static void rs6000_output_function_prologue
PARAMS ((FILE *, HOST_WIDE_INT
));
196 static void rs6000_output_function_epilogue
PARAMS ((FILE *, HOST_WIDE_INT
));
197 static void rs6000_output_mi_thunk
PARAMS ((FILE *, tree
, HOST_WIDE_INT
,
198 HOST_WIDE_INT
, tree
));
199 static rtx rs6000_emit_set_long_const
PARAMS ((rtx
,
200 HOST_WIDE_INT
, HOST_WIDE_INT
));
202 static unsigned int rs6000_elf_section_type_flags
PARAMS ((tree
, const char *,
204 static void rs6000_elf_asm_out_constructor
PARAMS ((rtx
, int));
205 static void rs6000_elf_asm_out_destructor
PARAMS ((rtx
, int));
206 static void rs6000_elf_select_section
PARAMS ((tree
, int,
207 unsigned HOST_WIDE_INT
));
208 static void rs6000_elf_unique_section
PARAMS ((tree
, int));
209 static void rs6000_elf_select_rtx_section
PARAMS ((enum machine_mode
, rtx
,
210 unsigned HOST_WIDE_INT
));
211 static void rs6000_elf_encode_section_info
PARAMS ((tree
, int))
213 static const char *rs6000_elf_strip_name_encoding
PARAMS ((const char *));
214 static bool rs6000_elf_in_small_data_p
PARAMS ((tree
));
217 static void rs6000_xcoff_asm_globalize_label
PARAMS ((FILE *, const char *));
218 static void rs6000_xcoff_asm_named_section
PARAMS ((const char *, unsigned int));
219 static void rs6000_xcoff_select_section
PARAMS ((tree
, int,
220 unsigned HOST_WIDE_INT
));
221 static void rs6000_xcoff_unique_section
PARAMS ((tree
, int));
222 static void rs6000_xcoff_select_rtx_section
PARAMS ((enum machine_mode
, rtx
,
223 unsigned HOST_WIDE_INT
));
224 static const char * rs6000_xcoff_strip_name_encoding
PARAMS ((const char *));
225 static unsigned int rs6000_xcoff_section_type_flags
PARAMS ((tree
, const char *, int));
227 static void rs6000_xcoff_encode_section_info
PARAMS ((tree
, int))
229 static bool rs6000_binds_local_p
PARAMS ((tree
));
230 static int rs6000_use_dfa_pipeline_interface
PARAMS ((void));
231 static int rs6000_multipass_dfa_lookahead
PARAMS ((void));
232 static int rs6000_variable_issue
PARAMS ((FILE *, int, rtx
, int));
233 static bool rs6000_rtx_costs
PARAMS ((rtx
, int, int, int *));
234 static int rs6000_adjust_cost
PARAMS ((rtx
, rtx
, rtx
, int));
235 static int rs6000_adjust_priority
PARAMS ((rtx
, int));
236 static int rs6000_issue_rate
PARAMS ((void));
238 static void rs6000_init_builtins
PARAMS ((void));
239 static rtx rs6000_expand_unop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
240 static rtx rs6000_expand_binop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
241 static rtx rs6000_expand_ternop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
242 static rtx rs6000_expand_builtin
PARAMS ((tree
, rtx
, rtx
, enum machine_mode
, int));
243 static void altivec_init_builtins
PARAMS ((void));
244 static void rs6000_common_init_builtins
PARAMS ((void));
246 static void enable_mask_for_builtins
PARAMS ((struct builtin_description
*,
247 int, enum rs6000_builtins
,
248 enum rs6000_builtins
));
249 static void spe_init_builtins
PARAMS ((void));
250 static rtx spe_expand_builtin
PARAMS ((tree
, rtx
, bool *));
251 static rtx spe_expand_predicate_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
252 static rtx spe_expand_evsel_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
253 static int rs6000_emit_int_cmove
PARAMS ((rtx
, rtx
, rtx
, rtx
));
255 static rtx altivec_expand_builtin
PARAMS ((tree
, rtx
, bool *));
256 static rtx altivec_expand_ld_builtin
PARAMS ((tree
, rtx
, bool *));
257 static rtx altivec_expand_st_builtin
PARAMS ((tree
, rtx
, bool *));
258 static rtx altivec_expand_dst_builtin
PARAMS ((tree
, rtx
, bool *));
259 static rtx altivec_expand_abs_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
260 static rtx altivec_expand_predicate_builtin
PARAMS ((enum insn_code
, const char *, tree
, rtx
));
261 static rtx altivec_expand_stv_builtin
PARAMS ((enum insn_code
, tree
));
262 static void rs6000_parse_abi_options
PARAMS ((void));
263 static void rs6000_parse_vrsave_option
PARAMS ((void));
264 static void rs6000_parse_isel_option
PARAMS ((void));
265 static int first_altivec_reg_to_save
PARAMS ((void));
266 static unsigned int compute_vrsave_mask
PARAMS ((void));
267 static void is_altivec_return_reg
PARAMS ((rtx
, void *));
268 static rtx generate_set_vrsave
PARAMS ((rtx
, rs6000_stack_t
*, int));
269 static void altivec_frame_fixup
PARAMS ((rtx
, rtx
, HOST_WIDE_INT
));
270 static int easy_vector_constant
PARAMS ((rtx
));
271 static bool is_ev64_opaque_type
PARAMS ((tree
));
273 /* Hash table stuff for keeping track of TOC entries. */
275 struct toc_hash_struct
GTY(())
277 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
278 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
280 enum machine_mode key_mode
;
284 static GTY ((param_is (struct toc_hash_struct
))) htab_t toc_hash_table
;
286 /* Default register names. */
287 char rs6000_reg_names
[][8] =
289 "0", "1", "2", "3", "4", "5", "6", "7",
290 "8", "9", "10", "11", "12", "13", "14", "15",
291 "16", "17", "18", "19", "20", "21", "22", "23",
292 "24", "25", "26", "27", "28", "29", "30", "31",
293 "0", "1", "2", "3", "4", "5", "6", "7",
294 "8", "9", "10", "11", "12", "13", "14", "15",
295 "16", "17", "18", "19", "20", "21", "22", "23",
296 "24", "25", "26", "27", "28", "29", "30", "31",
297 "mq", "lr", "ctr","ap",
298 "0", "1", "2", "3", "4", "5", "6", "7",
300 /* AltiVec registers. */
301 "0", "1", "2", "3", "4", "5", "6", "7",
302 "8", "9", "10", "11", "12", "13", "14", "15",
303 "16", "17", "18", "19", "20", "21", "22", "23",
304 "24", "25", "26", "27", "28", "29", "30", "31",
310 #ifdef TARGET_REGNAMES
311 static const char alt_reg_names
[][8] =
313 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
314 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
315 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
316 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
317 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
318 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
319 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
320 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
321 "mq", "lr", "ctr", "ap",
322 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
324 /* AltiVec registers. */
325 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
326 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
327 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
328 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
335 #ifndef MASK_STRICT_ALIGN
336 #define MASK_STRICT_ALIGN 0
339 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
340 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
342 /* Initialize the GCC target structure. */
343 #undef TARGET_ATTRIBUTE_TABLE
344 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
345 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
346 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
348 #undef TARGET_ASM_ALIGNED_DI_OP
349 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
351 /* Default unaligned ops are only provided for ELF. Find the ops needed
352 for non-ELF systems. */
353 #ifndef OBJECT_FORMAT_ELF
355 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
357 #undef TARGET_ASM_UNALIGNED_HI_OP
358 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
359 #undef TARGET_ASM_UNALIGNED_SI_OP
360 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
361 #undef TARGET_ASM_UNALIGNED_DI_OP
362 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
365 #undef TARGET_ASM_UNALIGNED_HI_OP
366 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
367 #undef TARGET_ASM_UNALIGNED_SI_OP
368 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
372 /* This hook deals with fixups for relocatable code and DI-mode objects
374 #undef TARGET_ASM_INTEGER
375 #define TARGET_ASM_INTEGER rs6000_assemble_integer
377 #ifdef HAVE_GAS_HIDDEN
378 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
379 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
382 #undef TARGET_ASM_FUNCTION_PROLOGUE
383 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
384 #undef TARGET_ASM_FUNCTION_EPILOGUE
385 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
387 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
388 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
389 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
390 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_multipass_dfa_lookahead
391 #undef TARGET_SCHED_VARIABLE_ISSUE
392 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
394 #undef TARGET_SCHED_ISSUE_RATE
395 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
396 #undef TARGET_SCHED_ADJUST_COST
397 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
398 #undef TARGET_SCHED_ADJUST_PRIORITY
399 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
401 #undef TARGET_INIT_BUILTINS
402 #define TARGET_INIT_BUILTINS rs6000_init_builtins
404 #undef TARGET_EXPAND_BUILTIN
405 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
407 #undef TARGET_BINDS_LOCAL_P
408 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
410 #undef TARGET_ASM_OUTPUT_MI_THUNK
411 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
413 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
414 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
416 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
417 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
419 #undef TARGET_RTX_COSTS
420 #define TARGET_RTX_COSTS rs6000_rtx_costs
421 #undef TARGET_ADDRESS_COST
422 #define TARGET_ADDRESS_COST hook_int_rtx_0
424 #undef TARGET_VECTOR_OPAQUE_P
425 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
/* The target hook vector for this backend.  TARGET_INITIALIZER expands
   using the TARGET_* macro overrides #define'd above this point.  */
427 struct gcc_target targetm
= TARGET_INITIALIZER
;
429 /* Override command line options. Mostly we process the processor
430 type and sometimes adjust other TARGET_ options. */
433 rs6000_override_options (default_cpu
)
434 const char *default_cpu
;
437 struct rs6000_cpu_select
*ptr
;
439 /* Simplify the entries below by making a mask for any POWER
440 variant and any PowerPC variant. */
442 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
443 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
444 | MASK_PPC_GFXOPT | MASK_POWERPC64)
445 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
449 const char *const name
; /* Canonical processor name. */
450 const enum processor_type processor
; /* Processor type enum value. */
451 const int target_enable
; /* Target flags to enable. */
452 const int target_disable
; /* Target flags to disable. */
453 } const processor_target_table
[]
454 = {{"common", PROCESSOR_COMMON
, MASK_NEW_MNEMONICS
,
455 POWER_MASKS
| POWERPC_MASKS
},
456 {"power", PROCESSOR_POWER
,
457 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
458 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
459 {"power2", PROCESSOR_POWER
,
460 MASK_POWER
| MASK_POWER2
| MASK_MULTIPLE
| MASK_STRING
,
461 POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
462 {"power3", PROCESSOR_PPC630
,
463 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
464 POWER_MASKS
| MASK_PPC_GPOPT
},
465 {"power4", PROCESSOR_POWER4
,
466 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
467 POWER_MASKS
| MASK_PPC_GPOPT
},
468 {"powerpc", PROCESSOR_POWERPC
,
469 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
470 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
471 {"powerpc64", PROCESSOR_POWERPC64
,
472 MASK_POWERPC
| MASK_POWERPC64
| MASK_NEW_MNEMONICS
,
473 POWER_MASKS
| POWERPC_OPT_MASKS
},
474 {"rios", PROCESSOR_RIOS1
,
475 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
476 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
477 {"rios1", PROCESSOR_RIOS1
,
478 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
479 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
480 {"rsc", PROCESSOR_PPC601
,
481 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
482 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
483 {"rsc1", PROCESSOR_PPC601
,
484 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
485 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
486 {"rios2", PROCESSOR_RIOS2
,
487 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
| MASK_POWER2
,
488 POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
489 {"rs64a", PROCESSOR_RS64A
,
490 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
491 POWER_MASKS
| POWERPC_OPT_MASKS
},
492 {"401", PROCESSOR_PPC403
,
493 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
494 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
495 {"403", PROCESSOR_PPC403
,
496 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
| MASK_STRICT_ALIGN
,
497 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
498 {"405", PROCESSOR_PPC405
,
499 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
500 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
501 {"405f", PROCESSOR_PPC405
,
502 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
503 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
504 {"505", PROCESSOR_MPCCORE
,
505 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
506 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
507 {"601", PROCESSOR_PPC601
,
508 MASK_POWER
| MASK_POWERPC
| MASK_NEW_MNEMONICS
| MASK_MULTIPLE
| MASK_STRING
,
509 MASK_POWER2
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
510 {"602", PROCESSOR_PPC603
,
511 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
512 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
513 {"603", PROCESSOR_PPC603
,
514 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
515 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
516 {"603e", PROCESSOR_PPC603
,
517 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
518 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
519 {"ec603e", PROCESSOR_PPC603
,
520 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
521 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
522 {"604", PROCESSOR_PPC604
,
523 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
524 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
525 {"604e", PROCESSOR_PPC604e
,
526 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
527 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
528 {"620", PROCESSOR_PPC620
,
529 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
530 POWER_MASKS
| MASK_PPC_GPOPT
},
531 {"630", PROCESSOR_PPC630
,
532 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
533 POWER_MASKS
| MASK_PPC_GPOPT
},
534 {"740", PROCESSOR_PPC750
,
535 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
536 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
537 {"750", PROCESSOR_PPC750
,
538 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
539 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
540 {"7400", PROCESSOR_PPC7400
,
541 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
542 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
543 {"7450", PROCESSOR_PPC7450
,
544 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
545 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
546 {"8540", PROCESSOR_PPC8540
,
547 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
548 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
549 {"801", PROCESSOR_MPCCORE
,
550 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
551 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
552 {"821", PROCESSOR_MPCCORE
,
553 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
554 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
555 {"823", PROCESSOR_MPCCORE
,
556 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
557 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
558 {"860", PROCESSOR_MPCCORE
,
559 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
560 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
}};
562 const size_t ptt_size
= ARRAY_SIZE (processor_target_table
);
564 /* Save current -mmultiple/-mno-multiple status. */
565 int multiple
= TARGET_MULTIPLE
;
566 /* Save current -mstring/-mno-string status. */
567 int string
= TARGET_STRING
;
569 /* Identify the processor type. */
570 rs6000_select
[0].string
= default_cpu
;
571 rs6000_cpu
= TARGET_POWERPC64
? PROCESSOR_DEFAULT64
: PROCESSOR_DEFAULT
;
573 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
575 ptr
= &rs6000_select
[i
];
576 if (ptr
->string
!= (char *)0 && ptr
->string
[0] != '\0')
578 for (j
= 0; j
< ptt_size
; j
++)
579 if (! strcmp (ptr
->string
, processor_target_table
[j
].name
))
582 rs6000_cpu
= processor_target_table
[j
].processor
;
586 target_flags
|= processor_target_table
[j
].target_enable
;
587 target_flags
&= ~processor_target_table
[j
].target_disable
;
593 error ("bad value (%s) for %s switch", ptr
->string
, ptr
->name
);
597 if (rs6000_cpu
== PROCESSOR_PPC8540
)
600 /* If we are optimizing big endian systems for space, use the load/store
601 multiple and string instructions. */
602 if (BYTES_BIG_ENDIAN
&& optimize_size
)
603 target_flags
|= MASK_MULTIPLE
| MASK_STRING
;
605 /* If -mmultiple or -mno-multiple was explicitly used, don't
606 override with the processor default */
607 if ((target_flags_explicit
& MASK_MULTIPLE
) != 0)
608 target_flags
= (target_flags
& ~MASK_MULTIPLE
) | multiple
;
610 /* If -mstring or -mno-string was explicitly used, don't override
611 with the processor default. */
612 if ((target_flags_explicit
& MASK_STRING
) != 0)
613 target_flags
= (target_flags
& ~MASK_STRING
) | string
;
615 /* Don't allow -mmultiple or -mstring on little endian systems
616 unless the cpu is a 750, because the hardware doesn't support the
617 instructions used in little endian mode, and causes an alignment
618 trap. The 750 does not cause an alignment trap (except when the
619 target is unaligned). */
621 if (!BYTES_BIG_ENDIAN
&& rs6000_cpu
!= PROCESSOR_PPC750
)
625 target_flags
&= ~MASK_MULTIPLE
;
626 if ((target_flags_explicit
& MASK_MULTIPLE
) != 0)
627 warning ("-mmultiple is not supported on little endian systems");
632 target_flags
&= ~MASK_STRING
;
633 if ((target_flags_explicit
& MASK_STRING
) != 0)
634 warning ("-mstring is not supported on little endian systems");
638 if (flag_pic
!= 0 && DEFAULT_ABI
== ABI_AIX
)
640 rs6000_flag_pic
= flag_pic
;
644 /* For Darwin, always silently make -fpic and -fPIC identical. */
645 if (flag_pic
== 1 && DEFAULT_ABI
== ABI_DARWIN
)
648 /* Set debug flags */
649 if (rs6000_debug_name
)
651 if (! strcmp (rs6000_debug_name
, "all"))
652 rs6000_debug_stack
= rs6000_debug_arg
= 1;
653 else if (! strcmp (rs6000_debug_name
, "stack"))
654 rs6000_debug_stack
= 1;
655 else if (! strcmp (rs6000_debug_name
, "arg"))
656 rs6000_debug_arg
= 1;
658 error ("unknown -mdebug-%s switch", rs6000_debug_name
);
661 if (rs6000_traceback_name
)
663 if (! strncmp (rs6000_traceback_name
, "full", 4))
664 rs6000_traceback
= traceback_full
;
665 else if (! strncmp (rs6000_traceback_name
, "part", 4))
666 rs6000_traceback
= traceback_part
;
667 else if (! strncmp (rs6000_traceback_name
, "no", 2))
668 rs6000_traceback
= traceback_none
;
670 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
671 rs6000_traceback_name
);
674 /* Set size of long double */
675 rs6000_long_double_type_size
= 64;
676 if (rs6000_long_double_size_string
)
679 int size
= strtol (rs6000_long_double_size_string
, &tail
, 10);
680 if (*tail
!= '\0' || (size
!= 64 && size
!= 128))
681 error ("Unknown switch -mlong-double-%s",
682 rs6000_long_double_size_string
);
684 rs6000_long_double_type_size
= size
;
687 /* Handle -mabi= options. */
688 rs6000_parse_abi_options ();
690 /* Handle -mvrsave= option. */
691 rs6000_parse_vrsave_option ();
693 /* Handle -misel= option. */
694 rs6000_parse_isel_option ();
696 #ifdef SUBTARGET_OVERRIDE_OPTIONS
697 SUBTARGET_OVERRIDE_OPTIONS
;
699 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
700 SUBSUBTARGET_OVERRIDE_OPTIONS
;
703 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
704 using TARGET_OPTIONS to handle a toggle switch, but we're out of
705 bits in target_flags so TARGET_SWITCHES cannot be used.
706 Assumption here is that rs6000_longcall_switch points into the
707 text of the complete option, rather than being a copy, so we can
708 scan back for the presence or absence of the no- modifier. */
709 if (rs6000_longcall_switch
)
711 const char *base
= rs6000_longcall_switch
;
712 while (base
[-1] != 'm') base
--;
714 if (*rs6000_longcall_switch
!= '\0')
715 error ("invalid option `%s'", base
);
716 rs6000_default_long_calls
= (base
[0] != 'n');
719 #ifdef TARGET_REGNAMES
720 /* If the user desires alternate register names, copy in the
721 alternate names now. */
723 memcpy (rs6000_reg_names
, alt_reg_names
, sizeof (rs6000_reg_names
));
726 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
727 If -maix-struct-return or -msvr4-struct-return was explicitly
728 used, don't override with the ABI default. */
729 if ((target_flags_explicit
& MASK_AIX_STRUCT_RET
) == 0)
731 if (DEFAULT_ABI
== ABI_V4
&& !DRAFT_V4_STRUCT_RET
)
732 target_flags
= (target_flags
& ~MASK_AIX_STRUCT_RET
);
734 target_flags
|= MASK_AIX_STRUCT_RET
;
737 if (TARGET_LONG_DOUBLE_128
738 && (DEFAULT_ABI
== ABI_AIX
|| DEFAULT_ABI
== ABI_DARWIN
))
739 real_format_for_mode
[TFmode
- QFmode
] = &ibm_extended_format
;
741 /* Allocate an alias set for register saves & restores from stack. */
742 rs6000_sr_alias_set
= new_alias_set ();
745 ASM_GENERATE_INTERNAL_LABEL (toc_label_name
, "LCTOC", 1);
747 /* We can only guarantee the availability of DI pseudo-ops when
748 assembling for 64-bit targets. */
751 targetm
.asm_out
.aligned_op
.di
= NULL
;
752 targetm
.asm_out
.unaligned_op
.di
= NULL
;
755 /* Set maximum branch target alignment at two instructions, eight bytes. */
756 align_jumps_max_skip
= 8;
757 align_loops_max_skip
= 8;
759 /* Arrange to save and restore machine status around nested functions. */
760 init_machine_status
= rs6000_init_machine_status
;
763 /* Handle -misel= option. */
/* Parse the string captured from -misel= ("yes" or "no"); any other
   value is diagnosed with error().  NOTE(review): this function's return
   type, braces, flag assignments and the error()'s final argument appear
   to have been lost when this chunk was extracted — confirm against the
   full source before relying on details.  */
765 rs6000_parse_isel_option ()
/* Option not given: keep the built-in default.  */
767 if (rs6000_isel_string
== 0)
769 else if (! strcmp (rs6000_isel_string
, "yes"))
771 else if (! strcmp (rs6000_isel_string
, "no"))
/* Anything other than "yes"/"no" is rejected.  */
774 error ("unknown -misel= option specified: '%s'",
778 /* Handle -mvrsave= options. */
/* Parse the string captured from -mvrsave=: absent or "yes" sets
   rs6000_altivec_vrsave to 1, "no" sets it to 0, and any other value is
   diagnosed.  NOTE(review): the function's return type and body braces
   were lost in extraction of this chunk.  */
780 rs6000_parse_vrsave_option ()
782 /* Generate VRSAVE instructions by default. */
783 if (rs6000_altivec_vrsave_string
== 0
784 || ! strcmp (rs6000_altivec_vrsave_string
, "yes"))
785 rs6000_altivec_vrsave
= 1;
786 else if (! strcmp (rs6000_altivec_vrsave_string
, "no"))
787 rs6000_altivec_vrsave
= 0;
/* Unrecognized value: report it back to the user.  */
789 error ("unknown -mvrsave= option specified: '%s'",
790 rs6000_altivec_vrsave_string
);
793 /* Handle -mabi= options. */
/* Parse the string captured from -mabi=.  "altivec"/"no-altivec" toggle
   rs6000_altivec_abi; "spe"/"no-spe" are accepted but "spe" errors out
   when the compiler is not configured for that ABI; any other value is
   an unknown ABI.  NOTE(review): the return type, braces and the
   spe-flag assignments were lost in extraction of this chunk.  */
795 rs6000_parse_abi_options ()
/* No -mabi= given: nothing to do.  */
797 if (rs6000_abi_string
== 0)
799 else if (! strcmp (rs6000_abi_string
, "altivec"))
800 rs6000_altivec_abi
= 1;
801 else if (! strcmp (rs6000_abi_string
, "no-altivec"))
802 rs6000_altivec_abi
= 0;
803 else if (! strcmp (rs6000_abi_string
, "spe"))
/* Reached only when SPE support is not configured in.  */
807 error ("not configured for ABI: '%s'", rs6000_abi_string
);
810 else if (! strcmp (rs6000_abi_string
, "no-spe"))
813 error ("unknown ABI specified: '%s'", rs6000_abi_string
);
/* Hook called to adjust defaults for optimization LEVEL and -Os (SIZE).
   This port takes no action here; both parameters are unused.
   NOTE(review): the return-type line and body braces appear to have been
   lost when this chunk was extracted.  */
817 optimization_options (level
, size
)
818 int level ATTRIBUTE_UNUSED
;
819 int size ATTRIBUTE_UNUSED
;
823 /* Do anything needed at the start of the asm file. */
826 rs6000_file_start (file
, default_cpu
)
828 const char *default_cpu
;
832 const char *start
= buffer
;
833 struct rs6000_cpu_select
*ptr
;
835 if (flag_verbose_asm
)
837 sprintf (buffer
, "\n%s rs6000/powerpc options:", ASM_COMMENT_START
);
838 rs6000_select
[0].string
= default_cpu
;
840 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
842 ptr
= &rs6000_select
[i
];
843 if (ptr
->string
!= (char *)0 && ptr
->string
[0] != '\0')
845 fprintf (file
, "%s %s%s", start
, ptr
->name
, ptr
->string
);
851 switch (rs6000_sdata
)
853 case SDATA_NONE
: fprintf (file
, "%s -msdata=none", start
); start
= ""; break;
854 case SDATA_DATA
: fprintf (file
, "%s -msdata=data", start
); start
= ""; break;
855 case SDATA_SYSV
: fprintf (file
, "%s -msdata=sysv", start
); start
= ""; break;
856 case SDATA_EABI
: fprintf (file
, "%s -msdata=eabi", start
); start
= ""; break;
859 if (rs6000_sdata
&& g_switch_value
)
861 fprintf (file
, "%s -G %d", start
, g_switch_value
);
871 /* Return nonzero if this function is known to have a null epilogue. */
876 if (reload_completed
)
878 rs6000_stack_t
*info
= rs6000_stack_info ();
880 if (info
->first_gp_reg_save
== 32
881 && info
->first_fp_reg_save
== 64
882 && info
->first_altivec_reg_save
== LAST_ALTIVEC_REGNO
+ 1
885 && info
->vrsave_mask
== 0
893 /* Returns 1 always. */
/* Predicate that accepts any rtx in any mode; both parameters are
   deliberately unused.  NOTE(review): the `return 1;' body and return
   type were lost in extraction of this chunk.  */
896 any_operand (op
, mode
)
897 rtx op ATTRIBUTE_UNUSED
;
898 enum machine_mode mode ATTRIBUTE_UNUSED
;
903 /* Returns 1 if op is the count register. */
/* Accepts only REG rtxes: the hard CTR register, or (presumably, since
   the check below singles them out) pseudos that might later be
   allocated to CTR — confirm against the full source.
   NOTE(review): the `rtx op' parameter line, return statements and
   braces were lost in extraction of this chunk.  */
905 count_register_operand (op
, mode
)
907 enum machine_mode mode ATTRIBUTE_UNUSED
;
/* Only a REG can be the count register.  */
909 if (GET_CODE (op
) != REG
)
912 if (REGNO (op
) == COUNT_REGISTER_REGNUM
)
/* Register numbers above FIRST_PSEUDO_REGISTER are pseudos.  */
915 if (REGNO (op
) > FIRST_PSEUDO_REGISTER
)
921 /* Returns 1 if op is an altivec register. */
/* True when OP is a valid register operand that is either not a REG
   (e.g. a SUBREG accepted by register_operand), a pseudo, or a hard
   register satisfying ALTIVEC_REGNO_P.  */
923 altivec_register_operand (op
, mode
)
925 enum machine_mode mode ATTRIBUTE_UNUSED
;
928 return (register_operand (op
, mode
)
929 && (GET_CODE (op
) != REG
930 || REGNO (op
) > FIRST_PSEUDO_REGISTER
931 || ALTIVEC_REGNO_P (REGNO (op
))));
/* Return 1 if OP is a REG whose number satisfies XER_REGNO_P (the XER
   register).  NOTE(review): the leading comment, return type, return
   statements and braces were lost in extraction of this chunk.  */
935 xer_operand (op
, mode
)
937 enum machine_mode mode ATTRIBUTE_UNUSED
;
939 if (GET_CODE (op
) != REG
)
942 if (XER_REGNO_P (REGNO (op
)))
948 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
949 by such constants completes more quickly. */
/* i.e. a CONST_INT whose value lies in [-128, 127].  */
952 s8bit_cint_operand (op
, mode
)
954 enum machine_mode mode ATTRIBUTE_UNUSED
;
956 return ( GET_CODE (op
) == CONST_INT
957 && (INTVAL (op
) >= -128 && INTVAL (op
) <= 127));
960 /* Return 1 if OP is a constant that can fit in a D field. */
/* Delegates the range check to the `I' constraint letter — presumably a
   signed 16-bit immediate; see the port's constraint definitions.  */
963 short_cint_operand (op
, mode
)
965 enum machine_mode mode ATTRIBUTE_UNUSED
;
967 return (GET_CODE (op
) == CONST_INT
968 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I'));
971 /* Similar for an unsigned D field. */
/* The value is masked to MODE's width before testing the `K' constraint,
   so e.g. a sign-extended HImode constant still qualifies.  */
974 u_short_cint_operand (op
, mode
)
976 enum machine_mode mode ATTRIBUTE_UNUSED
;
978 return (GET_CODE (op
) == CONST_INT
979 && CONST_OK_FOR_LETTER_P (INTVAL (op
) & GET_MODE_MASK (mode
), 'K'));
982 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
/* The biased unsigned comparison below is the standard idiom for
   "value is outside [-0x8000, 0x7fff]" without signed overflow.  */
985 non_short_cint_operand (op
, mode
)
987 enum machine_mode mode ATTRIBUTE_UNUSED
;
989 return (GET_CODE (op
) == CONST_INT
990 && (unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x8000) >= 0x10000);
993 /* Returns 1 if OP is a CONST_INT that is a positive value
994 and an exact power of 2. */
997 exact_log2_cint_operand (op
, mode
)
999 enum machine_mode mode ATTRIBUTE_UNUSED
;
1001 return (GET_CODE (op
) == CONST_INT
1003 && exact_log2 (INTVAL (op
)) >= 0);
1006 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1010 gpc_reg_operand (op
, mode
)
1012 enum machine_mode mode
;
1014 return (register_operand (op
, mode
)
1015 && (GET_CODE (op
) != REG
1016 || (REGNO (op
) >= ARG_POINTER_REGNUM
1017 && !XER_REGNO_P (REGNO (op
)))
1018 || REGNO (op
) < MQ_REGNO
));
1021 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1025 cc_reg_operand (op
, mode
)
1027 enum machine_mode mode
;
1029 return (register_operand (op
, mode
)
1030 && (GET_CODE (op
) != REG
1031 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
1032 || CR_REGNO_P (REGNO (op
))));
1035 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1036 CR field that isn't CR0. */
1039 cc_reg_not_cr0_operand (op
, mode
)
1041 enum machine_mode mode
;
1043 return (register_operand (op
, mode
)
1044 && (GET_CODE (op
) != REG
1045 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
1046 || CR_REGNO_NOT_CR0_P (REGNO (op
))));
1049 /* Returns 1 if OP is either a constant integer valid for a D-field or
1050 a non-special register. If a register, it must be in the proper
1051 mode unless MODE is VOIDmode. */
1054 reg_or_short_operand (op
, mode
)
1056 enum machine_mode mode
;
1058 return short_cint_operand (op
, mode
) || gpc_reg_operand (op
, mode
);
1061 /* Similar, except check if the negation of the constant would be
1062 valid for a D-field. */
1065 reg_or_neg_short_operand (op
, mode
)
1067 enum machine_mode mode
;
1069 if (GET_CODE (op
) == CONST_INT
)
1070 return CONST_OK_FOR_LETTER_P (INTVAL (op
), 'P');
1072 return gpc_reg_operand (op
, mode
);
1075 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1076 a non-special register. If a register, it must be in the proper
1077 mode unless MODE is VOIDmode. */
1080 reg_or_aligned_short_operand (op
, mode
)
1082 enum machine_mode mode
;
1084 if (gpc_reg_operand (op
, mode
))
1086 else if (short_cint_operand (op
, mode
) && !(INTVAL (op
) & 3))
1093 /* Return 1 if the operand is either a register or an integer whose
1094 high-order 16 bits are zero. */
1097 reg_or_u_short_operand (op
, mode
)
1099 enum machine_mode mode
;
1101 return u_short_cint_operand (op
, mode
) || gpc_reg_operand (op
, mode
);
1104 /* Return 1 is the operand is either a non-special register or ANY
1105 constant integer. */
1108 reg_or_cint_operand (op
, mode
)
1110 enum machine_mode mode
;
1112 return (GET_CODE (op
) == CONST_INT
|| gpc_reg_operand (op
, mode
));
1115 /* Return 1 is the operand is either a non-special register or ANY
1116 32-bit signed constant integer. */
1119 reg_or_arith_cint_operand (op
, mode
)
1121 enum machine_mode mode
;
1123 return (gpc_reg_operand (op
, mode
)
1124 || (GET_CODE (op
) == CONST_INT
1125 #if HOST_BITS_PER_WIDE_INT != 32
1126 && ((unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x80000000)
1127 < (unsigned HOST_WIDE_INT
) 0x100000000ll
)
1132 /* Return 1 is the operand is either a non-special register or a 32-bit
1133 signed constant integer valid for 64-bit addition. */
1136 reg_or_add_cint64_operand (op
, mode
)
1138 enum machine_mode mode
;
1140 return (gpc_reg_operand (op
, mode
)
1141 || (GET_CODE (op
) == CONST_INT
1142 #if HOST_BITS_PER_WIDE_INT == 32
1143 && INTVAL (op
) < 0x7fff8000
1145 && ((unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x80008000)
1151 /* Return 1 is the operand is either a non-special register or a 32-bit
1152 signed constant integer valid for 64-bit subtraction. */
1155 reg_or_sub_cint64_operand (op
, mode
)
1157 enum machine_mode mode
;
1159 return (gpc_reg_operand (op
, mode
)
1160 || (GET_CODE (op
) == CONST_INT
1161 #if HOST_BITS_PER_WIDE_INT == 32
1162 && (- INTVAL (op
)) < 0x7fff8000
1164 && ((unsigned HOST_WIDE_INT
) ((- INTVAL (op
)) + 0x80008000)
1170 /* Return 1 is the operand is either a non-special register or ANY
1171 32-bit unsigned constant integer. */
1174 reg_or_logical_cint_operand (op
, mode
)
1176 enum machine_mode mode
;
1178 if (GET_CODE (op
) == CONST_INT
)
1180 if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
)
1182 if (GET_MODE_BITSIZE (mode
) <= 32)
1185 if (INTVAL (op
) < 0)
1189 return ((INTVAL (op
) & GET_MODE_MASK (mode
)
1190 & (~ (unsigned HOST_WIDE_INT
) 0xffffffff)) == 0);
1192 else if (GET_CODE (op
) == CONST_DOUBLE
)
1194 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
1198 return CONST_DOUBLE_HIGH (op
) == 0;
1201 return gpc_reg_operand (op
, mode
);
1204 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1207 got_operand (op
, mode
)
1209 enum machine_mode mode ATTRIBUTE_UNUSED
;
1211 return (GET_CODE (op
) == SYMBOL_REF
1212 || GET_CODE (op
) == CONST
1213 || GET_CODE (op
) == LABEL_REF
);
1216 /* Return 1 if the operand is a simple references that can be loaded via
1217 the GOT (labels involving addition aren't allowed). */
1220 got_no_const_operand (op
, mode
)
1222 enum machine_mode mode ATTRIBUTE_UNUSED
;
1224 return (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
);
1227 /* Return the number of instructions it takes to form a constant in an
1228 integer register. */
1231 num_insns_constant_wide (value
)
1232 HOST_WIDE_INT value
;
1234 /* signed constant loadable with {cal|addi} */
1235 if (CONST_OK_FOR_LETTER_P (value
, 'I'))
1238 /* constant loadable with {cau|addis} */
1239 else if (CONST_OK_FOR_LETTER_P (value
, 'L'))
1242 #if HOST_BITS_PER_WIDE_INT == 64
1243 else if (TARGET_POWERPC64
)
1245 HOST_WIDE_INT low
= ((value
& 0xffffffff) ^ 0x80000000) - 0x80000000;
1246 HOST_WIDE_INT high
= value
>> 31;
1248 if (high
== 0 || high
== -1)
1254 return num_insns_constant_wide (high
) + 1;
1256 return (num_insns_constant_wide (high
)
1257 + num_insns_constant_wide (low
) + 1);
1266 num_insns_constant (op
, mode
)
1268 enum machine_mode mode
;
1270 if (GET_CODE (op
) == CONST_INT
)
1272 #if HOST_BITS_PER_WIDE_INT == 64
1273 if ((INTVAL (op
) >> 31) != 0 && (INTVAL (op
) >> 31) != -1
1274 && mask64_operand (op
, mode
))
1278 return num_insns_constant_wide (INTVAL (op
));
1281 else if (GET_CODE (op
) == CONST_DOUBLE
&& mode
== SFmode
)
1286 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1287 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
1288 return num_insns_constant_wide ((HOST_WIDE_INT
) l
);
1291 else if (GET_CODE (op
) == CONST_DOUBLE
)
1297 int endian
= (WORDS_BIG_ENDIAN
== 0);
1299 if (mode
== VOIDmode
|| mode
== DImode
)
1301 high
= CONST_DOUBLE_HIGH (op
);
1302 low
= CONST_DOUBLE_LOW (op
);
1306 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1307 REAL_VALUE_TO_TARGET_DOUBLE (rv
, l
);
1309 low
= l
[1 - endian
];
1313 return (num_insns_constant_wide (low
)
1314 + num_insns_constant_wide (high
));
1318 if (high
== 0 && low
>= 0)
1319 return num_insns_constant_wide (low
);
1321 else if (high
== -1 && low
< 0)
1322 return num_insns_constant_wide (low
);
1324 else if (mask64_operand (op
, mode
))
1328 return num_insns_constant_wide (high
) + 1;
1331 return (num_insns_constant_wide (high
)
1332 + num_insns_constant_wide (low
) + 1);
1340 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1341 register with one instruction per word. We only do this if we can
1342 safely read CONST_DOUBLE_{LOW,HIGH}. */
1345 easy_fp_constant (op
, mode
)
1347 enum machine_mode mode
;
1349 if (GET_CODE (op
) != CONST_DOUBLE
1350 || GET_MODE (op
) != mode
1351 || (GET_MODE_CLASS (mode
) != MODE_FLOAT
&& mode
!= DImode
))
1354 /* Consider all constants with -msoft-float to be easy. */
1355 if ((TARGET_SOFT_FLOAT
|| !TARGET_FPRS
)
1359 /* If we are using V.4 style PIC, consider all constants to be hard. */
1360 if (flag_pic
&& DEFAULT_ABI
== ABI_V4
)
1363 #ifdef TARGET_RELOCATABLE
1364 /* Similarly if we are using -mrelocatable, consider all constants
1366 if (TARGET_RELOCATABLE
)
1375 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1376 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv
, k
);
1378 return (num_insns_constant_wide ((HOST_WIDE_INT
) k
[0]) == 1
1379 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[1]) == 1
1380 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[2]) == 1
1381 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[3]) == 1);
1384 else if (mode
== DFmode
)
1389 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1390 REAL_VALUE_TO_TARGET_DOUBLE (rv
, k
);
1392 return (num_insns_constant_wide ((HOST_WIDE_INT
) k
[0]) == 1
1393 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[1]) == 1);
1396 else if (mode
== SFmode
)
1401 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1402 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
1404 return num_insns_constant_wide (l
) == 1;
1407 else if (mode
== DImode
)
1408 return ((TARGET_POWERPC64
1409 && GET_CODE (op
) == CONST_DOUBLE
&& CONST_DOUBLE_LOW (op
) == 0)
1410 || (num_insns_constant (op
, DImode
) <= 2));
1412 else if (mode
== SImode
)
1418 /* Return 1 if the operand is a CONST_INT and can be put into a
1419 register with one instruction. */
1422 easy_vector_constant (op
)
1428 if (GET_CODE (op
) != CONST_VECTOR
)
1431 units
= CONST_VECTOR_NUNITS (op
);
1433 /* We can generate 0 easily. Look for that. */
1434 for (i
= 0; i
< units
; ++i
)
1436 elt
= CONST_VECTOR_ELT (op
, i
);
1438 /* We could probably simplify this by just checking for equality
1439 with CONST0_RTX for the current mode, but let's be safe
1442 switch (GET_CODE (elt
))
1445 if (INTVAL (elt
) != 0)
1449 if (CONST_DOUBLE_LOW (elt
) != 0 || CONST_DOUBLE_HIGH (elt
) != 0)
1457 /* We could probably generate a few other constants trivially, but
1458 gcc doesn't generate them yet. FIXME later. */
1462 /* Return 1 if the operand is the constant 0. This works for scalars
1463 as well as vectors. */
1465 zero_constant (op
, mode
)
1467 enum machine_mode mode
;
1469 return op
== CONST0_RTX (mode
);
1472 /* Return 1 if the operand is 0.0. */
1474 zero_fp_constant (op
, mode
)
1476 enum machine_mode mode
;
1478 return GET_MODE_CLASS (mode
) == MODE_FLOAT
&& op
== CONST0_RTX (mode
);
1481 /* Return 1 if the operand is in volatile memory. Note that during
1482 the RTL generation phase, memory_operand does not return TRUE for
1483 volatile memory references. So this function allows us to
1484 recognize volatile references where its safe. */
1487 volatile_mem_operand (op
, mode
)
1489 enum machine_mode mode
;
1491 if (GET_CODE (op
) != MEM
)
1494 if (!MEM_VOLATILE_P (op
))
1497 if (mode
!= GET_MODE (op
))
1500 if (reload_completed
)
1501 return memory_operand (op
, mode
);
1503 if (reload_in_progress
)
1504 return strict_memory_address_p (mode
, XEXP (op
, 0));
1506 return memory_address_p (mode
, XEXP (op
, 0));
1509 /* Return 1 if the operand is an offsettable memory operand. */
1512 offsettable_mem_operand (op
, mode
)
1514 enum machine_mode mode
;
1516 return ((GET_CODE (op
) == MEM
)
1517 && offsettable_address_p (reload_completed
|| reload_in_progress
,
1518 mode
, XEXP (op
, 0)));
1521 /* Return 1 if the operand is either an easy FP constant (see above) or
1525 mem_or_easy_const_operand (op
, mode
)
1527 enum machine_mode mode
;
1529 return memory_operand (op
, mode
) || easy_fp_constant (op
, mode
);
1532 /* Return 1 if the operand is either a non-special register or an item
1533 that can be used as the operand of a `mode' add insn. */
1536 add_operand (op
, mode
)
1538 enum machine_mode mode
;
1540 if (GET_CODE (op
) == CONST_INT
)
1541 return (CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I')
1542 || CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'));
1544 return gpc_reg_operand (op
, mode
);
1547 /* Return 1 if OP is a constant but not a valid add_operand. */
1550 non_add_cint_operand (op
, mode
)
1552 enum machine_mode mode ATTRIBUTE_UNUSED
;
1554 return (GET_CODE (op
) == CONST_INT
1555 && !CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I')
1556 && !CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'));
1559 /* Return 1 if the operand is a non-special register or a constant that
1560 can be used as the operand of an OR or XOR insn on the RS/6000. */
1563 logical_operand (op
, mode
)
1565 enum machine_mode mode
;
1567 HOST_WIDE_INT opl
, oph
;
1569 if (gpc_reg_operand (op
, mode
))
1572 if (GET_CODE (op
) == CONST_INT
)
1574 opl
= INTVAL (op
) & GET_MODE_MASK (mode
);
1576 #if HOST_BITS_PER_WIDE_INT <= 32
1577 if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
&& opl
< 0)
1581 else if (GET_CODE (op
) == CONST_DOUBLE
)
1583 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1586 opl
= CONST_DOUBLE_LOW (op
);
1587 oph
= CONST_DOUBLE_HIGH (op
);
1594 return ((opl
& ~ (unsigned HOST_WIDE_INT
) 0xffff) == 0
1595 || (opl
& ~ (unsigned HOST_WIDE_INT
) 0xffff0000) == 0);
1598 /* Return 1 if C is a constant that is not a logical operand (as
1599 above), but could be split into one. */
1602 non_logical_cint_operand (op
, mode
)
1604 enum machine_mode mode
;
1606 return ((GET_CODE (op
) == CONST_INT
|| GET_CODE (op
) == CONST_DOUBLE
)
1607 && ! logical_operand (op
, mode
)
1608 && reg_or_logical_cint_operand (op
, mode
));
1611 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1612 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1613 Reject all ones and all zeros, since these should have been optimized
1614 away and confuse the making of MB and ME. */
1617 mask_operand (op
, mode
)
1619 enum machine_mode mode ATTRIBUTE_UNUSED
;
1621 HOST_WIDE_INT c
, lsb
;
1623 if (GET_CODE (op
) != CONST_INT
)
1628 /* Fail in 64-bit mode if the mask wraps around because the upper
1629 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1630 if (TARGET_POWERPC64
&& (c
& 0x80000001) == 0x80000001)
1633 /* We don't change the number of transitions by inverting,
1634 so make sure we start with the LS bit zero. */
1638 /* Reject all zeros or all ones. */
1642 /* Find the first transition. */
1645 /* Invert to look for a second transition. */
1648 /* Erase first transition. */
1651 /* Find the second transition (if any). */
1654 /* Match if all the bits above are 1's (or c is zero). */
1658 /* Return 1 for the PowerPC64 rlwinm corner case. */
1661 mask_operand_wrap (op
, mode
)
1663 enum machine_mode mode ATTRIBUTE_UNUSED
;
1665 HOST_WIDE_INT c
, lsb
;
1667 if (GET_CODE (op
) != CONST_INT
)
1672 if ((c
& 0x80000001) != 0x80000001)
1686 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1687 It is if there are no more than one 1->0 or 0->1 transitions.
1688 Reject all zeros, since zero should have been optimized away and
1689 confuses the making of MB and ME. */
1692 mask64_operand (op
, mode
)
1694 enum machine_mode mode ATTRIBUTE_UNUSED
;
1696 if (GET_CODE (op
) == CONST_INT
)
1698 HOST_WIDE_INT c
, lsb
;
1702 /* Reject all zeros. */
1706 /* We don't change the number of transitions by inverting,
1707 so make sure we start with the LS bit zero. */
1711 /* Find the transition, and check that all bits above are 1's. */
1714 /* Match if all the bits above are 1's (or c is zero). */
1720 /* Like mask64_operand, but allow up to three transitions. This
1721 predicate is used by insn patterns that generate two rldicl or
1722 rldicr machine insns. */
1725 mask64_2_operand (op
, mode
)
1727 enum machine_mode mode ATTRIBUTE_UNUSED
;
1729 if (GET_CODE (op
) == CONST_INT
)
1731 HOST_WIDE_INT c
, lsb
;
1735 /* Disallow all zeros. */
1739 /* We don't change the number of transitions by inverting,
1740 so make sure we start with the LS bit zero. */
1744 /* Find the first transition. */
1747 /* Invert to look for a second transition. */
1750 /* Erase first transition. */
1753 /* Find the second transition. */
1756 /* Invert to look for a third transition. */
1759 /* Erase second transition. */
1762 /* Find the third transition (if any). */
1765 /* Match if all the bits above are 1's (or c is zero). */
1771 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
1772 implement ANDing by the mask IN. */
1774 build_mask64_2_operands (in
, out
)
1778 #if HOST_BITS_PER_WIDE_INT >= 64
1779 unsigned HOST_WIDE_INT c
, lsb
, m1
, m2
;
1782 if (GET_CODE (in
) != CONST_INT
)
1788 /* Assume c initially something like 0x00fff000000fffff. The idea
1789 is to rotate the word so that the middle ^^^^^^ group of zeros
1790 is at the MS end and can be cleared with an rldicl mask. We then
1791 rotate back and clear off the MS ^^ group of zeros with a
1793 c
= ~c
; /* c == 0xff000ffffff00000 */
1794 lsb
= c
& -c
; /* lsb == 0x0000000000100000 */
1795 m1
= -lsb
; /* m1 == 0xfffffffffff00000 */
1796 c
= ~c
; /* c == 0x00fff000000fffff */
1797 c
&= -lsb
; /* c == 0x00fff00000000000 */
1798 lsb
= c
& -c
; /* lsb == 0x0000100000000000 */
1799 c
= ~c
; /* c == 0xff000fffffffffff */
1800 c
&= -lsb
; /* c == 0xff00000000000000 */
1802 while ((lsb
>>= 1) != 0)
1803 shift
++; /* shift == 44 on exit from loop */
1804 m1
<<= 64 - shift
; /* m1 == 0xffffff0000000000 */
1805 m1
= ~m1
; /* m1 == 0x000000ffffffffff */
1806 m2
= ~c
; /* m2 == 0x00ffffffffffffff */
1810 /* Assume c initially something like 0xff000f0000000000. The idea
1811 is to rotate the word so that the ^^^ middle group of zeros
1812 is at the LS end and can be cleared with an rldicr mask. We then
1813 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
1815 lsb
= c
& -c
; /* lsb == 0x0000010000000000 */
1816 m2
= -lsb
; /* m2 == 0xffffff0000000000 */
1817 c
= ~c
; /* c == 0x00fff0ffffffffff */
1818 c
&= -lsb
; /* c == 0x00fff00000000000 */
1819 lsb
= c
& -c
; /* lsb == 0x0000100000000000 */
1820 c
= ~c
; /* c == 0xff000fffffffffff */
1821 c
&= -lsb
; /* c == 0xff00000000000000 */
1823 while ((lsb
>>= 1) != 0)
1824 shift
++; /* shift == 44 on exit from loop */
1825 m1
= ~c
; /* m1 == 0x00ffffffffffffff */
1826 m1
>>= shift
; /* m1 == 0x0000000000000fff */
1827 m1
= ~m1
; /* m1 == 0xfffffffffffff000 */
1830 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
1831 masks will be all 1's. We are guaranteed more than one transition. */
1832 out
[0] = GEN_INT (64 - shift
);
1833 out
[1] = GEN_INT (m1
);
1834 out
[2] = GEN_INT (shift
);
1835 out
[3] = GEN_INT (m2
);
1843 /* Return 1 if the operand is either a non-special register or a constant
1844 that can be used as the operand of a PowerPC64 logical AND insn. */
1847 and64_operand (op
, mode
)
1849 enum machine_mode mode
;
1851 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
1852 return (gpc_reg_operand (op
, mode
) || mask64_operand (op
, mode
));
1854 return (logical_operand (op
, mode
) || mask64_operand (op
, mode
));
1857 /* Like the above, but also match constants that can be implemented
1858 with two rldicl or rldicr insns. */
1861 and64_2_operand (op
, mode
)
1863 enum machine_mode mode
;
1865 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
1866 return gpc_reg_operand (op
, mode
) || mask64_2_operand (op
, mode
);
1868 return logical_operand (op
, mode
) || mask64_2_operand (op
, mode
);
1871 /* Return 1 if the operand is either a non-special register or a
1872 constant that can be used as the operand of an RS/6000 logical AND insn. */
1875 and_operand (op
, mode
)
1877 enum machine_mode mode
;
1879 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
1880 return (gpc_reg_operand (op
, mode
) || mask_operand (op
, mode
));
1882 return (logical_operand (op
, mode
) || mask_operand (op
, mode
));
1885 /* Return 1 if the operand is a general register or memory operand. */
1888 reg_or_mem_operand (op
, mode
)
1890 enum machine_mode mode
;
1892 return (gpc_reg_operand (op
, mode
)
1893 || memory_operand (op
, mode
)
1894 || volatile_mem_operand (op
, mode
));
1897 /* Return 1 if the operand is a general register or memory operand without
1898 pre_inc or pre_dec which produces invalid form of PowerPC lwa
1902 lwa_operand (op
, mode
)
1904 enum machine_mode mode
;
1908 if (reload_completed
&& GET_CODE (inner
) == SUBREG
)
1909 inner
= SUBREG_REG (inner
);
1911 return gpc_reg_operand (inner
, mode
)
1912 || (memory_operand (inner
, mode
)
1913 && GET_CODE (XEXP (inner
, 0)) != PRE_INC
1914 && GET_CODE (XEXP (inner
, 0)) != PRE_DEC
1915 && (GET_CODE (XEXP (inner
, 0)) != PLUS
1916 || GET_CODE (XEXP (XEXP (inner
, 0), 1)) != CONST_INT
1917 || INTVAL (XEXP (XEXP (inner
, 0), 1)) % 4 == 0));
1920 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
1923 symbol_ref_operand (op
, mode
)
1925 enum machine_mode mode
;
1927 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1930 return (GET_CODE (op
) == SYMBOL_REF
);
1933 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1934 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
1937 call_operand (op
, mode
)
1939 enum machine_mode mode
;
1941 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1944 return (GET_CODE (op
) == SYMBOL_REF
1945 || (GET_CODE (op
) == REG
1946 && (REGNO (op
) == LINK_REGISTER_REGNUM
1947 || REGNO (op
) == COUNT_REGISTER_REGNUM
1948 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
)));
1951 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1952 this file and the function is not weakly defined. */
1955 current_file_function_operand (op
, mode
)
1957 enum machine_mode mode ATTRIBUTE_UNUSED
;
1959 return (GET_CODE (op
) == SYMBOL_REF
1960 && (SYMBOL_REF_FLAG (op
)
1961 || (op
== XEXP (DECL_RTL (current_function_decl
), 0)
1962 && ! DECL_WEAK (current_function_decl
))));
1965 /* Return 1 if this operand is a valid input for a move insn. */
1968 input_operand (op
, mode
)
1970 enum machine_mode mode
;
1972 /* Memory is always valid. */
1973 if (memory_operand (op
, mode
))
1976 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
1977 if (GET_CODE (op
) == CONSTANT_P_RTX
)
1980 /* For floating-point, easy constants are valid. */
1981 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
1983 && easy_fp_constant (op
, mode
))
1986 /* Allow any integer constant. */
1987 if (GET_MODE_CLASS (mode
) == MODE_INT
1988 && (GET_CODE (op
) == CONST_INT
1989 || GET_CODE (op
) == CONST_DOUBLE
))
1992 /* For floating-point or multi-word mode, the only remaining valid type
1994 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
1995 || GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
1996 return register_operand (op
, mode
);
1998 /* The only cases left are integral modes one word or smaller (we
1999 do not get called for MODE_CC values). These can be in any
2001 if (register_operand (op
, mode
))
2004 /* A SYMBOL_REF referring to the TOC is valid. */
2005 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op
))
2008 /* A constant pool expression (relative to the TOC) is valid */
2009 if (TOC_RELATIVE_EXPR_P (op
))
2012 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2014 if (DEFAULT_ABI
== ABI_V4
2015 && (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == CONST
)
2016 && small_data_operand (op
, Pmode
))
2022 /* Return 1 for an operand in small memory on V.4/eabi. */
2025 small_data_operand (op
, mode
)
2026 rtx op ATTRIBUTE_UNUSED
;
2027 enum machine_mode mode ATTRIBUTE_UNUSED
;
2032 if (rs6000_sdata
== SDATA_NONE
|| rs6000_sdata
== SDATA_DATA
)
2035 if (DEFAULT_ABI
!= ABI_V4
)
2038 if (GET_CODE (op
) == SYMBOL_REF
)
2041 else if (GET_CODE (op
) != CONST
2042 || GET_CODE (XEXP (op
, 0)) != PLUS
2043 || GET_CODE (XEXP (XEXP (op
, 0), 0)) != SYMBOL_REF
2044 || GET_CODE (XEXP (XEXP (op
, 0), 1)) != CONST_INT
)
2049 rtx sum
= XEXP (op
, 0);
2050 HOST_WIDE_INT summand
;
2052 /* We have to be careful here, because it is the referenced address
2053 that must be 32k from _SDA_BASE_, not just the symbol. */
2054 summand
= INTVAL (XEXP (sum
, 1));
2055 if (summand
< 0 || summand
> g_switch_value
)
2058 sym_ref
= XEXP (sum
, 0);
2061 if (*XSTR (sym_ref
, 0) != '@')
2072 constant_pool_expr_1 (op
, have_sym
, have_toc
)
2077 switch (GET_CODE(op
))
2080 if (CONSTANT_POOL_ADDRESS_P (op
))
2082 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op
), Pmode
))
2090 else if (! strcmp (XSTR (op
, 0), toc_label_name
))
2099 return (constant_pool_expr_1 (XEXP (op
, 0), have_sym
, have_toc
)
2100 && constant_pool_expr_1 (XEXP (op
, 1), have_sym
, have_toc
));
2102 return constant_pool_expr_1 (XEXP (op
, 0), have_sym
, have_toc
);
2111 constant_pool_expr_p (op
)
2116 return constant_pool_expr_1 (op
, &have_sym
, &have_toc
) && have_sym
;
2120 toc_relative_expr_p (op
)
2125 return constant_pool_expr_1 (op
, &have_sym
, &have_toc
) && have_toc
;
2128 /* Try machine-dependent ways of modifying an illegitimate address
2129 to be legitimate. If we find one, return the new, valid address.
2130 This is used from only one place: `memory_address' in explow.c.
2132 OLDX is the address as it was before break_out_memory_refs was
2133 called. In some cases it is useful to look at this to decide what
2136 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2138 It is always safe for this function to do nothing. It exists to
2139 recognize opportunities to optimize the output.
2141 On RS/6000, first check for the sum of a register with a constant
2142 integer that is out of range. If so, generate code to add the
2143 constant with the low-order 16 bits masked to the register and force
2144 this result into another register (this can be done with `cau').
2145 Then generate an address of REG+(CONST&0xffff), allowing for the
2146 possibility of bit 16 being a one.
2148 Then check for the sum of a register and something not constant, try to
2149 load the other things into a register and return the sum. */
2151 rs6000_legitimize_address (x
, oldx
, mode
)
2153 rtx oldx ATTRIBUTE_UNUSED
;
2154 enum machine_mode mode
;
2156 if (GET_CODE (x
) == PLUS
2157 && GET_CODE (XEXP (x
, 0)) == REG
2158 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2159 && (unsigned HOST_WIDE_INT
) (INTVAL (XEXP (x
, 1)) + 0x8000) >= 0x10000)
2161 HOST_WIDE_INT high_int
, low_int
;
2163 low_int
= ((INTVAL (XEXP (x
, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2164 high_int
= INTVAL (XEXP (x
, 1)) - low_int
;
2165 sum
= force_operand (gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
2166 GEN_INT (high_int
)), 0);
2167 return gen_rtx_PLUS (Pmode
, sum
, GEN_INT (low_int
));
2169 else if (GET_CODE (x
) == PLUS
2170 && GET_CODE (XEXP (x
, 0)) == REG
2171 && GET_CODE (XEXP (x
, 1)) != CONST_INT
2172 && GET_MODE_NUNITS (mode
) == 1
2173 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
)
2175 || (mode
!= DFmode
&& mode
!= TFmode
))
2176 && (TARGET_POWERPC64
|| mode
!= DImode
)
2179 return gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
2180 force_reg (Pmode
, force_operand (XEXP (x
, 1), 0)));
2182 else if (ALTIVEC_VECTOR_MODE (mode
))
2186 /* Make sure both operands are registers. */
2187 if (GET_CODE (x
) == PLUS
)
2188 return gen_rtx_PLUS (Pmode
, force_reg (Pmode
, XEXP (x
, 0)),
2189 force_reg (Pmode
, XEXP (x
, 1)));
2191 reg
= force_reg (Pmode
, x
);
2194 else if (SPE_VECTOR_MODE (mode
))
2196 /* We accept [reg + reg] and [reg + OFFSET]. */
2198 if (GET_CODE (x
) == PLUS
)
2200 rtx op1
= XEXP (x
, 0);
2201 rtx op2
= XEXP (x
, 1);
2203 op1
= force_reg (Pmode
, op1
);
2205 if (GET_CODE (op2
) != REG
2206 && (GET_CODE (op2
) != CONST_INT
2207 || !SPE_CONST_OFFSET_OK (INTVAL (op2
))))
2208 op2
= force_reg (Pmode
, op2
);
2210 return gen_rtx_PLUS (Pmode
, op1
, op2
);
2213 return force_reg (Pmode
, x
);
2215 else if (TARGET_ELF
&& TARGET_32BIT
&& TARGET_NO_TOC
&& ! flag_pic
2216 && GET_CODE (x
) != CONST_INT
2217 && GET_CODE (x
) != CONST_DOUBLE
2219 && GET_MODE_NUNITS (mode
) == 1
2220 && (GET_MODE_BITSIZE (mode
) <= 32
2221 || ((TARGET_HARD_FLOAT
&& TARGET_FPRS
) && mode
== DFmode
)))
2223 rtx reg
= gen_reg_rtx (Pmode
);
2224 emit_insn (gen_elf_high (reg
, (x
)));
2225 return gen_rtx_LO_SUM (Pmode
, reg
, (x
));
2227 else if (TARGET_MACHO
&& TARGET_32BIT
&& TARGET_NO_TOC
2229 && GET_CODE (x
) != CONST_INT
2230 && GET_CODE (x
) != CONST_DOUBLE
2232 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
) || mode
!= DFmode
)
2236 rtx reg
= gen_reg_rtx (Pmode
);
2237 emit_insn (gen_macho_high (reg
, (x
)));
2238 return gen_rtx_LO_SUM (Pmode
, reg
, (x
));
2241 && CONSTANT_POOL_EXPR_P (x
)
2242 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x
), Pmode
))
2244 return create_TOC_reference (x
);
2250 /* The convention appears to be to define this wherever it is used.
2251 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2252 is now used here. */
2253 #ifndef REG_MODE_OK_FOR_BASE_P
2254 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2257 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2258 replace the input X, or the original X if no replacement is called for.
2259 The output parameter *WIN is 1 if the calling macro should goto WIN,
2262 For RS/6000, we wish to handle large displacements off a base
2263 register by splitting the addend across an addiu/addis and the mem insn.
2264 This cuts number of extra insns needed from 3 to 1.
2266 On Darwin, we use this to generate code for floating point constants.
2267 A movsf_low is generated so we wind up with 2 instructions rather than 3.
2268 The Darwin code is inside #if TARGET_MACHO because only then is
2269 machopic_function_base_name() defined. */
2271 rs6000_legitimize_reload_address (x
, mode
, opnum
, type
, ind_levels
, win
)
2273 enum machine_mode mode
;
2276 int ind_levels ATTRIBUTE_UNUSED
;
2279 /* We must recognize output that we have already generated ourselves. */
2280 if (GET_CODE (x
) == PLUS
2281 && GET_CODE (XEXP (x
, 0)) == PLUS
2282 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == REG
2283 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
2284 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2286 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2287 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
2288 opnum
, (enum reload_type
)type
);
2294 if (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
2295 && GET_CODE (x
) == LO_SUM
2296 && GET_CODE (XEXP (x
, 0)) == PLUS
2297 && XEXP (XEXP (x
, 0), 0) == pic_offset_table_rtx
2298 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == HIGH
2299 && GET_CODE (XEXP (XEXP (XEXP (x
, 0), 1), 0)) == CONST
2300 && XEXP (XEXP (XEXP (x
, 0), 1), 0) == XEXP (x
, 1)
2301 && GET_CODE (XEXP (XEXP (x
, 1), 0)) == MINUS
2302 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 0)) == SYMBOL_REF
2303 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 1)) == SYMBOL_REF
)
2305 /* Result of previous invocation of this function on Darwin
2306 floating point constant. */
2307 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2308 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
2309 opnum
, (enum reload_type
)type
);
2314 if (GET_CODE (x
) == PLUS
2315 && GET_CODE (XEXP (x
, 0)) == REG
2316 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
2317 && REG_MODE_OK_FOR_BASE_P (XEXP (x
, 0), mode
)
2318 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2319 && !SPE_VECTOR_MODE (mode
)
2320 && !ALTIVEC_VECTOR_MODE (mode
))
2322 HOST_WIDE_INT val
= INTVAL (XEXP (x
, 1));
2323 HOST_WIDE_INT low
= ((val
& 0xffff) ^ 0x8000) - 0x8000;
2325 = (((val
- low
) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2327 /* Check for 32-bit overflow. */
2328 if (high
+ low
!= val
)
2334 /* Reload the high part into a base reg; leave the low part
2335 in the mem directly. */
2337 x
= gen_rtx_PLUS (GET_MODE (x
),
2338 gen_rtx_PLUS (GET_MODE (x
), XEXP (x
, 0),
2342 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2343 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
2344 opnum
, (enum reload_type
)type
);
2349 if (GET_CODE (x
) == SYMBOL_REF
2350 && DEFAULT_ABI
== ABI_DARWIN
2351 && !ALTIVEC_VECTOR_MODE (mode
)
2354 /* Darwin load of floating point constant. */
2355 rtx offset
= gen_rtx (CONST
, Pmode
,
2356 gen_rtx (MINUS
, Pmode
, x
,
2357 gen_rtx (SYMBOL_REF
, Pmode
,
2358 machopic_function_base_name ())));
2359 x
= gen_rtx (LO_SUM
, GET_MODE (x
),
2360 gen_rtx (PLUS
, Pmode
, pic_offset_table_rtx
,
2361 gen_rtx (HIGH
, Pmode
, offset
)), offset
);
2362 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2363 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
2364 opnum
, (enum reload_type
)type
);
2370 && CONSTANT_POOL_EXPR_P (x
)
2371 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x
), mode
))
2373 (x
) = create_TOC_reference (x
);
2381 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2382 that is a valid memory address for an instruction.
2383 The MODE argument is the machine mode for the MEM expression
2384 that wants to use this address.
2386 On the RS/6000, there are four valid address: a SYMBOL_REF that
2387 refers to a constant pool entry of an address (or the sum of it
2388 plus a constant), a short (16-bit signed) constant plus a register,
2389 the sum of two registers, or a register indirect, possibly with an
2390 auto-increment. For DFmode and DImode with a constant plus register,
2391 we must ensure that both words are addressable or PowerPC64 with offset
2394 For modes spanning multiple registers (DFmode in 32-bit GPRs,
2395 32-bit DImode, TImode), indexed addressing cannot be used because
2396 adjacent memory cells are accessed by adding word-sized offsets
2397 during assembly output. */
2399 rs6000_legitimate_address (mode
, x
, reg_ok_strict
)
2400 enum machine_mode mode
;
2404 if (LEGITIMATE_INDIRECT_ADDRESS_P (x
, reg_ok_strict
))
2406 if ((GET_CODE (x
) == PRE_INC
|| GET_CODE (x
) == PRE_DEC
)
2407 && !ALTIVEC_VECTOR_MODE (mode
)
2408 && !SPE_VECTOR_MODE (mode
)
2410 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x
, 0), reg_ok_strict
))
2412 if (LEGITIMATE_SMALL_DATA_P (mode
, x
))
2414 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x
))
2416 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2418 && GET_CODE (x
) == PLUS
2419 && GET_CODE (XEXP (x
, 0)) == REG
2420 && XEXP (x
, 0) == virtual_stack_vars_rtx
2421 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2423 if (LEGITIMATE_OFFSET_ADDRESS_P (mode
, x
, reg_ok_strict
))
2426 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
)
2428 || (mode
!= DFmode
&& mode
!= TFmode
))
2429 && (TARGET_POWERPC64
|| mode
!= DImode
)
2430 && LEGITIMATE_INDEXED_ADDRESS_P (x
, reg_ok_strict
))
2432 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode
, x
, reg_ok_strict
))
2437 /* Try to output insns to set TARGET equal to the constant C if it can
2438 be done in less than N insns. Do all computations in MODE.
2439 Returns the place where the output has been placed if it can be
2440 done and the insns have been emitted. If it would take more than N
2441 insns, zero is returned and no insns and emitted. */
2444 rs6000_emit_set_const (dest
, mode
, source
, n
)
2446 enum machine_mode mode
;
2447 int n ATTRIBUTE_UNUSED
;
2449 rtx result
, insn
, set
;
2450 HOST_WIDE_INT c0
, c1
;
2452 if (mode
== QImode
|| mode
== HImode
)
2455 dest
= gen_reg_rtx (mode
);
2456 emit_insn (gen_rtx_SET (VOIDmode
, dest
, source
));
2459 else if (mode
== SImode
)
2461 result
= no_new_pseudos
? dest
: gen_reg_rtx (SImode
);
2463 emit_insn (gen_rtx_SET (VOIDmode
, result
,
2464 GEN_INT (INTVAL (source
)
2465 & (~ (HOST_WIDE_INT
) 0xffff))));
2466 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
2467 gen_rtx_IOR (SImode
, result
,
2468 GEN_INT (INTVAL (source
) & 0xffff))));
2471 else if (mode
== DImode
)
2473 if (GET_CODE (source
) == CONST_INT
)
2475 c0
= INTVAL (source
);
2478 else if (GET_CODE (source
) == CONST_DOUBLE
)
2480 #if HOST_BITS_PER_WIDE_INT >= 64
2481 c0
= CONST_DOUBLE_LOW (source
);
2484 c0
= CONST_DOUBLE_LOW (source
);
2485 c1
= CONST_DOUBLE_HIGH (source
);
2491 result
= rs6000_emit_set_long_const (dest
, c0
, c1
);
2496 insn
= get_last_insn ();
2497 set
= single_set (insn
);
2498 if (! CONSTANT_P (SET_SRC (set
)))
2499 set_unique_reg_note (insn
, REG_EQUAL
, source
);
2504 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2505 fall back to a straight forward decomposition. We do this to avoid
2506 exponential run times encountered when looking for longer sequences
2507 with rs6000_emit_set_const. */
2509 rs6000_emit_set_long_const (dest
, c1
, c2
)
2511 HOST_WIDE_INT c1
, c2
;
2513 if (!TARGET_POWERPC64
)
2515 rtx operand1
, operand2
;
2517 operand1
= operand_subword_force (dest
, WORDS_BIG_ENDIAN
== 0,
2519 operand2
= operand_subword_force (dest
, WORDS_BIG_ENDIAN
!= 0,
2521 emit_move_insn (operand1
, GEN_INT (c1
));
2522 emit_move_insn (operand2
, GEN_INT (c2
));
2526 HOST_WIDE_INT ud1
, ud2
, ud3
, ud4
;
2529 ud2
= (c1
& 0xffff0000) >> 16;
2530 #if HOST_BITS_PER_WIDE_INT >= 64
2534 ud4
= (c2
& 0xffff0000) >> 16;
2536 if ((ud4
== 0xffff && ud3
== 0xffff && ud2
== 0xffff && (ud1
& 0x8000))
2537 || (ud4
== 0 && ud3
== 0 && ud2
== 0 && ! (ud1
& 0x8000)))
2540 emit_move_insn (dest
, GEN_INT (((ud1
^ 0x8000) - 0x8000)));
2542 emit_move_insn (dest
, GEN_INT (ud1
));
2545 else if ((ud4
== 0xffff && ud3
== 0xffff && (ud2
& 0x8000))
2546 || (ud4
== 0 && ud3
== 0 && ! (ud2
& 0x8000)))
2549 emit_move_insn (dest
, GEN_INT (((ud2
<< 16) ^ 0x80000000)
2552 emit_move_insn (dest
, GEN_INT (ud2
<< 16));
2554 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
2556 else if ((ud4
== 0xffff && (ud3
& 0x8000))
2557 || (ud4
== 0 && ! (ud3
& 0x8000)))
2560 emit_move_insn (dest
, GEN_INT (((ud3
<< 16) ^ 0x80000000)
2563 emit_move_insn (dest
, GEN_INT (ud3
<< 16));
2566 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud2
)));
2567 emit_move_insn (dest
, gen_rtx_ASHIFT (DImode
, dest
, GEN_INT (16)));
2569 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
2574 emit_move_insn (dest
, GEN_INT (((ud4
<< 16) ^ 0x80000000)
2577 emit_move_insn (dest
, GEN_INT (ud4
<< 16));
2580 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud3
)));
2582 emit_move_insn (dest
, gen_rtx_ASHIFT (DImode
, dest
, GEN_INT (32)));
2584 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
,
2585 GEN_INT (ud2
<< 16)));
2587 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
2593 /* Emit a move from SOURCE to DEST in mode MODE. */
2595 rs6000_emit_move (dest
, source
, mode
)
2598 enum machine_mode mode
;
2602 operands
[1] = source
;
2604 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
2605 if (GET_CODE (operands
[1]) == CONST_DOUBLE
2606 && ! FLOAT_MODE_P (mode
)
2607 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
2609 /* FIXME. This should never happen. */
2610 /* Since it seems that it does, do the safe thing and convert
2612 operands
[1] = gen_int_mode (CONST_DOUBLE_LOW (operands
[1]), mode
);
2614 if (GET_CODE (operands
[1]) == CONST_DOUBLE
2615 && ! FLOAT_MODE_P (mode
)
2616 && ((CONST_DOUBLE_HIGH (operands
[1]) == 0
2617 && CONST_DOUBLE_LOW (operands
[1]) >= 0)
2618 || (CONST_DOUBLE_HIGH (operands
[1]) == -1
2619 && CONST_DOUBLE_LOW (operands
[1]) < 0)))
2622 /* Check if GCC is setting up a block move that will end up using FP
2623 registers as temporaries. We must make sure this is acceptable. */
2624 if (GET_CODE (operands
[0]) == MEM
2625 && GET_CODE (operands
[1]) == MEM
2627 && (SLOW_UNALIGNED_ACCESS (DImode
, MEM_ALIGN (operands
[0]))
2628 || SLOW_UNALIGNED_ACCESS (DImode
, MEM_ALIGN (operands
[1])))
2629 && ! (SLOW_UNALIGNED_ACCESS (SImode
, (MEM_ALIGN (operands
[0]) > 32
2630 ? 32 : MEM_ALIGN (operands
[0])))
2631 || SLOW_UNALIGNED_ACCESS (SImode
, (MEM_ALIGN (operands
[1]) > 32
2633 : MEM_ALIGN (operands
[1]))))
2634 && ! MEM_VOLATILE_P (operands
[0])
2635 && ! MEM_VOLATILE_P (operands
[1]))
2637 emit_move_insn (adjust_address (operands
[0], SImode
, 0),
2638 adjust_address (operands
[1], SImode
, 0));
2639 emit_move_insn (adjust_address (operands
[0], SImode
, 4),
2640 adjust_address (operands
[1], SImode
, 4));
2644 if (!no_new_pseudos
)
2646 if (GET_CODE (operands
[1]) == MEM
&& optimize
> 0
2647 && (mode
== QImode
|| mode
== HImode
|| mode
== SImode
)
2648 && GET_MODE_SIZE (mode
) < GET_MODE_SIZE (word_mode
))
2650 rtx reg
= gen_reg_rtx (word_mode
);
2652 emit_insn (gen_rtx_SET (word_mode
, reg
,
2653 gen_rtx_ZERO_EXTEND (word_mode
,
2655 operands
[1] = gen_lowpart (mode
, reg
);
2657 if (GET_CODE (operands
[0]) != REG
)
2658 operands
[1] = force_reg (mode
, operands
[1]);
2661 if (mode
== SFmode
&& ! TARGET_POWERPC
2662 && TARGET_HARD_FLOAT
&& TARGET_FPRS
2663 && GET_CODE (operands
[0]) == MEM
)
2667 if (reload_in_progress
|| reload_completed
)
2668 regnum
= true_regnum (operands
[1]);
2669 else if (GET_CODE (operands
[1]) == REG
)
2670 regnum
= REGNO (operands
[1]);
2674 /* If operands[1] is a register, on POWER it may have
2675 double-precision data in it, so truncate it to single
2677 if (FP_REGNO_P (regnum
) || regnum
>= FIRST_PSEUDO_REGISTER
)
2680 newreg
= (no_new_pseudos
? operands
[1] : gen_reg_rtx (mode
));
2681 emit_insn (gen_aux_truncdfsf2 (newreg
, operands
[1]));
2682 operands
[1] = newreg
;
2686 /* Handle the case where reload calls us with an invalid address;
2687 and the case of CONSTANT_P_RTX. */
2688 if (!ALTIVEC_VECTOR_MODE (mode
)
2689 && (! general_operand (operands
[1], mode
)
2690 || ! nonimmediate_operand (operands
[0], mode
)
2691 || GET_CODE (operands
[1]) == CONSTANT_P_RTX
))
2693 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2697 /* FIXME: In the long term, this switch statement should go away
2698 and be replaced by a sequence of tests based on things like
2704 if (CONSTANT_P (operands
[1])
2705 && GET_CODE (operands
[1]) != CONST_INT
)
2706 operands
[1] = force_const_mem (mode
, operands
[1]);
2712 if (CONSTANT_P (operands
[1])
2713 && ! easy_fp_constant (operands
[1], mode
))
2714 operands
[1] = force_const_mem (mode
, operands
[1]);
2725 if (CONSTANT_P (operands
[1])
2726 && !easy_vector_constant (operands
[1]))
2727 operands
[1] = force_const_mem (mode
, operands
[1]);
2732 /* Use default pattern for address of ELF small data */
2735 && DEFAULT_ABI
== ABI_V4
2736 && (GET_CODE (operands
[1]) == SYMBOL_REF
2737 || GET_CODE (operands
[1]) == CONST
)
2738 && small_data_operand (operands
[1], mode
))
2740 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2744 if (DEFAULT_ABI
== ABI_V4
2745 && mode
== Pmode
&& mode
== SImode
2746 && flag_pic
== 1 && got_operand (operands
[1], mode
))
2748 emit_insn (gen_movsi_got (operands
[0], operands
[1]));
2752 if ((TARGET_ELF
|| DEFAULT_ABI
== ABI_DARWIN
)
2753 && TARGET_NO_TOC
&& ! flag_pic
2755 && CONSTANT_P (operands
[1])
2756 && GET_CODE (operands
[1]) != HIGH
2757 && GET_CODE (operands
[1]) != CONST_INT
)
2759 rtx target
= (no_new_pseudos
? operands
[0] : gen_reg_rtx (mode
));
2761 /* If this is a function address on -mcall-aixdesc,
2762 convert it to the address of the descriptor. */
2763 if (DEFAULT_ABI
== ABI_AIX
2764 && GET_CODE (operands
[1]) == SYMBOL_REF
2765 && XSTR (operands
[1], 0)[0] == '.')
2767 const char *name
= XSTR (operands
[1], 0);
2769 while (*name
== '.')
2771 new_ref
= gen_rtx_SYMBOL_REF (Pmode
, name
);
2772 CONSTANT_POOL_ADDRESS_P (new_ref
)
2773 = CONSTANT_POOL_ADDRESS_P (operands
[1]);
2774 SYMBOL_REF_FLAG (new_ref
) = SYMBOL_REF_FLAG (operands
[1]);
2775 SYMBOL_REF_USED (new_ref
) = SYMBOL_REF_USED (operands
[1]);
2776 operands
[1] = new_ref
;
2779 if (DEFAULT_ABI
== ABI_DARWIN
)
2781 emit_insn (gen_macho_high (target
, operands
[1]));
2782 emit_insn (gen_macho_low (operands
[0], target
, operands
[1]));
2786 emit_insn (gen_elf_high (target
, operands
[1]));
2787 emit_insn (gen_elf_low (operands
[0], target
, operands
[1]));
2791 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2792 and we have put it in the TOC, we just need to make a TOC-relative
2795 && GET_CODE (operands
[1]) == SYMBOL_REF
2796 && CONSTANT_POOL_EXPR_P (operands
[1])
2797 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands
[1]),
2798 get_pool_mode (operands
[1])))
2800 operands
[1] = create_TOC_reference (operands
[1]);
2802 else if (mode
== Pmode
2803 && CONSTANT_P (operands
[1])
2804 && ((GET_CODE (operands
[1]) != CONST_INT
2805 && ! easy_fp_constant (operands
[1], mode
))
2806 || (GET_CODE (operands
[1]) == CONST_INT
2807 && num_insns_constant (operands
[1], mode
) > 2)
2808 || (GET_CODE (operands
[0]) == REG
2809 && FP_REGNO_P (REGNO (operands
[0]))))
2810 && GET_CODE (operands
[1]) != HIGH
2811 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands
[1])
2812 && ! TOC_RELATIVE_EXPR_P (operands
[1]))
2814 /* Emit a USE operation so that the constant isn't deleted if
2815 expensive optimizations are turned on because nobody
2816 references it. This should only be done for operands that
2817 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2818 This should not be done for operands that contain LABEL_REFs.
2819 For now, we just handle the obvious case. */
2820 if (GET_CODE (operands
[1]) != LABEL_REF
)
2821 emit_insn (gen_rtx_USE (VOIDmode
, operands
[1]));
2824 /* Darwin uses a special PIC legitimizer. */
2825 if (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
)
2828 rs6000_machopic_legitimize_pic_address (operands
[1], mode
,
2830 if (operands
[0] != operands
[1])
2831 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2836 /* If we are to limit the number of things we put in the TOC and
2837 this is a symbol plus a constant we can add in one insn,
2838 just put the symbol in the TOC and add the constant. Don't do
2839 this if reload is in progress. */
2840 if (GET_CODE (operands
[1]) == CONST
2841 && TARGET_NO_SUM_IN_TOC
&& ! reload_in_progress
2842 && GET_CODE (XEXP (operands
[1], 0)) == PLUS
2843 && add_operand (XEXP (XEXP (operands
[1], 0), 1), mode
)
2844 && (GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == LABEL_REF
2845 || GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == SYMBOL_REF
)
2846 && ! side_effects_p (operands
[0]))
2849 force_const_mem (mode
, XEXP (XEXP (operands
[1], 0), 0));
2850 rtx other
= XEXP (XEXP (operands
[1], 0), 1);
2852 sym
= force_reg (mode
, sym
);
2854 emit_insn (gen_addsi3 (operands
[0], sym
, other
));
2856 emit_insn (gen_adddi3 (operands
[0], sym
, other
));
2860 operands
[1] = force_const_mem (mode
, operands
[1]);
2863 && CONSTANT_POOL_EXPR_P (XEXP (operands
[1], 0))
2864 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
2865 get_pool_constant (XEXP (operands
[1], 0)),
2866 get_pool_mode (XEXP (operands
[1], 0))))
2869 = gen_rtx_MEM (mode
,
2870 create_TOC_reference (XEXP (operands
[1], 0)));
2871 set_mem_alias_set (operands
[1], get_TOC_alias_set ());
2872 RTX_UNCHANGING_P (operands
[1]) = 1;
2878 if (GET_CODE (operands
[0]) == MEM
2879 && GET_CODE (XEXP (operands
[0], 0)) != REG
2880 && ! reload_in_progress
)
2882 = replace_equiv_address (operands
[0],
2883 copy_addr_to_reg (XEXP (operands
[0], 0)));
2885 if (GET_CODE (operands
[1]) == MEM
2886 && GET_CODE (XEXP (operands
[1], 0)) != REG
2887 && ! reload_in_progress
)
2889 = replace_equiv_address (operands
[1],
2890 copy_addr_to_reg (XEXP (operands
[1], 0)));
2893 emit_insn (gen_rtx_PARALLEL (VOIDmode
,
2895 gen_rtx_SET (VOIDmode
,
2896 operands
[0], operands
[1]),
2897 gen_rtx_CLOBBER (VOIDmode
,
2898 gen_rtx_SCRATCH (SImode
)))));
2907 /* Above, we may have called force_const_mem which may have returned
2908 an invalid address. If we can, fix this up; otherwise, reload will
2909 have to deal with it. */
2910 if (GET_CODE (operands
[1]) == MEM
2911 && ! memory_address_p (mode
, XEXP (operands
[1], 0))
2912 && ! reload_in_progress
)
2913 operands
[1] = adjust_address (operands
[1], mode
, 0);
2915 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2919 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2920 for a call to a function whose data type is FNTYPE.
2921 For a library call, FNTYPE is 0.
2923 For incoming args we set the number of arguments in the prototype large
2924 so we never return a PARALLEL. */
2927 init_cumulative_args (cum
, fntype
, libname
, incoming
)
2928 CUMULATIVE_ARGS
*cum
;
2930 rtx libname ATTRIBUTE_UNUSED
;
2933 static CUMULATIVE_ARGS zero_cumulative
;
2935 *cum
= zero_cumulative
;
2937 cum
->fregno
= FP_ARG_MIN_REG
;
2938 cum
->vregno
= ALTIVEC_ARG_MIN_REG
;
2939 cum
->prototype
= (fntype
&& TYPE_ARG_TYPES (fntype
));
2940 cum
->call_cookie
= CALL_NORMAL
;
2941 cum
->sysv_gregno
= GP_ARG_MIN_REG
;
2944 cum
->nargs_prototype
= 1000; /* don't return a PARALLEL */
2946 else if (cum
->prototype
)
2947 cum
->nargs_prototype
= (list_length (TYPE_ARG_TYPES (fntype
)) - 1
2948 + (TYPE_MODE (TREE_TYPE (fntype
)) == BLKmode
2949 || RETURN_IN_MEMORY (TREE_TYPE (fntype
))));
2952 cum
->nargs_prototype
= 0;
2954 cum
->orig_nargs
= cum
->nargs_prototype
;
2956 /* Check for a longcall attribute. */
2958 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype
))
2959 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype
)))
2960 cum
->call_cookie
= CALL_LONG
;
2962 if (TARGET_DEBUG_ARG
)
2964 fprintf (stderr
, "\ninit_cumulative_args:");
2967 tree ret_type
= TREE_TYPE (fntype
);
2968 fprintf (stderr
, " ret code = %s,",
2969 tree_code_name
[ (int)TREE_CODE (ret_type
) ]);
2972 if (cum
->call_cookie
& CALL_LONG
)
2973 fprintf (stderr
, " longcall,");
2975 fprintf (stderr
, " proto = %d, nargs = %d\n",
2976 cum
->prototype
, cum
->nargs_prototype
);
2980 /* If defined, a C expression which determines whether, and in which
2981 direction, to pad out an argument with extra space. The value
2982 should be of type `enum direction': either `upward' to pad above
2983 the argument, `downward' to pad below, or `none' to inhibit
2986 For the AIX ABI structs are always stored left shifted in their
2990 function_arg_padding (mode
, type
)
2991 enum machine_mode mode
;
2994 if (type
!= 0 && AGGREGATE_TYPE_P (type
))
2997 /* This is the default definition. */
2998 return (! BYTES_BIG_ENDIAN
3001 ? (type
&& TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
3002 && int_size_in_bytes (type
) < (PARM_BOUNDARY
/ BITS_PER_UNIT
))
3003 : GET_MODE_BITSIZE (mode
) < PARM_BOUNDARY
)
3004 ? downward
: upward
));
3007 /* If defined, a C expression that gives the alignment boundary, in bits,
3008 of an argument with the specified mode and type. If it is not defined,
3009 PARM_BOUNDARY is used for all arguments.
3011 V.4 wants long longs to be double word aligned. */
3014 function_arg_boundary (mode
, type
)
3015 enum machine_mode mode
;
3016 tree type ATTRIBUTE_UNUSED
;
3018 if (DEFAULT_ABI
== ABI_V4
&& (mode
== DImode
|| mode
== DFmode
))
3020 else if (SPE_VECTOR_MODE (mode
))
3022 else if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
3025 return PARM_BOUNDARY
;
3028 /* Update the data in CUM to advance over an argument
3029 of mode MODE and data type TYPE.
3030 (TYPE is null for libcalls where that information may not be available.) */
3033 function_arg_advance (cum
, mode
, type
, named
)
3034 CUMULATIVE_ARGS
*cum
;
3035 enum machine_mode mode
;
3039 cum
->nargs_prototype
--;
3041 if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
3043 if (cum
->vregno
<= ALTIVEC_ARG_MAX_REG
&& cum
->nargs_prototype
>= 0)
3046 cum
->words
+= RS6000_ARG_SIZE (mode
, type
);
3048 else if (TARGET_SPE_ABI
&& TARGET_SPE
&& SPE_VECTOR_MODE (mode
)
3049 && named
&& cum
->sysv_gregno
<= GP_ARG_MAX_REG
)
3051 else if (DEFAULT_ABI
== ABI_V4
)
3053 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
3054 && (mode
== SFmode
|| mode
== DFmode
))
3056 if (cum
->fregno
<= FP_ARG_V4_MAX_REG
)
3061 cum
->words
+= cum
->words
& 1;
3062 cum
->words
+= RS6000_ARG_SIZE (mode
, type
);
3068 int gregno
= cum
->sysv_gregno
;
3070 /* Aggregates and IEEE quad get passed by reference. */
3071 if ((type
&& AGGREGATE_TYPE_P (type
))
3075 n_words
= RS6000_ARG_SIZE (mode
, type
);
3077 /* Long long and SPE vectors are put in odd registers. */
3078 if (n_words
== 2 && (gregno
& 1) == 0)
3081 /* Long long and SPE vectors are not split between registers
3083 if (gregno
+ n_words
- 1 > GP_ARG_MAX_REG
)
3085 /* Long long is aligned on the stack. */
3087 cum
->words
+= cum
->words
& 1;
3088 cum
->words
+= n_words
;
3091 /* Note: continuing to accumulate gregno past when we've started
3092 spilling to the stack indicates the fact that we've started
3093 spilling to the stack to expand_builtin_saveregs. */
3094 cum
->sysv_gregno
= gregno
+ n_words
;
3097 if (TARGET_DEBUG_ARG
)
3099 fprintf (stderr
, "function_adv: words = %2d, fregno = %2d, ",
3100 cum
->words
, cum
->fregno
);
3101 fprintf (stderr
, "gregno = %2d, nargs = %4d, proto = %d, ",
3102 cum
->sysv_gregno
, cum
->nargs_prototype
, cum
->prototype
);
3103 fprintf (stderr
, "mode = %4s, named = %d\n",
3104 GET_MODE_NAME (mode
), named
);
3109 int align
= (TARGET_32BIT
&& (cum
->words
& 1) != 0
3110 && function_arg_boundary (mode
, type
) == 64) ? 1 : 0;
3112 cum
->words
+= align
+ RS6000_ARG_SIZE (mode
, type
);
3114 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
3115 && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3116 cum
->fregno
+= (mode
== TFmode
? 2 : 1);
3118 if (TARGET_DEBUG_ARG
)
3120 fprintf (stderr
, "function_adv: words = %2d, fregno = %2d, ",
3121 cum
->words
, cum
->fregno
);
3122 fprintf (stderr
, "nargs = %4d, proto = %d, mode = %4s, ",
3123 cum
->nargs_prototype
, cum
->prototype
, GET_MODE_NAME (mode
));
3124 fprintf (stderr
, "named = %d, align = %d\n", named
, align
);
3129 /* Determine where to put an argument to a function.
3130 Value is zero to push the argument on the stack,
3131 or a hard register in which to store the argument.
3133 MODE is the argument's machine mode.
3134 TYPE is the data type of the argument (as a tree).
3135 This is null for libcalls where that information may
3137 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3138 the preceding args and about the function being called.
3139 NAMED is nonzero if this argument is a named parameter
3140 (otherwise it is an extra parameter matching an ellipsis).
3142 On RS/6000 the first eight words of non-FP are normally in registers
3143 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3144 Under V.4, the first 8 FP args are in registers.
3146 If this is floating-point and no prototype is specified, we use
3147 both an FP and integer register (or possibly FP reg and stack). Library
3148 functions (when TYPE is zero) always have the proper types for args,
3149 so we can pass the FP value just in one register. emit_library_function
3150 doesn't support PARALLEL anyway. */
3153 function_arg (cum
, mode
, type
, named
)
3154 CUMULATIVE_ARGS
*cum
;
3155 enum machine_mode mode
;
3159 enum rs6000_abi abi
= DEFAULT_ABI
;
3161 /* Return a marker to indicate whether CR1 needs to set or clear the
3162 bit that V.4 uses to say fp args were passed in registers.
3163 Assume that we don't need the marker for software floating point,
3164 or compiler generated library calls. */
3165 if (mode
== VOIDmode
)
3168 && cum
->nargs_prototype
< 0
3169 && type
&& (cum
->prototype
|| TARGET_NO_PROTOTYPE
))
3171 /* For the SPE, we need to crxor CR6 always. */
3173 return GEN_INT (cum
->call_cookie
| CALL_V4_SET_FP_ARGS
);
3174 else if (TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3175 return GEN_INT (cum
->call_cookie
3176 | ((cum
->fregno
== FP_ARG_MIN_REG
)
3177 ? CALL_V4_SET_FP_ARGS
3178 : CALL_V4_CLEAR_FP_ARGS
));
3181 return GEN_INT (cum
->call_cookie
);
3184 if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
3186 if (named
&& cum
->vregno
<= ALTIVEC_ARG_MAX_REG
)
3187 return gen_rtx_REG (mode
, cum
->vregno
);
3191 else if (TARGET_SPE_ABI
&& TARGET_SPE
&& SPE_VECTOR_MODE (mode
) && named
)
3193 if (cum
->sysv_gregno
<= GP_ARG_MAX_REG
)
3194 return gen_rtx_REG (mode
, cum
->sysv_gregno
);
3198 else if (abi
== ABI_V4
)
3200 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
3201 && (mode
== SFmode
|| mode
== DFmode
))
3203 if (cum
->fregno
<= FP_ARG_V4_MAX_REG
)
3204 return gen_rtx_REG (mode
, cum
->fregno
);
3211 int gregno
= cum
->sysv_gregno
;
3213 /* Aggregates and IEEE quad get passed by reference. */
3214 if ((type
&& AGGREGATE_TYPE_P (type
))
3218 n_words
= RS6000_ARG_SIZE (mode
, type
);
3220 /* Long long and SPE vectors are put in odd registers. */
3221 if (n_words
== 2 && (gregno
& 1) == 0)
3224 /* Long long and SPE vectors are not split between registers
3226 if (gregno
+ n_words
- 1 <= GP_ARG_MAX_REG
)
3228 /* SPE vectors in ... get split into 2 registers. */
3229 if (TARGET_SPE
&& TARGET_SPE_ABI
3230 && SPE_VECTOR_MODE (mode
) && !named
)
3233 enum machine_mode m
= SImode
;
3235 r1
= gen_rtx_REG (m
, gregno
);
3236 r1
= gen_rtx_EXPR_LIST (m
, r1
, const0_rtx
);
3237 r2
= gen_rtx_REG (m
, gregno
+ 1);
3238 r2
= gen_rtx_EXPR_LIST (m
, r2
, GEN_INT (4));
3239 return gen_rtx_PARALLEL (mode
, gen_rtvec (2, r1
, r2
));
3241 return gen_rtx_REG (mode
, gregno
);
3249 int align
= (TARGET_32BIT
&& (cum
->words
& 1) != 0
3250 && function_arg_boundary (mode
, type
) == 64) ? 1 : 0;
3251 int align_words
= cum
->words
+ align
;
3253 if (type
&& TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
3256 if (USE_FP_FOR_ARG_P (*cum
, mode
, type
))
3259 || ((cum
->nargs_prototype
> 0)
3260 /* IBM AIX extended its linkage convention definition always
3261 to require FP args after register save area hole on the
3263 && (DEFAULT_ABI
!= ABI_AIX
3265 || (align_words
< GP_ARG_NUM_REG
))))
3266 return gen_rtx_REG (mode
, cum
->fregno
);
3268 return gen_rtx_PARALLEL (mode
,
3270 gen_rtx_EXPR_LIST (VOIDmode
,
3271 ((align_words
>= GP_ARG_NUM_REG
)
3274 + RS6000_ARG_SIZE (mode
, type
)
3276 /* If this is partially on the stack, then
3277 we only include the portion actually
3278 in registers here. */
3279 ? gen_rtx_REG (SImode
,
3280 GP_ARG_MIN_REG
+ align_words
)
3281 : gen_rtx_REG (mode
,
3282 GP_ARG_MIN_REG
+ align_words
))),
3284 gen_rtx_EXPR_LIST (VOIDmode
,
3285 gen_rtx_REG (mode
, cum
->fregno
),
3288 else if (align_words
< GP_ARG_NUM_REG
)
3289 return gen_rtx_REG (mode
, GP_ARG_MIN_REG
+ align_words
);
3295 /* For an arg passed partly in registers and partly in memory,
3296 this is the number of registers used.
3297 For args passed entirely in registers or entirely in memory, zero. */
3300 function_arg_partial_nregs (cum
, mode
, type
, named
)
3301 CUMULATIVE_ARGS
*cum
;
3302 enum machine_mode mode
;
3304 int named ATTRIBUTE_UNUSED
;
3306 if (DEFAULT_ABI
== ABI_V4
)
3309 if (USE_FP_FOR_ARG_P (*cum
, mode
, type
)
3310 || USE_ALTIVEC_FOR_ARG_P (*cum
, mode
, type
))
3312 if (cum
->nargs_prototype
>= 0)
3316 if (cum
->words
< GP_ARG_NUM_REG
3317 && GP_ARG_NUM_REG
< (cum
->words
+ RS6000_ARG_SIZE (mode
, type
)))
3319 int ret
= GP_ARG_NUM_REG
- cum
->words
;
3320 if (ret
&& TARGET_DEBUG_ARG
)
3321 fprintf (stderr
, "function_arg_partial_nregs: %d\n", ret
);
3329 /* A C expression that indicates when an argument must be passed by
3330 reference. If nonzero for an argument, a copy of that argument is
3331 made in memory and a pointer to the argument is passed instead of
3332 the argument itself. The pointer is passed in whatever way is
3333 appropriate for passing a pointer to that type.
3335 Under V.4, structures and unions are passed by reference.
3337 As an extension to all ABIs, variable sized types are passed by
3341 function_arg_pass_by_reference (cum
, mode
, type
, named
)
3342 CUMULATIVE_ARGS
*cum ATTRIBUTE_UNUSED
;
3343 enum machine_mode mode ATTRIBUTE_UNUSED
;
3345 int named ATTRIBUTE_UNUSED
;
3347 if (DEFAULT_ABI
== ABI_V4
3348 && ((type
&& AGGREGATE_TYPE_P (type
))
3351 if (TARGET_DEBUG_ARG
)
3352 fprintf (stderr
, "function_arg_pass_by_reference: aggregate\n");
3356 return type
&& int_size_in_bytes (type
) <= 0;
3359 /* Perform any needed actions needed for a function that is receiving a
3360 variable number of arguments.
3364 MODE and TYPE are the mode and type of the current parameter.
3366 PRETEND_SIZE is a variable that should be set to the amount of stack
3367 that must be pushed by the prolog to pretend that our caller pushed
3370 Normally, this macro will push all remaining incoming registers on the
3371 stack and set PRETEND_SIZE to the length of the registers pushed. */
3374 setup_incoming_varargs (cum
, mode
, type
, pretend_size
, no_rtl
)
3375 CUMULATIVE_ARGS
*cum
;
3376 enum machine_mode mode
;
3378 int *pretend_size ATTRIBUTE_UNUSED
;
3382 CUMULATIVE_ARGS next_cum
;
3383 int reg_size
= TARGET_32BIT
? 4 : 8;
3384 rtx save_area
= NULL_RTX
, mem
;
3385 int first_reg_offset
, set
;
3389 fntype
= TREE_TYPE (current_function_decl
);
3390 stdarg_p
= (TYPE_ARG_TYPES (fntype
) != 0
3391 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
3392 != void_type_node
));
3394 /* For varargs, we do not want to skip the dummy va_dcl argument.
3395 For stdargs, we do want to skip the last named argument. */
3398 function_arg_advance (&next_cum
, mode
, type
, 1);
3400 if (DEFAULT_ABI
== ABI_V4
)
3402 /* Indicate to allocate space on the stack for varargs save area. */
3403 cfun
->machine
->sysv_varargs_p
= 1;
3405 save_area
= plus_constant (virtual_stack_vars_rtx
,
3406 - RS6000_VARARGS_SIZE
);
3408 first_reg_offset
= next_cum
.sysv_gregno
- GP_ARG_MIN_REG
;
3412 first_reg_offset
= next_cum
.words
;
3413 save_area
= virtual_incoming_args_rtx
;
3414 cfun
->machine
->sysv_varargs_p
= 0;
3416 if (MUST_PASS_IN_STACK (mode
, type
))
3417 first_reg_offset
+= RS6000_ARG_SIZE (TYPE_MODE (type
), type
);
3420 set
= get_varargs_alias_set ();
3421 if (! no_rtl
&& first_reg_offset
< GP_ARG_NUM_REG
)
3423 mem
= gen_rtx_MEM (BLKmode
,
3424 plus_constant (save_area
,
3425 first_reg_offset
* reg_size
)),
3426 set_mem_alias_set (mem
, set
);
3427 set_mem_align (mem
, BITS_PER_WORD
);
3430 (GP_ARG_MIN_REG
+ first_reg_offset
, mem
,
3431 GP_ARG_NUM_REG
- first_reg_offset
,
3432 (GP_ARG_NUM_REG
- first_reg_offset
) * UNITS_PER_WORD
);
3435 /* Save FP registers if needed. */
3436 if (DEFAULT_ABI
== ABI_V4
3437 && TARGET_HARD_FLOAT
&& TARGET_FPRS
3439 && next_cum
.fregno
<= FP_ARG_V4_MAX_REG
)
3441 int fregno
= next_cum
.fregno
;
3442 rtx cr1
= gen_rtx_REG (CCmode
, CR1_REGNO
);
3443 rtx lab
= gen_label_rtx ();
3444 int off
= (GP_ARG_NUM_REG
* reg_size
) + ((fregno
- FP_ARG_MIN_REG
) * 8);
3446 emit_jump_insn (gen_rtx_SET (VOIDmode
,
3448 gen_rtx_IF_THEN_ELSE (VOIDmode
,
3449 gen_rtx_NE (VOIDmode
, cr1
,
3451 gen_rtx_LABEL_REF (VOIDmode
, lab
),
3454 while (fregno
<= FP_ARG_V4_MAX_REG
)
3456 mem
= gen_rtx_MEM (DFmode
, plus_constant (save_area
, off
));
3457 set_mem_alias_set (mem
, set
);
3458 emit_move_insn (mem
, gen_rtx_REG (DFmode
, fregno
));
3467 /* Create the va_list data type. */
3470 rs6000_build_va_list ()
3472 tree f_gpr
, f_fpr
, f_ovf
, f_sav
, record
, type_decl
;
3474 /* For AIX, prefer 'char *' because that's what the system
3475 header files like. */
3476 if (DEFAULT_ABI
!= ABI_V4
)
3477 return build_pointer_type (char_type_node
);
3479 record
= (*lang_hooks
.types
.make_type
) (RECORD_TYPE
);
3480 type_decl
= build_decl (TYPE_DECL
, get_identifier ("__va_list_tag"), record
);
3482 f_gpr
= build_decl (FIELD_DECL
, get_identifier ("gpr"),
3483 unsigned_char_type_node
);
3484 f_fpr
= build_decl (FIELD_DECL
, get_identifier ("fpr"),
3485 unsigned_char_type_node
);
3486 f_ovf
= build_decl (FIELD_DECL
, get_identifier ("overflow_arg_area"),
3488 f_sav
= build_decl (FIELD_DECL
, get_identifier ("reg_save_area"),
3491 DECL_FIELD_CONTEXT (f_gpr
) = record
;
3492 DECL_FIELD_CONTEXT (f_fpr
) = record
;
3493 DECL_FIELD_CONTEXT (f_ovf
) = record
;
3494 DECL_FIELD_CONTEXT (f_sav
) = record
;
3496 TREE_CHAIN (record
) = type_decl
;
3497 TYPE_NAME (record
) = type_decl
;
3498 TYPE_FIELDS (record
) = f_gpr
;
3499 TREE_CHAIN (f_gpr
) = f_fpr
;
3500 TREE_CHAIN (f_fpr
) = f_ovf
;
3501 TREE_CHAIN (f_ovf
) = f_sav
;
3503 layout_type (record
);
3505 /* The correct type is an array type of one element. */
3506 return build_array_type (record
, build_index_type (size_zero_node
));
3509 /* Implement va_start. */
3512 rs6000_va_start (valist
, nextarg
)
3516 HOST_WIDE_INT words
, n_gpr
, n_fpr
;
3517 tree f_gpr
, f_fpr
, f_ovf
, f_sav
;
3518 tree gpr
, fpr
, ovf
, sav
, t
;
3520 /* Only SVR4 needs something special. */
3521 if (DEFAULT_ABI
!= ABI_V4
)
3523 std_expand_builtin_va_start (valist
, nextarg
);
3527 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
3528 f_fpr
= TREE_CHAIN (f_gpr
);
3529 f_ovf
= TREE_CHAIN (f_fpr
);
3530 f_sav
= TREE_CHAIN (f_ovf
);
3532 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
3533 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
);
3534 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
);
3535 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
);
3536 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
);
3538 /* Count number of gp and fp argument registers used. */
3539 words
= current_function_args_info
.words
;
3540 n_gpr
= current_function_args_info
.sysv_gregno
- GP_ARG_MIN_REG
;
3541 n_fpr
= current_function_args_info
.fregno
- FP_ARG_MIN_REG
;
3543 if (TARGET_DEBUG_ARG
)
3545 fputs ("va_start: words = ", stderr
);
3546 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, words
);
3547 fputs (", n_gpr = ", stderr
);
3548 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, n_gpr
);
3549 fputs (", n_fpr = ", stderr
);
3550 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, n_fpr
);
3551 putc ('\n', stderr
);
3554 t
= build (MODIFY_EXPR
, TREE_TYPE (gpr
), gpr
, build_int_2 (n_gpr
, 0));
3555 TREE_SIDE_EFFECTS (t
) = 1;
3556 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3558 t
= build (MODIFY_EXPR
, TREE_TYPE (fpr
), fpr
, build_int_2 (n_fpr
, 0));
3559 TREE_SIDE_EFFECTS (t
) = 1;
3560 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3562 /* Find the overflow area. */
3563 t
= make_tree (TREE_TYPE (ovf
), virtual_incoming_args_rtx
);
3565 t
= build (PLUS_EXPR
, TREE_TYPE (ovf
), t
,
3566 build_int_2 (words
* UNITS_PER_WORD
, 0));
3567 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
3568 TREE_SIDE_EFFECTS (t
) = 1;
3569 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3571 /* Find the register save area. */
3572 t
= make_tree (TREE_TYPE (sav
), virtual_stack_vars_rtx
);
3573 t
= build (PLUS_EXPR
, TREE_TYPE (sav
), t
,
3574 build_int_2 (-RS6000_VARARGS_SIZE
, -1));
3575 t
= build (MODIFY_EXPR
, TREE_TYPE (sav
), sav
, t
);
3576 TREE_SIDE_EFFECTS (t
) = 1;
3577 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3580 /* Implement va_arg. */
3583 rs6000_va_arg (valist
, type
)
3586 tree f_gpr
, f_fpr
, f_ovf
, f_sav
;
3587 tree gpr
, fpr
, ovf
, sav
, reg
, t
, u
;
3588 int indirect_p
, size
, rsize
, n_reg
, sav_ofs
, sav_scale
;
3589 rtx lab_false
, lab_over
, addr_rtx
, r
;
3591 if (DEFAULT_ABI
!= ABI_V4
)
3593 /* Variable sized types are passed by reference. */
3594 if (int_size_in_bytes (type
) <= 0)
3596 u
= build_pointer_type (type
);
3598 /* Args grow upward. */
3599 t
= build (POSTINCREMENT_EXPR
, TREE_TYPE (valist
), valist
,
3600 build_int_2 (POINTER_SIZE
/ BITS_PER_UNIT
, 0));
3601 TREE_SIDE_EFFECTS (t
) = 1;
3603 t
= build1 (NOP_EXPR
, build_pointer_type (u
), t
);
3604 TREE_SIDE_EFFECTS (t
) = 1;
3606 t
= build1 (INDIRECT_REF
, u
, t
);
3607 TREE_SIDE_EFFECTS (t
) = 1;
3609 return expand_expr (t
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
3612 return std_expand_builtin_va_arg (valist
, type
);
3615 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
3616 f_fpr
= TREE_CHAIN (f_gpr
);
3617 f_ovf
= TREE_CHAIN (f_fpr
);
3618 f_sav
= TREE_CHAIN (f_ovf
);
3620 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
3621 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
);
3622 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
);
3623 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
);
3624 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
);
3626 size
= int_size_in_bytes (type
);
3627 rsize
= (size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
3629 if (AGGREGATE_TYPE_P (type
) || TYPE_MODE (type
) == TFmode
)
3631 /* Aggregates and long doubles are passed by reference. */
3637 size
= UNITS_PER_WORD
;
3640 else if (FLOAT_TYPE_P (type
) && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3642 /* FP args go in FP registers, if present. */
3651 /* Otherwise into GP registers. */
3659 /* Pull the value out of the saved registers ... */
3661 lab_false
= gen_label_rtx ();
3662 lab_over
= gen_label_rtx ();
3663 addr_rtx
= gen_reg_rtx (Pmode
);
3665 /* AltiVec vectors never go in registers. */
3666 if (!TARGET_ALTIVEC
|| TREE_CODE (type
) != VECTOR_TYPE
)
3668 TREE_THIS_VOLATILE (reg
) = 1;
3669 emit_cmp_and_jump_insns
3670 (expand_expr (reg
, NULL_RTX
, QImode
, EXPAND_NORMAL
),
3671 GEN_INT (8 - n_reg
+ 1), GE
, const1_rtx
, QImode
, 1,
3674 /* Long long is aligned in the registers. */
3677 u
= build (BIT_AND_EXPR
, TREE_TYPE (reg
), reg
,
3678 build_int_2 (n_reg
- 1, 0));
3679 u
= build (PLUS_EXPR
, TREE_TYPE (reg
), reg
, u
);
3680 u
= build (MODIFY_EXPR
, TREE_TYPE (reg
), reg
, u
);
3681 TREE_SIDE_EFFECTS (u
) = 1;
3682 expand_expr (u
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3686 t
= build (PLUS_EXPR
, ptr_type_node
, sav
, build_int_2 (sav_ofs
, 0));
3690 u
= build (POSTINCREMENT_EXPR
, TREE_TYPE (reg
), reg
,
3691 build_int_2 (n_reg
, 0));
3692 TREE_SIDE_EFFECTS (u
) = 1;
3694 u
= build1 (CONVERT_EXPR
, integer_type_node
, u
);
3695 TREE_SIDE_EFFECTS (u
) = 1;
3697 u
= build (MULT_EXPR
, integer_type_node
, u
, build_int_2 (sav_scale
, 0));
3698 TREE_SIDE_EFFECTS (u
) = 1;
3700 t
= build (PLUS_EXPR
, ptr_type_node
, t
, u
);
3701 TREE_SIDE_EFFECTS (t
) = 1;
3703 r
= expand_expr (t
, addr_rtx
, Pmode
, EXPAND_NORMAL
);
3705 emit_move_insn (addr_rtx
, r
);
3707 emit_jump_insn (gen_jump (lab_over
));
3711 emit_label (lab_false
);
3713 /* ... otherwise out of the overflow area. */
3715 /* Make sure we don't find reg 7 for the next int arg.
3717 All AltiVec vectors go in the overflow area. So in the AltiVec
3718 case we need to get the vectors from the overflow area, but
3719 remember where the GPRs and FPRs are. */
3720 if (n_reg
> 1 && (TREE_CODE (type
) != VECTOR_TYPE
3721 || !TARGET_ALTIVEC
))
3723 t
= build (MODIFY_EXPR
, TREE_TYPE (reg
), reg
, build_int_2 (8, 0));
3724 TREE_SIDE_EFFECTS (t
) = 1;
3725 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3728 /* Care for on-stack alignment if needed. */
3735 /* AltiVec vectors are 16 byte aligned. */
3736 if (TARGET_ALTIVEC
&& TREE_CODE (type
) == VECTOR_TYPE
)
3741 t
= build (PLUS_EXPR
, TREE_TYPE (ovf
), ovf
, build_int_2 (align
, 0));
3742 t
= build (BIT_AND_EXPR
, TREE_TYPE (t
), t
, build_int_2 (-align
-1, -1));
3746 r
= expand_expr (t
, addr_rtx
, Pmode
, EXPAND_NORMAL
);
3748 emit_move_insn (addr_rtx
, r
);
3750 t
= build (PLUS_EXPR
, TREE_TYPE (t
), t
, build_int_2 (size
, 0));
3751 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
3752 TREE_SIDE_EFFECTS (t
) = 1;
3753 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3755 emit_label (lab_over
);
3759 r
= gen_rtx_MEM (Pmode
, addr_rtx
);
3760 set_mem_alias_set (r
, get_varargs_alias_set ());
3761 emit_move_insn (addr_rtx
, r
);
/* Register builtin NAME (tree TYPE, enum value CODE) iff the target
   flags in MASK are enabled.  Wrapped in do { } while (0) so the
   macro behaves as a single statement.  */
#define def_builtin(MASK, NAME, TYPE, CODE)			\
  do {								\
    if ((MASK) & target_flags)					\
      builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
			NULL, NULL_TREE);			\
  } while (0)
3776 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3778 static const struct builtin_description bdesc_3arg
[] =
3780 { MASK_ALTIVEC
, CODE_FOR_altivec_vmaddfp
, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP
},
3781 { MASK_ALTIVEC
, CODE_FOR_altivec_vmhaddshs
, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS
},
3782 { MASK_ALTIVEC
, CODE_FOR_altivec_vmhraddshs
, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS
},
3783 { MASK_ALTIVEC
, CODE_FOR_altivec_vmladduhm
, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM
},
3784 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumubm
, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM
},
3785 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsummbm
, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM
},
3786 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumuhm
, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM
},
3787 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumshm
, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM
},
3788 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumuhs
, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS
},
3789 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumshs
, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS
},
3790 { MASK_ALTIVEC
, CODE_FOR_altivec_vnmsubfp
, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP
},
3791 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_4sf
, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF
},
3792 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_4si
, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI
},
3793 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_8hi
, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI
},
3794 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_16qi
, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI
},
3795 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_4sf
, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF
},
3796 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_4si
, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI
},
3797 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_8hi
, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI
},
3798 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_16qi
, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI
},
3799 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_16qi
, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI
},
3800 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_8hi
, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI
},
3801 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_4si
, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI
},
3802 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_4sf
, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF
},
3805 /* DST operations: void foo (void *, const int, const char). */
3807 static const struct builtin_description bdesc_dst
[] =
3809 { MASK_ALTIVEC
, CODE_FOR_altivec_dst
, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST
},
3810 { MASK_ALTIVEC
, CODE_FOR_altivec_dstt
, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT
},
3811 { MASK_ALTIVEC
, CODE_FOR_altivec_dstst
, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST
},
3812 { MASK_ALTIVEC
, CODE_FOR_altivec_dststt
, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT
}
3815 /* Simple binary operations: VECc = foo (VECa, VECb). */
3817 static struct builtin_description bdesc_2arg
[] =
3819 { MASK_ALTIVEC
, CODE_FOR_addv16qi3
, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM
},
3820 { MASK_ALTIVEC
, CODE_FOR_addv8hi3
, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM
},
3821 { MASK_ALTIVEC
, CODE_FOR_addv4si3
, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM
},
3822 { MASK_ALTIVEC
, CODE_FOR_addv4sf3
, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP
},
3823 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddcuw
, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW
},
3824 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddubs
, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS
},
3825 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddsbs
, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS
},
3826 { MASK_ALTIVEC
, CODE_FOR_altivec_vadduhs
, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS
},
3827 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddshs
, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS
},
3828 { MASK_ALTIVEC
, CODE_FOR_altivec_vadduws
, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS
},
3829 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddsws
, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS
},
3830 { MASK_ALTIVEC
, CODE_FOR_andv4si3
, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND
},
3831 { MASK_ALTIVEC
, CODE_FOR_altivec_vandc
, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC
},
3832 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgub
, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB
},
3833 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsb
, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB
},
3834 { MASK_ALTIVEC
, CODE_FOR_altivec_vavguh
, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH
},
3835 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsh
, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH
},
3836 { MASK_ALTIVEC
, CODE_FOR_altivec_vavguw
, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW
},
3837 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsw
, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW
},
3838 { MASK_ALTIVEC
, CODE_FOR_altivec_vcfux
, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX
},
3839 { MASK_ALTIVEC
, CODE_FOR_altivec_vcfsx
, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX
},
3840 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpbfp
, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP
},
3841 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequb
, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB
},
3842 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequh
, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH
},
3843 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequw
, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW
},
3844 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpeqfp
, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP
},
3845 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgefp
, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP
},
3846 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtub
, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB
},
3847 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsb
, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB
},
3848 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtuh
, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH
},
3849 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsh
, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH
},
3850 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtuw
, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW
},
3851 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsw
, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW
},
3852 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtfp
, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP
},
3853 { MASK_ALTIVEC
, CODE_FOR_altivec_vctsxs
, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS
},
3854 { MASK_ALTIVEC
, CODE_FOR_altivec_vctuxs
, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS
},
3855 { MASK_ALTIVEC
, CODE_FOR_umaxv16qi3
, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB
},
3856 { MASK_ALTIVEC
, CODE_FOR_smaxv16qi3
, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB
},
3857 { MASK_ALTIVEC
, CODE_FOR_umaxv8hi3
, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH
},
3858 { MASK_ALTIVEC
, CODE_FOR_smaxv8hi3
, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH
},
3859 { MASK_ALTIVEC
, CODE_FOR_umaxv4si3
, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW
},
3860 { MASK_ALTIVEC
, CODE_FOR_smaxv4si3
, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW
},
3861 { MASK_ALTIVEC
, CODE_FOR_smaxv4sf3
, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP
},
3862 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghb
, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB
},
3863 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghh
, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH
},
3864 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghw
, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW
},
3865 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglb
, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB
},
3866 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglh
, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH
},
3867 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglw
, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW
},
3868 { MASK_ALTIVEC
, CODE_FOR_uminv16qi3
, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB
},
3869 { MASK_ALTIVEC
, CODE_FOR_sminv16qi3
, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB
},
3870 { MASK_ALTIVEC
, CODE_FOR_uminv8hi3
, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH
},
3871 { MASK_ALTIVEC
, CODE_FOR_sminv8hi3
, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH
},
3872 { MASK_ALTIVEC
, CODE_FOR_uminv4si3
, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW
},
3873 { MASK_ALTIVEC
, CODE_FOR_sminv4si3
, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW
},
3874 { MASK_ALTIVEC
, CODE_FOR_sminv4sf3
, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP
},
3875 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuleub
, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB
},
3876 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulesb
, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB
},
3877 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuleuh
, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH
},
3878 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulesh
, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH
},
3879 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuloub
, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB
},
3880 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulosb
, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB
},
3881 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulouh
, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH
},
3882 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulosh
, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH
},
3883 { MASK_ALTIVEC
, CODE_FOR_altivec_vnor
, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR
},
3884 { MASK_ALTIVEC
, CODE_FOR_iorv4si3
, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR
},
3885 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhum
, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM
},
3886 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwum
, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM
},
3887 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkpx
, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX
},
3888 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhss
, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS
},
3889 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkshss
, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS
},
3890 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwss
, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS
},
3891 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkswss
, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS
},
3892 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhus
, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS
},
3893 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkshus
, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS
},
3894 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwus
, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS
},
3895 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkswus
, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS
},
3896 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlb
, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB
},
3897 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlh
, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH
},
3898 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlw
, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW
},
3899 { MASK_ALTIVEC
, CODE_FOR_altivec_vslb
, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB
},
3900 { MASK_ALTIVEC
, CODE_FOR_altivec_vslh
, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH
},
3901 { MASK_ALTIVEC
, CODE_FOR_altivec_vslw
, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW
},
3902 { MASK_ALTIVEC
, CODE_FOR_altivec_vsl
, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL
},
3903 { MASK_ALTIVEC
, CODE_FOR_altivec_vslo
, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO
},
3904 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltb
, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB
},
3905 { MASK_ALTIVEC
, CODE_FOR_altivec_vsplth
, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH
},
3906 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltw
, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW
},
3907 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrb
, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB
},
3908 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrh
, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH
},
3909 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrw
, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW
},
3910 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrab
, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB
},
3911 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrah
, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH
},
3912 { MASK_ALTIVEC
, CODE_FOR_altivec_vsraw
, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW
},
3913 { MASK_ALTIVEC
, CODE_FOR_altivec_vsr
, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR
},
3914 { MASK_ALTIVEC
, CODE_FOR_altivec_vsro
, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO
},
3915 { MASK_ALTIVEC
, CODE_FOR_subv16qi3
, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM
},
3916 { MASK_ALTIVEC
, CODE_FOR_subv8hi3
, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM
},
3917 { MASK_ALTIVEC
, CODE_FOR_subv4si3
, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM
},
3918 { MASK_ALTIVEC
, CODE_FOR_subv4sf3
, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP
},
3919 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubcuw
, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW
},
3920 { MASK_ALTIVEC
, CODE_FOR_altivec_vsububs
, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS
},
3921 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubsbs
, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS
},
3922 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubuhs
, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS
},
3923 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubshs
, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS
},
3924 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubuws
, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS
},
3925 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubsws
, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS
},
3926 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4ubs
, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS
},
3927 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4sbs
, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS
},
3928 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4shs
, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS
},
3929 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum2sws
, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS
},
3930 { MASK_ALTIVEC
, CODE_FOR_altivec_vsumsws
, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS
},
3931 { MASK_ALTIVEC
, CODE_FOR_xorv4si3
, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR
},
3933 /* Place holder, leave as first spe builtin. */
3934 { 0, CODE_FOR_spe_evaddw
, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW
},
3935 { 0, CODE_FOR_spe_evand
, "__builtin_spe_evand", SPE_BUILTIN_EVAND
},
3936 { 0, CODE_FOR_spe_evandc
, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC
},
3937 { 0, CODE_FOR_spe_evdivws
, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS
},
3938 { 0, CODE_FOR_spe_evdivwu
, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU
},
3939 { 0, CODE_FOR_spe_eveqv
, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV
},
3940 { 0, CODE_FOR_spe_evfsadd
, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD
},
3941 { 0, CODE_FOR_spe_evfsdiv
, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV
},
3942 { 0, CODE_FOR_spe_evfsmul
, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL
},
3943 { 0, CODE_FOR_spe_evfssub
, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB
},
3944 { 0, CODE_FOR_spe_evmergehi
, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI
},
3945 { 0, CODE_FOR_spe_evmergehilo
, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO
},
3946 { 0, CODE_FOR_spe_evmergelo
, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO
},
3947 { 0, CODE_FOR_spe_evmergelohi
, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI
},
3948 { 0, CODE_FOR_spe_evmhegsmfaa
, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA
},
3949 { 0, CODE_FOR_spe_evmhegsmfan
, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN
},
3950 { 0, CODE_FOR_spe_evmhegsmiaa
, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA
},
3951 { 0, CODE_FOR_spe_evmhegsmian
, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN
},
3952 { 0, CODE_FOR_spe_evmhegumiaa
, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA
},
3953 { 0, CODE_FOR_spe_evmhegumian
, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN
},
3954 { 0, CODE_FOR_spe_evmhesmf
, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF
},
3955 { 0, CODE_FOR_spe_evmhesmfa
, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA
},
3956 { 0, CODE_FOR_spe_evmhesmfaaw
, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW
},
3957 { 0, CODE_FOR_spe_evmhesmfanw
, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW
},
3958 { 0, CODE_FOR_spe_evmhesmi
, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI
},
3959 { 0, CODE_FOR_spe_evmhesmia
, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA
},
3960 { 0, CODE_FOR_spe_evmhesmiaaw
, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW
},
3961 { 0, CODE_FOR_spe_evmhesmianw
, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW
},
3962 { 0, CODE_FOR_spe_evmhessf
, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF
},
3963 { 0, CODE_FOR_spe_evmhessfa
, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA
},
3964 { 0, CODE_FOR_spe_evmhessfaaw
, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW
},
3965 { 0, CODE_FOR_spe_evmhessfanw
, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW
},
3966 { 0, CODE_FOR_spe_evmhessiaaw
, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW
},
3967 { 0, CODE_FOR_spe_evmhessianw
, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW
},
3968 { 0, CODE_FOR_spe_evmheumi
, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI
},
3969 { 0, CODE_FOR_spe_evmheumia
, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA
},
3970 { 0, CODE_FOR_spe_evmheumiaaw
, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW
},
3971 { 0, CODE_FOR_spe_evmheumianw
, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW
},
3972 { 0, CODE_FOR_spe_evmheusiaaw
, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW
},
3973 { 0, CODE_FOR_spe_evmheusianw
, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW
},
3974 { 0, CODE_FOR_spe_evmhogsmfaa
, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA
},
3975 { 0, CODE_FOR_spe_evmhogsmfan
, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN
},
3976 { 0, CODE_FOR_spe_evmhogsmiaa
, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA
},
3977 { 0, CODE_FOR_spe_evmhogsmian
, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN
},
3978 { 0, CODE_FOR_spe_evmhogumiaa
, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA
},
3979 { 0, CODE_FOR_spe_evmhogumian
, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN
},
3980 { 0, CODE_FOR_spe_evmhosmf
, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF
},
3981 { 0, CODE_FOR_spe_evmhosmfa
, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA
},
3982 { 0, CODE_FOR_spe_evmhosmfaaw
, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW
},
3983 { 0, CODE_FOR_spe_evmhosmfanw
, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW
},
3984 { 0, CODE_FOR_spe_evmhosmi
, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI
},
3985 { 0, CODE_FOR_spe_evmhosmia
, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA
},
3986 { 0, CODE_FOR_spe_evmhosmiaaw
, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW
},
3987 { 0, CODE_FOR_spe_evmhosmianw
, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW
},
3988 { 0, CODE_FOR_spe_evmhossf
, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF
},
3989 { 0, CODE_FOR_spe_evmhossfa
, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA
},
3990 { 0, CODE_FOR_spe_evmhossfaaw
, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW
},
3991 { 0, CODE_FOR_spe_evmhossfanw
, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW
},
3992 { 0, CODE_FOR_spe_evmhossiaaw
, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW
},
3993 { 0, CODE_FOR_spe_evmhossianw
, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW
},
3994 { 0, CODE_FOR_spe_evmhoumi
, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI
},
3995 { 0, CODE_FOR_spe_evmhoumia
, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA
},
3996 { 0, CODE_FOR_spe_evmhoumiaaw
, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW
},
3997 { 0, CODE_FOR_spe_evmhoumianw
, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW
},
3998 { 0, CODE_FOR_spe_evmhousiaaw
, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW
},
3999 { 0, CODE_FOR_spe_evmhousianw
, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW
},
4000 { 0, CODE_FOR_spe_evmwhsmf
, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF
},
4001 { 0, CODE_FOR_spe_evmwhsmfa
, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA
},
4002 { 0, CODE_FOR_spe_evmwhsmi
, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI
},
4003 { 0, CODE_FOR_spe_evmwhsmia
, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA
},
4004 { 0, CODE_FOR_spe_evmwhssf
, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF
},
4005 { 0, CODE_FOR_spe_evmwhssfa
, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA
},
4006 { 0, CODE_FOR_spe_evmwhumi
, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI
},
4007 { 0, CODE_FOR_spe_evmwhumia
, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA
},
4008 { 0, CODE_FOR_spe_evmwlsmiaaw
, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW
},
4009 { 0, CODE_FOR_spe_evmwlsmianw
, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW
},
4010 { 0, CODE_FOR_spe_evmwlssiaaw
, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW
},
4011 { 0, CODE_FOR_spe_evmwlssianw
, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW
},
4012 { 0, CODE_FOR_spe_evmwlumi
, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI
},
4013 { 0, CODE_FOR_spe_evmwlumia
, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA
},
4014 { 0, CODE_FOR_spe_evmwlumiaaw
, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW
},
4015 { 0, CODE_FOR_spe_evmwlumianw
, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW
},
4016 { 0, CODE_FOR_spe_evmwlusiaaw
, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW
},
4017 { 0, CODE_FOR_spe_evmwlusianw
, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW
},
4018 { 0, CODE_FOR_spe_evmwsmf
, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF
},
4019 { 0, CODE_FOR_spe_evmwsmfa
, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA
},
4020 { 0, CODE_FOR_spe_evmwsmfaa
, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA
},
4021 { 0, CODE_FOR_spe_evmwsmfan
, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN
},
4022 { 0, CODE_FOR_spe_evmwsmi
, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI
},
4023 { 0, CODE_FOR_spe_evmwsmia
, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA
},
4024 { 0, CODE_FOR_spe_evmwsmiaa
, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA
},
4025 { 0, CODE_FOR_spe_evmwsmian
, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN
},
4026 { 0, CODE_FOR_spe_evmwssf
, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF
},
4027 { 0, CODE_FOR_spe_evmwssfa
, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA
},
4028 { 0, CODE_FOR_spe_evmwssfaa
, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA
},
4029 { 0, CODE_FOR_spe_evmwssfan
, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN
},
4030 { 0, CODE_FOR_spe_evmwumi
, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI
},
4031 { 0, CODE_FOR_spe_evmwumia
, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA
},
4032 { 0, CODE_FOR_spe_evmwumiaa
, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA
},
4033 { 0, CODE_FOR_spe_evmwumian
, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN
},
4034 { 0, CODE_FOR_spe_evnand
, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND
},
4035 { 0, CODE_FOR_spe_evnor
, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR
},
4036 { 0, CODE_FOR_spe_evor
, "__builtin_spe_evor", SPE_BUILTIN_EVOR
},
4037 { 0, CODE_FOR_spe_evorc
, "__builtin_spe_evorc", SPE_BUILTIN_EVORC
},
4038 { 0, CODE_FOR_spe_evrlw
, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW
},
4039 { 0, CODE_FOR_spe_evslw
, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW
},
4040 { 0, CODE_FOR_spe_evsrws
, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS
},
4041 { 0, CODE_FOR_spe_evsrwu
, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU
},
4042 { 0, CODE_FOR_spe_evsubfw
, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW
},
4044 /* SPE binary operations expecting a 5-bit unsigned literal. */
4045 { 0, CODE_FOR_spe_evaddiw
, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW
},
4047 { 0, CODE_FOR_spe_evrlwi
, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI
},
4048 { 0, CODE_FOR_spe_evslwi
, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI
},
4049 { 0, CODE_FOR_spe_evsrwis
, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS
},
4050 { 0, CODE_FOR_spe_evsrwiu
, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU
},
4051 { 0, CODE_FOR_spe_evsubifw
, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW
},
4052 { 0, CODE_FOR_spe_evmwhssfaa
, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA
},
4053 { 0, CODE_FOR_spe_evmwhssmaa
, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA
},
4054 { 0, CODE_FOR_spe_evmwhsmfaa
, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA
},
4055 { 0, CODE_FOR_spe_evmwhsmiaa
, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA
},
4056 { 0, CODE_FOR_spe_evmwhusiaa
, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA
},
4057 { 0, CODE_FOR_spe_evmwhumiaa
, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA
},
4058 { 0, CODE_FOR_spe_evmwhssfan
, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN
},
4059 { 0, CODE_FOR_spe_evmwhssian
, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN
},
4060 { 0, CODE_FOR_spe_evmwhsmfan
, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN
},
4061 { 0, CODE_FOR_spe_evmwhsmian
, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN
},
4062 { 0, CODE_FOR_spe_evmwhusian
, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN
},
4063 { 0, CODE_FOR_spe_evmwhumian
, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN
},
4064 { 0, CODE_FOR_spe_evmwhgssfaa
, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA
},
4065 { 0, CODE_FOR_spe_evmwhgsmfaa
, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA
},
4066 { 0, CODE_FOR_spe_evmwhgsmiaa
, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA
},
4067 { 0, CODE_FOR_spe_evmwhgumiaa
, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA
},
4068 { 0, CODE_FOR_spe_evmwhgssfan
, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN
},
4069 { 0, CODE_FOR_spe_evmwhgsmfan
, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN
},
4070 { 0, CODE_FOR_spe_evmwhgsmian
, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN
},
4071 { 0, CODE_FOR_spe_evmwhgumian
, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN
},
4072 { 0, CODE_FOR_spe_brinc
, "__builtin_spe_brinc", SPE_BUILTIN_BRINC
},
4074 /* Place-holder. Leave as last binary SPE builtin. */
4075 { 0, CODE_FOR_xorv2si3
, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR
},
4078 /* AltiVec predicates. */
4080 struct builtin_description_predicates
4082 const unsigned int mask
;
4083 const enum insn_code icode
;
4085 const char *const name
;
4086 const enum rs6000_builtins code
;
4089 static const struct builtin_description_predicates bdesc_altivec_preds
[] =
4091 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P
},
4092 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P
},
4093 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P
},
4094 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P
},
4095 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P
},
4096 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P
},
4097 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P
},
4098 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P
},
4099 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P
},
4100 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P
},
4101 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P
},
4102 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P
},
4103 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P
}
4106 /* SPE predicates. */
4107 static struct builtin_description bdesc_spe_predicates
[] =
4109 /* Place-holder. Leave as first. */
4110 { 0, CODE_FOR_spe_evcmpeq
, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ
},
4111 { 0, CODE_FOR_spe_evcmpgts
, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS
},
4112 { 0, CODE_FOR_spe_evcmpgtu
, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU
},
4113 { 0, CODE_FOR_spe_evcmplts
, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS
},
4114 { 0, CODE_FOR_spe_evcmpltu
, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU
},
4115 { 0, CODE_FOR_spe_evfscmpeq
, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ
},
4116 { 0, CODE_FOR_spe_evfscmpgt
, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT
},
4117 { 0, CODE_FOR_spe_evfscmplt
, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT
},
4118 { 0, CODE_FOR_spe_evfststeq
, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ
},
4119 { 0, CODE_FOR_spe_evfststgt
, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT
},
4120 /* Place-holder. Leave as last. */
4121 { 0, CODE_FOR_spe_evfststlt
, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT
},
4124 /* SPE evsel predicates. */
4125 static struct builtin_description bdesc_spe_evsel
[] =
4127 /* Place-holder. Leave as first. */
4128 { 0, CODE_FOR_spe_evcmpgts
, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS
},
4129 { 0, CODE_FOR_spe_evcmpgtu
, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU
},
4130 { 0, CODE_FOR_spe_evcmplts
, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS
},
4131 { 0, CODE_FOR_spe_evcmpltu
, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU
},
4132 { 0, CODE_FOR_spe_evcmpeq
, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ
},
4133 { 0, CODE_FOR_spe_evfscmpgt
, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT
},
4134 { 0, CODE_FOR_spe_evfscmplt
, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT
},
4135 { 0, CODE_FOR_spe_evfscmpeq
, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ
},
4136 { 0, CODE_FOR_spe_evfststgt
, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT
},
4137 { 0, CODE_FOR_spe_evfststlt
, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT
},
4138 /* Place-holder. Leave as last. */
4139 { 0, CODE_FOR_spe_evfststeq
, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ
},
4142 /* ABS* operations. */
4144 static const struct builtin_description bdesc_abs
[] =
4146 { MASK_ALTIVEC
, CODE_FOR_absv4si2
, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI
},
4147 { MASK_ALTIVEC
, CODE_FOR_absv8hi2
, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI
},
4148 { MASK_ALTIVEC
, CODE_FOR_absv4sf2
, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF
},
4149 { MASK_ALTIVEC
, CODE_FOR_absv16qi2
, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI
},
4150 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v4si
, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI
},
4151 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v8hi
, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI
},
4152 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v16qi
, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI
}
4155 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
4158 static struct builtin_description bdesc_1arg
[] =
4160 { MASK_ALTIVEC
, CODE_FOR_altivec_vexptefp
, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP
},
4161 { MASK_ALTIVEC
, CODE_FOR_altivec_vlogefp
, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP
},
4162 { MASK_ALTIVEC
, CODE_FOR_altivec_vrefp
, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP
},
4163 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfim
, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM
},
4164 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfin
, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN
},
4165 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfip
, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP
},
4166 { MASK_ALTIVEC
, CODE_FOR_ftruncv4sf2
, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ
},
4167 { MASK_ALTIVEC
, CODE_FOR_altivec_vrsqrtefp
, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP
},
4168 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltisb
, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB
},
4169 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltish
, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH
},
4170 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltisw
, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW
},
4171 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhsb
, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB
},
4172 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhpx
, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX
},
4173 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhsh
, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH
},
4174 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklsb
, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB
},
4175 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklpx
, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX
},
4176 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklsh
, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH
},
4178 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
4179 end with SPE_BUILTIN_EVSUBFUSIAAW. */
4180 { 0, CODE_FOR_spe_evabs
, "__builtin_spe_evabs", SPE_BUILTIN_EVABS
},
4181 { 0, CODE_FOR_spe_evaddsmiaaw
, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW
},
4182 { 0, CODE_FOR_spe_evaddssiaaw
, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW
},
4183 { 0, CODE_FOR_spe_evaddumiaaw
, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW
},
4184 { 0, CODE_FOR_spe_evaddusiaaw
, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW
},
4185 { 0, CODE_FOR_spe_evcntlsw
, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW
},
4186 { 0, CODE_FOR_spe_evcntlzw
, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW
},
4187 { 0, CODE_FOR_spe_evextsb
, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB
},
4188 { 0, CODE_FOR_spe_evextsh
, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH
},
4189 { 0, CODE_FOR_spe_evfsabs
, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS
},
4190 { 0, CODE_FOR_spe_evfscfsf
, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF
},
4191 { 0, CODE_FOR_spe_evfscfsi
, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI
},
4192 { 0, CODE_FOR_spe_evfscfuf
, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF
},
4193 { 0, CODE_FOR_spe_evfscfui
, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI
},
4194 { 0, CODE_FOR_spe_evfsctsf
, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF
},
4195 { 0, CODE_FOR_spe_evfsctsi
, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI
},
4196 { 0, CODE_FOR_spe_evfsctsiz
, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ
},
4197 { 0, CODE_FOR_spe_evfsctuf
, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF
},
4198 { 0, CODE_FOR_spe_evfsctui
, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI
},
4199 { 0, CODE_FOR_spe_evfsctuiz
, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ
},
4200 { 0, CODE_FOR_spe_evfsnabs
, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS
},
4201 { 0, CODE_FOR_spe_evfsneg
, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG
},
4202 { 0, CODE_FOR_spe_evmra
, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA
},
4203 { 0, CODE_FOR_spe_evneg
, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG
},
4204 { 0, CODE_FOR_spe_evrndw
, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW
},
4205 { 0, CODE_FOR_spe_evsubfsmiaaw
, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW
},
4206 { 0, CODE_FOR_spe_evsubfssiaaw
, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW
},
4207 { 0, CODE_FOR_spe_evsubfumiaaw
, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW
},
4208 { 0, CODE_FOR_spe_evsplatfi
, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI
},
4209 { 0, CODE_FOR_spe_evsplati
, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI
},
4211 /* Place-holder. Leave as last unary SPE builtin. */
4212 { 0, CODE_FOR_spe_evsubfusiaaw
, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW
},
4216 rs6000_expand_unop_builtin (icode
, arglist
, target
)
4217 enum insn_code icode
;
4222 tree arg0
= TREE_VALUE (arglist
);
4223 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4224 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4225 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4227 if (icode
== CODE_FOR_nothing
)
4228 /* Builtin not supported on this processor. */
4231 /* If we got invalid arguments bail out before generating bad rtl. */
4232 if (arg0
== error_mark_node
)
4235 if (icode
== CODE_FOR_altivec_vspltisb
4236 || icode
== CODE_FOR_altivec_vspltish
4237 || icode
== CODE_FOR_altivec_vspltisw
4238 || icode
== CODE_FOR_spe_evsplatfi
4239 || icode
== CODE_FOR_spe_evsplati
)
4241 /* Only allow 5-bit *signed* literals. */
4242 if (GET_CODE (op0
) != CONST_INT
4243 || INTVAL (op0
) > 0x1f
4244 || INTVAL (op0
) < -0x1f)
4246 error ("argument 1 must be a 5-bit signed literal");
4252 || GET_MODE (target
) != tmode
4253 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4254 target
= gen_reg_rtx (tmode
);
4256 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4257 op0
= copy_to_mode_reg (mode0
, op0
);
4259 pat
= GEN_FCN (icode
) (target
, op0
);
4268 altivec_expand_abs_builtin (icode
, arglist
, target
)
4269 enum insn_code icode
;
4273 rtx pat
, scratch1
, scratch2
;
4274 tree arg0
= TREE_VALUE (arglist
);
4275 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4276 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4277 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4279 /* If we have invalid arguments, bail out before generating bad rtl. */
4280 if (arg0
== error_mark_node
)
4284 || GET_MODE (target
) != tmode
4285 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4286 target
= gen_reg_rtx (tmode
);
4288 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4289 op0
= copy_to_mode_reg (mode0
, op0
);
4291 scratch1
= gen_reg_rtx (mode0
);
4292 scratch2
= gen_reg_rtx (mode0
);
4294 pat
= GEN_FCN (icode
) (target
, op0
, scratch1
, scratch2
);
4303 rs6000_expand_binop_builtin (icode
, arglist
, target
)
4304 enum insn_code icode
;
4309 tree arg0
= TREE_VALUE (arglist
);
4310 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4311 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4312 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4313 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4314 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4315 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
4317 if (icode
== CODE_FOR_nothing
)
4318 /* Builtin not supported on this processor. */
4321 /* If we got invalid arguments bail out before generating bad rtl. */
4322 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
4325 if (icode
== CODE_FOR_altivec_vcfux
4326 || icode
== CODE_FOR_altivec_vcfsx
4327 || icode
== CODE_FOR_altivec_vctsxs
4328 || icode
== CODE_FOR_altivec_vctuxs
4329 || icode
== CODE_FOR_altivec_vspltb
4330 || icode
== CODE_FOR_altivec_vsplth
4331 || icode
== CODE_FOR_altivec_vspltw
4332 || icode
== CODE_FOR_spe_evaddiw
4333 || icode
== CODE_FOR_spe_evldd
4334 || icode
== CODE_FOR_spe_evldh
4335 || icode
== CODE_FOR_spe_evldw
4336 || icode
== CODE_FOR_spe_evlhhesplat
4337 || icode
== CODE_FOR_spe_evlhhossplat
4338 || icode
== CODE_FOR_spe_evlhhousplat
4339 || icode
== CODE_FOR_spe_evlwhe
4340 || icode
== CODE_FOR_spe_evlwhos
4341 || icode
== CODE_FOR_spe_evlwhou
4342 || icode
== CODE_FOR_spe_evlwhsplat
4343 || icode
== CODE_FOR_spe_evlwwsplat
4344 || icode
== CODE_FOR_spe_evrlwi
4345 || icode
== CODE_FOR_spe_evslwi
4346 || icode
== CODE_FOR_spe_evsrwis
4347 || icode
== CODE_FOR_spe_evsrwiu
)
4349 /* Only allow 5-bit unsigned literals. */
4350 if (TREE_CODE (arg1
) != INTEGER_CST
4351 || TREE_INT_CST_LOW (arg1
) & ~0x1f)
4353 error ("argument 2 must be a 5-bit unsigned literal");
4359 || GET_MODE (target
) != tmode
4360 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4361 target
= gen_reg_rtx (tmode
);
4363 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4364 op0
= copy_to_mode_reg (mode0
, op0
);
4365 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
4366 op1
= copy_to_mode_reg (mode1
, op1
);
4368 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
4377 altivec_expand_predicate_builtin (icode
, opcode
, arglist
, target
)
4378 enum insn_code icode
;
4384 tree cr6_form
= TREE_VALUE (arglist
);
4385 tree arg0
= TREE_VALUE (TREE_CHAIN (arglist
));
4386 tree arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4387 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4388 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4389 enum machine_mode tmode
= SImode
;
4390 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4391 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
4394 if (TREE_CODE (cr6_form
) != INTEGER_CST
)
4396 error ("argument 1 of __builtin_altivec_predicate must be a constant");
4400 cr6_form_int
= TREE_INT_CST_LOW (cr6_form
);
4405 /* If we have invalid arguments, bail out before generating bad rtl. */
4406 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
4410 || GET_MODE (target
) != tmode
4411 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4412 target
= gen_reg_rtx (tmode
);
4414 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4415 op0
= copy_to_mode_reg (mode0
, op0
);
4416 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
4417 op1
= copy_to_mode_reg (mode1
, op1
);
4419 scratch
= gen_reg_rtx (mode0
);
4421 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
,
4422 gen_rtx (SYMBOL_REF
, Pmode
, opcode
));
4427 /* The vec_any* and vec_all* predicates use the same opcodes for two
4428 different operations, but the bits in CR6 will be different
4429 depending on what information we want. So we have to play tricks
4430 with CR6 to get the right bits out.
4432 If you think this is disgusting, look at the specs for the
4433 AltiVec predicates. */
4435 switch (cr6_form_int
)
4438 emit_insn (gen_cr6_test_for_zero (target
));
4441 emit_insn (gen_cr6_test_for_zero_reverse (target
));
4444 emit_insn (gen_cr6_test_for_lt (target
));
4447 emit_insn (gen_cr6_test_for_lt_reverse (target
));
4450 error ("argument 1 of __builtin_altivec_predicate is out of range");
4458 altivec_expand_stv_builtin (icode
, arglist
)
4459 enum insn_code icode
;
4462 tree arg0
= TREE_VALUE (arglist
);
4463 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4464 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4465 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4466 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4467 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
4469 enum machine_mode mode0
= insn_data
[icode
].operand
[0].mode
;
4470 enum machine_mode mode1
= insn_data
[icode
].operand
[1].mode
;
4471 enum machine_mode mode2
= insn_data
[icode
].operand
[2].mode
;
4473 /* Invalid arguments. Bail before doing anything stoopid! */
4474 if (arg0
== error_mark_node
4475 || arg1
== error_mark_node
4476 || arg2
== error_mark_node
)
4479 if (! (*insn_data
[icode
].operand
[2].predicate
) (op0
, mode2
))
4480 op0
= copy_to_mode_reg (mode2
, op0
);
4481 if (! (*insn_data
[icode
].operand
[0].predicate
) (op1
, mode0
))
4482 op1
= copy_to_mode_reg (mode0
, op1
);
4483 if (! (*insn_data
[icode
].operand
[1].predicate
) (op2
, mode1
))
4484 op2
= copy_to_mode_reg (mode1
, op2
);
4486 pat
= GEN_FCN (icode
) (op1
, op2
, op0
);
4493 rs6000_expand_ternop_builtin (icode
, arglist
, target
)
4494 enum insn_code icode
;
4499 tree arg0
= TREE_VALUE (arglist
);
4500 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4501 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4502 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4503 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4504 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
4505 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4506 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4507 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
4508 enum machine_mode mode2
= insn_data
[icode
].operand
[3].mode
;
4510 if (icode
== CODE_FOR_nothing
)
4511 /* Builtin not supported on this processor. */
4514 /* If we got invalid arguments bail out before generating bad rtl. */
4515 if (arg0
== error_mark_node
4516 || arg1
== error_mark_node
4517 || arg2
== error_mark_node
)
4520 if (icode
== CODE_FOR_altivec_vsldoi_4sf
4521 || icode
== CODE_FOR_altivec_vsldoi_4si
4522 || icode
== CODE_FOR_altivec_vsldoi_8hi
4523 || icode
== CODE_FOR_altivec_vsldoi_16qi
)
4525 /* Only allow 4-bit unsigned literals. */
4526 if (TREE_CODE (arg2
) != INTEGER_CST
4527 || TREE_INT_CST_LOW (arg2
) & ~0xf)
4529 error ("argument 3 must be a 4-bit unsigned literal");
4535 || GET_MODE (target
) != tmode
4536 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4537 target
= gen_reg_rtx (tmode
);
4539 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4540 op0
= copy_to_mode_reg (mode0
, op0
);
4541 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
4542 op1
= copy_to_mode_reg (mode1
, op1
);
4543 if (! (*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
))
4544 op2
= copy_to_mode_reg (mode2
, op2
);
4546 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
);
4554 /* Expand the lvx builtins. */
4556 altivec_expand_ld_builtin (exp
, target
, expandedp
)
4561 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4562 tree arglist
= TREE_OPERAND (exp
, 1);
4563 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4565 enum machine_mode tmode
, mode0
;
4567 enum insn_code icode
;
4571 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi
:
4572 icode
= CODE_FOR_altivec_lvx_16qi
;
4574 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi
:
4575 icode
= CODE_FOR_altivec_lvx_8hi
;
4577 case ALTIVEC_BUILTIN_LD_INTERNAL_4si
:
4578 icode
= CODE_FOR_altivec_lvx_4si
;
4580 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf
:
4581 icode
= CODE_FOR_altivec_lvx_4sf
;
4590 arg0
= TREE_VALUE (arglist
);
4591 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4592 tmode
= insn_data
[icode
].operand
[0].mode
;
4593 mode0
= insn_data
[icode
].operand
[1].mode
;
4596 || GET_MODE (target
) != tmode
4597 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4598 target
= gen_reg_rtx (tmode
);
4600 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4601 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
4603 pat
= GEN_FCN (icode
) (target
, op0
);
4610 /* Expand the stvx builtins. */
4612 altivec_expand_st_builtin (exp
, target
, expandedp
)
4614 rtx target ATTRIBUTE_UNUSED
;
4617 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4618 tree arglist
= TREE_OPERAND (exp
, 1);
4619 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4621 enum machine_mode mode0
, mode1
;
4623 enum insn_code icode
;
4627 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi
:
4628 icode
= CODE_FOR_altivec_stvx_16qi
;
4630 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi
:
4631 icode
= CODE_FOR_altivec_stvx_8hi
;
4633 case ALTIVEC_BUILTIN_ST_INTERNAL_4si
:
4634 icode
= CODE_FOR_altivec_stvx_4si
;
4636 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf
:
4637 icode
= CODE_FOR_altivec_stvx_4sf
;
4644 arg0
= TREE_VALUE (arglist
);
4645 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4646 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4647 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4648 mode0
= insn_data
[icode
].operand
[0].mode
;
4649 mode1
= insn_data
[icode
].operand
[1].mode
;
4651 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
4652 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
4653 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
4654 op1
= copy_to_mode_reg (mode1
, op1
);
4656 pat
= GEN_FCN (icode
) (op0
, op1
);
4664 /* Expand the dst builtins. */
4666 altivec_expand_dst_builtin (exp
, target
, expandedp
)
4668 rtx target ATTRIBUTE_UNUSED
;
4671 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4672 tree arglist
= TREE_OPERAND (exp
, 1);
4673 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4674 tree arg0
, arg1
, arg2
;
4675 enum machine_mode mode0
, mode1
, mode2
;
4676 rtx pat
, op0
, op1
, op2
;
4677 struct builtin_description
*d
;
4682 /* Handle DST variants. */
4683 d
= (struct builtin_description
*) bdesc_dst
;
4684 for (i
= 0; i
< ARRAY_SIZE (bdesc_dst
); i
++, d
++)
4685 if (d
->code
== fcode
)
4687 arg0
= TREE_VALUE (arglist
);
4688 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4689 arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4690 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4691 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4692 op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
4693 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
4694 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
4695 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
4697 /* Invalid arguments, bail out before generating bad rtl. */
4698 if (arg0
== error_mark_node
4699 || arg1
== error_mark_node
4700 || arg2
== error_mark_node
)
4703 if (TREE_CODE (arg2
) != INTEGER_CST
4704 || TREE_INT_CST_LOW (arg2
) & ~0x3)
4706 error ("argument to `%s' must be a 2-bit unsigned literal", d
->name
);
4710 if (! (*insn_data
[d
->icode
].operand
[0].predicate
) (op0
, mode0
))
4711 op0
= copy_to_mode_reg (mode0
, op0
);
4712 if (! (*insn_data
[d
->icode
].operand
[1].predicate
) (op1
, mode1
))
4713 op1
= copy_to_mode_reg (mode1
, op1
);
4715 pat
= GEN_FCN (d
->icode
) (op0
, op1
, op2
);
4726 /* Expand the builtin in EXP and store the result in TARGET. Store
4727 true in *EXPANDEDP if we found a builtin to expand. */
4729 altivec_expand_builtin (exp
, target
, expandedp
)
4734 struct builtin_description
*d
;
4735 struct builtin_description_predicates
*dp
;
4737 enum insn_code icode
;
4738 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4739 tree arglist
= TREE_OPERAND (exp
, 1);
4742 enum machine_mode tmode
, mode0
;
4743 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4745 target
= altivec_expand_ld_builtin (exp
, target
, expandedp
);
4749 target
= altivec_expand_st_builtin (exp
, target
, expandedp
);
4753 target
= altivec_expand_dst_builtin (exp
, target
, expandedp
);
4761 case ALTIVEC_BUILTIN_STVX
:
4762 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx
, arglist
);
4763 case ALTIVEC_BUILTIN_STVEBX
:
4764 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx
, arglist
);
4765 case ALTIVEC_BUILTIN_STVEHX
:
4766 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx
, arglist
);
4767 case ALTIVEC_BUILTIN_STVEWX
:
4768 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx
, arglist
);
4769 case ALTIVEC_BUILTIN_STVXL
:
4770 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl
, arglist
);
4772 case ALTIVEC_BUILTIN_MFVSCR
:
4773 icode
= CODE_FOR_altivec_mfvscr
;
4774 tmode
= insn_data
[icode
].operand
[0].mode
;
4777 || GET_MODE (target
) != tmode
4778 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4779 target
= gen_reg_rtx (tmode
);
4781 pat
= GEN_FCN (icode
) (target
);
4787 case ALTIVEC_BUILTIN_MTVSCR
:
4788 icode
= CODE_FOR_altivec_mtvscr
;
4789 arg0
= TREE_VALUE (arglist
);
4790 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4791 mode0
= insn_data
[icode
].operand
[0].mode
;
4793 /* If we got invalid arguments bail out before generating bad rtl. */
4794 if (arg0
== error_mark_node
)
4797 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
4798 op0
= copy_to_mode_reg (mode0
, op0
);
4800 pat
= GEN_FCN (icode
) (op0
);
4805 case ALTIVEC_BUILTIN_DSSALL
:
4806 emit_insn (gen_altivec_dssall ());
4809 case ALTIVEC_BUILTIN_DSS
:
4810 icode
= CODE_FOR_altivec_dss
;
4811 arg0
= TREE_VALUE (arglist
);
4812 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4813 mode0
= insn_data
[icode
].operand
[0].mode
;
4815 /* If we got invalid arguments bail out before generating bad rtl. */
4816 if (arg0
== error_mark_node
)
4819 if (TREE_CODE (arg0
) != INTEGER_CST
4820 || TREE_INT_CST_LOW (arg0
) & ~0x3)
4822 error ("argument to dss must be a 2-bit unsigned literal");
4826 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
4827 op0
= copy_to_mode_reg (mode0
, op0
);
4829 emit_insn (gen_altivec_dss (op0
));
4833 /* Expand abs* operations. */
4834 d
= (struct builtin_description
*) bdesc_abs
;
4835 for (i
= 0; i
< ARRAY_SIZE (bdesc_abs
); i
++, d
++)
4836 if (d
->code
== fcode
)
4837 return altivec_expand_abs_builtin (d
->icode
, arglist
, target
);
4839 /* Expand the AltiVec predicates. */
4840 dp
= (struct builtin_description_predicates
*) bdesc_altivec_preds
;
4841 for (i
= 0; i
< ARRAY_SIZE (bdesc_altivec_preds
); i
++, dp
++)
4842 if (dp
->code
== fcode
)
4843 return altivec_expand_predicate_builtin (dp
->icode
, dp
->opcode
, arglist
, target
);
4845 /* LV* are funky. We initialized them differently. */
4848 case ALTIVEC_BUILTIN_LVSL
:
4849 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl
,
4851 case ALTIVEC_BUILTIN_LVSR
:
4852 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr
,
4854 case ALTIVEC_BUILTIN_LVEBX
:
4855 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx
,
4857 case ALTIVEC_BUILTIN_LVEHX
:
4858 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx
,
4860 case ALTIVEC_BUILTIN_LVEWX
:
4861 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx
,
4863 case ALTIVEC_BUILTIN_LVXL
:
4864 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl
,
4866 case ALTIVEC_BUILTIN_LVX
:
4867 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx
,
4878 /* Binops that need to be initialized manually, but can be expanded
4879 automagically by rs6000_expand_binop_builtin. */
4880 static struct builtin_description bdesc_2arg_spe
[] =
4882 { 0, CODE_FOR_spe_evlddx
, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX
},
4883 { 0, CODE_FOR_spe_evldwx
, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX
},
4884 { 0, CODE_FOR_spe_evldhx
, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX
},
4885 { 0, CODE_FOR_spe_evlwhex
, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX
},
4886 { 0, CODE_FOR_spe_evlwhoux
, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX
},
4887 { 0, CODE_FOR_spe_evlwhosx
, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX
},
4888 { 0, CODE_FOR_spe_evlwwsplatx
, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX
},
4889 { 0, CODE_FOR_spe_evlwhsplatx
, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX
},
4890 { 0, CODE_FOR_spe_evlhhesplatx
, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX
},
4891 { 0, CODE_FOR_spe_evlhhousplatx
, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX
},
4892 { 0, CODE_FOR_spe_evlhhossplatx
, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX
},
4893 { 0, CODE_FOR_spe_evldd
, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD
},
4894 { 0, CODE_FOR_spe_evldw
, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW
},
4895 { 0, CODE_FOR_spe_evldh
, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH
},
4896 { 0, CODE_FOR_spe_evlwhe
, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE
},
4897 { 0, CODE_FOR_spe_evlwhou
, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU
},
4898 { 0, CODE_FOR_spe_evlwhos
, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS
},
4899 { 0, CODE_FOR_spe_evlwwsplat
, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT
},
4900 { 0, CODE_FOR_spe_evlwhsplat
, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT
},
4901 { 0, CODE_FOR_spe_evlhhesplat
, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT
},
4902 { 0, CODE_FOR_spe_evlhhousplat
, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT
},
4903 { 0, CODE_FOR_spe_evlhhossplat
, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT
}
4906 /* Expand the builtin in EXP and store the result in TARGET. Store
4907 true in *EXPANDEDP if we found a builtin to expand.
4909 This expands the SPE builtins that are not simple unary and binary
4912 spe_expand_builtin (exp
, target
, expandedp
)
4917 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4918 tree arglist
= TREE_OPERAND (exp
, 1);
4920 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4921 enum insn_code icode
;
4922 enum machine_mode tmode
, mode0
;
4924 struct builtin_description
*d
;
4929 /* Syntax check for a 5-bit unsigned immediate. */
4932 case SPE_BUILTIN_EVSTDD
:
4933 case SPE_BUILTIN_EVSTDH
:
4934 case SPE_BUILTIN_EVSTDW
:
4935 case SPE_BUILTIN_EVSTWHE
:
4936 case SPE_BUILTIN_EVSTWHO
:
4937 case SPE_BUILTIN_EVSTWWE
:
4938 case SPE_BUILTIN_EVSTWWO
:
4939 arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4940 if (TREE_CODE (arg1
) != INTEGER_CST
4941 || TREE_INT_CST_LOW (arg1
) & ~0x1f)
4943 error ("argument 2 must be a 5-bit unsigned literal");
4951 d
= (struct builtin_description
*) bdesc_2arg_spe
;
4952 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg_spe
); ++i
, ++d
)
4953 if (d
->code
== fcode
)
4954 return rs6000_expand_binop_builtin (d
->icode
, arglist
, target
);
4956 d
= (struct builtin_description
*) bdesc_spe_predicates
;
4957 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_predicates
); ++i
, ++d
)
4958 if (d
->code
== fcode
)
4959 return spe_expand_predicate_builtin (d
->icode
, arglist
, target
);
4961 d
= (struct builtin_description
*) bdesc_spe_evsel
;
4962 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_evsel
); ++i
, ++d
)
4963 if (d
->code
== fcode
)
4964 return spe_expand_evsel_builtin (d
->icode
, arglist
, target
);
4968 case SPE_BUILTIN_EVSTDDX
:
4969 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx
, arglist
);
4970 case SPE_BUILTIN_EVSTDHX
:
4971 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx
, arglist
);
4972 case SPE_BUILTIN_EVSTDWX
:
4973 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx
, arglist
);
4974 case SPE_BUILTIN_EVSTWHEX
:
4975 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex
, arglist
);
4976 case SPE_BUILTIN_EVSTWHOX
:
4977 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox
, arglist
);
4978 case SPE_BUILTIN_EVSTWWEX
:
4979 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex
, arglist
);
4980 case SPE_BUILTIN_EVSTWWOX
:
4981 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox
, arglist
);
4982 case SPE_BUILTIN_EVSTDD
:
4983 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd
, arglist
);
4984 case SPE_BUILTIN_EVSTDH
:
4985 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh
, arglist
);
4986 case SPE_BUILTIN_EVSTDW
:
4987 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw
, arglist
);
4988 case SPE_BUILTIN_EVSTWHE
:
4989 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe
, arglist
);
4990 case SPE_BUILTIN_EVSTWHO
:
4991 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho
, arglist
);
4992 case SPE_BUILTIN_EVSTWWE
:
4993 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe
, arglist
);
4994 case SPE_BUILTIN_EVSTWWO
:
4995 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo
, arglist
);
4996 case SPE_BUILTIN_MFSPEFSCR
:
4997 icode
= CODE_FOR_spe_mfspefscr
;
4998 tmode
= insn_data
[icode
].operand
[0].mode
;
5001 || GET_MODE (target
) != tmode
5002 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5003 target
= gen_reg_rtx (tmode
);
5005 pat
= GEN_FCN (icode
) (target
);
5010 case SPE_BUILTIN_MTSPEFSCR
:
5011 icode
= CODE_FOR_spe_mtspefscr
;
5012 arg0
= TREE_VALUE (arglist
);
5013 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5014 mode0
= insn_data
[icode
].operand
[0].mode
;
5016 if (arg0
== error_mark_node
)
5019 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
5020 op0
= copy_to_mode_reg (mode0
, op0
);
5022 pat
= GEN_FCN (icode
) (op0
);
/* spe_expand_predicate_builtin: expand an SPE comparison-predicate
   builtin.  Argument 1 (FORM) selects which CR bit of the single
   compare is tested (all/any/upper/lower); arguments 2 and 3 are the
   two vectors compared.  NOTE(review): garbled extraction -- interior
   lines (return type, locals such as `form_int' and `code', the switch
   on FORM, returns) are elided; comments only, code bytes untouched.  */
5035 spe_expand_predicate_builtin (icode
, arglist
, target
)
5036 enum insn_code icode
;
5040 rtx pat
, scratch
, tmp
;
5041 tree form
= TREE_VALUE (arglist
);
5042 tree arg0
= TREE_VALUE (TREE_CHAIN (arglist
));
5043 tree arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5044 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5045 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5046 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5047 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
/* The variant selector must be a compile-time constant.  */
5051 if (TREE_CODE (form
) != INTEGER_CST
)
5053 error ("argument 1 of __builtin_spe_predicate must be a constant")
;
5057 form_int
= TREE_INT_CST_LOW (form
);
5062 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
/* The predicate result is a plain SImode value (0/1 from a CR bit).  */
5066 || GET_MODE (target
) != SImode
5067 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, SImode
))
5068 target
= gen_reg_rtx (SImode
);
/* Coerce both compare operands to what the insn pattern accepts.  */
5070 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5071 op0
= copy_to_mode_reg (mode0
, op0
);
5072 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
5073 op1
= copy_to_mode_reg (mode1
, op1
);
/* One compare into a CC scratch; the FORM switch below picks which CR
   bit to read out of it.  */
5075 scratch
= gen_reg_rtx (CCmode
);
5077 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
);
5082 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
5083 _lower_. We use one compare, but look in different bits of the
5084 CR for each variant.
5086 There are 2 elements in each SPE simd type (upper/lower). The CR
5087 bits are set as follows:
5089 BIT0 | BIT 1 | BIT 2 | BIT 3
5090 U | L | (U | L) | (U & L)
5092 So, for an "all" relationship, BIT 3 would be set.
5093 For an "any" relationship, BIT 2 would be set. Etc.
5095 Following traditional nomenclature, these bits map to:
5097 BIT0 | BIT 1 | BIT 2 | BIT 3
5100 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
5105 /* All variant. OV bit. */
5107 /* We need to get to the OV bit, which is the ORDERED bit. We
5108 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
5109 that's ugly and will trigger a validate_condition_mode abort.
5110 So let's just use another pattern. */
5111 emit_insn (gen_move_from_CR_ov_bit (target
, scratch
))
;
5113 /* Any variant. EQ bit. */
5117 /* Upper variant. LT bit. */
5121 /* Lower variant. GT bit. */
5126 error ("argument 1 of __builtin_spe_predicate is out of range")
;
/* For the non-"all" variants: materialize the chosen comparison of the
   CC scratch against zero and move it into TARGET.  `code' is set by
   the (elided) switch above.  */
5130 tmp
= gen_rtx_fmt_ee (code
, SImode
, scratch
, const0_rtx
);
5131 emit_move_insn (target
, tmp
);
/* spe_expand_evsel_builtin: expand an SPE evsel builtin -- compare the
   first two vector args, then element-wise select between the third
   and fourth based on the comparison result.  NOTE(review): garbled
   extraction; interior lines (return type, locals `pat'/`scratch',
   error-return paths, final return) are elided.  Comments only.  */
5136 /* The evsel builtins look like this:
5138 e = __builtin_spe_evsel_OP (a, b, c, d);
5142 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
5143 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
5147 spe_expand_evsel_builtin (icode
, arglist
, target
)
5148 enum insn_code icode
;
5153 tree arg0
= TREE_VALUE (arglist
);
5154 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
5155 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5156 tree arg3
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist
))));
5157 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5158 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5159 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
5160 rtx op3
= expand_expr (arg3
, NULL_RTX
, VOIDmode
, 0);
5161 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5162 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
5167 if (arg0
== error_mark_node
|| arg1
== error_mark_node
5168 || arg2
== error_mark_node
|| arg3
== error_mark_node
)
5172 || GET_MODE (target
) != mode0
5173 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, mode0
))
5174 target
= gen_reg_rtx (mode0
);
5176 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5177 op0
= copy_to_mode_reg (mode0
, op0
);
5178 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
5179 op1
= copy_to_mode_reg (mode0
, op1
);
5180 if (! (*insn_data
[icode
].operand
[1].predicate
) (op2
, mode1
))
5181 op2
= copy_to_mode_reg (mode0
, op2
);
5182 if (! (*insn_data
[icode
].operand
[1].predicate
) (op3
, mode1
))
5183 op3
= copy_to_mode_reg (mode0
, op3
);
5185 /* Generate the compare. */
/* NOTE(review): op1..op3 above are all checked against operand[1]'s
   predicate (with mode1) yet copied with mode0 -- this matches the
   rs6000.c of this era but looks like an operand[2]/[3] typo; verify
   against a current GCC tree before relying on it.  */
5186 scratch
= gen_reg_rtx (CCmode
);
5187 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
);
/* Select integer vs. floating-point evsel pattern by element mode.  */
5192 if (mode0
== V2SImode
)
5193 emit_insn (gen_spe_evsel (target
, op2
, op3
, scratch
))
;
5195 emit_insn (gen_spe_evsel_fs (target
, op2
, op3
, scratch
))
;
/* rs6000_expand_builtin: the target hook that expands any rs6000
   builtin call.  It first tries the AltiVec and SPE irregular
   expanders, then falls back to the shared unary/binary/ternary
   descriptor tables.  NOTE(review): garbled extraction; the TARGET_*
   guards around the altivec/spe calls, the `success' checks and the
   final abort/return are elided.  Comments only; code untouched.  */
5200 /* Expand an expression EXP that calls a built-in function,
5201 with result going to TARGET if that's convenient
5202 (and in mode MODE if that's convenient).
5203 SUBTARGET may be used as the target for computing one of EXP's operands.
5204 IGNORE is nonzero if the value is to be ignored. */
5207 rs6000_expand_builtin (exp
, target
, subtarget
, mode
, ignore
)
5210 rtx subtarget ATTRIBUTE_UNUSED
;
5211 enum machine_mode mode ATTRIBUTE_UNUSED
;
5212 int ignore ATTRIBUTE_UNUSED
;
5214 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5215 tree arglist
= TREE_OPERAND (exp
, 1);
5216 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
5217 struct builtin_description
*d
;
/* Irregular builtins first: each sub-expander reports via `success'.  */
5224 ret
= altivec_expand_builtin (exp
, target
, &success
);
5231 ret
= spe_expand_builtin (exp
, target
, &success
);
5237 if (TARGET_ALTIVEC
|| TARGET_SPE
)
5239 /* Handle simple unary operations. */
5240 d
= (struct builtin_description
*) bdesc_1arg
;
5241 for (i
= 0; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
5242 if (d
->code
== fcode
)
5243 return rs6000_expand_unop_builtin (d
->icode
, arglist
, target
);
5245 /* Handle simple binary operations. */
5246 d
= (struct builtin_description
*) bdesc_2arg
;
5247 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
5248 if (d
->code
== fcode
)
5249 return rs6000_expand_binop_builtin (d
->icode
, arglist
, target
);
5251 /* Handle simple ternary operations. */
5252 d
= (struct builtin_description
*) bdesc_3arg
;
5253 for (i
= 0; i
< ARRAY_SIZE (bdesc_3arg
); i
++, d
++)
5254 if (d
->code
== fcode
)
5255 return rs6000_expand_ternop_builtin (d
->icode
, arglist
, target
);
/* rs6000_init_builtins: register all rs6000 builtins.  SPE and AltiVec
   each register their irregular builtins, then the tables shared by
   both ISAs are registered.  NOTE(review): garbled extraction -- the
   `if (TARGET_SPE)' / `if (TARGET_ALTIVEC)' guards around the first
   two calls appear elided.  Comments only; code bytes untouched.  */
5263 rs6000_init_builtins ()
5266 spe_init_builtins ();
5268 altivec_init_builtins ();
/* The common unary/binary/ternary tables serve both vector ISAs.  */
5269 if (TARGET_ALTIVEC
|| TARGET_SPE
)
5270 rs6000_common_init_builtins ();
/* enable_mask_for_builtins: force-enable a contiguous [START, END]
   range of builtin descriptors by writing the current target_flags
   into each descriptor's mask.  Exists because target_flags has run
   out of bits for a dedicated SPE mask (see the comment at the call
   sites in spe_init_builtins).  NOTE(review): garbled extraction --
   the loop-exit `break' after END and surrounding braces are elided.
   Comments only; code bytes untouched.  */
5273 /* Search through a set of builtins and enable the mask bits.
5274 DESC is an array of builtins.
5275 SIZE is the total number of builtins.
5276 START is the builtin enum at which to start.
5277 END is the builtin enum at which to end. */
5279 enable_mask_for_builtins (desc
, size
, start
, end
)
5280 struct builtin_description
*desc
;
5282 enum rs6000_builtins start
, end
;
/* Find the first descriptor whose code matches START...  */
5286 for (i
= 0; i
< size
; ++i
)
5287 if (desc
[i
].code
== start
)
/* ...then mark every descriptor from there through END.  */
5293 for (; i
< size
; ++i
)
5295 /* Flip all the bits on. */
5296 desc
[i
].mask
= target_flags
;
5297 if (desc
[i
].code
== end
)
/* spe_init_builtins: build the SPE function types and register the
   irregular SPE builtins (loads, stores, SPEFSCR access), then the
   predicate and evsel builtins from their descriptor tables.
   NOTE(review): garbled extraction -- many interior lines (endlink
   terminators of the tree_cons chains, some declarations such as
   void_ftype_int / int_ftype_void headers, switch headers and braces)
   are elided.  Comments only; code bytes are untouched.  */
5303 spe_init_builtins ()
5305 tree endlink
= void_list_node
;
/* Pointer types used in the builtin signatures below.  */
5306 tree puint_type_node
= build_pointer_type (unsigned_type_node
);
5307 tree pushort_type_node
= build_pointer_type (short_unsigned_type_node
);
5308 tree pv2si_type_node
= build_pointer_type (V2SI_type_node
);
5309 struct builtin_description
*d
;
/* Function types: the `_4_' forms take four vector operands (evsel).  */
5312 tree v2si_ftype_4_v2si
5313 = build_function_type
5315 tree_cons (NULL_TREE
, V2SI_type_node
,
5316 tree_cons (NULL_TREE
, V2SI_type_node
,
5317 tree_cons (NULL_TREE
, V2SI_type_node
,
5318 tree_cons (NULL_TREE
, V2SI_type_node
,
5321 tree v2sf_ftype_4_v2sf
5322 = build_function_type
5324 tree_cons (NULL_TREE
, V2SF_type_node
,
5325 tree_cons (NULL_TREE
, V2SF_type_node
,
5326 tree_cons (NULL_TREE
, V2SF_type_node
,
5327 tree_cons (NULL_TREE
, V2SF_type_node
,
5330 tree int_ftype_int_v2si_v2si
5331 = build_function_type
5333 tree_cons (NULL_TREE
, integer_type_node
,
5334 tree_cons (NULL_TREE
, V2SI_type_node
,
5335 tree_cons (NULL_TREE
, V2SI_type_node
,
5338 tree int_ftype_int_v2sf_v2sf
5339 = build_function_type
5341 tree_cons (NULL_TREE
, integer_type_node
,
5342 tree_cons (NULL_TREE
, V2SF_type_node
,
5343 tree_cons (NULL_TREE
, V2SF_type_node
,
5346 tree void_ftype_v2si_puint_int
5347 = build_function_type (void_type_node
,
5348 tree_cons (NULL_TREE
, V2SI_type_node
,
5349 tree_cons (NULL_TREE
, puint_type_node
,
5350 tree_cons (NULL_TREE
,
5354 tree void_ftype_v2si_puint_char
5355 = build_function_type (void_type_node
,
5356 tree_cons (NULL_TREE
, V2SI_type_node
,
5357 tree_cons (NULL_TREE
, puint_type_node
,
5358 tree_cons (NULL_TREE
,
5362 tree void_ftype_v2si_pv2si_int
5363 = build_function_type (void_type_node
,
5364 tree_cons (NULL_TREE
, V2SI_type_node
,
5365 tree_cons (NULL_TREE
, pv2si_type_node
,
5366 tree_cons (NULL_TREE
,
5370 tree void_ftype_v2si_pv2si_char
5371 = build_function_type (void_type_node
,
5372 tree_cons (NULL_TREE
, V2SI_type_node
,
5373 tree_cons (NULL_TREE
, pv2si_type_node
,
5374 tree_cons (NULL_TREE
,
5379 = build_function_type (void_type_node
,
5380 tree_cons (NULL_TREE
, integer_type_node
, endlink
));
5383 = build_function_type (integer_type_node
,
5384 tree_cons (NULL_TREE
, void_type_node
, endlink
));
5386 tree v2si_ftype_pv2si_int
5387 = build_function_type (V2SI_type_node
,
5388 tree_cons (NULL_TREE
, pv2si_type_node
,
5389 tree_cons (NULL_TREE
, integer_type_node
,
5392 tree v2si_ftype_puint_int
5393 = build_function_type (V2SI_type_node
,
5394 tree_cons (NULL_TREE
, puint_type_node
,
5395 tree_cons (NULL_TREE
, integer_type_node
,
5398 tree v2si_ftype_pushort_int
5399 = build_function_type (V2SI_type_node
,
5400 tree_cons (NULL_TREE
, pushort_type_node
,
5401 tree_cons (NULL_TREE
, integer_type_node
,
5404 /* The initialization of the simple binary and unary builtins is
5405 done in rs6000_common_init_builtins, but we have to enable the
5406 mask bits here manually because we have run out of `target_flags'
5407 bits. We really need to redesign this mask business. */
5409 enable_mask_for_builtins ((struct builtin_description
*) bdesc_2arg
,
5410 ARRAY_SIZE (bdesc_2arg
),
5413 enable_mask_for_builtins ((struct builtin_description
*) bdesc_1arg
,
5414 ARRAY_SIZE (bdesc_1arg
),
5416 SPE_BUILTIN_EVSUBFUSIAAW
);
5417 enable_mask_for_builtins ((struct builtin_description
*) bdesc_spe_predicates
,
5418 ARRAY_SIZE (bdesc_spe_predicates
),
5419 SPE_BUILTIN_EVCMPEQ
,
5420 SPE_BUILTIN_EVFSTSTLT
);
5421 enable_mask_for_builtins ((struct builtin_description
*) bdesc_spe_evsel
,
5422 ARRAY_SIZE (bdesc_spe_evsel
),
5423 SPE_BUILTIN_EVSEL_CMPGTS
,
5424 SPE_BUILTIN_EVSEL_FSTSTEQ
);
5426 /* Initialize irregular SPE builtins. */
5428 def_builtin (target_flags
, "__builtin_spe_mtspefscr", void_ftype_int
, SPE_BUILTIN_MTSPEFSCR
);
5429 def_builtin (target_flags
, "__builtin_spe_mfspefscr", int_ftype_void
, SPE_BUILTIN_MFSPEFSCR
);
5430 def_builtin (target_flags
, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDDX
);
5431 def_builtin (target_flags
, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDHX
);
5432 def_builtin (target_flags
, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDWX
);
5433 def_builtin (target_flags
, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWHEX
);
5434 def_builtin (target_flags
, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWHOX
);
5435 def_builtin (target_flags
, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWWEX
);
5436 def_builtin (target_flags
, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWWOX
);
5437 def_builtin (target_flags
, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDD
);
5438 def_builtin (target_flags
, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDH
);
5439 def_builtin (target_flags
, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDW
);
5440 def_builtin (target_flags
, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWHE
);
5441 def_builtin (target_flags
, "__builtin_spe_evstwho", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWHO
);
5442 def_builtin (target_flags
, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWWE
);
5443 def_builtin (target_flags
, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWWO
);
/* Loads: the `x' suffix takes an integer index, the plain form a
   literal offset (checked in spe_expand_builtin).  */
5446 def_builtin (target_flags
, "__builtin_spe_evlddx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDDX
);
5447 def_builtin (target_flags
, "__builtin_spe_evldwx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDWX
);
5448 def_builtin (target_flags
, "__builtin_spe_evldhx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDHX
);
5449 def_builtin (target_flags
, "__builtin_spe_evlwhex", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHEX
);
5450 def_builtin (target_flags
, "__builtin_spe_evlwhoux", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOUX
);
5451 def_builtin (target_flags
, "__builtin_spe_evlwhosx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOSX
);
5452 def_builtin (target_flags
, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWWSPLATX
);
5453 def_builtin (target_flags
, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHSPLATX
);
5454 def_builtin (target_flags
, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHESPLATX
);
5455 def_builtin (target_flags
, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOUSPLATX
);
5456 def_builtin (target_flags
, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOSSPLATX
);
5457 def_builtin (target_flags
, "__builtin_spe_evldd", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDD
);
5458 def_builtin (target_flags
, "__builtin_spe_evldw", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDW
);
5459 def_builtin (target_flags
, "__builtin_spe_evldh", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDH
);
5460 def_builtin (target_flags
, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHESPLAT
);
5461 def_builtin (target_flags
, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOSSPLAT
);
5462 def_builtin (target_flags
, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOUSPLAT
);
5463 def_builtin (target_flags
, "__builtin_spe_evlwhe", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHE
);
5464 def_builtin (target_flags
, "__builtin_spe_evlwhos", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOS
);
5465 def_builtin (target_flags
, "__builtin_spe_evlwhou", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOU
);
5466 def_builtin (target_flags
, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHSPLAT
);
5467 def_builtin (target_flags
, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWWSPLAT
);
/* Predicates: the signature depends on the insn's operand 1 mode
   (integer vs. float vectors).  */
5470 d
= (struct builtin_description
*) bdesc_spe_predicates
;
5471 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_predicates
); ++i
, d
++)
5475 switch (insn_data
[d
->icode
].operand
[1].mode
)
5478 type
= int_ftype_int_v2si_v2si
;
5481 type
= int_ftype_int_v2sf_v2sf
;
5487 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
5490 /* Evsel predicates. */
5491 d
= (struct builtin_description
*) bdesc_spe_evsel
;
5492 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_evsel
); ++i
, d
++)
5496 switch (insn_data
[d
->icode
].operand
[1].mode
)
5499 type
= v2si_ftype_4_v2si
;
5502 type
= v2sf_ftype_4_v2sf
;
5508 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
/* altivec_init_builtins: build the AltiVec function types and register
   the irregular AltiVec builtins (loads/stores, VSCR access, data
   stream ops), plus the dst, predicate and abs builtin tables.
   NOTE(review): garbled extraction -- interior lines (declaration
   header for void_ftype_qi, mode case labels, braces, defaults) are
   elided.  Comments only; code bytes are untouched.  */
5513 altivec_init_builtins ()
5515 struct builtin_description
*d
;
5516 struct builtin_description_predicates
*dp
;
/* Pointer types (plain and const-qualified) for the memory builtins.  */
5518 tree pfloat_type_node
= build_pointer_type (float_type_node
);
5519 tree pint_type_node
= build_pointer_type (integer_type_node
);
5520 tree pshort_type_node
= build_pointer_type (short_integer_type_node
);
5521 tree pchar_type_node
= build_pointer_type (char_type_node
);
5523 tree pvoid_type_node
= build_pointer_type (void_type_node
);
5525 tree pcfloat_type_node
= build_pointer_type (build_qualified_type (float_type_node
, TYPE_QUAL_CONST
));
5526 tree pcint_type_node
= build_pointer_type (build_qualified_type (integer_type_node
, TYPE_QUAL_CONST
));
5527 tree pcshort_type_node
= build_pointer_type (build_qualified_type (short_integer_type_node
, TYPE_QUAL_CONST
));
5528 tree pcchar_type_node
= build_pointer_type (build_qualified_type (char_type_node
, TYPE_QUAL_CONST
));
5530 tree pcvoid_type_node
= build_pointer_type (build_qualified_type (void_type_node
, TYPE_QUAL_CONST
));
/* Function types for the builtins registered below.  */
5532 tree int_ftype_int_v4si_v4si
5533 = build_function_type_list (integer_type_node
,
5534 integer_type_node
, V4SI_type_node
,
5535 V4SI_type_node
, NULL_TREE
);
5536 tree v4sf_ftype_pcfloat
5537 = build_function_type_list (V4SF_type_node
, pcfloat_type_node
, NULL_TREE
);
5538 tree void_ftype_pfloat_v4sf
5539 = build_function_type_list (void_type_node
,
5540 pfloat_type_node
, V4SF_type_node
, NULL_TREE
);
5541 tree v4si_ftype_pcint
5542 = build_function_type_list (V4SI_type_node
, pcint_type_node
, NULL_TREE
);
5543 tree void_ftype_pint_v4si
5544 = build_function_type_list (void_type_node
,
5545 pint_type_node
, V4SI_type_node
, NULL_TREE
);
5546 tree v8hi_ftype_pcshort
5547 = build_function_type_list (V8HI_type_node
, pcshort_type_node
, NULL_TREE
);
5548 tree void_ftype_pshort_v8hi
5549 = build_function_type_list (void_type_node
,
5550 pshort_type_node
, V8HI_type_node
, NULL_TREE
);
5551 tree v16qi_ftype_pcchar
5552 = build_function_type_list (V16QI_type_node
, pcchar_type_node
, NULL_TREE
);
5553 tree void_ftype_pchar_v16qi
5554 = build_function_type_list (void_type_node
,
5555 pchar_type_node
, V16QI_type_node
, NULL_TREE
);
5556 tree void_ftype_v4si
5557 = build_function_type_list (void_type_node
, V4SI_type_node
, NULL_TREE
);
5558 tree v8hi_ftype_void
5559 = build_function_type (V8HI_type_node
, void_list_node
);
5560 tree void_ftype_void
5561 = build_function_type (void_type_node
, void_list_node
);
5563 = build_function_type_list (void_type_node
, char_type_node
, NULL_TREE
);
5565 tree v16qi_ftype_int_pcvoid
5566 = build_function_type_list (V16QI_type_node
,
5567 integer_type_node
, pcvoid_type_node
, NULL_TREE
);
5568 tree v8hi_ftype_int_pcvoid
5569 = build_function_type_list (V8HI_type_node
,
5570 integer_type_node
, pcvoid_type_node
, NULL_TREE
);
5571 tree v4si_ftype_int_pcvoid
5572 = build_function_type_list (V4SI_type_node
,
5573 integer_type_node
, pcvoid_type_node
, NULL_TREE
);
5575 tree void_ftype_v4si_int_pvoid
5576 = build_function_type_list (void_type_node
,
5577 V4SI_type_node
, integer_type_node
,
5578 pvoid_type_node
, NULL_TREE
);
5579 tree void_ftype_v16qi_int_pvoid
5580 = build_function_type_list (void_type_node
,
5581 V16QI_type_node
, integer_type_node
,
5582 pvoid_type_node
, NULL_TREE
);
5583 tree void_ftype_v8hi_int_pvoid
5584 = build_function_type_list (void_type_node
,
5585 V8HI_type_node
, integer_type_node
,
5586 pvoid_type_node
, NULL_TREE
);
5587 tree int_ftype_int_v8hi_v8hi
5588 = build_function_type_list (integer_type_node
,
5589 integer_type_node
, V8HI_type_node
,
5590 V8HI_type_node
, NULL_TREE
);
5591 tree int_ftype_int_v16qi_v16qi
5592 = build_function_type_list (integer_type_node
,
5593 integer_type_node
, V16QI_type_node
,
5594 V16QI_type_node
, NULL_TREE
);
5595 tree int_ftype_int_v4sf_v4sf
5596 = build_function_type_list (integer_type_node
,
5597 integer_type_node
, V4SF_type_node
,
5598 V4SF_type_node
, NULL_TREE
);
5599 tree v4si_ftype_v4si
5600 = build_function_type_list (V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
5601 tree v8hi_ftype_v8hi
5602 = build_function_type_list (V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5603 tree v16qi_ftype_v16qi
5604 = build_function_type_list (V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5605 tree v4sf_ftype_v4sf
5606 = build_function_type_list (V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5607 tree void_ftype_pcvoid_int_char
5608 = build_function_type_list (void_type_node
,
5609 pcvoid_type_node
, integer_type_node
,
5610 char_type_node
, NULL_TREE
);
/* Internal load/store builtins, one pair per vector element type.  */
5612 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat
,
5613 ALTIVEC_BUILTIN_LD_INTERNAL_4sf
);
5614 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf
,
5615 ALTIVEC_BUILTIN_ST_INTERNAL_4sf
);
5616 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint
,
5617 ALTIVEC_BUILTIN_LD_INTERNAL_4si
);
5618 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si
,
5619 ALTIVEC_BUILTIN_ST_INTERNAL_4si
);
5620 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort
,
5621 ALTIVEC_BUILTIN_LD_INTERNAL_8hi
);
5622 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi
,
5623 ALTIVEC_BUILTIN_ST_INTERNAL_8hi
);
5624 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar
,
5625 ALTIVEC_BUILTIN_LD_INTERNAL_16qi
);
5626 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi
,
5627 ALTIVEC_BUILTIN_ST_INTERNAL_16qi
);
/* VSCR access, data-stream stop, and the lvs/lve/stv memory ops.  */
5628 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_mtvscr", void_ftype_v4si
, ALTIVEC_BUILTIN_MTVSCR
);
5629 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_mfvscr", v8hi_ftype_void
, ALTIVEC_BUILTIN_MFVSCR
);
5630 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_dssall", void_ftype_void
, ALTIVEC_BUILTIN_DSSALL
);
5631 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_dss", void_ftype_qi
, ALTIVEC_BUILTIN_DSS
);
5632 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvsl", v16qi_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVSL
);
5633 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvsr", v16qi_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVSR
);
5634 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvebx", v16qi_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVEBX
);
5635 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvehx", v8hi_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVEHX
);
5636 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvewx", v4si_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVEWX
);
5637 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvxl", v4si_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVXL
);
5638 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvx", v4si_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVX
);
5639 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVX
);
5640 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVEWX
);
5641 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVXL
);
5642 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid
, ALTIVEC_BUILTIN_STVEBX
);
5643 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid
, ALTIVEC_BUILTIN_STVEHX
);
5645 /* Add the DST variants. */
5646 d
= (struct builtin_description
*) bdesc_dst
;
5647 for (i
= 0; i
< ARRAY_SIZE (bdesc_dst
); i
++, d
++)
5648 def_builtin (d
->mask
, d
->name
, void_ftype_pcvoid_int_char
, d
->code
);
5650 /* Initialize the predicates. */
/* The predicate signature is selected by the insn's operand 1 mode.  */
5651 dp
= (struct builtin_description_predicates
*) bdesc_altivec_preds
;
5652 for (i
= 0; i
< ARRAY_SIZE (bdesc_altivec_preds
); i
++, dp
++)
5654 enum machine_mode mode1
;
5657 mode1
= insn_data
[dp
->icode
].operand
[1].mode
;
5662 type
= int_ftype_int_v4si_v4si
;
5665 type
= int_ftype_int_v8hi_v8hi
;
5668 type
= int_ftype_int_v16qi_v16qi
;
5671 type
= int_ftype_int_v4sf_v4sf
;
5677 def_builtin (dp
->mask
, dp
->name
, type
, dp
->code
);
5680 /* Initialize the abs* operators. */
/* abs builtins: signature selected by the insn's operand 0 mode.  */
5681 d
= (struct builtin_description
*) bdesc_abs
;
5682 for (i
= 0; i
< ARRAY_SIZE (bdesc_abs
); i
++, d
++)
5684 enum machine_mode mode0
;
5687 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
5692 type
= v4si_ftype_v4si
;
5695 type
= v8hi_ftype_v8hi
;
5698 type
= v16qi_ftype_v16qi
;
5701 type
= v4sf_ftype_v4sf
;
5707 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
5712 rs6000_common_init_builtins ()
5714 struct builtin_description
*d
;
5717 tree v4sf_ftype_v4sf_v4sf_v16qi
5718 = build_function_type_list (V4SF_type_node
,
5719 V4SF_type_node
, V4SF_type_node
,
5720 V16QI_type_node
, NULL_TREE
);
5721 tree v4si_ftype_v4si_v4si_v16qi
5722 = build_function_type_list (V4SI_type_node
,
5723 V4SI_type_node
, V4SI_type_node
,
5724 V16QI_type_node
, NULL_TREE
);
5725 tree v8hi_ftype_v8hi_v8hi_v16qi
5726 = build_function_type_list (V8HI_type_node
,
5727 V8HI_type_node
, V8HI_type_node
,
5728 V16QI_type_node
, NULL_TREE
);
5729 tree v16qi_ftype_v16qi_v16qi_v16qi
5730 = build_function_type_list (V16QI_type_node
,
5731 V16QI_type_node
, V16QI_type_node
,
5732 V16QI_type_node
, NULL_TREE
);
5733 tree v4si_ftype_char
5734 = build_function_type_list (V4SI_type_node
, char_type_node
, NULL_TREE
);
5735 tree v8hi_ftype_char
5736 = build_function_type_list (V8HI_type_node
, char_type_node
, NULL_TREE
);
5737 tree v16qi_ftype_char
5738 = build_function_type_list (V16QI_type_node
, char_type_node
, NULL_TREE
);
5739 tree v8hi_ftype_v16qi
5740 = build_function_type_list (V8HI_type_node
, V16QI_type_node
, NULL_TREE
);
5741 tree v4sf_ftype_v4sf
5742 = build_function_type_list (V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5744 tree v2si_ftype_v2si_v2si
5745 = build_function_type_list (V2SI_type_node
,
5746 V2SI_type_node
, V2SI_type_node
, NULL_TREE
);
5748 tree v2sf_ftype_v2sf_v2sf
5749 = build_function_type_list (V2SF_type_node
,
5750 V2SF_type_node
, V2SF_type_node
, NULL_TREE
);
5752 tree v2si_ftype_int_int
5753 = build_function_type_list (V2SI_type_node
,
5754 integer_type_node
, integer_type_node
,
5757 tree v2si_ftype_v2si
5758 = build_function_type_list (V2SI_type_node
, V2SI_type_node
, NULL_TREE
);
5760 tree v2sf_ftype_v2sf
5761 = build_function_type_list (V2SF_type_node
,
5762 V2SF_type_node
, NULL_TREE
);
5764 tree v2sf_ftype_v2si
5765 = build_function_type_list (V2SF_type_node
,
5766 V2SI_type_node
, NULL_TREE
);
5768 tree v2si_ftype_v2sf
5769 = build_function_type_list (V2SI_type_node
,
5770 V2SF_type_node
, NULL_TREE
);
5772 tree v2si_ftype_v2si_char
5773 = build_function_type_list (V2SI_type_node
,
5774 V2SI_type_node
, char_type_node
, NULL_TREE
);
5776 tree v2si_ftype_int_char
5777 = build_function_type_list (V2SI_type_node
,
5778 integer_type_node
, char_type_node
, NULL_TREE
);
5780 tree v2si_ftype_char
5781 = build_function_type_list (V2SI_type_node
, char_type_node
, NULL_TREE
);
5783 tree int_ftype_int_int
5784 = build_function_type_list (integer_type_node
,
5785 integer_type_node
, integer_type_node
,
5788 tree v4si_ftype_v4si_v4si
5789 = build_function_type_list (V4SI_type_node
,
5790 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
5791 tree v4sf_ftype_v4si_char
5792 = build_function_type_list (V4SF_type_node
,
5793 V4SI_type_node
, char_type_node
, NULL_TREE
);
5794 tree v4si_ftype_v4sf_char
5795 = build_function_type_list (V4SI_type_node
,
5796 V4SF_type_node
, char_type_node
, NULL_TREE
);
5797 tree v4si_ftype_v4si_char
5798 = build_function_type_list (V4SI_type_node
,
5799 V4SI_type_node
, char_type_node
, NULL_TREE
);
5800 tree v8hi_ftype_v8hi_char
5801 = build_function_type_list (V8HI_type_node
,
5802 V8HI_type_node
, char_type_node
, NULL_TREE
);
5803 tree v16qi_ftype_v16qi_char
5804 = build_function_type_list (V16QI_type_node
,
5805 V16QI_type_node
, char_type_node
, NULL_TREE
);
5806 tree v16qi_ftype_v16qi_v16qi_char
5807 = build_function_type_list (V16QI_type_node
,
5808 V16QI_type_node
, V16QI_type_node
,
5809 char_type_node
, NULL_TREE
);
5810 tree v8hi_ftype_v8hi_v8hi_char
5811 = build_function_type_list (V8HI_type_node
,
5812 V8HI_type_node
, V8HI_type_node
,
5813 char_type_node
, NULL_TREE
);
5814 tree v4si_ftype_v4si_v4si_char
5815 = build_function_type_list (V4SI_type_node
,
5816 V4SI_type_node
, V4SI_type_node
,
5817 char_type_node
, NULL_TREE
);
5818 tree v4sf_ftype_v4sf_v4sf_char
5819 = build_function_type_list (V4SF_type_node
,
5820 V4SF_type_node
, V4SF_type_node
,
5821 char_type_node
, NULL_TREE
);
5822 tree v4sf_ftype_v4sf_v4sf
5823 = build_function_type_list (V4SF_type_node
,
5824 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5825 tree v4sf_ftype_v4sf_v4sf_v4si
5826 = build_function_type_list (V4SF_type_node
,
5827 V4SF_type_node
, V4SF_type_node
,
5828 V4SI_type_node
, NULL_TREE
);
5829 tree v4sf_ftype_v4sf_v4sf_v4sf
5830 = build_function_type_list (V4SF_type_node
,
5831 V4SF_type_node
, V4SF_type_node
,
5832 V4SF_type_node
, NULL_TREE
);
5833 tree v4si_ftype_v4si_v4si_v4si
5834 = build_function_type_list (V4SI_type_node
,
5835 V4SI_type_node
, V4SI_type_node
,
5836 V4SI_type_node
, NULL_TREE
);
5837 tree v8hi_ftype_v8hi_v8hi
5838 = build_function_type_list (V8HI_type_node
,
5839 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5840 tree v8hi_ftype_v8hi_v8hi_v8hi
5841 = build_function_type_list (V8HI_type_node
,
5842 V8HI_type_node
, V8HI_type_node
,
5843 V8HI_type_node
, NULL_TREE
);
5844 tree v4si_ftype_v8hi_v8hi_v4si
5845 = build_function_type_list (V4SI_type_node
,
5846 V8HI_type_node
, V8HI_type_node
,
5847 V4SI_type_node
, NULL_TREE
);
5848 tree v4si_ftype_v16qi_v16qi_v4si
5849 = build_function_type_list (V4SI_type_node
,
5850 V16QI_type_node
, V16QI_type_node
,
5851 V4SI_type_node
, NULL_TREE
);
5852 tree v16qi_ftype_v16qi_v16qi
5853 = build_function_type_list (V16QI_type_node
,
5854 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5855 tree v4si_ftype_v4sf_v4sf
5856 = build_function_type_list (V4SI_type_node
,
5857 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5858 tree v8hi_ftype_v16qi_v16qi
5859 = build_function_type_list (V8HI_type_node
,
5860 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5861 tree v4si_ftype_v8hi_v8hi
5862 = build_function_type_list (V4SI_type_node
,
5863 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5864 tree v8hi_ftype_v4si_v4si
5865 = build_function_type_list (V8HI_type_node
,
5866 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
5867 tree v16qi_ftype_v8hi_v8hi
5868 = build_function_type_list (V16QI_type_node
,
5869 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5870 tree v4si_ftype_v16qi_v4si
5871 = build_function_type_list (V4SI_type_node
,
5872 V16QI_type_node
, V4SI_type_node
, NULL_TREE
);
5873 tree v4si_ftype_v16qi_v16qi
5874 = build_function_type_list (V4SI_type_node
,
5875 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5876 tree v4si_ftype_v8hi_v4si
5877 = build_function_type_list (V4SI_type_node
,
5878 V8HI_type_node
, V4SI_type_node
, NULL_TREE
);
5879 tree v4si_ftype_v8hi
5880 = build_function_type_list (V4SI_type_node
, V8HI_type_node
, NULL_TREE
);
5881 tree int_ftype_v4si_v4si
5882 = build_function_type_list (integer_type_node
,
5883 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
5884 tree int_ftype_v4sf_v4sf
5885 = build_function_type_list (integer_type_node
,
5886 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5887 tree int_ftype_v16qi_v16qi
5888 = build_function_type_list (integer_type_node
,
5889 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5890 tree int_ftype_v8hi_v8hi
5891 = build_function_type_list (integer_type_node
,
5892 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5894 /* Add the simple ternary operators. */
5895 d
= (struct builtin_description
*) bdesc_3arg
;
5896 for (i
= 0; i
< ARRAY_SIZE (bdesc_3arg
); i
++, d
++)
5899 enum machine_mode mode0
, mode1
, mode2
, mode3
;
5902 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
5905 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
5906 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
5907 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
5908 mode3
= insn_data
[d
->icode
].operand
[3].mode
;
5910 /* When all four are of the same mode. */
5911 if (mode0
== mode1
&& mode1
== mode2
&& mode2
== mode3
)
5916 type
= v4si_ftype_v4si_v4si_v4si
;
5919 type
= v4sf_ftype_v4sf_v4sf_v4sf
;
5922 type
= v8hi_ftype_v8hi_v8hi_v8hi
;
5925 type
= v16qi_ftype_v16qi_v16qi_v16qi
;
5931 else if (mode0
== mode1
&& mode1
== mode2
&& mode3
== V16QImode
)
5936 type
= v4si_ftype_v4si_v4si_v16qi
;
5939 type
= v4sf_ftype_v4sf_v4sf_v16qi
;
5942 type
= v8hi_ftype_v8hi_v8hi_v16qi
;
5945 type
= v16qi_ftype_v16qi_v16qi_v16qi
;
5951 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V16QImode
5952 && mode3
== V4SImode
)
5953 type
= v4si_ftype_v16qi_v16qi_v4si
;
5954 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V8HImode
5955 && mode3
== V4SImode
)
5956 type
= v4si_ftype_v8hi_v8hi_v4si
;
5957 else if (mode0
== V4SFmode
&& mode1
== V4SFmode
&& mode2
== V4SFmode
5958 && mode3
== V4SImode
)
5959 type
= v4sf_ftype_v4sf_v4sf_v4si
;
5961 /* vchar, vchar, vchar, 4 bit literal. */
5962 else if (mode0
== V16QImode
&& mode1
== mode0
&& mode2
== mode0
5964 type
= v16qi_ftype_v16qi_v16qi_char
;
5966 /* vshort, vshort, vshort, 4 bit literal. */
5967 else if (mode0
== V8HImode
&& mode1
== mode0
&& mode2
== mode0
5969 type
= v8hi_ftype_v8hi_v8hi_char
;
5971 /* vint, vint, vint, 4 bit literal. */
5972 else if (mode0
== V4SImode
&& mode1
== mode0
&& mode2
== mode0
5974 type
= v4si_ftype_v4si_v4si_char
;
5976 /* vfloat, vfloat, vfloat, 4 bit literal. */
5977 else if (mode0
== V4SFmode
&& mode1
== mode0
&& mode2
== mode0
5979 type
= v4sf_ftype_v4sf_v4sf_char
;
5984 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
5987 /* Add the simple binary operators. */
5988 d
= (struct builtin_description
*) bdesc_2arg
;
5989 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
5991 enum machine_mode mode0
, mode1
, mode2
;
5994 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
5997 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
5998 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
5999 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
6001 /* When all three operands are of the same mode. */
6002 if (mode0
== mode1
&& mode1
== mode2
)
6007 type
= v4sf_ftype_v4sf_v4sf
;
6010 type
= v4si_ftype_v4si_v4si
;
6013 type
= v16qi_ftype_v16qi_v16qi
;
6016 type
= v8hi_ftype_v8hi_v8hi
;
6019 type
= v2si_ftype_v2si_v2si
;
6022 type
= v2sf_ftype_v2sf_v2sf
;
6025 type
= int_ftype_int_int
;
6032 /* A few other combos we really don't want to do manually. */
6034 /* vint, vfloat, vfloat. */
6035 else if (mode0
== V4SImode
&& mode1
== V4SFmode
&& mode2
== V4SFmode
)
6036 type
= v4si_ftype_v4sf_v4sf
;
6038 /* vshort, vchar, vchar. */
6039 else if (mode0
== V8HImode
&& mode1
== V16QImode
&& mode2
== V16QImode
)
6040 type
= v8hi_ftype_v16qi_v16qi
;
6042 /* vint, vshort, vshort. */
6043 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V8HImode
)
6044 type
= v4si_ftype_v8hi_v8hi
;
6046 /* vshort, vint, vint. */
6047 else if (mode0
== V8HImode
&& mode1
== V4SImode
&& mode2
== V4SImode
)
6048 type
= v8hi_ftype_v4si_v4si
;
6050 /* vchar, vshort, vshort. */
6051 else if (mode0
== V16QImode
&& mode1
== V8HImode
&& mode2
== V8HImode
)
6052 type
= v16qi_ftype_v8hi_v8hi
;
6054 /* vint, vchar, vint. */
6055 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V4SImode
)
6056 type
= v4si_ftype_v16qi_v4si
;
6058 /* vint, vchar, vchar. */
6059 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V16QImode
)
6060 type
= v4si_ftype_v16qi_v16qi
;
6062 /* vint, vshort, vint. */
6063 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V4SImode
)
6064 type
= v4si_ftype_v8hi_v4si
;
6066 /* vint, vint, 5 bit literal. */
6067 else if (mode0
== V4SImode
&& mode1
== V4SImode
&& mode2
== QImode
)
6068 type
= v4si_ftype_v4si_char
;
6070 /* vshort, vshort, 5 bit literal. */
6071 else if (mode0
== V8HImode
&& mode1
== V8HImode
&& mode2
== QImode
)
6072 type
= v8hi_ftype_v8hi_char
;
6074 /* vchar, vchar, 5 bit literal. */
6075 else if (mode0
== V16QImode
&& mode1
== V16QImode
&& mode2
== QImode
)
6076 type
= v16qi_ftype_v16qi_char
;
6078 /* vfloat, vint, 5 bit literal. */
6079 else if (mode0
== V4SFmode
&& mode1
== V4SImode
&& mode2
== QImode
)
6080 type
= v4sf_ftype_v4si_char
;
6082 /* vint, vfloat, 5 bit literal. */
6083 else if (mode0
== V4SImode
&& mode1
== V4SFmode
&& mode2
== QImode
)
6084 type
= v4si_ftype_v4sf_char
;
6086 else if (mode0
== V2SImode
&& mode1
== SImode
&& mode2
== SImode
)
6087 type
= v2si_ftype_int_int
;
6089 else if (mode0
== V2SImode
&& mode1
== V2SImode
&& mode2
== QImode
)
6090 type
= v2si_ftype_v2si_char
;
6092 else if (mode0
== V2SImode
&& mode1
== SImode
&& mode2
== QImode
)
6093 type
= v2si_ftype_int_char
;
6096 else if (mode0
== SImode
)
6101 type
= int_ftype_v4si_v4si
;
6104 type
= int_ftype_v4sf_v4sf
;
6107 type
= int_ftype_v16qi_v16qi
;
6110 type
= int_ftype_v8hi_v8hi
;
6120 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
6123 /* Add the simple unary operators. */
6124 d
= (struct builtin_description
*) bdesc_1arg
;
6125 for (i
= 0; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
6127 enum machine_mode mode0
, mode1
;
6130 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
6133 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
6134 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
6136 if (mode0
== V4SImode
&& mode1
== QImode
)
6137 type
= v4si_ftype_char
;
6138 else if (mode0
== V8HImode
&& mode1
== QImode
)
6139 type
= v8hi_ftype_char
;
6140 else if (mode0
== V16QImode
&& mode1
== QImode
)
6141 type
= v16qi_ftype_char
;
6142 else if (mode0
== V4SFmode
&& mode1
== V4SFmode
)
6143 type
= v4sf_ftype_v4sf
;
6144 else if (mode0
== V8HImode
&& mode1
== V16QImode
)
6145 type
= v8hi_ftype_v16qi
;
6146 else if (mode0
== V4SImode
&& mode1
== V8HImode
)
6147 type
= v4si_ftype_v8hi
;
6148 else if (mode0
== V2SImode
&& mode1
== V2SImode
)
6149 type
= v2si_ftype_v2si
;
6150 else if (mode0
== V2SFmode
&& mode1
== V2SFmode
)
6151 type
= v2sf_ftype_v2sf
;
6152 else if (mode0
== V2SFmode
&& mode1
== V2SImode
)
6153 type
= v2sf_ftype_v2si
;
6154 else if (mode0
== V2SImode
&& mode1
== V2SFmode
)
6155 type
= v2si_ftype_v2sf
;
6156 else if (mode0
== V2SImode
&& mode1
== QImode
)
6157 type
= v2si_ftype_char
;
6161 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
6166 /* Expand a block move operation, and return 1 if successful. Return 0
6167 if we should let the compiler generate normal code.
6169 operands[0] is the destination
6170 operands[1] is the source
6171 operands[2] is the length
6172 operands[3] is the alignment */
6174 #define MAX_MOVE_REG 4
6177 expand_block_move (operands
)
6180 rtx orig_dest
= operands
[0];
6181 rtx orig_src
= operands
[1];
6182 rtx bytes_rtx
= operands
[2];
6183 rtx align_rtx
= operands
[3];
6184 int constp
= (GET_CODE (bytes_rtx
) == CONST_INT
);
6190 /* If this is not a fixed size move, just call memcpy */
6194 /* If this is not a fixed size alignment, abort */
6195 if (GET_CODE (align_rtx
) != CONST_INT
)
6197 align
= INTVAL (align_rtx
);
6199 /* Anything to move? */
6200 bytes
= INTVAL (bytes_rtx
);
6204 /* store_one_arg depends on expand_block_move to handle at least the size of
6205 reg_parm_stack_space. */
6206 if (bytes
> (TARGET_POWERPC64
? 64 : 32))
6209 if (TARGET_STRING
) /* string instructions are available */
6211 for (offset
= 0; bytes
> 0; offset
+= move_bytes
, bytes
-= move_bytes
)
6214 rtx (*movstrsi
) PARAMS ((rtx
, rtx
, rtx
, rtx
));
6215 rtx (*mov
) PARAMS ((rtx
, rtx
));
6217 enum machine_mode mode
= BLKmode
;
6220 if (bytes
> 24 /* move up to 32 bytes at a time */
6228 && ! fixed_regs
[12])
6230 move_bytes
= (bytes
> 32) ? 32 : bytes
;
6231 gen_func
.movstrsi
= gen_movstrsi_8reg
;
6233 else if (bytes
> 16 /* move up to 24 bytes at a time */
6239 && ! fixed_regs
[10])
6241 move_bytes
= (bytes
> 24) ? 24 : bytes
;
6242 gen_func
.movstrsi
= gen_movstrsi_6reg
;
6244 else if (bytes
> 8 /* move up to 16 bytes at a time */
6250 move_bytes
= (bytes
> 16) ? 16 : bytes
;
6251 gen_func
.movstrsi
= gen_movstrsi_4reg
;
6253 else if (bytes
>= 8 && TARGET_POWERPC64
6254 /* 64-bit loads and stores require word-aligned
6256 && (align
>= 8 || (! STRICT_ALIGNMENT
&& align
>= 4)))
6260 gen_func
.mov
= gen_movdi
;
6262 else if (bytes
> 4 && !TARGET_POWERPC64
)
6263 { /* move up to 8 bytes at a time */
6264 move_bytes
= (bytes
> 8) ? 8 : bytes
;
6265 gen_func
.movstrsi
= gen_movstrsi_2reg
;
6267 else if (bytes
>= 4 && (align
>= 4 || ! STRICT_ALIGNMENT
))
6268 { /* move 4 bytes */
6271 gen_func
.mov
= gen_movsi
;
6273 else if (bytes
== 2 && (align
>= 2 || ! STRICT_ALIGNMENT
))
6274 { /* move 2 bytes */
6277 gen_func
.mov
= gen_movhi
;
6279 else if (bytes
== 1) /* move 1 byte */
6283 gen_func
.mov
= gen_movqi
;
6286 { /* move up to 4 bytes at a time */
6287 move_bytes
= (bytes
> 4) ? 4 : bytes
;
6288 gen_func
.movstrsi
= gen_movstrsi_1reg
;
6291 src
= adjust_address (orig_src
, mode
, offset
);
6292 dest
= adjust_address (orig_dest
, mode
, offset
);
6294 if (mode
== BLKmode
)
6296 /* Move the address into scratch registers. The movstrsi
6297 patterns require zero offset. */
6298 if (!REG_P (XEXP (src
, 0)))
6300 rtx src_reg
= copy_addr_to_reg (XEXP (src
, 0));
6301 src
= replace_equiv_address (src
, src_reg
);
6303 set_mem_size (src
, GEN_INT (move_bytes
));
6305 if (!REG_P (XEXP (dest
, 0)))
6307 rtx dest_reg
= copy_addr_to_reg (XEXP (dest
, 0));
6308 dest
= replace_equiv_address (dest
, dest_reg
);
6310 set_mem_size (dest
, GEN_INT (move_bytes
));
6312 emit_insn ((*gen_func
.movstrsi
) (dest
, src
,
6313 GEN_INT (move_bytes
& 31),
6318 rtx tmp_reg
= gen_reg_rtx (mode
);
6320 emit_insn ((*gen_func
.mov
) (tmp_reg
, src
));
6321 emit_insn ((*gen_func
.mov
) (dest
, tmp_reg
));
6326 else /* string instructions not available */
6328 rtx stores
[MAX_MOVE_REG
];
6332 for (offset
= 0; bytes
> 0; offset
+= move_bytes
, bytes
-= move_bytes
)
6334 rtx (*gen_mov_func
) PARAMS ((rtx
, rtx
));
6335 enum machine_mode mode
;
6336 rtx src
, dest
, tmp_reg
;
6338 /* Generate the appropriate load and store, saving the stores
6340 if (bytes
>= 8 && TARGET_POWERPC64
6341 /* 64-bit loads and stores require word-aligned
6343 && (align
>= 8 || (! STRICT_ALIGNMENT
&& align
>= 4)))
6347 gen_mov_func
= gen_movdi
;
6349 else if (bytes
>= 4 && (align
>= 4 || ! STRICT_ALIGNMENT
))
6353 gen_mov_func
= gen_movsi
;
6355 else if (bytes
>= 2 && (align
>= 2 || ! STRICT_ALIGNMENT
))
6359 gen_mov_func
= gen_movhi
;
6365 gen_mov_func
= gen_movqi
;
6368 src
= adjust_address (orig_src
, mode
, offset
);
6369 dest
= adjust_address (orig_dest
, mode
, offset
);
6370 tmp_reg
= gen_reg_rtx (mode
);
6372 emit_insn ((*gen_mov_func
) (tmp_reg
, src
));
6373 stores
[num_reg
++] = (*gen_mov_func
) (dest
, tmp_reg
);
6375 if (num_reg
>= MAX_MOVE_REG
)
6377 for (i
= 0; i
< num_reg
; i
++)
6378 emit_insn (stores
[i
]);
6383 for (i
= 0; i
< num_reg
; i
++)
6384 emit_insn (stores
[i
]);
6391 /* Return 1 if OP is a load multiple operation. It is known to be a
6392 PARALLEL and the first section will be tested. */
6395 load_multiple_operation (op
, mode
)
6397 enum machine_mode mode ATTRIBUTE_UNUSED
;
6399 int count
= XVECLEN (op
, 0);
6400 unsigned int dest_regno
;
6404 /* Perform a quick check so we don't blow up below. */
6406 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6407 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
6408 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != MEM
)
6411 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
6412 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0);
6414 for (i
= 1; i
< count
; i
++)
6416 rtx elt
= XVECEXP (op
, 0, i
);
6418 if (GET_CODE (elt
) != SET
6419 || GET_CODE (SET_DEST (elt
)) != REG
6420 || GET_MODE (SET_DEST (elt
)) != SImode
6421 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
6422 || GET_CODE (SET_SRC (elt
)) != MEM
6423 || GET_MODE (SET_SRC (elt
)) != SImode
6424 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != PLUS
6425 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt
), 0), 0), src_addr
)
6426 || GET_CODE (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != CONST_INT
6427 || INTVAL (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != i
* 4)
6434 /* Similar, but tests for store multiple. Here, the second vector element
6435 is a CLOBBER. It will be tested later. */
6438 store_multiple_operation (op
, mode
)
6440 enum machine_mode mode ATTRIBUTE_UNUSED
;
6442 int count
= XVECLEN (op
, 0) - 1;
6443 unsigned int src_regno
;
6447 /* Perform a quick check so we don't blow up below. */
6449 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6450 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != MEM
6451 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != REG
)
6454 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
6455 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, 0)), 0);
6457 for (i
= 1; i
< count
; i
++)
6459 rtx elt
= XVECEXP (op
, 0, i
+ 1);
6461 if (GET_CODE (elt
) != SET
6462 || GET_CODE (SET_SRC (elt
)) != REG
6463 || GET_MODE (SET_SRC (elt
)) != SImode
6464 || REGNO (SET_SRC (elt
)) != src_regno
+ i
6465 || GET_CODE (SET_DEST (elt
)) != MEM
6466 || GET_MODE (SET_DEST (elt
)) != SImode
6467 || GET_CODE (XEXP (SET_DEST (elt
), 0)) != PLUS
6468 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt
), 0), 0), dest_addr
)
6469 || GET_CODE (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != CONST_INT
6470 || INTVAL (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != i
* 4)
6477 /* Return a string to perform a load_multiple operation.
6478 operands[0] is the vector.
6479 operands[1] is the source address.
6480 operands[2] is the first destination register. */
6483 rs6000_output_load_multiple (operands
)
6486 /* We have to handle the case where the pseudo used to contain the address
6487 is assigned to one of the output registers. */
6489 int words
= XVECLEN (operands
[0], 0);
6492 if (XVECLEN (operands
[0], 0) == 1)
6493 return "{l|lwz} %2,0(%1)";
6495 for (i
= 0; i
< words
; i
++)
6496 if (refers_to_regno_p (REGNO (operands
[2]) + i
,
6497 REGNO (operands
[2]) + i
+ 1, operands
[1], 0))
6501 xop
[0] = GEN_INT (4 * (words
-1));
6502 xop
[1] = operands
[1];
6503 xop
[2] = operands
[2];
6504 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop
);
6509 xop
[0] = GEN_INT (4 * (words
-1));
6510 xop
[1] = operands
[1];
6511 xop
[2] = gen_rtx_REG (SImode
, REGNO (operands
[2]) + 1);
6512 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop
);
6517 for (j
= 0; j
< words
; j
++)
6520 xop
[0] = GEN_INT (j
* 4);
6521 xop
[1] = operands
[1];
6522 xop
[2] = gen_rtx_REG (SImode
, REGNO (operands
[2]) + j
);
6523 output_asm_insn ("{l|lwz} %2,%0(%1)", xop
);
6525 xop
[0] = GEN_INT (i
* 4);
6526 xop
[1] = operands
[1];
6527 output_asm_insn ("{l|lwz} %1,%0(%1)", xop
);
6532 return "{lsi|lswi} %2,%1,%N0";
6535 /* Return 1 for a parallel vrsave operation. */
6538 vrsave_operation (op
, mode
)
6540 enum machine_mode mode ATTRIBUTE_UNUSED
;
6542 int count
= XVECLEN (op
, 0);
6543 unsigned int dest_regno
, src_regno
;
6547 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6548 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
6549 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != UNSPEC_VOLATILE
)
6552 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
6553 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
6555 if (dest_regno
!= VRSAVE_REGNO
6556 && src_regno
!= VRSAVE_REGNO
)
6559 for (i
= 1; i
< count
; i
++)
6561 rtx elt
= XVECEXP (op
, 0, i
);
6563 if (GET_CODE (elt
) != CLOBBER
6564 && GET_CODE (elt
) != SET
)
6571 /* Return 1 for an PARALLEL suitable for mtcrf. */
6574 mtcrf_operation (op
, mode
)
6576 enum machine_mode mode ATTRIBUTE_UNUSED
;
6578 int count
= XVECLEN (op
, 0);
6582 /* Perform a quick check so we don't blow up below. */
6584 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6585 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != UNSPEC
6586 || XVECLEN (SET_SRC (XVECEXP (op
, 0, 0)), 0) != 2)
6588 src_reg
= XVECEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0, 0);
6590 if (GET_CODE (src_reg
) != REG
6591 || GET_MODE (src_reg
) != SImode
6592 || ! INT_REGNO_P (REGNO (src_reg
)))
6595 for (i
= 0; i
< count
; i
++)
6597 rtx exp
= XVECEXP (op
, 0, i
);
6601 if (GET_CODE (exp
) != SET
6602 || GET_CODE (SET_DEST (exp
)) != REG
6603 || GET_MODE (SET_DEST (exp
)) != CCmode
6604 || ! CR_REGNO_P (REGNO (SET_DEST (exp
))))
6606 unspec
= SET_SRC (exp
);
6607 maskval
= 1 << (MAX_CR_REGNO
- REGNO (SET_DEST (exp
)));
6609 if (GET_CODE (unspec
) != UNSPEC
6610 || XINT (unspec
, 1) != 20
6611 || XVECLEN (unspec
, 0) != 2
6612 || XVECEXP (unspec
, 0, 0) != src_reg
6613 || GET_CODE (XVECEXP (unspec
, 0, 1)) != CONST_INT
6614 || INTVAL (XVECEXP (unspec
, 0, 1)) != maskval
)
6620 /* Return 1 for an PARALLEL suitable for lmw. */
6623 lmw_operation (op
, mode
)
6625 enum machine_mode mode ATTRIBUTE_UNUSED
;
6627 int count
= XVECLEN (op
, 0);
6628 unsigned int dest_regno
;
6630 unsigned int base_regno
;
6631 HOST_WIDE_INT offset
;
6634 /* Perform a quick check so we don't blow up below. */
6636 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6637 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
6638 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != MEM
)
6641 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
6642 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0);
6645 || count
!= 32 - (int) dest_regno
)
6648 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr
, 0))
6651 base_regno
= REGNO (src_addr
);
6652 if (base_regno
== 0)
6655 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, src_addr
, 0))
6657 offset
= INTVAL (XEXP (src_addr
, 1));
6658 base_regno
= REGNO (XEXP (src_addr
, 0));
6663 for (i
= 0; i
< count
; i
++)
6665 rtx elt
= XVECEXP (op
, 0, i
);
6668 HOST_WIDE_INT newoffset
;
6670 if (GET_CODE (elt
) != SET
6671 || GET_CODE (SET_DEST (elt
)) != REG
6672 || GET_MODE (SET_DEST (elt
)) != SImode
6673 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
6674 || GET_CODE (SET_SRC (elt
)) != MEM
6675 || GET_MODE (SET_SRC (elt
)) != SImode
)
6677 newaddr
= XEXP (SET_SRC (elt
), 0);
6678 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr
, 0))
6683 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, newaddr
, 0))
6685 addr_reg
= XEXP (newaddr
, 0);
6686 newoffset
= INTVAL (XEXP (newaddr
, 1));
6690 if (REGNO (addr_reg
) != base_regno
6691 || newoffset
!= offset
+ 4 * i
)
6698 /* Return 1 for an PARALLEL suitable for stmw. */
6701 stmw_operation (op
, mode
)
6703 enum machine_mode mode ATTRIBUTE_UNUSED
;
6705 int count
= XVECLEN (op
, 0);
6706 unsigned int src_regno
;
6708 unsigned int base_regno
;
6709 HOST_WIDE_INT offset
;
6712 /* Perform a quick check so we don't blow up below. */
6714 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6715 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != MEM
6716 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != REG
)
6719 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
6720 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, 0)), 0);
6723 || count
!= 32 - (int) src_regno
)
6726 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr
, 0))
6729 base_regno
= REGNO (dest_addr
);
6730 if (base_regno
== 0)
6733 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, dest_addr
, 0))
6735 offset
= INTVAL (XEXP (dest_addr
, 1));
6736 base_regno
= REGNO (XEXP (dest_addr
, 0));
6741 for (i
= 0; i
< count
; i
++)
6743 rtx elt
= XVECEXP (op
, 0, i
);
6746 HOST_WIDE_INT newoffset
;
6748 if (GET_CODE (elt
) != SET
6749 || GET_CODE (SET_SRC (elt
)) != REG
6750 || GET_MODE (SET_SRC (elt
)) != SImode
6751 || REGNO (SET_SRC (elt
)) != src_regno
+ i
6752 || GET_CODE (SET_DEST (elt
)) != MEM
6753 || GET_MODE (SET_DEST (elt
)) != SImode
)
6755 newaddr
= XEXP (SET_DEST (elt
), 0);
6756 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr
, 0))
6761 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, newaddr
, 0))
6763 addr_reg
= XEXP (newaddr
, 0);
6764 newoffset
= INTVAL (XEXP (newaddr
, 1));
6768 if (REGNO (addr_reg
) != base_regno
6769 || newoffset
!= offset
+ 4 * i
)
6776 /* A validation routine: say whether CODE, a condition code, and MODE
6777 match. The other alternatives either don't make sense or should
6778 never be generated. */
6781 validate_condition_mode (code
, mode
)
6783 enum machine_mode mode
;
6785 if (GET_RTX_CLASS (code
) != '<'
6786 || GET_MODE_CLASS (mode
) != MODE_CC
)
6789 /* These don't make sense. */
6790 if ((code
== GT
|| code
== LT
|| code
== GE
|| code
== LE
)
6791 && mode
== CCUNSmode
)
6794 if ((code
== GTU
|| code
== LTU
|| code
== GEU
|| code
== LEU
)
6795 && mode
!= CCUNSmode
)
6798 if (mode
!= CCFPmode
6799 && (code
== ORDERED
|| code
== UNORDERED
6800 || code
== UNEQ
|| code
== LTGT
6801 || code
== UNGT
|| code
== UNLT
6802 || code
== UNGE
|| code
== UNLE
))
6805 /* These should never be generated except for
6806 flag_unsafe_math_optimizations and flag_finite_math_only. */
6807 if (mode
== CCFPmode
6808 && ! flag_unsafe_math_optimizations
6809 && ! flag_finite_math_only
6810 && (code
== LE
|| code
== GE
6811 || code
== UNEQ
|| code
== LTGT
6812 || code
== UNGT
|| code
== UNLT
))
6815 /* These are invalid; the information is not there. */
6816 if (mode
== CCEQmode
6817 && code
!= EQ
&& code
!= NE
)
6821 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
6822 We only check the opcode against the mode of the CC value here. */
6825 branch_comparison_operator (op
, mode
)
6827 enum machine_mode mode ATTRIBUTE_UNUSED
;
6829 enum rtx_code code
= GET_CODE (op
);
6830 enum machine_mode cc_mode
;
6832 if (GET_RTX_CLASS (code
) != '<')
6835 cc_mode
= GET_MODE (XEXP (op
, 0));
6836 if (GET_MODE_CLASS (cc_mode
) != MODE_CC
)
6839 validate_condition_mode (code
, cc_mode
);
6844 /* Return 1 if OP is a comparison operation that is valid for a branch
6845 insn and which is true if the corresponding bit in the CC register
6849 branch_positive_comparison_operator (op
, mode
)
6851 enum machine_mode mode
;
6855 if (! branch_comparison_operator (op
, mode
))
6858 code
= GET_CODE (op
);
6859 return (code
== EQ
|| code
== LT
|| code
== GT
6860 || (TARGET_SPE
&& TARGET_HARD_FLOAT
&& !TARGET_FPRS
&& code
== NE
)
6861 || code
== LTU
|| code
== GTU
6862 || code
== UNORDERED
);
6865 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
6866 We check the opcode against the mode of the CC value and disallow EQ or
6867 NE comparisons for integers. */
6870 scc_comparison_operator (op
, mode
)
6872 enum machine_mode mode
;
6874 enum rtx_code code
= GET_CODE (op
);
6875 enum machine_mode cc_mode
;
6877 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
6880 if (GET_RTX_CLASS (code
) != '<')
6883 cc_mode
= GET_MODE (XEXP (op
, 0));
6884 if (GET_MODE_CLASS (cc_mode
) != MODE_CC
)
6887 validate_condition_mode (code
, cc_mode
);
6889 if (code
== NE
&& cc_mode
!= CCFPmode
)
6896 trap_comparison_operator (op
, mode
)
6898 enum machine_mode mode
;
6900 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
6902 return GET_RTX_CLASS (GET_CODE (op
)) == '<';
6906 boolean_operator (op
, mode
)
6908 enum machine_mode mode ATTRIBUTE_UNUSED
;
6910 enum rtx_code code
= GET_CODE (op
);
6911 return (code
== AND
|| code
== IOR
|| code
== XOR
);
6915 boolean_or_operator (op
, mode
)
6917 enum machine_mode mode ATTRIBUTE_UNUSED
;
6919 enum rtx_code code
= GET_CODE (op
);
6920 return (code
== IOR
|| code
== XOR
);
6924 min_max_operator (op
, mode
)
6926 enum machine_mode mode ATTRIBUTE_UNUSED
;
6928 enum rtx_code code
= GET_CODE (op
);
6929 return (code
== SMIN
|| code
== SMAX
|| code
== UMIN
|| code
== UMAX
);
6932 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
6933 mask required to convert the result of a rotate insn into a shift
6934 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
6937 includes_lshift_p (shiftop
, andop
)
6941 unsigned HOST_WIDE_INT shift_mask
= ~(unsigned HOST_WIDE_INT
) 0;
6943 shift_mask
<<= INTVAL (shiftop
);
6945 return (INTVAL (andop
) & 0xffffffff & ~shift_mask
) == 0;
6948 /* Similar, but for right shift. */
6951 includes_rshift_p (shiftop
, andop
)
6955 unsigned HOST_WIDE_INT shift_mask
= ~(unsigned HOST_WIDE_INT
) 0;
6957 shift_mask
>>= INTVAL (shiftop
);
6959 return (INTVAL (andop
) & 0xffffffff & ~shift_mask
) == 0;
6962 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
6963 to perform a left shift. It must have exactly SHIFTOP least
6964 significant 0's, then one or more 1's, then zero or more 0's. */
6967 includes_rldic_lshift_p (shiftop
, andop
)
6971 if (GET_CODE (andop
) == CONST_INT
)
6973 HOST_WIDE_INT c
, lsb
, shift_mask
;
6976 if (c
== 0 || c
== ~0)
6980 shift_mask
<<= INTVAL (shiftop
);
6982 /* Find the least significant one bit. */
6985 /* It must coincide with the LSB of the shift mask. */
6986 if (-lsb
!= shift_mask
)
6989 /* Invert to look for the next transition (if any). */
6992 /* Remove the low group of ones (originally low group of zeros). */
6995 /* Again find the lsb, and check we have all 1's above. */
6999 else if (GET_CODE (andop
) == CONST_DOUBLE
7000 && (GET_MODE (andop
) == VOIDmode
|| GET_MODE (andop
) == DImode
))
7002 HOST_WIDE_INT low
, high
, lsb
;
7003 HOST_WIDE_INT shift_mask_low
, shift_mask_high
;
7005 low
= CONST_DOUBLE_LOW (andop
);
7006 if (HOST_BITS_PER_WIDE_INT
< 64)
7007 high
= CONST_DOUBLE_HIGH (andop
);
7009 if ((low
== 0 && (HOST_BITS_PER_WIDE_INT
>= 64 || high
== 0))
7010 || (low
== ~0 && (HOST_BITS_PER_WIDE_INT
>= 64 || high
== ~0)))
7013 if (HOST_BITS_PER_WIDE_INT
< 64 && low
== 0)
7015 shift_mask_high
= ~0;
7016 if (INTVAL (shiftop
) > 32)
7017 shift_mask_high
<<= INTVAL (shiftop
) - 32;
7021 if (-lsb
!= shift_mask_high
|| INTVAL (shiftop
) < 32)
7028 return high
== -lsb
;
7031 shift_mask_low
= ~0;
7032 shift_mask_low
<<= INTVAL (shiftop
);
7036 if (-lsb
!= shift_mask_low
)
7039 if (HOST_BITS_PER_WIDE_INT
< 64)
7044 if (HOST_BITS_PER_WIDE_INT
< 64 && low
== 0)
7047 return high
== -lsb
;
7051 return low
== -lsb
&& (HOST_BITS_PER_WIDE_INT
>= 64 || high
== ~0);
7057 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
7058 to perform a left shift. It must have SHIFTOP or more least
7059 signifigant 0's, with the remainder of the word 1's. */
7062 includes_rldicr_lshift_p (shiftop
, andop
)
7066 if (GET_CODE (andop
) == CONST_INT
)
7068 HOST_WIDE_INT c
, lsb
, shift_mask
;
7071 shift_mask
<<= INTVAL (shiftop
);
7074 /* Find the least signifigant one bit. */
7077 /* It must be covered by the shift mask.
7078 This test also rejects c == 0. */
7079 if ((lsb
& shift_mask
) == 0)
7082 /* Check we have all 1's above the transition, and reject all 1's. */
7083 return c
== -lsb
&& lsb
!= 1;
7085 else if (GET_CODE (andop
) == CONST_DOUBLE
7086 && (GET_MODE (andop
) == VOIDmode
|| GET_MODE (andop
) == DImode
))
7088 HOST_WIDE_INT low
, lsb
, shift_mask_low
;
7090 low
= CONST_DOUBLE_LOW (andop
);
7092 if (HOST_BITS_PER_WIDE_INT
< 64)
7094 HOST_WIDE_INT high
, shift_mask_high
;
7096 high
= CONST_DOUBLE_HIGH (andop
);
7100 shift_mask_high
= ~0;
7101 if (INTVAL (shiftop
) > 32)
7102 shift_mask_high
<<= INTVAL (shiftop
) - 32;
7106 if ((lsb
& shift_mask_high
) == 0)
7109 return high
== -lsb
;
7115 shift_mask_low
= ~0;
7116 shift_mask_low
<<= INTVAL (shiftop
);
7120 if ((lsb
& shift_mask_low
) == 0)
7123 return low
== -lsb
&& lsb
!= 1;
7129 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
7130 for lfq and stfq insns.
7132 Note reg1 and reg2 *must* be hard registers. To be sure we will
7133 abort if we are passed pseudo registers. */
7136 registers_ok_for_quad_peep (reg1
, reg2
)
7139 /* We might have been passed a SUBREG. */
7140 if (GET_CODE (reg1
) != REG
|| GET_CODE (reg2
) != REG
)
7143 return (REGNO (reg1
) == REGNO (reg2
) - 1);
7146 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
7147 addr1 and addr2 must be in consecutive memory locations
7148 (addr2 == addr1 + 8). */
7151 addrs_ok_for_quad_peep (addr1
, addr2
)
7158 /* Extract an offset (if used) from the first addr. */
7159 if (GET_CODE (addr1
) == PLUS
)
7161 /* If not a REG, return zero. */
7162 if (GET_CODE (XEXP (addr1
, 0)) != REG
)
7166 reg1
= REGNO (XEXP (addr1
, 0));
7167 /* The offset must be constant! */
7168 if (GET_CODE (XEXP (addr1
, 1)) != CONST_INT
)
7170 offset1
= INTVAL (XEXP (addr1
, 1));
7173 else if (GET_CODE (addr1
) != REG
)
7177 reg1
= REGNO (addr1
);
7178 /* This was a simple (mem (reg)) expression. Offset is 0. */
7182 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
7183 if (GET_CODE (addr2
) != PLUS
)
7186 if (GET_CODE (XEXP (addr2
, 0)) != REG
7187 || GET_CODE (XEXP (addr2
, 1)) != CONST_INT
)
7190 if (reg1
!= REGNO (XEXP (addr2
, 0)))
7193 /* The offset for the second addr must be 8 more than the first addr. */
7194 if (INTVAL (XEXP (addr2
, 1)) != offset1
+ 8)
7197 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7202 /* Return the register class of a scratch register needed to copy IN into
7203 or out of a register in CLASS in MODE. If it can be done directly,
7204 NO_REGS is returned. */
7207 secondary_reload_class (class, mode
, in
)
7208 enum reg_class
class;
7209 enum machine_mode mode ATTRIBUTE_UNUSED
;
7214 if (TARGET_ELF
|| (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))
7216 /* We cannot copy a symbolic operand directly into anything
7217 other than BASE_REGS for TARGET_ELF. So indicate that a
7218 register from BASE_REGS is needed as an intermediate
7221 On Darwin, pic addresses require a load from memory, which
7222 needs a base register. */
7223 if (class != BASE_REGS
7224 && (GET_CODE (in
) == SYMBOL_REF
7225 || GET_CODE (in
) == HIGH
7226 || GET_CODE (in
) == LABEL_REF
7227 || GET_CODE (in
) == CONST
))
7231 if (GET_CODE (in
) == REG
)
7234 if (regno
>= FIRST_PSEUDO_REGISTER
)
7236 regno
= true_regnum (in
);
7237 if (regno
>= FIRST_PSEUDO_REGISTER
)
7241 else if (GET_CODE (in
) == SUBREG
)
7243 regno
= true_regnum (in
);
7244 if (regno
>= FIRST_PSEUDO_REGISTER
)
7250 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
7252 if (class == GENERAL_REGS
|| class == BASE_REGS
7253 || (regno
>= 0 && INT_REGNO_P (regno
)))
7256 /* Constants, memory, and FP registers can go into FP registers. */
7257 if ((regno
== -1 || FP_REGNO_P (regno
))
7258 && (class == FLOAT_REGS
|| class == NON_SPECIAL_REGS
))
7261 /* Memory, and AltiVec registers can go into AltiVec registers. */
7262 if ((regno
== -1 || ALTIVEC_REGNO_P (regno
))
7263 && class == ALTIVEC_REGS
)
7266 /* We can copy among the CR registers. */
7267 if ((class == CR_REGS
|| class == CR0_REGS
)
7268 && regno
>= 0 && CR_REGNO_P (regno
))
7271 /* Otherwise, we need GENERAL_REGS. */
7272 return GENERAL_REGS
;
7275 /* Given a comparison operation, return the bit number in CCR to test. We
7276 know this is a valid comparison.
7278 SCC_P is 1 if this is for an scc. That means that %D will have been
7279 used instead of %C, so the bits will be in different places.
7281 Return -1 if OP isn't a valid comparison for some reason. */
7288 enum rtx_code code
= GET_CODE (op
);
7289 enum machine_mode cc_mode
;
7294 if (GET_RTX_CLASS (code
) != '<')
7299 if (GET_CODE (reg
) != REG
7300 || ! CR_REGNO_P (REGNO (reg
)))
7303 cc_mode
= GET_MODE (reg
);
7304 cc_regnum
= REGNO (reg
);
7305 base_bit
= 4 * (cc_regnum
- CR0_REGNO
);
7307 validate_condition_mode (code
, cc_mode
);
7312 if (TARGET_SPE
&& TARGET_HARD_FLOAT
&& cc_mode
== CCFPmode
)
7313 return base_bit
+ 1;
7314 return scc_p
? base_bit
+ 3 : base_bit
+ 2;
7316 if (TARGET_SPE
&& TARGET_HARD_FLOAT
&& cc_mode
== CCFPmode
)
7317 return base_bit
+ 1;
7318 return base_bit
+ 2;
7319 case GT
: case GTU
: case UNLE
:
7320 return base_bit
+ 1;
7321 case LT
: case LTU
: case UNGE
:
7323 case ORDERED
: case UNORDERED
:
7324 return base_bit
+ 3;
7327 /* If scc, we will have done a cror to put the bit in the
7328 unordered position. So test that bit. For integer, this is ! LT
7329 unless this is an scc insn. */
7330 return scc_p
? base_bit
+ 3 : base_bit
;
7333 return scc_p
? base_bit
+ 3 : base_bit
+ 1;
7340 /* Return the GOT register. */
7343 rs6000_got_register (value
)
7344 rtx value ATTRIBUTE_UNUSED
;
7346 /* The second flow pass currently (June 1999) can't update
7347 regs_ever_live without disturbing other parts of the compiler, so
7348 update it here to make the prolog/epilogue code happy. */
7349 if (no_new_pseudos
&& ! regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
])
7350 regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
] = 1;
7352 current_function_uses_pic_offset_table
= 1;
7354 return pic_offset_table_rtx
;
7357 /* Function to init struct machine_function.
7358 This will be called, via a pointer variable,
7359 from push_function_context. */
7361 static struct machine_function
*
7362 rs6000_init_machine_status ()
7364 return ggc_alloc_cleared (sizeof (machine_function
));
/* These macros test for integers and extract the low-order bits.  */
#define INT_P(X)  \
((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE)	\
 && GET_MODE (X) == VOIDmode)

#define INT_LOWPART(X) \
  (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
7380 unsigned long val
= INT_LOWPART (op
);
7382 /* If the high bit is zero, the value is the first 1 bit we find
7384 if ((val
& 0x80000000) == 0)
7386 if ((val
& 0xffffffff) == 0)
7390 while (((val
<<= 1) & 0x80000000) == 0)
7395 /* If the high bit is set and the low bit is not, or the mask is all
7396 1's, the value is zero. */
7397 if ((val
& 1) == 0 || (val
& 0xffffffff) == 0xffffffff)
7400 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7403 while (((val
>>= 1) & 1) != 0)
7414 unsigned long val
= INT_LOWPART (op
);
7416 /* If the low bit is zero, the value is the first 1 bit we find from
7420 if ((val
& 0xffffffff) == 0)
7424 while (((val
>>= 1) & 1) == 0)
7430 /* If the low bit is set and the high bit is not, or the mask is all
7431 1's, the value is 31. */
7432 if ((val
& 0x80000000) == 0 || (val
& 0xffffffff) == 0xffffffff)
7435 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7438 while (((val
<<= 1) & 0x80000000) != 0)
/* Print an operand.  Recognize special options, documented below.  */

#if TARGET_ELF
#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
#else
#define SMALL_DATA_RELOC "sda21"
#define SMALL_DATA_REG 0
#endif
7455 print_operand (file
, x
, code
)
7462 unsigned HOST_WIDE_INT uval
;
7467 /* Write out an instruction after the call which may be replaced
7468 with glue code by the loader. This depends on the AIX version. */
7469 asm_fprintf (file
, RS6000_CALL_GLUE
);
7472 /* %a is output_address. */
7475 /* If X is a constant integer whose low-order 5 bits are zero,
7476 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
7477 in the AIX assembler where "sri" with a zero shift count
7478 writes a trash instruction. */
7479 if (GET_CODE (x
) == CONST_INT
&& (INTVAL (x
) & 31) == 0)
7486 /* If constant, low-order 16 bits of constant, unsigned.
7487 Otherwise, write normally. */
7489 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 0xffff);
7491 print_operand (file
, x
, 0);
7495 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
7496 for 64-bit mask direction. */
7497 putc (((INT_LOWPART(x
) & 1) == 0 ? 'r' : 'l'), file
);
7500 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
7504 /* There used to be a comment for 'C' reading "This is an
7505 optional cror needed for certain floating-point
7506 comparisons. Otherwise write nothing." */
7508 /* Similar, except that this is for an scc, so we must be able to
7509 encode the test in a single bit that is one. We do the above
7510 for any LE, GE, GEU, or LEU and invert the bit for NE. */
7511 if (GET_CODE (x
) == LE
|| GET_CODE (x
) == GE
7512 || GET_CODE (x
) == LEU
|| GET_CODE (x
) == GEU
)
7514 int base_bit
= 4 * (REGNO (XEXP (x
, 0)) - CR0_REGNO
);
7516 fprintf (file
, "cror %d,%d,%d\n\t", base_bit
+ 3,
7518 base_bit
+ (GET_CODE (x
) == GE
|| GET_CODE (x
) == GEU
));
7521 else if (GET_CODE (x
) == NE
)
7523 int base_bit
= 4 * (REGNO (XEXP (x
, 0)) - CR0_REGNO
);
7525 fprintf (file
, "crnor %d,%d,%d\n\t", base_bit
+ 3,
7526 base_bit
+ 2, base_bit
+ 2);
7528 else if (TARGET_SPE
&& TARGET_HARD_FLOAT
7529 && GET_CODE (x
) == EQ
7530 && GET_MODE (XEXP (x
, 0)) == CCFPmode
)
7532 int base_bit
= 4 * (REGNO (XEXP (x
, 0)) - CR0_REGNO
);
7534 fprintf (file
, "crnor %d,%d,%d\n\t", base_bit
+ 1,
7535 base_bit
+ 1, base_bit
+ 1);
7540 /* X is a CR register. Print the number of the EQ bit of the CR */
7541 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7542 output_operand_lossage ("invalid %%E value");
7544 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
) + 2);
7548 /* X is a CR register. Print the shift count needed to move it
7549 to the high-order four bits. */
7550 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7551 output_operand_lossage ("invalid %%f value");
7553 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
));
7557 /* Similar, but print the count for the rotate in the opposite
7559 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7560 output_operand_lossage ("invalid %%F value");
7562 fprintf (file
, "%d", 32 - 4 * (REGNO (x
) - CR0_REGNO
));
7566 /* X is a constant integer. If it is negative, print "m",
7567 otherwise print "z". This is to make an aze or ame insn. */
7568 if (GET_CODE (x
) != CONST_INT
)
7569 output_operand_lossage ("invalid %%G value");
7570 else if (INTVAL (x
) >= 0)
7577 /* If constant, output low-order five bits. Otherwise, write
7580 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 31);
7582 print_operand (file
, x
, 0);
7586 /* If constant, output low-order six bits. Otherwise, write
7589 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 63);
7591 print_operand (file
, x
, 0);
7595 /* Print `i' if this is a constant, else nothing. */
7601 /* Write the bit number in CCR for jump. */
7604 output_operand_lossage ("invalid %%j code");
7606 fprintf (file
, "%d", i
);
7610 /* Similar, but add one for shift count in rlinm for scc and pass
7611 scc flag to `ccr_bit'. */
7614 output_operand_lossage ("invalid %%J code");
7616 /* If we want bit 31, write a shift count of zero, not 32. */
7617 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
7621 /* X must be a constant. Write the 1's complement of the
7624 output_operand_lossage ("invalid %%k value");
7626 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ~ INT_LOWPART (x
));
7630 /* X must be a symbolic constant on ELF. Write an
7631 expression suitable for an 'addi' that adds in the low 16
7633 if (GET_CODE (x
) != CONST
)
7635 print_operand_address (file
, x
);
7640 if (GET_CODE (XEXP (x
, 0)) != PLUS
7641 || (GET_CODE (XEXP (XEXP (x
, 0), 0)) != SYMBOL_REF
7642 && GET_CODE (XEXP (XEXP (x
, 0), 0)) != LABEL_REF
)
7643 || GET_CODE (XEXP (XEXP (x
, 0), 1)) != CONST_INT
)
7644 output_operand_lossage ("invalid %%K value");
7645 print_operand_address (file
, XEXP (XEXP (x
, 0), 0));
7647 /* For GNU as, there must be a non-alphanumeric character
7648 between 'l' and the number. The '-' is added by
7649 print_operand() already. */
7650 if (INTVAL (XEXP (XEXP (x
, 0), 1)) >= 0)
7652 print_operand (file
, XEXP (XEXP (x
, 0), 1), 0);
7656 /* %l is output_asm_label. */
7659 /* Write second word of DImode or DFmode reference. Works on register
7660 or non-indexed memory only. */
7661 if (GET_CODE (x
) == REG
)
7662 fprintf (file
, "%s", reg_names
[REGNO (x
) + 1]);
7663 else if (GET_CODE (x
) == MEM
)
7665 /* Handle possible auto-increment. Since it is pre-increment and
7666 we have already done it, we can just use an offset of word. */
7667 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
7668 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
7669 output_address (plus_constant (XEXP (XEXP (x
, 0), 0),
7672 output_address (XEXP (adjust_address_nv (x
, SImode
,
7676 if (small_data_operand (x
, GET_MODE (x
)))
7677 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
7678 reg_names
[SMALL_DATA_REG
]);
7683 /* MB value for a mask operand. */
7684 if (! mask_operand (x
, SImode
))
7685 output_operand_lossage ("invalid %%m value");
7687 fprintf (file
, "%d", extract_MB (x
));
7691 /* ME value for a mask operand. */
7692 if (! mask_operand (x
, SImode
))
7693 output_operand_lossage ("invalid %%M value");
7695 fprintf (file
, "%d", extract_ME (x
));
7698 /* %n outputs the negative of its operand. */
7701 /* Write the number of elements in the vector times 4. */
7702 if (GET_CODE (x
) != PARALLEL
)
7703 output_operand_lossage ("invalid %%N value");
7705 fprintf (file
, "%d", XVECLEN (x
, 0) * 4);
7709 /* Similar, but subtract 1 first. */
7710 if (GET_CODE (x
) != PARALLEL
)
7711 output_operand_lossage ("invalid %%O value");
7713 fprintf (file
, "%d", (XVECLEN (x
, 0) - 1) * 4);
7717 /* X is a CONST_INT that is a power of two. Output the logarithm. */
7719 || INT_LOWPART (x
) < 0
7720 || (i
= exact_log2 (INT_LOWPART (x
))) < 0)
7721 output_operand_lossage ("invalid %%p value");
7723 fprintf (file
, "%d", i
);
7727 /* The operand must be an indirect memory reference. The result
7728 is the register number. */
7729 if (GET_CODE (x
) != MEM
|| GET_CODE (XEXP (x
, 0)) != REG
7730 || REGNO (XEXP (x
, 0)) >= 32)
7731 output_operand_lossage ("invalid %%P value");
7733 fprintf (file
, "%d", REGNO (XEXP (x
, 0)));
7737 /* This outputs the logical code corresponding to a boolean
7738 expression. The expression may have one or both operands
7739 negated (if one, only the first one). For condition register
7740 logical operations, it will also treat the negated
7741 CR codes as NOTs, but not handle NOTs of them. */
7743 const char *const *t
= 0;
7745 enum rtx_code code
= GET_CODE (x
);
7746 static const char * const tbl
[3][3] = {
7747 { "and", "andc", "nor" },
7748 { "or", "orc", "nand" },
7749 { "xor", "eqv", "xor" } };
7753 else if (code
== IOR
)
7755 else if (code
== XOR
)
7758 output_operand_lossage ("invalid %%q value");
7760 if (GET_CODE (XEXP (x
, 0)) != NOT
)
7764 if (GET_CODE (XEXP (x
, 1)) == NOT
)
7775 /* X is a CR register. Print the mask for `mtcrf'. */
7776 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7777 output_operand_lossage ("invalid %%R value");
7779 fprintf (file
, "%d", 128 >> (REGNO (x
) - CR0_REGNO
));
7783 /* Low 5 bits of 32 - value */
7785 output_operand_lossage ("invalid %%s value");
7787 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, (32 - INT_LOWPART (x
)) & 31);
7791 /* PowerPC64 mask position. All 0's is excluded.
7792 CONST_INT 32-bit mask is considered sign-extended so any
7793 transition must occur within the CONST_INT, not on the boundary. */
7794 if (! mask64_operand (x
, DImode
))
7795 output_operand_lossage ("invalid %%S value");
7797 uval
= INT_LOWPART (x
);
7799 if (uval
& 1) /* Clear Left */
7801 #if HOST_BITS_PER_WIDE_INT > 64
7802 uval
&= ((unsigned HOST_WIDE_INT
) 1 << 64) - 1;
7806 else /* Clear Right */
7809 #if HOST_BITS_PER_WIDE_INT > 64
7810 uval
&= ((unsigned HOST_WIDE_INT
) 1 << 64) - 1;
7818 fprintf (file
, "%d", i
);
7822 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
7823 if (GET_CODE (x
) != REG
|| GET_MODE (x
) != CCmode
)
7826 /* Bit 3 is OV bit. */
7827 i
= 4 * (REGNO (x
) - CR0_REGNO
) + 3;
7829 /* If we want bit 31, write a shift count of zero, not 32. */
7830 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
7834 /* Print the symbolic name of a branch target register. */
7835 if (GET_CODE (x
) != REG
|| (REGNO (x
) != LINK_REGISTER_REGNUM
7836 && REGNO (x
) != COUNT_REGISTER_REGNUM
))
7837 output_operand_lossage ("invalid %%T value");
7838 else if (REGNO (x
) == LINK_REGISTER_REGNUM
)
7839 fputs (TARGET_NEW_MNEMONICS
? "lr" : "r", file
);
7841 fputs ("ctr", file
);
7845 /* High-order 16 bits of constant for use in unsigned operand. */
7847 output_operand_lossage ("invalid %%u value");
7849 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
7850 (INT_LOWPART (x
) >> 16) & 0xffff);
7854 /* High-order 16 bits of constant for use in signed operand. */
7856 output_operand_lossage ("invalid %%v value");
7858 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
7859 (INT_LOWPART (x
) >> 16) & 0xffff);
7863 /* Print `u' if this has an auto-increment or auto-decrement. */
7864 if (GET_CODE (x
) == MEM
7865 && (GET_CODE (XEXP (x
, 0)) == PRE_INC
7866 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
))
7871 /* Print the trap code for this operand. */
7872 switch (GET_CODE (x
))
7875 fputs ("eq", file
); /* 4 */
7878 fputs ("ne", file
); /* 24 */
7881 fputs ("lt", file
); /* 16 */
7884 fputs ("le", file
); /* 20 */
7887 fputs ("gt", file
); /* 8 */
7890 fputs ("ge", file
); /* 12 */
7893 fputs ("llt", file
); /* 2 */
7896 fputs ("lle", file
); /* 6 */
7899 fputs ("lgt", file
); /* 1 */
7902 fputs ("lge", file
); /* 5 */
7910 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
7913 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
,
7914 ((INT_LOWPART (x
) & 0xffff) ^ 0x8000) - 0x8000);
7916 print_operand (file
, x
, 0);
7920 /* MB value for a PowerPC64 rldic operand. */
7921 val
= (GET_CODE (x
) == CONST_INT
7922 ? INTVAL (x
) : CONST_DOUBLE_HIGH (x
));
7927 for (i
= 0; i
< HOST_BITS_PER_WIDE_INT
; i
++)
7928 if ((val
<<= 1) < 0)
7931 #if HOST_BITS_PER_WIDE_INT == 32
7932 if (GET_CODE (x
) == CONST_INT
&& i
>= 0)
7933 i
+= 32; /* zero-extend high-part was all 0's */
7934 else if (GET_CODE (x
) == CONST_DOUBLE
&& i
== 32)
7936 val
= CONST_DOUBLE_LOW (x
);
7943 for ( ; i
< 64; i
++)
7944 if ((val
<<= 1) < 0)
7949 fprintf (file
, "%d", i
+ 1);
7953 if (GET_CODE (x
) == MEM
7954 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x
, 0), 0))
7959 /* Like 'L', for third word of TImode */
7960 if (GET_CODE (x
) == REG
)
7961 fprintf (file
, "%s", reg_names
[REGNO (x
) + 2]);
7962 else if (GET_CODE (x
) == MEM
)
7964 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
7965 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
7966 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 8));
7968 output_address (XEXP (adjust_address_nv (x
, SImode
, 8), 0));
7969 if (small_data_operand (x
, GET_MODE (x
)))
7970 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
7971 reg_names
[SMALL_DATA_REG
]);
7976 /* X is a SYMBOL_REF. Write out the name preceded by a
7977 period and without any trailing data in brackets. Used for function
7978 names. If we are configured for System V (or the embedded ABI) on
7979 the PowerPC, do not emit the period, since those systems do not use
7980 TOCs and the like. */
7981 if (GET_CODE (x
) != SYMBOL_REF
)
7984 if (XSTR (x
, 0)[0] != '.')
7986 switch (DEFAULT_ABI
)
7996 case ABI_AIX_NODESC
:
8002 RS6000_OUTPUT_BASENAME (file
, XSTR (x
, 0));
8004 assemble_name (file
, XSTR (x
, 0));
8009 /* Like 'L', for last word of TImode. */
8010 if (GET_CODE (x
) == REG
)
8011 fprintf (file
, "%s", reg_names
[REGNO (x
) + 3]);
8012 else if (GET_CODE (x
) == MEM
)
8014 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
8015 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
8016 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 12));
8018 output_address (XEXP (adjust_address_nv (x
, SImode
, 12), 0));
8019 if (small_data_operand (x
, GET_MODE (x
)))
8020 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
8021 reg_names
[SMALL_DATA_REG
]);
8025 /* Print AltiVec or SPE memory operand. */
8030 if (GET_CODE (x
) != MEM
)
8038 if (GET_CODE (tmp
) == REG
)
8040 fprintf (file
, "0(%s)", reg_names
[REGNO (tmp
)]);
8043 /* Handle [reg+UIMM]. */
8044 else if (GET_CODE (tmp
) == PLUS
&&
8045 GET_CODE (XEXP (tmp
, 1)) == CONST_INT
)
8049 if (GET_CODE (XEXP (tmp
, 0)) != REG
)
8052 x
= INTVAL (XEXP (tmp
, 1));
8053 fprintf (file
, "%d(%s)", x
, reg_names
[REGNO (XEXP (tmp
, 0))]);
8057 /* Fall through. Must be [reg+reg]. */
8059 if (GET_CODE (tmp
) == REG
)
8060 fprintf (file
, "0,%s", reg_names
[REGNO (tmp
)]);
8061 else if (GET_CODE (tmp
) == PLUS
&& GET_CODE (XEXP (tmp
, 1)) == REG
)
8063 if (REGNO (XEXP (tmp
, 0)) == 0)
8064 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (tmp
, 1)) ],
8065 reg_names
[ REGNO (XEXP (tmp
, 0)) ]);
8067 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (tmp
, 0)) ],
8068 reg_names
[ REGNO (XEXP (tmp
, 1)) ]);
8076 if (GET_CODE (x
) == REG
)
8077 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
8078 else if (GET_CODE (x
) == MEM
)
8080 /* We need to handle PRE_INC and PRE_DEC here, since we need to
8081 know the width from the mode. */
8082 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
)
8083 fprintf (file
, "%d(%s)", GET_MODE_SIZE (GET_MODE (x
)),
8084 reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))]);
8085 else if (GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
8086 fprintf (file
, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x
)),
8087 reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))]);
8089 output_address (XEXP (x
, 0));
8092 output_addr_const (file
, x
);
8096 output_operand_lossage ("invalid %%xn code");
8100 /* Print the address of an operand. */
8103 print_operand_address (file
, x
)
8107 if (GET_CODE (x
) == REG
)
8108 fprintf (file
, "0(%s)", reg_names
[ REGNO (x
) ]);
8109 else if (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == CONST
8110 || GET_CODE (x
) == LABEL_REF
)
8112 output_addr_const (file
, x
);
8113 if (small_data_operand (x
, GET_MODE (x
)))
8114 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
8115 reg_names
[SMALL_DATA_REG
]);
8116 else if (TARGET_TOC
)
8119 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == REG
)
8121 if (REGNO (XEXP (x
, 0)) == 0)
8122 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (x
, 1)) ],
8123 reg_names
[ REGNO (XEXP (x
, 0)) ]);
8125 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (x
, 0)) ],
8126 reg_names
[ REGNO (XEXP (x
, 1)) ]);
8128 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
)
8130 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (XEXP (x
, 1)));
8131 fprintf (file
, "(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
8134 else if (GET_CODE (x
) == LO_SUM
&& GET_CODE (XEXP (x
, 0)) == REG
8135 && CONSTANT_P (XEXP (x
, 1)))
8137 output_addr_const (file
, XEXP (x
, 1));
8138 fprintf (file
, "@l(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
8142 else if (GET_CODE (x
) == LO_SUM
&& GET_CODE (XEXP (x
, 0)) == REG
8143 && CONSTANT_P (XEXP (x
, 1)))
8145 fprintf (file
, "lo16(");
8146 output_addr_const (file
, XEXP (x
, 1));
8147 fprintf (file
, ")(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
8150 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x
))
8152 if (TARGET_AIX
&& (!TARGET_ELF
|| !TARGET_MINIMAL_TOC
))
8154 rtx contains_minus
= XEXP (x
, 1);
8158 /* Find the (minus (sym) (toc)) buried in X, and temporarily
8159 turn it into (sym) for output_addr_const. */
8160 while (GET_CODE (XEXP (contains_minus
, 0)) != MINUS
)
8161 contains_minus
= XEXP (contains_minus
, 0);
8163 minus
= XEXP (contains_minus
, 0);
8164 symref
= XEXP (minus
, 0);
8165 XEXP (contains_minus
, 0) = symref
;
8170 name
= XSTR (symref
, 0);
8171 newname
= alloca (strlen (name
) + sizeof ("@toc"));
8172 strcpy (newname
, name
);
8173 strcat (newname
, "@toc");
8174 XSTR (symref
, 0) = newname
;
8176 output_addr_const (file
, XEXP (x
, 1));
8178 XSTR (symref
, 0) = name
;
8179 XEXP (contains_minus
, 0) = minus
;
8182 output_addr_const (file
, XEXP (x
, 1));
8184 fprintf (file
, "(%s)", reg_names
[REGNO (XEXP (x
, 0))]);
8190 /* Target hook for assembling integer objects. The PowerPC version has
8191 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
8192 is defined. It also needs to handle DI-mode objects on 64-bit
8196 rs6000_assemble_integer (x
, size
, aligned_p
)
8201 #ifdef RELOCATABLE_NEEDS_FIXUP
8202 /* Special handling for SI values. */
8203 if (size
== 4 && aligned_p
)
8205 extern int in_toc_section
PARAMS ((void));
8206 static int recurse
= 0;
8208 /* For -mrelocatable, we mark all addresses that need to be fixed up
8209 in the .fixup section. */
8210 if (TARGET_RELOCATABLE
8211 && !in_toc_section ()
8212 && !in_text_section ()
8214 && GET_CODE (x
) != CONST_INT
8215 && GET_CODE (x
) != CONST_DOUBLE
8221 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCP", fixuplabelno
);
8223 ASM_OUTPUT_LABEL (asm_out_file
, buf
);
8224 fprintf (asm_out_file
, "\t.long\t(");
8225 output_addr_const (asm_out_file
, x
);
8226 fprintf (asm_out_file
, ")@fixup\n");
8227 fprintf (asm_out_file
, "\t.section\t\".fixup\",\"aw\"\n");
8228 ASM_OUTPUT_ALIGN (asm_out_file
, 2);
8229 fprintf (asm_out_file
, "\t.long\t");
8230 assemble_name (asm_out_file
, buf
);
8231 fprintf (asm_out_file
, "\n\t.previous\n");
8235 /* Remove initial .'s to turn a -mcall-aixdesc function
8236 address into the address of the descriptor, not the function
8238 else if (GET_CODE (x
) == SYMBOL_REF
8239 && XSTR (x
, 0)[0] == '.'
8240 && DEFAULT_ABI
== ABI_AIX
)
8242 const char *name
= XSTR (x
, 0);
8243 while (*name
== '.')
8246 fprintf (asm_out_file
, "\t.long\t%s\n", name
);
8250 #endif /* RELOCATABLE_NEEDS_FIXUP */
8251 return default_assemble_integer (x
, size
, aligned_p
);
#ifdef HAVE_GAS_HIDDEN
/* Emit an assembler directive to set symbol visibility for DECL to
   VISIBILITY_TYPE.  */

static void
rs6000_assemble_visibility (decl, vis)
     tree decl;
     int vis;
{
  /* Functions need to have their entry point symbol visibility set as
     well as their descriptor symbol visibility.  */
  if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
    {
      /* Indexed by the visibility enumeration; entry 0 (default
	 visibility) is unused, hence NULL.  */
      static const char * const visibility_types[] = {
	NULL, "internal", "hidden", "protected"
      };

      const char *name, *type;

      name = ((* targetm.strip_name_encoding)
	      (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
      type = visibility_types[vis];

      /* Both the descriptor symbol and the dot-prefixed entry point.  */
      fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
      fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
    }
  else
    default_assemble_visibility (decl, vis);
}
#endif
8286 rs6000_reverse_condition (mode
, code
)
8287 enum machine_mode mode
;
8290 /* Reversal of FP compares takes care -- an ordered compare
8291 becomes an unordered compare and vice versa. */
8292 if (mode
== CCFPmode
&& !flag_unsafe_math_optimizations
)
8293 return reverse_condition_maybe_unordered (code
);
8295 return reverse_condition (code
);
8298 /* Generate a compare for CODE. Return a brand-new rtx that
8299 represents the result of the compare. */
8302 rs6000_generate_compare (code
)
8305 enum machine_mode comp_mode
;
8308 if (rs6000_compare_fp_p
)
8309 comp_mode
= CCFPmode
;
8310 else if (code
== GTU
|| code
== LTU
8311 || code
== GEU
|| code
== LEU
)
8312 comp_mode
= CCUNSmode
;
8316 /* First, the compare. */
8317 compare_result
= gen_reg_rtx (comp_mode
);
8319 /* SPE FP compare instructions on the GPRs. Yuck! */
8320 if ((TARGET_SPE
&& TARGET_HARD_FLOAT
) && rs6000_compare_fp_p
)
8322 rtx cmp
, or1
, or2
, or_result
, compare_result2
;
8330 cmp
= flag_unsafe_math_optimizations
8331 ? gen_tstsfeq_gpr (compare_result
, rs6000_compare_op0
,
8333 : gen_cmpsfeq_gpr (compare_result
, rs6000_compare_op0
,
8334 rs6000_compare_op1
);
8342 cmp
= flag_unsafe_math_optimizations
8343 ? gen_tstsfgt_gpr (compare_result
, rs6000_compare_op0
,
8345 : gen_cmpsfgt_gpr (compare_result
, rs6000_compare_op0
,
8346 rs6000_compare_op1
);
8354 cmp
= flag_unsafe_math_optimizations
8355 ? gen_tstsflt_gpr (compare_result
, rs6000_compare_op0
,
8357 : gen_cmpsflt_gpr (compare_result
, rs6000_compare_op0
,
8358 rs6000_compare_op1
);
8364 /* Synthesize LE and GE from LT/GT || EQ. */
8365 if (code
== LE
|| code
== GE
|| code
== LEU
|| code
== GEU
)
8367 /* Synthesize GE/LE frome GT/LT || EQ. */
8373 case LE
: code
= LT
; break;
8374 case GE
: code
= GT
; break;
8375 case LEU
: code
= LT
; break;
8376 case GEU
: code
= GT
; break;
8380 or1
= gen_reg_rtx (SImode
);
8381 or2
= gen_reg_rtx (SImode
);
8382 or_result
= gen_reg_rtx (CCEQmode
);
8383 compare_result2
= gen_reg_rtx (CCFPmode
);
8386 cmp
= flag_unsafe_math_optimizations
8387 ? gen_tstsfeq_gpr (compare_result2
, rs6000_compare_op0
,
8389 : gen_cmpsfeq_gpr (compare_result2
, rs6000_compare_op0
,
8390 rs6000_compare_op1
);
8393 /* The MC8540 FP compare instructions set the CR bits
8394 differently than other PPC compare instructions. For
8395 that matter, there is no generic test instruction, but a
8396 testgt, testlt, and testeq. For a true condition, bit 2
8397 is set (x1xx) in the CR. Following the traditional CR
8403 ... bit 2 would be a GT CR alias, so later on we
8404 look in the GT bits for the branch instructions.
8405 However, we must be careful to emit correct RTL in
8406 the meantime, so optimizations don't get confused. */
8408 or1
= gen_rtx (NE
, SImode
, compare_result
, const0_rtx
);
8409 or2
= gen_rtx (NE
, SImode
, compare_result2
, const0_rtx
);
8411 /* OR them together. */
8412 cmp
= gen_rtx_SET (VOIDmode
, or_result
,
8413 gen_rtx_COMPARE (CCEQmode
,
8414 gen_rtx_IOR (SImode
, or1
, or2
),
8416 compare_result
= or_result
;
8421 /* We only care about 1 bit (x1xx), so map everything to NE to
8422 maintain rtl sanity. We'll get to the right bit (x1xx) at
8423 code output time. */
8424 if (code
== NE
|| code
== LTGT
)
8425 /* Do the inverse here because we have no cmpne
8426 instruction. We use the cmpeq instruction and expect
8427 to get a 0 instead. */
8436 emit_insn (gen_rtx_SET (VOIDmode
, compare_result
,
8437 gen_rtx_COMPARE (comp_mode
,
8439 rs6000_compare_op1
)));
8441 /* Some kinds of FP comparisons need an OR operation;
8442 except for flag_unsafe_math_optimizations we don't bother. */
8443 if (rs6000_compare_fp_p
8444 && ! flag_unsafe_math_optimizations
8445 && ! (TARGET_HARD_FLOAT
&& TARGET_SPE
)
8446 && (code
== LE
|| code
== GE
8447 || code
== UNEQ
|| code
== LTGT
8448 || code
== UNGT
|| code
== UNLT
))
8450 enum rtx_code or1
, or2
;
8451 rtx or1_rtx
, or2_rtx
, compare2_rtx
;
8452 rtx or_result
= gen_reg_rtx (CCEQmode
);
8456 case LE
: or1
= LT
; or2
= EQ
; break;
8457 case GE
: or1
= GT
; or2
= EQ
; break;
8458 case UNEQ
: or1
= UNORDERED
; or2
= EQ
; break;
8459 case LTGT
: or1
= LT
; or2
= GT
; break;
8460 case UNGT
: or1
= UNORDERED
; or2
= GT
; break;
8461 case UNLT
: or1
= UNORDERED
; or2
= LT
; break;
8464 validate_condition_mode (or1
, comp_mode
);
8465 validate_condition_mode (or2
, comp_mode
);
8466 or1_rtx
= gen_rtx (or1
, SImode
, compare_result
, const0_rtx
);
8467 or2_rtx
= gen_rtx (or2
, SImode
, compare_result
, const0_rtx
);
8468 compare2_rtx
= gen_rtx_COMPARE (CCEQmode
,
8469 gen_rtx_IOR (SImode
, or1_rtx
, or2_rtx
),
8471 emit_insn (gen_rtx_SET (VOIDmode
, or_result
, compare2_rtx
));
8473 compare_result
= or_result
;
8477 validate_condition_mode (code
, GET_MODE (compare_result
));
8479 return gen_rtx (code
, VOIDmode
, compare_result
, const0_rtx
);
8483 /* Emit the RTL for an sCOND pattern. */
8486 rs6000_emit_sCOND (code
, result
)
8491 enum machine_mode op_mode
;
8493 condition_rtx
= rs6000_generate_compare (code
);
8495 op_mode
= GET_MODE (rs6000_compare_op0
);
8496 if (op_mode
== VOIDmode
)
8497 op_mode
= GET_MODE (rs6000_compare_op1
);
8499 if (TARGET_POWERPC64
&& (op_mode
== DImode
|| rs6000_compare_fp_p
))
8501 PUT_MODE (condition_rtx
, DImode
);
8502 convert_move (result
, condition_rtx
, 0);
8506 PUT_MODE (condition_rtx
, SImode
);
8507 emit_insn (gen_rtx_SET (VOIDmode
, result
, condition_rtx
));
8511 /* Emit a branch of kind CODE to location LOC. */
8514 rs6000_emit_cbranch (code
, loc
)
8518 rtx condition_rtx
, loc_ref
;
8520 condition_rtx
= rs6000_generate_compare (code
);
8521 loc_ref
= gen_rtx_LABEL_REF (VOIDmode
, loc
);
8522 emit_jump_insn (gen_rtx_SET (VOIDmode
, pc_rtx
,
8523 gen_rtx_IF_THEN_ELSE (VOIDmode
, condition_rtx
,
8527 /* Return the string to output a conditional branch to LABEL, which is
8528 the operand number of the label, or -1 if the branch is really a
8531 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
8532 condition code register and its mode specifies what kind of
8535 REVERSED is nonzero if we should reverse the sense of the comparison.
8537 INSN is the insn. */
8540 output_cbranch (op
, label
, reversed
, insn
)
8546 static char string
[64];
8547 enum rtx_code code
= GET_CODE (op
);
8548 rtx cc_reg
= XEXP (op
, 0);
8549 enum machine_mode mode
= GET_MODE (cc_reg
);
8550 int cc_regno
= REGNO (cc_reg
) - CR0_REGNO
;
8551 int need_longbranch
= label
!= NULL
&& get_attr_length (insn
) == 8;
8552 int really_reversed
= reversed
^ need_longbranch
;
8558 validate_condition_mode (code
, mode
);
8560 /* Work out which way this really branches. We could use
8561 reverse_condition_maybe_unordered here always but this
8562 makes the resulting assembler clearer. */
8563 if (really_reversed
)
8565 /* Reversal of FP compares takes care -- an ordered compare
8566 becomes an unordered compare and vice versa. */
8567 if (mode
== CCFPmode
)
8568 code
= reverse_condition_maybe_unordered (code
);
8570 code
= reverse_condition (code
);
8573 if ((TARGET_SPE
&& TARGET_HARD_FLOAT
) && mode
== CCFPmode
)
8575 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
8578 /* Opposite of GT. */
8580 else if (code
== NE
)
8588 /* Not all of these are actually distinct opcodes, but
8589 we distinguish them for clarity of the resulting assembler. */
8591 ccode
= "ne"; break;
8593 ccode
= "eq"; break;
8595 ccode
= "ge"; break;
8596 case GT
: case GTU
: case UNGT
:
8597 ccode
= "gt"; break;
8599 ccode
= "le"; break;
8600 case LT
: case LTU
: case UNLT
:
8601 ccode
= "lt"; break;
8602 case UNORDERED
: ccode
= "un"; break;
8603 case ORDERED
: ccode
= "nu"; break;
8604 case UNGE
: ccode
= "nl"; break;
8605 case UNLE
: ccode
= "ng"; break;
8610 /* Maybe we have a guess as to how likely the branch is.
8611 The old mnemonics don't have a way to specify this information. */
8613 note
= find_reg_note (insn
, REG_BR_PROB
, NULL_RTX
);
8614 if (note
!= NULL_RTX
)
8616 /* PROB is the difference from 50%. */
8617 int prob
= INTVAL (XEXP (note
, 0)) - REG_BR_PROB_BASE
/ 2;
8618 bool always_hint
= rs6000_cpu
!= PROCESSOR_POWER4
;
8620 /* Only hint for highly probable/improbable branches on newer
8621 cpus as static prediction overrides processor dynamic
8622 prediction. For older cpus we may as well always hint, but
8623 assume not taken for branches that are very close to 50% as a
8624 mispredicted taken branch is more expensive than a
8625 mispredicted not-taken branch. */
8627 || abs (prob
) > REG_BR_PROB_BASE
/ 100 * 48)
8629 if (abs (prob
) > REG_BR_PROB_BASE
/ 20
8630 && ((prob
> 0) ^ need_longbranch
))
8638 s
+= sprintf (s
, "{b%sr|b%slr%s} ", ccode
, ccode
, pred
);
8640 s
+= sprintf (s
, "{b%s|b%s%s} ", ccode
, ccode
, pred
);
8642 /* We need to escape any '%' characters in the reg_names string.
8643 Assume they'd only be the first character... */
8644 if (reg_names
[cc_regno
+ CR0_REGNO
][0] == '%')
8646 s
+= sprintf (s
, "%s", reg_names
[cc_regno
+ CR0_REGNO
]);
8650 /* If the branch distance was too far, we may have to use an
8651 unconditional branch to go the distance. */
8652 if (need_longbranch
)
8653 s
+= sprintf (s
, ",$+8\n\tb %s", label
);
8655 s
+= sprintf (s
, ",%s", label
);
8661 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
8662 operands of the last comparison is nonzero/true, FALSE_COND if it
8663 is zero/false. Return 0 if the hardware has no such operation. */
8666 rs6000_emit_cmove (dest
, op
, true_cond
, false_cond
)
8672 enum rtx_code code
= GET_CODE (op
);
8673 rtx op0
= rs6000_compare_op0
;
8674 rtx op1
= rs6000_compare_op1
;
8676 enum machine_mode compare_mode
= GET_MODE (op0
);
8677 enum machine_mode result_mode
= GET_MODE (dest
);
8680 /* These modes should always match. */
8681 if (GET_MODE (op1
) != compare_mode
8682 /* In the isel case however, we can use a compare immediate, so
8683 op1 may be a small constant. */
8684 && (!TARGET_ISEL
|| !short_cint_operand (op1
, VOIDmode
)))
8686 if (GET_MODE (true_cond
) != result_mode
)
8688 if (GET_MODE (false_cond
) != result_mode
)
8691 /* First, work out if the hardware can do this at all, or
8692 if it's too slow... */
8693 if (! rs6000_compare_fp_p
)
8696 return rs6000_emit_int_cmove (dest
, op
, true_cond
, false_cond
);
8700 /* Eliminate half of the comparisons by switching operands, this
8701 makes the remaining code simpler. */
8702 if (code
== UNLT
|| code
== UNGT
|| code
== UNORDERED
|| code
== NE
8703 || code
== LTGT
|| code
== LT
)
8705 code
= reverse_condition_maybe_unordered (code
);
8707 true_cond
= false_cond
;
8711 /* UNEQ and LTGT take four instructions for a comparison with zero,
8712 it'll probably be faster to use a branch here too. */
8716 if (GET_CODE (op1
) == CONST_DOUBLE
)
8717 REAL_VALUE_FROM_CONST_DOUBLE (c1
, op1
);
8719 /* We're going to try to implement comparisons by performing
8720 a subtract, then comparing against zero. Unfortunately,
8721 Inf - Inf is NaN which is not zero, and so if we don't
8722 know that the operand is finite and the comparison
8723 would treat EQ different to UNORDERED, we can't do it. */
8724 if (! flag_unsafe_math_optimizations
8725 && code
!= GT
&& code
!= UNGE
8726 && (GET_CODE (op1
) != CONST_DOUBLE
|| real_isinf (&c1
))
8727 /* Constructs of the form (a OP b ? a : b) are safe. */
8728 && ((! rtx_equal_p (op0
, false_cond
) && ! rtx_equal_p (op1
, false_cond
))
8729 || (! rtx_equal_p (op0
, true_cond
)
8730 && ! rtx_equal_p (op1
, true_cond
))))
8732 /* At this point we know we can use fsel. */
8734 /* Reduce the comparison to a comparison against zero. */
8735 temp
= gen_reg_rtx (compare_mode
);
8736 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8737 gen_rtx_MINUS (compare_mode
, op0
, op1
)));
8739 op1
= CONST0_RTX (compare_mode
);
8741 /* If we don't care about NaNs we can reduce some of the comparisons
8742 down to faster ones. */
8743 if (flag_unsafe_math_optimizations
)
8749 true_cond
= false_cond
;
8762 /* Now, reduce everything down to a GE. */
8769 temp
= gen_reg_rtx (compare_mode
);
8770 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
8775 temp
= gen_reg_rtx (compare_mode
);
8776 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_ABS (compare_mode
, op0
)));
8781 temp
= gen_reg_rtx (compare_mode
);
8782 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8783 gen_rtx_NEG (compare_mode
,
8784 gen_rtx_ABS (compare_mode
, op0
))));
8789 temp
= gen_reg_rtx (result_mode
);
8790 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8791 gen_rtx_IF_THEN_ELSE (result_mode
,
8792 gen_rtx_GE (VOIDmode
,
8794 true_cond
, false_cond
)));
8796 true_cond
= false_cond
;
8798 temp
= gen_reg_rtx (compare_mode
);
8799 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
8804 temp
= gen_reg_rtx (result_mode
);
8805 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8806 gen_rtx_IF_THEN_ELSE (result_mode
,
8807 gen_rtx_GE (VOIDmode
,
8809 true_cond
, false_cond
)));
8811 false_cond
= true_cond
;
8813 temp
= gen_reg_rtx (compare_mode
);
8814 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
8822 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
8823 gen_rtx_IF_THEN_ELSE (result_mode
,
8824 gen_rtx_GE (VOIDmode
,
8826 true_cond
, false_cond
)));
8830 /* Same as above, but for ints (isel). */
8833 rs6000_emit_int_cmove (dest
, op
, true_cond
, false_cond
)
8839 rtx condition_rtx
, cr
;
8841 /* All isel implementations thus far are 32-bits. */
8842 if (GET_MODE (rs6000_compare_op0
) != SImode
)
8845 /* We still have to do the compare, because isel doesn't do a
8846 compare, it just looks at the CRx bits set by a previous compare
8848 condition_rtx
= rs6000_generate_compare (GET_CODE (op
));
8849 cr
= XEXP (condition_rtx
, 0);
8851 if (GET_MODE (cr
) == CCmode
)
8852 emit_insn (gen_isel_signed (dest
, condition_rtx
,
8853 true_cond
, false_cond
, cr
));
8855 emit_insn (gen_isel_unsigned (dest
, condition_rtx
,
8856 true_cond
, false_cond
, cr
));
8862 output_isel (operands
)
8867 code
= GET_CODE (operands
[1]);
8868 if (code
== GE
|| code
== GEU
|| code
== LE
|| code
== LEU
|| code
== NE
)
8870 PUT_CODE (operands
[1], reverse_condition (code
));
8871 return "isel %0,%3,%2,%j1";
8874 return "isel %0,%2,%3,%j1";
8878 rs6000_emit_minmax (dest
, code
, op0
, op1
)
8884 enum machine_mode mode
= GET_MODE (op0
);
8888 if (code
== SMAX
|| code
== SMIN
)
8893 if (code
== SMAX
|| code
== UMAX
)
8894 target
= emit_conditional_move (dest
, c
, op0
, op1
, mode
,
8897 target
= emit_conditional_move (dest
, c
, op0
, op1
, mode
,
8899 if (target
== NULL_RTX
)
8902 emit_move_insn (dest
, target
);
8905 /* This page contains routines that are used to determine what the
8906 function prologue and epilogue code will do and write them out. */
8908 /* Return the first fixed-point register that is required to be
8909 saved. 32 if none. */
8912 first_reg_to_save ()
8916 /* Find lowest numbered live register. */
8917 for (first_reg
= 13; first_reg
<= 31; first_reg
++)
8918 if (regs_ever_live
[first_reg
]
8919 && (! call_used_regs
[first_reg
]
8920 || (first_reg
== RS6000_PIC_OFFSET_TABLE_REGNUM
8921 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1)
8922 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
)))))
8927 && current_function_uses_pic_offset_table
8928 && first_reg
> RS6000_PIC_OFFSET_TABLE_REGNUM
)
8929 return RS6000_PIC_OFFSET_TABLE_REGNUM
;
8935 /* Similar, for FP regs. */
8938 first_fp_reg_to_save ()
8942 /* Find lowest numbered live register. */
8943 for (first_reg
= 14 + 32; first_reg
<= 63; first_reg
++)
8944 if (regs_ever_live
[first_reg
])
8950 /* Similar, for AltiVec regs. */
8953 first_altivec_reg_to_save ()
8957 /* Stack frame remains as is unless we are in AltiVec ABI. */
8958 if (! TARGET_ALTIVEC_ABI
)
8959 return LAST_ALTIVEC_REGNO
+ 1;
8961 /* Find lowest numbered live register. */
8962 for (i
= FIRST_ALTIVEC_REGNO
+ 20; i
<= LAST_ALTIVEC_REGNO
; ++i
)
8963 if (regs_ever_live
[i
])
8969 /* Return a 32-bit mask of the AltiVec registers we need to set in
8970 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
8971 the 32-bit word is 0. */
8974 compute_vrsave_mask ()
8976 unsigned int i
, mask
= 0;
8978 /* First, find out if we use _any_ altivec registers. */
8979 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
8980 if (regs_ever_live
[i
])
8981 mask
|= ALTIVEC_REG_BIT (i
);
8986 /* Next, add all registers that are call-clobbered. We do this
8987 because post-reload register optimizers such as regrename_optimize
8988 may choose to use them. They never change the register class
8989 chosen by reload, so cannot create new uses of altivec registers
8990 if there were none before, so the early exit above is safe. */
8991 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
8992 altivec registers not saved in the mask, which might well make the
8993 adjustments below more effective in eliding the save/restore of
8994 VRSAVE in small functions. */
8995 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
8996 if (call_used_regs
[i
])
8997 mask
|= ALTIVEC_REG_BIT (i
);
8999 /* Next, remove the argument registers from the set. These must
9000 be in the VRSAVE mask set by the caller, so we don't need to add
9001 them in again. More importantly, the mask we compute here is
9002 used to generate CLOBBERs in the set_vrsave insn, and we do not
9003 wish the argument registers to die. */
9004 for (i
= cfun
->args_info
.vregno
; i
>= ALTIVEC_ARG_MIN_REG
; --i
)
9005 mask
&= ~ALTIVEC_REG_BIT (i
);
9007 /* Similarly, remove the return value from the set. */
9010 diddle_return_value (is_altivec_return_reg
, &yes
);
9012 mask
&= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN
);
9019 is_altivec_return_reg (reg
, xyes
)
9023 bool *yes
= (bool *) xyes
;
9024 if (REGNO (reg
) == ALTIVEC_ARG_RETURN
)
9029 /* Calculate the stack information for the current function. This is
9030 complicated by having two separate calling sequences, the AIX calling
9031 sequence and the V.4 calling sequence.
9033 AIX (and Darwin/Mac OS X) stack frames look like:
9035 SP----> +---------------------------------------+
9036 | back chain to caller | 0 0
9037 +---------------------------------------+
9038 | saved CR | 4 8 (8-11)
9039 +---------------------------------------+
9041 +---------------------------------------+
9042 | reserved for compilers | 12 24
9043 +---------------------------------------+
9044 | reserved for binders | 16 32
9045 +---------------------------------------+
9046 | saved TOC pointer | 20 40
9047 +---------------------------------------+
9048 | Parameter save area (P) | 24 48
9049 +---------------------------------------+
9050 | Alloca space (A) | 24+P etc.
9051 +---------------------------------------+
9052 | Local variable space (L) | 24+P+A
9053 +---------------------------------------+
9054 | Float/int conversion temporary (X) | 24+P+A+L
9055 +---------------------------------------+
9056 | Save area for AltiVec registers (W) | 24+P+A+L+X
9057 +---------------------------------------+
9058 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
9059 +---------------------------------------+
9060 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
9061 +---------------------------------------+
9062 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
9063 +---------------------------------------+
9064 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
9065 +---------------------------------------+
9066 old SP->| back chain to caller's caller |
9067 +---------------------------------------+
9069 The required alignment for AIX configurations is two words (i.e., 8
9073 V.4 stack frames look like:
9075 SP----> +---------------------------------------+
9076 | back chain to caller | 0
9077 +---------------------------------------+
9078 | caller's saved LR | 4
9079 +---------------------------------------+
9080 | Parameter save area (P) | 8
9081 +---------------------------------------+
9082 | Alloca space (A) | 8+P
9083 +---------------------------------------+
9084 | Varargs save area (V) | 8+P+A
9085 +---------------------------------------+
9086 | Local variable space (L) | 8+P+A+V
9087 +---------------------------------------+
9088 | Float/int conversion temporary (X) | 8+P+A+V+L
9089 +---------------------------------------+
9090 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
9091 +---------------------------------------+
9092 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
9093 +---------------------------------------+
9094 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
9095 +---------------------------------------+
9096 | SPE: area for 64-bit GP registers |
9097 +---------------------------------------+
9098 | SPE alignment padding |
9099 +---------------------------------------+
9100 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
9101 +---------------------------------------+
9102 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
9103 +---------------------------------------+
9104 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
9105 +---------------------------------------+
9106 old SP->| back chain to caller's caller |
9107 +---------------------------------------+
9109 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
9110 given. (But note below and in sysv4.h that we require only 8 and
9111 may round up the size of our stack frame anyways. The historical
9112 reason is early versions of powerpc-linux which didn't properly
9113 align the stack at program startup. A happy side-effect is that
9114 -mno-eabi libraries can be used with -meabi programs.)
9116 The EABI configuration defaults to the V.4 layout, unless
9117 -mcall-aix is used, in which case the AIX layout is used. However,
9118 the stack alignment requirements may differ. If -mno-eabi is not
9119 given, the required stack alignment is 8 bytes; if -mno-eabi is
9120 given, the required alignment is 16 bytes. (But see V.4 comment
9123 #ifndef ABI_STACK_BOUNDARY
9124 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
9128 rs6000_stack_info ()
9130 static rs6000_stack_t info
, zero_info
;
9131 rs6000_stack_t
*info_ptr
= &info
;
9132 int reg_size
= TARGET_POWERPC64
? 8 : 4;
9133 enum rs6000_abi abi
;
9137 /* Zero all fields portably. */
9140 /* Select which calling sequence. */
9141 info_ptr
->abi
= abi
= DEFAULT_ABI
;
9143 /* Calculate which registers need to be saved & save area size. */
9144 info_ptr
->first_gp_reg_save
= first_reg_to_save ();
9145 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
9146 even if it currently looks like we won't. */
9147 if (((TARGET_TOC
&& TARGET_MINIMAL_TOC
)
9148 || (flag_pic
== 1 && abi
== ABI_V4
)
9149 || (flag_pic
&& abi
== ABI_DARWIN
))
9150 && info_ptr
->first_gp_reg_save
> RS6000_PIC_OFFSET_TABLE_REGNUM
)
9151 info_ptr
->gp_size
= reg_size
* (32 - RS6000_PIC_OFFSET_TABLE_REGNUM
);
9153 info_ptr
->gp_size
= reg_size
* (32 - info_ptr
->first_gp_reg_save
);
9155 /* For the SPE, we have an additional upper 32-bits on each GPR.
9156 Ideally we should save the entire 64-bits only when the upper
9157 half is used in SIMD instructions. Since we only record
9158 registers live (not the size they are used in), this proves
9159 difficult because we'd have to traverse the instruction chain at
9160 the right time, taking reload into account. This is a real pain,
9161 so we opt to save the GPRs in 64-bits always. Anyone overly
9162 concerned with frame size can fix this. ;-).
9164 So... since we save all GPRs (except the SP) in 64-bits, the
9165 traditional GP save area will be empty. */
9167 info_ptr
->gp_size
= 0;
9169 info_ptr
->first_fp_reg_save
= first_fp_reg_to_save ();
9170 info_ptr
->fp_size
= 8 * (64 - info_ptr
->first_fp_reg_save
);
9172 info_ptr
->first_altivec_reg_save
= first_altivec_reg_to_save ();
9173 info_ptr
->altivec_size
= 16 * (LAST_ALTIVEC_REGNO
+ 1
9174 - info_ptr
->first_altivec_reg_save
);
9176 /* Does this function call anything? */
9177 info_ptr
->calls_p
= (! current_function_is_leaf
9178 || cfun
->machine
->ra_needs_full_frame
);
9180 /* Determine if we need to save the link register. */
9181 if (rs6000_ra_ever_killed ()
9182 || (DEFAULT_ABI
== ABI_AIX
&& current_function_profile
)
9183 #ifdef TARGET_RELOCATABLE
9184 || (TARGET_RELOCATABLE
&& (get_pool_size () != 0))
9186 || (info_ptr
->first_fp_reg_save
!= 64
9187 && !FP_SAVE_INLINE (info_ptr
->first_fp_reg_save
))
9188 || info_ptr
->first_altivec_reg_save
<= LAST_ALTIVEC_REGNO
9189 || (abi
== ABI_V4
&& current_function_calls_alloca
)
9190 || (DEFAULT_ABI
== ABI_DARWIN
9192 && current_function_uses_pic_offset_table
)
9193 || info_ptr
->calls_p
)
9195 info_ptr
->lr_save_p
= 1;
9196 regs_ever_live
[LINK_REGISTER_REGNUM
] = 1;
9199 /* Determine if we need to save the condition code registers. */
9200 if (regs_ever_live
[CR2_REGNO
]
9201 || regs_ever_live
[CR3_REGNO
]
9202 || regs_ever_live
[CR4_REGNO
])
9204 info_ptr
->cr_save_p
= 1;
9206 info_ptr
->cr_size
= reg_size
;
9209 /* If the current function calls __builtin_eh_return, then we need
9210 to allocate stack space for registers that will hold data for
9211 the exception handler. */
9212 if (current_function_calls_eh_return
)
9215 for (i
= 0; EH_RETURN_DATA_REGNO (i
) != INVALID_REGNUM
; ++i
)
9218 /* SPE saves EH registers in 64-bits. */
9219 ehrd_size
= i
* (TARGET_SPE_ABI
? UNITS_PER_SPE_WORD
: UNITS_PER_WORD
);
9224 /* Determine various sizes. */
9225 info_ptr
->reg_size
= reg_size
;
9226 info_ptr
->fixed_size
= RS6000_SAVE_AREA
;
9227 info_ptr
->varargs_size
= RS6000_VARARGS_AREA
;
9228 info_ptr
->vars_size
= RS6000_ALIGN (get_frame_size (), 8);
9229 info_ptr
->parm_size
= RS6000_ALIGN (current_function_outgoing_args_size
,
9233 info_ptr
->spe_gp_size
= 8 * (32 - info_ptr
->first_gp_reg_save
);
9235 info_ptr
->spe_gp_size
= 0;
9237 if (TARGET_ALTIVEC_ABI
&& TARGET_ALTIVEC_VRSAVE
)
9239 info_ptr
->vrsave_mask
= compute_vrsave_mask ();
9240 info_ptr
->vrsave_size
= info_ptr
->vrsave_mask
? 4 : 0;
9244 info_ptr
->vrsave_mask
= 0;
9245 info_ptr
->vrsave_size
= 0;
9248 /* Calculate the offsets. */
9256 case ABI_AIX_NODESC
:
9258 info_ptr
->fp_save_offset
= - info_ptr
->fp_size
;
9259 info_ptr
->gp_save_offset
= info_ptr
->fp_save_offset
- info_ptr
->gp_size
;
9261 if (TARGET_ALTIVEC_ABI
)
9263 info_ptr
->vrsave_save_offset
9264 = info_ptr
->gp_save_offset
- info_ptr
->vrsave_size
;
9266 /* Align stack so vector save area is on a quadword boundary. */
9267 if (info_ptr
->altivec_size
!= 0)
9268 info_ptr
->altivec_padding_size
9269 = 16 - (-info_ptr
->vrsave_save_offset
% 16);
9271 info_ptr
->altivec_padding_size
= 0;
9273 info_ptr
->altivec_save_offset
9274 = info_ptr
->vrsave_save_offset
9275 - info_ptr
->altivec_padding_size
9276 - info_ptr
->altivec_size
;
9278 /* Adjust for AltiVec case. */
9279 info_ptr
->ehrd_offset
= info_ptr
->altivec_save_offset
- ehrd_size
;
9282 info_ptr
->ehrd_offset
= info_ptr
->gp_save_offset
- ehrd_size
;
9283 info_ptr
->cr_save_offset
= reg_size
; /* first word when 64-bit. */
9284 info_ptr
->lr_save_offset
= 2*reg_size
;
9288 info_ptr
->fp_save_offset
= - info_ptr
->fp_size
;
9289 info_ptr
->gp_save_offset
= info_ptr
->fp_save_offset
- info_ptr
->gp_size
;
9290 info_ptr
->cr_save_offset
= info_ptr
->gp_save_offset
- info_ptr
->cr_size
;
9294 /* Align stack so SPE GPR save area is aligned on a
9295 double-word boundary. */
9296 if (info_ptr
->spe_gp_size
!= 0)
9297 info_ptr
->spe_padding_size
9298 = 8 - (-info_ptr
->cr_save_offset
% 8);
9300 info_ptr
->spe_padding_size
= 0;
9302 info_ptr
->spe_gp_save_offset
9303 = info_ptr
->cr_save_offset
9304 - info_ptr
->spe_padding_size
9305 - info_ptr
->spe_gp_size
;
9307 /* Adjust for SPE case. */
9308 info_ptr
->toc_save_offset
9309 = info_ptr
->spe_gp_save_offset
- info_ptr
->toc_size
;
9311 else if (TARGET_ALTIVEC_ABI
)
9313 info_ptr
->vrsave_save_offset
9314 = info_ptr
->cr_save_offset
- info_ptr
->vrsave_size
;
9316 /* Align stack so vector save area is on a quadword boundary. */
9317 if (info_ptr
->altivec_size
!= 0)
9318 info_ptr
->altivec_padding_size
9319 = 16 - (-info_ptr
->vrsave_save_offset
% 16);
9321 info_ptr
->altivec_padding_size
= 0;
9323 info_ptr
->altivec_save_offset
9324 = info_ptr
->vrsave_save_offset
9325 - info_ptr
->altivec_padding_size
9326 - info_ptr
->altivec_size
;
9328 /* Adjust for AltiVec case. */
9329 info_ptr
->toc_save_offset
9330 = info_ptr
->altivec_save_offset
- info_ptr
->toc_size
;
9333 info_ptr
->toc_save_offset
= info_ptr
->cr_save_offset
- info_ptr
->toc_size
;
9334 info_ptr
->ehrd_offset
= info_ptr
->toc_save_offset
- ehrd_size
;
9335 info_ptr
->lr_save_offset
= reg_size
;
9339 info_ptr
->save_size
= RS6000_ALIGN (info_ptr
->fp_size
9341 + info_ptr
->altivec_size
9342 + info_ptr
->altivec_padding_size
9343 + info_ptr
->vrsave_size
9344 + info_ptr
->spe_gp_size
9345 + info_ptr
->spe_padding_size
9349 + info_ptr
->vrsave_size
9350 + info_ptr
->toc_size
,
9351 (TARGET_ALTIVEC_ABI
|| ABI_DARWIN
)
9354 total_raw_size
= (info_ptr
->vars_size
9355 + info_ptr
->parm_size
9356 + info_ptr
->save_size
9357 + info_ptr
->varargs_size
9358 + info_ptr
->fixed_size
);
9360 info_ptr
->total_size
=
9361 RS6000_ALIGN (total_raw_size
, ABI_STACK_BOUNDARY
/ BITS_PER_UNIT
);
9363 /* Determine if we need to allocate any stack frame:
9365 For AIX we need to push the stack if a frame pointer is needed
9366 (because the stack might be dynamically adjusted), if we are
9367 debugging, if we make calls, or if the sum of fp_save, gp_save,
9368 and local variables are more than the space needed to save all
9369 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
9370 + 18*8 = 288 (GPR13 reserved).
9372 For V.4 we don't have the stack cushion that AIX uses, but assume
9373 that the debugger can handle stackless frames. */
9375 if (info_ptr
->calls_p
)
9376 info_ptr
->push_p
= 1;
9378 else if (abi
== ABI_V4
)
9379 info_ptr
->push_p
= total_raw_size
> info_ptr
->fixed_size
;
9382 info_ptr
->push_p
= (frame_pointer_needed
9383 || (abi
!= ABI_DARWIN
&& write_symbols
!= NO_DEBUG
)
9384 || ((total_raw_size
- info_ptr
->fixed_size
)
9385 > (TARGET_32BIT
? 220 : 288)));
9387 /* Zero offsets if we're not saving those registers. */
9388 if (info_ptr
->fp_size
== 0)
9389 info_ptr
->fp_save_offset
= 0;
9391 if (info_ptr
->gp_size
== 0)
9392 info_ptr
->gp_save_offset
= 0;
9394 if (! TARGET_ALTIVEC_ABI
|| info_ptr
->altivec_size
== 0)
9395 info_ptr
->altivec_save_offset
= 0;
9397 if (! TARGET_ALTIVEC_ABI
|| info_ptr
->vrsave_mask
== 0)
9398 info_ptr
->vrsave_save_offset
= 0;
9400 if (! TARGET_SPE_ABI
|| info_ptr
->spe_gp_size
== 0)
9401 info_ptr
->spe_gp_save_offset
= 0;
9403 if (! info_ptr
->lr_save_p
)
9404 info_ptr
->lr_save_offset
= 0;
9406 if (! info_ptr
->cr_save_p
)
9407 info_ptr
->cr_save_offset
= 0;
9409 if (! info_ptr
->toc_save_p
)
9410 info_ptr
->toc_save_offset
= 0;
9416 debug_stack_info (info
)
9417 rs6000_stack_t
*info
;
9419 const char *abi_string
;
9422 info
= rs6000_stack_info ();
9424 fprintf (stderr
, "\nStack information for function %s:\n",
9425 ((current_function_decl
&& DECL_NAME (current_function_decl
))
9426 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl
))
9431 default: abi_string
= "Unknown"; break;
9432 case ABI_NONE
: abi_string
= "NONE"; break;
9434 case ABI_AIX_NODESC
: abi_string
= "AIX"; break;
9435 case ABI_DARWIN
: abi_string
= "Darwin"; break;
9436 case ABI_V4
: abi_string
= "V.4"; break;
9439 fprintf (stderr
, "\tABI = %5s\n", abi_string
);
9441 if (TARGET_ALTIVEC_ABI
)
9442 fprintf (stderr
, "\tALTIVEC ABI extensions enabled.\n");
9445 fprintf (stderr
, "\tSPE ABI extensions enabled.\n");
9447 if (info
->first_gp_reg_save
!= 32)
9448 fprintf (stderr
, "\tfirst_gp_reg_save = %5d\n", info
->first_gp_reg_save
);
9450 if (info
->first_fp_reg_save
!= 64)
9451 fprintf (stderr
, "\tfirst_fp_reg_save = %5d\n", info
->first_fp_reg_save
);
9453 if (info
->first_altivec_reg_save
<= LAST_ALTIVEC_REGNO
)
9454 fprintf (stderr
, "\tfirst_altivec_reg_save = %5d\n",
9455 info
->first_altivec_reg_save
);
9457 if (info
->lr_save_p
)
9458 fprintf (stderr
, "\tlr_save_p = %5d\n", info
->lr_save_p
);
9460 if (info
->cr_save_p
)
9461 fprintf (stderr
, "\tcr_save_p = %5d\n", info
->cr_save_p
);
9463 if (info
->toc_save_p
)
9464 fprintf (stderr
, "\ttoc_save_p = %5d\n", info
->toc_save_p
);
9466 if (info
->vrsave_mask
)
9467 fprintf (stderr
, "\tvrsave_mask = 0x%x\n", info
->vrsave_mask
);
9470 fprintf (stderr
, "\tpush_p = %5d\n", info
->push_p
);
9473 fprintf (stderr
, "\tcalls_p = %5d\n", info
->calls_p
);
9475 if (info
->gp_save_offset
)
9476 fprintf (stderr
, "\tgp_save_offset = %5d\n", info
->gp_save_offset
);
9478 if (info
->fp_save_offset
)
9479 fprintf (stderr
, "\tfp_save_offset = %5d\n", info
->fp_save_offset
);
9481 if (info
->altivec_save_offset
)
9482 fprintf (stderr
, "\taltivec_save_offset = %5d\n",
9483 info
->altivec_save_offset
);
9485 if (info
->spe_gp_save_offset
)
9486 fprintf (stderr
, "\tspe_gp_save_offset = %5d\n",
9487 info
->spe_gp_save_offset
);
9489 if (info
->vrsave_save_offset
)
9490 fprintf (stderr
, "\tvrsave_save_offset = %5d\n",
9491 info
->vrsave_save_offset
);
9493 if (info
->lr_save_offset
)
9494 fprintf (stderr
, "\tlr_save_offset = %5d\n", info
->lr_save_offset
);
9496 if (info
->cr_save_offset
)
9497 fprintf (stderr
, "\tcr_save_offset = %5d\n", info
->cr_save_offset
);
9499 if (info
->toc_save_offset
)
9500 fprintf (stderr
, "\ttoc_save_offset = %5d\n", info
->toc_save_offset
);
9502 if (info
->varargs_save_offset
)
9503 fprintf (stderr
, "\tvarargs_save_offset = %5d\n", info
->varargs_save_offset
);
9505 if (info
->total_size
)
9506 fprintf (stderr
, "\ttotal_size = %5d\n", info
->total_size
);
9508 if (info
->varargs_size
)
9509 fprintf (stderr
, "\tvarargs_size = %5d\n", info
->varargs_size
);
9511 if (info
->vars_size
)
9512 fprintf (stderr
, "\tvars_size = %5d\n", info
->vars_size
);
9514 if (info
->parm_size
)
9515 fprintf (stderr
, "\tparm_size = %5d\n", info
->parm_size
);
9517 if (info
->fixed_size
)
9518 fprintf (stderr
, "\tfixed_size = %5d\n", info
->fixed_size
);
9521 fprintf (stderr
, "\tgp_size = %5d\n", info
->gp_size
);
9523 if (info
->spe_gp_size
)
9524 fprintf (stderr
, "\tspe_gp_size = %5d\n", info
->spe_gp_size
);
9527 fprintf (stderr
, "\tfp_size = %5d\n", info
->fp_size
);
9529 if (info
->altivec_size
)
9530 fprintf (stderr
, "\taltivec_size = %5d\n", info
->altivec_size
);
9532 if (info
->vrsave_size
)
9533 fprintf (stderr
, "\tvrsave_size = %5d\n", info
->vrsave_size
);
9535 if (info
->altivec_padding_size
)
9536 fprintf (stderr
, "\taltivec_padding_size= %5d\n",
9537 info
->altivec_padding_size
);
9539 if (info
->spe_padding_size
)
9540 fprintf (stderr
, "\tspe_padding_size = %5d\n",
9541 info
->spe_padding_size
);
9544 fprintf (stderr
, "\tlr_size = %5d\n", info
->lr_size
);
9547 fprintf (stderr
, "\tcr_size = %5d\n", info
->cr_size
);
9550 fprintf (stderr
, "\ttoc_size = %5d\n", info
->toc_size
);
9552 if (info
->save_size
)
9553 fprintf (stderr
, "\tsave_size = %5d\n", info
->save_size
);
9555 if (info
->reg_size
!= 4)
9556 fprintf (stderr
, "\treg_size = %5d\n", info
->reg_size
);
9558 fprintf (stderr
, "\n");
9562 rs6000_return_addr (count
, frame
)
9566 /* Currently we don't optimize very well between prolog and body
9567 code and for PIC code the code can be actually quite bad, so
9568 don't try to be too clever here. */
9569 if (count
!= 0 || flag_pic
!= 0)
9571 cfun
->machine
->ra_needs_full_frame
= 1;
9578 plus_constant (copy_to_reg
9579 (gen_rtx_MEM (Pmode
,
9580 memory_address (Pmode
, frame
))),
9581 RETURN_ADDRESS_OFFSET
)));
9584 return get_hard_reg_initial_val (Pmode
, LINK_REGISTER_REGNUM
);
9587 /* Say whether a function is a candidate for sibcall handling or not.
9588 We do not allow indirect calls to be optimized into sibling calls.
9589 Also, we can't do it if there are any vector parameters; there's
9590 nowhere to put the VRsave code so it works; note that functions with
9591 vector parameters are required to have a prototype, so the argument
9592 type info must be available here. (The tail recursion case can work
9593 with vector parameters, but there's no way to distinguish here.) */
9595 rs6000_function_ok_for_sibcall (decl
, exp
)
9597 tree exp ATTRIBUTE_UNUSED
;
9602 if (TARGET_ALTIVEC_VRSAVE
)
9604 for (type
= TYPE_ARG_TYPES (TREE_TYPE (decl
));
9605 type
; type
= TREE_CHAIN (type
))
9607 if (TREE_CODE (TREE_VALUE (type
)) == VECTOR_TYPE
)
9611 if (DEFAULT_ABI
== ABI_DARWIN
9612 || (*targetm
.binds_local_p
) (decl
))
9614 tree attr_list
= TYPE_ATTRIBUTES (TREE_TYPE (decl
));
9616 if (!lookup_attribute ("longcall", attr_list
)
9617 || lookup_attribute ("shortcall", attr_list
))
9625 rs6000_ra_ever_killed ()
9631 /* Irritatingly, there are two kinds of thunks -- those created with
9632 TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
9633 through the regular part of the compiler. This is a very hacky
9634 way to tell them apart. */
9635 if (current_function_is_thunk
&& !no_new_pseudos
)
9638 /* regs_ever_live has LR marked as used if any sibcalls are present,
9639 but this should not force saving and restoring in the
9640 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
9641 clobbers LR, so that is inappropriate. */
9643 /* Also, the prologue can generate a store into LR that
9644 doesn't really count, like this:
9647 bcl to set PIC register
9651 When we're called from the epilogue, we need to avoid counting
9654 push_topmost_sequence ();
9656 pop_topmost_sequence ();
9657 reg
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
9659 for (insn
= NEXT_INSN (top
); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
9663 if (FIND_REG_INC_NOTE (insn
, reg
))
9665 else if (GET_CODE (insn
) == CALL_INSN
9666 && !SIBLING_CALL_P (insn
))
9668 else if (set_of (reg
, insn
) != NULL_RTX
9669 && !prologue_epilogue_contains (insn
))
9676 /* Add a REG_MAYBE_DEAD note to the insn. */
9678 rs6000_maybe_dead (insn
)
9681 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
,
9686 /* Emit instructions needed to load the TOC register.
9687 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9688 a constant pool; or for SVR4 -fpic. */
9691 rs6000_emit_load_toc_table (fromprolog
)
9695 dest
= gen_rtx_REG (Pmode
, RS6000_PIC_OFFSET_TABLE_REGNUM
);
9697 if (TARGET_ELF
&& DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1)
9699 rtx temp
= (fromprolog
9700 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
9701 : gen_reg_rtx (Pmode
));
9702 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp
)));
9703 rs6000_maybe_dead (emit_move_insn (dest
, temp
));
9705 else if (TARGET_ELF
&& DEFAULT_ABI
!= ABI_AIX
&& flag_pic
== 2)
9708 rtx tempLR
= (fromprolog
9709 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
9710 : gen_reg_rtx (Pmode
));
9711 rtx temp0
= (fromprolog
9712 ? gen_rtx_REG (Pmode
, 0)
9713 : gen_reg_rtx (Pmode
));
9716 /* possibly create the toc section */
9717 if (! toc_initialized
)
9720 function_section (current_function_decl
);
9727 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCF", rs6000_pic_labelno
);
9728 symF
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9730 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCL", rs6000_pic_labelno
);
9731 symL
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9733 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR
,
9735 rs6000_maybe_dead (emit_move_insn (dest
, tempLR
));
9736 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0
, dest
,
9743 static int reload_toc_labelno
= 0;
9745 tocsym
= gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
);
9747 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCG", reload_toc_labelno
++);
9748 symF
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9750 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR
,
9753 rs6000_maybe_dead (emit_move_insn (dest
, tempLR
));
9754 rs6000_maybe_dead (emit_move_insn (temp0
,
9755 gen_rtx_MEM (Pmode
, dest
)));
9757 rs6000_maybe_dead (emit_insn (gen_addsi3 (dest
, temp0
, dest
)));
9759 else if (TARGET_ELF
&& !TARGET_AIX
&& flag_pic
== 0 && TARGET_MINIMAL_TOC
)
9761 /* This is for AIX code running in non-PIC ELF32. */
9764 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCTOC", 1);
9765 realsym
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9767 rs6000_maybe_dead (emit_insn (gen_elf_high (dest
, realsym
)));
9768 rs6000_maybe_dead (emit_insn (gen_elf_low (dest
, dest
, realsym
)));
9770 else if (DEFAULT_ABI
== ABI_AIX
)
9773 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest
)));
9775 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest
)));
/* Return (lazily allocating on first use) the alias set used for TOC
   references, so TOC loads do not conflict with ordinary memory.  */
int
get_TOC_alias_set ()
{
  static int set = -1;
  if (set == -1)
    set = new_alias_set ();
  return set;
}
/* This returns nonzero if the current function uses the TOC.  This is
   determined by the presence of (unspec ... 7), which is generated by
   the various load_toc_* patterns.  */
9799 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
9802 rtx pat
= PATTERN (insn
);
9805 if (GET_CODE (pat
) == PARALLEL
)
9806 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
9807 if (GET_CODE (XVECEXP (PATTERN (insn
), 0, i
)) == UNSPEC
9808 && XINT (XVECEXP (PATTERN (insn
), 0, i
), 1) == 7)
9815 create_TOC_reference (symbol
)
9818 return gen_rtx_PLUS (Pmode
,
9819 gen_rtx_REG (Pmode
, TOC_REGISTER
),
9820 gen_rtx_CONST (Pmode
,
9821 gen_rtx_MINUS (Pmode
, symbol
,
9822 gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
))));
9826 /* __throw will restore its own return address to be the same as the
9827 return address of the function that the throw is being made to.
9828 This is unfortunate, because we want to check the original
9829 return address to see if we need to restore the TOC.
9830 So we have to squirrel it away here.
9831 This is used only in compiling __throw and __rethrow.
9833 Most of this code should be removed by CSE. */
9834 static rtx insn_after_throw
;
9836 /* This does the saving... */
9838 rs6000_aix_emit_builtin_unwind_init ()
9841 rtx stack_top
= gen_reg_rtx (Pmode
);
9842 rtx opcode_addr
= gen_reg_rtx (Pmode
);
9844 insn_after_throw
= gen_reg_rtx (SImode
);
9846 mem
= gen_rtx_MEM (Pmode
, hard_frame_pointer_rtx
);
9847 emit_move_insn (stack_top
, mem
);
9849 mem
= gen_rtx_MEM (Pmode
,
9850 gen_rtx_PLUS (Pmode
, stack_top
,
9851 GEN_INT (2 * GET_MODE_SIZE (Pmode
))));
9852 emit_move_insn (opcode_addr
, mem
);
9853 emit_move_insn (insn_after_throw
, gen_rtx_MEM (SImode
, opcode_addr
));
9856 /* Emit insns to _restore_ the TOC register, at runtime (specifically
9857 in _eh.o). Only used on AIX.
9859 The idea is that on AIX, function calls look like this:
9860 bl somefunction-trampoline
9864 somefunction-trampoline:
9866 ... load function address in the count register ...
9868 or like this, if the linker determines that this is not a cross-module call
9869 and so the TOC need not be restored:
9872 or like this, if the compiler could determine that this is not a
9875 now, the tricky bit here is that register 2 is saved and restored
9876 by the _linker_, so we can't readily generate debugging information
9877 for it. So we need to go back up the call chain looking at the
9878 insns at return addresses to see which calls saved the TOC register
9879 and so see where it gets restored from.
9881 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
9882 just before the actual epilogue.
9884 On the bright side, this incurs no space or time overhead unless an
9885 exception is thrown, except for the extra code in libgcc.a.
9887 The parameter STACKSIZE is a register containing (at runtime)
9888 the amount to be popped off the stack in addition to the stack frame
9889 of this routine (which will be __throw or __rethrow, and so is
9890 guaranteed to have a stack frame). */
9893 rs6000_emit_eh_toc_restore (stacksize
)
9897 rtx bottom_of_stack
= gen_reg_rtx (Pmode
);
9898 rtx tocompare
= gen_reg_rtx (SImode
);
9899 rtx opcode
= gen_reg_rtx (SImode
);
9900 rtx opcode_addr
= gen_reg_rtx (Pmode
);
9902 rtx loop_start
= gen_label_rtx ();
9903 rtx no_toc_restore_needed
= gen_label_rtx ();
9904 rtx loop_exit
= gen_label_rtx ();
9906 mem
= gen_rtx_MEM (Pmode
, hard_frame_pointer_rtx
);
9907 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
9908 emit_move_insn (bottom_of_stack
, mem
);
9910 top_of_stack
= expand_binop (Pmode
, add_optab
,
9911 bottom_of_stack
, stacksize
,
9912 NULL_RTX
, 1, OPTAB_WIDEN
);
9914 emit_move_insn (tocompare
, gen_int_mode (TARGET_32BIT
? 0x80410014
9915 : 0xE8410028, SImode
));
9917 if (insn_after_throw
== NULL_RTX
)
9919 emit_move_insn (opcode
, insn_after_throw
);
9921 emit_note (NULL
, NOTE_INSN_LOOP_BEG
);
9922 emit_label (loop_start
);
9924 do_compare_rtx_and_jump (opcode
, tocompare
, NE
, 1,
9925 SImode
, NULL_RTX
, NULL_RTX
,
9926 no_toc_restore_needed
);
9928 mem
= gen_rtx_MEM (Pmode
,
9929 gen_rtx_PLUS (Pmode
, bottom_of_stack
,
9930 GEN_INT (5 * GET_MODE_SIZE (Pmode
))));
9931 emit_move_insn (gen_rtx_REG (Pmode
, 2), mem
);
9933 emit_label (no_toc_restore_needed
);
9934 do_compare_rtx_and_jump (top_of_stack
, bottom_of_stack
, EQ
, 1,
9935 Pmode
, NULL_RTX
, NULL_RTX
,
9938 mem
= gen_rtx_MEM (Pmode
, bottom_of_stack
);
9939 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
9940 emit_move_insn (bottom_of_stack
, mem
);
9942 mem
= gen_rtx_MEM (Pmode
,
9943 gen_rtx_PLUS (Pmode
, bottom_of_stack
,
9944 GEN_INT (2 * GET_MODE_SIZE (Pmode
))));
9945 emit_move_insn (opcode_addr
, mem
);
9946 emit_move_insn (opcode
, gen_rtx_MEM (SImode
, opcode_addr
));
9948 emit_note (NULL
, NOTE_INSN_LOOP_CONT
);
9949 emit_jump (loop_start
);
9950 emit_note (NULL
, NOTE_INSN_LOOP_END
);
9951 emit_label (loop_exit
);
9953 #endif /* TARGET_AIX */
9955 /* This ties together stack memory (MEM with an alias set of
9956 rs6000_sr_alias_set) and the change to the stack pointer. */
9959 rs6000_emit_stack_tie ()
9961 rtx mem
= gen_rtx_MEM (BLKmode
, gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
));
9963 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
9964 emit_insn (gen_stack_tie (mem
));
9967 /* Emit the correct code for allocating stack space, as insns.
9968 If COPY_R12, make sure a copy of the old frame is left in r12.
9969 The generated code may use hard register 0 as a temporary. */
9972 rs6000_emit_allocate_stack (size
, copy_r12
)
9977 rtx stack_reg
= gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
9978 rtx tmp_reg
= gen_rtx_REG (Pmode
, 0);
9979 rtx todec
= GEN_INT (-size
);
9981 if (current_function_limit_stack
)
9983 if (REG_P (stack_limit_rtx
)
9984 && REGNO (stack_limit_rtx
) > 1
9985 && REGNO (stack_limit_rtx
) <= 31)
9987 emit_insn (TARGET_32BIT
9988 ? gen_addsi3 (tmp_reg
,
9991 : gen_adddi3 (tmp_reg
,
9995 emit_insn (gen_cond_trap (LTU
, stack_reg
, tmp_reg
,
9998 else if (GET_CODE (stack_limit_rtx
) == SYMBOL_REF
10000 && DEFAULT_ABI
== ABI_V4
)
10002 rtx toload
= gen_rtx_CONST (VOIDmode
,
10003 gen_rtx_PLUS (Pmode
,
10007 emit_insn (gen_elf_high (tmp_reg
, toload
));
10008 emit_insn (gen_elf_low (tmp_reg
, tmp_reg
, toload
));
10009 emit_insn (gen_cond_trap (LTU
, stack_reg
, tmp_reg
,
10013 warning ("stack limit expression is not supported");
10016 if (copy_r12
|| ! TARGET_UPDATE
)
10017 emit_move_insn (gen_rtx_REG (Pmode
, 12), stack_reg
);
10023 /* Need a note here so that try_split doesn't get confused. */
10024 if (get_last_insn() == NULL_RTX
)
10025 emit_note (0, NOTE_INSN_DELETED
);
10026 insn
= emit_move_insn (tmp_reg
, todec
);
10027 try_split (PATTERN (insn
), insn
, 0);
10031 insn
= emit_insn (TARGET_32BIT
10032 ? gen_movsi_update (stack_reg
, stack_reg
,
10034 : gen_movdi_update (stack_reg
, stack_reg
,
10035 todec
, stack_reg
));
10039 insn
= emit_insn (TARGET_32BIT
10040 ? gen_addsi3 (stack_reg
, stack_reg
, todec
)
10041 : gen_adddi3 (stack_reg
, stack_reg
, todec
));
10042 emit_move_insn (gen_rtx_MEM (Pmode
, stack_reg
),
10043 gen_rtx_REG (Pmode
, 12));
10046 RTX_FRAME_RELATED_P (insn
) = 1;
10048 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
10049 gen_rtx_SET (VOIDmode
, stack_reg
,
10050 gen_rtx_PLUS (Pmode
, stack_reg
,
10055 /* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
10058 (mem (plus (blah) (regXX)))
10062 (mem (plus (blah) (const VALUE_OF_REGXX))). */
10065 altivec_frame_fixup (insn
, reg
, val
)
10071 real
= copy_rtx (PATTERN (insn
));
10073 real
= replace_rtx (real
, reg
, GEN_INT (val
));
10075 RTX_FRAME_RELATED_P (insn
) = 1;
10076 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
10081 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
10082 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
10083 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
10084 deduce these equivalences by itself so it wasn't necessary to hold
10085 its hand so much. */
10088 rs6000_frame_related (insn
, reg
, val
, reg2
, rreg
)
10097 /* copy_rtx will not make unique copies of registers, so we need to
10098 ensure we don't have unwanted sharing here. */
10100 reg
= gen_raw_REG (GET_MODE (reg
), REGNO (reg
));
10103 reg
= gen_raw_REG (GET_MODE (reg
), REGNO (reg
));
10105 real
= copy_rtx (PATTERN (insn
));
10107 if (reg2
!= NULL_RTX
)
10108 real
= replace_rtx (real
, reg2
, rreg
);
10110 real
= replace_rtx (real
, reg
,
10111 gen_rtx_PLUS (Pmode
, gen_rtx_REG (Pmode
,
10112 STACK_POINTER_REGNUM
),
10115 /* We expect that 'real' is either a SET or a PARALLEL containing
10116 SETs (and possibly other stuff). In a PARALLEL, all the SETs
10117 are important so they all have to be marked RTX_FRAME_RELATED_P. */
10119 if (GET_CODE (real
) == SET
)
10123 temp
= simplify_rtx (SET_SRC (set
));
10125 SET_SRC (set
) = temp
;
10126 temp
= simplify_rtx (SET_DEST (set
));
10128 SET_DEST (set
) = temp
;
10129 if (GET_CODE (SET_DEST (set
)) == MEM
)
10131 temp
= simplify_rtx (XEXP (SET_DEST (set
), 0));
10133 XEXP (SET_DEST (set
), 0) = temp
;
10136 else if (GET_CODE (real
) == PARALLEL
)
10139 for (i
= 0; i
< XVECLEN (real
, 0); i
++)
10140 if (GET_CODE (XVECEXP (real
, 0, i
)) == SET
)
10142 rtx set
= XVECEXP (real
, 0, i
);
10144 temp
= simplify_rtx (SET_SRC (set
));
10146 SET_SRC (set
) = temp
;
10147 temp
= simplify_rtx (SET_DEST (set
));
10149 SET_DEST (set
) = temp
;
10150 if (GET_CODE (SET_DEST (set
)) == MEM
)
10152 temp
= simplify_rtx (XEXP (SET_DEST (set
), 0));
10154 XEXP (SET_DEST (set
), 0) = temp
;
10156 RTX_FRAME_RELATED_P (set
) = 1;
10162 RTX_FRAME_RELATED_P (insn
) = 1;
10163 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
10168 /* Returns an insn that has a vrsave set operation with the
10169 appropriate CLOBBERs. */
10172 generate_set_vrsave (reg
, info
, epiloguep
)
10174 rs6000_stack_t
*info
;
10178 rtx insn
, clobs
[TOTAL_ALTIVEC_REGS
+ 1];
10179 rtx vrsave
= gen_rtx_REG (SImode
, VRSAVE_REGNO
);
10182 = gen_rtx_SET (VOIDmode
,
10184 gen_rtx_UNSPEC_VOLATILE (SImode
,
10185 gen_rtvec (2, reg
, vrsave
),
10190 /* We need to clobber the registers in the mask so the scheduler
10191 does not move sets to VRSAVE before sets of AltiVec registers.
10193 However, if the function receives nonlocal gotos, reload will set
10194 all call saved registers live. We will end up with:
10196 (set (reg 999) (mem))
10197 (parallel [ (set (reg vrsave) (unspec blah))
10198 (clobber (reg 999))])
10200 The clobber will cause the store into reg 999 to be dead, and
10201 flow will attempt to delete an epilogue insn. In this case, we
10202 need an unspec use/set of the register. */
10204 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
10205 if (info
->vrsave_mask
!= 0 && ALTIVEC_REG_BIT (i
) != 0)
10207 if (!epiloguep
|| call_used_regs
[i
])
10208 clobs
[nclobs
++] = gen_rtx_CLOBBER (VOIDmode
,
10209 gen_rtx_REG (V4SImode
, i
));
10212 rtx reg
= gen_rtx_REG (V4SImode
, i
);
10215 = gen_rtx_SET (VOIDmode
,
10217 gen_rtx_UNSPEC (V4SImode
,
10218 gen_rtvec (1, reg
), 27));
10222 insn
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (nclobs
));
10224 for (i
= 0; i
< nclobs
; ++i
)
10225 XVECEXP (insn
, 0, i
) = clobs
[i
];
10230 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
10231 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
10234 emit_frame_save (frame_reg
, frame_ptr
, mode
, regno
, offset
, total_size
)
10237 enum machine_mode mode
;
10238 unsigned int regno
;
10242 rtx reg
, offset_rtx
, insn
, mem
, addr
, int_rtx
;
10243 rtx replacea
, replaceb
;
10245 int_rtx
= GEN_INT (offset
);
10247 /* Some cases that need register indexed addressing. */
10248 if ((TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
10250 && SPE_VECTOR_MODE (mode
)
10251 && !SPE_CONST_OFFSET_OK (offset
)))
10253 /* Whomever calls us must make sure r11 is available in the
10254 flow path of instructions in the prologue. */
10255 offset_rtx
= gen_rtx_REG (Pmode
, 11);
10256 emit_move_insn (offset_rtx
, int_rtx
);
10258 replacea
= offset_rtx
;
10259 replaceb
= int_rtx
;
10263 offset_rtx
= int_rtx
;
10264 replacea
= NULL_RTX
;
10265 replaceb
= NULL_RTX
;
10268 reg
= gen_rtx_REG (mode
, regno
);
10269 addr
= gen_rtx_PLUS (Pmode
, frame_reg
, offset_rtx
);
10270 mem
= gen_rtx_MEM (mode
, addr
);
10271 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10273 insn
= emit_move_insn (mem
, reg
);
10275 rs6000_frame_related (insn
, frame_ptr
, total_size
, replacea
, replaceb
);
10278 /* Emit an offset memory reference suitable for a frame store, while
10279 converting to a valid addressing mode. */
10282 gen_frame_mem_offset (mode
, reg
, offset
)
10283 enum machine_mode mode
;
10287 rtx int_rtx
, offset_rtx
;
10289 int_rtx
= GEN_INT (offset
);
10291 if (TARGET_SPE_ABI
&& SPE_VECTOR_MODE (mode
))
10293 offset_rtx
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
10294 emit_move_insn (offset_rtx
, int_rtx
);
10297 offset_rtx
= int_rtx
;
10299 return gen_rtx_MEM (mode
, gen_rtx_PLUS (Pmode
, reg
, offset_rtx
));
10302 /* Emit function prologue as insns. */
10305 rs6000_emit_prologue ()
10307 rs6000_stack_t
*info
= rs6000_stack_info ();
10308 enum machine_mode reg_mode
= TARGET_POWERPC64
? DImode
: SImode
;
10309 int reg_size
= TARGET_POWERPC64
? 8 : 4;
10310 rtx sp_reg_rtx
= gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
10311 rtx frame_ptr_rtx
= gen_rtx_REG (Pmode
, 12);
10312 rtx frame_reg_rtx
= sp_reg_rtx
;
10313 rtx cr_save_rtx
= NULL
;
10315 int saving_FPRs_inline
;
10316 int using_store_multiple
;
10317 HOST_WIDE_INT sp_offset
= 0;
10319 if (TARGET_SPE_ABI
)
10321 reg_mode
= V2SImode
;
10325 using_store_multiple
= (TARGET_MULTIPLE
&& ! TARGET_POWERPC64
10327 && info
->first_gp_reg_save
< 31);
10328 saving_FPRs_inline
= (info
->first_fp_reg_save
== 64
10329 || FP_SAVE_INLINE (info
->first_fp_reg_save
));
10331 /* For V.4, update stack before we do any saving and set back pointer. */
10332 if (info
->push_p
&& DEFAULT_ABI
== ABI_V4
)
10334 if (info
->total_size
< 32767)
10335 sp_offset
= info
->total_size
;
10337 frame_reg_rtx
= frame_ptr_rtx
;
10338 rs6000_emit_allocate_stack (info
->total_size
,
10339 (frame_reg_rtx
!= sp_reg_rtx
10340 && (info
->cr_save_p
10342 || info
->first_fp_reg_save
< 64
10343 || info
->first_gp_reg_save
< 32
10345 if (frame_reg_rtx
!= sp_reg_rtx
)
10346 rs6000_emit_stack_tie ();
10349 /* Save AltiVec registers if needed. */
10350 if (TARGET_ALTIVEC_ABI
&& info
->altivec_size
!= 0)
10354 /* There should be a non inline version of this, for when we
10355 are saving lots of vector registers. */
10356 for (i
= info
->first_altivec_reg_save
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
10357 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
10359 rtx areg
, savereg
, mem
;
10362 offset
= info
->altivec_save_offset
+ sp_offset
10363 + 16 * (i
- info
->first_altivec_reg_save
);
10365 savereg
= gen_rtx_REG (V4SImode
, i
);
10367 areg
= gen_rtx_REG (Pmode
, 0);
10368 emit_move_insn (areg
, GEN_INT (offset
));
10370 /* AltiVec addressing mode is [reg+reg]. */
10371 mem
= gen_rtx_MEM (V4SImode
,
10372 gen_rtx_PLUS (Pmode
, frame_reg_rtx
, areg
));
10374 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10376 insn
= emit_move_insn (mem
, savereg
);
10378 altivec_frame_fixup (insn
, areg
, offset
);
10382 /* VRSAVE is a bit vector representing which AltiVec registers
10383 are used. The OS uses this to determine which vector
10384 registers to save on a context switch. We need to save
10385 VRSAVE on the stack frame, add whatever AltiVec registers we
10386 used in this function, and do the corresponding magic in the
10389 if (TARGET_ALTIVEC
&& info
->vrsave_mask
!= 0)
10391 rtx reg
, mem
, vrsave
;
10394 /* Get VRSAVE onto a GPR. */
10395 reg
= gen_rtx_REG (SImode
, 12);
10396 vrsave
= gen_rtx_REG (SImode
, VRSAVE_REGNO
);
10398 emit_insn (gen_get_vrsave_internal (reg
));
10400 emit_insn (gen_rtx_SET (VOIDmode
, reg
, vrsave
));
10403 offset
= info
->vrsave_save_offset
+ sp_offset
;
10405 = gen_rtx_MEM (SImode
,
10406 gen_rtx_PLUS (Pmode
, frame_reg_rtx
, GEN_INT (offset
)));
10407 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10408 insn
= emit_move_insn (mem
, reg
);
10410 /* Include the registers in the mask. */
10411 emit_insn (gen_iorsi3 (reg
, reg
, GEN_INT ((int) info
->vrsave_mask
)));
10413 insn
= emit_insn (generate_set_vrsave (reg
, info
, 0));
10416 /* If we use the link register, get it into r0. */
10417 if (info
->lr_save_p
)
10418 emit_move_insn (gen_rtx_REG (Pmode
, 0),
10419 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
10421 /* If we need to save CR, put it into r12. */
10422 if (info
->cr_save_p
&& frame_reg_rtx
!= frame_ptr_rtx
)
10424 cr_save_rtx
= gen_rtx_REG (SImode
, 12);
10425 emit_insn (gen_movesi_from_cr (cr_save_rtx
));
10428 /* Do any required saving of fpr's. If only one or two to save, do
10429 it ourselves. Otherwise, call function. */
10430 if (saving_FPRs_inline
)
10433 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
10434 if ((regs_ever_live
[info
->first_fp_reg_save
+i
]
10435 && ! call_used_regs
[info
->first_fp_reg_save
+i
]))
10436 emit_frame_save (frame_reg_rtx
, frame_ptr_rtx
, DFmode
,
10437 info
->first_fp_reg_save
+ i
,
10438 info
->fp_save_offset
+ sp_offset
+ 8 * i
,
10441 else if (info
->first_fp_reg_save
!= 64)
10445 const char *alloc_rname
;
10447 p
= rtvec_alloc (2 + 64 - info
->first_fp_reg_save
);
10449 RTVEC_ELT (p
, 0) = gen_rtx_CLOBBER (VOIDmode
,
10450 gen_rtx_REG (Pmode
,
10451 LINK_REGISTER_REGNUM
));
10452 sprintf (rname
, "%s%d%s", SAVE_FP_PREFIX
,
10453 info
->first_fp_reg_save
- 32, SAVE_FP_SUFFIX
);
10454 alloc_rname
= ggc_strdup (rname
);
10455 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
,
10456 gen_rtx_SYMBOL_REF (Pmode
,
10458 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
10460 rtx addr
, reg
, mem
;
10461 reg
= gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
);
10462 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10463 GEN_INT (info
->fp_save_offset
10464 + sp_offset
+ 8*i
));
10465 mem
= gen_rtx_MEM (DFmode
, addr
);
10466 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10468 RTVEC_ELT (p
, i
+ 2) = gen_rtx_SET (VOIDmode
, mem
, reg
);
10470 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
10471 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10472 NULL_RTX
, NULL_RTX
);
10475 /* Save GPRs. This is done as a PARALLEL if we are using
10476 the store-multiple instructions. */
10477 if (using_store_multiple
)
10481 p
= rtvec_alloc (32 - info
->first_gp_reg_save
);
10482 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
10484 rtx addr
, reg
, mem
;
10485 reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
10486 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10487 GEN_INT (info
->gp_save_offset
10490 mem
= gen_rtx_MEM (reg_mode
, addr
);
10491 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10493 RTVEC_ELT (p
, i
) = gen_rtx_SET (VOIDmode
, mem
, reg
);
10495 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
10496 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10497 NULL_RTX
, NULL_RTX
);
10502 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
10503 if ((regs_ever_live
[info
->first_gp_reg_save
+i
]
10504 && ! call_used_regs
[info
->first_gp_reg_save
+i
])
10505 || (i
+info
->first_gp_reg_save
== RS6000_PIC_OFFSET_TABLE_REGNUM
10506 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1)
10507 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))))
10509 rtx addr
, reg
, mem
;
10510 reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
10512 if (TARGET_SPE_ABI
)
10514 int offset
= info
->spe_gp_save_offset
+ sp_offset
+ 8 * i
;
10517 if (!SPE_CONST_OFFSET_OK (offset
))
10519 b
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
10520 emit_move_insn (b
, GEN_INT (offset
));
10523 b
= GEN_INT (offset
);
10525 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, b
);
10526 mem
= gen_rtx_MEM (V2SImode
, addr
);
10527 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10528 insn
= emit_move_insn (mem
, reg
);
10530 if (GET_CODE (b
) == CONST_INT
)
10531 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10532 NULL_RTX
, NULL_RTX
);
10534 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10535 b
, GEN_INT (offset
));
10539 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10540 GEN_INT (info
->gp_save_offset
10543 mem
= gen_rtx_MEM (reg_mode
, addr
);
10544 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10546 insn
= emit_move_insn (mem
, reg
);
10547 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10548 NULL_RTX
, NULL_RTX
);
10553 /* ??? There's no need to emit actual instructions here, but it's the
10554 easiest way to get the frame unwind information emitted. */
10555 if (current_function_calls_eh_return
)
10557 unsigned int i
, regno
;
10561 regno
= EH_RETURN_DATA_REGNO (i
);
10562 if (regno
== INVALID_REGNUM
)
10565 emit_frame_save (frame_reg_rtx
, frame_ptr_rtx
, reg_mode
, regno
,
10566 info
->ehrd_offset
+ sp_offset
10567 + reg_size
* (int) i
,
10572 /* Save lr if we used it. */
10573 if (info
->lr_save_p
)
10575 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10576 GEN_INT (info
->lr_save_offset
+ sp_offset
));
10577 rtx reg
= gen_rtx_REG (Pmode
, 0);
10578 rtx mem
= gen_rtx_MEM (Pmode
, addr
);
10579 /* This should not be of rs6000_sr_alias_set, because of
10580 __builtin_return_address. */
10582 insn
= emit_move_insn (mem
, reg
);
10583 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10584 reg
, gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
10587 /* Save CR if we use any that must be preserved. */
10588 if (info
->cr_save_p
)
10590 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10591 GEN_INT (info
->cr_save_offset
+ sp_offset
));
10592 rtx mem
= gen_rtx_MEM (SImode
, addr
);
10594 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10596 /* If r12 was used to hold the original sp, copy cr into r0 now
10598 if (REGNO (frame_reg_rtx
) == 12)
10600 cr_save_rtx
= gen_rtx_REG (SImode
, 0);
10601 emit_insn (gen_movesi_from_cr (cr_save_rtx
));
10603 insn
= emit_move_insn (mem
, cr_save_rtx
);
10605 /* Now, there's no way that dwarf2out_frame_debug_expr is going
10606 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
10607 OK. All we have to do is specify that _one_ condition code
10608 register is saved in this stack slot. The thrower's epilogue
10609 will then restore all the call-saved registers.
10610 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
10611 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10612 cr_save_rtx
, gen_rtx_REG (SImode
, CR2_REGNO
));
10615 /* Update stack and set back pointer unless this is V.4,
10616 for which it was done previously. */
10617 if (info
->push_p
&& DEFAULT_ABI
!= ABI_V4
)
10618 rs6000_emit_allocate_stack (info
->total_size
, FALSE
);
10620 /* Set frame pointer, if needed. */
10621 if (frame_pointer_needed
)
10623 insn
= emit_move_insn (gen_rtx_REG (Pmode
, FRAME_POINTER_REGNUM
),
10625 RTX_FRAME_RELATED_P (insn
) = 1;
10628 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
10629 if ((TARGET_TOC
&& TARGET_MINIMAL_TOC
&& get_pool_size () != 0)
10630 || (DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1
10631 && regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
]))
10633 /* If emit_load_toc_table will use the link register, we need to save
10634 it. We use R11 for this purpose because emit_load_toc_table
10635 can use register 0. This allows us to use a plain 'blr' to return
10636 from the procedure more often. */
10637 int save_LR_around_toc_setup
= (TARGET_ELF
&& flag_pic
!= 0
10638 && ! info
->lr_save_p
10639 && EXIT_BLOCK_PTR
->pred
!= NULL
);
10640 if (save_LR_around_toc_setup
)
10641 emit_move_insn (gen_rtx_REG (Pmode
, 11),
10642 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
10644 rs6000_emit_load_toc_table (TRUE
);
10646 if (save_LR_around_toc_setup
)
10647 emit_move_insn (gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
),
10648 gen_rtx_REG (Pmode
, 11));
10652 if (DEFAULT_ABI
== ABI_DARWIN
10653 && flag_pic
&& current_function_uses_pic_offset_table
)
10655 rtx dest
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
10656 const char *picbase
= machopic_function_base_name ();
10657 rtx src
= gen_rtx_SYMBOL_REF (Pmode
, ggc_alloc_string (picbase
, -1));
10659 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest
, src
)));
10661 rs6000_maybe_dead (
10662 emit_move_insn (gen_rtx_REG (Pmode
, RS6000_PIC_OFFSET_TABLE_REGNUM
),
10663 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)));
10668 /* Write function prologue. */
10671 rs6000_output_function_prologue (file
, size
)
10673 HOST_WIDE_INT size ATTRIBUTE_UNUSED
;
10675 rs6000_stack_t
*info
= rs6000_stack_info ();
10677 if (TARGET_DEBUG_STACK
)
10678 debug_stack_info (info
);
10680 /* Write .extern for any function we will call to save and restore
10682 if (info
->first_fp_reg_save
< 64
10683 && !FP_SAVE_INLINE (info
->first_fp_reg_save
))
10684 fprintf (file
, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
10685 SAVE_FP_PREFIX
, info
->first_fp_reg_save
- 32, SAVE_FP_SUFFIX
,
10686 RESTORE_FP_PREFIX
, info
->first_fp_reg_save
- 32,
10687 RESTORE_FP_SUFFIX
);
10689 /* Write .extern for AIX common mode routines, if needed. */
10690 if (! TARGET_POWER
&& ! TARGET_POWERPC
&& ! common_mode_defined
)
10692 fputs ("\t.extern __mulh\n", file
);
10693 fputs ("\t.extern __mull\n", file
);
10694 fputs ("\t.extern __divss\n", file
);
10695 fputs ("\t.extern __divus\n", file
);
10696 fputs ("\t.extern __quoss\n", file
);
10697 fputs ("\t.extern __quous\n", file
);
10698 common_mode_defined
= 1;
10701 if (! HAVE_prologue
)
10705 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
10706 the "toplevel" insn chain. */
10707 emit_note (0, NOTE_INSN_DELETED
);
10708 rs6000_emit_prologue ();
10709 emit_note (0, NOTE_INSN_DELETED
);
10711 /* Expand INSN_ADDRESSES so final() doesn't crash. */
10715 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
10717 INSN_ADDRESSES_NEW (insn
, addr
);
10722 if (TARGET_DEBUG_STACK
)
10723 debug_rtx_list (get_insns (), 100);
10724 final (get_insns (), file
, FALSE
, FALSE
);
10728 rs6000_pic_labelno
++;
10731 /* Emit function epilogue as insns.
10733 At present, dwarf2out_frame_debug_expr doesn't understand
10734 register restores, so we don't bother setting RTX_FRAME_RELATED_P
10735 anywhere in the epilogue. Most of the insns below would in any case
10736 need special notes to explain where r11 is in relation to the stack. */
10739 rs6000_emit_epilogue (sibcall
)
10742 rs6000_stack_t
*info
;
10743 int restoring_FPRs_inline
;
10744 int using_load_multiple
;
10745 int using_mfcr_multiple
;
10746 int use_backchain_to_restore_sp
;
10748 rtx sp_reg_rtx
= gen_rtx_REG (Pmode
, 1);
10749 rtx frame_reg_rtx
= sp_reg_rtx
;
10750 enum machine_mode reg_mode
= TARGET_POWERPC64
? DImode
: SImode
;
10751 int reg_size
= TARGET_POWERPC64
? 8 : 4;
10754 if (TARGET_SPE_ABI
)
10756 reg_mode
= V2SImode
;
10760 info
= rs6000_stack_info ();
10761 using_load_multiple
= (TARGET_MULTIPLE
&& ! TARGET_POWERPC64
10763 && info
->first_gp_reg_save
< 31);
10764 restoring_FPRs_inline
= (sibcall
10765 || current_function_calls_eh_return
10766 || info
->first_fp_reg_save
== 64
10767 || FP_SAVE_INLINE (info
->first_fp_reg_save
));
10768 use_backchain_to_restore_sp
= (frame_pointer_needed
10769 || current_function_calls_alloca
10770 || info
->total_size
> 32767);
10771 using_mfcr_multiple
= (rs6000_cpu
== PROCESSOR_PPC601
10772 || rs6000_cpu
== PROCESSOR_PPC603
10773 || rs6000_cpu
== PROCESSOR_PPC750
10776 /* If we have a frame pointer, a call to alloca, or a large stack
10777 frame, restore the old stack pointer using the backchain. Otherwise,
10778 we know what size to update it with. */
10779 if (use_backchain_to_restore_sp
)
10781 /* Under V.4, don't reset the stack pointer until after we're done
10782 loading the saved registers. */
10783 if (DEFAULT_ABI
== ABI_V4
)
10784 frame_reg_rtx
= gen_rtx_REG (Pmode
, 11);
10786 emit_move_insn (frame_reg_rtx
,
10787 gen_rtx_MEM (Pmode
, sp_reg_rtx
));
10790 else if (info
->push_p
)
10792 if (DEFAULT_ABI
== ABI_V4
)
10793 sp_offset
= info
->total_size
;
10796 emit_insn (TARGET_32BIT
10797 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
,
10798 GEN_INT (info
->total_size
))
10799 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
,
10800 GEN_INT (info
->total_size
)));
10804 /* Restore AltiVec registers if needed. */
10805 if (TARGET_ALTIVEC_ABI
&& info
->altivec_size
!= 0)
10809 for (i
= info
->first_altivec_reg_save
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
10810 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
10812 rtx addr
, areg
, mem
;
10814 areg
= gen_rtx_REG (Pmode
, 0);
10816 (areg
, GEN_INT (info
->altivec_save_offset
10818 + 16 * (i
- info
->first_altivec_reg_save
)));
10820 /* AltiVec addressing mode is [reg+reg]. */
10821 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, areg
);
10822 mem
= gen_rtx_MEM (V4SImode
, addr
);
10823 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10825 emit_move_insn (gen_rtx_REG (V4SImode
, i
), mem
);
10829 /* Restore VRSAVE if needed. */
10830 if (TARGET_ALTIVEC_ABI
&& info
->vrsave_mask
!= 0)
10832 rtx addr
, mem
, reg
;
10834 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10835 GEN_INT (info
->vrsave_save_offset
+ sp_offset
));
10836 mem
= gen_rtx_MEM (SImode
, addr
);
10837 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10838 reg
= gen_rtx_REG (SImode
, 12);
10839 emit_move_insn (reg
, mem
);
10841 emit_insn (generate_set_vrsave (reg
, info
, 1));
10844 /* Get the old lr if we saved it. */
10845 if (info
->lr_save_p
)
10847 rtx mem
= gen_frame_mem_offset (Pmode
, frame_reg_rtx
,
10848 info
->lr_save_offset
+ sp_offset
);
10850 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10852 emit_move_insn (gen_rtx_REG (Pmode
, 0), mem
);
10855 /* Get the old cr if we saved it. */
10856 if (info
->cr_save_p
)
10858 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10859 GEN_INT (info
->cr_save_offset
+ sp_offset
));
10860 rtx mem
= gen_rtx_MEM (SImode
, addr
);
10862 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10864 emit_move_insn (gen_rtx_REG (SImode
, 12), mem
);
10867 /* Set LR here to try to overlap restores below. */
10868 if (info
->lr_save_p
)
10869 emit_move_insn (gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
),
10870 gen_rtx_REG (Pmode
, 0));
10872 /* Load exception handler data registers, if needed. */
10873 if (current_function_calls_eh_return
)
10875 unsigned int i
, regno
;
10881 regno
= EH_RETURN_DATA_REGNO (i
);
10882 if (regno
== INVALID_REGNUM
)
10885 mem
= gen_frame_mem_offset (reg_mode
, frame_reg_rtx
,
10886 info
->ehrd_offset
+ sp_offset
10887 + reg_size
* (int) i
);
10888 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10890 emit_move_insn (gen_rtx_REG (reg_mode
, regno
), mem
);
10894 /* Restore GPRs. This is done as a PARALLEL if we are using
10895 the load-multiple instructions. */
10896 if (using_load_multiple
)
10899 p
= rtvec_alloc (32 - info
->first_gp_reg_save
);
10900 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
10902 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10903 GEN_INT (info
->gp_save_offset
10906 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
10908 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10911 gen_rtx_SET (VOIDmode
,
10912 gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
),
10915 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
10918 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
10919 if ((regs_ever_live
[info
->first_gp_reg_save
+i
]
10920 && ! call_used_regs
[info
->first_gp_reg_save
+i
])
10921 || (i
+info
->first_gp_reg_save
== RS6000_PIC_OFFSET_TABLE_REGNUM
10922 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1)
10923 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))))
10925 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10926 GEN_INT (info
->gp_save_offset
10929 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
10931 /* Restore 64-bit quantities for SPE. */
10932 if (TARGET_SPE_ABI
)
10934 int offset
= info
->spe_gp_save_offset
+ sp_offset
+ 8 * i
;
10937 if (!SPE_CONST_OFFSET_OK (offset
))
10939 b
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
10940 emit_move_insn (b
, GEN_INT (offset
));
10943 b
= GEN_INT (offset
);
10945 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, b
);
10946 mem
= gen_rtx_MEM (V2SImode
, addr
);
10949 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10951 emit_move_insn (gen_rtx_REG (reg_mode
,
10952 info
->first_gp_reg_save
+ i
), mem
);
10955 /* Restore fpr's if we need to do it without calling a function. */
10956 if (restoring_FPRs_inline
)
10957 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
10958 if ((regs_ever_live
[info
->first_fp_reg_save
+i
]
10959 && ! call_used_regs
[info
->first_fp_reg_save
+i
]))
10962 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10963 GEN_INT (info
->fp_save_offset
10966 mem
= gen_rtx_MEM (DFmode
, addr
);
10967 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10969 emit_move_insn (gen_rtx_REG (DFmode
,
10970 info
->first_fp_reg_save
+ i
),
10974 /* If we saved cr, restore it here. Just those that were used. */
10975 if (info
->cr_save_p
)
10977 rtx r12_rtx
= gen_rtx_REG (SImode
, 12);
10980 if (using_mfcr_multiple
)
10982 for (i
= 0; i
< 8; i
++)
10983 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
10989 if (using_mfcr_multiple
&& count
> 1)
10994 p
= rtvec_alloc (count
);
10997 for (i
= 0; i
< 8; i
++)
10998 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
11000 rtvec r
= rtvec_alloc (2);
11001 RTVEC_ELT (r
, 0) = r12_rtx
;
11002 RTVEC_ELT (r
, 1) = GEN_INT (1 << (7-i
));
11003 RTVEC_ELT (p
, ndx
) =
11004 gen_rtx_SET (VOIDmode
, gen_rtx_REG (CCmode
, CR0_REGNO
+i
),
11005 gen_rtx_UNSPEC (CCmode
, r
, 20));
11008 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
11013 for (i
= 0; i
< 8; i
++)
11014 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
11016 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode
,
11022 /* If this is V.4, unwind the stack pointer after all of the loads
11023 have been done. We need to emit a block here so that sched
11024 doesn't decide to move the sp change before the register restores
11025 (which may not have any obvious dependency on the stack). This
11026 doesn't hurt performance, because there is no scheduling that can
11027 be done after this point. */
11028 if (DEFAULT_ABI
== ABI_V4
)
11030 if (frame_reg_rtx
!= sp_reg_rtx
)
11031 rs6000_emit_stack_tie ();
11033 if (use_backchain_to_restore_sp
)
11035 emit_move_insn (sp_reg_rtx
, frame_reg_rtx
);
11037 else if (sp_offset
!= 0)
11039 emit_insn (TARGET_32BIT
11040 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
,
11041 GEN_INT (sp_offset
))
11042 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
,
11043 GEN_INT (sp_offset
)));
11047 if (current_function_calls_eh_return
)
11049 rtx sa
= EH_RETURN_STACKADJ_RTX
;
11050 emit_insn (TARGET_32BIT
11051 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
, sa
)
11052 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
, sa
));
11058 if (! restoring_FPRs_inline
)
11059 p
= rtvec_alloc (3 + 64 - info
->first_fp_reg_save
);
11061 p
= rtvec_alloc (2);
11063 RTVEC_ELT (p
, 0) = gen_rtx_RETURN (VOIDmode
);
11064 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
,
11065 gen_rtx_REG (Pmode
,
11066 LINK_REGISTER_REGNUM
));
11068 /* If we have to restore more than two FP registers, branch to the
11069 restore function. It will return to our caller. */
11070 if (! restoring_FPRs_inline
)
11074 const char *alloc_rname
;
11076 sprintf (rname
, "%s%d%s", RESTORE_FP_PREFIX
,
11077 info
->first_fp_reg_save
- 32, RESTORE_FP_SUFFIX
);
11078 alloc_rname
= ggc_strdup (rname
);
11079 RTVEC_ELT (p
, 2) = gen_rtx_USE (VOIDmode
,
11080 gen_rtx_SYMBOL_REF (Pmode
,
11083 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
11086 addr
= gen_rtx_PLUS (Pmode
, sp_reg_rtx
,
11087 GEN_INT (info
->fp_save_offset
+ 8*i
));
11088 mem
= gen_rtx_MEM (DFmode
, addr
);
11089 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11091 RTVEC_ELT (p
, i
+3) =
11092 gen_rtx_SET (VOIDmode
,
11093 gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
),
11098 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
11102 /* Write function epilogue. */
11105 rs6000_output_function_epilogue (file
, size
)
11107 HOST_WIDE_INT size ATTRIBUTE_UNUSED
;
11109 rs6000_stack_t
*info
= rs6000_stack_info ();
11111 if (! HAVE_epilogue
)
11113 rtx insn
= get_last_insn ();
11114 /* If the last insn was a BARRIER, we don't have to write anything except
11115 the trace table. */
11116 if (GET_CODE (insn
) == NOTE
)
11117 insn
= prev_nonnote_insn (insn
);
11118 if (insn
== 0 || GET_CODE (insn
) != BARRIER
)
11120 /* This is slightly ugly, but at least we don't have two
11121 copies of the epilogue-emitting code. */
11124 /* A NOTE_INSN_DELETED is supposed to be at the start
11125 and end of the "toplevel" insn chain. */
11126 emit_note (0, NOTE_INSN_DELETED
);
11127 rs6000_emit_epilogue (FALSE
);
11128 emit_note (0, NOTE_INSN_DELETED
);
11130 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11134 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
11136 INSN_ADDRESSES_NEW (insn
, addr
);
11141 if (TARGET_DEBUG_STACK
)
11142 debug_rtx_list (get_insns (), 100);
11143 final (get_insns (), file
, FALSE
, FALSE
);
11148 /* Output a traceback table here. See /usr/include/sys/debug.h for info
11151 We don't output a traceback table if -finhibit-size-directive was
11152 used. The documentation for -finhibit-size-directive reads
11153 ``don't output a @code{.size} assembler directive, or anything
11154 else that would cause trouble if the function is split in the
11155 middle, and the two halves are placed at locations far apart in
11156 memory.'' The traceback table has this property, since it
11157 includes the offset from the start of the function to the
11158 traceback table itself.
11160 System V.4 Powerpc's (and the embedded ABI derived from it) use a
11161 different traceback table. */
11162 if (DEFAULT_ABI
== ABI_AIX
&& ! flag_inhibit_size_directive
11163 && rs6000_traceback
!= traceback_none
)
11165 const char *fname
= NULL
;
11166 const char *language_string
= lang_hooks
.name
;
11167 int fixed_parms
= 0, float_parms
= 0, parm_info
= 0;
11169 int optional_tbtab
;
11171 if (rs6000_traceback
== traceback_full
)
11172 optional_tbtab
= 1;
11173 else if (rs6000_traceback
== traceback_part
)
11174 optional_tbtab
= 0;
11176 optional_tbtab
= !optimize_size
&& !TARGET_ELF
;
11178 if (optional_tbtab
)
11180 fname
= XSTR (XEXP (DECL_RTL (current_function_decl
), 0), 0);
11181 while (*fname
== '.') /* V.4 encodes . in the name */
11184 /* Need label immediately before tbtab, so we can compute
11185 its offset from the function start. */
11186 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LT");
11187 ASM_OUTPUT_LABEL (file
, fname
);
11190 /* The .tbtab pseudo-op can only be used for the first eight
11191 expressions, since it can't handle the possibly variable
11192 length fields that follow. However, if you omit the optional
11193 fields, the assembler outputs zeros for all optional fields
11194 anyways, giving each variable length field is minimum length
11195 (as defined in sys/debug.h). Thus we can not use the .tbtab
11196 pseudo-op at all. */
11198 /* An all-zero word flags the start of the tbtab, for debuggers
11199 that have to find it by searching forward from the entry
11200 point or from the current pc. */
11201 fputs ("\t.long 0\n", file
);
11203 /* Tbtab format type. Use format type 0. */
11204 fputs ("\t.byte 0,", file
);
11206 /* Language type. Unfortunately, there doesn't seem to be any
11207 official way to get this info, so we use language_string. C
11208 is 0. C++ is 9. No number defined for Obj-C, so use the
11209 value for C for now. There is no official value for Java,
11210 although IBM appears to be using 13. There is no official value
11211 for Chill, so we've chosen 44 pseudo-randomly. */
11212 if (! strcmp (language_string
, "GNU C")
11213 || ! strcmp (language_string
, "GNU Objective-C"))
11215 else if (! strcmp (language_string
, "GNU F77"))
11217 else if (! strcmp (language_string
, "GNU Ada"))
11219 else if (! strcmp (language_string
, "GNU Pascal"))
11221 else if (! strcmp (language_string
, "GNU C++"))
11223 else if (! strcmp (language_string
, "GNU Java"))
11225 else if (! strcmp (language_string
, "GNU CHILL"))
11229 fprintf (file
, "%d,", i
);
11231 /* 8 single bit fields: global linkage (not set for C extern linkage,
11232 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
11233 from start of procedure stored in tbtab, internal function, function
11234 has controlled storage, function has no toc, function uses fp,
11235 function logs/aborts fp operations. */
11236 /* Assume that fp operations are used if any fp reg must be saved. */
11237 fprintf (file
, "%d,",
11238 (optional_tbtab
<< 5) | ((info
->first_fp_reg_save
!= 64) << 1));
11240 /* 6 bitfields: function is interrupt handler, name present in
11241 proc table, function calls alloca, on condition directives
11242 (controls stack walks, 3 bits), saves condition reg, saves
11244 /* The `function calls alloca' bit seems to be set whenever reg 31 is
11245 set up as a frame pointer, even when there is no alloca call. */
11246 fprintf (file
, "%d,",
11247 ((optional_tbtab
<< 6)
11248 | ((optional_tbtab
& frame_pointer_needed
) << 5)
11249 | (info
->cr_save_p
<< 1)
11250 | (info
->lr_save_p
)));
11252 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
11254 fprintf (file
, "%d,",
11255 (info
->push_p
<< 7) | (64 - info
->first_fp_reg_save
));
11257 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
11258 fprintf (file
, "%d,", (32 - first_reg_to_save ()));
11260 if (optional_tbtab
)
11262 /* Compute the parameter info from the function decl argument
11265 int next_parm_info_bit
= 31;
11267 for (decl
= DECL_ARGUMENTS (current_function_decl
);
11268 decl
; decl
= TREE_CHAIN (decl
))
11270 rtx parameter
= DECL_INCOMING_RTL (decl
);
11271 enum machine_mode mode
= GET_MODE (parameter
);
11273 if (GET_CODE (parameter
) == REG
)
11275 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
)
11281 if (mode
== SFmode
)
11283 else if (mode
== DFmode
|| mode
== TFmode
)
11288 /* If only one bit will fit, don't or in this entry. */
11289 if (next_parm_info_bit
> 0)
11290 parm_info
|= (bits
<< (next_parm_info_bit
- 1));
11291 next_parm_info_bit
-= 2;
11295 fixed_parms
+= ((GET_MODE_SIZE (mode
)
11296 + (UNITS_PER_WORD
- 1))
11298 next_parm_info_bit
-= 1;
11304 /* Number of fixed point parameters. */
11305 /* This is actually the number of words of fixed point parameters; thus
11306 an 8 byte struct counts as 2; and thus the maximum value is 8. */
11307 fprintf (file
, "%d,", fixed_parms
);
11309 /* 2 bitfields: number of floating point parameters (7 bits), parameters
11311 /* This is actually the number of fp registers that hold parameters;
11312 and thus the maximum value is 13. */
11313 /* Set parameters on stack bit if parameters are not in their original
11314 registers, regardless of whether they are on the stack? Xlc
11315 seems to set the bit when not optimizing. */
11316 fprintf (file
, "%d\n", ((float_parms
<< 1) | (! optimize
)));
11318 if (! optional_tbtab
)
11321 /* Optional fields follow. Some are variable length. */
11323 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
11324 11 double float. */
11325 /* There is an entry for each parameter in a register, in the order that
11326 they occur in the parameter list. Any intervening arguments on the
11327 stack are ignored. If the list overflows a long (max possible length
11328 34 bits) then completely leave off all elements that don't fit. */
11329 /* Only emit this long if there was at least one parameter. */
11330 if (fixed_parms
|| float_parms
)
11331 fprintf (file
, "\t.long %d\n", parm_info
);
11333 /* Offset from start of code to tb table. */
11334 fputs ("\t.long ", file
);
11335 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LT");
11337 RS6000_OUTPUT_BASENAME (file
, fname
);
11339 assemble_name (file
, fname
);
11341 fputs ("-.", file
);
11343 RS6000_OUTPUT_BASENAME (file
, fname
);
11345 assemble_name (file
, fname
);
11349 /* Interrupt handler mask. */
11350 /* Omit this long, since we never set the interrupt handler bit
11353 /* Number of CTL (controlled storage) anchors. */
11354 /* Omit this long, since the has_ctl bit is never set above. */
11356 /* Displacement into stack of each CTL anchor. */
11357 /* Omit this list of longs, because there are no CTL anchors. */
11359 /* Length of function name. */
11362 fprintf (file
, "\t.short %d\n", (int) strlen (fname
));
11364 /* Function name. */
11365 assemble_string (fname
, strlen (fname
));
11367 /* Register for alloca automatic storage; this is always reg 31.
11368 Only emit this if the alloca bit was set above. */
11369 if (frame_pointer_needed
)
11370 fputs ("\t.byte 31\n", file
);
11372 fputs ("\t.align 2\n", file
);
11376 /* A C compound statement that outputs the assembler code for a thunk
11377 function, used to implement C++ virtual function calls with
11378 multiple inheritance. The thunk acts as a wrapper around a virtual
11379 function, adjusting the implicit object parameter before handing
11380 control off to the real function.
11382 First, emit code to add the integer DELTA to the location that
11383 contains the incoming first argument. Assume that this argument
11384 contains a pointer, and is the one used to pass the `this' pointer
11385 in C++. This is the incoming argument *before* the function
11386 prologue, e.g. `%o0' on a sparc. The addition must preserve the
11387 values of all other incoming arguments.
11389 After the addition, emit code to jump to FUNCTION, which is a
11390 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
11391 not touch the return address. Hence returning from FUNCTION will
11392 return to whoever called the current `thunk'.
11394 The effect must be as if FUNCTION had been called directly with the
11395 adjusted first argument. This macro is responsible for emitting
11396 all of the code for a thunk function; output_function_prologue()
11397 and output_function_epilogue() are not invoked.
11399 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
11400 been extracted from it.) It might possibly be useful on some
11401 targets, but probably not.
11403 If you do not define this macro, the target-independent code in the
11404 C++ frontend will generate a less efficient heavyweight thunk that
11405 calls FUNCTION instead of jumping to it. The generic approach does
11406 not support varargs. */
11409 rs6000_output_mi_thunk (file
, thunk_fndecl
, delta
, vcall_offset
, function
)
11411 tree thunk_fndecl ATTRIBUTE_UNUSED
;
11412 HOST_WIDE_INT delta
;
11413 HOST_WIDE_INT vcall_offset
;
11416 rtx
this, insn
, funexp
;
11418 reload_completed
= 1;
11419 no_new_pseudos
= 1;
11421 /* Mark the end of the (empty) prologue. */
11422 emit_note (NULL
, NOTE_INSN_PROLOGUE_END
);
11424 /* Find the "this" pointer. If the function returns a structure,
11425 the structure return pointer is in r3. */
11426 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function
))))
11427 this = gen_rtx_REG (Pmode
, 4);
11429 this = gen_rtx_REG (Pmode
, 3);
11431 /* Apply the constant offset, if required. */
11434 rtx delta_rtx
= GEN_INT (delta
);
11435 emit_insn (TARGET_32BIT
11436 ? gen_addsi3 (this, this, delta_rtx
)
11437 : gen_adddi3 (this, this, delta_rtx
));
11440 /* Apply the offset from the vtable, if required. */
11443 rtx vcall_offset_rtx
= GEN_INT (vcall_offset
);
11444 rtx tmp
= gen_rtx_REG (Pmode
, 12);
11446 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, this));
11447 emit_insn (TARGET_32BIT
11448 ? gen_addsi3 (tmp
, tmp
, vcall_offset_rtx
)
11449 : gen_adddi3 (tmp
, tmp
, vcall_offset_rtx
));
11450 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, tmp
));
11451 emit_insn (TARGET_32BIT
11452 ? gen_addsi3 (this, this, tmp
)
11453 : gen_adddi3 (this, this, tmp
));
11456 /* Generate a tail call to the target function. */
11457 if (!TREE_USED (function
))
11459 assemble_external (function
);
11460 TREE_USED (function
) = 1;
11462 funexp
= XEXP (DECL_RTL (function
), 0);
11464 SYMBOL_REF_FLAG (funexp
) = 0;
11465 if (current_file_function_operand (funexp
, VOIDmode
)
11466 && (! lookup_attribute ("longcall",
11467 TYPE_ATTRIBUTES (TREE_TYPE (function
)))
11468 || lookup_attribute ("shortcall",
11469 TYPE_ATTRIBUTES (TREE_TYPE (function
)))))
11470 SYMBOL_REF_FLAG (funexp
) = 1;
11472 funexp
= gen_rtx_MEM (FUNCTION_MODE
, funexp
);
11476 funexp
= machopic_indirect_call_target (funexp
);
11479 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
11480 generate sibcall RTL explicitly to avoid constraint abort. */
11481 insn
= emit_call_insn (
11482 gen_rtx_PARALLEL (VOIDmode
,
11484 gen_rtx_CALL (VOIDmode
,
11485 funexp
, const0_rtx
),
11486 gen_rtx_USE (VOIDmode
, const0_rtx
),
11487 gen_rtx_USE (VOIDmode
,
11488 gen_rtx_REG (SImode
,
11489 LINK_REGISTER_REGNUM
)),
11490 gen_rtx_RETURN (VOIDmode
))));
11491 SIBLING_CALL_P (insn
) = 1;
11494 /* Run just enough of rest_of_compilation to get the insns emitted.
11495 There's not really enough bulk here to make other passes such as
11496 instruction scheduling worth while. Note that use_thunk calls
11497 assemble_start_function and assemble_end_function. */
11498 insn
= get_insns ();
11499 shorten_branches (insn
);
11500 final_start_function (insn
, file
, 1);
11501 final (insn
, file
, 1, 0);
11502 final_end_function ();
11504 reload_completed
= 0;
11505 no_new_pseudos
= 0;
11508 /* A quick summary of the various types of 'constant-pool tables'
11511 Target Flags Name One table per
11512 AIX (none) AIX TOC object file
11513 AIX -mfull-toc AIX TOC object file
11514 AIX -mminimal-toc AIX minimal TOC translation unit
11515 SVR4/EABI (none) SVR4 SDATA object file
11516 SVR4/EABI -fpic SVR4 pic object file
11517 SVR4/EABI -fPIC SVR4 PIC translation unit
11518 SVR4/EABI -mrelocatable EABI TOC function
11519 SVR4/EABI -maix AIX TOC object file
11520 SVR4/EABI -maix -mminimal-toc
11521 AIX minimal TOC translation unit
11523 Name Reg. Set by entries contains:
11524 made by addrs? fp? sum?
11526 AIX TOC 2 crt0 as Y option option
11527 AIX minimal TOC 30 prolog gcc Y Y option
11528 SVR4 SDATA 13 crt0 gcc N Y N
11529 SVR4 pic 30 prolog ld Y not yet N
11530 SVR4 PIC 30 prolog gcc Y option option
11531 EABI TOC 30 prolog gcc Y option option
11535 /* Hash functions for the hash table. */
11538 rs6000_hash_constant (k
)
11541 enum rtx_code code
= GET_CODE (k
);
11542 enum machine_mode mode
= GET_MODE (k
);
11543 unsigned result
= (code
<< 3) ^ mode
;
11544 const char *format
;
11547 format
= GET_RTX_FORMAT (code
);
11548 flen
= strlen (format
);
11554 return result
* 1231 + (unsigned) INSN_UID (XEXP (k
, 0));
11557 if (mode
!= VOIDmode
)
11558 return real_hash (CONST_DOUBLE_REAL_VALUE (k
)) * result
;
11570 for (; fidx
< flen
; fidx
++)
11571 switch (format
[fidx
])
11576 const char *str
= XSTR (k
, fidx
);
11577 len
= strlen (str
);
11578 result
= result
* 613 + len
;
11579 for (i
= 0; i
< len
; i
++)
11580 result
= result
* 613 + (unsigned) str
[i
];
11585 result
= result
* 1231 + rs6000_hash_constant (XEXP (k
, fidx
));
11589 result
= result
* 613 + (unsigned) XINT (k
, fidx
);
11592 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT
))
11593 result
= result
* 613 + (unsigned) XWINT (k
, fidx
);
11597 for (i
= 0; i
< sizeof(HOST_WIDE_INT
)/sizeof(unsigned); i
++)
11598 result
= result
* 613 + (unsigned) (XWINT (k
, fidx
)
11610 toc_hash_function (hash_entry
)
11611 const void * hash_entry
;
11613 const struct toc_hash_struct
*thc
=
11614 (const struct toc_hash_struct
*) hash_entry
;
11615 return rs6000_hash_constant (thc
->key
) ^ thc
->key_mode
;
11618 /* Compare H1 and H2 for equivalence. */
11621 toc_hash_eq (h1
, h2
)
11625 rtx r1
= ((const struct toc_hash_struct
*) h1
)->key
;
11626 rtx r2
= ((const struct toc_hash_struct
*) h2
)->key
;
11628 if (((const struct toc_hash_struct
*) h1
)->key_mode
11629 != ((const struct toc_hash_struct
*) h2
)->key_mode
)
11632 return rtx_equal_p (r1
, r2
);
11635 /* These are the names given by the C++ front-end to vtables, and
11636 vtable-like objects. Ideally, this logic should not be here;
11637 instead, there should be some programmatic way of inquiring as
11638 to whether or not an object is a vtable. */
11640 #define VTABLE_NAME_P(NAME) \
11641 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
11642 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
11643 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
11644 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
11647 rs6000_output_symbol_ref (file
, x
)
11651 /* Currently C++ toc references to vtables can be emitted before it
11652 is decided whether the vtable is public or private. If this is
11653 the case, then the linker will eventually complain that there is
11654 a reference to an unknown section. Thus, for vtables only,
11655 we emit the TOC reference to reference the symbol and not the
11657 const char *name
= XSTR (x
, 0);
11659 if (VTABLE_NAME_P (name
))
11661 RS6000_OUTPUT_BASENAME (file
, name
);
11664 assemble_name (file
, name
);
11667 /* Output a TOC entry. We derive the entry name from what is being
11671 output_toc (file
, x
, labelno
, mode
)
11675 enum machine_mode mode
;
11678 const char *name
= buf
;
11679 const char *real_name
;
11686 /* When the linker won't eliminate them, don't output duplicate
11687 TOC entries (this happens on AIX if there is any kind of TOC,
11688 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
11690 if (TARGET_TOC
&& GET_CODE (x
) != LABEL_REF
)
11692 struct toc_hash_struct
*h
;
11695 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
11696 time because GGC is not initialised at that point. */
11697 if (toc_hash_table
== NULL
)
11698 toc_hash_table
= htab_create_ggc (1021, toc_hash_function
,
11699 toc_hash_eq
, NULL
);
11701 h
= ggc_alloc (sizeof (*h
));
11703 h
->key_mode
= mode
;
11704 h
->labelno
= labelno
;
11706 found
= htab_find_slot (toc_hash_table
, h
, 1);
11707 if (*found
== NULL
)
11709 else /* This is indeed a duplicate.
11710 Set this label equal to that label. */
11712 fputs ("\t.set ", file
);
11713 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LC");
11714 fprintf (file
, "%d,", labelno
);
11715 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LC");
11716 fprintf (file
, "%d\n", ((*(const struct toc_hash_struct
**)
11722 /* If we're going to put a double constant in the TOC, make sure it's
11723 aligned properly when strict alignment is on. */
11724 if (GET_CODE (x
) == CONST_DOUBLE
11725 && STRICT_ALIGNMENT
11726 && GET_MODE_BITSIZE (mode
) >= 64
11727 && ! (TARGET_NO_FP_IN_TOC
&& ! TARGET_MINIMAL_TOC
)) {
11728 ASM_OUTPUT_ALIGN (file
, 3);
11731 (*targetm
.asm_out
.internal_label
) (file
, "LC", labelno
);
11733 /* Handle FP constants specially. Note that if we have a minimal
11734 TOC, things we put here aren't actually in the TOC, so we can allow
11736 if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == TFmode
)
11738 REAL_VALUE_TYPE rv
;
11741 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
11742 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv
, k
);
11746 if (TARGET_MINIMAL_TOC
)
11747 fputs (DOUBLE_INT_ASM_OP
, file
);
11749 fprintf (file
, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
11750 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
11751 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
11752 fprintf (file
, "0x%lx%08lx,0x%lx%08lx\n",
11753 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
11754 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
11759 if (TARGET_MINIMAL_TOC
)
11760 fputs ("\t.long ", file
);
11762 fprintf (file
, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
11763 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
11764 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
11765 fprintf (file
, "0x%lx,0x%lx,0x%lx,0x%lx\n",
11766 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
11767 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
11771 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == DFmode
)
11773 REAL_VALUE_TYPE rv
;
11776 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
11777 REAL_VALUE_TO_TARGET_DOUBLE (rv
, k
);
11781 if (TARGET_MINIMAL_TOC
)
11782 fputs (DOUBLE_INT_ASM_OP
, file
);
11784 fprintf (file
, "\t.tc FD_%lx_%lx[TC],",
11785 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
11786 fprintf (file
, "0x%lx%08lx\n",
11787 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
11792 if (TARGET_MINIMAL_TOC
)
11793 fputs ("\t.long ", file
);
11795 fprintf (file
, "\t.tc FD_%lx_%lx[TC],",
11796 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
11797 fprintf (file
, "0x%lx,0x%lx\n",
11798 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
11802 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == SFmode
)
11804 REAL_VALUE_TYPE rv
;
11807 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
11808 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
11812 if (TARGET_MINIMAL_TOC
)
11813 fputs (DOUBLE_INT_ASM_OP
, file
);
11815 fprintf (file
, "\t.tc FS_%lx[TC],", l
& 0xffffffff);
11816 fprintf (file
, "0x%lx00000000\n", l
& 0xffffffff);
11821 if (TARGET_MINIMAL_TOC
)
11822 fputs ("\t.long ", file
);
11824 fprintf (file
, "\t.tc FS_%lx[TC],", l
& 0xffffffff);
11825 fprintf (file
, "0x%lx\n", l
& 0xffffffff);
11829 else if (GET_MODE (x
) == VOIDmode
11830 && (GET_CODE (x
) == CONST_INT
|| GET_CODE (x
) == CONST_DOUBLE
))
11832 unsigned HOST_WIDE_INT low
;
11833 HOST_WIDE_INT high
;
11835 if (GET_CODE (x
) == CONST_DOUBLE
)
11837 low
= CONST_DOUBLE_LOW (x
);
11838 high
= CONST_DOUBLE_HIGH (x
);
11841 #if HOST_BITS_PER_WIDE_INT == 32
11844 high
= (low
& 0x80000000) ? ~0 : 0;
11848 low
= INTVAL (x
) & 0xffffffff;
11849 high
= (HOST_WIDE_INT
) INTVAL (x
) >> 32;
11853 /* TOC entries are always Pmode-sized, but since this
11854 is a bigendian machine then if we're putting smaller
11855 integer constants in the TOC we have to pad them.
11856 (This is still a win over putting the constants in
11857 a separate constant pool, because then we'd have
11858 to have both a TOC entry _and_ the actual constant.)
11860 For a 32-bit target, CONST_INT values are loaded and shifted
11861 entirely within `low' and can be stored in one TOC entry. */
11863 if (TARGET_64BIT
&& POINTER_SIZE
< GET_MODE_BITSIZE (mode
))
11864 abort ();/* It would be easy to make this work, but it doesn't now. */
11866 if (POINTER_SIZE
> GET_MODE_BITSIZE (mode
))
11868 #if HOST_BITS_PER_WIDE_INT == 32
11869 lshift_double (low
, high
, POINTER_SIZE
- GET_MODE_BITSIZE (mode
),
11870 POINTER_SIZE
, &low
, &high
, 0);
11873 low
<<= POINTER_SIZE
- GET_MODE_BITSIZE (mode
);
11874 high
= (HOST_WIDE_INT
) low
>> 32;
11881 if (TARGET_MINIMAL_TOC
)
11882 fputs (DOUBLE_INT_ASM_OP
, file
);
11884 fprintf (file
, "\t.tc ID_%lx_%lx[TC],",
11885 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
11886 fprintf (file
, "0x%lx%08lx\n",
11887 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
11892 if (POINTER_SIZE
< GET_MODE_BITSIZE (mode
))
11894 if (TARGET_MINIMAL_TOC
)
11895 fputs ("\t.long ", file
);
11897 fprintf (file
, "\t.tc ID_%lx_%lx[TC],",
11898 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
11899 fprintf (file
, "0x%lx,0x%lx\n",
11900 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
11904 if (TARGET_MINIMAL_TOC
)
11905 fputs ("\t.long ", file
);
11907 fprintf (file
, "\t.tc IS_%lx[TC],", (long) low
& 0xffffffff);
11908 fprintf (file
, "0x%lx\n", (long) low
& 0xffffffff);
11914 if (GET_CODE (x
) == CONST
)
11916 if (GET_CODE (XEXP (x
, 0)) != PLUS
)
11919 base
= XEXP (XEXP (x
, 0), 0);
11920 offset
= INTVAL (XEXP (XEXP (x
, 0), 1));
11923 if (GET_CODE (base
) == SYMBOL_REF
)
11924 name
= XSTR (base
, 0);
11925 else if (GET_CODE (base
) == LABEL_REF
)
11926 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (XEXP (base
, 0)));
11927 else if (GET_CODE (base
) == CODE_LABEL
)
11928 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (base
));
11932 real_name
= (*targetm
.strip_name_encoding
) (name
);
11933 if (TARGET_MINIMAL_TOC
)
11934 fputs (TARGET_32BIT
? "\t.long " : DOUBLE_INT_ASM_OP
, file
);
11937 fprintf (file
, "\t.tc %s", real_name
);
11940 fprintf (file
, ".N%d", - offset
);
11942 fprintf (file
, ".P%d", offset
);
11944 fputs ("[TC],", file
);
11947 /* Currently C++ toc references to vtables can be emitted before it
11948 is decided whether the vtable is public or private. If this is
11949 the case, then the linker will eventually complain that there is
11950 a TOC reference to an unknown section. Thus, for vtables only,
11951 we emit the TOC reference to reference the symbol and not the
11953 if (VTABLE_NAME_P (name
))
11955 RS6000_OUTPUT_BASENAME (file
, name
);
11957 fprintf (file
, "%d", offset
);
11958 else if (offset
> 0)
11959 fprintf (file
, "+%d", offset
);
11962 output_addr_const (file
, x
);
11966 /* Output an assembler pseudo-op to write an ASCII string of N characters
11967 starting at P to FILE.
11969 On the RS/6000, we have to do this using the .byte operation and
11970 write out special characters outside the quoted string.
11971 Also, the assembler is broken; very long strings are truncated,
11972 so we must artificially break them up early. */
11975 output_ascii (file
, p
, n
)
11981 int i
, count_string
;
11982 const char *for_string
= "\t.byte \"";
11983 const char *for_decimal
= "\t.byte ";
11984 const char *to_close
= NULL
;
11987 for (i
= 0; i
< n
; i
++)
11990 if (c
>= ' ' && c
< 0177)
11993 fputs (for_string
, file
);
11996 /* Write two quotes to get one. */
12004 for_decimal
= "\"\n\t.byte ";
12008 if (count_string
>= 512)
12010 fputs (to_close
, file
);
12012 for_string
= "\t.byte \"";
12013 for_decimal
= "\t.byte ";
12021 fputs (for_decimal
, file
);
12022 fprintf (file
, "%d", c
);
12024 for_string
= "\n\t.byte \"";
12025 for_decimal
= ", ";
12031 /* Now close the string if we have written one. Then end the line. */
12033 fputs (to_close
, file
);
12036 /* Generate a unique section name for FILENAME for a section type
12037 represented by SECTION_DESC. Output goes into BUF.
12039 SECTION_DESC can be any string, as long as it is different for each
12040 possible section type.
12042 We name the section in the same manner as xlc. The name begins with an
12043 underscore followed by the filename (after stripping any leading directory
12044 names) with the last period replaced by the string SECTION_DESC. If
12045 FILENAME does not contain a period, SECTION_DESC is appended to the end of
12049 rs6000_gen_section_name (buf
, filename
, section_desc
)
12051 const char *filename
;
12052 const char *section_desc
;
12054 const char *q
, *after_last_slash
, *last_period
= 0;
12058 after_last_slash
= filename
;
12059 for (q
= filename
; *q
; q
++)
12062 after_last_slash
= q
+ 1;
12063 else if (*q
== '.')
12067 len
= strlen (after_last_slash
) + strlen (section_desc
) + 2;
12068 *buf
= (char *) xmalloc (len
);
12073 for (q
= after_last_slash
; *q
; q
++)
12075 if (q
== last_period
)
12077 strcpy (p
, section_desc
);
12078 p
+= strlen (section_desc
);
12082 else if (ISALNUM (*q
))
12086 if (last_period
== 0)
12087 strcpy (p
, section_desc
);
12092 /* Emit profile function. */
12095 output_profile_hook (labelno
)
12096 int labelno ATTRIBUTE_UNUSED
;
12098 if (DEFAULT_ABI
== ABI_AIX
)
12100 #ifdef NO_PROFILE_COUNTERS
12101 emit_library_call (init_one_libfunc (RS6000_MCOUNT
), 0, VOIDmode
, 0);
12104 const char *label_name
;
12107 ASM_GENERATE_INTERNAL_LABEL (buf
, "LP", labelno
);
12108 label_name
= (*targetm
.strip_name_encoding
) (ggc_strdup (buf
));
12109 fun
= gen_rtx_SYMBOL_REF (Pmode
, label_name
);
12111 emit_library_call (init_one_libfunc (RS6000_MCOUNT
), 0, VOIDmode
, 1,
12115 else if (DEFAULT_ABI
== ABI_DARWIN
)
12117 const char *mcount_name
= RS6000_MCOUNT
;
12118 int caller_addr_regno
= LINK_REGISTER_REGNUM
;
12120 /* Be conservative and always set this, at least for now. */
12121 current_function_uses_pic_offset_table
= 1;
12124 /* For PIC code, set up a stub and collect the caller's address
12125 from r0, which is where the prologue puts it. */
12128 mcount_name
= machopic_stub_name (mcount_name
);
12129 if (current_function_uses_pic_offset_table
)
12130 caller_addr_regno
= 0;
12133 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, mcount_name
),
12135 gen_rtx_REG (Pmode
, caller_addr_regno
), Pmode
);
12139 /* Write function profiler code. */
12142 output_function_profiler (file
, labelno
)
12149 ASM_GENERATE_INTERNAL_LABEL (buf
, "LP", labelno
);
12150 switch (DEFAULT_ABI
)
12157 /* Fall through. */
12159 case ABI_AIX_NODESC
:
12162 warning ("no profiling of 64-bit code for this ABI");
12165 fprintf (file
, "\tmflr %s\n", reg_names
[0]);
12168 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file
);
12169 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12170 reg_names
[0], save_lr
, reg_names
[1]);
12171 asm_fprintf (file
, "\tmflr %s\n", reg_names
[12]);
12172 asm_fprintf (file
, "\t{l|lwz} %s,", reg_names
[0]);
12173 assemble_name (file
, buf
);
12174 asm_fprintf (file
, "@got(%s)\n", reg_names
[12]);
12176 else if (flag_pic
> 1)
12178 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12179 reg_names
[0], save_lr
, reg_names
[1]);
12180 /* Now, we need to get the address of the label. */
12181 fputs ("\tbl 1f\n\t.long ", file
);
12182 assemble_name (file
, buf
);
12183 fputs ("-.\n1:", file
);
12184 asm_fprintf (file
, "\tmflr %s\n", reg_names
[11]);
12185 asm_fprintf (file
, "\t{l|lwz} %s,0(%s)\n",
12186 reg_names
[0], reg_names
[11]);
12187 asm_fprintf (file
, "\t{cax|add} %s,%s,%s\n",
12188 reg_names
[0], reg_names
[0], reg_names
[11]);
12192 asm_fprintf (file
, "\t{liu|lis} %s,", reg_names
[12]);
12193 assemble_name (file
, buf
);
12194 fputs ("@ha\n", file
);
12195 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12196 reg_names
[0], save_lr
, reg_names
[1]);
12197 asm_fprintf (file
, "\t{cal|la} %s,", reg_names
[0]);
12198 assemble_name (file
, buf
);
12199 asm_fprintf (file
, "@l(%s)\n", reg_names
[12]);
12202 if (current_function_needs_context
&& DEFAULT_ABI
== ABI_AIX_NODESC
)
12204 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12205 reg_names
[STATIC_CHAIN_REGNUM
],
12207 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
12208 asm_fprintf (file
, "\t{l|lwz} %s,%d(%s)\n",
12209 reg_names
[STATIC_CHAIN_REGNUM
],
12213 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
12214 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
12219 /* Don't do anything, done in output_profile_hook (). */
12226 rs6000_use_dfa_pipeline_interface ()
12232 rs6000_multipass_dfa_lookahead ()
12234 if (rs6000_cpu
== PROCESSOR_POWER4
)
12240 /* Power4 load update and store update instructions are cracked into a
12241 load or store and an integer insn which are executed in the same cycle.
12242 Branches have their own dispatch slot which does not count against the
12243 GCC issue rate, but it changes the program flow so there are no other
12244 instructions to issue in this cycle. */
12247 rs6000_variable_issue (stream
, verbose
, insn
, more
)
12248 FILE *stream ATTRIBUTE_UNUSED
;
12249 int verbose ATTRIBUTE_UNUSED
;
12253 if (GET_CODE (PATTERN (insn
)) == USE
12254 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
12257 if (rs6000_cpu
== PROCESSOR_POWER4
)
12259 enum attr_type type
= get_attr_type (insn
);
12260 if (type
== TYPE_LOAD_EXT_U
|| type
== TYPE_LOAD_EXT_UX
12261 || type
== TYPE_LOAD_UX
|| type
== TYPE_STORE_UX
12262 || type
== TYPE_FPLOAD_UX
|| type
== TYPE_FPSTORE_UX
)
12264 else if (type
== TYPE_LOAD_U
|| type
== TYPE_STORE_U
12265 || type
== TYPE_FPLOAD_U
|| type
== TYPE_FPSTORE_U
12266 || type
== TYPE_LOAD_EXT
|| type
== TYPE_DELAYED_CR
)
12275 /* Adjust the cost of a scheduling dependency. Return the new cost of
12276 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
12279 rs6000_adjust_cost (insn
, link
, dep_insn
, cost
)
12282 rtx dep_insn ATTRIBUTE_UNUSED
;
12285 if (! recog_memoized (insn
))
12288 if (REG_NOTE_KIND (link
) != 0)
12291 if (REG_NOTE_KIND (link
) == 0)
12293 /* Data dependency; DEP_INSN writes a register that INSN reads
12294 some cycles later. */
12295 switch (get_attr_type (insn
))
12298 /* Tell the first scheduling pass about the latency between
12299 a mtctr and bctr (and mtlr and br/blr). The first
12300 scheduling pass will not know about this latency since
12301 the mtctr instruction, which has the latency associated
12302 to it, will be generated by reload. */
12303 return TARGET_POWER
? 5 : 4;
12305 /* Leave some extra cycles between a compare and its
12306 dependent branch, to inhibit expensive mispredicts. */
12307 if ((rs6000_cpu_attr
== CPU_PPC603
12308 || rs6000_cpu_attr
== CPU_PPC604
12309 || rs6000_cpu_attr
== CPU_PPC604E
12310 || rs6000_cpu_attr
== CPU_PPC620
12311 || rs6000_cpu_attr
== CPU_PPC630
12312 || rs6000_cpu_attr
== CPU_PPC750
12313 || rs6000_cpu_attr
== CPU_PPC7400
12314 || rs6000_cpu_attr
== CPU_PPC7450
12315 || rs6000_cpu_attr
== CPU_POWER4
)
12316 && recog_memoized (dep_insn
)
12317 && (INSN_CODE (dep_insn
) >= 0)
12318 && (get_attr_type (dep_insn
) == TYPE_CMP
12319 || get_attr_type (dep_insn
) == TYPE_COMPARE
12320 || get_attr_type (dep_insn
) == TYPE_DELAYED_COMPARE
12321 || get_attr_type (dep_insn
) == TYPE_FPCOMPARE
12322 || get_attr_type (dep_insn
) == TYPE_CR_LOGICAL
12323 || get_attr_type (dep_insn
) == TYPE_DELAYED_CR
))
12328 /* Fall out to return default cost. */
12334 /* A C statement (sans semicolon) to update the integer scheduling
12335 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
12336 INSN earlier, increase the priority to execute INSN later. Do not
12337 define this macro if you do not need to adjust the scheduling
12338 priorities of insns. */
12341 rs6000_adjust_priority (insn
, priority
)
12342 rtx insn ATTRIBUTE_UNUSED
;
12345 /* On machines (like the 750) which have asymmetric integer units,
12346 where one integer unit can do multiply and divides and the other
12347 can't, reduce the priority of multiply/divide so it is scheduled
12348 before other integer operations. */
12351 if (! INSN_P (insn
))
12354 if (GET_CODE (PATTERN (insn
)) == USE
)
12357 switch (rs6000_cpu_attr
) {
12359 switch (get_attr_type (insn
))
12366 fprintf (stderr
, "priority was %#x (%d) before adjustment\n",
12367 priority
, priority
);
12368 if (priority
>= 0 && priority
< 0x01000000)
12378 /* Return how many instructions the machine can issue per cycle. */
12381 rs6000_issue_rate ()
12383 switch (rs6000_cpu_attr
) {
12384 case CPU_RIOS1
: /* ? */
12386 case CPU_PPC601
: /* ? */
12407 /* Length in units of the trampoline for entering a nested function. */
12410 rs6000_trampoline_size ()
12414 switch (DEFAULT_ABI
)
12420 ret
= (TARGET_32BIT
) ? 12 : 24;
12425 case ABI_AIX_NODESC
:
12426 ret
= (TARGET_32BIT
) ? 40 : 48;
12433 /* Emit RTL insns to initialize the variable parts of a trampoline.
12434 FNADDR is an RTX for the address of the function's pure code.
12435 CXT is an RTX for the static chain value for the function. */
12438 rs6000_initialize_trampoline (addr
, fnaddr
, cxt
)
12443 enum machine_mode pmode
= Pmode
;
12444 int regsize
= (TARGET_32BIT
) ? 4 : 8;
12445 rtx ctx_reg
= force_reg (pmode
, cxt
);
12447 switch (DEFAULT_ABI
)
12452 /* Macros to shorten the code expansions below. */
12453 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
12454 #define MEM_PLUS(addr,offset) \
12455 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
12457 /* Under AIX, just build the 3 word function descriptor */
12460 rtx fn_reg
= gen_reg_rtx (pmode
);
12461 rtx toc_reg
= gen_reg_rtx (pmode
);
12462 emit_move_insn (fn_reg
, MEM_DEREF (fnaddr
));
12463 emit_move_insn (toc_reg
, MEM_PLUS (fnaddr
, regsize
));
12464 emit_move_insn (MEM_DEREF (addr
), fn_reg
);
12465 emit_move_insn (MEM_PLUS (addr
, regsize
), toc_reg
);
12466 emit_move_insn (MEM_PLUS (addr
, 2*regsize
), ctx_reg
);
12470 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
12473 case ABI_AIX_NODESC
:
12474 emit_library_call (gen_rtx_SYMBOL_REF (SImode
, "__trampoline_setup"),
12475 FALSE
, VOIDmode
, 4,
12477 GEN_INT (rs6000_trampoline_size ()), SImode
,
12487 /* Table of valid machine attributes. */
12489 const struct attribute_spec rs6000_attribute_table
[] =
12491 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
12492 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute
},
12493 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute
},
12494 { NULL
, 0, 0, false, false, false, NULL
}
12497 /* Handle a "longcall" or "shortcall" attribute; arguments as in
12498 struct attribute_spec.handler. */
12501 rs6000_handle_longcall_attribute (node
, name
, args
, flags
, no_add_attrs
)
12504 tree args ATTRIBUTE_UNUSED
;
12505 int flags ATTRIBUTE_UNUSED
;
12506 bool *no_add_attrs
;
12508 if (TREE_CODE (*node
) != FUNCTION_TYPE
12509 && TREE_CODE (*node
) != FIELD_DECL
12510 && TREE_CODE (*node
) != TYPE_DECL
)
12512 warning ("`%s' attribute only applies to functions",
12513 IDENTIFIER_POINTER (name
));
12514 *no_add_attrs
= true;
12520 /* Set longcall attributes on all functions declared when
12521 rs6000_default_long_calls is true. */
12523 rs6000_set_default_type_attributes (type
)
12526 if (rs6000_default_long_calls
12527 && (TREE_CODE (type
) == FUNCTION_TYPE
12528 || TREE_CODE (type
) == METHOD_TYPE
))
12529 TYPE_ATTRIBUTES (type
) = tree_cons (get_identifier ("longcall"),
12531 TYPE_ATTRIBUTES (type
));
12534 /* Return a reference suitable for calling a function with the
12535 longcall attribute. */
12538 rs6000_longcall_ref (call_ref
)
12541 const char *call_name
;
12544 if (GET_CODE (call_ref
) != SYMBOL_REF
)
12547 /* System V adds '.' to the internal name, so skip them. */
12548 call_name
= XSTR (call_ref
, 0);
12549 if (*call_name
== '.')
12551 while (*call_name
== '.')
12554 node
= get_identifier (call_name
);
12555 call_ref
= gen_rtx_SYMBOL_REF (VOIDmode
, IDENTIFIER_POINTER (node
));
12558 return force_reg (Pmode
, call_ref
);
12562 #ifdef USING_ELFOS_H
12564 /* A C statement or statements to switch to the appropriate section
12565 for output of RTX in mode MODE. You can assume that RTX is some
12566 kind of constant in RTL. The argument MODE is redundant except in
12567 the case of a `const_int' rtx. Select the section by calling
12568 `text_section' or one of the alternatives for other sections.
12570 Do not define this macro if you put all constants in the read-only
12574 rs6000_elf_select_rtx_section (mode
, x
, align
)
12575 enum machine_mode mode
;
12577 unsigned HOST_WIDE_INT align
;
12579 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x
, mode
))
12582 default_elf_select_rtx_section (mode
, x
, align
);
12585 /* A C statement or statements to switch to the appropriate
12586 section for output of DECL. DECL is either a `VAR_DECL' node
12587 or a constant of some sort. RELOC indicates whether forming
12588 the initial value of DECL requires link-time relocations. */
12591 rs6000_elf_select_section (decl
, reloc
, align
)
12594 unsigned HOST_WIDE_INT align
;
12596 default_elf_select_section_1 (decl
, reloc
, align
,
12597 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
12600 /* A C statement to build up a unique section name, expressed as a
12601 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
12602 RELOC indicates whether the initial value of EXP requires
12603 link-time relocations. If you do not define this macro, GCC will use
12604 the symbol name prefixed by `.' as the section name. Note - this
12605 macro can now be called for uninitialized data items as well as
12606 initialized data and functions. */
12609 rs6000_elf_unique_section (decl
, reloc
)
12613 default_unique_section_1 (decl
, reloc
,
12614 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
12618 /* If we are referencing a function that is static or is known to be
12619 in this file, make the SYMBOL_REF special. We can use this to indicate
12620 that we can branch to this function without emitting a no-op after the
12621 call. For real AIX calling sequences, we also replace the
12622 function name with the real name (1 or 2 leading .'s), rather than
12623 the function descriptor name. This saves a lot of overriding code
12624 to read the prefixes. */
12627 rs6000_elf_encode_section_info (decl
, first
)
12634 if (TREE_CODE (decl
) == FUNCTION_DECL
)
12636 rtx sym_ref
= XEXP (DECL_RTL (decl
), 0);
12637 if ((*targetm
.binds_local_p
) (decl
))
12638 SYMBOL_REF_FLAG (sym_ref
) = 1;
12640 if (DEFAULT_ABI
== ABI_AIX
)
12642 size_t len1
= (DEFAULT_ABI
== ABI_AIX
) ? 1 : 2;
12643 size_t len2
= strlen (XSTR (sym_ref
, 0));
12644 char *str
= alloca (len1
+ len2
+ 1);
12647 memcpy (str
+ len1
, XSTR (sym_ref
, 0), len2
+ 1);
12649 XSTR (sym_ref
, 0) = ggc_alloc_string (str
, len1
+ len2
);
12652 else if (rs6000_sdata
!= SDATA_NONE
12653 && DEFAULT_ABI
== ABI_V4
12654 && TREE_CODE (decl
) == VAR_DECL
)
12656 rtx sym_ref
= XEXP (DECL_RTL (decl
), 0);
12657 int size
= int_size_in_bytes (TREE_TYPE (decl
));
12658 tree section_name
= DECL_SECTION_NAME (decl
);
12659 const char *name
= (char *)0;
12662 if ((*targetm
.binds_local_p
) (decl
))
12663 SYMBOL_REF_FLAG (sym_ref
) = 1;
12667 if (TREE_CODE (section_name
) == STRING_CST
)
12669 name
= TREE_STRING_POINTER (section_name
);
12670 len
= TREE_STRING_LENGTH (section_name
);
12677 ? ((len
== sizeof (".sdata") - 1
12678 && strcmp (name
, ".sdata") == 0)
12679 || (len
== sizeof (".sdata2") - 1
12680 && strcmp (name
, ".sdata2") == 0)
12681 || (len
== sizeof (".sbss") - 1
12682 && strcmp (name
, ".sbss") == 0)
12683 || (len
== sizeof (".sbss2") - 1
12684 && strcmp (name
, ".sbss2") == 0)
12685 || (len
== sizeof (".PPC.EMB.sdata0") - 1
12686 && strcmp (name
, ".PPC.EMB.sdata0") == 0)
12687 || (len
== sizeof (".PPC.EMB.sbss0") - 1
12688 && strcmp (name
, ".PPC.EMB.sbss0") == 0))
12689 : (size
> 0 && size
<= g_switch_value
))
12691 size_t len
= strlen (XSTR (sym_ref
, 0));
12692 char *str
= alloca (len
+ 2);
12695 memcpy (str
+ 1, XSTR (sym_ref
, 0), len
+ 1);
12696 XSTR (sym_ref
, 0) = ggc_alloc_string (str
, len
+ 1);
/* Strip any leading '*' or '@' markers that encode_section_info
   prepended to STR, returning a pointer to the bare name.  */

static const char *
rs6000_elf_strip_name_encoding (str)
     const char *str;
{
  while (*str == '*' || *str == '@')
    str++;
  return str;
}
12711 rs6000_elf_in_small_data_p (decl
)
12714 if (rs6000_sdata
== SDATA_NONE
)
12717 if (TREE_CODE (decl
) == VAR_DECL
&& DECL_SECTION_NAME (decl
))
12719 const char *section
= TREE_STRING_POINTER (DECL_SECTION_NAME (decl
));
12720 if (strcmp (section
, ".sdata") == 0
12721 || strcmp (section
, ".sdata2") == 0
12722 || strcmp (section
, ".sbss") == 0)
12727 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (decl
));
12730 && size
<= g_switch_value
12731 && (rs6000_sdata
!= SDATA_DATA
|| TREE_PUBLIC (decl
)))
12738 #endif /* USING_ELFOS_H */
12741 /* Return a REG that occurs in ADDR with coefficient 1.
12742 ADDR can be effectively incremented by incrementing REG.
12744 r0 is special and we must not select it as an address
12745 register by this routine since our caller will try to
12746 increment the returned register via an "la" instruction. */
12749 find_addr_reg (addr
)
12752 while (GET_CODE (addr
) == PLUS
)
12754 if (GET_CODE (XEXP (addr
, 0)) == REG
12755 && REGNO (XEXP (addr
, 0)) != 0)
12756 addr
= XEXP (addr
, 0);
12757 else if (GET_CODE (XEXP (addr
, 1)) == REG
12758 && REGNO (XEXP (addr
, 1)) != 0)
12759 addr
= XEXP (addr
, 1);
12760 else if (CONSTANT_P (XEXP (addr
, 0)))
12761 addr
= XEXP (addr
, 1);
12762 else if (CONSTANT_P (XEXP (addr
, 1)))
12763 addr
= XEXP (addr
, 0);
12767 if (GET_CODE (addr
) == REG
&& REGNO (addr
) != 0)
12773 rs6000_fatal_bad_address (op
)
12776 fatal_insn ("bad address", op
);
12782 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
12783 reference and a constant. */
12786 symbolic_operand (op
)
12789 switch (GET_CODE (op
))
12796 return (GET_CODE (op
) == SYMBOL_REF
||
12797 (GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
12798 || GET_CODE (XEXP (op
, 0)) == LABEL_REF
)
12799 && GET_CODE (XEXP (op
, 1)) == CONST_INT
);
12806 #ifdef RS6000_LONG_BRANCH
12808 static tree stub_list
= 0;
12810 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
12811 procedure calls to the linked list. */
12814 add_compiler_stub (label_name
, function_name
, line_number
)
12816 tree function_name
;
12819 tree stub
= build_tree_list (function_name
, label_name
);
12820 TREE_TYPE (stub
) = build_int_2 (line_number
, 0);
12821 TREE_CHAIN (stub
) = stub_list
;
12825 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
12826 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
12827 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
12829 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
12830 handling procedure calls from the linked list and initializes the
12834 output_compiler_stub ()
12837 char label_buf
[256];
12841 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
12843 fprintf (asm_out_file
,
12844 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub
)));
12846 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
12847 if (write_symbols
== DBX_DEBUG
|| write_symbols
== XCOFF_DEBUG
)
12848 fprintf (asm_out_file
, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub
));
12849 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
12851 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
))[0] == '*')
12853 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
))+1);
12856 label_buf
[0] = '_';
12857 strcpy (label_buf
+1,
12858 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
)));
12861 strcpy (tmp_buf
, "lis r12,hi16(");
12862 strcat (tmp_buf
, label_buf
);
12863 strcat (tmp_buf
, ")\n\tori r12,r12,lo16(");
12864 strcat (tmp_buf
, label_buf
);
12865 strcat (tmp_buf
, ")\n\tmtctr r12\n\tbctr");
12866 output_asm_insn (tmp_buf
, 0);
12868 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
12869 if (write_symbols
== DBX_DEBUG
|| write_symbols
== XCOFF_DEBUG
)
12870 fprintf(asm_out_file
, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub
));
12871 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
12877 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
12878 already there or not. */
12881 no_previous_def (function_name
)
12882 tree function_name
;
12885 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
12886 if (function_name
== STUB_FUNCTION_NAME (stub
))
12891 /* GET_PREV_LABEL gets the label name from the previous definition of
12895 get_prev_label (function_name
)
12896 tree function_name
;
12899 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
12900 if (function_name
== STUB_FUNCTION_NAME (stub
))
12901 return STUB_LABEL_NAME (stub
);
12905 /* INSN is either a function call or a millicode call. It may have an
12906 unconditional jump in its delay slot.
12908 CALL_DEST is the routine we are calling. */
12911 output_call (insn
, call_dest
, operand_number
)
12914 int operand_number
;
12916 static char buf
[256];
12917 if (GET_CODE (call_dest
) == SYMBOL_REF
&& TARGET_LONG_BRANCH
&& !flag_pic
)
12920 tree funname
= get_identifier (XSTR (call_dest
, 0));
12922 if (no_previous_def (funname
))
12924 int line_number
= 0;
12925 rtx label_rtx
= gen_label_rtx ();
12926 char *label_buf
, temp_buf
[256];
12927 ASM_GENERATE_INTERNAL_LABEL (temp_buf
, "L",
12928 CODE_LABEL_NUMBER (label_rtx
));
12929 label_buf
= temp_buf
[0] == '*' ? temp_buf
+ 1 : temp_buf
;
12930 labelname
= get_identifier (label_buf
);
12931 for (; insn
&& GET_CODE (insn
) != NOTE
; insn
= PREV_INSN (insn
));
12933 line_number
= NOTE_LINE_NUMBER (insn
);
12934 add_compiler_stub (labelname
, funname
, line_number
);
12937 labelname
= get_prev_label (funname
);
12939 sprintf (buf
, "jbsr %%z%d,%.246s",
12940 operand_number
, IDENTIFIER_POINTER (labelname
));
12945 sprintf (buf
, "bl %%z%d", operand_number
);
12950 #endif /* RS6000_LONG_BRANCH */
12952 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
12954 const char *const symbol_ = (SYMBOL); \
12955 char *buffer_ = (BUF); \
12956 if (symbol_[0] == '"') \
12958 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
12960 else if (name_needs_quotes(symbol_)) \
12962 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
12966 sprintf(buffer_, "L%d$%s", (N), symbol_); \
12971 /* Generate PIC and indirect symbol stubs. */
12974 machopic_output_stub (file
, symb
, stub
)
12976 const char *symb
, *stub
;
12978 unsigned int length
;
12979 char *symbol_name
, *lazy_ptr_name
;
12980 char *local_label_0
;
12981 static int label
= 0;
12983 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
12984 symb
= (*targetm
.strip_name_encoding
) (symb
);
12988 length
= strlen (symb
);
12989 symbol_name
= alloca (length
+ 32);
12990 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name
, symb
, length
);
12992 lazy_ptr_name
= alloca (length
+ 32);
12993 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name
, symb
, length
);
12995 local_label_0
= alloca (length
+ 32);
12996 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0
, symb
, length
, 0);
12999 machopic_picsymbol_stub1_section ();
13001 machopic_symbol_stub1_section ();
13002 fprintf (file
, "\t.align 2\n");
13004 fprintf (file
, "%s:\n", stub
);
13005 fprintf (file
, "\t.indirect_symbol %s\n", symbol_name
);
13009 fprintf (file
, "\tmflr r0\n");
13010 fprintf (file
, "\tbcl 20,31,%s\n", local_label_0
);
13011 fprintf (file
, "%s:\n\tmflr r11\n", local_label_0
);
13012 fprintf (file
, "\taddis r11,r11,ha16(%s-%s)\n",
13013 lazy_ptr_name
, local_label_0
);
13014 fprintf (file
, "\tmtlr r0\n");
13015 fprintf (file
, "\tlwzu r12,lo16(%s-%s)(r11)\n",
13016 lazy_ptr_name
, local_label_0
);
13017 fprintf (file
, "\tmtctr r12\n");
13018 fprintf (file
, "\tbctr\n");
13021 fprintf (file
, "non-pure not supported\n");
13023 machopic_lazy_symbol_ptr_section ();
13024 fprintf (file
, "%s:\n", lazy_ptr_name
);
13025 fprintf (file
, "\t.indirect_symbol %s\n", symbol_name
);
13026 fprintf (file
, "\t.long dyld_stub_binding_helper\n");
13029 /* Legitimize PIC addresses. If the address is already
13030 position-independent, we return ORIG. Newly generated
13031 position-independent addresses go into a reg. This is REG if non
13032 zero, otherwise we allocate register(s) as necessary. */
13034 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
13037 rs6000_machopic_legitimize_pic_address (orig
, mode
, reg
)
13039 enum machine_mode mode
;
13044 if (reg
== NULL
&& ! reload_in_progress
&& ! reload_completed
)
13045 reg
= gen_reg_rtx (Pmode
);
13047 if (GET_CODE (orig
) == CONST
)
13049 if (GET_CODE (XEXP (orig
, 0)) == PLUS
13050 && XEXP (XEXP (orig
, 0), 0) == pic_offset_table_rtx
)
13053 if (GET_CODE (XEXP (orig
, 0)) == PLUS
)
13056 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig
, 0), 0),
13059 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig
, 0), 1),
13065 if (GET_CODE (offset
) == CONST_INT
)
13067 if (SMALL_INT (offset
))
13068 return plus_constant (base
, INTVAL (offset
));
13069 else if (! reload_in_progress
&& ! reload_completed
)
13070 offset
= force_reg (Pmode
, offset
);
13073 rtx mem
= force_const_mem (Pmode
, orig
);
13074 return machopic_legitimize_pic_address (mem
, Pmode
, reg
);
13077 return gen_rtx (PLUS
, Pmode
, base
, offset
);
13080 /* Fall back on generic machopic code. */
13081 return machopic_legitimize_pic_address (orig
, mode
, reg
);
13084 /* This is just a placeholder to make linking work without having to
13085 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
13086 ever needed for Darwin (not too likely!) this would have to get a
13087 real definition. */
13094 #endif /* TARGET_MACHO */
13097 static unsigned int
13098 rs6000_elf_section_type_flags (decl
, name
, reloc
)
13104 = default_section_type_flags_1 (decl
, name
, reloc
,
13105 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
13107 if (TARGET_RELOCATABLE
)
13108 flags
|= SECTION_WRITE
;
13113 /* Record an element in the table of global constructors. SYMBOL is
13114 a SYMBOL_REF of the function to be called; PRIORITY is a number
13115 between 0 and MAX_INIT_PRIORITY.
13117 This differs from default_named_section_asm_out_constructor in
13118 that we have special handling for -mrelocatable. */
13121 rs6000_elf_asm_out_constructor (symbol
, priority
)
13125 const char *section
= ".ctors";
13128 if (priority
!= DEFAULT_INIT_PRIORITY
)
13130 sprintf (buf
, ".ctors.%.5u",
13131 /* Invert the numbering so the linker puts us in the proper
13132 order; constructors are run from right to left, and the
13133 linker sorts in increasing order. */
13134 MAX_INIT_PRIORITY
- priority
);
13138 named_section_flags (section
, SECTION_WRITE
);
13139 assemble_align (POINTER_SIZE
);
13141 if (TARGET_RELOCATABLE
)
13143 fputs ("\t.long (", asm_out_file
);
13144 output_addr_const (asm_out_file
, symbol
);
13145 fputs (")@fixup\n", asm_out_file
);
13148 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
13152 rs6000_elf_asm_out_destructor (symbol
, priority
)
13156 const char *section
= ".dtors";
13159 if (priority
!= DEFAULT_INIT_PRIORITY
)
13161 sprintf (buf
, ".dtors.%.5u",
13162 /* Invert the numbering so the linker puts us in the proper
13163 order; constructors are run from right to left, and the
13164 linker sorts in increasing order. */
13165 MAX_INIT_PRIORITY
- priority
);
13169 named_section_flags (section
, SECTION_WRITE
);
13170 assemble_align (POINTER_SIZE
);
13172 if (TARGET_RELOCATABLE
)
13174 fputs ("\t.long (", asm_out_file
);
13175 output_addr_const (asm_out_file
, symbol
);
13176 fputs (")@fixup\n", asm_out_file
);
13179 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
13185 rs6000_xcoff_asm_globalize_label (stream
, name
)
13189 fputs (GLOBAL_ASM_OP
, stream
);
13190 RS6000_OUTPUT_BASENAME (stream
, name
);
13191 putc ('\n', stream
);
13195 rs6000_xcoff_asm_named_section (name
, flags
)
13197 unsigned int flags
;
13200 static const char * const suffix
[3] = { "PR", "RO", "RW" };
13202 if (flags
& SECTION_CODE
)
13204 else if (flags
& SECTION_WRITE
)
13209 fprintf (asm_out_file
, "\t.csect %s%s[%s],%u\n",
13210 (flags
& SECTION_CODE
) ? "." : "",
13211 name
, suffix
[smclass
], flags
& SECTION_ENTSIZE
);
13215 rs6000_xcoff_select_section (decl
, reloc
, align
)
13218 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
;
13220 if (decl_readonly_section_1 (decl
, reloc
, 1))
13222 if (TREE_PUBLIC (decl
))
13223 read_only_data_section ();
13225 read_only_private_data_section ();
13229 if (TREE_PUBLIC (decl
))
13232 private_data_section ();
13237 rs6000_xcoff_unique_section (decl
, reloc
)
13239 int reloc ATTRIBUTE_UNUSED
;
13243 /* Use select_section for private and uninitialized data. */
13244 if (!TREE_PUBLIC (decl
)
13245 || DECL_COMMON (decl
)
13246 || DECL_INITIAL (decl
) == NULL_TREE
13247 || DECL_INITIAL (decl
) == error_mark_node
13248 || (flag_zero_initialized_in_bss
13249 && initializer_zerop (DECL_INITIAL (decl
))))
13252 name
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
));
13253 name
= (*targetm
.strip_name_encoding
) (name
);
13254 DECL_SECTION_NAME (decl
) = build_string (strlen (name
), name
);
13257 /* Select section for constant in constant pool.
13259 On RS/6000, all constants are in the private read-only data area.
13260 However, if this is being placed in the TOC it must be output as a
13264 rs6000_xcoff_select_rtx_section (mode
, x
, align
)
13265 enum machine_mode mode
;
13267 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
;
13269 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x
, mode
))
13272 read_only_private_data_section ();
13275 /* Remove any trailing [DS] or the like from the symbol name. */
13277 static const char *
13278 rs6000_xcoff_strip_name_encoding (name
)
13284 len
= strlen (name
);
13285 if (name
[len
- 1] == ']')
13286 return ggc_alloc_string (name
, len
- 4);
13291 /* Section attributes. AIX is always PIC. */
13293 static unsigned int
13294 rs6000_xcoff_section_type_flags (decl
, name
, reloc
)
13299 unsigned int align
;
13300 unsigned int flags
= default_section_type_flags_1 (decl
, name
, reloc
, 1);
13302 /* Align to at least UNIT size. */
13303 if (flags
& SECTION_CODE
)
13304 align
= MIN_UNITS_PER_WORD
;
13306 /* Increase alignment of large objects if not already stricter. */
13307 align
= MAX ((DECL_ALIGN (decl
) / BITS_PER_UNIT
),
13308 int_size_in_bytes (TREE_TYPE (decl
)) > MIN_UNITS_PER_WORD
13309 ? UNITS_PER_FP_WORD
: MIN_UNITS_PER_WORD
);
13311 return flags
| (exact_log2 (align
) & SECTION_ENTSIZE
);
13314 #endif /* TARGET_XCOFF */
13316 /* Note that this is also used for PPC64 Linux. */
13319 rs6000_xcoff_encode_section_info (decl
, first
)
13321 int first ATTRIBUTE_UNUSED
;
13323 if (TREE_CODE (decl
) == FUNCTION_DECL
13324 && (*targetm
.binds_local_p
) (decl
))
13325 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl
), 0)) = 1;
13328 /* Cross-module name binding. For AIX and PPC64 Linux, which always are
13329 PIC, use private copy of flag_pic. */
13332 rs6000_binds_local_p (decl
)
13335 return default_binds_local_p_1 (decl
, flag_pic
|| rs6000_flag_pic
);
13338 /* Compute a (partial) cost for rtx X. Return true if the complete
13339 cost has been computed, and false if subexpressions should be
13340 scanned. In either case, *TOTAL contains the cost result. */
13343 rs6000_rtx_costs (x
, code
, outer_code
, total
)
13345 int code
, outer_code ATTRIBUTE_UNUSED
;
13350 /* On the RS/6000, if it is valid in the insn, it is free.
13351 So this always returns 0. */
13362 *total
= ((GET_CODE (XEXP (x
, 1)) == CONST_INT
13363 && ((unsigned HOST_WIDE_INT
) (INTVAL (XEXP (x
, 1))
13364 + 0x8000) >= 0x10000)
13365 && ((INTVAL (XEXP (x
, 1)) & 0xffff) != 0))
13366 ? COSTS_N_INSNS (2)
13367 : COSTS_N_INSNS (1));
13373 *total
= ((GET_CODE (XEXP (x
, 1)) == CONST_INT
13374 && (INTVAL (XEXP (x
, 1)) & (~ (HOST_WIDE_INT
) 0xffff)) != 0
13375 && ((INTVAL (XEXP (x
, 1)) & 0xffff) != 0))
13376 ? COSTS_N_INSNS (2)
13377 : COSTS_N_INSNS (1));
13383 *total
= COSTS_N_INSNS (2);
13386 switch (rs6000_cpu
)
13388 case PROCESSOR_RIOS1
:
13389 case PROCESSOR_PPC405
:
13390 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
13391 ? COSTS_N_INSNS (5)
13392 : (INTVAL (XEXP (x
, 1)) >= -256
13393 && INTVAL (XEXP (x
, 1)) <= 255)
13394 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
13397 case PROCESSOR_RS64A
:
13398 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
13399 ? GET_MODE (XEXP (x
, 1)) != DImode
13400 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
13401 : (INTVAL (XEXP (x
, 1)) >= -256
13402 && INTVAL (XEXP (x
, 1)) <= 255)
13403 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
13406 case PROCESSOR_RIOS2
:
13407 case PROCESSOR_MPCCORE
:
13408 case PROCESSOR_PPC604e
:
13409 *total
= COSTS_N_INSNS (2);
13412 case PROCESSOR_PPC601
:
13413 *total
= COSTS_N_INSNS (5);
13416 case PROCESSOR_PPC603
:
13417 case PROCESSOR_PPC7400
:
13418 case PROCESSOR_PPC750
:
13419 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
13420 ? COSTS_N_INSNS (5)
13421 : (INTVAL (XEXP (x
, 1)) >= -256
13422 && INTVAL (XEXP (x
, 1)) <= 255)
13423 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
13426 case PROCESSOR_PPC7450
:
13427 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
13428 ? COSTS_N_INSNS (4)
13429 : COSTS_N_INSNS (3));
13432 case PROCESSOR_PPC403
:
13433 case PROCESSOR_PPC604
:
13434 case PROCESSOR_PPC8540
:
13435 *total
= COSTS_N_INSNS (4);
13438 case PROCESSOR_PPC620
:
13439 case PROCESSOR_PPC630
:
13440 case PROCESSOR_POWER4
:
13441 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
13442 ? GET_MODE (XEXP (x
, 1)) != DImode
13443 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
13444 : (INTVAL (XEXP (x
, 1)) >= -256
13445 && INTVAL (XEXP (x
, 1)) <= 255)
13446 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
13455 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
13456 && exact_log2 (INTVAL (XEXP (x
, 1))) >= 0)
13458 *total
= COSTS_N_INSNS (2);
13465 switch (rs6000_cpu
)
13467 case PROCESSOR_RIOS1
:
13468 *total
= COSTS_N_INSNS (19);
13471 case PROCESSOR_RIOS2
:
13472 *total
= COSTS_N_INSNS (13);
13475 case PROCESSOR_RS64A
:
13476 *total
= (GET_MODE (XEXP (x
, 1)) != DImode
13477 ? COSTS_N_INSNS (65)
13478 : COSTS_N_INSNS (67));
13481 case PROCESSOR_MPCCORE
:
13482 *total
= COSTS_N_INSNS (6);
13485 case PROCESSOR_PPC403
:
13486 *total
= COSTS_N_INSNS (33);
13489 case PROCESSOR_PPC405
:
13490 *total
= COSTS_N_INSNS (35);
13493 case PROCESSOR_PPC601
:
13494 *total
= COSTS_N_INSNS (36);
13497 case PROCESSOR_PPC603
:
13498 *total
= COSTS_N_INSNS (37);
13501 case PROCESSOR_PPC604
:
13502 case PROCESSOR_PPC604e
:
13503 *total
= COSTS_N_INSNS (20);
13506 case PROCESSOR_PPC620
:
13507 case PROCESSOR_PPC630
:
13508 case PROCESSOR_POWER4
:
13509 *total
= (GET_MODE (XEXP (x
, 1)) != DImode
13510 ? COSTS_N_INSNS (21)
13511 : COSTS_N_INSNS (37));
13514 case PROCESSOR_PPC750
:
13515 case PROCESSOR_PPC8540
:
13516 case PROCESSOR_PPC7400
:
13517 *total
= COSTS_N_INSNS (19);
13520 case PROCESSOR_PPC7450
:
13521 *total
= COSTS_N_INSNS (23);
13529 *total
= COSTS_N_INSNS (4);
13533 /* MEM should be slightly more expensive than (plus (reg) (const)) */
13542 /* A C expression returning the cost of moving data from a register of class
13543 CLASS1 to one of CLASS2. */
13546 rs6000_register_move_cost (mode
, from
, to
)
13547 enum machine_mode mode
;
13548 enum reg_class from
, to
;
13550 /* Moves from/to GENERAL_REGS. */
13551 if (reg_classes_intersect_p (to
, GENERAL_REGS
)
13552 || reg_classes_intersect_p (from
, GENERAL_REGS
))
13554 if (! reg_classes_intersect_p (to
, GENERAL_REGS
))
13557 if (from
== FLOAT_REGS
|| from
== ALTIVEC_REGS
)
13558 return (rs6000_memory_move_cost (mode
, from
, 0)
13559 + rs6000_memory_move_cost (mode
, GENERAL_REGS
, 0));
13561 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift...*/
13562 else if (from
== CR_REGS
)
13566 /* A move will cost one instruction per GPR moved. */
13567 return 2 * HARD_REGNO_NREGS (0, mode
);
13570 /* Moving between two similar registers is just one instruction. */
13571 else if (reg_classes_intersect_p (to
, from
))
13572 return mode
== TFmode
? 4 : 2;
13574 /* Everything else has to go through GENERAL_REGS. */
13576 return (rs6000_register_move_cost (mode
, GENERAL_REGS
, to
)
13577 + rs6000_register_move_cost (mode
, from
, GENERAL_REGS
));
13580 /* A C expressions returning the cost of moving data of MODE from a register to
13584 rs6000_memory_move_cost (mode
, class, in
)
13585 enum machine_mode mode
;
13586 enum reg_class
class;
13587 int in ATTRIBUTE_UNUSED
;
13589 if (reg_classes_intersect_p (class, GENERAL_REGS
))
13590 return 4 * HARD_REGNO_NREGS (0, mode
);
13591 else if (reg_classes_intersect_p (class, FLOAT_REGS
))
13592 return 4 * HARD_REGNO_NREGS (32, mode
);
13593 else if (reg_classes_intersect_p (class, ALTIVEC_REGS
))
13594 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO
, mode
);
13596 return 4 + rs6000_register_move_cost (mode
, class, GENERAL_REGS
);
13599 /* Return true if TYPE is of type __ev64_opaque__. */
13602 is_ev64_opaque_type (type
)
13606 && TREE_CODE (type
) == VECTOR_TYPE
13607 && TYPE_NAME (type
)
13608 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
13609 && DECL_NAME (TYPE_NAME (type
))
13610 && strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type
))),
13611 "__ev64_opaque__") == 0);
13614 #include "gt-rs6000.h"