1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
54 #ifndef TARGET_NO_PROTOTYPE
55 #define TARGET_NO_PROTOTYPE 0
58 #define min(A,B) ((A) < (B) ? (A) : (B))
59 #define max(A,B) ((A) > (B) ? (A) : (B))
63 enum processor_type rs6000_cpu
;
64 struct rs6000_cpu_select rs6000_select
[3] =
66 /* switch name, tune arch */
67 { (const char *)0, "--with-cpu=", 1, 1 },
68 { (const char *)0, "-mcpu=", 1, 1 },
69 { (const char *)0, "-mtune=", 1, 0 },
72 /* Size of long double */
73 const char *rs6000_long_double_size_string
;
74 int rs6000_long_double_type_size
;
76 /* Whether -mabi=altivec has appeared */
77 int rs6000_altivec_abi
;
79 /* Whether VRSAVE instructions should be generated. */
80 int rs6000_altivec_vrsave
;
82 /* String from -mvrsave= option. */
83 const char *rs6000_altivec_vrsave_string
;
85 /* Nonzero if we want SPE ABI extensions. */
88 /* Whether isel instructions should be generated. */
91 /* Nonzero if we have FPRs. */
94 /* String from -misel=. */
95 const char *rs6000_isel_string
;
97 /* Set to nonzero once AIX common-mode calls have been defined. */
98 static GTY(()) int common_mode_defined
;
100 /* Private copy of original value of flag_pic for ABI_AIX. */
101 static int rs6000_flag_pic
;
103 /* Save information from a "cmpxx" operation until the branch or scc is
105 rtx rs6000_compare_op0
, rs6000_compare_op1
;
106 int rs6000_compare_fp_p
;
108 /* Label number of label created for -mrelocatable, to call to so we can
109 get the address of the GOT section */
110 int rs6000_pic_labelno
;
113 /* Which abi to adhere to */
114 const char *rs6000_abi_name
= RS6000_ABI_NAME
;
116 /* Semantics of the small data area */
117 enum rs6000_sdata_type rs6000_sdata
= SDATA_DATA
;
119 /* Which small data model to use */
120 const char *rs6000_sdata_name
= (char *)0;
122 /* Counter for labels which are to be placed in .fixup. */
123 int fixuplabelno
= 0;
126 /* ABI enumeration available for subtarget to use. */
127 enum rs6000_abi rs6000_current_abi
;
129 /* ABI string from -mabi= option. */
130 const char *rs6000_abi_string
;
133 const char *rs6000_debug_name
;
134 int rs6000_debug_stack
; /* debug stack applications */
135 int rs6000_debug_arg
; /* debug argument handling */
137 const char *rs6000_traceback_name
;
139 traceback_default
= 0,
145 /* Flag to say the TOC is initialized */
147 char toc_label_name
[10];
149 /* Alias set for saves and restores from the rs6000 stack. */
150 static int rs6000_sr_alias_set
;
152 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
153 The only place that looks at this is rs6000_set_default_type_attributes;
154 everywhere else should rely on the presence or absence of a longcall
155 attribute on the function declaration. */
156 int rs6000_default_long_calls
;
157 const char *rs6000_longcall_switch
;
159 struct builtin_description
161 /* mask is not const because we're going to alter it below. This
162 nonsense will go away when we rewrite the -march infrastructure
163 to give us more target flag bits. */
165 const enum insn_code icode
;
166 const char *const name
;
167 const enum rs6000_builtins code
;
170 static bool rs6000_function_ok_for_sibcall
PARAMS ((tree
, tree
));
171 static int num_insns_constant_wide
PARAMS ((HOST_WIDE_INT
));
172 static void validate_condition_mode
173 PARAMS ((enum rtx_code
, enum machine_mode
));
174 static rtx rs6000_generate_compare
PARAMS ((enum rtx_code
));
175 static void rs6000_maybe_dead
PARAMS ((rtx
));
176 static void rs6000_emit_stack_tie
PARAMS ((void));
177 static void rs6000_frame_related
PARAMS ((rtx
, rtx
, HOST_WIDE_INT
, rtx
, rtx
));
178 static void emit_frame_save
PARAMS ((rtx
, rtx
, enum machine_mode
,
179 unsigned int, int, int));
180 static rtx gen_frame_mem_offset
PARAMS ((enum machine_mode
, rtx
, int));
181 static void rs6000_emit_allocate_stack
PARAMS ((HOST_WIDE_INT
, int));
182 static unsigned rs6000_hash_constant
PARAMS ((rtx
));
183 static unsigned toc_hash_function
PARAMS ((const void *));
184 static int toc_hash_eq
PARAMS ((const void *, const void *));
185 static int constant_pool_expr_1
PARAMS ((rtx
, int *, int *));
186 static struct machine_function
* rs6000_init_machine_status
PARAMS ((void));
187 static bool rs6000_assemble_integer
PARAMS ((rtx
, unsigned int, int));
188 #ifdef HAVE_GAS_HIDDEN
189 static void rs6000_assemble_visibility
PARAMS ((tree
, int));
191 static int rs6000_ra_ever_killed
PARAMS ((void));
192 static tree rs6000_handle_longcall_attribute
PARAMS ((tree
*, tree
, tree
, int, bool *));
193 const struct attribute_spec rs6000_attribute_table
[];
194 static void rs6000_set_default_type_attributes
PARAMS ((tree
));
195 static void rs6000_output_function_prologue
PARAMS ((FILE *, HOST_WIDE_INT
));
196 static void rs6000_output_function_epilogue
PARAMS ((FILE *, HOST_WIDE_INT
));
197 static void rs6000_output_mi_thunk
PARAMS ((FILE *, tree
, HOST_WIDE_INT
,
198 HOST_WIDE_INT
, tree
));
199 static rtx rs6000_emit_set_long_const
PARAMS ((rtx
,
200 HOST_WIDE_INT
, HOST_WIDE_INT
));
202 static unsigned int rs6000_elf_section_type_flags
PARAMS ((tree
, const char *,
204 static void rs6000_elf_asm_out_constructor
PARAMS ((rtx
, int));
205 static void rs6000_elf_asm_out_destructor
PARAMS ((rtx
, int));
206 static void rs6000_elf_select_section
PARAMS ((tree
, int,
207 unsigned HOST_WIDE_INT
));
208 static void rs6000_elf_unique_section
PARAMS ((tree
, int));
209 static void rs6000_elf_select_rtx_section
PARAMS ((enum machine_mode
, rtx
,
210 unsigned HOST_WIDE_INT
));
211 static void rs6000_elf_encode_section_info
PARAMS ((tree
, int))
213 static const char *rs6000_elf_strip_name_encoding
PARAMS ((const char *));
214 static bool rs6000_elf_in_small_data_p
PARAMS ((tree
));
217 static void rs6000_xcoff_asm_globalize_label
PARAMS ((FILE *, const char *));
218 static void rs6000_xcoff_asm_named_section
PARAMS ((const char *, unsigned int));
219 static void rs6000_xcoff_select_section
PARAMS ((tree
, int,
220 unsigned HOST_WIDE_INT
));
221 static void rs6000_xcoff_unique_section
PARAMS ((tree
, int));
222 static void rs6000_xcoff_select_rtx_section
PARAMS ((enum machine_mode
, rtx
,
223 unsigned HOST_WIDE_INT
));
224 static const char * rs6000_xcoff_strip_name_encoding
PARAMS ((const char *));
225 static unsigned int rs6000_xcoff_section_type_flags
PARAMS ((tree
, const char *, int));
227 static void rs6000_xcoff_encode_section_info
PARAMS ((tree
, int))
229 static bool rs6000_binds_local_p
PARAMS ((tree
));
230 static int rs6000_adjust_cost
PARAMS ((rtx
, rtx
, rtx
, int));
231 static int rs6000_adjust_priority
PARAMS ((rtx
, int));
232 static int rs6000_issue_rate
PARAMS ((void));
234 static void rs6000_init_builtins
PARAMS ((void));
235 static rtx rs6000_expand_unop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
236 static rtx rs6000_expand_binop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
237 static rtx rs6000_expand_ternop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
238 static rtx rs6000_expand_builtin
PARAMS ((tree
, rtx
, rtx
, enum machine_mode
, int));
239 static void altivec_init_builtins
PARAMS ((void));
240 static void rs6000_common_init_builtins
PARAMS ((void));
242 static void enable_mask_for_builtins
PARAMS ((struct builtin_description
*,
243 int, enum rs6000_builtins
,
244 enum rs6000_builtins
));
245 static void spe_init_builtins
PARAMS ((void));
246 static rtx spe_expand_builtin
PARAMS ((tree
, rtx
, bool *));
247 static rtx spe_expand_predicate_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
248 static rtx spe_expand_evsel_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
249 static int rs6000_emit_int_cmove
PARAMS ((rtx
, rtx
, rtx
, rtx
));
251 static rtx altivec_expand_builtin
PARAMS ((tree
, rtx
, bool *));
252 static rtx altivec_expand_ld_builtin
PARAMS ((tree
, rtx
, bool *));
253 static rtx altivec_expand_st_builtin
PARAMS ((tree
, rtx
, bool *));
254 static rtx altivec_expand_dst_builtin
PARAMS ((tree
, rtx
, bool *));
255 static rtx altivec_expand_abs_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
256 static rtx altivec_expand_predicate_builtin
PARAMS ((enum insn_code
, const char *, tree
, rtx
));
257 static rtx altivec_expand_stv_builtin
PARAMS ((enum insn_code
, tree
));
258 static void rs6000_parse_abi_options
PARAMS ((void));
259 static void rs6000_parse_vrsave_option
PARAMS ((void));
260 static void rs6000_parse_isel_option
PARAMS ((void));
261 static int first_altivec_reg_to_save
PARAMS ((void));
262 static unsigned int compute_vrsave_mask
PARAMS ((void));
263 static void is_altivec_return_reg
PARAMS ((rtx
, void *));
264 static rtx generate_set_vrsave
PARAMS ((rtx
, rs6000_stack_t
*, int));
265 static void altivec_frame_fixup
PARAMS ((rtx
, rtx
, HOST_WIDE_INT
));
266 static int easy_vector_constant
PARAMS ((rtx
));
268 /* Hash table stuff for keeping track of TOC entries. */
270 struct toc_hash_struct
GTY(())
272 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
273 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
275 enum machine_mode key_mode
;
279 static GTY ((param_is (struct toc_hash_struct
))) htab_t toc_hash_table
;
281 /* Default register names. */
282 char rs6000_reg_names
[][8] =
284 "0", "1", "2", "3", "4", "5", "6", "7",
285 "8", "9", "10", "11", "12", "13", "14", "15",
286 "16", "17", "18", "19", "20", "21", "22", "23",
287 "24", "25", "26", "27", "28", "29", "30", "31",
288 "0", "1", "2", "3", "4", "5", "6", "7",
289 "8", "9", "10", "11", "12", "13", "14", "15",
290 "16", "17", "18", "19", "20", "21", "22", "23",
291 "24", "25", "26", "27", "28", "29", "30", "31",
292 "mq", "lr", "ctr","ap",
293 "0", "1", "2", "3", "4", "5", "6", "7",
295 /* AltiVec registers. */
296 "0", "1", "2", "3", "4", "5", "6", "7",
297 "8", "9", "10", "11", "12", "13", "14", "15",
298 "16", "17", "18", "19", "20", "21", "22", "23",
299 "24", "25", "26", "27", "28", "29", "30", "31",
305 #ifdef TARGET_REGNAMES
306 static const char alt_reg_names
[][8] =
308 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
309 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
310 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
311 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
312 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
313 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
314 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
315 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
316 "mq", "lr", "ctr", "ap",
317 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
319 /* AltiVec registers. */
320 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
321 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
322 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
323 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
330 #ifndef MASK_STRICT_ALIGN
331 #define MASK_STRICT_ALIGN 0
334 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
335 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
337 /* Initialize the GCC target structure. */
338 #undef TARGET_ATTRIBUTE_TABLE
339 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
340 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
341 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
343 #undef TARGET_ASM_ALIGNED_DI_OP
344 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
346 /* Default unaligned ops are only provided for ELF. Find the ops needed
347 for non-ELF systems. */
348 #ifndef OBJECT_FORMAT_ELF
350 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
352 #undef TARGET_ASM_UNALIGNED_HI_OP
353 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
354 #undef TARGET_ASM_UNALIGNED_SI_OP
355 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
356 #undef TARGET_ASM_UNALIGNED_DI_OP
357 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
360 #undef TARGET_ASM_UNALIGNED_HI_OP
361 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
362 #undef TARGET_ASM_UNALIGNED_SI_OP
363 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
367 /* This hook deals with fixups for relocatable code and DI-mode objects
369 #undef TARGET_ASM_INTEGER
370 #define TARGET_ASM_INTEGER rs6000_assemble_integer
372 #ifdef HAVE_GAS_HIDDEN
373 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
374 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
377 #undef TARGET_ASM_FUNCTION_PROLOGUE
378 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
379 #undef TARGET_ASM_FUNCTION_EPILOGUE
380 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
382 #undef TARGET_SCHED_ISSUE_RATE
383 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
384 #undef TARGET_SCHED_ADJUST_COST
385 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
386 #undef TARGET_SCHED_ADJUST_PRIORITY
387 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
389 #undef TARGET_INIT_BUILTINS
390 #define TARGET_INIT_BUILTINS rs6000_init_builtins
392 #undef TARGET_EXPAND_BUILTIN
393 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
395 #undef TARGET_BINDS_LOCAL_P
396 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
398 #undef TARGET_ASM_OUTPUT_MI_THUNK
399 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
401 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
402 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
404 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
405 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
407 struct gcc_target targetm
= TARGET_INITIALIZER
;
409 /* Override command line options. Mostly we process the processor
410 type and sometimes adjust other TARGET_ options. */
413 rs6000_override_options (default_cpu
)
414 const char *default_cpu
;
417 struct rs6000_cpu_select
*ptr
;
419 /* Simplify the entries below by making a mask for any POWER
420 variant and any PowerPC variant. */
422 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
423 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
424 | MASK_PPC_GFXOPT | MASK_POWERPC64)
425 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
429 const char *const name
; /* Canonical processor name. */
430 const enum processor_type processor
; /* Processor type enum value. */
431 const int target_enable
; /* Target flags to enable. */
432 const int target_disable
; /* Target flags to disable. */
433 } const processor_target_table
[]
434 = {{"common", PROCESSOR_COMMON
, MASK_NEW_MNEMONICS
,
435 POWER_MASKS
| POWERPC_MASKS
},
436 {"power", PROCESSOR_POWER
,
437 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
438 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
439 {"power2", PROCESSOR_POWER
,
440 MASK_POWER
| MASK_POWER2
| MASK_MULTIPLE
| MASK_STRING
,
441 POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
442 {"power3", PROCESSOR_PPC630
,
443 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
444 POWER_MASKS
| MASK_PPC_GPOPT
},
445 {"power4", PROCESSOR_POWER4
,
446 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
447 POWER_MASKS
| MASK_PPC_GPOPT
},
448 {"powerpc", PROCESSOR_POWERPC
,
449 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
450 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
451 {"powerpc64", PROCESSOR_POWERPC64
,
452 MASK_POWERPC
| MASK_POWERPC64
| MASK_NEW_MNEMONICS
,
453 POWER_MASKS
| POWERPC_OPT_MASKS
},
454 {"rios", PROCESSOR_RIOS1
,
455 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
456 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
457 {"rios1", PROCESSOR_RIOS1
,
458 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
459 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
460 {"rsc", PROCESSOR_PPC601
,
461 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
462 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
463 {"rsc1", PROCESSOR_PPC601
,
464 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
465 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
466 {"rios2", PROCESSOR_RIOS2
,
467 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
| MASK_POWER2
,
468 POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
469 {"rs64a", PROCESSOR_RS64A
,
470 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
471 POWER_MASKS
| POWERPC_OPT_MASKS
},
472 {"401", PROCESSOR_PPC403
,
473 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
474 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
475 {"403", PROCESSOR_PPC403
,
476 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
| MASK_STRICT_ALIGN
,
477 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
478 {"405", PROCESSOR_PPC405
,
479 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
480 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
481 {"405f", PROCESSOR_PPC405
,
482 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
483 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
484 {"505", PROCESSOR_MPCCORE
,
485 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
486 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
487 {"601", PROCESSOR_PPC601
,
488 MASK_POWER
| MASK_POWERPC
| MASK_NEW_MNEMONICS
| MASK_MULTIPLE
| MASK_STRING
,
489 MASK_POWER2
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
490 {"602", PROCESSOR_PPC603
,
491 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
492 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
493 {"603", PROCESSOR_PPC603
,
494 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
495 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
496 {"603e", PROCESSOR_PPC603
,
497 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
498 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
499 {"ec603e", PROCESSOR_PPC603
,
500 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
501 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
502 {"604", PROCESSOR_PPC604
,
503 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
504 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
505 {"604e", PROCESSOR_PPC604e
,
506 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
507 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
508 {"620", PROCESSOR_PPC620
,
509 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
510 POWER_MASKS
| MASK_PPC_GPOPT
},
511 {"630", PROCESSOR_PPC630
,
512 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
513 POWER_MASKS
| MASK_PPC_GPOPT
},
514 {"740", PROCESSOR_PPC750
,
515 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
516 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
517 {"750", PROCESSOR_PPC750
,
518 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
519 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
520 {"7400", PROCESSOR_PPC7400
,
521 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
522 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
523 {"7450", PROCESSOR_PPC7450
,
524 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
525 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
526 {"8540", PROCESSOR_PPC8540
,
527 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
528 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
529 {"801", PROCESSOR_MPCCORE
,
530 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
531 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
532 {"821", PROCESSOR_MPCCORE
,
533 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
534 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
535 {"823", PROCESSOR_MPCCORE
,
536 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
537 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
538 {"860", PROCESSOR_MPCCORE
,
539 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
540 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
}};
542 const size_t ptt_size
= ARRAY_SIZE (processor_target_table
);
544 /* Save current -mmultiple/-mno-multiple status. */
545 int multiple
= TARGET_MULTIPLE
;
546 /* Save current -mstring/-mno-string status. */
547 int string
= TARGET_STRING
;
549 /* Identify the processor type. */
550 rs6000_select
[0].string
= default_cpu
;
551 rs6000_cpu
= TARGET_POWERPC64
? PROCESSOR_DEFAULT64
: PROCESSOR_DEFAULT
;
553 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
555 ptr
= &rs6000_select
[i
];
556 if (ptr
->string
!= (char *)0 && ptr
->string
[0] != '\0')
558 for (j
= 0; j
< ptt_size
; j
++)
559 if (! strcmp (ptr
->string
, processor_target_table
[j
].name
))
562 rs6000_cpu
= processor_target_table
[j
].processor
;
566 target_flags
|= processor_target_table
[j
].target_enable
;
567 target_flags
&= ~processor_target_table
[j
].target_disable
;
573 error ("bad value (%s) for %s switch", ptr
->string
, ptr
->name
);
577 if (rs6000_cpu
== PROCESSOR_PPC8540
)
580 /* If we are optimizing big endian systems for space, use the load/store
581 multiple and string instructions. */
582 if (BYTES_BIG_ENDIAN
&& optimize_size
)
583 target_flags
|= MASK_MULTIPLE
| MASK_STRING
;
585 /* If -mmultiple or -mno-multiple was explicitly used, don't
586 override with the processor default */
587 if ((target_flags_explicit
& MASK_MULTIPLE
) != 0)
588 target_flags
= (target_flags
& ~MASK_MULTIPLE
) | multiple
;
590 /* If -mstring or -mno-string was explicitly used, don't override
591 with the processor default. */
592 if ((target_flags_explicit
& MASK_STRING
) != 0)
593 target_flags
= (target_flags
& ~MASK_STRING
) | string
;
595 /* Don't allow -mmultiple or -mstring on little endian systems
596 unless the cpu is a 750, because the hardware doesn't support the
597 instructions used in little endian mode, and causes an alignment
598 trap. The 750 does not cause an alignment trap (except when the
599 target is unaligned). */
601 if (!BYTES_BIG_ENDIAN
&& rs6000_cpu
!= PROCESSOR_PPC750
)
605 target_flags
&= ~MASK_MULTIPLE
;
606 if ((target_flags_explicit
& MASK_MULTIPLE
) != 0)
607 warning ("-mmultiple is not supported on little endian systems");
612 target_flags
&= ~MASK_STRING
;
613 if ((target_flags_explicit
& MASK_STRING
) != 0)
614 warning ("-mstring is not supported on little endian systems");
618 if (flag_pic
!= 0 && DEFAULT_ABI
== ABI_AIX
)
620 rs6000_flag_pic
= flag_pic
;
624 /* For Darwin, always silently make -fpic and -fPIC identical. */
625 if (flag_pic
== 1 && DEFAULT_ABI
== ABI_DARWIN
)
628 /* Set debug flags */
629 if (rs6000_debug_name
)
631 if (! strcmp (rs6000_debug_name
, "all"))
632 rs6000_debug_stack
= rs6000_debug_arg
= 1;
633 else if (! strcmp (rs6000_debug_name
, "stack"))
634 rs6000_debug_stack
= 1;
635 else if (! strcmp (rs6000_debug_name
, "arg"))
636 rs6000_debug_arg
= 1;
638 error ("unknown -mdebug-%s switch", rs6000_debug_name
);
641 if (rs6000_traceback_name
)
643 if (! strncmp (rs6000_traceback_name
, "full", 4))
644 rs6000_traceback
= traceback_full
;
645 else if (! strncmp (rs6000_traceback_name
, "part", 4))
646 rs6000_traceback
= traceback_part
;
647 else if (! strncmp (rs6000_traceback_name
, "no", 2))
648 rs6000_traceback
= traceback_none
;
650 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
651 rs6000_traceback_name
);
654 /* Set size of long double */
655 rs6000_long_double_type_size
= 64;
656 if (rs6000_long_double_size_string
)
659 int size
= strtol (rs6000_long_double_size_string
, &tail
, 10);
660 if (*tail
!= '\0' || (size
!= 64 && size
!= 128))
661 error ("Unknown switch -mlong-double-%s",
662 rs6000_long_double_size_string
);
664 rs6000_long_double_type_size
= size
;
667 /* Handle -mabi= options. */
668 rs6000_parse_abi_options ();
670 /* Handle -mvrsave= option. */
671 rs6000_parse_vrsave_option ();
673 /* Handle -misel= option. */
674 rs6000_parse_isel_option ();
676 #ifdef SUBTARGET_OVERRIDE_OPTIONS
677 SUBTARGET_OVERRIDE_OPTIONS
;
679 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
680 SUBSUBTARGET_OVERRIDE_OPTIONS
;
683 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
684 using TARGET_OPTIONS to handle a toggle switch, but we're out of
685 bits in target_flags so TARGET_SWITCHES cannot be used.
686 Assumption here is that rs6000_longcall_switch points into the
687 text of the complete option, rather than being a copy, so we can
688 scan back for the presence or absence of the no- modifier. */
689 if (rs6000_longcall_switch
)
691 const char *base
= rs6000_longcall_switch
;
692 while (base
[-1] != 'm') base
--;
694 if (*rs6000_longcall_switch
!= '\0')
695 error ("invalid option `%s'", base
);
696 rs6000_default_long_calls
= (base
[0] != 'n');
699 #ifdef TARGET_REGNAMES
700 /* If the user desires alternate register names, copy in the
701 alternate names now. */
703 memcpy (rs6000_reg_names
, alt_reg_names
, sizeof (rs6000_reg_names
));
706 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
707 If -maix-struct-return or -msvr4-struct-return was explicitly
708 used, don't override with the ABI default. */
709 if ((target_flags_explicit
& MASK_AIX_STRUCT_RET
) == 0)
711 if (DEFAULT_ABI
== ABI_V4
&& !DRAFT_V4_STRUCT_RET
)
712 target_flags
= (target_flags
& ~MASK_AIX_STRUCT_RET
);
714 target_flags
|= MASK_AIX_STRUCT_RET
;
717 if (TARGET_LONG_DOUBLE_128
718 && (DEFAULT_ABI
== ABI_AIX
|| DEFAULT_ABI
== ABI_DARWIN
))
719 real_format_for_mode
[TFmode
- QFmode
] = &ibm_extended_format
;
721 /* Allocate an alias set for register saves & restores from stack. */
722 rs6000_sr_alias_set
= new_alias_set ();
725 ASM_GENERATE_INTERNAL_LABEL (toc_label_name
, "LCTOC", 1);
727 /* We can only guarantee the availability of DI pseudo-ops when
728 assembling for 64-bit targets. */
731 targetm
.asm_out
.aligned_op
.di
= NULL
;
732 targetm
.asm_out
.unaligned_op
.di
= NULL
;
735 /* Arrange to save and restore machine status around nested functions. */
736 init_machine_status
= rs6000_init_machine_status
;
739 /* Handle -misel= option. */
741 rs6000_parse_isel_option ()
743 if (rs6000_isel_string
== 0)
745 else if (! strcmp (rs6000_isel_string
, "yes"))
747 else if (! strcmp (rs6000_isel_string
, "no"))
750 error ("unknown -misel= option specified: '%s'",
754 /* Handle -mvrsave= options. */
756 rs6000_parse_vrsave_option ()
758 /* Generate VRSAVE instructions by default. */
759 if (rs6000_altivec_vrsave_string
== 0
760 || ! strcmp (rs6000_altivec_vrsave_string
, "yes"))
761 rs6000_altivec_vrsave
= 1;
762 else if (! strcmp (rs6000_altivec_vrsave_string
, "no"))
763 rs6000_altivec_vrsave
= 0;
765 error ("unknown -mvrsave= option specified: '%s'",
766 rs6000_altivec_vrsave_string
);
769 /* Handle -mabi= options. */
771 rs6000_parse_abi_options ()
773 if (rs6000_abi_string
== 0)
775 else if (! strcmp (rs6000_abi_string
, "altivec"))
776 rs6000_altivec_abi
= 1;
777 else if (! strcmp (rs6000_abi_string
, "no-altivec"))
778 rs6000_altivec_abi
= 0;
779 else if (! strcmp (rs6000_abi_string
, "spe"))
781 else if (! strcmp (rs6000_abi_string
, "no-spe"))
784 error ("unknown ABI specified: '%s'", rs6000_abi_string
);
788 optimization_options (level
, size
)
789 int level ATTRIBUTE_UNUSED
;
790 int size ATTRIBUTE_UNUSED
;
794 /* Do anything needed at the start of the asm file. */
797 rs6000_file_start (file
, default_cpu
)
799 const char *default_cpu
;
803 const char *start
= buffer
;
804 struct rs6000_cpu_select
*ptr
;
806 if (flag_verbose_asm
)
808 sprintf (buffer
, "\n%s rs6000/powerpc options:", ASM_COMMENT_START
);
809 rs6000_select
[0].string
= default_cpu
;
811 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
813 ptr
= &rs6000_select
[i
];
814 if (ptr
->string
!= (char *)0 && ptr
->string
[0] != '\0')
816 fprintf (file
, "%s %s%s", start
, ptr
->name
, ptr
->string
);
822 switch (rs6000_sdata
)
824 case SDATA_NONE
: fprintf (file
, "%s -msdata=none", start
); start
= ""; break;
825 case SDATA_DATA
: fprintf (file
, "%s -msdata=data", start
); start
= ""; break;
826 case SDATA_SYSV
: fprintf (file
, "%s -msdata=sysv", start
); start
= ""; break;
827 case SDATA_EABI
: fprintf (file
, "%s -msdata=eabi", start
); start
= ""; break;
830 if (rs6000_sdata
&& g_switch_value
)
832 fprintf (file
, "%s -G %d", start
, g_switch_value
);
842 /* Return nonzero if this function is known to have a null epilogue. */
847 if (reload_completed
)
849 rs6000_stack_t
*info
= rs6000_stack_info ();
851 if (info
->first_gp_reg_save
== 32
852 && info
->first_fp_reg_save
== 64
853 && info
->first_altivec_reg_save
== LAST_ALTIVEC_REGNO
+ 1
856 && info
->vrsave_mask
== 0
864 /* Returns 1 always. */
867 any_operand (op
, mode
)
868 rtx op ATTRIBUTE_UNUSED
;
869 enum machine_mode mode ATTRIBUTE_UNUSED
;
874 /* Returns 1 if op is the count register. */
876 count_register_operand (op
, mode
)
878 enum machine_mode mode ATTRIBUTE_UNUSED
;
880 if (GET_CODE (op
) != REG
)
883 if (REGNO (op
) == COUNT_REGISTER_REGNUM
)
886 if (REGNO (op
) > FIRST_PSEUDO_REGISTER
)
892 /* Returns 1 if op is an altivec register. */
894 altivec_register_operand (op
, mode
)
896 enum machine_mode mode ATTRIBUTE_UNUSED
;
899 return (register_operand (op
, mode
)
900 && (GET_CODE (op
) != REG
901 || REGNO (op
) > FIRST_PSEUDO_REGISTER
902 || ALTIVEC_REGNO_P (REGNO (op
))));
906 xer_operand (op
, mode
)
908 enum machine_mode mode ATTRIBUTE_UNUSED
;
910 if (GET_CODE (op
) != REG
)
913 if (XER_REGNO_P (REGNO (op
)))
919 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
920 by such constants completes more quickly. */
923 s8bit_cint_operand (op
, mode
)
925 enum machine_mode mode ATTRIBUTE_UNUSED
;
927 return ( GET_CODE (op
) == CONST_INT
928 && (INTVAL (op
) >= -128 && INTVAL (op
) <= 127));
931 /* Return 1 if OP is a constant that can fit in a D field. */
934 short_cint_operand (op
, mode
)
936 enum machine_mode mode ATTRIBUTE_UNUSED
;
938 return (GET_CODE (op
) == CONST_INT
939 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I'));
942 /* Similar for an unsigned D field. */
945 u_short_cint_operand (op
, mode
)
947 enum machine_mode mode ATTRIBUTE_UNUSED
;
949 return (GET_CODE (op
) == CONST_INT
950 && CONST_OK_FOR_LETTER_P (INTVAL (op
) & GET_MODE_MASK (mode
), 'K'));
953 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
956 non_short_cint_operand (op
, mode
)
958 enum machine_mode mode ATTRIBUTE_UNUSED
;
960 return (GET_CODE (op
) == CONST_INT
961 && (unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x8000) >= 0x10000);
964 /* Returns 1 if OP is a CONST_INT that is a positive value
965 and an exact power of 2. */
968 exact_log2_cint_operand (op
, mode
)
970 enum machine_mode mode ATTRIBUTE_UNUSED
;
972 return (GET_CODE (op
) == CONST_INT
974 && exact_log2 (INTVAL (op
)) >= 0);
977 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
981 gpc_reg_operand (op
, mode
)
983 enum machine_mode mode
;
985 return (register_operand (op
, mode
)
986 && (GET_CODE (op
) != REG
987 || (REGNO (op
) >= ARG_POINTER_REGNUM
988 && !XER_REGNO_P (REGNO (op
)))
989 || REGNO (op
) < MQ_REGNO
));
992 /* Returns 1 if OP is either a pseudo-register or a register denoting a
996 cc_reg_operand (op
, mode
)
998 enum machine_mode mode
;
1000 return (register_operand (op
, mode
)
1001 && (GET_CODE (op
) != REG
1002 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
1003 || CR_REGNO_P (REGNO (op
))));
1006 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1007 CR field that isn't CR0. */
1010 cc_reg_not_cr0_operand (op
, mode
)
1012 enum machine_mode mode
;
1014 return (register_operand (op
, mode
)
1015 && (GET_CODE (op
) != REG
1016 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
1017 || CR_REGNO_NOT_CR0_P (REGNO (op
))));
1020 /* Returns 1 if OP is either a constant integer valid for a D-field or
1021 a non-special register. If a register, it must be in the proper
1022 mode unless MODE is VOIDmode. */
1025 reg_or_short_operand (op
, mode
)
1027 enum machine_mode mode
;
1029 return short_cint_operand (op
, mode
) || gpc_reg_operand (op
, mode
);
1032 /* Similar, except check if the negation of the constant would be
1033 valid for a D-field. */
1036 reg_or_neg_short_operand (op
, mode
)
1038 enum machine_mode mode
;
1040 if (GET_CODE (op
) == CONST_INT
)
1041 return CONST_OK_FOR_LETTER_P (INTVAL (op
), 'P');
1043 return gpc_reg_operand (op
, mode
);
1046 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1047 a non-special register. If a register, it must be in the proper
1048 mode unless MODE is VOIDmode. */
1051 reg_or_aligned_short_operand (op
, mode
)
1053 enum machine_mode mode
;
1055 if (gpc_reg_operand (op
, mode
))
1057 else if (short_cint_operand (op
, mode
) && !(INTVAL (op
) & 3))
1064 /* Return 1 if the operand is either a register or an integer whose
1065 high-order 16 bits are zero. */
1068 reg_or_u_short_operand (op
, mode
)
1070 enum machine_mode mode
;
1072 return u_short_cint_operand (op
, mode
) || gpc_reg_operand (op
, mode
);
1075 /* Return 1 is the operand is either a non-special register or ANY
1076 constant integer. */
1079 reg_or_cint_operand (op
, mode
)
1081 enum machine_mode mode
;
1083 return (GET_CODE (op
) == CONST_INT
|| gpc_reg_operand (op
, mode
));
1086 /* Return 1 is the operand is either a non-special register or ANY
1087 32-bit signed constant integer. */
1090 reg_or_arith_cint_operand (op
, mode
)
1092 enum machine_mode mode
;
1094 return (gpc_reg_operand (op
, mode
)
1095 || (GET_CODE (op
) == CONST_INT
1096 #if HOST_BITS_PER_WIDE_INT != 32
1097 && ((unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x80000000)
1098 < (unsigned HOST_WIDE_INT
) 0x100000000ll
)
1103 /* Return 1 is the operand is either a non-special register or a 32-bit
1104 signed constant integer valid for 64-bit addition. */
1107 reg_or_add_cint64_operand (op
, mode
)
1109 enum machine_mode mode
;
1111 return (gpc_reg_operand (op
, mode
)
1112 || (GET_CODE (op
) == CONST_INT
1113 #if HOST_BITS_PER_WIDE_INT == 32
1114 && INTVAL (op
) < 0x7fff8000
1116 && ((unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x80008000)
1122 /* Return 1 is the operand is either a non-special register or a 32-bit
1123 signed constant integer valid for 64-bit subtraction. */
1126 reg_or_sub_cint64_operand (op
, mode
)
1128 enum machine_mode mode
;
1130 return (gpc_reg_operand (op
, mode
)
1131 || (GET_CODE (op
) == CONST_INT
1132 #if HOST_BITS_PER_WIDE_INT == 32
1133 && (- INTVAL (op
)) < 0x7fff8000
1135 && ((unsigned HOST_WIDE_INT
) ((- INTVAL (op
)) + 0x80008000)
1141 /* Return 1 is the operand is either a non-special register or ANY
1142 32-bit unsigned constant integer. */
1145 reg_or_logical_cint_operand (op
, mode
)
1147 enum machine_mode mode
;
1149 if (GET_CODE (op
) == CONST_INT
)
1151 if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
)
1153 if (GET_MODE_BITSIZE (mode
) <= 32)
1156 if (INTVAL (op
) < 0)
1160 return ((INTVAL (op
) & GET_MODE_MASK (mode
)
1161 & (~ (unsigned HOST_WIDE_INT
) 0xffffffff)) == 0);
1163 else if (GET_CODE (op
) == CONST_DOUBLE
)
1165 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
1169 return CONST_DOUBLE_HIGH (op
) == 0;
1172 return gpc_reg_operand (op
, mode
);
1175 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1178 got_operand (op
, mode
)
1180 enum machine_mode mode ATTRIBUTE_UNUSED
;
1182 return (GET_CODE (op
) == SYMBOL_REF
1183 || GET_CODE (op
) == CONST
1184 || GET_CODE (op
) == LABEL_REF
);
1187 /* Return 1 if the operand is a simple references that can be loaded via
1188 the GOT (labels involving addition aren't allowed). */
1191 got_no_const_operand (op
, mode
)
1193 enum machine_mode mode ATTRIBUTE_UNUSED
;
1195 return (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
);
1198 /* Return the number of instructions it takes to form a constant in an
1199 integer register. */
1202 num_insns_constant_wide (value
)
1203 HOST_WIDE_INT value
;
1205 /* signed constant loadable with {cal|addi} */
1206 if (CONST_OK_FOR_LETTER_P (value
, 'I'))
1209 /* constant loadable with {cau|addis} */
1210 else if (CONST_OK_FOR_LETTER_P (value
, 'L'))
1213 #if HOST_BITS_PER_WIDE_INT == 64
1214 else if (TARGET_POWERPC64
)
1216 HOST_WIDE_INT low
= ((value
& 0xffffffff) ^ 0x80000000) - 0x80000000;
1217 HOST_WIDE_INT high
= value
>> 31;
1219 if (high
== 0 || high
== -1)
1225 return num_insns_constant_wide (high
) + 1;
1227 return (num_insns_constant_wide (high
)
1228 + num_insns_constant_wide (low
) + 1);
1237 num_insns_constant (op
, mode
)
1239 enum machine_mode mode
;
1241 if (GET_CODE (op
) == CONST_INT
)
1243 #if HOST_BITS_PER_WIDE_INT == 64
1244 if ((INTVAL (op
) >> 31) != 0 && (INTVAL (op
) >> 31) != -1
1245 && mask64_operand (op
, mode
))
1249 return num_insns_constant_wide (INTVAL (op
));
1252 else if (GET_CODE (op
) == CONST_DOUBLE
&& mode
== SFmode
)
1257 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1258 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
1259 return num_insns_constant_wide ((HOST_WIDE_INT
) l
);
1262 else if (GET_CODE (op
) == CONST_DOUBLE
)
1268 int endian
= (WORDS_BIG_ENDIAN
== 0);
1270 if (mode
== VOIDmode
|| mode
== DImode
)
1272 high
= CONST_DOUBLE_HIGH (op
);
1273 low
= CONST_DOUBLE_LOW (op
);
1277 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1278 REAL_VALUE_TO_TARGET_DOUBLE (rv
, l
);
1280 low
= l
[1 - endian
];
1284 return (num_insns_constant_wide (low
)
1285 + num_insns_constant_wide (high
));
1289 if (high
== 0 && low
>= 0)
1290 return num_insns_constant_wide (low
);
1292 else if (high
== -1 && low
< 0)
1293 return num_insns_constant_wide (low
);
1295 else if (mask64_operand (op
, mode
))
1299 return num_insns_constant_wide (high
) + 1;
1302 return (num_insns_constant_wide (high
)
1303 + num_insns_constant_wide (low
) + 1);
1311 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1312 register with one instruction per word. We only do this if we can
1313 safely read CONST_DOUBLE_{LOW,HIGH}. */
1316 easy_fp_constant (op
, mode
)
1318 enum machine_mode mode
;
1320 if (GET_CODE (op
) != CONST_DOUBLE
1321 || GET_MODE (op
) != mode
1322 || (GET_MODE_CLASS (mode
) != MODE_FLOAT
&& mode
!= DImode
))
1325 /* Consider all constants with -msoft-float to be easy. */
1326 if ((TARGET_SOFT_FLOAT
|| !TARGET_FPRS
)
1330 /* If we are using V.4 style PIC, consider all constants to be hard. */
1331 if (flag_pic
&& DEFAULT_ABI
== ABI_V4
)
1334 #ifdef TARGET_RELOCATABLE
1335 /* Similarly if we are using -mrelocatable, consider all constants
1337 if (TARGET_RELOCATABLE
)
1346 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1347 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv
, k
);
1349 return (num_insns_constant_wide ((HOST_WIDE_INT
) k
[0]) == 1
1350 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[1]) == 1
1351 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[2]) == 1
1352 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[3]) == 1);
1355 else if (mode
== DFmode
)
1360 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1361 REAL_VALUE_TO_TARGET_DOUBLE (rv
, k
);
1363 return (num_insns_constant_wide ((HOST_WIDE_INT
) k
[0]) == 1
1364 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[1]) == 1);
1367 else if (mode
== SFmode
)
1372 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1373 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
1375 return num_insns_constant_wide (l
) == 1;
1378 else if (mode
== DImode
)
1379 return ((TARGET_POWERPC64
1380 && GET_CODE (op
) == CONST_DOUBLE
&& CONST_DOUBLE_LOW (op
) == 0)
1381 || (num_insns_constant (op
, DImode
) <= 2));
1383 else if (mode
== SImode
)
1389 /* Return 1 if the operand is a CONST_INT and can be put into a
1390 register with one instruction. */
1393 easy_vector_constant (op
)
1399 if (GET_CODE (op
) != CONST_VECTOR
)
1402 units
= CONST_VECTOR_NUNITS (op
);
1404 /* We can generate 0 easily. Look for that. */
1405 for (i
= 0; i
< units
; ++i
)
1407 elt
= CONST_VECTOR_ELT (op
, i
);
1409 /* We could probably simplify this by just checking for equality
1410 with CONST0_RTX for the current mode, but let's be safe
1413 switch (GET_CODE (elt
))
1416 if (INTVAL (elt
) != 0)
1420 if (CONST_DOUBLE_LOW (elt
) != 0 || CONST_DOUBLE_HIGH (elt
) != 0)
1428 /* We could probably generate a few other constants trivially, but
1429 gcc doesn't generate them yet. FIXME later. */
1433 /* Return 1 if the operand is the constant 0. This works for scalars
1434 as well as vectors. */
1436 zero_constant (op
, mode
)
1438 enum machine_mode mode
;
1440 return op
== CONST0_RTX (mode
);
1443 /* Return 1 if the operand is 0.0. */
1445 zero_fp_constant (op
, mode
)
1447 enum machine_mode mode
;
1449 return GET_MODE_CLASS (mode
) == MODE_FLOAT
&& op
== CONST0_RTX (mode
);
1452 /* Return 1 if the operand is in volatile memory. Note that during
1453 the RTL generation phase, memory_operand does not return TRUE for
1454 volatile memory references. So this function allows us to
1455 recognize volatile references where its safe. */
1458 volatile_mem_operand (op
, mode
)
1460 enum machine_mode mode
;
1462 if (GET_CODE (op
) != MEM
)
1465 if (!MEM_VOLATILE_P (op
))
1468 if (mode
!= GET_MODE (op
))
1471 if (reload_completed
)
1472 return memory_operand (op
, mode
);
1474 if (reload_in_progress
)
1475 return strict_memory_address_p (mode
, XEXP (op
, 0));
1477 return memory_address_p (mode
, XEXP (op
, 0));
1480 /* Return 1 if the operand is an offsettable memory operand. */
1483 offsettable_mem_operand (op
, mode
)
1485 enum machine_mode mode
;
1487 return ((GET_CODE (op
) == MEM
)
1488 && offsettable_address_p (reload_completed
|| reload_in_progress
,
1489 mode
, XEXP (op
, 0)));
1492 /* Return 1 if the operand is either an easy FP constant (see above) or
1496 mem_or_easy_const_operand (op
, mode
)
1498 enum machine_mode mode
;
1500 return memory_operand (op
, mode
) || easy_fp_constant (op
, mode
);
1503 /* Return 1 if the operand is either a non-special register or an item
1504 that can be used as the operand of a `mode' add insn. */
1507 add_operand (op
, mode
)
1509 enum machine_mode mode
;
1511 if (GET_CODE (op
) == CONST_INT
)
1512 return (CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I')
1513 || CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'));
1515 return gpc_reg_operand (op
, mode
);
1518 /* Return 1 if OP is a constant but not a valid add_operand. */
1521 non_add_cint_operand (op
, mode
)
1523 enum machine_mode mode ATTRIBUTE_UNUSED
;
1525 return (GET_CODE (op
) == CONST_INT
1526 && !CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I')
1527 && !CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'));
1530 /* Return 1 if the operand is a non-special register or a constant that
1531 can be used as the operand of an OR or XOR insn on the RS/6000. */
1534 logical_operand (op
, mode
)
1536 enum machine_mode mode
;
1538 HOST_WIDE_INT opl
, oph
;
1540 if (gpc_reg_operand (op
, mode
))
1543 if (GET_CODE (op
) == CONST_INT
)
1545 opl
= INTVAL (op
) & GET_MODE_MASK (mode
);
1547 #if HOST_BITS_PER_WIDE_INT <= 32
1548 if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
&& opl
< 0)
1552 else if (GET_CODE (op
) == CONST_DOUBLE
)
1554 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1557 opl
= CONST_DOUBLE_LOW (op
);
1558 oph
= CONST_DOUBLE_HIGH (op
);
1565 return ((opl
& ~ (unsigned HOST_WIDE_INT
) 0xffff) == 0
1566 || (opl
& ~ (unsigned HOST_WIDE_INT
) 0xffff0000) == 0);
1569 /* Return 1 if C is a constant that is not a logical operand (as
1570 above), but could be split into one. */
1573 non_logical_cint_operand (op
, mode
)
1575 enum machine_mode mode
;
1577 return ((GET_CODE (op
) == CONST_INT
|| GET_CODE (op
) == CONST_DOUBLE
)
1578 && ! logical_operand (op
, mode
)
1579 && reg_or_logical_cint_operand (op
, mode
));
1582 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1583 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1584 Reject all ones and all zeros, since these should have been optimized
1585 away and confuse the making of MB and ME. */
1588 mask_operand (op
, mode
)
1590 enum machine_mode mode ATTRIBUTE_UNUSED
;
1592 HOST_WIDE_INT c
, lsb
;
1594 if (GET_CODE (op
) != CONST_INT
)
1599 /* Fail in 64-bit mode if the mask wraps around because the upper
1600 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1601 if (TARGET_POWERPC64
&& (c
& 0x80000001) == 0x80000001)
1604 /* We don't change the number of transitions by inverting,
1605 so make sure we start with the LS bit zero. */
1609 /* Reject all zeros or all ones. */
1613 /* Find the first transition. */
1616 /* Invert to look for a second transition. */
1619 /* Erase first transition. */
1622 /* Find the second transition (if any). */
1625 /* Match if all the bits above are 1's (or c is zero). */
1629 /* Return 1 for the PowerPC64 rlwinm corner case. */
1632 mask_operand_wrap (op
, mode
)
1634 enum machine_mode mode ATTRIBUTE_UNUSED
;
1636 HOST_WIDE_INT c
, lsb
;
1638 if (GET_CODE (op
) != CONST_INT
)
1643 if ((c
& 0x80000001) != 0x80000001)
1657 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1658 It is if there are no more than one 1->0 or 0->1 transitions.
1659 Reject all zeros, since zero should have been optimized away and
1660 confuses the making of MB and ME. */
1663 mask64_operand (op
, mode
)
1665 enum machine_mode mode ATTRIBUTE_UNUSED
;
1667 if (GET_CODE (op
) == CONST_INT
)
1669 HOST_WIDE_INT c
, lsb
;
1673 /* Reject all zeros. */
1677 /* We don't change the number of transitions by inverting,
1678 so make sure we start with the LS bit zero. */
1682 /* Find the transition, and check that all bits above are 1's. */
1689 /* Like mask64_operand, but allow up to three transitions. This
1690 predicate is used by insn patterns that generate two rldicl or
1691 rldicr machine insns. */
1694 mask64_2_operand (op
, mode
)
1696 enum machine_mode mode ATTRIBUTE_UNUSED
;
1698 if (GET_CODE (op
) == CONST_INT
)
1700 HOST_WIDE_INT c
, lsb
;
1704 /* Disallow all zeros. */
1708 /* We don't change the number of transitions by inverting,
1709 so make sure we start with the LS bit zero. */
1713 /* Find the first transition. */
1716 /* Invert to look for a second transition. */
1719 /* Erase first transition. */
1722 /* Find the second transition. */
1725 /* Invert to look for a third transition. */
1728 /* Erase second transition. */
1731 /* Find the third transition (if any). */
1734 /* Match if all the bits above are 1's (or c is zero). */
1740 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
1741 implement ANDing by the mask IN. */
1743 build_mask64_2_operands (in
, out
)
1747 #if HOST_BITS_PER_WIDE_INT >= 64
1748 unsigned HOST_WIDE_INT c
, lsb
, m1
, m2
;
1751 if (GET_CODE (in
) != CONST_INT
)
1757 /* Assume c initially something like 0x00fff000000fffff. The idea
1758 is to rotate the word so that the middle ^^^^^^ group of zeros
1759 is at the MS end and can be cleared with an rldicl mask. We then
1760 rotate back and clear off the MS ^^ group of zeros with a
1762 c
= ~c
; /* c == 0xff000ffffff00000 */
1763 lsb
= c
& -c
; /* lsb == 0x0000000000100000 */
1764 m1
= -lsb
; /* m1 == 0xfffffffffff00000 */
1765 c
= ~c
; /* c == 0x00fff000000fffff */
1766 c
&= -lsb
; /* c == 0x00fff00000000000 */
1767 lsb
= c
& -c
; /* lsb == 0x0000100000000000 */
1768 c
= ~c
; /* c == 0xff000fffffffffff */
1769 c
&= -lsb
; /* c == 0xff00000000000000 */
1771 while ((lsb
>>= 1) != 0)
1772 shift
++; /* shift == 44 on exit from loop */
1773 m1
<<= 64 - shift
; /* m1 == 0xffffff0000000000 */
1774 m1
= ~m1
; /* m1 == 0x000000ffffffffff */
1775 m2
= ~c
; /* m2 == 0x00ffffffffffffff */
1779 /* Assume c initially something like 0xff000f0000000000. The idea
1780 is to rotate the word so that the ^^^ middle group of zeros
1781 is at the LS end and can be cleared with an rldicr mask. We then
1782 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
1784 lsb
= c
& -c
; /* lsb == 0x0000010000000000 */
1785 m2
= -lsb
; /* m2 == 0xffffff0000000000 */
1786 c
= ~c
; /* c == 0x00fff0ffffffffff */
1787 c
&= -lsb
; /* c == 0x00fff00000000000 */
1788 lsb
= c
& -c
; /* lsb == 0x0000100000000000 */
1789 c
= ~c
; /* c == 0xff000fffffffffff */
1790 c
&= -lsb
; /* c == 0xff00000000000000 */
1792 while ((lsb
>>= 1) != 0)
1793 shift
++; /* shift == 44 on exit from loop */
1794 m1
= ~c
; /* m1 == 0x00ffffffffffffff */
1795 m1
>>= shift
; /* m1 == 0x0000000000000fff */
1796 m1
= ~m1
; /* m1 == 0xfffffffffffff000 */
1799 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
1800 masks will be all 1's. We are guaranteed more than one transition. */
1801 out
[0] = GEN_INT (64 - shift
);
1802 out
[1] = GEN_INT (m1
);
1803 out
[2] = GEN_INT (shift
);
1804 out
[3] = GEN_INT (m2
);
1812 /* Return 1 if the operand is either a non-special register or a constant
1813 that can be used as the operand of a PowerPC64 logical AND insn. */
1816 and64_operand (op
, mode
)
1818 enum machine_mode mode
;
1820 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
1821 return (gpc_reg_operand (op
, mode
) || mask64_operand (op
, mode
));
1823 return (logical_operand (op
, mode
) || mask64_operand (op
, mode
));
1826 /* Like the above, but also match constants that can be implemented
1827 with two rldicl or rldicr insns. */
1830 and64_2_operand (op
, mode
)
1832 enum machine_mode mode
;
1834 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
1835 return gpc_reg_operand (op
, mode
) || mask64_2_operand (op
, mode
);
1837 return logical_operand (op
, mode
) || mask64_2_operand (op
, mode
);
1840 /* Return 1 if the operand is either a non-special register or a
1841 constant that can be used as the operand of an RS/6000 logical AND insn. */
1844 and_operand (op
, mode
)
1846 enum machine_mode mode
;
1848 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
1849 return (gpc_reg_operand (op
, mode
) || mask_operand (op
, mode
));
1851 return (logical_operand (op
, mode
) || mask_operand (op
, mode
));
1854 /* Return 1 if the operand is a general register or memory operand. */
1857 reg_or_mem_operand (op
, mode
)
1859 enum machine_mode mode
;
1861 return (gpc_reg_operand (op
, mode
)
1862 || memory_operand (op
, mode
)
1863 || volatile_mem_operand (op
, mode
));
1866 /* Return 1 if the operand is a general register or memory operand without
1867 pre_inc or pre_dec which produces invalid form of PowerPC lwa
1871 lwa_operand (op
, mode
)
1873 enum machine_mode mode
;
1877 if (reload_completed
&& GET_CODE (inner
) == SUBREG
)
1878 inner
= SUBREG_REG (inner
);
1880 return gpc_reg_operand (inner
, mode
)
1881 || (memory_operand (inner
, mode
)
1882 && GET_CODE (XEXP (inner
, 0)) != PRE_INC
1883 && GET_CODE (XEXP (inner
, 0)) != PRE_DEC
1884 && (GET_CODE (XEXP (inner
, 0)) != PLUS
1885 || GET_CODE (XEXP (XEXP (inner
, 0), 1)) != CONST_INT
1886 || INTVAL (XEXP (XEXP (inner
, 0), 1)) % 4 == 0));
1889 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
1892 symbol_ref_operand (op
, mode
)
1894 enum machine_mode mode
;
1896 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1899 return (GET_CODE (op
) == SYMBOL_REF
);
1902 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1903 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
1906 call_operand (op
, mode
)
1908 enum machine_mode mode
;
1910 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1913 return (GET_CODE (op
) == SYMBOL_REF
1914 || (GET_CODE (op
) == REG
1915 && (REGNO (op
) == LINK_REGISTER_REGNUM
1916 || REGNO (op
) == COUNT_REGISTER_REGNUM
1917 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
)));
1920 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1921 this file and the function is not weakly defined. */
1924 current_file_function_operand (op
, mode
)
1926 enum machine_mode mode ATTRIBUTE_UNUSED
;
1928 return (GET_CODE (op
) == SYMBOL_REF
1929 && (SYMBOL_REF_FLAG (op
)
1930 || (op
== XEXP (DECL_RTL (current_function_decl
), 0)
1931 && ! DECL_WEAK (current_function_decl
))));
1934 /* Return 1 if this operand is a valid input for a move insn. */
1937 input_operand (op
, mode
)
1939 enum machine_mode mode
;
1941 /* Memory is always valid. */
1942 if (memory_operand (op
, mode
))
1945 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
1946 if (GET_CODE (op
) == CONSTANT_P_RTX
)
1949 /* For floating-point, easy constants are valid. */
1950 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
1952 && easy_fp_constant (op
, mode
))
1955 /* Allow any integer constant. */
1956 if (GET_MODE_CLASS (mode
) == MODE_INT
1957 && (GET_CODE (op
) == CONST_INT
1958 || GET_CODE (op
) == CONST_DOUBLE
))
1961 /* For floating-point or multi-word mode, the only remaining valid type
1963 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
1964 || GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
1965 return register_operand (op
, mode
);
1967 /* The only cases left are integral modes one word or smaller (we
1968 do not get called for MODE_CC values). These can be in any
1970 if (register_operand (op
, mode
))
1973 /* A SYMBOL_REF referring to the TOC is valid. */
1974 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op
))
1977 /* A constant pool expression (relative to the TOC) is valid */
1978 if (TOC_RELATIVE_EXPR_P (op
))
1981 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
1983 if (DEFAULT_ABI
== ABI_V4
1984 && (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == CONST
)
1985 && small_data_operand (op
, Pmode
))
1991 /* Return 1 for an operand in small memory on V.4/eabi. */
1994 small_data_operand (op
, mode
)
1995 rtx op ATTRIBUTE_UNUSED
;
1996 enum machine_mode mode ATTRIBUTE_UNUSED
;
2001 if (rs6000_sdata
== SDATA_NONE
|| rs6000_sdata
== SDATA_DATA
)
2004 if (DEFAULT_ABI
!= ABI_V4
)
2007 if (GET_CODE (op
) == SYMBOL_REF
)
2010 else if (GET_CODE (op
) != CONST
2011 || GET_CODE (XEXP (op
, 0)) != PLUS
2012 || GET_CODE (XEXP (XEXP (op
, 0), 0)) != SYMBOL_REF
2013 || GET_CODE (XEXP (XEXP (op
, 0), 1)) != CONST_INT
)
2018 rtx sum
= XEXP (op
, 0);
2019 HOST_WIDE_INT summand
;
2021 /* We have to be careful here, because it is the referenced address
2022 that must be 32k from _SDA_BASE_, not just the symbol. */
2023 summand
= INTVAL (XEXP (sum
, 1));
2024 if (summand
< 0 || summand
> g_switch_value
)
2027 sym_ref
= XEXP (sum
, 0);
2030 if (*XSTR (sym_ref
, 0) != '@')
2041 constant_pool_expr_1 (op
, have_sym
, have_toc
)
2046 switch (GET_CODE(op
))
2049 if (CONSTANT_POOL_ADDRESS_P (op
))
2051 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op
), Pmode
))
2059 else if (! strcmp (XSTR (op
, 0), toc_label_name
))
2068 return (constant_pool_expr_1 (XEXP (op
, 0), have_sym
, have_toc
)
2069 && constant_pool_expr_1 (XEXP (op
, 1), have_sym
, have_toc
));
2071 return constant_pool_expr_1 (XEXP (op
, 0), have_sym
, have_toc
);
2080 constant_pool_expr_p (op
)
2085 return constant_pool_expr_1 (op
, &have_sym
, &have_toc
) && have_sym
;
2089 toc_relative_expr_p (op
)
2094 return constant_pool_expr_1 (op
, &have_sym
, &have_toc
) && have_toc
;
2097 /* Try machine-dependent ways of modifying an illegitimate address
2098 to be legitimate. If we find one, return the new, valid address.
2099 This is used from only one place: `memory_address' in explow.c.
2101 OLDX is the address as it was before break_out_memory_refs was
2102 called. In some cases it is useful to look at this to decide what
2105 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2107 It is always safe for this function to do nothing. It exists to
2108 recognize opportunities to optimize the output.
2110 On RS/6000, first check for the sum of a register with a constant
2111 integer that is out of range. If so, generate code to add the
2112 constant with the low-order 16 bits masked to the register and force
2113 this result into another register (this can be done with `cau').
2114 Then generate an address of REG+(CONST&0xffff), allowing for the
2115 possibility of bit 16 being a one.
2117 Then check for the sum of a register and something not constant, try to
2118 load the other things into a register and return the sum. */
2120 rs6000_legitimize_address (x
, oldx
, mode
)
2122 rtx oldx ATTRIBUTE_UNUSED
;
2123 enum machine_mode mode
;
2125 if (GET_CODE (x
) == PLUS
2126 && GET_CODE (XEXP (x
, 0)) == REG
2127 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2128 && (unsigned HOST_WIDE_INT
) (INTVAL (XEXP (x
, 1)) + 0x8000) >= 0x10000)
2130 HOST_WIDE_INT high_int
, low_int
;
2132 low_int
= ((INTVAL (XEXP (x
, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2133 high_int
= INTVAL (XEXP (x
, 1)) - low_int
;
2134 sum
= force_operand (gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
2135 GEN_INT (high_int
)), 0);
2136 return gen_rtx_PLUS (Pmode
, sum
, GEN_INT (low_int
));
2138 else if (GET_CODE (x
) == PLUS
2139 && GET_CODE (XEXP (x
, 0)) == REG
2140 && GET_CODE (XEXP (x
, 1)) != CONST_INT
2141 && GET_MODE_NUNITS (mode
) == 1
2142 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
)
2144 || (mode
!= DFmode
&& mode
!= TFmode
))
2145 && (TARGET_POWERPC64
|| mode
!= DImode
)
2148 return gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
2149 force_reg (Pmode
, force_operand (XEXP (x
, 1), 0)));
2151 else if (ALTIVEC_VECTOR_MODE (mode
))
2155 /* Make sure both operands are registers. */
2156 if (GET_CODE (x
) == PLUS
)
2157 return gen_rtx_PLUS (Pmode
, force_reg (Pmode
, XEXP (x
, 0)),
2158 force_reg (Pmode
, XEXP (x
, 1)));
2160 reg
= force_reg (Pmode
, x
);
2163 else if (SPE_VECTOR_MODE (mode
))
2165 /* We accept [reg + reg] and [reg + OFFSET]. */
2167 if (GET_CODE (x
) == PLUS
)
2169 rtx op1
= XEXP (x
, 0);
2170 rtx op2
= XEXP (x
, 1);
2172 op1
= force_reg (Pmode
, op1
);
2174 if (GET_CODE (op2
) != REG
2175 && (GET_CODE (op2
) != CONST_INT
2176 || !SPE_CONST_OFFSET_OK (INTVAL (op2
))))
2177 op2
= force_reg (Pmode
, op2
);
2179 return gen_rtx_PLUS (Pmode
, op1
, op2
);
2182 return force_reg (Pmode
, x
);
2184 else if (TARGET_ELF
&& TARGET_32BIT
&& TARGET_NO_TOC
&& ! flag_pic
2185 && GET_CODE (x
) != CONST_INT
2186 && GET_CODE (x
) != CONST_DOUBLE
2188 && GET_MODE_NUNITS (mode
) == 1
2189 && (GET_MODE_BITSIZE (mode
) <= 32
2190 || ((TARGET_HARD_FLOAT
&& TARGET_FPRS
) && mode
== DFmode
)))
2192 rtx reg
= gen_reg_rtx (Pmode
);
2193 emit_insn (gen_elf_high (reg
, (x
)));
2194 return gen_rtx_LO_SUM (Pmode
, reg
, (x
));
2196 else if (TARGET_MACHO
&& TARGET_32BIT
&& TARGET_NO_TOC
2198 && GET_CODE (x
) != CONST_INT
2199 && GET_CODE (x
) != CONST_DOUBLE
2201 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
) || mode
!= DFmode
)
2205 rtx reg
= gen_reg_rtx (Pmode
);
2206 emit_insn (gen_macho_high (reg
, (x
)));
2207 return gen_rtx_LO_SUM (Pmode
, reg
, (x
));
2210 && CONSTANT_POOL_EXPR_P (x
)
2211 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x
), Pmode
))
2213 return create_TOC_reference (x
);
/* The convention appears to be to define this wherever it is used.
   With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
   is now used here.  */
#ifndef REG_MODE_OK_FOR_BASE_P
#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
#endif
2226 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2227 replace the input X, or the original X if no replacement is called for.
2228 The output parameter *WIN is 1 if the calling macro should goto WIN,
2231 For RS/6000, we wish to handle large displacements off a base
2232 register by splitting the addend across an addiu/addis and the mem insn.
2233 This cuts number of extra insns needed from 3 to 1.
2235 On Darwin, we use this to generate code for floating point constants.
2236 A movsf_low is generated so we wind up with 2 instructions rather than 3.
2237 The Darwin code is inside #if TARGET_MACHO because only then is
2238 machopic_function_base_name() defined. */
2240 rs6000_legitimize_reload_address (x
, mode
, opnum
, type
, ind_levels
, win
)
2242 enum machine_mode mode
;
2245 int ind_levels ATTRIBUTE_UNUSED
;
2248 /* We must recognize output that we have already generated ourselves. */
2249 if (GET_CODE (x
) == PLUS
2250 && GET_CODE (XEXP (x
, 0)) == PLUS
2251 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == REG
2252 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
2253 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2255 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2256 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
2257 opnum
, (enum reload_type
)type
);
2263 if (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
2264 && GET_CODE (x
) == LO_SUM
2265 && GET_CODE (XEXP (x
, 0)) == PLUS
2266 && XEXP (XEXP (x
, 0), 0) == pic_offset_table_rtx
2267 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == HIGH
2268 && GET_CODE (XEXP (XEXP (XEXP (x
, 0), 1), 0)) == CONST
2269 && XEXP (XEXP (XEXP (x
, 0), 1), 0) == XEXP (x
, 1)
2270 && GET_CODE (XEXP (XEXP (x
, 1), 0)) == MINUS
2271 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 0)) == SYMBOL_REF
2272 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 1)) == SYMBOL_REF
)
2274 /* Result of previous invocation of this function on Darwin
2275 floating point constant. */
2276 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2277 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
2278 opnum
, (enum reload_type
)type
);
2283 if (GET_CODE (x
) == PLUS
2284 && GET_CODE (XEXP (x
, 0)) == REG
2285 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
2286 && REG_MODE_OK_FOR_BASE_P (XEXP (x
, 0), mode
)
2287 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2288 && !SPE_VECTOR_MODE (mode
)
2289 && !ALTIVEC_VECTOR_MODE (mode
))
2291 HOST_WIDE_INT val
= INTVAL (XEXP (x
, 1));
2292 HOST_WIDE_INT low
= ((val
& 0xffff) ^ 0x8000) - 0x8000;
2294 = (((val
- low
) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2296 /* Check for 32-bit overflow. */
2297 if (high
+ low
!= val
)
2303 /* Reload the high part into a base reg; leave the low part
2304 in the mem directly. */
2306 x
= gen_rtx_PLUS (GET_MODE (x
),
2307 gen_rtx_PLUS (GET_MODE (x
), XEXP (x
, 0),
2311 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2312 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
2313 opnum
, (enum reload_type
)type
);
2318 if (GET_CODE (x
) == SYMBOL_REF
2319 && DEFAULT_ABI
== ABI_DARWIN
2320 && !ALTIVEC_VECTOR_MODE (mode
)
2323 /* Darwin load of floating point constant. */
2324 rtx offset
= gen_rtx (CONST
, Pmode
,
2325 gen_rtx (MINUS
, Pmode
, x
,
2326 gen_rtx (SYMBOL_REF
, Pmode
,
2327 machopic_function_base_name ())));
2328 x
= gen_rtx (LO_SUM
, GET_MODE (x
),
2329 gen_rtx (PLUS
, Pmode
, pic_offset_table_rtx
,
2330 gen_rtx (HIGH
, Pmode
, offset
)), offset
);
2331 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2332 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
2333 opnum
, (enum reload_type
)type
);
2339 && CONSTANT_POOL_EXPR_P (x
)
2340 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x
), mode
))
2342 (x
) = create_TOC_reference (x
);
2350 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2351 that is a valid memory address for an instruction.
2352 The MODE argument is the machine mode for the MEM expression
2353 that wants to use this address.
2355 On the RS/6000, there are four valid address: a SYMBOL_REF that
2356 refers to a constant pool entry of an address (or the sum of it
2357 plus a constant), a short (16-bit signed) constant plus a register,
2358 the sum of two registers, or a register indirect, possibly with an
2359 auto-increment. For DFmode and DImode with a constant plus register,
2360 we must ensure that both words are addressable or PowerPC64 with offset
2363 For modes spanning multiple registers (DFmode in 32-bit GPRs,
2364 32-bit DImode, TImode), indexed addressing cannot be used because
2365 adjacent memory cells are accessed by adding word-sized offsets
2366 during assembly output. */
2368 rs6000_legitimate_address (mode
, x
, reg_ok_strict
)
2369 enum machine_mode mode
;
2373 if (LEGITIMATE_INDIRECT_ADDRESS_P (x
, reg_ok_strict
))
2375 if ((GET_CODE (x
) == PRE_INC
|| GET_CODE (x
) == PRE_DEC
)
2376 && !ALTIVEC_VECTOR_MODE (mode
)
2377 && !SPE_VECTOR_MODE (mode
)
2379 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x
, 0), reg_ok_strict
))
2381 if (LEGITIMATE_SMALL_DATA_P (mode
, x
))
2383 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x
))
2385 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2387 && GET_CODE (x
) == PLUS
2388 && GET_CODE (XEXP (x
, 0)) == REG
2389 && XEXP (x
, 0) == virtual_stack_vars_rtx
2390 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2392 if (LEGITIMATE_OFFSET_ADDRESS_P (mode
, x
, reg_ok_strict
))
2395 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
)
2397 || (mode
!= DFmode
&& mode
!= TFmode
))
2398 && (TARGET_POWERPC64
|| mode
!= DImode
)
2399 && LEGITIMATE_INDEXED_ADDRESS_P (x
, reg_ok_strict
))
2401 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode
, x
, reg_ok_strict
))
2406 /* Try to output insns to set TARGET equal to the constant C if it can
2407 be done in less than N insns. Do all computations in MODE.
2408 Returns the place where the output has been placed if it can be
2409 done and the insns have been emitted. If it would take more than N
2410 insns, zero is returned and no insns and emitted. */
2413 rs6000_emit_set_const (dest
, mode
, source
, n
)
2415 enum machine_mode mode
;
2416 int n ATTRIBUTE_UNUSED
;
2418 rtx result
, insn
, set
;
2419 HOST_WIDE_INT c0
, c1
;
2421 if (mode
== QImode
|| mode
== HImode
)
2424 dest
= gen_reg_rtx (mode
);
2425 emit_insn (gen_rtx_SET (VOIDmode
, dest
, source
));
2428 else if (mode
== SImode
)
2430 result
= no_new_pseudos
? dest
: gen_reg_rtx (SImode
);
2432 emit_insn (gen_rtx_SET (VOIDmode
, result
,
2433 GEN_INT (INTVAL (source
)
2434 & (~ (HOST_WIDE_INT
) 0xffff))));
2435 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
2436 gen_rtx_IOR (SImode
, result
,
2437 GEN_INT (INTVAL (source
) & 0xffff))));
2440 else if (mode
== DImode
)
2442 if (GET_CODE (source
) == CONST_INT
)
2444 c0
= INTVAL (source
);
2447 else if (GET_CODE (source
) == CONST_DOUBLE
)
2449 #if HOST_BITS_PER_WIDE_INT >= 64
2450 c0
= CONST_DOUBLE_LOW (source
);
2453 c0
= CONST_DOUBLE_LOW (source
);
2454 c1
= CONST_DOUBLE_HIGH (source
);
2460 result
= rs6000_emit_set_long_const (dest
, c0
, c1
);
2465 insn
= get_last_insn ();
2466 set
= single_set (insn
);
2467 if (! CONSTANT_P (SET_SRC (set
)))
2468 set_unique_reg_note (insn
, REG_EQUAL
, source
);
2473 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2474 fall back to a straight forward decomposition. We do this to avoid
2475 exponential run times encountered when looking for longer sequences
2476 with rs6000_emit_set_const. */
2478 rs6000_emit_set_long_const (dest
, c1
, c2
)
2480 HOST_WIDE_INT c1
, c2
;
2482 if (!TARGET_POWERPC64
)
2484 rtx operand1
, operand2
;
2486 operand1
= operand_subword_force (dest
, WORDS_BIG_ENDIAN
== 0,
2488 operand2
= operand_subword_force (dest
, WORDS_BIG_ENDIAN
!= 0,
2490 emit_move_insn (operand1
, GEN_INT (c1
));
2491 emit_move_insn (operand2
, GEN_INT (c2
));
2495 HOST_WIDE_INT ud1
, ud2
, ud3
, ud4
;
2498 ud2
= (c1
& 0xffff0000) >> 16;
2499 #if HOST_BITS_PER_WIDE_INT >= 64
2503 ud4
= (c2
& 0xffff0000) >> 16;
2505 if ((ud4
== 0xffff && ud3
== 0xffff && ud2
== 0xffff && (ud1
& 0x8000))
2506 || (ud4
== 0 && ud3
== 0 && ud2
== 0 && ! (ud1
& 0x8000)))
2509 emit_move_insn (dest
, GEN_INT (((ud1
^ 0x8000) - 0x8000)));
2511 emit_move_insn (dest
, GEN_INT (ud1
));
2514 else if ((ud4
== 0xffff && ud3
== 0xffff && (ud2
& 0x8000))
2515 || (ud4
== 0 && ud3
== 0 && ! (ud2
& 0x8000)))
2518 emit_move_insn (dest
, GEN_INT (((ud2
<< 16) ^ 0x80000000)
2521 emit_move_insn (dest
, GEN_INT (ud2
<< 16));
2523 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
2525 else if ((ud4
== 0xffff && (ud3
& 0x8000))
2526 || (ud4
== 0 && ! (ud3
& 0x8000)))
2529 emit_move_insn (dest
, GEN_INT (((ud3
<< 16) ^ 0x80000000)
2532 emit_move_insn (dest
, GEN_INT (ud3
<< 16));
2535 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud2
)));
2536 emit_move_insn (dest
, gen_rtx_ASHIFT (DImode
, dest
, GEN_INT (16)));
2538 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
2543 emit_move_insn (dest
, GEN_INT (((ud4
<< 16) ^ 0x80000000)
2546 emit_move_insn (dest
, GEN_INT (ud4
<< 16));
2549 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud3
)));
2551 emit_move_insn (dest
, gen_rtx_ASHIFT (DImode
, dest
, GEN_INT (32)));
2553 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
,
2554 GEN_INT (ud2
<< 16)));
2556 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
2562 /* Emit a move from SOURCE to DEST in mode MODE. */
2564 rs6000_emit_move (dest
, source
, mode
)
2567 enum machine_mode mode
;
2571 operands
[1] = source
;
2573 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
2574 if (GET_CODE (operands
[1]) == CONST_DOUBLE
2575 && ! FLOAT_MODE_P (mode
)
2576 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
2578 /* FIXME. This should never happen. */
2579 /* Since it seems that it does, do the safe thing and convert
2581 operands
[1] = gen_int_mode (CONST_DOUBLE_LOW (operands
[1]), mode
);
2583 if (GET_CODE (operands
[1]) == CONST_DOUBLE
2584 && ! FLOAT_MODE_P (mode
)
2585 && ((CONST_DOUBLE_HIGH (operands
[1]) == 0
2586 && CONST_DOUBLE_LOW (operands
[1]) >= 0)
2587 || (CONST_DOUBLE_HIGH (operands
[1]) == -1
2588 && CONST_DOUBLE_LOW (operands
[1]) < 0)))
2591 /* Check if GCC is setting up a block move that will end up using FP
2592 registers as temporaries. We must make sure this is acceptable. */
2593 if (GET_CODE (operands
[0]) == MEM
2594 && GET_CODE (operands
[1]) == MEM
2596 && (SLOW_UNALIGNED_ACCESS (DImode
, MEM_ALIGN (operands
[0]))
2597 || SLOW_UNALIGNED_ACCESS (DImode
, MEM_ALIGN (operands
[1])))
2598 && ! (SLOW_UNALIGNED_ACCESS (SImode
, (MEM_ALIGN (operands
[0]) > 32
2599 ? 32 : MEM_ALIGN (operands
[0])))
2600 || SLOW_UNALIGNED_ACCESS (SImode
, (MEM_ALIGN (operands
[1]) > 32
2602 : MEM_ALIGN (operands
[1]))))
2603 && ! MEM_VOLATILE_P (operands
[0])
2604 && ! MEM_VOLATILE_P (operands
[1]))
2606 emit_move_insn (adjust_address (operands
[0], SImode
, 0),
2607 adjust_address (operands
[1], SImode
, 0));
2608 emit_move_insn (adjust_address (operands
[0], SImode
, 4),
2609 adjust_address (operands
[1], SImode
, 4));
2613 if (!no_new_pseudos
)
2615 if (GET_CODE (operands
[1]) == MEM
&& optimize
> 0
2616 && (mode
== QImode
|| mode
== HImode
|| mode
== SImode
)
2617 && GET_MODE_SIZE (mode
) < GET_MODE_SIZE (word_mode
))
2619 rtx reg
= gen_reg_rtx (word_mode
);
2621 emit_insn (gen_rtx_SET (word_mode
, reg
,
2622 gen_rtx_ZERO_EXTEND (word_mode
,
2624 operands
[1] = gen_lowpart (mode
, reg
);
2626 if (GET_CODE (operands
[0]) != REG
)
2627 operands
[1] = force_reg (mode
, operands
[1]);
2630 if (mode
== SFmode
&& ! TARGET_POWERPC
2631 && TARGET_HARD_FLOAT
&& TARGET_FPRS
2632 && GET_CODE (operands
[0]) == MEM
)
2636 if (reload_in_progress
|| reload_completed
)
2637 regnum
= true_regnum (operands
[1]);
2638 else if (GET_CODE (operands
[1]) == REG
)
2639 regnum
= REGNO (operands
[1]);
2643 /* If operands[1] is a register, on POWER it may have
2644 double-precision data in it, so truncate it to single
2646 if (FP_REGNO_P (regnum
) || regnum
>= FIRST_PSEUDO_REGISTER
)
2649 newreg
= (no_new_pseudos
? operands
[1] : gen_reg_rtx (mode
));
2650 emit_insn (gen_aux_truncdfsf2 (newreg
, operands
[1]));
2651 operands
[1] = newreg
;
2655 /* Handle the case where reload calls us with an invalid address;
2656 and the case of CONSTANT_P_RTX. */
2657 if (!ALTIVEC_VECTOR_MODE (mode
)
2658 && (! general_operand (operands
[1], mode
)
2659 || ! nonimmediate_operand (operands
[0], mode
)
2660 || GET_CODE (operands
[1]) == CONSTANT_P_RTX
))
2662 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2666 /* FIXME: In the long term, this switch statement should go away
2667 and be replaced by a sequence of tests based on things like
2673 if (CONSTANT_P (operands
[1])
2674 && GET_CODE (operands
[1]) != CONST_INT
)
2675 operands
[1] = force_const_mem (mode
, operands
[1]);
2681 if (CONSTANT_P (operands
[1])
2682 && ! easy_fp_constant (operands
[1], mode
))
2683 operands
[1] = force_const_mem (mode
, operands
[1]);
2694 if (CONSTANT_P (operands
[1])
2695 && !easy_vector_constant (operands
[1]))
2696 operands
[1] = force_const_mem (mode
, operands
[1]);
2701 /* Use default pattern for address of ELF small data */
2704 && DEFAULT_ABI
== ABI_V4
2705 && (GET_CODE (operands
[1]) == SYMBOL_REF
2706 || GET_CODE (operands
[1]) == CONST
)
2707 && small_data_operand (operands
[1], mode
))
2709 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2713 if (DEFAULT_ABI
== ABI_V4
2714 && mode
== Pmode
&& mode
== SImode
2715 && flag_pic
== 1 && got_operand (operands
[1], mode
))
2717 emit_insn (gen_movsi_got (operands
[0], operands
[1]));
2721 if ((TARGET_ELF
|| DEFAULT_ABI
== ABI_DARWIN
)
2722 && TARGET_NO_TOC
&& ! flag_pic
2724 && CONSTANT_P (operands
[1])
2725 && GET_CODE (operands
[1]) != HIGH
2726 && GET_CODE (operands
[1]) != CONST_INT
)
2728 rtx target
= (no_new_pseudos
? operands
[0] : gen_reg_rtx (mode
));
2730 /* If this is a function address on -mcall-aixdesc,
2731 convert it to the address of the descriptor. */
2732 if (DEFAULT_ABI
== ABI_AIX
2733 && GET_CODE (operands
[1]) == SYMBOL_REF
2734 && XSTR (operands
[1], 0)[0] == '.')
2736 const char *name
= XSTR (operands
[1], 0);
2738 while (*name
== '.')
2740 new_ref
= gen_rtx_SYMBOL_REF (Pmode
, name
);
2741 CONSTANT_POOL_ADDRESS_P (new_ref
)
2742 = CONSTANT_POOL_ADDRESS_P (operands
[1]);
2743 SYMBOL_REF_FLAG (new_ref
) = SYMBOL_REF_FLAG (operands
[1]);
2744 SYMBOL_REF_USED (new_ref
) = SYMBOL_REF_USED (operands
[1]);
2745 operands
[1] = new_ref
;
2748 if (DEFAULT_ABI
== ABI_DARWIN
)
2750 emit_insn (gen_macho_high (target
, operands
[1]));
2751 emit_insn (gen_macho_low (operands
[0], target
, operands
[1]));
2755 emit_insn (gen_elf_high (target
, operands
[1]));
2756 emit_insn (gen_elf_low (operands
[0], target
, operands
[1]));
2760 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2761 and we have put it in the TOC, we just need to make a TOC-relative
2764 && GET_CODE (operands
[1]) == SYMBOL_REF
2765 && CONSTANT_POOL_EXPR_P (operands
[1])
2766 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands
[1]),
2767 get_pool_mode (operands
[1])))
2769 operands
[1] = create_TOC_reference (operands
[1]);
2771 else if (mode
== Pmode
2772 && CONSTANT_P (operands
[1])
2773 && ((GET_CODE (operands
[1]) != CONST_INT
2774 && ! easy_fp_constant (operands
[1], mode
))
2775 || (GET_CODE (operands
[1]) == CONST_INT
2776 && num_insns_constant (operands
[1], mode
) > 2)
2777 || (GET_CODE (operands
[0]) == REG
2778 && FP_REGNO_P (REGNO (operands
[0]))))
2779 && GET_CODE (operands
[1]) != HIGH
2780 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands
[1])
2781 && ! TOC_RELATIVE_EXPR_P (operands
[1]))
2783 /* Emit a USE operation so that the constant isn't deleted if
2784 expensive optimizations are turned on because nobody
2785 references it. This should only be done for operands that
2786 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2787 This should not be done for operands that contain LABEL_REFs.
2788 For now, we just handle the obvious case. */
2789 if (GET_CODE (operands
[1]) != LABEL_REF
)
2790 emit_insn (gen_rtx_USE (VOIDmode
, operands
[1]));
2793 /* Darwin uses a special PIC legitimizer. */
2794 if (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
)
2797 rs6000_machopic_legitimize_pic_address (operands
[1], mode
,
2799 if (operands
[0] != operands
[1])
2800 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2805 /* If we are to limit the number of things we put in the TOC and
2806 this is a symbol plus a constant we can add in one insn,
2807 just put the symbol in the TOC and add the constant. Don't do
2808 this if reload is in progress. */
2809 if (GET_CODE (operands
[1]) == CONST
2810 && TARGET_NO_SUM_IN_TOC
&& ! reload_in_progress
2811 && GET_CODE (XEXP (operands
[1], 0)) == PLUS
2812 && add_operand (XEXP (XEXP (operands
[1], 0), 1), mode
)
2813 && (GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == LABEL_REF
2814 || GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == SYMBOL_REF
)
2815 && ! side_effects_p (operands
[0]))
2818 force_const_mem (mode
, XEXP (XEXP (operands
[1], 0), 0));
2819 rtx other
= XEXP (XEXP (operands
[1], 0), 1);
2821 sym
= force_reg (mode
, sym
);
2823 emit_insn (gen_addsi3 (operands
[0], sym
, other
));
2825 emit_insn (gen_adddi3 (operands
[0], sym
, other
));
2829 operands
[1] = force_const_mem (mode
, operands
[1]);
2832 && CONSTANT_POOL_EXPR_P (XEXP (operands
[1], 0))
2833 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
2834 get_pool_constant (XEXP (operands
[1], 0)),
2835 get_pool_mode (XEXP (operands
[1], 0))))
2838 = gen_rtx_MEM (mode
,
2839 create_TOC_reference (XEXP (operands
[1], 0)));
2840 set_mem_alias_set (operands
[1], get_TOC_alias_set ());
2841 RTX_UNCHANGING_P (operands
[1]) = 1;
2847 if (GET_CODE (operands
[0]) == MEM
2848 && GET_CODE (XEXP (operands
[0], 0)) != REG
2849 && ! reload_in_progress
)
2851 = replace_equiv_address (operands
[0],
2852 copy_addr_to_reg (XEXP (operands
[0], 0)));
2854 if (GET_CODE (operands
[1]) == MEM
2855 && GET_CODE (XEXP (operands
[1], 0)) != REG
2856 && ! reload_in_progress
)
2858 = replace_equiv_address (operands
[1],
2859 copy_addr_to_reg (XEXP (operands
[1], 0)));
2866 /* Above, we may have called force_const_mem which may have returned
2867 an invalid address. If we can, fix this up; otherwise, reload will
2868 have to deal with it. */
2869 if (GET_CODE (operands
[1]) == MEM
2870 && ! memory_address_p (mode
, XEXP (operands
[1], 0))
2871 && ! reload_in_progress
)
2872 operands
[1] = adjust_address (operands
[1], mode
, 0);
2874 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2878 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2879 for a call to a function whose data type is FNTYPE.
2880 For a library call, FNTYPE is 0.
2882 For incoming args we set the number of arguments in the prototype large
2883 so we never return a PARALLEL. */
2886 init_cumulative_args (cum
, fntype
, libname
, incoming
)
2887 CUMULATIVE_ARGS
*cum
;
2889 rtx libname ATTRIBUTE_UNUSED
;
2892 static CUMULATIVE_ARGS zero_cumulative
;
2894 *cum
= zero_cumulative
;
2896 cum
->fregno
= FP_ARG_MIN_REG
;
2897 cum
->vregno
= ALTIVEC_ARG_MIN_REG
;
2898 cum
->prototype
= (fntype
&& TYPE_ARG_TYPES (fntype
));
2899 cum
->call_cookie
= CALL_NORMAL
;
2900 cum
->sysv_gregno
= GP_ARG_MIN_REG
;
2903 cum
->nargs_prototype
= 1000; /* don't return a PARALLEL */
2905 else if (cum
->prototype
)
2906 cum
->nargs_prototype
= (list_length (TYPE_ARG_TYPES (fntype
)) - 1
2907 + (TYPE_MODE (TREE_TYPE (fntype
)) == BLKmode
2908 || RETURN_IN_MEMORY (TREE_TYPE (fntype
))));
2911 cum
->nargs_prototype
= 0;
2913 cum
->orig_nargs
= cum
->nargs_prototype
;
2915 /* Check for a longcall attribute. */
2917 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype
))
2918 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype
)))
2919 cum
->call_cookie
= CALL_LONG
;
2921 if (TARGET_DEBUG_ARG
)
2923 fprintf (stderr
, "\ninit_cumulative_args:");
2926 tree ret_type
= TREE_TYPE (fntype
);
2927 fprintf (stderr
, " ret code = %s,",
2928 tree_code_name
[ (int)TREE_CODE (ret_type
) ]);
2931 if (cum
->call_cookie
& CALL_LONG
)
2932 fprintf (stderr
, " longcall,");
2934 fprintf (stderr
, " proto = %d, nargs = %d\n",
2935 cum
->prototype
, cum
->nargs_prototype
);
2939 /* If defined, a C expression which determines whether, and in which
2940 direction, to pad out an argument with extra space. The value
2941 should be of type `enum direction': either `upward' to pad above
2942 the argument, `downward' to pad below, or `none' to inhibit
2945 For the AIX ABI structs are always stored left shifted in their
2949 function_arg_padding (mode
, type
)
2950 enum machine_mode mode
;
2953 if (type
!= 0 && AGGREGATE_TYPE_P (type
))
2956 /* This is the default definition. */
2957 return (! BYTES_BIG_ENDIAN
2960 ? (type
&& TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
2961 && int_size_in_bytes (type
) < (PARM_BOUNDARY
/ BITS_PER_UNIT
))
2962 : GET_MODE_BITSIZE (mode
) < PARM_BOUNDARY
)
2963 ? downward
: upward
));
2966 /* If defined, a C expression that gives the alignment boundary, in bits,
2967 of an argument with the specified mode and type. If it is not defined,
2968 PARM_BOUNDARY is used for all arguments.
2970 V.4 wants long longs to be double word aligned. */
2973 function_arg_boundary (mode
, type
)
2974 enum machine_mode mode
;
2975 tree type ATTRIBUTE_UNUSED
;
2977 if (DEFAULT_ABI
== ABI_V4
&& (mode
== DImode
|| mode
== DFmode
))
2979 else if (SPE_VECTOR_MODE (mode
))
2981 else if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
2984 return PARM_BOUNDARY
;
2987 /* Update the data in CUM to advance over an argument
2988 of mode MODE and data type TYPE.
2989 (TYPE is null for libcalls where that information may not be available.) */
2992 function_arg_advance (cum
, mode
, type
, named
)
2993 CUMULATIVE_ARGS
*cum
;
2994 enum machine_mode mode
;
2998 cum
->nargs_prototype
--;
3000 if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
3002 if (cum
->vregno
<= ALTIVEC_ARG_MAX_REG
&& cum
->nargs_prototype
>= 0)
3005 cum
->words
+= RS6000_ARG_SIZE (mode
, type
);
3007 else if (TARGET_SPE_ABI
&& TARGET_SPE
&& SPE_VECTOR_MODE (mode
)
3008 && named
&& cum
->sysv_gregno
<= GP_ARG_MAX_REG
)
3010 else if (DEFAULT_ABI
== ABI_V4
)
3012 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
3013 && (mode
== SFmode
|| mode
== DFmode
))
3015 if (cum
->fregno
<= FP_ARG_V4_MAX_REG
)
3020 cum
->words
+= cum
->words
& 1;
3021 cum
->words
+= RS6000_ARG_SIZE (mode
, type
);
3027 int gregno
= cum
->sysv_gregno
;
3029 /* Aggregates and IEEE quad get passed by reference. */
3030 if ((type
&& AGGREGATE_TYPE_P (type
))
3034 n_words
= RS6000_ARG_SIZE (mode
, type
);
3036 /* Long long and SPE vectors are put in odd registers. */
3037 if (n_words
== 2 && (gregno
& 1) == 0)
3040 /* Long long and SPE vectors are not split between registers
3042 if (gregno
+ n_words
- 1 > GP_ARG_MAX_REG
)
3044 /* Long long is aligned on the stack. */
3046 cum
->words
+= cum
->words
& 1;
3047 cum
->words
+= n_words
;
3050 /* Note: continuing to accumulate gregno past when we've started
3051 spilling to the stack indicates the fact that we've started
3052 spilling to the stack to expand_builtin_saveregs. */
3053 cum
->sysv_gregno
= gregno
+ n_words
;
3056 if (TARGET_DEBUG_ARG
)
3058 fprintf (stderr
, "function_adv: words = %2d, fregno = %2d, ",
3059 cum
->words
, cum
->fregno
);
3060 fprintf (stderr
, "gregno = %2d, nargs = %4d, proto = %d, ",
3061 cum
->sysv_gregno
, cum
->nargs_prototype
, cum
->prototype
);
3062 fprintf (stderr
, "mode = %4s, named = %d\n",
3063 GET_MODE_NAME (mode
), named
);
3068 int align
= (TARGET_32BIT
&& (cum
->words
& 1) != 0
3069 && function_arg_boundary (mode
, type
) == 64) ? 1 : 0;
3071 cum
->words
+= align
+ RS6000_ARG_SIZE (mode
, type
);
3073 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
3074 && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3075 cum
->fregno
+= (mode
== TFmode
? 2 : 1);
3077 if (TARGET_DEBUG_ARG
)
3079 fprintf (stderr
, "function_adv: words = %2d, fregno = %2d, ",
3080 cum
->words
, cum
->fregno
);
3081 fprintf (stderr
, "nargs = %4d, proto = %d, mode = %4s, ",
3082 cum
->nargs_prototype
, cum
->prototype
, GET_MODE_NAME (mode
));
3083 fprintf (stderr
, "named = %d, align = %d\n", named
, align
);
3088 /* Determine where to put an argument to a function.
3089 Value is zero to push the argument on the stack,
3090 or a hard register in which to store the argument.
3092 MODE is the argument's machine mode.
3093 TYPE is the data type of the argument (as a tree).
3094 This is null for libcalls where that information may
3096 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3097 the preceding args and about the function being called.
3098 NAMED is nonzero if this argument is a named parameter
3099 (otherwise it is an extra parameter matching an ellipsis).
3101 On RS/6000 the first eight words of non-FP are normally in registers
3102 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3103 Under V.4, the first 8 FP args are in registers.
3105 If this is floating-point and no prototype is specified, we use
3106 both an FP and integer register (or possibly FP reg and stack). Library
3107 functions (when TYPE is zero) always have the proper types for args,
3108 so we can pass the FP value just in one register. emit_library_function
3109 doesn't support PARALLEL anyway. */
3112 function_arg (cum
, mode
, type
, named
)
3113 CUMULATIVE_ARGS
*cum
;
3114 enum machine_mode mode
;
3118 enum rs6000_abi abi
= DEFAULT_ABI
;
3120 /* Return a marker to indicate whether CR1 needs to set or clear the
3121 bit that V.4 uses to say fp args were passed in registers.
3122 Assume that we don't need the marker for software floating point,
3123 or compiler generated library calls. */
3124 if (mode
== VOIDmode
)
3127 && cum
->nargs_prototype
< 0
3128 && type
&& (cum
->prototype
|| TARGET_NO_PROTOTYPE
))
3130 /* For the SPE, we need to crxor CR6 always. */
3132 return GEN_INT (cum
->call_cookie
| CALL_V4_SET_FP_ARGS
);
3133 else if (TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3134 return GEN_INT (cum
->call_cookie
3135 | ((cum
->fregno
== FP_ARG_MIN_REG
)
3136 ? CALL_V4_SET_FP_ARGS
3137 : CALL_V4_CLEAR_FP_ARGS
));
3140 return GEN_INT (cum
->call_cookie
);
3143 if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
3145 if (named
&& cum
->vregno
<= ALTIVEC_ARG_MAX_REG
)
3146 return gen_rtx_REG (mode
, cum
->vregno
);
3150 else if (TARGET_SPE_ABI
&& TARGET_SPE
&& SPE_VECTOR_MODE (mode
) && named
)
3152 if (cum
->sysv_gregno
<= GP_ARG_MAX_REG
)
3153 return gen_rtx_REG (mode
, cum
->sysv_gregno
);
3157 else if (abi
== ABI_V4
)
3159 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
3160 && (mode
== SFmode
|| mode
== DFmode
))
3162 if (cum
->fregno
<= FP_ARG_V4_MAX_REG
)
3163 return gen_rtx_REG (mode
, cum
->fregno
);
3170 int gregno
= cum
->sysv_gregno
;
3172 /* Aggregates and IEEE quad get passed by reference. */
3173 if ((type
&& AGGREGATE_TYPE_P (type
))
3177 n_words
= RS6000_ARG_SIZE (mode
, type
);
3179 /* Long long and SPE vectors are put in odd registers. */
3180 if (n_words
== 2 && (gregno
& 1) == 0)
3183 /* Long long and SPE vectors are not split between registers
3185 if (gregno
+ n_words
- 1 <= GP_ARG_MAX_REG
)
3187 /* SPE vectors in ... get split into 2 registers. */
3188 if (TARGET_SPE
&& TARGET_SPE_ABI
3189 && SPE_VECTOR_MODE (mode
) && !named
)
3192 enum machine_mode m
= SImode
;
3194 r1
= gen_rtx_REG (m
, gregno
);
3195 r1
= gen_rtx_EXPR_LIST (m
, r1
, const0_rtx
);
3196 r2
= gen_rtx_REG (m
, gregno
+ 1);
3197 r2
= gen_rtx_EXPR_LIST (m
, r2
, GEN_INT (4));
3198 return gen_rtx_PARALLEL (mode
, gen_rtvec (2, r1
, r2
));
3200 return gen_rtx_REG (mode
, gregno
);
3208 int align
= (TARGET_32BIT
&& (cum
->words
& 1) != 0
3209 && function_arg_boundary (mode
, type
) == 64) ? 1 : 0;
3210 int align_words
= cum
->words
+ align
;
3212 if (type
&& TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
3215 if (USE_FP_FOR_ARG_P (*cum
, mode
, type
))
3218 || ((cum
->nargs_prototype
> 0)
3219 /* IBM AIX extended its linkage convention definition always
3220 to require FP args after register save area hole on the
3222 && (DEFAULT_ABI
!= ABI_AIX
3224 || (align_words
< GP_ARG_NUM_REG
))))
3225 return gen_rtx_REG (mode
, cum
->fregno
);
3227 return gen_rtx_PARALLEL (mode
,
3229 gen_rtx_EXPR_LIST (VOIDmode
,
3230 ((align_words
>= GP_ARG_NUM_REG
)
3233 + RS6000_ARG_SIZE (mode
, type
)
3235 /* If this is partially on the stack, then
3236 we only include the portion actually
3237 in registers here. */
3238 ? gen_rtx_REG (SImode
,
3239 GP_ARG_MIN_REG
+ align_words
)
3240 : gen_rtx_REG (mode
,
3241 GP_ARG_MIN_REG
+ align_words
))),
3243 gen_rtx_EXPR_LIST (VOIDmode
,
3244 gen_rtx_REG (mode
, cum
->fregno
),
3247 else if (align_words
< GP_ARG_NUM_REG
)
3248 return gen_rtx_REG (mode
, GP_ARG_MIN_REG
+ align_words
);
3254 /* For an arg passed partly in registers and partly in memory,
3255 this is the number of registers used.
3256 For args passed entirely in registers or entirely in memory, zero. */
3259 function_arg_partial_nregs (cum
, mode
, type
, named
)
3260 CUMULATIVE_ARGS
*cum
;
3261 enum machine_mode mode
;
3263 int named ATTRIBUTE_UNUSED
;
3265 if (DEFAULT_ABI
== ABI_V4
)
3268 if (USE_FP_FOR_ARG_P (*cum
, mode
, type
)
3269 || USE_ALTIVEC_FOR_ARG_P (*cum
, mode
, type
))
3271 if (cum
->nargs_prototype
>= 0)
3275 if (cum
->words
< GP_ARG_NUM_REG
3276 && GP_ARG_NUM_REG
< (cum
->words
+ RS6000_ARG_SIZE (mode
, type
)))
3278 int ret
= GP_ARG_NUM_REG
- cum
->words
;
3279 if (ret
&& TARGET_DEBUG_ARG
)
3280 fprintf (stderr
, "function_arg_partial_nregs: %d\n", ret
);
3288 /* A C expression that indicates when an argument must be passed by
3289 reference. If nonzero for an argument, a copy of that argument is
3290 made in memory and a pointer to the argument is passed instead of
3291 the argument itself. The pointer is passed in whatever way is
3292 appropriate for passing a pointer to that type.
3294 Under V.4, structures and unions are passed by reference. */
3297 function_arg_pass_by_reference (cum
, mode
, type
, named
)
3298 CUMULATIVE_ARGS
*cum ATTRIBUTE_UNUSED
;
3299 enum machine_mode mode ATTRIBUTE_UNUSED
;
3301 int named ATTRIBUTE_UNUSED
;
3303 if (DEFAULT_ABI
== ABI_V4
3304 && ((type
&& AGGREGATE_TYPE_P (type
))
3307 if (TARGET_DEBUG_ARG
)
3308 fprintf (stderr
, "function_arg_pass_by_reference: aggregate\n");
3316 /* Perform any needed actions needed for a function that is receiving a
3317 variable number of arguments.
3321 MODE and TYPE are the mode and type of the current parameter.
3323 PRETEND_SIZE is a variable that should be set to the amount of stack
3324 that must be pushed by the prolog to pretend that our caller pushed
3327 Normally, this macro will push all remaining incoming registers on the
3328 stack and set PRETEND_SIZE to the length of the registers pushed. */
3331 setup_incoming_varargs (cum
, mode
, type
, pretend_size
, no_rtl
)
3332 CUMULATIVE_ARGS
*cum
;
3333 enum machine_mode mode
;
3335 int *pretend_size ATTRIBUTE_UNUSED
;
3339 CUMULATIVE_ARGS next_cum
;
3340 int reg_size
= TARGET_32BIT
? 4 : 8;
3341 rtx save_area
= NULL_RTX
, mem
;
3342 int first_reg_offset
, set
;
3346 fntype
= TREE_TYPE (current_function_decl
);
3347 stdarg_p
= (TYPE_ARG_TYPES (fntype
) != 0
3348 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
3349 != void_type_node
));
3351 /* For varargs, we do not want to skip the dummy va_dcl argument.
3352 For stdargs, we do want to skip the last named argument. */
3355 function_arg_advance (&next_cum
, mode
, type
, 1);
3357 if (DEFAULT_ABI
== ABI_V4
)
3359 /* Indicate to allocate space on the stack for varargs save area. */
3360 cfun
->machine
->sysv_varargs_p
= 1;
3362 save_area
= plus_constant (virtual_stack_vars_rtx
,
3363 - RS6000_VARARGS_SIZE
);
3365 first_reg_offset
= next_cum
.sysv_gregno
- GP_ARG_MIN_REG
;
3369 first_reg_offset
= next_cum
.words
;
3370 save_area
= virtual_incoming_args_rtx
;
3371 cfun
->machine
->sysv_varargs_p
= 0;
3373 if (MUST_PASS_IN_STACK (mode
, type
))
3374 first_reg_offset
+= RS6000_ARG_SIZE (TYPE_MODE (type
), type
);
3377 set
= get_varargs_alias_set ();
3378 if (! no_rtl
&& first_reg_offset
< GP_ARG_NUM_REG
)
3380 mem
= gen_rtx_MEM (BLKmode
,
3381 plus_constant (save_area
,
3382 first_reg_offset
* reg_size
)),
3383 set_mem_alias_set (mem
, set
);
3384 set_mem_align (mem
, BITS_PER_WORD
);
3387 (GP_ARG_MIN_REG
+ first_reg_offset
, mem
,
3388 GP_ARG_NUM_REG
- first_reg_offset
,
3389 (GP_ARG_NUM_REG
- first_reg_offset
) * UNITS_PER_WORD
);
3392 /* Save FP registers if needed. */
3393 if (DEFAULT_ABI
== ABI_V4
3394 && TARGET_HARD_FLOAT
&& TARGET_FPRS
3396 && next_cum
.fregno
<= FP_ARG_V4_MAX_REG
)
3398 int fregno
= next_cum
.fregno
;
3399 rtx cr1
= gen_rtx_REG (CCmode
, CR1_REGNO
);
3400 rtx lab
= gen_label_rtx ();
3401 int off
= (GP_ARG_NUM_REG
* reg_size
) + ((fregno
- FP_ARG_MIN_REG
) * 8);
3403 emit_jump_insn (gen_rtx_SET (VOIDmode
,
3405 gen_rtx_IF_THEN_ELSE (VOIDmode
,
3406 gen_rtx_NE (VOIDmode
, cr1
,
3408 gen_rtx_LABEL_REF (VOIDmode
, lab
),
3411 while (fregno
<= FP_ARG_V4_MAX_REG
)
3413 mem
= gen_rtx_MEM (DFmode
, plus_constant (save_area
, off
));
3414 set_mem_alias_set (mem
, set
);
3415 emit_move_insn (mem
, gen_rtx_REG (DFmode
, fregno
));
3424 /* Create the va_list data type. */
3427 rs6000_build_va_list ()
3429 tree f_gpr
, f_fpr
, f_ovf
, f_sav
, record
, type_decl
;
3431 /* For AIX, prefer 'char *' because that's what the system
3432 header files like. */
3433 if (DEFAULT_ABI
!= ABI_V4
)
3434 return build_pointer_type (char_type_node
);
3436 record
= (*lang_hooks
.types
.make_type
) (RECORD_TYPE
);
3437 type_decl
= build_decl (TYPE_DECL
, get_identifier ("__va_list_tag"), record
);
3439 f_gpr
= build_decl (FIELD_DECL
, get_identifier ("gpr"),
3440 unsigned_char_type_node
);
3441 f_fpr
= build_decl (FIELD_DECL
, get_identifier ("fpr"),
3442 unsigned_char_type_node
);
3443 f_ovf
= build_decl (FIELD_DECL
, get_identifier ("overflow_arg_area"),
3445 f_sav
= build_decl (FIELD_DECL
, get_identifier ("reg_save_area"),
3448 DECL_FIELD_CONTEXT (f_gpr
) = record
;
3449 DECL_FIELD_CONTEXT (f_fpr
) = record
;
3450 DECL_FIELD_CONTEXT (f_ovf
) = record
;
3451 DECL_FIELD_CONTEXT (f_sav
) = record
;
3453 TREE_CHAIN (record
) = type_decl
;
3454 TYPE_NAME (record
) = type_decl
;
3455 TYPE_FIELDS (record
) = f_gpr
;
3456 TREE_CHAIN (f_gpr
) = f_fpr
;
3457 TREE_CHAIN (f_fpr
) = f_ovf
;
3458 TREE_CHAIN (f_ovf
) = f_sav
;
3460 layout_type (record
);
3462 /* The correct type is an array type of one element. */
3463 return build_array_type (record
, build_index_type (size_zero_node
));
3466 /* Implement va_start. */
3469 rs6000_va_start (valist
, nextarg
)
3473 HOST_WIDE_INT words
, n_gpr
, n_fpr
;
3474 tree f_gpr
, f_fpr
, f_ovf
, f_sav
;
3475 tree gpr
, fpr
, ovf
, sav
, t
;
3477 /* Only SVR4 needs something special. */
3478 if (DEFAULT_ABI
!= ABI_V4
)
3480 std_expand_builtin_va_start (valist
, nextarg
);
3484 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
3485 f_fpr
= TREE_CHAIN (f_gpr
);
3486 f_ovf
= TREE_CHAIN (f_fpr
);
3487 f_sav
= TREE_CHAIN (f_ovf
);
3489 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
3490 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
);
3491 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
);
3492 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
);
3493 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
);
3495 /* Count number of gp and fp argument registers used. */
3496 words
= current_function_args_info
.words
;
3497 n_gpr
= current_function_args_info
.sysv_gregno
- GP_ARG_MIN_REG
;
3498 n_fpr
= current_function_args_info
.fregno
- FP_ARG_MIN_REG
;
3500 if (TARGET_DEBUG_ARG
)
3502 fputs ("va_start: words = ", stderr
);
3503 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, words
);
3504 fputs (", n_gpr = ", stderr
);
3505 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, n_gpr
);
3506 fputs (", n_fpr = ", stderr
);
3507 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, n_fpr
);
3508 putc ('\n', stderr
);
3511 t
= build (MODIFY_EXPR
, TREE_TYPE (gpr
), gpr
, build_int_2 (n_gpr
, 0));
3512 TREE_SIDE_EFFECTS (t
) = 1;
3513 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3515 t
= build (MODIFY_EXPR
, TREE_TYPE (fpr
), fpr
, build_int_2 (n_fpr
, 0));
3516 TREE_SIDE_EFFECTS (t
) = 1;
3517 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3519 /* Find the overflow area. */
3520 t
= make_tree (TREE_TYPE (ovf
), virtual_incoming_args_rtx
);
3522 t
= build (PLUS_EXPR
, TREE_TYPE (ovf
), t
,
3523 build_int_2 (words
* UNITS_PER_WORD
, 0));
3524 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
3525 TREE_SIDE_EFFECTS (t
) = 1;
3526 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3528 /* Find the register save area. */
3529 t
= make_tree (TREE_TYPE (sav
), virtual_stack_vars_rtx
);
3530 t
= build (PLUS_EXPR
, TREE_TYPE (sav
), t
,
3531 build_int_2 (-RS6000_VARARGS_SIZE
, -1));
3532 t
= build (MODIFY_EXPR
, TREE_TYPE (sav
), sav
, t
);
3533 TREE_SIDE_EFFECTS (t
) = 1;
3534 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
/* Implement va_arg.  */

/* Expand va_arg for the V4 (SVR4) ABI: compute and return an rtx
   holding the address of the next argument of TYPE in the va_list
   VALIST.  Non-V4 ABIs defer to the generic single-pointer scheme.

   NOTE(review): this copy lost several interior lines during
   extraction (the fused original line numbers jump, e.g. 3565->3573,
   3576->3587, and lone-brace lines are gone).  Surviving statements
   are reproduced unchanged below; each gap is marked with a comment.
   Recover the missing text from the original file before compiling.  */
rs6000_va_arg (valist, type)
  /* NOTE(review): the return type and the old-style (K&R) parameter
     declarations were elided by the extraction.  */
{
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
  rtx lab_false, lab_over, addr_rtx, r;

  /* Only the SVR4 ABI uses the split gpr/fpr/overflow va_list.  */
  if (DEFAULT_ABI != ABI_V4)
    return std_expand_builtin_va_arg (valist, type);

  /* Pick apart the va_list record type: gpr count, fpr count,
     overflow-area pointer, register-save-area pointer.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  /* Build COMPONENT_REF trees for each field of the va_list object.  */
  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  size = int_size_in_bytes (type);
  /* Argument size rounded up to whole words.  */
  rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
    {
      /* Aggregates and long doubles are passed by reference.  */
      /* NOTE(review): gap here (orig. lines ~3568-3572) — presumably
	 sets indirect_p, reg and the register bookkeeping for the
	 pointer; confirm against the original file.  */
      size = UNITS_PER_WORD;
    }
  else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
    {
      /* FP args go in FP registers, if present.  */
      /* NOTE(review): gap (orig. lines ~3579-3586) — the FP-register
	 bookkeeping (reg = fpr, sav_ofs/sav_scale/n_reg setup) was
	 elided by the extraction.  */
    }
  /* Otherwise into GP registers.  */
  /* NOTE(review): gap (orig. lines ~3588-3594) — the GP-register else
     branch was elided.  */

  /* Pull the value out of the saved registers ...  */

  lab_false = gen_label_rtx ();
  lab_over = gen_label_rtx ();
  addr_rtx = gen_reg_rtx (Pmode);

  /* AltiVec vectors never go in registers.  */
  if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
    {
      /* REG is updated below; force it to be re-read.  */
      TREE_THIS_VOLATILE (reg) = 1;
      /* If the register count is exhausted, fall through to the
	 overflow area at lab_false.  */
      emit_cmp_and_jump_insns
	(expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
	 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
	 /* NOTE(review): the trailing label argument (orig. line 3608,
	    presumably lab_false) was elided.  */

      /* Long long is aligned in the registers.  */
      /* NOTE(review): the enclosing condition (orig. ~3611-3612,
	 presumably `if (n_reg > 1)`) was elided.  */
	  /* Round REG up to an even register number.  */
	  u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
		     build_int_2 (n_reg - 1, 0));
	  u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
	  u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
	  TREE_SIDE_EFFECTS (u) = 1;
	  expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);

      /* Address = sav + sav_ofs + (reg++) * sav_scale.  */
      t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));

      u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
		 build_int_2 (n_reg, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      u = build1 (CONVERT_EXPR, integer_type_node, u);
      TREE_SIDE_EFFECTS (u) = 1;

      u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      t = build (PLUS_EXPR, ptr_type_node, t, u);
      TREE_SIDE_EFFECTS (t) = 1;

      r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
      /* NOTE(review): a guard (orig. line ~3640, presumably
	 `if (r != addr_rtx)`) appears to have been elided here.  */
      emit_move_insn (addr_rtx, r);

      emit_jump_insn (gen_jump (lab_over));
    }

  emit_label (lab_false);

  /* ... otherwise out of the overflow area.  */

  /* Make sure we don't find reg 7 for the next int arg.
     NOTE(review): orig. line 3652 elided from this comment.
     All AltiVec vectors go in the overflow area.  So in the AltiVec
     case we need to get the vectors from the overflow area, but
     remember where the GPRs and FPRs are.  */
  if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
		    || !TARGET_ALTIVEC))
    {
      t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  /* Care for on-stack alignment if needed.  */
  /* NOTE(review): gap (orig. ~3665-3670) — the alignment computation
     for non-vector types was elided; the local `align` used below is
     set there.  */
  /* AltiVec vectors are 16 byte aligned.  */
  if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
    /* NOTE(review): gap (orig. ~3673-3676) — this branch body (the
       16-byte `align` value) was elided.  */
    {
      /* Round the overflow pointer: (ovf + align) & ~align.  */
      t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
      t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
    }

  /* Hand back the aligned overflow address, then bump ovf past SIZE.  */
  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  emit_move_insn (addr_rtx, r);

  t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_over);

  /* NOTE(review): a guard (orig. ~3693-3694, presumably
     `if (indirect_p)`) was elided — the extra load below dereferences
     once for by-reference arguments.  */
  r = gen_rtx_MEM (Pmode, addr_rtx);
  set_mem_alias_set (r, get_varargs_alias_set ());
  emit_move_insn (addr_rtx, r);

  /* NOTE(review): the `return addr_rtx;` and the closing brace were
     elided by the extraction.  */
}
/* def_builtin (MASK, NAME, TYPE, CODE): register the builtin NAME
   with function type TYPE and enum value CODE, but only when the MASK
   bit is set in target_flags (i.e. the matching -m option is on).

   NOTE(review): the extraction truncated this macro — the do/while
   wrapper and the trailing builtin_function arguments (orig. lines
   3706 and 3709-3710) are missing; recover them from the original
   file.  */
#define def_builtin(MASK, NAME, TYPE, CODE) \
  if ((MASK) & target_flags) \
    builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
/* Simple ternary operations: VECd = foo (VECa, VECb, VECc).  */

/* Each entry: the target_flags mask that must be set for the builtin
   to be registered, the insn code that implements it, the
   user-visible builtin name, and its rs6000_builtins enum value.
   NOTE(review): the lone-brace lines of the initializer were lost in
   extraction and have been restored; the entries are unchanged.  */
static const struct builtin_description bdesc_3arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
};
/* DST operations: void foo (void *, const int, const char).  */

/* AltiVec data-stream touch builtins; entry layout matches
   bdesc_3arg: target mask, insn code, user-visible name, enum value.
   NOTE(review): the lone-brace lines of the initializer were lost in
   extraction and have been restored; the entries are unchanged.  */
static const struct builtin_description bdesc_dst[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
};
/* Simple binary operations: VECc = foo (VECa, VECb).  */

/* Entry layout matches bdesc_3arg: target mask, insn code,
   user-visible name, rs6000_builtins enum value.  SPE entries use a
   mask of 0 (registered under TARGET_SPE rather than a target_flags
   bit).
   NOTE(review): unlike bdesc_3arg/bdesc_dst this table is not
   declared const — confirm whether something writes to it or whether
   the qualifier was dropped accidentally.  Lone-brace lines lost in
   extraction have been restored; the entries are unchanged.  */
static struct builtin_description bdesc_2arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
  { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
  { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
  { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
  { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
  { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
  { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
  { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
  { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
  { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
  { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
  { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
  { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
  { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
  { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
  { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
  { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
  { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
  { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
  { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
  { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
  { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
  { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
  { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
  { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
  { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
  { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },

  /* Place holder, leave as first spe builtin.  */
  { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
  { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
  { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
  { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
  { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
  { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
  { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
  { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
  { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
  { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
  { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
  { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
  { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
  { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
  { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
  { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
  { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
  { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
  { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
  { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
  { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
  { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
  { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
  { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
  { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
  { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
  { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
  { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
  { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
  { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
  { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
  { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
  { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
  { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
  { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
  { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
  { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
  { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
  { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
  { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
  { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
  { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
  { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
  { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
  { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
  { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
  { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
  { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
  { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
  { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
  { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
  { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
  { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
  { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
  { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
  { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
  { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
  { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
  { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
  { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
  { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
  { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
  { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
  { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
  { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
  { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
  { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
  { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
  { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
  { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
  { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
  { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
  { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
  { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
  { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
  { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
  { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
  { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
  { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
  { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
  { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
  { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
  { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
  { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
  { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
  { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
  { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
  { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
  { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
  { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
  { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
  { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
  { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
  { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
  { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
  { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
  { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
  { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
  { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
  { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
  { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
  { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
  { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
  { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
  { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
  { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
  { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
  { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
  { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },

  /* SPE binary operations expecting a 5-bit unsigned literal.  */
  { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
  /* NOTE(review): one entry line (orig. line 3982) appears to have
     been elided here by the extraction — confirm against the
     original file.  */
  { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
  { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
  { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
  { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
  { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
  { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
  { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
  { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
  { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
  { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
  { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
  { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
  { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
  { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
  { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
  { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
  { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
  { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
  { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
  { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
  { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
  { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
  { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
  { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
  { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
  { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },

  /* Place-holder.  Leave as last binary SPE builtin.  */
  { 0, CODE_FOR_spe_evxor, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
};
/* AltiVec predicates.  */

/* Description of an AltiVec predicate builtin: the target_flags bit
   that enables it, the insn code that performs the compare, the
   user-visible builtin name, and its rs6000_builtins enum value.
   NOTE(review): the extraction dropped the lone-brace lines (restored
   below) and at least one member (orig. line 4020) — judging by the
   five-field initializers in bdesc_altivec_preds below, the missing
   member is an assembler-opcode string (e.g.
   `const char *const opcode;`).  Restore it from the original file.  */
struct builtin_description_predicates
{
  const unsigned int mask;
  const enum insn_code icode;
  const char *const name;
  const enum rs6000_builtins code;
};
/* Table of the AltiVec compare-predicate builtins.  Each entry gives
   the enabling mask, the generic predicate-expander insn for the
   vector mode, the assembler opcode of the dot-form compare, the
   user-visible name, and the builtin enum value.
   NOTE(review): lone-brace lines lost in extraction were restored;
   entries are unchanged.  */
static const struct builtin_description_predicates bdesc_altivec_preds[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
};
4042 /* SPE predicates. */
4043 static struct builtin_description bdesc_spe_predicates
[] =
4045 /* Place-holder. Leave as first. */
4046 { 0, CODE_FOR_spe_evcmpeq
, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ
},
4047 { 0, CODE_FOR_spe_evcmpgts
, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS
},
4048 { 0, CODE_FOR_spe_evcmpgtu
, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU
},
4049 { 0, CODE_FOR_spe_evcmplts
, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS
},
4050 { 0, CODE_FOR_spe_evcmpltu
, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU
},
4051 { 0, CODE_FOR_spe_evfscmpeq
, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ
},
4052 { 0, CODE_FOR_spe_evfscmpgt
, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT
},
4053 { 0, CODE_FOR_spe_evfscmplt
, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT
},
4054 { 0, CODE_FOR_spe_evfststeq
, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ
},
4055 { 0, CODE_FOR_spe_evfststgt
, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT
},
4056 /* Place-holder. Leave as last. */
4057 { 0, CODE_FOR_spe_evfststlt
, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT
},
4060 /* SPE evsel predicates. */
4061 static struct builtin_description bdesc_spe_evsel
[] =
4063 /* Place-holder. Leave as first. */
4064 { 0, CODE_FOR_spe_evcmpgts
, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS
},
4065 { 0, CODE_FOR_spe_evcmpgtu
, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU
},
4066 { 0, CODE_FOR_spe_evcmplts
, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS
},
4067 { 0, CODE_FOR_spe_evcmpltu
, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU
},
4068 { 0, CODE_FOR_spe_evcmpeq
, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ
},
4069 { 0, CODE_FOR_spe_evfscmpgt
, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT
},
4070 { 0, CODE_FOR_spe_evfscmplt
, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT
},
4071 { 0, CODE_FOR_spe_evfscmpeq
, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ
},
4072 { 0, CODE_FOR_spe_evfststgt
, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT
},
4073 { 0, CODE_FOR_spe_evfststlt
, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT
},
4074 /* Place-holder. Leave as last. */
4075 { 0, CODE_FOR_spe_evfststeq
, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ
},
4078 /* ABS* opreations. */
4080 static const struct builtin_description bdesc_abs
[] =
4082 { MASK_ALTIVEC
, CODE_FOR_absv4si2
, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI
},
4083 { MASK_ALTIVEC
, CODE_FOR_absv8hi2
, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI
},
4084 { MASK_ALTIVEC
, CODE_FOR_absv4sf2
, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF
},
4085 { MASK_ALTIVEC
, CODE_FOR_absv16qi2
, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI
},
4086 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v4si
, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI
},
4087 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v8hi
, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI
},
4088 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v16qi
, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI
}
4091 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
4094 static struct builtin_description bdesc_1arg
[] =
4096 { MASK_ALTIVEC
, CODE_FOR_altivec_vexptefp
, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP
},
4097 { MASK_ALTIVEC
, CODE_FOR_altivec_vlogefp
, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP
},
4098 { MASK_ALTIVEC
, CODE_FOR_altivec_vrefp
, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP
},
4099 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfim
, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM
},
4100 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfin
, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN
},
4101 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfip
, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP
},
4102 { MASK_ALTIVEC
, CODE_FOR_ftruncv4sf2
, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ
},
4103 { MASK_ALTIVEC
, CODE_FOR_altivec_vrsqrtefp
, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP
},
4104 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltisb
, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB
},
4105 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltish
, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH
},
4106 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltisw
, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW
},
4107 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhsb
, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB
},
4108 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhpx
, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX
},
4109 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhsh
, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH
},
4110 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklsb
, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB
},
4111 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklpx
, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX
},
4112 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklsh
, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH
},
4114 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
4115 end with SPE_BUILTIN_EVSUBFUSIAAW. */
4116 { 0, CODE_FOR_spe_evabs
, "__builtin_spe_evabs", SPE_BUILTIN_EVABS
},
4117 { 0, CODE_FOR_spe_evaddsmiaaw
, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW
},
4118 { 0, CODE_FOR_spe_evaddssiaaw
, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW
},
4119 { 0, CODE_FOR_spe_evaddumiaaw
, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW
},
4120 { 0, CODE_FOR_spe_evaddusiaaw
, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW
},
4121 { 0, CODE_FOR_spe_evcntlsw
, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW
},
4122 { 0, CODE_FOR_spe_evcntlzw
, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW
},
4123 { 0, CODE_FOR_spe_evextsb
, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB
},
4124 { 0, CODE_FOR_spe_evextsh
, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH
},
4125 { 0, CODE_FOR_spe_evfsabs
, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS
},
4126 { 0, CODE_FOR_spe_evfscfsf
, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF
},
4127 { 0, CODE_FOR_spe_evfscfsi
, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI
},
4128 { 0, CODE_FOR_spe_evfscfuf
, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF
},
4129 { 0, CODE_FOR_spe_evfscfui
, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI
},
4130 { 0, CODE_FOR_spe_evfsctsf
, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF
},
4131 { 0, CODE_FOR_spe_evfsctsi
, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI
},
4132 { 0, CODE_FOR_spe_evfsctsiz
, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ
},
4133 { 0, CODE_FOR_spe_evfsctuf
, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF
},
4134 { 0, CODE_FOR_spe_evfsctui
, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI
},
4135 { 0, CODE_FOR_spe_evfsctuiz
, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ
},
4136 { 0, CODE_FOR_spe_evfsnabs
, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS
},
4137 { 0, CODE_FOR_spe_evfsneg
, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG
},
4138 { 0, CODE_FOR_spe_evmra
, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA
},
4139 { 0, CODE_FOR_spe_evneg
, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG
},
4140 { 0, CODE_FOR_spe_evrndw
, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW
},
4141 { 0, CODE_FOR_spe_evsubfsmiaaw
, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW
},
4142 { 0, CODE_FOR_spe_evsubfssiaaw
, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW
},
4143 { 0, CODE_FOR_spe_evsubfumiaaw
, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW
},
4144 { 0, CODE_FOR_spe_evsplatfi
, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI
},
4145 { 0, CODE_FOR_spe_evsplati
, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI
},
4147 /* Place-holder. Leave as last unary SPE builtin. */
4148 { 0, CODE_FOR_spe_evsubfusiaaw
, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW
},
4152 rs6000_expand_unop_builtin (icode
, arglist
, target
)
4153 enum insn_code icode
;
4158 tree arg0
= TREE_VALUE (arglist
);
4159 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4160 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4161 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4163 if (icode
== CODE_FOR_nothing
)
4164 /* Builtin not supported on this processor. */
4167 /* If we got invalid arguments bail out before generating bad rtl. */
4168 if (arg0
== error_mark_node
)
4171 if (icode
== CODE_FOR_altivec_vspltisb
4172 || icode
== CODE_FOR_altivec_vspltish
4173 || icode
== CODE_FOR_altivec_vspltisw
4174 || icode
== CODE_FOR_spe_evsplatfi
4175 || icode
== CODE_FOR_spe_evsplati
)
4177 /* Only allow 5-bit *signed* literals. */
4178 if (GET_CODE (op0
) != CONST_INT
4179 || INTVAL (op0
) > 0x1f
4180 || INTVAL (op0
) < -0x1f)
4182 error ("argument 1 must be a 5-bit signed literal");
4188 || GET_MODE (target
) != tmode
4189 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4190 target
= gen_reg_rtx (tmode
);
4192 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4193 op0
= copy_to_mode_reg (mode0
, op0
);
4195 pat
= GEN_FCN (icode
) (target
, op0
);
4204 altivec_expand_abs_builtin (icode
, arglist
, target
)
4205 enum insn_code icode
;
4209 rtx pat
, scratch1
, scratch2
;
4210 tree arg0
= TREE_VALUE (arglist
);
4211 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4212 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4213 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4215 /* If we have invalid arguments, bail out before generating bad rtl. */
4216 if (arg0
== error_mark_node
)
4220 || GET_MODE (target
) != tmode
4221 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4222 target
= gen_reg_rtx (tmode
);
4224 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4225 op0
= copy_to_mode_reg (mode0
, op0
);
4227 scratch1
= gen_reg_rtx (mode0
);
4228 scratch2
= gen_reg_rtx (mode0
);
4230 pat
= GEN_FCN (icode
) (target
, op0
, scratch1
, scratch2
);
4239 rs6000_expand_binop_builtin (icode
, arglist
, target
)
4240 enum insn_code icode
;
4245 tree arg0
= TREE_VALUE (arglist
);
4246 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4247 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4248 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4249 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4250 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4251 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
4253 if (icode
== CODE_FOR_nothing
)
4254 /* Builtin not supported on this processor. */
4257 /* If we got invalid arguments bail out before generating bad rtl. */
4258 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
4261 if (icode
== CODE_FOR_altivec_vcfux
4262 || icode
== CODE_FOR_altivec_vcfsx
4263 || icode
== CODE_FOR_altivec_vctsxs
4264 || icode
== CODE_FOR_altivec_vctuxs
4265 || icode
== CODE_FOR_altivec_vspltb
4266 || icode
== CODE_FOR_altivec_vsplth
4267 || icode
== CODE_FOR_altivec_vspltw
4268 || icode
== CODE_FOR_spe_evaddiw
4269 || icode
== CODE_FOR_spe_evldd
4270 || icode
== CODE_FOR_spe_evldh
4271 || icode
== CODE_FOR_spe_evldw
4272 || icode
== CODE_FOR_spe_evlhhesplat
4273 || icode
== CODE_FOR_spe_evlhhossplat
4274 || icode
== CODE_FOR_spe_evlhhousplat
4275 || icode
== CODE_FOR_spe_evlwhe
4276 || icode
== CODE_FOR_spe_evlwhos
4277 || icode
== CODE_FOR_spe_evlwhou
4278 || icode
== CODE_FOR_spe_evlwhsplat
4279 || icode
== CODE_FOR_spe_evlwwsplat
4280 || icode
== CODE_FOR_spe_evrlwi
4281 || icode
== CODE_FOR_spe_evslwi
4282 || icode
== CODE_FOR_spe_evsrwis
4283 || icode
== CODE_FOR_spe_evsrwiu
)
4285 /* Only allow 5-bit unsigned literals. */
4286 if (TREE_CODE (arg1
) != INTEGER_CST
4287 || TREE_INT_CST_LOW (arg1
) & ~0x1f)
4289 error ("argument 2 must be a 5-bit unsigned literal");
4295 || GET_MODE (target
) != tmode
4296 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4297 target
= gen_reg_rtx (tmode
);
4299 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4300 op0
= copy_to_mode_reg (mode0
, op0
);
4301 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
4302 op1
= copy_to_mode_reg (mode1
, op1
);
4304 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
4313 altivec_expand_predicate_builtin (icode
, opcode
, arglist
, target
)
4314 enum insn_code icode
;
4320 tree cr6_form
= TREE_VALUE (arglist
);
4321 tree arg0
= TREE_VALUE (TREE_CHAIN (arglist
));
4322 tree arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4323 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4324 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4325 enum machine_mode tmode
= SImode
;
4326 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4327 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
4330 if (TREE_CODE (cr6_form
) != INTEGER_CST
)
4332 error ("argument 1 of __builtin_altivec_predicate must be a constant");
4336 cr6_form_int
= TREE_INT_CST_LOW (cr6_form
);
4341 /* If we have invalid arguments, bail out before generating bad rtl. */
4342 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
4346 || GET_MODE (target
) != tmode
4347 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4348 target
= gen_reg_rtx (tmode
);
4350 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4351 op0
= copy_to_mode_reg (mode0
, op0
);
4352 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
4353 op1
= copy_to_mode_reg (mode1
, op1
);
4355 scratch
= gen_reg_rtx (mode0
);
4357 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
,
4358 gen_rtx (SYMBOL_REF
, Pmode
, opcode
));
4363 /* The vec_any* and vec_all* predicates use the same opcodes for two
4364 different operations, but the bits in CR6 will be different
4365 depending on what information we want. So we have to play tricks
4366 with CR6 to get the right bits out.
4368 If you think this is disgusting, look at the specs for the
4369 AltiVec predicates. */
4371 switch (cr6_form_int
)
4374 emit_insn (gen_cr6_test_for_zero (target
));
4377 emit_insn (gen_cr6_test_for_zero_reverse (target
));
4380 emit_insn (gen_cr6_test_for_lt (target
));
4383 emit_insn (gen_cr6_test_for_lt_reverse (target
));
4386 error ("argument 1 of __builtin_altivec_predicate is out of range");
4394 altivec_expand_stv_builtin (icode
, arglist
)
4395 enum insn_code icode
;
4398 tree arg0
= TREE_VALUE (arglist
);
4399 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4400 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4401 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4402 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4403 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
4405 enum machine_mode mode0
= insn_data
[icode
].operand
[0].mode
;
4406 enum machine_mode mode1
= insn_data
[icode
].operand
[1].mode
;
4407 enum machine_mode mode2
= insn_data
[icode
].operand
[2].mode
;
4409 /* Invalid arguments. Bail before doing anything stoopid! */
4410 if (arg0
== error_mark_node
4411 || arg1
== error_mark_node
4412 || arg2
== error_mark_node
)
4415 if (! (*insn_data
[icode
].operand
[2].predicate
) (op0
, mode2
))
4416 op0
= copy_to_mode_reg (mode2
, op0
);
4417 if (! (*insn_data
[icode
].operand
[0].predicate
) (op1
, mode0
))
4418 op1
= copy_to_mode_reg (mode0
, op1
);
4419 if (! (*insn_data
[icode
].operand
[1].predicate
) (op2
, mode1
))
4420 op2
= copy_to_mode_reg (mode1
, op2
);
4422 pat
= GEN_FCN (icode
) (op1
, op2
, op0
);
4429 rs6000_expand_ternop_builtin (icode
, arglist
, target
)
4430 enum insn_code icode
;
4435 tree arg0
= TREE_VALUE (arglist
);
4436 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4437 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4438 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4439 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4440 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
4441 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4442 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4443 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
4444 enum machine_mode mode2
= insn_data
[icode
].operand
[3].mode
;
4446 if (icode
== CODE_FOR_nothing
)
4447 /* Builtin not supported on this processor. */
4450 /* If we got invalid arguments bail out before generating bad rtl. */
4451 if (arg0
== error_mark_node
4452 || arg1
== error_mark_node
4453 || arg2
== error_mark_node
)
4456 if (icode
== CODE_FOR_altivec_vsldoi_4sf
4457 || icode
== CODE_FOR_altivec_vsldoi_4si
4458 || icode
== CODE_FOR_altivec_vsldoi_8hi
4459 || icode
== CODE_FOR_altivec_vsldoi_16qi
)
4461 /* Only allow 4-bit unsigned literals. */
4462 if (TREE_CODE (arg2
) != INTEGER_CST
4463 || TREE_INT_CST_LOW (arg2
) & ~0xf)
4465 error ("argument 3 must be a 4-bit unsigned literal");
4471 || GET_MODE (target
) != tmode
4472 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4473 target
= gen_reg_rtx (tmode
);
4475 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4476 op0
= copy_to_mode_reg (mode0
, op0
);
4477 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
4478 op1
= copy_to_mode_reg (mode1
, op1
);
4479 if (! (*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
))
4480 op2
= copy_to_mode_reg (mode2
, op2
);
4482 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
);
4490 /* Expand the lvx builtins. */
4492 altivec_expand_ld_builtin (exp
, target
, expandedp
)
4497 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4498 tree arglist
= TREE_OPERAND (exp
, 1);
4499 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4501 enum machine_mode tmode
, mode0
;
4503 enum insn_code icode
;
4507 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi
:
4508 icode
= CODE_FOR_altivec_lvx_16qi
;
4510 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi
:
4511 icode
= CODE_FOR_altivec_lvx_8hi
;
4513 case ALTIVEC_BUILTIN_LD_INTERNAL_4si
:
4514 icode
= CODE_FOR_altivec_lvx_4si
;
4516 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf
:
4517 icode
= CODE_FOR_altivec_lvx_4sf
;
4526 arg0
= TREE_VALUE (arglist
);
4527 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4528 tmode
= insn_data
[icode
].operand
[0].mode
;
4529 mode0
= insn_data
[icode
].operand
[1].mode
;
4532 || GET_MODE (target
) != tmode
4533 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4534 target
= gen_reg_rtx (tmode
);
4536 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4537 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
4539 pat
= GEN_FCN (icode
) (target
, op0
);
4546 /* Expand the stvx builtins. */
4548 altivec_expand_st_builtin (exp
, target
, expandedp
)
4550 rtx target ATTRIBUTE_UNUSED
;
4553 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4554 tree arglist
= TREE_OPERAND (exp
, 1);
4555 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4557 enum machine_mode mode0
, mode1
;
4559 enum insn_code icode
;
4563 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi
:
4564 icode
= CODE_FOR_altivec_stvx_16qi
;
4566 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi
:
4567 icode
= CODE_FOR_altivec_stvx_8hi
;
4569 case ALTIVEC_BUILTIN_ST_INTERNAL_4si
:
4570 icode
= CODE_FOR_altivec_stvx_4si
;
4572 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf
:
4573 icode
= CODE_FOR_altivec_stvx_4sf
;
4580 arg0
= TREE_VALUE (arglist
);
4581 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4582 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4583 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4584 mode0
= insn_data
[icode
].operand
[0].mode
;
4585 mode1
= insn_data
[icode
].operand
[1].mode
;
4587 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
4588 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
4589 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
4590 op1
= copy_to_mode_reg (mode1
, op1
);
4592 pat
= GEN_FCN (icode
) (op0
, op1
);
4600 /* Expand the dst builtins. */
4602 altivec_expand_dst_builtin (exp
, target
, expandedp
)
4604 rtx target ATTRIBUTE_UNUSED
;
4607 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4608 tree arglist
= TREE_OPERAND (exp
, 1);
4609 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4610 tree arg0
, arg1
, arg2
;
4611 enum machine_mode mode0
, mode1
, mode2
;
4612 rtx pat
, op0
, op1
, op2
;
4613 struct builtin_description
*d
;
4618 /* Handle DST variants. */
4619 d
= (struct builtin_description
*) bdesc_dst
;
4620 for (i
= 0; i
< ARRAY_SIZE (bdesc_dst
); i
++, d
++)
4621 if (d
->code
== fcode
)
4623 arg0
= TREE_VALUE (arglist
);
4624 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4625 arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4626 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4627 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4628 op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
4629 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
4630 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
4631 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
4633 /* Invalid arguments, bail out before generating bad rtl. */
4634 if (arg0
== error_mark_node
4635 || arg1
== error_mark_node
4636 || arg2
== error_mark_node
)
4639 if (TREE_CODE (arg2
) != INTEGER_CST
4640 || TREE_INT_CST_LOW (arg2
) & ~0x3)
4642 error ("argument to `%s' must be a 2-bit unsigned literal", d
->name
);
4646 if (! (*insn_data
[d
->icode
].operand
[0].predicate
) (op0
, mode0
))
4647 op0
= copy_to_mode_reg (mode0
, op0
);
4648 if (! (*insn_data
[d
->icode
].operand
[1].predicate
) (op1
, mode1
))
4649 op1
= copy_to_mode_reg (mode1
, op1
);
4651 pat
= GEN_FCN (d
->icode
) (op0
, op1
, op2
);
4662 /* Expand the builtin in EXP and store the result in TARGET. Store
4663 true in *EXPANDEDP if we found a builtin to expand. */
4665 altivec_expand_builtin (exp
, target
, expandedp
)
4670 struct builtin_description
*d
;
4671 struct builtin_description_predicates
*dp
;
4673 enum insn_code icode
;
4674 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4675 tree arglist
= TREE_OPERAND (exp
, 1);
4678 enum machine_mode tmode
, mode0
;
4679 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4681 target
= altivec_expand_ld_builtin (exp
, target
, expandedp
);
4685 target
= altivec_expand_st_builtin (exp
, target
, expandedp
);
4689 target
= altivec_expand_dst_builtin (exp
, target
, expandedp
);
4697 case ALTIVEC_BUILTIN_STVX
:
4698 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx
, arglist
);
4699 case ALTIVEC_BUILTIN_STVEBX
:
4700 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx
, arglist
);
4701 case ALTIVEC_BUILTIN_STVEHX
:
4702 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx
, arglist
);
4703 case ALTIVEC_BUILTIN_STVEWX
:
4704 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx
, arglist
);
4705 case ALTIVEC_BUILTIN_STVXL
:
4706 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl
, arglist
);
4708 case ALTIVEC_BUILTIN_MFVSCR
:
4709 icode
= CODE_FOR_altivec_mfvscr
;
4710 tmode
= insn_data
[icode
].operand
[0].mode
;
4713 || GET_MODE (target
) != tmode
4714 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4715 target
= gen_reg_rtx (tmode
);
4717 pat
= GEN_FCN (icode
) (target
);
4723 case ALTIVEC_BUILTIN_MTVSCR
:
4724 icode
= CODE_FOR_altivec_mtvscr
;
4725 arg0
= TREE_VALUE (arglist
);
4726 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4727 mode0
= insn_data
[icode
].operand
[0].mode
;
4729 /* If we got invalid arguments bail out before generating bad rtl. */
4730 if (arg0
== error_mark_node
)
4733 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
4734 op0
= copy_to_mode_reg (mode0
, op0
);
4736 pat
= GEN_FCN (icode
) (op0
);
4741 case ALTIVEC_BUILTIN_DSSALL
:
4742 emit_insn (gen_altivec_dssall ());
4745 case ALTIVEC_BUILTIN_DSS
:
4746 icode
= CODE_FOR_altivec_dss
;
4747 arg0
= TREE_VALUE (arglist
);
4748 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4749 mode0
= insn_data
[icode
].operand
[0].mode
;
4751 /* If we got invalid arguments bail out before generating bad rtl. */
4752 if (arg0
== error_mark_node
)
4755 if (TREE_CODE (arg0
) != INTEGER_CST
4756 || TREE_INT_CST_LOW (arg0
) & ~0x3)
4758 error ("argument to dss must be a 2-bit unsigned literal");
4762 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
4763 op0
= copy_to_mode_reg (mode0
, op0
);
4765 emit_insn (gen_altivec_dss (op0
));
4769 /* Expand abs* operations. */
4770 d
= (struct builtin_description
*) bdesc_abs
;
4771 for (i
= 0; i
< ARRAY_SIZE (bdesc_abs
); i
++, d
++)
4772 if (d
->code
== fcode
)
4773 return altivec_expand_abs_builtin (d
->icode
, arglist
, target
);
4775 /* Expand the AltiVec predicates. */
4776 dp
= (struct builtin_description_predicates
*) bdesc_altivec_preds
;
4777 for (i
= 0; i
< ARRAY_SIZE (bdesc_altivec_preds
); i
++, dp
++)
4778 if (dp
->code
== fcode
)
4779 return altivec_expand_predicate_builtin (dp
->icode
, dp
->opcode
, arglist
, target
);
4781 /* LV* are funky. We initialized them differently. */
4784 case ALTIVEC_BUILTIN_LVSL
:
4785 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl
,
4787 case ALTIVEC_BUILTIN_LVSR
:
4788 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr
,
4790 case ALTIVEC_BUILTIN_LVEBX
:
4791 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx
,
4793 case ALTIVEC_BUILTIN_LVEHX
:
4794 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx
,
4796 case ALTIVEC_BUILTIN_LVEWX
:
4797 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx
,
4799 case ALTIVEC_BUILTIN_LVXL
:
4800 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl
,
4802 case ALTIVEC_BUILTIN_LVX
:
4803 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx
,
4814 /* Binops that need to be initialized manually, but can be expanded
4815 automagically by rs6000_expand_binop_builtin. */
4816 static struct builtin_description bdesc_2arg_spe
[] =
4818 { 0, CODE_FOR_spe_evlddx
, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX
},
4819 { 0, CODE_FOR_spe_evldwx
, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX
},
4820 { 0, CODE_FOR_spe_evldhx
, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX
},
4821 { 0, CODE_FOR_spe_evlwhex
, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX
},
4822 { 0, CODE_FOR_spe_evlwhoux
, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX
},
4823 { 0, CODE_FOR_spe_evlwhosx
, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX
},
4824 { 0, CODE_FOR_spe_evlwwsplatx
, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX
},
4825 { 0, CODE_FOR_spe_evlwhsplatx
, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX
},
4826 { 0, CODE_FOR_spe_evlhhesplatx
, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX
},
4827 { 0, CODE_FOR_spe_evlhhousplatx
, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX
},
4828 { 0, CODE_FOR_spe_evlhhossplatx
, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX
},
4829 { 0, CODE_FOR_spe_evldd
, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD
},
4830 { 0, CODE_FOR_spe_evldw
, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW
},
4831 { 0, CODE_FOR_spe_evldh
, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH
},
4832 { 0, CODE_FOR_spe_evlwhe
, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE
},
4833 { 0, CODE_FOR_spe_evlwhou
, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU
},
4834 { 0, CODE_FOR_spe_evlwhos
, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS
},
4835 { 0, CODE_FOR_spe_evlwwsplat
, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT
},
4836 { 0, CODE_FOR_spe_evlwhsplat
, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT
},
4837 { 0, CODE_FOR_spe_evlhhesplat
, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT
},
4838 { 0, CODE_FOR_spe_evlhhousplat
, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT
},
4839 { 0, CODE_FOR_spe_evlhhossplat
, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT
}
4842 /* Expand the builtin in EXP and store the result in TARGET. Store
4843 true in *EXPANDEDP if we found a builtin to expand.
4845 This expands the SPE builtins that are not simple unary and binary
4848 spe_expand_builtin (exp
, target
, expandedp
)
4853 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4854 tree arglist
= TREE_OPERAND (exp
, 1);
4856 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4857 enum insn_code icode
;
4858 enum machine_mode tmode
, mode0
;
4860 struct builtin_description
*d
;
4865 /* Syntax check for a 5-bit unsigned immediate. */
4868 case SPE_BUILTIN_EVSTDD
:
4869 case SPE_BUILTIN_EVSTDH
:
4870 case SPE_BUILTIN_EVSTDW
:
4871 case SPE_BUILTIN_EVSTWHE
:
4872 case SPE_BUILTIN_EVSTWHO
:
4873 case SPE_BUILTIN_EVSTWWE
:
4874 case SPE_BUILTIN_EVSTWWO
:
4875 arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4876 if (TREE_CODE (arg1
) != INTEGER_CST
4877 || TREE_INT_CST_LOW (arg1
) & ~0x1f)
4879 error ("argument 2 must be a 5-bit unsigned literal");
4887 d
= (struct builtin_description
*) bdesc_2arg_spe
;
4888 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg_spe
); ++i
, ++d
)
4889 if (d
->code
== fcode
)
4890 return rs6000_expand_binop_builtin (d
->icode
, arglist
, target
);
4892 d
= (struct builtin_description
*) bdesc_spe_predicates
;
4893 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_predicates
); ++i
, ++d
)
4894 if (d
->code
== fcode
)
4895 return spe_expand_predicate_builtin (d
->icode
, arglist
, target
);
4897 d
= (struct builtin_description
*) bdesc_spe_evsel
;
4898 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_evsel
); ++i
, ++d
)
4899 if (d
->code
== fcode
)
4900 return spe_expand_evsel_builtin (d
->icode
, arglist
, target
);
4904 case SPE_BUILTIN_EVSTDDX
:
4905 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx
, arglist
);
4906 case SPE_BUILTIN_EVSTDHX
:
4907 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx
, arglist
);
4908 case SPE_BUILTIN_EVSTDWX
:
4909 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx
, arglist
);
4910 case SPE_BUILTIN_EVSTWHEX
:
4911 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex
, arglist
);
4912 case SPE_BUILTIN_EVSTWHOX
:
4913 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox
, arglist
);
4914 case SPE_BUILTIN_EVSTWWEX
:
4915 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex
, arglist
);
4916 case SPE_BUILTIN_EVSTWWOX
:
4917 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox
, arglist
);
4918 case SPE_BUILTIN_EVSTDD
:
4919 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd
, arglist
);
4920 case SPE_BUILTIN_EVSTDH
:
4921 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh
, arglist
);
4922 case SPE_BUILTIN_EVSTDW
:
4923 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw
, arglist
);
4924 case SPE_BUILTIN_EVSTWHE
:
4925 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe
, arglist
);
4926 case SPE_BUILTIN_EVSTWHO
:
4927 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho
, arglist
);
4928 case SPE_BUILTIN_EVSTWWE
:
4929 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe
, arglist
);
4930 case SPE_BUILTIN_EVSTWWO
:
4931 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo
, arglist
);
4932 case SPE_BUILTIN_MFSPEFSCR
:
4933 icode
= CODE_FOR_spe_mfspefscr
;
4934 tmode
= insn_data
[icode
].operand
[0].mode
;
4937 || GET_MODE (target
) != tmode
4938 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4939 target
= gen_reg_rtx (tmode
);
4941 pat
= GEN_FCN (icode
) (target
);
4946 case SPE_BUILTIN_MTSPEFSCR
:
4947 icode
= CODE_FOR_spe_mtspefscr
;
4948 arg0
= TREE_VALUE (arglist
);
4949 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4950 mode0
= insn_data
[icode
].operand
[0].mode
;
4952 if (arg0
== error_mark_node
)
4955 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
4956 op0
= copy_to_mode_reg (mode0
, op0
);
4958 pat
= GEN_FCN (icode
) (op0
);
4971 spe_expand_predicate_builtin (icode
, arglist
, target
)
4972 enum insn_code icode
;
4976 rtx pat
, scratch
, tmp
;
4977 tree form
= TREE_VALUE (arglist
);
4978 tree arg0
= TREE_VALUE (TREE_CHAIN (arglist
));
4979 tree arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4980 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4981 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4982 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4983 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
4987 if (TREE_CODE (form
) != INTEGER_CST
)
4989 error ("argument 1 of __builtin_spe_predicate must be a constant");
4993 form_int
= TREE_INT_CST_LOW (form
);
4998 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
5002 || GET_MODE (target
) != SImode
5003 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, SImode
))
5004 target
= gen_reg_rtx (SImode
);
5006 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5007 op0
= copy_to_mode_reg (mode0
, op0
);
5008 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
5009 op1
= copy_to_mode_reg (mode1
, op1
);
5011 scratch
= gen_reg_rtx (CCmode
);
5013 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
);
5018 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
5019 _lower_. We use one compare, but look in different bits of the
5020 CR for each variant.
5022 There are 2 elements in each SPE simd type (upper/lower). The CR
5023 bits are set as follows:
5025 BIT0 | BIT 1 | BIT 2 | BIT 3
5026 U | L | (U | L) | (U & L)
5028 So, for an "all" relationship, BIT 3 would be set.
5029 For an "any" relationship, BIT 2 would be set. Etc.
5031 Following traditional nomenclature, these bits map to:
5033 BIT0 | BIT 1 | BIT 2 | BIT 3
5036 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
5041 /* All variant. OV bit. */
5043 /* We need to get to the OV bit, which is the ORDERED bit. We
5044 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
5045 that's ugly and will trigger a validate_condition_mode abort.
5046 So let's just use another pattern. */
5047 emit_insn (gen_move_from_CR_ov_bit (target
, scratch
));
5049 /* Any variant. EQ bit. */
5053 /* Upper variant. LT bit. */
5057 /* Lower variant. GT bit. */
5062 error ("argument 1 of __builtin_spe_predicate is out of range");
5066 tmp
= gen_rtx_fmt_ee (code
, SImode
, scratch
, const0_rtx
);
5067 emit_move_insn (target
, tmp
);
5072 /* The evsel builtins look like this:
5074 e = __builtin_spe_evsel_OP (a, b, c, d);
5078 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
5079 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
5083 spe_expand_evsel_builtin (icode
, arglist
, target
)
5084 enum insn_code icode
;
5089 tree arg0
= TREE_VALUE (arglist
);
5090 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
5091 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5092 tree arg3
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist
))));
5093 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5094 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5095 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
5096 rtx op3
= expand_expr (arg3
, NULL_RTX
, VOIDmode
, 0);
5097 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5098 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
5103 if (arg0
== error_mark_node
|| arg1
== error_mark_node
5104 || arg2
== error_mark_node
|| arg3
== error_mark_node
)
5108 || GET_MODE (target
) != mode0
5109 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, mode0
))
5110 target
= gen_reg_rtx (mode0
);
5112 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5113 op0
= copy_to_mode_reg (mode0
, op0
);
5114 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
5115 op1
= copy_to_mode_reg (mode0
, op1
);
5116 if (! (*insn_data
[icode
].operand
[1].predicate
) (op2
, mode1
))
5117 op2
= copy_to_mode_reg (mode0
, op2
);
5118 if (! (*insn_data
[icode
].operand
[1].predicate
) (op3
, mode1
))
5119 op3
= copy_to_mode_reg (mode0
, op3
);
5121 /* Generate the compare. */
5122 scratch
= gen_reg_rtx (CCmode
);
5123 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
);
5128 if (mode0
== V2SImode
)
5129 emit_insn (gen_spe_evsel (target
, op2
, op3
, scratch
));
5131 emit_insn (gen_spe_evsel_fs (target
, op2
, op3
, scratch
));
5136 /* Expand an expression EXP that calls a built-in function,
5137 with result going to TARGET if that's convenient
5138 (and in mode MODE if that's convenient).
5139 SUBTARGET may be used as the target for computing one of EXP's operands.
5140 IGNORE is nonzero if the value is to be ignored. */
5143 rs6000_expand_builtin (exp
, target
, subtarget
, mode
, ignore
)
5146 rtx subtarget ATTRIBUTE_UNUSED
;
5147 enum machine_mode mode ATTRIBUTE_UNUSED
;
5148 int ignore ATTRIBUTE_UNUSED
;
5150 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5151 tree arglist
= TREE_OPERAND (exp
, 1);
5152 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
5153 struct builtin_description
*d
;
5160 ret
= altivec_expand_builtin (exp
, target
, &success
);
5167 ret
= spe_expand_builtin (exp
, target
, &success
);
5173 if (TARGET_ALTIVEC
|| TARGET_SPE
)
5175 /* Handle simple unary operations. */
5176 d
= (struct builtin_description
*) bdesc_1arg
;
5177 for (i
= 0; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
5178 if (d
->code
== fcode
)
5179 return rs6000_expand_unop_builtin (d
->icode
, arglist
, target
);
5181 /* Handle simple binary operations. */
5182 d
= (struct builtin_description
*) bdesc_2arg
;
5183 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
5184 if (d
->code
== fcode
)
5185 return rs6000_expand_binop_builtin (d
->icode
, arglist
, target
);
5187 /* Handle simple ternary operations. */
5188 d
= (struct builtin_description
*) bdesc_3arg
;
5189 for (i
= 0; i
< ARRAY_SIZE (bdesc_3arg
); i
++, d
++)
5190 if (d
->code
== fcode
)
5191 return rs6000_expand_ternop_builtin (d
->icode
, arglist
, target
);
5199 rs6000_init_builtins ()
5202 spe_init_builtins ();
5204 altivec_init_builtins ();
5205 if (TARGET_ALTIVEC
|| TARGET_SPE
)
5206 rs6000_common_init_builtins ();
5209 /* Search through a set of builtins and enable the mask bits.
5210 DESC is an array of builtins.
5211 SIZE is the totaly number of builtins.
5212 START is the builtin enum at which to start.
5213 END is the builtin enum at which to end. */
5215 enable_mask_for_builtins (desc
, size
, start
, end
)
5216 struct builtin_description
*desc
;
5218 enum rs6000_builtins start
, end
;
5222 for (i
= 0; i
< size
; ++i
)
5223 if (desc
[i
].code
== start
)
5229 for (; i
< size
; ++i
)
5231 /* Flip all the bits on. */
5232 desc
[i
].mask
= target_flags
;
5233 if (desc
[i
].code
== end
)
5239 spe_init_builtins ()
5241 tree endlink
= void_list_node
;
5242 tree puint_type_node
= build_pointer_type (unsigned_type_node
);
5243 tree pushort_type_node
= build_pointer_type (short_unsigned_type_node
);
5244 tree pv2si_type_node
= build_pointer_type (V2SI_type_node
);
5245 struct builtin_description
*d
;
5248 tree v2si_ftype_4_v2si
5249 = build_function_type
5251 tree_cons (NULL_TREE
, V2SI_type_node
,
5252 tree_cons (NULL_TREE
, V2SI_type_node
,
5253 tree_cons (NULL_TREE
, V2SI_type_node
,
5254 tree_cons (NULL_TREE
, V2SI_type_node
,
5257 tree v2sf_ftype_4_v2sf
5258 = build_function_type
5260 tree_cons (NULL_TREE
, V2SF_type_node
,
5261 tree_cons (NULL_TREE
, V2SF_type_node
,
5262 tree_cons (NULL_TREE
, V2SF_type_node
,
5263 tree_cons (NULL_TREE
, V2SF_type_node
,
5266 tree int_ftype_int_v2si_v2si
5267 = build_function_type
5269 tree_cons (NULL_TREE
, integer_type_node
,
5270 tree_cons (NULL_TREE
, V2SI_type_node
,
5271 tree_cons (NULL_TREE
, V2SI_type_node
,
5274 tree int_ftype_int_v2sf_v2sf
5275 = build_function_type
5277 tree_cons (NULL_TREE
, integer_type_node
,
5278 tree_cons (NULL_TREE
, V2SF_type_node
,
5279 tree_cons (NULL_TREE
, V2SF_type_node
,
5282 tree void_ftype_v2si_puint_int
5283 = build_function_type (void_type_node
,
5284 tree_cons (NULL_TREE
, V2SI_type_node
,
5285 tree_cons (NULL_TREE
, puint_type_node
,
5286 tree_cons (NULL_TREE
,
5290 tree void_ftype_v2si_puint_char
5291 = build_function_type (void_type_node
,
5292 tree_cons (NULL_TREE
, V2SI_type_node
,
5293 tree_cons (NULL_TREE
, puint_type_node
,
5294 tree_cons (NULL_TREE
,
5298 tree void_ftype_v2si_pv2si_int
5299 = build_function_type (void_type_node
,
5300 tree_cons (NULL_TREE
, V2SI_type_node
,
5301 tree_cons (NULL_TREE
, pv2si_type_node
,
5302 tree_cons (NULL_TREE
,
5306 tree void_ftype_v2si_pv2si_char
5307 = build_function_type (void_type_node
,
5308 tree_cons (NULL_TREE
, V2SI_type_node
,
5309 tree_cons (NULL_TREE
, pv2si_type_node
,
5310 tree_cons (NULL_TREE
,
5315 = build_function_type (void_type_node
,
5316 tree_cons (NULL_TREE
, integer_type_node
, endlink
));
5319 = build_function_type (integer_type_node
,
5320 tree_cons (NULL_TREE
, void_type_node
, endlink
));
5322 tree v2si_ftype_pv2si_int
5323 = build_function_type (V2SI_type_node
,
5324 tree_cons (NULL_TREE
, pv2si_type_node
,
5325 tree_cons (NULL_TREE
, integer_type_node
,
5328 tree v2si_ftype_puint_int
5329 = build_function_type (V2SI_type_node
,
5330 tree_cons (NULL_TREE
, puint_type_node
,
5331 tree_cons (NULL_TREE
, integer_type_node
,
5334 tree v2si_ftype_pushort_int
5335 = build_function_type (V2SI_type_node
,
5336 tree_cons (NULL_TREE
, pushort_type_node
,
5337 tree_cons (NULL_TREE
, integer_type_node
,
5340 /* The initialization of the simple binary and unary builtins is
5341 done in rs6000_common_init_builtins, but we have to enable the
5342 mask bits here manually because we have run out of `target_flags'
5343 bits. We really need to redesign this mask business. */
5345 enable_mask_for_builtins ((struct builtin_description
*) bdesc_2arg
,
5346 ARRAY_SIZE (bdesc_2arg
),
5349 enable_mask_for_builtins ((struct builtin_description
*) bdesc_1arg
,
5350 ARRAY_SIZE (bdesc_1arg
),
5352 SPE_BUILTIN_EVSUBFUSIAAW
);
5353 enable_mask_for_builtins ((struct builtin_description
*) bdesc_spe_predicates
,
5354 ARRAY_SIZE (bdesc_spe_predicates
),
5355 SPE_BUILTIN_EVCMPEQ
,
5356 SPE_BUILTIN_EVFSTSTLT
);
5357 enable_mask_for_builtins ((struct builtin_description
*) bdesc_spe_evsel
,
5358 ARRAY_SIZE (bdesc_spe_evsel
),
5359 SPE_BUILTIN_EVSEL_CMPGTS
,
5360 SPE_BUILTIN_EVSEL_FSTSTEQ
);
5362 /* Initialize irregular SPE builtins. */
5364 def_builtin (target_flags
, "__builtin_spe_mtspefscr", void_ftype_int
, SPE_BUILTIN_MTSPEFSCR
);
5365 def_builtin (target_flags
, "__builtin_spe_mfspefscr", int_ftype_void
, SPE_BUILTIN_MFSPEFSCR
);
5366 def_builtin (target_flags
, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDDX
);
5367 def_builtin (target_flags
, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDHX
);
5368 def_builtin (target_flags
, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDWX
);
5369 def_builtin (target_flags
, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWHEX
);
5370 def_builtin (target_flags
, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWHOX
);
5371 def_builtin (target_flags
, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWWEX
);
5372 def_builtin (target_flags
, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWWOX
);
5373 def_builtin (target_flags
, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDD
);
5374 def_builtin (target_flags
, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDH
);
5375 def_builtin (target_flags
, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDW
);
5376 def_builtin (target_flags
, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWHE
);
5377 def_builtin (target_flags
, "__builtin_spe_evstwho", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWHO
);
5378 def_builtin (target_flags
, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWWE
);
5379 def_builtin (target_flags
, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWWO
);
5382 def_builtin (target_flags
, "__builtin_spe_evlddx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDDX
);
5383 def_builtin (target_flags
, "__builtin_spe_evldwx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDWX
);
5384 def_builtin (target_flags
, "__builtin_spe_evldhx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDHX
);
5385 def_builtin (target_flags
, "__builtin_spe_evlwhex", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHEX
);
5386 def_builtin (target_flags
, "__builtin_spe_evlwhoux", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOUX
);
5387 def_builtin (target_flags
, "__builtin_spe_evlwhosx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOSX
);
5388 def_builtin (target_flags
, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWWSPLATX
);
5389 def_builtin (target_flags
, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHSPLATX
);
5390 def_builtin (target_flags
, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHESPLATX
);
5391 def_builtin (target_flags
, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOUSPLATX
);
5392 def_builtin (target_flags
, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOSSPLATX
);
5393 def_builtin (target_flags
, "__builtin_spe_evldd", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDD
);
5394 def_builtin (target_flags
, "__builtin_spe_evldw", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDW
);
5395 def_builtin (target_flags
, "__builtin_spe_evldh", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDH
);
5396 def_builtin (target_flags
, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHESPLAT
);
5397 def_builtin (target_flags
, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOSSPLAT
);
5398 def_builtin (target_flags
, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOUSPLAT
);
5399 def_builtin (target_flags
, "__builtin_spe_evlwhe", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHE
);
5400 def_builtin (target_flags
, "__builtin_spe_evlwhos", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOS
);
5401 def_builtin (target_flags
, "__builtin_spe_evlwhou", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOU
);
5402 def_builtin (target_flags
, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHSPLAT
);
5403 def_builtin (target_flags
, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWWSPLAT
);
5406 d
= (struct builtin_description
*) bdesc_spe_predicates
;
5407 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_predicates
); ++i
, d
++)
5411 switch (insn_data
[d
->icode
].operand
[1].mode
)
5414 type
= int_ftype_int_v2si_v2si
;
5417 type
= int_ftype_int_v2sf_v2sf
;
5423 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
5426 /* Evsel predicates. */
5427 d
= (struct builtin_description
*) bdesc_spe_evsel
;
5428 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_evsel
); ++i
, d
++)
5432 switch (insn_data
[d
->icode
].operand
[1].mode
)
5435 type
= v2si_ftype_4_v2si
;
5438 type
= v2sf_ftype_4_v2sf
;
5444 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
5449 altivec_init_builtins ()
5451 struct builtin_description
*d
;
5452 struct builtin_description_predicates
*dp
;
5454 tree pfloat_type_node
= build_pointer_type (float_type_node
);
5455 tree pint_type_node
= build_pointer_type (integer_type_node
);
5456 tree pshort_type_node
= build_pointer_type (short_integer_type_node
);
5457 tree pchar_type_node
= build_pointer_type (char_type_node
);
5459 tree pvoid_type_node
= build_pointer_type (void_type_node
);
5461 tree pcfloat_type_node
= build_pointer_type (build_qualified_type (float_type_node
, TYPE_QUAL_CONST
));
5462 tree pcint_type_node
= build_pointer_type (build_qualified_type (integer_type_node
, TYPE_QUAL_CONST
));
5463 tree pcshort_type_node
= build_pointer_type (build_qualified_type (short_integer_type_node
, TYPE_QUAL_CONST
));
5464 tree pcchar_type_node
= build_pointer_type (build_qualified_type (char_type_node
, TYPE_QUAL_CONST
));
5466 tree pcvoid_type_node
= build_pointer_type (build_qualified_type (void_type_node
, TYPE_QUAL_CONST
));
5468 tree int_ftype_int_v4si_v4si
5469 = build_function_type_list (integer_type_node
,
5470 integer_type_node
, V4SI_type_node
,
5471 V4SI_type_node
, NULL_TREE
);
5472 tree v4sf_ftype_pcfloat
5473 = build_function_type_list (V4SF_type_node
, pcfloat_type_node
, NULL_TREE
);
5474 tree void_ftype_pfloat_v4sf
5475 = build_function_type_list (void_type_node
,
5476 pfloat_type_node
, V4SF_type_node
, NULL_TREE
);
5477 tree v4si_ftype_pcint
5478 = build_function_type_list (V4SI_type_node
, pcint_type_node
, NULL_TREE
);
5479 tree void_ftype_pint_v4si
5480 = build_function_type_list (void_type_node
,
5481 pint_type_node
, V4SI_type_node
, NULL_TREE
);
5482 tree v8hi_ftype_pcshort
5483 = build_function_type_list (V8HI_type_node
, pcshort_type_node
, NULL_TREE
);
5484 tree void_ftype_pshort_v8hi
5485 = build_function_type_list (void_type_node
,
5486 pshort_type_node
, V8HI_type_node
, NULL_TREE
);
5487 tree v16qi_ftype_pcchar
5488 = build_function_type_list (V16QI_type_node
, pcchar_type_node
, NULL_TREE
);
5489 tree void_ftype_pchar_v16qi
5490 = build_function_type_list (void_type_node
,
5491 pchar_type_node
, V16QI_type_node
, NULL_TREE
);
5492 tree void_ftype_v4si
5493 = build_function_type_list (void_type_node
, V4SI_type_node
, NULL_TREE
);
5494 tree v8hi_ftype_void
5495 = build_function_type (V8HI_type_node
, void_list_node
);
5496 tree void_ftype_void
5497 = build_function_type (void_type_node
, void_list_node
);
5499 = build_function_type_list (void_type_node
, char_type_node
, NULL_TREE
);
5501 tree v16qi_ftype_int_pcvoid
5502 = build_function_type_list (V16QI_type_node
,
5503 integer_type_node
, pcvoid_type_node
, NULL_TREE
);
5504 tree v8hi_ftype_int_pcvoid
5505 = build_function_type_list (V8HI_type_node
,
5506 integer_type_node
, pcvoid_type_node
, NULL_TREE
);
5507 tree v4si_ftype_int_pcvoid
5508 = build_function_type_list (V4SI_type_node
,
5509 integer_type_node
, pcvoid_type_node
, NULL_TREE
);
5511 tree void_ftype_v4si_int_pvoid
5512 = build_function_type_list (void_type_node
,
5513 V4SI_type_node
, integer_type_node
,
5514 pvoid_type_node
, NULL_TREE
);
5515 tree void_ftype_v16qi_int_pvoid
5516 = build_function_type_list (void_type_node
,
5517 V16QI_type_node
, integer_type_node
,
5518 pvoid_type_node
, NULL_TREE
);
5519 tree void_ftype_v8hi_int_pvoid
5520 = build_function_type_list (void_type_node
,
5521 V8HI_type_node
, integer_type_node
,
5522 pvoid_type_node
, NULL_TREE
);
5523 tree int_ftype_int_v8hi_v8hi
5524 = build_function_type_list (integer_type_node
,
5525 integer_type_node
, V8HI_type_node
,
5526 V8HI_type_node
, NULL_TREE
);
5527 tree int_ftype_int_v16qi_v16qi
5528 = build_function_type_list (integer_type_node
,
5529 integer_type_node
, V16QI_type_node
,
5530 V16QI_type_node
, NULL_TREE
);
5531 tree int_ftype_int_v4sf_v4sf
5532 = build_function_type_list (integer_type_node
,
5533 integer_type_node
, V4SF_type_node
,
5534 V4SF_type_node
, NULL_TREE
);
5535 tree v4si_ftype_v4si
5536 = build_function_type_list (V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
5537 tree v8hi_ftype_v8hi
5538 = build_function_type_list (V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5539 tree v16qi_ftype_v16qi
5540 = build_function_type_list (V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5541 tree v4sf_ftype_v4sf
5542 = build_function_type_list (V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5543 tree void_ftype_pcvoid_int_char
5544 = build_function_type_list (void_type_node
,
5545 pcvoid_type_node
, integer_type_node
,
5546 char_type_node
, NULL_TREE
);
5548 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat
,
5549 ALTIVEC_BUILTIN_LD_INTERNAL_4sf
);
5550 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf
,
5551 ALTIVEC_BUILTIN_ST_INTERNAL_4sf
);
5552 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint
,
5553 ALTIVEC_BUILTIN_LD_INTERNAL_4si
);
5554 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si
,
5555 ALTIVEC_BUILTIN_ST_INTERNAL_4si
);
5556 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort
,
5557 ALTIVEC_BUILTIN_LD_INTERNAL_8hi
);
5558 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi
,
5559 ALTIVEC_BUILTIN_ST_INTERNAL_8hi
);
5560 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar
,
5561 ALTIVEC_BUILTIN_LD_INTERNAL_16qi
);
5562 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi
,
5563 ALTIVEC_BUILTIN_ST_INTERNAL_16qi
);
5564 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_mtvscr", void_ftype_v4si
, ALTIVEC_BUILTIN_MTVSCR
);
5565 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_mfvscr", v8hi_ftype_void
, ALTIVEC_BUILTIN_MFVSCR
);
5566 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_dssall", void_ftype_void
, ALTIVEC_BUILTIN_DSSALL
);
5567 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_dss", void_ftype_qi
, ALTIVEC_BUILTIN_DSS
);
5568 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvsl", v16qi_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVSL
);
5569 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvsr", v16qi_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVSR
);
5570 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvebx", v16qi_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVEBX
);
5571 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvehx", v8hi_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVEHX
);
5572 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvewx", v4si_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVEWX
);
5573 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvxl", v4si_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVXL
);
5574 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvx", v4si_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVX
);
5575 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVX
);
5576 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVEWX
);
5577 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVXL
);
5578 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid
, ALTIVEC_BUILTIN_STVEBX
);
5579 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid
, ALTIVEC_BUILTIN_STVEHX
);
5581 /* Add the DST variants. */
5582 d
= (struct builtin_description
*) bdesc_dst
;
5583 for (i
= 0; i
< ARRAY_SIZE (bdesc_dst
); i
++, d
++)
5584 def_builtin (d
->mask
, d
->name
, void_ftype_pcvoid_int_char
, d
->code
);
5586 /* Initialize the predicates. */
5587 dp
= (struct builtin_description_predicates
*) bdesc_altivec_preds
;
5588 for (i
= 0; i
< ARRAY_SIZE (bdesc_altivec_preds
); i
++, dp
++)
5590 enum machine_mode mode1
;
5593 mode1
= insn_data
[dp
->icode
].operand
[1].mode
;
5598 type
= int_ftype_int_v4si_v4si
;
5601 type
= int_ftype_int_v8hi_v8hi
;
5604 type
= int_ftype_int_v16qi_v16qi
;
5607 type
= int_ftype_int_v4sf_v4sf
;
5613 def_builtin (dp
->mask
, dp
->name
, type
, dp
->code
);
5616 /* Initialize the abs* operators. */
5617 d
= (struct builtin_description
*) bdesc_abs
;
5618 for (i
= 0; i
< ARRAY_SIZE (bdesc_abs
); i
++, d
++)
5620 enum machine_mode mode0
;
5623 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
5628 type
= v4si_ftype_v4si
;
5631 type
= v8hi_ftype_v8hi
;
5634 type
= v16qi_ftype_v16qi
;
5637 type
= v4sf_ftype_v4sf
;
5643 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
5648 rs6000_common_init_builtins ()
5650 struct builtin_description
*d
;
5653 tree v4sf_ftype_v4sf_v4sf_v16qi
5654 = build_function_type_list (V4SF_type_node
,
5655 V4SF_type_node
, V4SF_type_node
,
5656 V16QI_type_node
, NULL_TREE
);
5657 tree v4si_ftype_v4si_v4si_v16qi
5658 = build_function_type_list (V4SI_type_node
,
5659 V4SI_type_node
, V4SI_type_node
,
5660 V16QI_type_node
, NULL_TREE
);
5661 tree v8hi_ftype_v8hi_v8hi_v16qi
5662 = build_function_type_list (V8HI_type_node
,
5663 V8HI_type_node
, V8HI_type_node
,
5664 V16QI_type_node
, NULL_TREE
);
5665 tree v16qi_ftype_v16qi_v16qi_v16qi
5666 = build_function_type_list (V16QI_type_node
,
5667 V16QI_type_node
, V16QI_type_node
,
5668 V16QI_type_node
, NULL_TREE
);
5669 tree v4si_ftype_char
5670 = build_function_type_list (V4SI_type_node
, char_type_node
, NULL_TREE
);
5671 tree v8hi_ftype_char
5672 = build_function_type_list (V8HI_type_node
, char_type_node
, NULL_TREE
);
5673 tree v16qi_ftype_char
5674 = build_function_type_list (V16QI_type_node
, char_type_node
, NULL_TREE
);
5675 tree v8hi_ftype_v16qi
5676 = build_function_type_list (V8HI_type_node
, V16QI_type_node
, NULL_TREE
);
5677 tree v4sf_ftype_v4sf
5678 = build_function_type_list (V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5680 tree v2si_ftype_v2si_v2si
5681 = build_function_type_list (V2SI_type_node
,
5682 V2SI_type_node
, V2SI_type_node
, NULL_TREE
);
5684 tree v2sf_ftype_v2sf_v2sf
5685 = build_function_type_list (V2SF_type_node
,
5686 V2SF_type_node
, V2SF_type_node
, NULL_TREE
);
5688 tree v2si_ftype_int_int
5689 = build_function_type_list (V2SI_type_node
,
5690 integer_type_node
, integer_type_node
,
5693 tree v2si_ftype_v2si
5694 = build_function_type_list (V2SI_type_node
, V2SI_type_node
, NULL_TREE
);
5696 tree v2sf_ftype_v2sf
5697 = build_function_type_list (V2SF_type_node
,
5698 V2SF_type_node
, NULL_TREE
);
5700 tree v2sf_ftype_v2si
5701 = build_function_type_list (V2SF_type_node
,
5702 V2SI_type_node
, NULL_TREE
);
5704 tree v2si_ftype_v2sf
5705 = build_function_type_list (V2SI_type_node
,
5706 V2SF_type_node
, NULL_TREE
);
5708 tree v2si_ftype_v2si_char
5709 = build_function_type_list (V2SI_type_node
,
5710 V2SI_type_node
, char_type_node
, NULL_TREE
);
5712 tree v2si_ftype_int_char
5713 = build_function_type_list (V2SI_type_node
,
5714 integer_type_node
, char_type_node
, NULL_TREE
);
5716 tree v2si_ftype_char
5717 = build_function_type_list (V2SI_type_node
, char_type_node
, NULL_TREE
);
5719 tree int_ftype_int_int
5720 = build_function_type_list (integer_type_node
,
5721 integer_type_node
, integer_type_node
,
5724 tree v4si_ftype_v4si_v4si
5725 = build_function_type_list (V4SI_type_node
,
5726 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
5727 tree v4sf_ftype_v4si_char
5728 = build_function_type_list (V4SF_type_node
,
5729 V4SI_type_node
, char_type_node
, NULL_TREE
);
5730 tree v4si_ftype_v4sf_char
5731 = build_function_type_list (V4SI_type_node
,
5732 V4SF_type_node
, char_type_node
, NULL_TREE
);
5733 tree v4si_ftype_v4si_char
5734 = build_function_type_list (V4SI_type_node
,
5735 V4SI_type_node
, char_type_node
, NULL_TREE
);
5736 tree v8hi_ftype_v8hi_char
5737 = build_function_type_list (V8HI_type_node
,
5738 V8HI_type_node
, char_type_node
, NULL_TREE
);
5739 tree v16qi_ftype_v16qi_char
5740 = build_function_type_list (V16QI_type_node
,
5741 V16QI_type_node
, char_type_node
, NULL_TREE
);
5742 tree v16qi_ftype_v16qi_v16qi_char
5743 = build_function_type_list (V16QI_type_node
,
5744 V16QI_type_node
, V16QI_type_node
,
5745 char_type_node
, NULL_TREE
);
5746 tree v8hi_ftype_v8hi_v8hi_char
5747 = build_function_type_list (V8HI_type_node
,
5748 V8HI_type_node
, V8HI_type_node
,
5749 char_type_node
, NULL_TREE
);
5750 tree v4si_ftype_v4si_v4si_char
5751 = build_function_type_list (V4SI_type_node
,
5752 V4SI_type_node
, V4SI_type_node
,
5753 char_type_node
, NULL_TREE
);
5754 tree v4sf_ftype_v4sf_v4sf_char
5755 = build_function_type_list (V4SF_type_node
,
5756 V4SF_type_node
, V4SF_type_node
,
5757 char_type_node
, NULL_TREE
);
5758 tree v4sf_ftype_v4sf_v4sf
5759 = build_function_type_list (V4SF_type_node
,
5760 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5761 tree v4sf_ftype_v4sf_v4sf_v4si
5762 = build_function_type_list (V4SF_type_node
,
5763 V4SF_type_node
, V4SF_type_node
,
5764 V4SI_type_node
, NULL_TREE
);
5765 tree v4sf_ftype_v4sf_v4sf_v4sf
5766 = build_function_type_list (V4SF_type_node
,
5767 V4SF_type_node
, V4SF_type_node
,
5768 V4SF_type_node
, NULL_TREE
);
5769 tree v4si_ftype_v4si_v4si_v4si
5770 = build_function_type_list (V4SI_type_node
,
5771 V4SI_type_node
, V4SI_type_node
,
5772 V4SI_type_node
, NULL_TREE
);
5773 tree v8hi_ftype_v8hi_v8hi
5774 = build_function_type_list (V8HI_type_node
,
5775 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5776 tree v8hi_ftype_v8hi_v8hi_v8hi
5777 = build_function_type_list (V8HI_type_node
,
5778 V8HI_type_node
, V8HI_type_node
,
5779 V8HI_type_node
, NULL_TREE
);
5780 tree v4si_ftype_v8hi_v8hi_v4si
5781 = build_function_type_list (V4SI_type_node
,
5782 V8HI_type_node
, V8HI_type_node
,
5783 V4SI_type_node
, NULL_TREE
);
5784 tree v4si_ftype_v16qi_v16qi_v4si
5785 = build_function_type_list (V4SI_type_node
,
5786 V16QI_type_node
, V16QI_type_node
,
5787 V4SI_type_node
, NULL_TREE
);
5788 tree v16qi_ftype_v16qi_v16qi
5789 = build_function_type_list (V16QI_type_node
,
5790 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5791 tree v4si_ftype_v4sf_v4sf
5792 = build_function_type_list (V4SI_type_node
,
5793 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5794 tree v8hi_ftype_v16qi_v16qi
5795 = build_function_type_list (V8HI_type_node
,
5796 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5797 tree v4si_ftype_v8hi_v8hi
5798 = build_function_type_list (V4SI_type_node
,
5799 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5800 tree v8hi_ftype_v4si_v4si
5801 = build_function_type_list (V8HI_type_node
,
5802 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
5803 tree v16qi_ftype_v8hi_v8hi
5804 = build_function_type_list (V16QI_type_node
,
5805 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5806 tree v4si_ftype_v16qi_v4si
5807 = build_function_type_list (V4SI_type_node
,
5808 V16QI_type_node
, V4SI_type_node
, NULL_TREE
);
5809 tree v4si_ftype_v16qi_v16qi
5810 = build_function_type_list (V4SI_type_node
,
5811 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5812 tree v4si_ftype_v8hi_v4si
5813 = build_function_type_list (V4SI_type_node
,
5814 V8HI_type_node
, V4SI_type_node
, NULL_TREE
);
5815 tree v4si_ftype_v8hi
5816 = build_function_type_list (V4SI_type_node
, V8HI_type_node
, NULL_TREE
);
5817 tree int_ftype_v4si_v4si
5818 = build_function_type_list (integer_type_node
,
5819 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
5820 tree int_ftype_v4sf_v4sf
5821 = build_function_type_list (integer_type_node
,
5822 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5823 tree int_ftype_v16qi_v16qi
5824 = build_function_type_list (integer_type_node
,
5825 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5826 tree int_ftype_v8hi_v8hi
5827 = build_function_type_list (integer_type_node
,
5828 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5830 /* Add the simple ternary operators. */
5831 d
= (struct builtin_description
*) bdesc_3arg
;
5832 for (i
= 0; i
< ARRAY_SIZE (bdesc_3arg
); i
++, d
++)
5835 enum machine_mode mode0
, mode1
, mode2
, mode3
;
5838 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
5841 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
5842 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
5843 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
5844 mode3
= insn_data
[d
->icode
].operand
[3].mode
;
5846 /* When all four are of the same mode. */
5847 if (mode0
== mode1
&& mode1
== mode2
&& mode2
== mode3
)
5852 type
= v4si_ftype_v4si_v4si_v4si
;
5855 type
= v4sf_ftype_v4sf_v4sf_v4sf
;
5858 type
= v8hi_ftype_v8hi_v8hi_v8hi
;
5861 type
= v16qi_ftype_v16qi_v16qi_v16qi
;
5867 else if (mode0
== mode1
&& mode1
== mode2
&& mode3
== V16QImode
)
5872 type
= v4si_ftype_v4si_v4si_v16qi
;
5875 type
= v4sf_ftype_v4sf_v4sf_v16qi
;
5878 type
= v8hi_ftype_v8hi_v8hi_v16qi
;
5881 type
= v16qi_ftype_v16qi_v16qi_v16qi
;
5887 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V16QImode
5888 && mode3
== V4SImode
)
5889 type
= v4si_ftype_v16qi_v16qi_v4si
;
5890 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V8HImode
5891 && mode3
== V4SImode
)
5892 type
= v4si_ftype_v8hi_v8hi_v4si
;
5893 else if (mode0
== V4SFmode
&& mode1
== V4SFmode
&& mode2
== V4SFmode
5894 && mode3
== V4SImode
)
5895 type
= v4sf_ftype_v4sf_v4sf_v4si
;
5897 /* vchar, vchar, vchar, 4 bit literal. */
5898 else if (mode0
== V16QImode
&& mode1
== mode0
&& mode2
== mode0
5900 type
= v16qi_ftype_v16qi_v16qi_char
;
5902 /* vshort, vshort, vshort, 4 bit literal. */
5903 else if (mode0
== V8HImode
&& mode1
== mode0
&& mode2
== mode0
5905 type
= v8hi_ftype_v8hi_v8hi_char
;
5907 /* vint, vint, vint, 4 bit literal. */
5908 else if (mode0
== V4SImode
&& mode1
== mode0
&& mode2
== mode0
5910 type
= v4si_ftype_v4si_v4si_char
;
5912 /* vfloat, vfloat, vfloat, 4 bit literal. */
5913 else if (mode0
== V4SFmode
&& mode1
== mode0
&& mode2
== mode0
5915 type
= v4sf_ftype_v4sf_v4sf_char
;
5920 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
5923 /* Add the simple binary operators. */
5924 d
= (struct builtin_description
*) bdesc_2arg
;
5925 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
5927 enum machine_mode mode0
, mode1
, mode2
;
5930 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
5933 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
5934 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
5935 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
5937 /* When all three operands are of the same mode. */
5938 if (mode0
== mode1
&& mode1
== mode2
)
5943 type
= v4sf_ftype_v4sf_v4sf
;
5946 type
= v4si_ftype_v4si_v4si
;
5949 type
= v16qi_ftype_v16qi_v16qi
;
5952 type
= v8hi_ftype_v8hi_v8hi
;
5955 type
= v2si_ftype_v2si_v2si
;
5958 type
= v2sf_ftype_v2sf_v2sf
;
5961 type
= int_ftype_int_int
;
5968 /* A few other combos we really don't want to do manually. */
5970 /* vint, vfloat, vfloat. */
5971 else if (mode0
== V4SImode
&& mode1
== V4SFmode
&& mode2
== V4SFmode
)
5972 type
= v4si_ftype_v4sf_v4sf
;
5974 /* vshort, vchar, vchar. */
5975 else if (mode0
== V8HImode
&& mode1
== V16QImode
&& mode2
== V16QImode
)
5976 type
= v8hi_ftype_v16qi_v16qi
;
5978 /* vint, vshort, vshort. */
5979 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V8HImode
)
5980 type
= v4si_ftype_v8hi_v8hi
;
5982 /* vshort, vint, vint. */
5983 else if (mode0
== V8HImode
&& mode1
== V4SImode
&& mode2
== V4SImode
)
5984 type
= v8hi_ftype_v4si_v4si
;
5986 /* vchar, vshort, vshort. */
5987 else if (mode0
== V16QImode
&& mode1
== V8HImode
&& mode2
== V8HImode
)
5988 type
= v16qi_ftype_v8hi_v8hi
;
5990 /* vint, vchar, vint. */
5991 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V4SImode
)
5992 type
= v4si_ftype_v16qi_v4si
;
5994 /* vint, vchar, vchar. */
5995 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V16QImode
)
5996 type
= v4si_ftype_v16qi_v16qi
;
5998 /* vint, vshort, vint. */
5999 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V4SImode
)
6000 type
= v4si_ftype_v8hi_v4si
;
6002 /* vint, vint, 5 bit literal. */
6003 else if (mode0
== V4SImode
&& mode1
== V4SImode
&& mode2
== QImode
)
6004 type
= v4si_ftype_v4si_char
;
6006 /* vshort, vshort, 5 bit literal. */
6007 else if (mode0
== V8HImode
&& mode1
== V8HImode
&& mode2
== QImode
)
6008 type
= v8hi_ftype_v8hi_char
;
6010 /* vchar, vchar, 5 bit literal. */
6011 else if (mode0
== V16QImode
&& mode1
== V16QImode
&& mode2
== QImode
)
6012 type
= v16qi_ftype_v16qi_char
;
6014 /* vfloat, vint, 5 bit literal. */
6015 else if (mode0
== V4SFmode
&& mode1
== V4SImode
&& mode2
== QImode
)
6016 type
= v4sf_ftype_v4si_char
;
6018 /* vint, vfloat, 5 bit literal. */
6019 else if (mode0
== V4SImode
&& mode1
== V4SFmode
&& mode2
== QImode
)
6020 type
= v4si_ftype_v4sf_char
;
6022 else if (mode0
== V2SImode
&& mode1
== SImode
&& mode2
== SImode
)
6023 type
= v2si_ftype_int_int
;
6025 else if (mode0
== V2SImode
&& mode1
== V2SImode
&& mode2
== QImode
)
6026 type
= v2si_ftype_v2si_char
;
6028 else if (mode0
== V2SImode
&& mode1
== SImode
&& mode2
== QImode
)
6029 type
= v2si_ftype_int_char
;
6032 else if (mode0
== SImode
)
6037 type
= int_ftype_v4si_v4si
;
6040 type
= int_ftype_v4sf_v4sf
;
6043 type
= int_ftype_v16qi_v16qi
;
6046 type
= int_ftype_v8hi_v8hi
;
6056 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
6059 /* Add the simple unary operators. */
6060 d
= (struct builtin_description
*) bdesc_1arg
;
6061 for (i
= 0; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
6063 enum machine_mode mode0
, mode1
;
6066 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
6069 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
6070 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
6072 if (mode0
== V4SImode
&& mode1
== QImode
)
6073 type
= v4si_ftype_char
;
6074 else if (mode0
== V8HImode
&& mode1
== QImode
)
6075 type
= v8hi_ftype_char
;
6076 else if (mode0
== V16QImode
&& mode1
== QImode
)
6077 type
= v16qi_ftype_char
;
6078 else if (mode0
== V4SFmode
&& mode1
== V4SFmode
)
6079 type
= v4sf_ftype_v4sf
;
6080 else if (mode0
== V8HImode
&& mode1
== V16QImode
)
6081 type
= v8hi_ftype_v16qi
;
6082 else if (mode0
== V4SImode
&& mode1
== V8HImode
)
6083 type
= v4si_ftype_v8hi
;
6084 else if (mode0
== V2SImode
&& mode1
== V2SImode
)
6085 type
= v2si_ftype_v2si
;
6086 else if (mode0
== V2SFmode
&& mode1
== V2SFmode
)
6087 type
= v2sf_ftype_v2sf
;
6088 else if (mode0
== V2SFmode
&& mode1
== V2SImode
)
6089 type
= v2sf_ftype_v2si
;
6090 else if (mode0
== V2SImode
&& mode1
== V2SFmode
)
6091 type
= v2si_ftype_v2sf
;
6092 else if (mode0
== V2SImode
&& mode1
== QImode
)
6093 type
= v2si_ftype_char
;
6097 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
6102 /* Expand a block move operation, and return 1 if successful. Return 0
6103 if we should let the compiler generate normal code.
6105 operands[0] is the destination
6106 operands[1] is the source
6107 operands[2] is the length
6108 operands[3] is the alignment */
6110 #define MAX_MOVE_REG 4
6113 expand_block_move (operands
)
6116 rtx orig_dest
= operands
[0];
6117 rtx orig_src
= operands
[1];
6118 rtx bytes_rtx
= operands
[2];
6119 rtx align_rtx
= operands
[3];
6120 int constp
= (GET_CODE (bytes_rtx
) == CONST_INT
);
6126 /* If this is not a fixed size move, just call memcpy */
6130 /* If this is not a fixed size alignment, abort */
6131 if (GET_CODE (align_rtx
) != CONST_INT
)
6133 align
= INTVAL (align_rtx
);
6135 /* Anything to move? */
6136 bytes
= INTVAL (bytes_rtx
);
6140 /* store_one_arg depends on expand_block_move to handle at least the size of
6141 reg_parm_stack_space. */
6142 if (bytes
> (TARGET_POWERPC64
? 64 : 32))
6145 if (TARGET_STRING
) /* string instructions are available */
6147 for (offset
= 0; bytes
> 0; offset
+= move_bytes
, bytes
-= move_bytes
)
6150 rtx (*movstrsi
) PARAMS ((rtx
, rtx
, rtx
, rtx
));
6151 rtx (*mov
) PARAMS ((rtx
, rtx
));
6153 enum machine_mode mode
= BLKmode
;
6156 if (bytes
> 24 /* move up to 32 bytes at a time */
6164 && ! fixed_regs
[12])
6166 move_bytes
= (bytes
> 32) ? 32 : bytes
;
6167 gen_func
.movstrsi
= gen_movstrsi_8reg
;
6169 else if (bytes
> 16 /* move up to 24 bytes at a time */
6175 && ! fixed_regs
[10])
6177 move_bytes
= (bytes
> 24) ? 24 : bytes
;
6178 gen_func
.movstrsi
= gen_movstrsi_6reg
;
6180 else if (bytes
> 8 /* move up to 16 bytes at a time */
6186 move_bytes
= (bytes
> 16) ? 16 : bytes
;
6187 gen_func
.movstrsi
= gen_movstrsi_4reg
;
6189 else if (bytes
>= 8 && TARGET_POWERPC64
6190 /* 64-bit loads and stores require word-aligned
6192 && (align
>= 8 || (! STRICT_ALIGNMENT
&& align
>= 4)))
6196 gen_func
.mov
= gen_movdi
;
6198 else if (bytes
> 4 && !TARGET_POWERPC64
)
6199 { /* move up to 8 bytes at a time */
6200 move_bytes
= (bytes
> 8) ? 8 : bytes
;
6201 gen_func
.movstrsi
= gen_movstrsi_2reg
;
6203 else if (bytes
>= 4 && (align
>= 4 || ! STRICT_ALIGNMENT
))
6204 { /* move 4 bytes */
6207 gen_func
.mov
= gen_movsi
;
6209 else if (bytes
== 2 && (align
>= 2 || ! STRICT_ALIGNMENT
))
6210 { /* move 2 bytes */
6213 gen_func
.mov
= gen_movhi
;
6215 else if (bytes
== 1) /* move 1 byte */
6219 gen_func
.mov
= gen_movqi
;
6222 { /* move up to 4 bytes at a time */
6223 move_bytes
= (bytes
> 4) ? 4 : bytes
;
6224 gen_func
.movstrsi
= gen_movstrsi_1reg
;
6227 src
= adjust_address (orig_src
, mode
, offset
);
6228 dest
= adjust_address (orig_dest
, mode
, offset
);
6230 if (mode
== BLKmode
)
6232 /* Move the address into scratch registers. The movstrsi
6233 patterns require zero offset. */
6234 if (!REG_P (XEXP (src
, 0)))
6236 rtx src_reg
= copy_addr_to_reg (XEXP (src
, 0));
6237 src
= replace_equiv_address (src
, src_reg
);
6239 set_mem_size (src
, GEN_INT (move_bytes
));
6241 if (!REG_P (XEXP (dest
, 0)))
6243 rtx dest_reg
= copy_addr_to_reg (XEXP (dest
, 0));
6244 dest
= replace_equiv_address (dest
, dest_reg
);
6246 set_mem_size (dest
, GEN_INT (move_bytes
));
6248 emit_insn ((*gen_func
.movstrsi
) (dest
, src
,
6249 GEN_INT (move_bytes
& 31),
6254 rtx tmp_reg
= gen_reg_rtx (mode
);
6256 emit_insn ((*gen_func
.mov
) (tmp_reg
, src
));
6257 emit_insn ((*gen_func
.mov
) (dest
, tmp_reg
));
6262 else /* string instructions not available */
6264 rtx stores
[MAX_MOVE_REG
];
6268 for (offset
= 0; bytes
> 0; offset
+= move_bytes
, bytes
-= move_bytes
)
6270 rtx (*gen_mov_func
) PARAMS ((rtx
, rtx
));
6271 enum machine_mode mode
;
6272 rtx src
, dest
, tmp_reg
;
6274 /* Generate the appropriate load and store, saving the stores
6276 if (bytes
>= 8 && TARGET_POWERPC64
6277 /* 64-bit loads and stores require word-aligned
6279 && (align
>= 8 || (! STRICT_ALIGNMENT
&& align
>= 4)))
6283 gen_mov_func
= gen_movdi
;
6285 else if (bytes
>= 4 && (align
>= 4 || ! STRICT_ALIGNMENT
))
6289 gen_mov_func
= gen_movsi
;
6291 else if (bytes
>= 2 && (align
>= 2 || ! STRICT_ALIGNMENT
))
6295 gen_mov_func
= gen_movhi
;
6301 gen_mov_func
= gen_movqi
;
6304 src
= adjust_address (orig_src
, mode
, offset
);
6305 dest
= adjust_address (orig_dest
, mode
, offset
);
6306 tmp_reg
= gen_reg_rtx (mode
);
6308 emit_insn ((*gen_mov_func
) (tmp_reg
, src
));
6309 stores
[num_reg
++] = (*gen_mov_func
) (dest
, tmp_reg
);
6311 if (num_reg
>= MAX_MOVE_REG
)
6313 for (i
= 0; i
< num_reg
; i
++)
6314 emit_insn (stores
[i
]);
6319 for (i
= 0; i
< num_reg
; i
++)
6320 emit_insn (stores
[i
]);
6327 /* Return 1 if OP is a load multiple operation. It is known to be a
6328 PARALLEL and the first section will be tested. */
6331 load_multiple_operation (op
, mode
)
6333 enum machine_mode mode ATTRIBUTE_UNUSED
;
6335 int count
= XVECLEN (op
, 0);
6336 unsigned int dest_regno
;
6340 /* Perform a quick check so we don't blow up below. */
6342 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6343 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
6344 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != MEM
)
6347 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
6348 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0);
6350 for (i
= 1; i
< count
; i
++)
6352 rtx elt
= XVECEXP (op
, 0, i
);
6354 if (GET_CODE (elt
) != SET
6355 || GET_CODE (SET_DEST (elt
)) != REG
6356 || GET_MODE (SET_DEST (elt
)) != SImode
6357 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
6358 || GET_CODE (SET_SRC (elt
)) != MEM
6359 || GET_MODE (SET_SRC (elt
)) != SImode
6360 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != PLUS
6361 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt
), 0), 0), src_addr
)
6362 || GET_CODE (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != CONST_INT
6363 || INTVAL (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != i
* 4)
6370 /* Similar, but tests for store multiple. Here, the second vector element
6371 is a CLOBBER. It will be tested later. */
6374 store_multiple_operation (op
, mode
)
6376 enum machine_mode mode ATTRIBUTE_UNUSED
;
6378 int count
= XVECLEN (op
, 0) - 1;
6379 unsigned int src_regno
;
6383 /* Perform a quick check so we don't blow up below. */
6385 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6386 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != MEM
6387 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != REG
)
6390 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
6391 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, 0)), 0);
6393 for (i
= 1; i
< count
; i
++)
6395 rtx elt
= XVECEXP (op
, 0, i
+ 1);
6397 if (GET_CODE (elt
) != SET
6398 || GET_CODE (SET_SRC (elt
)) != REG
6399 || GET_MODE (SET_SRC (elt
)) != SImode
6400 || REGNO (SET_SRC (elt
)) != src_regno
+ i
6401 || GET_CODE (SET_DEST (elt
)) != MEM
6402 || GET_MODE (SET_DEST (elt
)) != SImode
6403 || GET_CODE (XEXP (SET_DEST (elt
), 0)) != PLUS
6404 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt
), 0), 0), dest_addr
)
6405 || GET_CODE (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != CONST_INT
6406 || INTVAL (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != i
* 4)
6413 /* Return a string to perform a load_multiple operation.
6414 operands[0] is the vector.
6415 operands[1] is the source address.
6416 operands[2] is the first destination register. */
6419 rs6000_output_load_multiple (operands
)
6422 /* We have to handle the case where the pseudo used to contain the address
6423 is assigned to one of the output registers. */
6425 int words
= XVECLEN (operands
[0], 0);
6428 if (XVECLEN (operands
[0], 0) == 1)
6429 return "{l|lwz} %2,0(%1)";
6431 for (i
= 0; i
< words
; i
++)
6432 if (refers_to_regno_p (REGNO (operands
[2]) + i
,
6433 REGNO (operands
[2]) + i
+ 1, operands
[1], 0))
6437 xop
[0] = GEN_INT (4 * (words
-1));
6438 xop
[1] = operands
[1];
6439 xop
[2] = operands
[2];
6440 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop
);
6445 xop
[0] = GEN_INT (4 * (words
-1));
6446 xop
[1] = operands
[1];
6447 xop
[2] = gen_rtx_REG (SImode
, REGNO (operands
[2]) + 1);
6448 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop
);
6453 for (j
= 0; j
< words
; j
++)
6456 xop
[0] = GEN_INT (j
* 4);
6457 xop
[1] = operands
[1];
6458 xop
[2] = gen_rtx_REG (SImode
, REGNO (operands
[2]) + j
);
6459 output_asm_insn ("{l|lwz} %2,%0(%1)", xop
);
6461 xop
[0] = GEN_INT (i
* 4);
6462 xop
[1] = operands
[1];
6463 output_asm_insn ("{l|lwz} %1,%0(%1)", xop
);
6468 return "{lsi|lswi} %2,%1,%N0";
6471 /* Return 1 for a parallel vrsave operation. */
6474 vrsave_operation (op
, mode
)
6476 enum machine_mode mode ATTRIBUTE_UNUSED
;
6478 int count
= XVECLEN (op
, 0);
6479 unsigned int dest_regno
, src_regno
;
6483 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6484 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
6485 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != UNSPEC_VOLATILE
)
6488 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
6489 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
6491 if (dest_regno
!= VRSAVE_REGNO
6492 && src_regno
!= VRSAVE_REGNO
)
6495 for (i
= 1; i
< count
; i
++)
6497 rtx elt
= XVECEXP (op
, 0, i
);
6499 if (GET_CODE (elt
) != CLOBBER
6500 && GET_CODE (elt
) != SET
)
6507 /* Return 1 for an PARALLEL suitable for mtcrf. */
6510 mtcrf_operation (op
, mode
)
6512 enum machine_mode mode ATTRIBUTE_UNUSED
;
6514 int count
= XVECLEN (op
, 0);
6518 /* Perform a quick check so we don't blow up below. */
6520 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6521 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != UNSPEC
6522 || XVECLEN (SET_SRC (XVECEXP (op
, 0, 0)), 0) != 2)
6524 src_reg
= XVECEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0, 0);
6526 if (GET_CODE (src_reg
) != REG
6527 || GET_MODE (src_reg
) != SImode
6528 || ! INT_REGNO_P (REGNO (src_reg
)))
6531 for (i
= 0; i
< count
; i
++)
6533 rtx exp
= XVECEXP (op
, 0, i
);
6537 if (GET_CODE (exp
) != SET
6538 || GET_CODE (SET_DEST (exp
)) != REG
6539 || GET_MODE (SET_DEST (exp
)) != CCmode
6540 || ! CR_REGNO_P (REGNO (SET_DEST (exp
))))
6542 unspec
= SET_SRC (exp
);
6543 maskval
= 1 << (MAX_CR_REGNO
- REGNO (SET_DEST (exp
)));
6545 if (GET_CODE (unspec
) != UNSPEC
6546 || XINT (unspec
, 1) != 20
6547 || XVECLEN (unspec
, 0) != 2
6548 || XVECEXP (unspec
, 0, 0) != src_reg
6549 || GET_CODE (XVECEXP (unspec
, 0, 1)) != CONST_INT
6550 || INTVAL (XVECEXP (unspec
, 0, 1)) != maskval
)
6556 /* Return 1 for an PARALLEL suitable for lmw. */
6559 lmw_operation (op
, mode
)
6561 enum machine_mode mode ATTRIBUTE_UNUSED
;
6563 int count
= XVECLEN (op
, 0);
6564 unsigned int dest_regno
;
6566 unsigned int base_regno
;
6567 HOST_WIDE_INT offset
;
6570 /* Perform a quick check so we don't blow up below. */
6572 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6573 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
6574 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != MEM
)
6577 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
6578 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0);
6581 || count
!= 32 - (int) dest_regno
)
6584 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr
, 0))
6587 base_regno
= REGNO (src_addr
);
6588 if (base_regno
== 0)
6591 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, src_addr
, 0))
6593 offset
= INTVAL (XEXP (src_addr
, 1));
6594 base_regno
= REGNO (XEXP (src_addr
, 0));
6599 for (i
= 0; i
< count
; i
++)
6601 rtx elt
= XVECEXP (op
, 0, i
);
6604 HOST_WIDE_INT newoffset
;
6606 if (GET_CODE (elt
) != SET
6607 || GET_CODE (SET_DEST (elt
)) != REG
6608 || GET_MODE (SET_DEST (elt
)) != SImode
6609 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
6610 || GET_CODE (SET_SRC (elt
)) != MEM
6611 || GET_MODE (SET_SRC (elt
)) != SImode
)
6613 newaddr
= XEXP (SET_SRC (elt
), 0);
6614 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr
, 0))
6619 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, newaddr
, 0))
6621 addr_reg
= XEXP (newaddr
, 0);
6622 newoffset
= INTVAL (XEXP (newaddr
, 1));
6626 if (REGNO (addr_reg
) != base_regno
6627 || newoffset
!= offset
+ 4 * i
)
6634 /* Return 1 for an PARALLEL suitable for stmw. */
6637 stmw_operation (op
, mode
)
6639 enum machine_mode mode ATTRIBUTE_UNUSED
;
6641 int count
= XVECLEN (op
, 0);
6642 unsigned int src_regno
;
6644 unsigned int base_regno
;
6645 HOST_WIDE_INT offset
;
6648 /* Perform a quick check so we don't blow up below. */
6650 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6651 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != MEM
6652 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != REG
)
6655 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
6656 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, 0)), 0);
6659 || count
!= 32 - (int) src_regno
)
6662 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr
, 0))
6665 base_regno
= REGNO (dest_addr
);
6666 if (base_regno
== 0)
6669 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, dest_addr
, 0))
6671 offset
= INTVAL (XEXP (dest_addr
, 1));
6672 base_regno
= REGNO (XEXP (dest_addr
, 0));
6677 for (i
= 0; i
< count
; i
++)
6679 rtx elt
= XVECEXP (op
, 0, i
);
6682 HOST_WIDE_INT newoffset
;
6684 if (GET_CODE (elt
) != SET
6685 || GET_CODE (SET_SRC (elt
)) != REG
6686 || GET_MODE (SET_SRC (elt
)) != SImode
6687 || REGNO (SET_SRC (elt
)) != src_regno
+ i
6688 || GET_CODE (SET_DEST (elt
)) != MEM
6689 || GET_MODE (SET_DEST (elt
)) != SImode
)
6691 newaddr
= XEXP (SET_DEST (elt
), 0);
6692 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr
, 0))
6697 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, newaddr
, 0))
6699 addr_reg
= XEXP (newaddr
, 0);
6700 newoffset
= INTVAL (XEXP (newaddr
, 1));
6704 if (REGNO (addr_reg
) != base_regno
6705 || newoffset
!= offset
+ 4 * i
)
6712 /* A validation routine: say whether CODE, a condition code, and MODE
6713 match. The other alternatives either don't make sense or should
6714 never be generated. */
6717 validate_condition_mode (code
, mode
)
6719 enum machine_mode mode
;
6721 if (GET_RTX_CLASS (code
) != '<'
6722 || GET_MODE_CLASS (mode
) != MODE_CC
)
6725 /* These don't make sense. */
6726 if ((code
== GT
|| code
== LT
|| code
== GE
|| code
== LE
)
6727 && mode
== CCUNSmode
)
6730 if ((code
== GTU
|| code
== LTU
|| code
== GEU
|| code
== LEU
)
6731 && mode
!= CCUNSmode
)
6734 if (mode
!= CCFPmode
6735 && (code
== ORDERED
|| code
== UNORDERED
6736 || code
== UNEQ
|| code
== LTGT
6737 || code
== UNGT
|| code
== UNLT
6738 || code
== UNGE
|| code
== UNLE
))
6741 /* These should never be generated except for
6742 flag_unsafe_math_optimizations and flag_finite_math_only. */
6743 if (mode
== CCFPmode
6744 && ! flag_unsafe_math_optimizations
6745 && ! flag_finite_math_only
6746 && (code
== LE
|| code
== GE
6747 || code
== UNEQ
|| code
== LTGT
6748 || code
== UNGT
|| code
== UNLT
))
6751 /* These are invalid; the information is not there. */
6752 if (mode
== CCEQmode
6753 && code
!= EQ
&& code
!= NE
)
6757 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
6758 We only check the opcode against the mode of the CC value here. */
6761 branch_comparison_operator (op
, mode
)
6763 enum machine_mode mode ATTRIBUTE_UNUSED
;
6765 enum rtx_code code
= GET_CODE (op
);
6766 enum machine_mode cc_mode
;
6768 if (GET_RTX_CLASS (code
) != '<')
6771 cc_mode
= GET_MODE (XEXP (op
, 0));
6772 if (GET_MODE_CLASS (cc_mode
) != MODE_CC
)
6775 validate_condition_mode (code
, cc_mode
);
6780 /* Return 1 if OP is a comparison operation that is valid for a branch
6781 insn and which is true if the corresponding bit in the CC register
6785 branch_positive_comparison_operator (op
, mode
)
6787 enum machine_mode mode
;
6791 if (! branch_comparison_operator (op
, mode
))
6794 code
= GET_CODE (op
);
6795 return (code
== EQ
|| code
== LT
|| code
== GT
6796 || (TARGET_SPE
&& TARGET_HARD_FLOAT
&& !TARGET_FPRS
&& code
== NE
)
6797 || code
== LTU
|| code
== GTU
6798 || code
== UNORDERED
);
6801 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
6802 We check the opcode against the mode of the CC value and disallow EQ or
6803 NE comparisons for integers. */
6806 scc_comparison_operator (op
, mode
)
6808 enum machine_mode mode
;
6810 enum rtx_code code
= GET_CODE (op
);
6811 enum machine_mode cc_mode
;
6813 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
6816 if (GET_RTX_CLASS (code
) != '<')
6819 cc_mode
= GET_MODE (XEXP (op
, 0));
6820 if (GET_MODE_CLASS (cc_mode
) != MODE_CC
)
6823 validate_condition_mode (code
, cc_mode
);
6825 if (code
== NE
&& cc_mode
!= CCFPmode
)
6832 trap_comparison_operator (op
, mode
)
6834 enum machine_mode mode
;
6836 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
6838 return GET_RTX_CLASS (GET_CODE (op
)) == '<';
6842 boolean_operator (op
, mode
)
6844 enum machine_mode mode ATTRIBUTE_UNUSED
;
6846 enum rtx_code code
= GET_CODE (op
);
6847 return (code
== AND
|| code
== IOR
|| code
== XOR
);
6851 boolean_or_operator (op
, mode
)
6853 enum machine_mode mode ATTRIBUTE_UNUSED
;
6855 enum rtx_code code
= GET_CODE (op
);
6856 return (code
== IOR
|| code
== XOR
);
6860 min_max_operator (op
, mode
)
6862 enum machine_mode mode ATTRIBUTE_UNUSED
;
6864 enum rtx_code code
= GET_CODE (op
);
6865 return (code
== SMIN
|| code
== SMAX
|| code
== UMIN
|| code
== UMAX
);
6868 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
6869 mask required to convert the result of a rotate insn into a shift
6870 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
6873 includes_lshift_p (shiftop
, andop
)
6877 unsigned HOST_WIDE_INT shift_mask
= ~(unsigned HOST_WIDE_INT
) 0;
6879 shift_mask
<<= INTVAL (shiftop
);
6881 return (INTVAL (andop
) & 0xffffffff & ~shift_mask
) == 0;
6884 /* Similar, but for right shift. */
6887 includes_rshift_p (shiftop
, andop
)
6891 unsigned HOST_WIDE_INT shift_mask
= ~(unsigned HOST_WIDE_INT
) 0;
6893 shift_mask
>>= INTVAL (shiftop
);
6895 return (INTVAL (andop
) & 0xffffffff & ~shift_mask
) == 0;
6898 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
6899 to perform a left shift. It must have exactly SHIFTOP least
6900 signifigant 0's, then one or more 1's, then zero or more 0's. */
6903 includes_rldic_lshift_p (shiftop
, andop
)
6907 if (GET_CODE (andop
) == CONST_INT
)
6909 HOST_WIDE_INT c
, lsb
, shift_mask
;
6912 if (c
== 0 || c
== ~0)
6916 shift_mask
<<= INTVAL (shiftop
);
6918 /* Find the least signifigant one bit. */
6921 /* It must coincide with the LSB of the shift mask. */
6922 if (-lsb
!= shift_mask
)
6925 /* Invert to look for the next transition (if any). */
6928 /* Remove the low group of ones (originally low group of zeros). */
6931 /* Again find the lsb, and check we have all 1's above. */
6935 else if (GET_CODE (andop
) == CONST_DOUBLE
6936 && (GET_MODE (andop
) == VOIDmode
|| GET_MODE (andop
) == DImode
))
6938 HOST_WIDE_INT low
, high
, lsb
;
6939 HOST_WIDE_INT shift_mask_low
, shift_mask_high
;
6941 low
= CONST_DOUBLE_LOW (andop
);
6942 if (HOST_BITS_PER_WIDE_INT
< 64)
6943 high
= CONST_DOUBLE_HIGH (andop
);
6945 if ((low
== 0 && (HOST_BITS_PER_WIDE_INT
>= 64 || high
== 0))
6946 || (low
== ~0 && (HOST_BITS_PER_WIDE_INT
>= 64 || high
== ~0)))
6949 if (HOST_BITS_PER_WIDE_INT
< 64 && low
== 0)
6951 shift_mask_high
= ~0;
6952 if (INTVAL (shiftop
) > 32)
6953 shift_mask_high
<<= INTVAL (shiftop
) - 32;
6957 if (-lsb
!= shift_mask_high
|| INTVAL (shiftop
) < 32)
6964 return high
== -lsb
;
6967 shift_mask_low
= ~0;
6968 shift_mask_low
<<= INTVAL (shiftop
);
6972 if (-lsb
!= shift_mask_low
)
6975 if (HOST_BITS_PER_WIDE_INT
< 64)
6980 if (HOST_BITS_PER_WIDE_INT
< 64 && low
== 0)
6983 return high
== -lsb
;
6987 return low
== -lsb
&& (HOST_BITS_PER_WIDE_INT
>= 64 || high
== ~0);
6993 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
6994 to perform a left shift. It must have SHIFTOP or more least
6995 signifigant 0's, with the remainder of the word 1's. */
6998 includes_rldicr_lshift_p (shiftop
, andop
)
7002 if (GET_CODE (andop
) == CONST_INT
)
7004 HOST_WIDE_INT c
, lsb
, shift_mask
;
7007 shift_mask
<<= INTVAL (shiftop
);
7010 /* Find the least signifigant one bit. */
7013 /* It must be covered by the shift mask.
7014 This test also rejects c == 0. */
7015 if ((lsb
& shift_mask
) == 0)
7018 /* Check we have all 1's above the transition, and reject all 1's. */
7019 return c
== -lsb
&& lsb
!= 1;
7021 else if (GET_CODE (andop
) == CONST_DOUBLE
7022 && (GET_MODE (andop
) == VOIDmode
|| GET_MODE (andop
) == DImode
))
7024 HOST_WIDE_INT low
, lsb
, shift_mask_low
;
7026 low
= CONST_DOUBLE_LOW (andop
);
7028 if (HOST_BITS_PER_WIDE_INT
< 64)
7030 HOST_WIDE_INT high
, shift_mask_high
;
7032 high
= CONST_DOUBLE_HIGH (andop
);
7036 shift_mask_high
= ~0;
7037 if (INTVAL (shiftop
) > 32)
7038 shift_mask_high
<<= INTVAL (shiftop
) - 32;
7042 if ((lsb
& shift_mask_high
) == 0)
7045 return high
== -lsb
;
7051 shift_mask_low
= ~0;
7052 shift_mask_low
<<= INTVAL (shiftop
);
7056 if ((lsb
& shift_mask_low
) == 0)
7059 return low
== -lsb
&& lsb
!= 1;
7065 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
7066 for lfq and stfq insns.
7068 Note reg1 and reg2 *must* be hard registers. To be sure we will
7069 abort if we are passed pseudo registers. */
7072 registers_ok_for_quad_peep (reg1
, reg2
)
7075 /* We might have been passed a SUBREG. */
7076 if (GET_CODE (reg1
) != REG
|| GET_CODE (reg2
) != REG
)
7079 return (REGNO (reg1
) == REGNO (reg2
) - 1);
7082 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
7083 addr1 and addr2 must be in consecutive memory locations
7084 (addr2 == addr1 + 8). */
7087 addrs_ok_for_quad_peep (addr1
, addr2
)
7094 /* Extract an offset (if used) from the first addr. */
7095 if (GET_CODE (addr1
) == PLUS
)
7097 /* If not a REG, return zero. */
7098 if (GET_CODE (XEXP (addr1
, 0)) != REG
)
7102 reg1
= REGNO (XEXP (addr1
, 0));
7103 /* The offset must be constant! */
7104 if (GET_CODE (XEXP (addr1
, 1)) != CONST_INT
)
7106 offset1
= INTVAL (XEXP (addr1
, 1));
7109 else if (GET_CODE (addr1
) != REG
)
7113 reg1
= REGNO (addr1
);
7114 /* This was a simple (mem (reg)) expression. Offset is 0. */
7118 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
7119 if (GET_CODE (addr2
) != PLUS
)
7122 if (GET_CODE (XEXP (addr2
, 0)) != REG
7123 || GET_CODE (XEXP (addr2
, 1)) != CONST_INT
)
7126 if (reg1
!= REGNO (XEXP (addr2
, 0)))
7129 /* The offset for the second addr must be 8 more than the first addr. */
7130 if (INTVAL (XEXP (addr2
, 1)) != offset1
+ 8)
7133 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7138 /* Return the register class of a scratch register needed to copy IN into
7139 or out of a register in CLASS in MODE. If it can be done directly,
7140 NO_REGS is returned. */
7143 secondary_reload_class (class, mode
, in
)
7144 enum reg_class
class;
7145 enum machine_mode mode ATTRIBUTE_UNUSED
;
7150 if (TARGET_ELF
|| (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))
7152 /* We cannot copy a symbolic operand directly into anything
7153 other than BASE_REGS for TARGET_ELF. So indicate that a
7154 register from BASE_REGS is needed as an intermediate
7157 On Darwin, pic addresses require a load from memory, which
7158 needs a base register. */
7159 if (class != BASE_REGS
7160 && (GET_CODE (in
) == SYMBOL_REF
7161 || GET_CODE (in
) == HIGH
7162 || GET_CODE (in
) == LABEL_REF
7163 || GET_CODE (in
) == CONST
))
7167 if (GET_CODE (in
) == REG
)
7170 if (regno
>= FIRST_PSEUDO_REGISTER
)
7172 regno
= true_regnum (in
);
7173 if (regno
>= FIRST_PSEUDO_REGISTER
)
7177 else if (GET_CODE (in
) == SUBREG
)
7179 regno
= true_regnum (in
);
7180 if (regno
>= FIRST_PSEUDO_REGISTER
)
7186 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
7188 if (class == GENERAL_REGS
|| class == BASE_REGS
7189 || (regno
>= 0 && INT_REGNO_P (regno
)))
7192 /* Constants, memory, and FP registers can go into FP registers. */
7193 if ((regno
== -1 || FP_REGNO_P (regno
))
7194 && (class == FLOAT_REGS
|| class == NON_SPECIAL_REGS
))
7197 /* Memory, and AltiVec registers can go into AltiVec registers. */
7198 if ((regno
== -1 || ALTIVEC_REGNO_P (regno
))
7199 && class == ALTIVEC_REGS
)
7202 /* We can copy among the CR registers. */
7203 if ((class == CR_REGS
|| class == CR0_REGS
)
7204 && regno
>= 0 && CR_REGNO_P (regno
))
7207 /* Otherwise, we need GENERAL_REGS. */
7208 return GENERAL_REGS
;
7211 /* Given a comparison operation, return the bit number in CCR to test. We
7212 know this is a valid comparison.
7214 SCC_P is 1 if this is for an scc. That means that %D will have been
7215 used instead of %C, so the bits will be in different places.
7217 Return -1 if OP isn't a valid comparison for some reason. */
7224 enum rtx_code code
= GET_CODE (op
);
7225 enum machine_mode cc_mode
;
7230 if (GET_RTX_CLASS (code
) != '<')
7235 if (GET_CODE (reg
) != REG
7236 || ! CR_REGNO_P (REGNO (reg
)))
7239 cc_mode
= GET_MODE (reg
);
7240 cc_regnum
= REGNO (reg
);
7241 base_bit
= 4 * (cc_regnum
- CR0_REGNO
);
7243 validate_condition_mode (code
, cc_mode
);
7248 if (TARGET_SPE
&& TARGET_HARD_FLOAT
&& cc_mode
== CCFPmode
)
7249 return base_bit
+ 1;
7250 return scc_p
? base_bit
+ 3 : base_bit
+ 2;
7252 if (TARGET_SPE
&& TARGET_HARD_FLOAT
&& cc_mode
== CCFPmode
)
7253 return base_bit
+ 1;
7254 return base_bit
+ 2;
7255 case GT
: case GTU
: case UNLE
:
7256 return base_bit
+ 1;
7257 case LT
: case LTU
: case UNGE
:
7259 case ORDERED
: case UNORDERED
:
7260 return base_bit
+ 3;
7263 /* If scc, we will have done a cror to put the bit in the
7264 unordered position. So test that bit. For integer, this is ! LT
7265 unless this is an scc insn. */
7266 return scc_p
? base_bit
+ 3 : base_bit
;
7269 return scc_p
? base_bit
+ 3 : base_bit
+ 1;
7276 /* Return the GOT register. */
7279 rs6000_got_register (value
)
7280 rtx value ATTRIBUTE_UNUSED
;
7282 /* The second flow pass currently (June 1999) can't update
7283 regs_ever_live without disturbing other parts of the compiler, so
7284 update it here to make the prolog/epilogue code happy. */
7285 if (no_new_pseudos
&& ! regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
])
7286 regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
] = 1;
7288 current_function_uses_pic_offset_table
= 1;
7290 return pic_offset_table_rtx
;
7293 /* Function to init struct machine_function.
7294 This will be called, via a pointer variable,
7295 from push_function_context. */
7297 static struct machine_function
*
7298 rs6000_init_machine_status ()
7300 return ggc_alloc_cleared (sizeof (machine_function
));
7303 /* These macros test for integers and extract the low-order bits. */
7305 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
7306 && GET_MODE (X) == VOIDmode)
7308 #define INT_LOWPART(X) \
7309 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
7316 unsigned long val
= INT_LOWPART (op
);
7318 /* If the high bit is zero, the value is the first 1 bit we find
7320 if ((val
& 0x80000000) == 0)
7322 if ((val
& 0xffffffff) == 0)
7326 while (((val
<<= 1) & 0x80000000) == 0)
7331 /* If the high bit is set and the low bit is not, or the mask is all
7332 1's, the value is zero. */
7333 if ((val
& 1) == 0 || (val
& 0xffffffff) == 0xffffffff)
7336 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7339 while (((val
>>= 1) & 1) != 0)
7350 unsigned long val
= INT_LOWPART (op
);
7352 /* If the low bit is zero, the value is the first 1 bit we find from
7356 if ((val
& 0xffffffff) == 0)
7360 while (((val
>>= 1) & 1) == 0)
7366 /* If the low bit is set and the high bit is not, or the mask is all
7367 1's, the value is 31. */
7368 if ((val
& 0x80000000) == 0 || (val
& 0xffffffff) == 0xffffffff)
7371 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7374 while (((val
<<= 1) & 0x80000000) != 0)
7380 /* Print an operand. Recognize special options, documented below. */
7383 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
7384 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
7386 #define SMALL_DATA_RELOC "sda21"
7387 #define SMALL_DATA_REG 0
7391 print_operand (file
, x
, code
)
7398 unsigned HOST_WIDE_INT uval
;
7403 /* Write out an instruction after the call which may be replaced
7404 with glue code by the loader. This depends on the AIX version. */
7405 asm_fprintf (file
, RS6000_CALL_GLUE
);
7408 /* %a is output_address. */
7411 /* If X is a constant integer whose low-order 5 bits are zero,
7412 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
7413 in the AIX assembler where "sri" with a zero shift count
7414 writes a trash instruction. */
7415 if (GET_CODE (x
) == CONST_INT
&& (INTVAL (x
) & 31) == 0)
7422 /* If constant, low-order 16 bits of constant, unsigned.
7423 Otherwise, write normally. */
7425 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 0xffff);
7427 print_operand (file
, x
, 0);
7431 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
7432 for 64-bit mask direction. */
7433 putc (((INT_LOWPART(x
) & 1) == 0 ? 'r' : 'l'), file
);
7436 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
7440 /* There used to be a comment for 'C' reading "This is an
7441 optional cror needed for certain floating-point
7442 comparisons. Otherwise write nothing." */
7444 /* Similar, except that this is for an scc, so we must be able to
7445 encode the test in a single bit that is one. We do the above
7446 for any LE, GE, GEU, or LEU and invert the bit for NE. */
7447 if (GET_CODE (x
) == LE
|| GET_CODE (x
) == GE
7448 || GET_CODE (x
) == LEU
|| GET_CODE (x
) == GEU
)
7450 int base_bit
= 4 * (REGNO (XEXP (x
, 0)) - CR0_REGNO
);
7452 fprintf (file
, "cror %d,%d,%d\n\t", base_bit
+ 3,
7454 base_bit
+ (GET_CODE (x
) == GE
|| GET_CODE (x
) == GEU
));
7457 else if (GET_CODE (x
) == NE
)
7459 int base_bit
= 4 * (REGNO (XEXP (x
, 0)) - CR0_REGNO
);
7461 fprintf (file
, "crnor %d,%d,%d\n\t", base_bit
+ 3,
7462 base_bit
+ 2, base_bit
+ 2);
7464 else if (TARGET_SPE
&& TARGET_HARD_FLOAT
7465 && GET_CODE (x
) == EQ
7466 && GET_MODE (XEXP (x
, 0)) == CCFPmode
)
7468 int base_bit
= 4 * (REGNO (XEXP (x
, 0)) - CR0_REGNO
);
7470 fprintf (file
, "crnor %d,%d,%d\n\t", base_bit
+ 1,
7471 base_bit
+ 1, base_bit
+ 1);
7476 /* X is a CR register. Print the number of the EQ bit of the CR */
7477 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7478 output_operand_lossage ("invalid %%E value");
7480 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
) + 2);
7484 /* X is a CR register. Print the shift count needed to move it
7485 to the high-order four bits. */
7486 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7487 output_operand_lossage ("invalid %%f value");
7489 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
));
7493 /* Similar, but print the count for the rotate in the opposite
7495 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7496 output_operand_lossage ("invalid %%F value");
7498 fprintf (file
, "%d", 32 - 4 * (REGNO (x
) - CR0_REGNO
));
7502 /* X is a constant integer. If it is negative, print "m",
7503 otherwise print "z". This is to make an aze or ame insn. */
7504 if (GET_CODE (x
) != CONST_INT
)
7505 output_operand_lossage ("invalid %%G value");
7506 else if (INTVAL (x
) >= 0)
7513 /* If constant, output low-order five bits. Otherwise, write
7516 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 31);
7518 print_operand (file
, x
, 0);
7522 /* If constant, output low-order six bits. Otherwise, write
7525 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 63);
7527 print_operand (file
, x
, 0);
7531 /* Print `i' if this is a constant, else nothing. */
7537 /* Write the bit number in CCR for jump. */
7540 output_operand_lossage ("invalid %%j code");
7542 fprintf (file
, "%d", i
);
7546 /* Similar, but add one for shift count in rlinm for scc and pass
7547 scc flag to `ccr_bit'. */
7550 output_operand_lossage ("invalid %%J code");
7552 /* If we want bit 31, write a shift count of zero, not 32. */
7553 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
7557 /* X must be a constant. Write the 1's complement of the
7560 output_operand_lossage ("invalid %%k value");
7562 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ~ INT_LOWPART (x
));
7566 /* X must be a symbolic constant on ELF. Write an
7567 expression suitable for an 'addi' that adds in the low 16
7569 if (GET_CODE (x
) != CONST
)
7571 print_operand_address (file
, x
);
7576 if (GET_CODE (XEXP (x
, 0)) != PLUS
7577 || (GET_CODE (XEXP (XEXP (x
, 0), 0)) != SYMBOL_REF
7578 && GET_CODE (XEXP (XEXP (x
, 0), 0)) != LABEL_REF
)
7579 || GET_CODE (XEXP (XEXP (x
, 0), 1)) != CONST_INT
)
7580 output_operand_lossage ("invalid %%K value");
7581 print_operand_address (file
, XEXP (XEXP (x
, 0), 0));
7583 /* For GNU as, there must be a non-alphanumeric character
7584 between 'l' and the number. The '-' is added by
7585 print_operand() already. */
7586 if (INTVAL (XEXP (XEXP (x
, 0), 1)) >= 0)
7588 print_operand (file
, XEXP (XEXP (x
, 0), 1), 0);
7592 /* %l is output_asm_label. */
7595 /* Write second word of DImode or DFmode reference. Works on register
7596 or non-indexed memory only. */
7597 if (GET_CODE (x
) == REG
)
7598 fprintf (file
, "%s", reg_names
[REGNO (x
) + 1]);
7599 else if (GET_CODE (x
) == MEM
)
7601 /* Handle possible auto-increment. Since it is pre-increment and
7602 we have already done it, we can just use an offset of word. */
7603 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
7604 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
7605 output_address (plus_constant (XEXP (XEXP (x
, 0), 0),
7608 output_address (XEXP (adjust_address_nv (x
, SImode
,
7612 if (small_data_operand (x
, GET_MODE (x
)))
7613 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
7614 reg_names
[SMALL_DATA_REG
]);
7619 /* MB value for a mask operand. */
7620 if (! mask_operand (x
, SImode
))
7621 output_operand_lossage ("invalid %%m value");
7623 fprintf (file
, "%d", extract_MB (x
));
7627 /* ME value for a mask operand. */
7628 if (! mask_operand (x
, SImode
))
7629 output_operand_lossage ("invalid %%M value");
7631 fprintf (file
, "%d", extract_ME (x
));
7634 /* %n outputs the negative of its operand. */
7637 /* Write the number of elements in the vector times 4. */
7638 if (GET_CODE (x
) != PARALLEL
)
7639 output_operand_lossage ("invalid %%N value");
7641 fprintf (file
, "%d", XVECLEN (x
, 0) * 4);
7645 /* Similar, but subtract 1 first. */
7646 if (GET_CODE (x
) != PARALLEL
)
7647 output_operand_lossage ("invalid %%O value");
7649 fprintf (file
, "%d", (XVECLEN (x
, 0) - 1) * 4);
7653 /* X is a CONST_INT that is a power of two. Output the logarithm. */
7655 || INT_LOWPART (x
) < 0
7656 || (i
= exact_log2 (INT_LOWPART (x
))) < 0)
7657 output_operand_lossage ("invalid %%p value");
7659 fprintf (file
, "%d", i
);
7663 /* The operand must be an indirect memory reference. The result
7664 is the register number. */
7665 if (GET_CODE (x
) != MEM
|| GET_CODE (XEXP (x
, 0)) != REG
7666 || REGNO (XEXP (x
, 0)) >= 32)
7667 output_operand_lossage ("invalid %%P value");
7669 fprintf (file
, "%d", REGNO (XEXP (x
, 0)));
7673 /* This outputs the logical code corresponding to a boolean
7674 expression. The expression may have one or both operands
7675 negated (if one, only the first one). For condition register
7676 logical operations, it will also treat the negated
7677 CR codes as NOTs, but not handle NOTs of them. */
7679 const char *const *t
= 0;
7681 enum rtx_code code
= GET_CODE (x
);
7682 static const char * const tbl
[3][3] = {
7683 { "and", "andc", "nor" },
7684 { "or", "orc", "nand" },
7685 { "xor", "eqv", "xor" } };
7689 else if (code
== IOR
)
7691 else if (code
== XOR
)
7694 output_operand_lossage ("invalid %%q value");
7696 if (GET_CODE (XEXP (x
, 0)) != NOT
)
7700 if (GET_CODE (XEXP (x
, 1)) == NOT
)
7711 /* X is a CR register. Print the mask for `mtcrf'. */
7712 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7713 output_operand_lossage ("invalid %%R value");
7715 fprintf (file
, "%d", 128 >> (REGNO (x
) - CR0_REGNO
));
7719 /* Low 5 bits of 32 - value */
7721 output_operand_lossage ("invalid %%s value");
7723 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, (32 - INT_LOWPART (x
)) & 31);
7727 /* PowerPC64 mask position. All 0's is excluded.
7728 CONST_INT 32-bit mask is considered sign-extended so any
7729 transition must occur within the CONST_INT, not on the boundary. */
7730 if (! mask64_operand (x
, DImode
))
7731 output_operand_lossage ("invalid %%S value");
7733 uval
= INT_LOWPART (x
);
7735 if (uval
& 1) /* Clear Left */
7737 uval
&= ((unsigned HOST_WIDE_INT
) 1 << 63 << 1) - 1;
7740 else /* Clear Right */
7743 uval
&= ((unsigned HOST_WIDE_INT
) 1 << 63 << 1) - 1;
7750 fprintf (file
, "%d", i
);
7754 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
7755 if (GET_CODE (x
) != REG
|| GET_MODE (x
) != CCmode
)
7758 /* Bit 3 is OV bit. */
7759 i
= 4 * (REGNO (x
) - CR0_REGNO
) + 3;
7761 /* If we want bit 31, write a shift count of zero, not 32. */
7762 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
7766 /* Print the symbolic name of a branch target register. */
7767 if (GET_CODE (x
) != REG
|| (REGNO (x
) != LINK_REGISTER_REGNUM
7768 && REGNO (x
) != COUNT_REGISTER_REGNUM
))
7769 output_operand_lossage ("invalid %%T value");
7770 else if (REGNO (x
) == LINK_REGISTER_REGNUM
)
7771 fputs (TARGET_NEW_MNEMONICS
? "lr" : "r", file
);
7773 fputs ("ctr", file
);
7777 /* High-order 16 bits of constant for use in unsigned operand. */
7779 output_operand_lossage ("invalid %%u value");
7781 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
7782 (INT_LOWPART (x
) >> 16) & 0xffff);
7786 /* High-order 16 bits of constant for use in signed operand. */
7788 output_operand_lossage ("invalid %%v value");
7790 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
7791 (INT_LOWPART (x
) >> 16) & 0xffff);
7795 /* Print `u' if this has an auto-increment or auto-decrement. */
7796 if (GET_CODE (x
) == MEM
7797 && (GET_CODE (XEXP (x
, 0)) == PRE_INC
7798 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
))
7803 /* Print the trap code for this operand. */
7804 switch (GET_CODE (x
))
7807 fputs ("eq", file
); /* 4 */
7810 fputs ("ne", file
); /* 24 */
7813 fputs ("lt", file
); /* 16 */
7816 fputs ("le", file
); /* 20 */
7819 fputs ("gt", file
); /* 8 */
7822 fputs ("ge", file
); /* 12 */
7825 fputs ("llt", file
); /* 2 */
7828 fputs ("lle", file
); /* 6 */
7831 fputs ("lgt", file
); /* 1 */
7834 fputs ("lge", file
); /* 5 */
7842 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
7845 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
,
7846 ((INT_LOWPART (x
) & 0xffff) ^ 0x8000) - 0x8000);
7848 print_operand (file
, x
, 0);
7852 /* MB value for a PowerPC64 rldic operand. */
7853 val
= (GET_CODE (x
) == CONST_INT
7854 ? INTVAL (x
) : CONST_DOUBLE_HIGH (x
));
7859 for (i
= 0; i
< HOST_BITS_PER_WIDE_INT
; i
++)
7860 if ((val
<<= 1) < 0)
7863 #if HOST_BITS_PER_WIDE_INT == 32
7864 if (GET_CODE (x
) == CONST_INT
&& i
>= 0)
7865 i
+= 32; /* zero-extend high-part was all 0's */
7866 else if (GET_CODE (x
) == CONST_DOUBLE
&& i
== 32)
7868 val
= CONST_DOUBLE_LOW (x
);
7875 for ( ; i
< 64; i
++)
7876 if ((val
<<= 1) < 0)
7881 fprintf (file
, "%d", i
+ 1);
7885 if (GET_CODE (x
) == MEM
7886 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x
, 0), 0))
7891 /* Like 'L', for third word of TImode */
7892 if (GET_CODE (x
) == REG
)
7893 fprintf (file
, "%s", reg_names
[REGNO (x
) + 2]);
7894 else if (GET_CODE (x
) == MEM
)
7896 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
7897 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
7898 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 8));
7900 output_address (XEXP (adjust_address_nv (x
, SImode
, 8), 0));
7901 if (small_data_operand (x
, GET_MODE (x
)))
7902 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
7903 reg_names
[SMALL_DATA_REG
]);
7908 /* X is a SYMBOL_REF. Write out the name preceded by a
7909 period and without any trailing data in brackets. Used for function
7910 names. If we are configured for System V (or the embedded ABI) on
7911 the PowerPC, do not emit the period, since those systems do not use
7912 TOCs and the like. */
7913 if (GET_CODE (x
) != SYMBOL_REF
)
7916 if (XSTR (x
, 0)[0] != '.')
7918 switch (DEFAULT_ABI
)
7928 case ABI_AIX_NODESC
:
7934 RS6000_OUTPUT_BASENAME (file
, XSTR (x
, 0));
7936 assemble_name (file
, XSTR (x
, 0));
7941 /* Like 'L', for last word of TImode. */
7942 if (GET_CODE (x
) == REG
)
7943 fprintf (file
, "%s", reg_names
[REGNO (x
) + 3]);
7944 else if (GET_CODE (x
) == MEM
)
7946 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
7947 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
7948 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 12));
7950 output_address (XEXP (adjust_address_nv (x
, SImode
, 12), 0));
7951 if (small_data_operand (x
, GET_MODE (x
)))
7952 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
7953 reg_names
[SMALL_DATA_REG
]);
7957 /* Print AltiVec or SPE memory operand. */
7962 if (GET_CODE (x
) != MEM
)
7970 if (GET_CODE (tmp
) == REG
)
7972 fprintf (file
, "0(%s)", reg_names
[REGNO (tmp
)]);
7975 /* Handle [reg+UIMM]. */
7976 else if (GET_CODE (tmp
) == PLUS
&&
7977 GET_CODE (XEXP (tmp
, 1)) == CONST_INT
)
7981 if (GET_CODE (XEXP (tmp
, 0)) != REG
)
7984 x
= INTVAL (XEXP (tmp
, 1));
7985 fprintf (file
, "%d(%s)", x
, reg_names
[REGNO (XEXP (tmp
, 0))]);
7989 /* Fall through. Must be [reg+reg]. */
7991 if (GET_CODE (tmp
) == REG
)
7992 fprintf (file
, "0,%s", reg_names
[REGNO (tmp
)]);
7993 else if (GET_CODE (tmp
) == PLUS
&& GET_CODE (XEXP (tmp
, 1)) == REG
)
7995 if (REGNO (XEXP (tmp
, 0)) == 0)
7996 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (tmp
, 1)) ],
7997 reg_names
[ REGNO (XEXP (tmp
, 0)) ]);
7999 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (tmp
, 0)) ],
8000 reg_names
[ REGNO (XEXP (tmp
, 1)) ]);
8008 if (GET_CODE (x
) == REG
)
8009 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
8010 else if (GET_CODE (x
) == MEM
)
8012 /* We need to handle PRE_INC and PRE_DEC here, since we need to
8013 know the width from the mode. */
8014 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
)
8015 fprintf (file
, "%d(%s)", GET_MODE_SIZE (GET_MODE (x
)),
8016 reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))]);
8017 else if (GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
8018 fprintf (file
, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x
)),
8019 reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))]);
8021 output_address (XEXP (x
, 0));
8024 output_addr_const (file
, x
);
8028 output_operand_lossage ("invalid %%xn code");
8032 /* Print the address of an operand. */
8035 print_operand_address (file
, x
)
8039 if (GET_CODE (x
) == REG
)
8040 fprintf (file
, "0(%s)", reg_names
[ REGNO (x
) ]);
8041 else if (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == CONST
8042 || GET_CODE (x
) == LABEL_REF
)
8044 output_addr_const (file
, x
);
8045 if (small_data_operand (x
, GET_MODE (x
)))
8046 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
8047 reg_names
[SMALL_DATA_REG
]);
8048 else if (TARGET_TOC
)
8051 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == REG
)
8053 if (REGNO (XEXP (x
, 0)) == 0)
8054 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (x
, 1)) ],
8055 reg_names
[ REGNO (XEXP (x
, 0)) ]);
8057 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (x
, 0)) ],
8058 reg_names
[ REGNO (XEXP (x
, 1)) ]);
8060 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
)
8062 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (XEXP (x
, 1)));
8063 fprintf (file
, "(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
8066 else if (GET_CODE (x
) == LO_SUM
&& GET_CODE (XEXP (x
, 0)) == REG
8067 && CONSTANT_P (XEXP (x
, 1)))
8069 output_addr_const (file
, XEXP (x
, 1));
8070 fprintf (file
, "@l(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
8074 else if (GET_CODE (x
) == LO_SUM
&& GET_CODE (XEXP (x
, 0)) == REG
8075 && CONSTANT_P (XEXP (x
, 1)))
8077 fprintf (file
, "lo16(");
8078 output_addr_const (file
, XEXP (x
, 1));
8079 fprintf (file
, ")(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
8082 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x
))
8084 if (TARGET_AIX
&& (!TARGET_ELF
|| !TARGET_MINIMAL_TOC
))
8086 rtx contains_minus
= XEXP (x
, 1);
8090 /* Find the (minus (sym) (toc)) buried in X, and temporarily
8091 turn it into (sym) for output_addr_const. */
8092 while (GET_CODE (XEXP (contains_minus
, 0)) != MINUS
)
8093 contains_minus
= XEXP (contains_minus
, 0);
8095 minus
= XEXP (contains_minus
, 0);
8096 symref
= XEXP (minus
, 0);
8097 XEXP (contains_minus
, 0) = symref
;
8102 name
= XSTR (symref
, 0);
8103 newname
= alloca (strlen (name
) + sizeof ("@toc"));
8104 strcpy (newname
, name
);
8105 strcat (newname
, "@toc");
8106 XSTR (symref
, 0) = newname
;
8108 output_addr_const (file
, XEXP (x
, 1));
8110 XSTR (symref
, 0) = name
;
8111 XEXP (contains_minus
, 0) = minus
;
8114 output_addr_const (file
, XEXP (x
, 1));
8116 fprintf (file
, "(%s)", reg_names
[REGNO (XEXP (x
, 0))]);
8122 /* Target hook for assembling integer objects. The PowerPC version has
8123 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
8124 is defined. It also needs to handle DI-mode objects on 64-bit
8128 rs6000_assemble_integer (x
, size
, aligned_p
)
8133 #ifdef RELOCATABLE_NEEDS_FIXUP
8134 /* Special handling for SI values. */
8135 if (size
== 4 && aligned_p
)
8137 extern int in_toc_section
PARAMS ((void));
8138 static int recurse
= 0;
8140 /* For -mrelocatable, we mark all addresses that need to be fixed up
8141 in the .fixup section. */
8142 if (TARGET_RELOCATABLE
8143 && !in_toc_section ()
8144 && !in_text_section ()
8146 && GET_CODE (x
) != CONST_INT
8147 && GET_CODE (x
) != CONST_DOUBLE
8153 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCP", fixuplabelno
);
8155 ASM_OUTPUT_LABEL (asm_out_file
, buf
);
8156 fprintf (asm_out_file
, "\t.long\t(");
8157 output_addr_const (asm_out_file
, x
);
8158 fprintf (asm_out_file
, ")@fixup\n");
8159 fprintf (asm_out_file
, "\t.section\t\".fixup\",\"aw\"\n");
8160 ASM_OUTPUT_ALIGN (asm_out_file
, 2);
8161 fprintf (asm_out_file
, "\t.long\t");
8162 assemble_name (asm_out_file
, buf
);
8163 fprintf (asm_out_file
, "\n\t.previous\n");
8167 /* Remove initial .'s to turn a -mcall-aixdesc function
8168 address into the address of the descriptor, not the function
8170 else if (GET_CODE (x
) == SYMBOL_REF
8171 && XSTR (x
, 0)[0] == '.'
8172 && DEFAULT_ABI
== ABI_AIX
)
8174 const char *name
= XSTR (x
, 0);
8175 while (*name
== '.')
8178 fprintf (asm_out_file
, "\t.long\t%s\n", name
);
8182 #endif /* RELOCATABLE_NEEDS_FIXUP */
8183 return default_assemble_integer (x
, size
, aligned_p
);
8186 #ifdef HAVE_GAS_HIDDEN
8187 /* Emit an assembler directive to set symbol visibility for DECL to
8191 rs6000_assemble_visibility (decl
, vis
)
8195 /* Functions need to have their entry point symbol visibility set as
8196 well as their descriptor symbol visibility. */
8197 if (DEFAULT_ABI
== ABI_AIX
&& TREE_CODE (decl
) == FUNCTION_DECL
)
8199 static const char * const visibility_types
[] = {
8200 NULL
, "internal", "hidden", "protected"
8203 const char *name
, *type
;
8205 name
= ((* targetm
.strip_name_encoding
)
8206 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
))));
8207 type
= visibility_types
[vis
];
8209 fprintf (asm_out_file
, "\t.%s\t%s\n", type
, name
);
8210 fprintf (asm_out_file
, "\t.%s\t.%s\n", type
, name
);
8213 default_assemble_visibility (decl
, vis
);
8218 rs6000_reverse_condition (mode
, code
)
8219 enum machine_mode mode
;
8222 /* Reversal of FP compares takes care -- an ordered compare
8223 becomes an unordered compare and vice versa. */
8224 if (mode
== CCFPmode
&& !flag_unsafe_math_optimizations
)
8225 return reverse_condition_maybe_unordered (code
);
8227 return reverse_condition (code
);
8230 /* Generate a compare for CODE. Return a brand-new rtx that
8231 represents the result of the compare. */
8234 rs6000_generate_compare (code
)
8237 enum machine_mode comp_mode
;
8240 if (rs6000_compare_fp_p
)
8241 comp_mode
= CCFPmode
;
8242 else if (code
== GTU
|| code
== LTU
8243 || code
== GEU
|| code
== LEU
)
8244 comp_mode
= CCUNSmode
;
8248 /* First, the compare. */
8249 compare_result
= gen_reg_rtx (comp_mode
);
8251 /* SPE FP compare instructions on the GPRs. Yuck! */
8252 if ((TARGET_SPE
&& TARGET_HARD_FLOAT
) && rs6000_compare_fp_p
)
8254 rtx cmp
, or1
, or2
, or_result
, compare_result2
;
8262 cmp
= flag_unsafe_math_optimizations
8263 ? gen_tstsfeq_gpr (compare_result
, rs6000_compare_op0
,
8265 : gen_cmpsfeq_gpr (compare_result
, rs6000_compare_op0
,
8266 rs6000_compare_op1
);
8274 cmp
= flag_unsafe_math_optimizations
8275 ? gen_tstsfgt_gpr (compare_result
, rs6000_compare_op0
,
8277 : gen_cmpsfgt_gpr (compare_result
, rs6000_compare_op0
,
8278 rs6000_compare_op1
);
8286 cmp
= flag_unsafe_math_optimizations
8287 ? gen_tstsflt_gpr (compare_result
, rs6000_compare_op0
,
8289 : gen_cmpsflt_gpr (compare_result
, rs6000_compare_op0
,
8290 rs6000_compare_op1
);
8296 /* Synthesize LE and GE from LT/GT || EQ. */
8297 if (code
== LE
|| code
== GE
|| code
== LEU
|| code
== GEU
)
8299 /* Synthesize GE/LE frome GT/LT || EQ. */
8305 case LE
: code
= LT
; break;
8306 case GE
: code
= GT
; break;
8307 case LEU
: code
= LT
; break;
8308 case GEU
: code
= GT
; break;
8312 or1
= gen_reg_rtx (SImode
);
8313 or2
= gen_reg_rtx (SImode
);
8314 or_result
= gen_reg_rtx (CCEQmode
);
8315 compare_result2
= gen_reg_rtx (CCFPmode
);
8318 cmp
= flag_unsafe_math_optimizations
8319 ? gen_tstsfeq_gpr (compare_result2
, rs6000_compare_op0
,
8321 : gen_cmpsfeq_gpr (compare_result2
, rs6000_compare_op0
,
8322 rs6000_compare_op1
);
8325 /* The MC8540 FP compare instructions set the CR bits
8326 differently than other PPC compare instructions. For
8327 that matter, there is no generic test instruction, but a
8328 testgt, testlt, and testeq. For a true condition, bit 2
8329 is set (x1xx) in the CR. Following the traditional CR
8335 ... bit 2 would be a GT CR alias, so later on we
8336 look in the GT bits for the branch instructins.
8337 However, we must be careful to emit correct RTL in
8338 the meantime, so optimizations don't get confused. */
8340 or1
= gen_rtx (NE
, SImode
, compare_result
, const0_rtx
);
8341 or2
= gen_rtx (NE
, SImode
, compare_result2
, const0_rtx
);
8343 /* OR them together. */
8344 cmp
= gen_rtx_SET (VOIDmode
, or_result
,
8345 gen_rtx_COMPARE (CCEQmode
,
8346 gen_rtx_IOR (SImode
, or1
, or2
),
8348 compare_result
= or_result
;
8353 /* We only care about 1 bit (x1xx), so map everything to NE to
8354 maintain rtl sanity. We'll get to the right bit (x1xx) at
8355 code output time. */
8356 if (code
== NE
|| code
== LTGT
)
8357 /* Do the inverse here because we have no cmpne
8358 instruction. We use the cmpeq instruction and expect
8359 to get a 0 instead. */
8368 emit_insn (gen_rtx_SET (VOIDmode
, compare_result
,
8369 gen_rtx_COMPARE (comp_mode
,
8371 rs6000_compare_op1
)));
8373 /* Some kinds of FP comparisons need an OR operation;
8374 except for flag_unsafe_math_optimizations we don't bother. */
8375 if (rs6000_compare_fp_p
8376 && ! flag_unsafe_math_optimizations
8377 && ! (TARGET_HARD_FLOAT
&& TARGET_SPE
)
8378 && (code
== LE
|| code
== GE
8379 || code
== UNEQ
|| code
== LTGT
8380 || code
== UNGT
|| code
== UNLT
))
8382 enum rtx_code or1
, or2
;
8383 rtx or1_rtx
, or2_rtx
, compare2_rtx
;
8384 rtx or_result
= gen_reg_rtx (CCEQmode
);
8388 case LE
: or1
= LT
; or2
= EQ
; break;
8389 case GE
: or1
= GT
; or2
= EQ
; break;
8390 case UNEQ
: or1
= UNORDERED
; or2
= EQ
; break;
8391 case LTGT
: or1
= LT
; or2
= GT
; break;
8392 case UNGT
: or1
= UNORDERED
; or2
= GT
; break;
8393 case UNLT
: or1
= UNORDERED
; or2
= LT
; break;
8396 validate_condition_mode (or1
, comp_mode
);
8397 validate_condition_mode (or2
, comp_mode
);
8398 or1_rtx
= gen_rtx (or1
, SImode
, compare_result
, const0_rtx
);
8399 or2_rtx
= gen_rtx (or2
, SImode
, compare_result
, const0_rtx
);
8400 compare2_rtx
= gen_rtx_COMPARE (CCEQmode
,
8401 gen_rtx_IOR (SImode
, or1_rtx
, or2_rtx
),
8403 emit_insn (gen_rtx_SET (VOIDmode
, or_result
, compare2_rtx
));
8405 compare_result
= or_result
;
8409 validate_condition_mode (code
, GET_MODE (compare_result
));
8411 return gen_rtx (code
, VOIDmode
, compare_result
, const0_rtx
);
8415 /* Emit the RTL for an sCOND pattern. */
8418 rs6000_emit_sCOND (code
, result
)
8423 enum machine_mode op_mode
;
8425 condition_rtx
= rs6000_generate_compare (code
);
8427 op_mode
= GET_MODE (rs6000_compare_op0
);
8428 if (op_mode
== VOIDmode
)
8429 op_mode
= GET_MODE (rs6000_compare_op1
);
8431 if (TARGET_POWERPC64
&& (op_mode
== DImode
|| rs6000_compare_fp_p
))
8433 PUT_MODE (condition_rtx
, DImode
);
8434 convert_move (result
, condition_rtx
, 0);
8438 PUT_MODE (condition_rtx
, SImode
);
8439 emit_insn (gen_rtx_SET (VOIDmode
, result
, condition_rtx
));
8443 /* Emit a branch of kind CODE to location LOC. */
8446 rs6000_emit_cbranch (code
, loc
)
8450 rtx condition_rtx
, loc_ref
;
8452 condition_rtx
= rs6000_generate_compare (code
);
8453 loc_ref
= gen_rtx_LABEL_REF (VOIDmode
, loc
);
8454 emit_jump_insn (gen_rtx_SET (VOIDmode
, pc_rtx
,
8455 gen_rtx_IF_THEN_ELSE (VOIDmode
, condition_rtx
,
8459 /* Return the string to output a conditional branch to LABEL, which is
8460 the operand number of the label, or -1 if the branch is really a
8463 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
8464 condition code register and its mode specifies what kind of
8467 REVERSED is nonzero if we should reverse the sense of the comparison.
8469 INSN is the insn. */
8472 output_cbranch (op
, label
, reversed
, insn
)
8478 static char string
[64];
8479 enum rtx_code code
= GET_CODE (op
);
8480 rtx cc_reg
= XEXP (op
, 0);
8481 enum machine_mode mode
= GET_MODE (cc_reg
);
8482 int cc_regno
= REGNO (cc_reg
) - CR0_REGNO
;
8483 int need_longbranch
= label
!= NULL
&& get_attr_length (insn
) == 8;
8484 int really_reversed
= reversed
^ need_longbranch
;
8490 validate_condition_mode (code
, mode
);
8492 /* Work out which way this really branches. We could use
8493 reverse_condition_maybe_unordered here always but this
8494 makes the resulting assembler clearer. */
8495 if (really_reversed
)
8497 /* Reversal of FP compares takes care -- an ordered compare
8498 becomes an unordered compare and vice versa. */
8499 if (mode
== CCFPmode
)
8500 code
= reverse_condition_maybe_unordered (code
);
8502 code
= reverse_condition (code
);
8505 if ((TARGET_SPE
&& TARGET_HARD_FLOAT
) && mode
== CCFPmode
)
8507 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
8510 /* Opposite of GT. */
8512 else if (code
== NE
)
8520 /* Not all of these are actually distinct opcodes, but
8521 we distinguish them for clarity of the resulting assembler. */
8523 ccode
= "ne"; break;
8525 ccode
= "eq"; break;
8527 ccode
= "ge"; break;
8528 case GT
: case GTU
: case UNGT
:
8529 ccode
= "gt"; break;
8531 ccode
= "le"; break;
8532 case LT
: case LTU
: case UNLT
:
8533 ccode
= "lt"; break;
8534 case UNORDERED
: ccode
= "un"; break;
8535 case ORDERED
: ccode
= "nu"; break;
8536 case UNGE
: ccode
= "nl"; break;
8537 case UNLE
: ccode
= "ng"; break;
8542 /* Maybe we have a guess as to how likely the branch is.
8543 The old mnemonics don't have a way to specify this information. */
8545 note
= find_reg_note (insn
, REG_BR_PROB
, NULL_RTX
);
8546 if (note
!= NULL_RTX
)
8548 /* PROB is the difference from 50%. */
8549 int prob
= INTVAL (XEXP (note
, 0)) - REG_BR_PROB_BASE
/ 2;
8550 bool always_hint
= rs6000_cpu
!= PROCESSOR_POWER4
;
8552 /* Only hint for highly probable/improbable branches on newer
8553 cpus as static prediction overrides processor dynamic
8554 prediction. For older cpus we may as well always hint, but
8555 assume not taken for branches that are very close to 50% as a
8556 mispredicted taken branch is more expensive than a
8557 mispredicted not-taken branch. */
8559 || abs (prob
) > REG_BR_PROB_BASE
/ 100 * 48)
8561 if (abs (prob
) > REG_BR_PROB_BASE
/ 20
8562 && ((prob
> 0) ^ need_longbranch
))
8570 s
+= sprintf (s
, "{b%sr|b%slr%s} ", ccode
, ccode
, pred
);
8572 s
+= sprintf (s
, "{b%s|b%s%s} ", ccode
, ccode
, pred
);
8574 /* We need to escape any '%' characters in the reg_names string.
8575 Assume they'd only be the first character... */
8576 if (reg_names
[cc_regno
+ CR0_REGNO
][0] == '%')
8578 s
+= sprintf (s
, "%s", reg_names
[cc_regno
+ CR0_REGNO
]);
8582 /* If the branch distance was too far, we may have to use an
8583 unconditional branch to go the distance. */
8584 if (need_longbranch
)
8585 s
+= sprintf (s
, ",$+8\n\tb %s", label
);
8587 s
+= sprintf (s
, ",%s", label
);
8593 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
8594 operands of the last comparison is nonzero/true, FALSE_COND if it
8595 is zero/false. Return 0 if the hardware has no such operation. */
8598 rs6000_emit_cmove (dest
, op
, true_cond
, false_cond
)
8604 enum rtx_code code
= GET_CODE (op
);
8605 rtx op0
= rs6000_compare_op0
;
8606 rtx op1
= rs6000_compare_op1
;
8608 enum machine_mode compare_mode
= GET_MODE (op0
);
8609 enum machine_mode result_mode
= GET_MODE (dest
);
8612 /* These modes should always match. */
8613 if (GET_MODE (op1
) != compare_mode
8614 /* In the isel case however, we can use a compare immediate, so
8615 op1 may be a small constant. */
8616 && (!TARGET_ISEL
|| !short_cint_operand (op1
, VOIDmode
)))
8618 if (GET_MODE (true_cond
) != result_mode
)
8620 if (GET_MODE (false_cond
) != result_mode
)
8623 /* First, work out if the hardware can do this at all, or
8624 if it's too slow... */
8625 if (! rs6000_compare_fp_p
)
8628 return rs6000_emit_int_cmove (dest
, op
, true_cond
, false_cond
);
8632 /* Eliminate half of the comparisons by switching operands, this
8633 makes the remaining code simpler. */
8634 if (code
== UNLT
|| code
== UNGT
|| code
== UNORDERED
|| code
== NE
8635 || code
== LTGT
|| code
== LT
)
8637 code
= reverse_condition_maybe_unordered (code
);
8639 true_cond
= false_cond
;
8643 /* UNEQ and LTGT take four instructions for a comparison with zero,
8644 it'll probably be faster to use a branch here too. */
8648 if (GET_CODE (op1
) == CONST_DOUBLE
)
8649 REAL_VALUE_FROM_CONST_DOUBLE (c1
, op1
);
8651 /* We're going to try to implement comparions by performing
8652 a subtract, then comparing against zero. Unfortunately,
8653 Inf - Inf is NaN which is not zero, and so if we don't
8654 know that the operand is finite and the comparison
8655 would treat EQ different to UNORDERED, we can't do it. */
8656 if (! flag_unsafe_math_optimizations
8657 && code
!= GT
&& code
!= UNGE
8658 && (GET_CODE (op1
) != CONST_DOUBLE
|| real_isinf (&c1
))
8659 /* Constructs of the form (a OP b ? a : b) are safe. */
8660 && ((! rtx_equal_p (op0
, false_cond
) && ! rtx_equal_p (op1
, false_cond
))
8661 || (! rtx_equal_p (op0
, true_cond
)
8662 && ! rtx_equal_p (op1
, true_cond
))))
8664 /* At this point we know we can use fsel. */
8666 /* Reduce the comparison to a comparison against zero. */
8667 temp
= gen_reg_rtx (compare_mode
);
8668 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8669 gen_rtx_MINUS (compare_mode
, op0
, op1
)));
8671 op1
= CONST0_RTX (compare_mode
);
8673 /* If we don't care about NaNs we can reduce some of the comparisons
8674 down to faster ones. */
8675 if (flag_unsafe_math_optimizations
)
8681 true_cond
= false_cond
;
8694 /* Now, reduce everything down to a GE. */
8701 temp
= gen_reg_rtx (compare_mode
);
8702 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
8707 temp
= gen_reg_rtx (compare_mode
);
8708 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_ABS (compare_mode
, op0
)));
8713 temp
= gen_reg_rtx (compare_mode
);
8714 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8715 gen_rtx_NEG (compare_mode
,
8716 gen_rtx_ABS (compare_mode
, op0
))));
8721 temp
= gen_reg_rtx (result_mode
);
8722 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8723 gen_rtx_IF_THEN_ELSE (result_mode
,
8724 gen_rtx_GE (VOIDmode
,
8726 true_cond
, false_cond
)));
8728 true_cond
= false_cond
;
8730 temp
= gen_reg_rtx (compare_mode
);
8731 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
8736 temp
= gen_reg_rtx (result_mode
);
8737 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8738 gen_rtx_IF_THEN_ELSE (result_mode
,
8739 gen_rtx_GE (VOIDmode
,
8741 true_cond
, false_cond
)));
8743 false_cond
= true_cond
;
8745 temp
= gen_reg_rtx (compare_mode
);
8746 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
8754 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
8755 gen_rtx_IF_THEN_ELSE (result_mode
,
8756 gen_rtx_GE (VOIDmode
,
8758 true_cond
, false_cond
)));
8762 /* Same as above, but for ints (isel). */
8765 rs6000_emit_int_cmove (dest
, op
, true_cond
, false_cond
)
8771 rtx condition_rtx
, cr
;
8773 /* All isel implementations thus far are 32-bits. */
8774 if (GET_MODE (rs6000_compare_op0
) != SImode
)
8777 /* We still have to do the compare, because isel doesn't do a
8778 compare, it just looks at the CRx bits set by a previous compare
8780 condition_rtx
= rs6000_generate_compare (GET_CODE (op
));
8781 cr
= XEXP (condition_rtx
, 0);
8783 if (GET_MODE (cr
) == CCmode
)
8784 emit_insn (gen_isel_signed (dest
, condition_rtx
,
8785 true_cond
, false_cond
, cr
));
8787 emit_insn (gen_isel_unsigned (dest
, condition_rtx
,
8788 true_cond
, false_cond
, cr
));
8794 output_isel (operands
)
8799 code
= GET_CODE (operands
[1]);
8800 if (code
== GE
|| code
== GEU
|| code
== LE
|| code
== LEU
|| code
== NE
)
8802 PUT_CODE (operands
[1], reverse_condition (code
));
8803 return "isel %0,%3,%2,%j1";
8806 return "isel %0,%2,%3,%j1";
8810 rs6000_emit_minmax (dest
, code
, op0
, op1
)
8816 enum machine_mode mode
= GET_MODE (op0
);
8820 if (code
== SMAX
|| code
== SMIN
)
8825 if (code
== SMAX
|| code
== UMAX
)
8826 target
= emit_conditional_move (dest
, c
, op0
, op1
, mode
,
8829 target
= emit_conditional_move (dest
, c
, op0
, op1
, mode
,
8831 if (target
== NULL_RTX
)
8834 emit_move_insn (dest
, target
);
8837 /* This page contains routines that are used to determine what the
8838 function prologue and epilogue code will do and write them out. */
8840 /* Return the first fixed-point register that is required to be
8841 saved. 32 if none. */
8844 first_reg_to_save ()
8848 /* Find lowest numbered live register. */
8849 for (first_reg
= 13; first_reg
<= 31; first_reg
++)
8850 if (regs_ever_live
[first_reg
]
8851 && (! call_used_regs
[first_reg
]
8852 || (first_reg
== RS6000_PIC_OFFSET_TABLE_REGNUM
8853 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1)
8854 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
)))))
8859 && current_function_uses_pic_offset_table
8860 && first_reg
> RS6000_PIC_OFFSET_TABLE_REGNUM
)
8861 return RS6000_PIC_OFFSET_TABLE_REGNUM
;
8867 /* Similar, for FP regs. */
8870 first_fp_reg_to_save ()
8874 /* Find lowest numbered live register. */
8875 for (first_reg
= 14 + 32; first_reg
<= 63; first_reg
++)
8876 if (regs_ever_live
[first_reg
])
8882 /* Similar, for AltiVec regs. */
8885 first_altivec_reg_to_save ()
8889 /* Stack frame remains as is unless we are in AltiVec ABI. */
8890 if (! TARGET_ALTIVEC_ABI
)
8891 return LAST_ALTIVEC_REGNO
+ 1;
8893 /* Find lowest numbered live register. */
8894 for (i
= FIRST_ALTIVEC_REGNO
+ 20; i
<= LAST_ALTIVEC_REGNO
; ++i
)
8895 if (regs_ever_live
[i
])
8901 /* Return a 32-bit mask of the AltiVec registers we need to set in
8902 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
8903 the 32-bit word is 0. */
8906 compute_vrsave_mask ()
8908 unsigned int i
, mask
= 0;
8910 /* First, find out if we use _any_ altivec registers. */
8911 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
8912 if (regs_ever_live
[i
])
8913 mask
|= ALTIVEC_REG_BIT (i
);
8918 /* Next, add all registers that are call-clobbered. We do this
8919 because post-reload register optimizers such as regrename_optimize
8920 may choose to use them. They never change the register class
8921 chosen by reload, so cannot create new uses of altivec registers
8922 if there were none before, so the early exit above is safe. */
8923 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
8924 altivec registers not saved in the mask, which might well make the
8925 adjustments below more effective in eliding the save/restore of
8926 VRSAVE in small functions. */
8927 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
8928 if (call_used_regs
[i
])
8929 mask
|= ALTIVEC_REG_BIT (i
);
8931 /* Next, remove the argument registers from the set. These must
8932 be in the VRSAVE mask set by the caller, so we don't need to add
8933 them in again. More importantly, the mask we compute here is
8934 used to generate CLOBBERs in the set_vrsave insn, and we do not
8935 wish the argument registers to die. */
8936 for (i
= cfun
->args_info
.vregno
; i
>= ALTIVEC_ARG_MIN_REG
; --i
)
8937 mask
&= ~ALTIVEC_REG_BIT (i
);
8939 /* Similarly, remove the return value from the set. */
8942 diddle_return_value (is_altivec_return_reg
, &yes
);
8944 mask
&= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN
);
8951 is_altivec_return_reg (reg
, xyes
)
8955 bool *yes
= (bool *) xyes
;
8956 if (REGNO (reg
) == ALTIVEC_ARG_RETURN
)
8961 /* Calculate the stack information for the current function. This is
8962 complicated by having two separate calling sequences, the AIX calling
8963 sequence and the V.4 calling sequence.
8965 AIX (and Darwin/Mac OS X) stack frames look like:
8967 SP----> +---------------------------------------+
8968 | back chain to caller | 0 0
8969 +---------------------------------------+
8970 | saved CR | 4 8 (8-11)
8971 +---------------------------------------+
8973 +---------------------------------------+
8974 | reserved for compilers | 12 24
8975 +---------------------------------------+
8976 | reserved for binders | 16 32
8977 +---------------------------------------+
8978 | saved TOC pointer | 20 40
8979 +---------------------------------------+
8980 | Parameter save area (P) | 24 48
8981 +---------------------------------------+
8982 | Alloca space (A) | 24+P etc.
8983 +---------------------------------------+
8984 | Local variable space (L) | 24+P+A
8985 +---------------------------------------+
8986 | Float/int conversion temporary (X) | 24+P+A+L
8987 +---------------------------------------+
8988 | Save area for AltiVec registers (W) | 24+P+A+L+X
8989 +---------------------------------------+
8990 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
8991 +---------------------------------------+
8992 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
8993 +---------------------------------------+
8994 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
8995 +---------------------------------------+
8996 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
8997 +---------------------------------------+
8998 old SP->| back chain to caller's caller |
8999 +---------------------------------------+
9001 The required alignment for AIX configurations is two words (i.e., 8
9005 V.4 stack frames look like:
9007 SP----> +---------------------------------------+
9008 | back chain to caller | 0
9009 +---------------------------------------+
9010 | caller's saved LR | 4
9011 +---------------------------------------+
9012 | Parameter save area (P) | 8
9013 +---------------------------------------+
9014 | Alloca space (A) | 8+P
9015 +---------------------------------------+
9016 | Varargs save area (V) | 8+P+A
9017 +---------------------------------------+
9018 | Local variable space (L) | 8+P+A+V
9019 +---------------------------------------+
9020 | Float/int conversion temporary (X) | 8+P+A+V+L
9021 +---------------------------------------+
9022 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
9023 +---------------------------------------+
9024 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
9025 +---------------------------------------+
9026 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
9027 +---------------------------------------+
9028 | SPE: area for 64-bit GP registers |
9029 +---------------------------------------+
9030 | SPE alignment padding |
9031 +---------------------------------------+
9032 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
9033 +---------------------------------------+
9034 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
9035 +---------------------------------------+
9036 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
9037 +---------------------------------------+
9038 old SP->| back chain to caller's caller |
9039 +---------------------------------------+
9041 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
9042 given. (But note below and in sysv4.h that we require only 8 and
9043 may round up the size of our stack frame anyways. The historical
9044 reason is early versions of powerpc-linux which didn't properly
9045 align the stack at program startup. A happy side-effect is that
9046 -mno-eabi libraries can be used with -meabi programs.)
9048 The EABI configuration defaults to the V.4 layout, unless
9049 -mcall-aix is used, in which case the AIX layout is used. However,
9050 the stack alignment requirements may differ. If -mno-eabi is not
9051 given, the required stack alignment is 8 bytes; if -mno-eabi is
9052 given, the required alignment is 16 bytes. (But see V.4 comment
9055 #ifndef ABI_STACK_BOUNDARY
9056 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
9060 rs6000_stack_info ()
9062 static rs6000_stack_t info
, zero_info
;
9063 rs6000_stack_t
*info_ptr
= &info
;
9064 int reg_size
= TARGET_POWERPC64
? 8 : 4;
9065 enum rs6000_abi abi
;
9069 /* Zero all fields portably. */
9072 /* Select which calling sequence. */
9073 info_ptr
->abi
= abi
= DEFAULT_ABI
;
9075 /* Calculate which registers need to be saved & save area size. */
9076 info_ptr
->first_gp_reg_save
= first_reg_to_save ();
9077 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
9078 even if it currently looks like we won't. */
9079 if (((TARGET_TOC
&& TARGET_MINIMAL_TOC
)
9080 || (flag_pic
== 1 && abi
== ABI_V4
)
9081 || (flag_pic
&& abi
== ABI_DARWIN
))
9082 && info_ptr
->first_gp_reg_save
> RS6000_PIC_OFFSET_TABLE_REGNUM
)
9083 info_ptr
->gp_size
= reg_size
* (32 - RS6000_PIC_OFFSET_TABLE_REGNUM
);
9085 info_ptr
->gp_size
= reg_size
* (32 - info_ptr
->first_gp_reg_save
);
9087 /* For the SPE, we have an additional upper 32-bits on each GPR.
9088 Ideally we should save the entire 64-bits only when the upper
9089 half is used in SIMD instructions. Since we only record
9090 registers live (not the size they are used in), this proves
9091 difficult because we'd have to traverse the instruction chain at
9092 the right time, taking reload into account. This is a real pain,
9093 so we opt to save the GPRs in 64-bits always. Anyone overly
9094 concerned with frame size can fix this. ;-).
9096 So... since we save all GPRs (except the SP) in 64-bits, the
9097 traditional GP save area will be empty. */
9099 info_ptr
->gp_size
= 0;
9101 info_ptr
->first_fp_reg_save
= first_fp_reg_to_save ();
9102 info_ptr
->fp_size
= 8 * (64 - info_ptr
->first_fp_reg_save
);
9104 info_ptr
->first_altivec_reg_save
= first_altivec_reg_to_save ();
9105 info_ptr
->altivec_size
= 16 * (LAST_ALTIVEC_REGNO
+ 1
9106 - info_ptr
->first_altivec_reg_save
);
9108 /* Does this function call anything? */
9109 info_ptr
->calls_p
= (! current_function_is_leaf
9110 || cfun
->machine
->ra_needs_full_frame
);
9112 /* Determine if we need to save the link register. */
9113 if (rs6000_ra_ever_killed ()
9114 || (DEFAULT_ABI
== ABI_AIX
&& current_function_profile
)
9115 #ifdef TARGET_RELOCATABLE
9116 || (TARGET_RELOCATABLE
&& (get_pool_size () != 0))
9118 || (info_ptr
->first_fp_reg_save
!= 64
9119 && !FP_SAVE_INLINE (info_ptr
->first_fp_reg_save
))
9120 || info_ptr
->first_altivec_reg_save
<= LAST_ALTIVEC_REGNO
9121 || (abi
== ABI_V4
&& current_function_calls_alloca
)
9122 || (DEFAULT_ABI
== ABI_DARWIN
9124 && current_function_uses_pic_offset_table
)
9125 || info_ptr
->calls_p
)
9127 info_ptr
->lr_save_p
= 1;
9128 regs_ever_live
[LINK_REGISTER_REGNUM
] = 1;
9131 /* Determine if we need to save the condition code registers. */
9132 if (regs_ever_live
[CR2_REGNO
]
9133 || regs_ever_live
[CR3_REGNO
]
9134 || regs_ever_live
[CR4_REGNO
])
9136 info_ptr
->cr_save_p
= 1;
9138 info_ptr
->cr_size
= reg_size
;
9141 /* If the current function calls __builtin_eh_return, then we need
9142 to allocate stack space for registers that will hold data for
9143 the exception handler. */
9144 if (current_function_calls_eh_return
)
9147 for (i
= 0; EH_RETURN_DATA_REGNO (i
) != INVALID_REGNUM
; ++i
)
9150 /* SPE saves EH registers in 64-bits. */
9151 ehrd_size
= i
* (TARGET_SPE_ABI
? UNITS_PER_SPE_WORD
: UNITS_PER_WORD
);
9156 /* Determine various sizes. */
9157 info_ptr
->reg_size
= reg_size
;
9158 info_ptr
->fixed_size
= RS6000_SAVE_AREA
;
9159 info_ptr
->varargs_size
= RS6000_VARARGS_AREA
;
9160 info_ptr
->vars_size
= RS6000_ALIGN (get_frame_size (), 8);
9161 info_ptr
->parm_size
= RS6000_ALIGN (current_function_outgoing_args_size
,
9165 info_ptr
->spe_gp_size
= 8 * (32 - info_ptr
->first_gp_reg_save
);
9167 info_ptr
->spe_gp_size
= 0;
9169 if (TARGET_ALTIVEC_ABI
&& TARGET_ALTIVEC_VRSAVE
)
9171 info_ptr
->vrsave_mask
= compute_vrsave_mask ();
9172 info_ptr
->vrsave_size
= info_ptr
->vrsave_mask
? 4 : 0;
9176 info_ptr
->vrsave_mask
= 0;
9177 info_ptr
->vrsave_size
= 0;
9180 /* Calculate the offsets. */
9188 case ABI_AIX_NODESC
:
9190 info_ptr
->fp_save_offset
= - info_ptr
->fp_size
;
9191 info_ptr
->gp_save_offset
= info_ptr
->fp_save_offset
- info_ptr
->gp_size
;
9193 if (TARGET_ALTIVEC_ABI
)
9195 info_ptr
->vrsave_save_offset
9196 = info_ptr
->gp_save_offset
- info_ptr
->vrsave_size
;
9198 /* Align stack so vector save area is on a quadword boundary. */
9199 if (info_ptr
->altivec_size
!= 0)
9200 info_ptr
->altivec_padding_size
9201 = 16 - (-info_ptr
->vrsave_save_offset
% 16);
9203 info_ptr
->altivec_padding_size
= 0;
9205 info_ptr
->altivec_save_offset
9206 = info_ptr
->vrsave_save_offset
9207 - info_ptr
->altivec_padding_size
9208 - info_ptr
->altivec_size
;
9210 /* Adjust for AltiVec case. */
9211 info_ptr
->ehrd_offset
= info_ptr
->altivec_save_offset
- ehrd_size
;
9214 info_ptr
->ehrd_offset
= info_ptr
->gp_save_offset
- ehrd_size
;
9215 info_ptr
->cr_save_offset
= reg_size
; /* first word when 64-bit. */
9216 info_ptr
->lr_save_offset
= 2*reg_size
;
9220 info_ptr
->fp_save_offset
= - info_ptr
->fp_size
;
9221 info_ptr
->gp_save_offset
= info_ptr
->fp_save_offset
- info_ptr
->gp_size
;
9222 info_ptr
->cr_save_offset
= info_ptr
->gp_save_offset
- info_ptr
->cr_size
;
9226 /* Align stack so SPE GPR save area is aligned on a
9227 double-word boundary. */
9228 if (info_ptr
->spe_gp_size
!= 0)
9229 info_ptr
->spe_padding_size
9230 = 8 - (-info_ptr
->cr_save_offset
% 8);
9232 info_ptr
->spe_padding_size
= 0;
9234 info_ptr
->spe_gp_save_offset
9235 = info_ptr
->cr_save_offset
9236 - info_ptr
->spe_padding_size
9237 - info_ptr
->spe_gp_size
;
9239 /* Adjust for SPE case. */
9240 info_ptr
->toc_save_offset
9241 = info_ptr
->spe_gp_save_offset
- info_ptr
->toc_size
;
9243 else if (TARGET_ALTIVEC_ABI
)
9245 info_ptr
->vrsave_save_offset
9246 = info_ptr
->cr_save_offset
- info_ptr
->vrsave_size
;
9248 /* Align stack so vector save area is on a quadword boundary. */
9249 if (info_ptr
->altivec_size
!= 0)
9250 info_ptr
->altivec_padding_size
9251 = 16 - (-info_ptr
->vrsave_save_offset
% 16);
9253 info_ptr
->altivec_padding_size
= 0;
9255 info_ptr
->altivec_save_offset
9256 = info_ptr
->vrsave_save_offset
9257 - info_ptr
->altivec_padding_size
9258 - info_ptr
->altivec_size
;
9260 /* Adjust for AltiVec case. */
9261 info_ptr
->toc_save_offset
9262 = info_ptr
->altivec_save_offset
- info_ptr
->toc_size
;
9265 info_ptr
->toc_save_offset
= info_ptr
->cr_save_offset
- info_ptr
->toc_size
;
9266 info_ptr
->ehrd_offset
= info_ptr
->toc_save_offset
- ehrd_size
;
9267 info_ptr
->lr_save_offset
= reg_size
;
9271 info_ptr
->save_size
= RS6000_ALIGN (info_ptr
->fp_size
9273 + info_ptr
->altivec_size
9274 + info_ptr
->altivec_padding_size
9275 + info_ptr
->vrsave_size
9276 + info_ptr
->spe_gp_size
9277 + info_ptr
->spe_padding_size
9281 + info_ptr
->vrsave_size
9282 + info_ptr
->toc_size
,
9283 (TARGET_ALTIVEC_ABI
|| ABI_DARWIN
)
9286 total_raw_size
= (info_ptr
->vars_size
9287 + info_ptr
->parm_size
9288 + info_ptr
->save_size
9289 + info_ptr
->varargs_size
9290 + info_ptr
->fixed_size
);
9292 info_ptr
->total_size
=
9293 RS6000_ALIGN (total_raw_size
, ABI_STACK_BOUNDARY
/ BITS_PER_UNIT
);
9295 /* Determine if we need to allocate any stack frame:
9297 For AIX we need to push the stack if a frame pointer is needed
9298 (because the stack might be dynamically adjusted), if we are
9299 debugging, if we make calls, or if the sum of fp_save, gp_save,
9300 and local variables are more than the space needed to save all
9301 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
9302 + 18*8 = 288 (GPR13 reserved).
9304 For V.4 we don't have the stack cushion that AIX uses, but assume
9305 that the debugger can handle stackless frames. */
9307 if (info_ptr
->calls_p
)
9308 info_ptr
->push_p
= 1;
9310 else if (abi
== ABI_V4
)
9311 info_ptr
->push_p
= total_raw_size
> info_ptr
->fixed_size
;
9314 info_ptr
->push_p
= (frame_pointer_needed
9315 || (abi
!= ABI_DARWIN
&& write_symbols
!= NO_DEBUG
)
9316 || ((total_raw_size
- info_ptr
->fixed_size
)
9317 > (TARGET_32BIT
? 220 : 288)));
9319 /* Zero offsets if we're not saving those registers. */
9320 if (info_ptr
->fp_size
== 0)
9321 info_ptr
->fp_save_offset
= 0;
9323 if (info_ptr
->gp_size
== 0)
9324 info_ptr
->gp_save_offset
= 0;
9326 if (! TARGET_ALTIVEC_ABI
|| info_ptr
->altivec_size
== 0)
9327 info_ptr
->altivec_save_offset
= 0;
9329 if (! TARGET_ALTIVEC_ABI
|| info_ptr
->vrsave_mask
== 0)
9330 info_ptr
->vrsave_save_offset
= 0;
9332 if (! TARGET_SPE_ABI
|| info_ptr
->spe_gp_size
== 0)
9333 info_ptr
->spe_gp_save_offset
= 0;
9335 if (! info_ptr
->lr_save_p
)
9336 info_ptr
->lr_save_offset
= 0;
9338 if (! info_ptr
->cr_save_p
)
9339 info_ptr
->cr_save_offset
= 0;
9341 if (! info_ptr
->toc_save_p
)
9342 info_ptr
->toc_save_offset
= 0;
9348 debug_stack_info (info
)
9349 rs6000_stack_t
*info
;
9351 const char *abi_string
;
9354 info
= rs6000_stack_info ();
9356 fprintf (stderr
, "\nStack information for function %s:\n",
9357 ((current_function_decl
&& DECL_NAME (current_function_decl
))
9358 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl
))
9363 default: abi_string
= "Unknown"; break;
9364 case ABI_NONE
: abi_string
= "NONE"; break;
9366 case ABI_AIX_NODESC
: abi_string
= "AIX"; break;
9367 case ABI_DARWIN
: abi_string
= "Darwin"; break;
9368 case ABI_V4
: abi_string
= "V.4"; break;
9371 fprintf (stderr
, "\tABI = %5s\n", abi_string
);
9373 if (TARGET_ALTIVEC_ABI
)
9374 fprintf (stderr
, "\tALTIVEC ABI extensions enabled.\n");
9377 fprintf (stderr
, "\tSPE ABI extensions enabled.\n");
9379 if (info
->first_gp_reg_save
!= 32)
9380 fprintf (stderr
, "\tfirst_gp_reg_save = %5d\n", info
->first_gp_reg_save
);
9382 if (info
->first_fp_reg_save
!= 64)
9383 fprintf (stderr
, "\tfirst_fp_reg_save = %5d\n", info
->first_fp_reg_save
);
9385 if (info
->first_altivec_reg_save
<= LAST_ALTIVEC_REGNO
)
9386 fprintf (stderr
, "\tfirst_altivec_reg_save = %5d\n",
9387 info
->first_altivec_reg_save
);
9389 if (info
->lr_save_p
)
9390 fprintf (stderr
, "\tlr_save_p = %5d\n", info
->lr_save_p
);
9392 if (info
->cr_save_p
)
9393 fprintf (stderr
, "\tcr_save_p = %5d\n", info
->cr_save_p
);
9395 if (info
->toc_save_p
)
9396 fprintf (stderr
, "\ttoc_save_p = %5d\n", info
->toc_save_p
);
9398 if (info
->vrsave_mask
)
9399 fprintf (stderr
, "\tvrsave_mask = 0x%x\n", info
->vrsave_mask
);
9402 fprintf (stderr
, "\tpush_p = %5d\n", info
->push_p
);
9405 fprintf (stderr
, "\tcalls_p = %5d\n", info
->calls_p
);
9407 if (info
->gp_save_offset
)
9408 fprintf (stderr
, "\tgp_save_offset = %5d\n", info
->gp_save_offset
);
9410 if (info
->fp_save_offset
)
9411 fprintf (stderr
, "\tfp_save_offset = %5d\n", info
->fp_save_offset
);
9413 if (info
->altivec_save_offset
)
9414 fprintf (stderr
, "\taltivec_save_offset = %5d\n",
9415 info
->altivec_save_offset
);
9417 if (info
->spe_gp_save_offset
)
9418 fprintf (stderr
, "\tspe_gp_save_offset = %5d\n",
9419 info
->spe_gp_save_offset
);
9421 if (info
->vrsave_save_offset
)
9422 fprintf (stderr
, "\tvrsave_save_offset = %5d\n",
9423 info
->vrsave_save_offset
);
9425 if (info
->lr_save_offset
)
9426 fprintf (stderr
, "\tlr_save_offset = %5d\n", info
->lr_save_offset
);
9428 if (info
->cr_save_offset
)
9429 fprintf (stderr
, "\tcr_save_offset = %5d\n", info
->cr_save_offset
);
9431 if (info
->toc_save_offset
)
9432 fprintf (stderr
, "\ttoc_save_offset = %5d\n", info
->toc_save_offset
);
9434 if (info
->varargs_save_offset
)
9435 fprintf (stderr
, "\tvarargs_save_offset = %5d\n", info
->varargs_save_offset
);
9437 if (info
->total_size
)
9438 fprintf (stderr
, "\ttotal_size = %5d\n", info
->total_size
);
9440 if (info
->varargs_size
)
9441 fprintf (stderr
, "\tvarargs_size = %5d\n", info
->varargs_size
);
9443 if (info
->vars_size
)
9444 fprintf (stderr
, "\tvars_size = %5d\n", info
->vars_size
);
9446 if (info
->parm_size
)
9447 fprintf (stderr
, "\tparm_size = %5d\n", info
->parm_size
);
9449 if (info
->fixed_size
)
9450 fprintf (stderr
, "\tfixed_size = %5d\n", info
->fixed_size
);
9453 fprintf (stderr
, "\tgp_size = %5d\n", info
->gp_size
);
9455 if (info
->spe_gp_size
)
9456 fprintf (stderr
, "\tspe_gp_size = %5d\n", info
->spe_gp_size
);
9459 fprintf (stderr
, "\tfp_size = %5d\n", info
->fp_size
);
9461 if (info
->altivec_size
)
9462 fprintf (stderr
, "\taltivec_size = %5d\n", info
->altivec_size
);
9464 if (info
->vrsave_size
)
9465 fprintf (stderr
, "\tvrsave_size = %5d\n", info
->vrsave_size
);
9467 if (info
->altivec_padding_size
)
9468 fprintf (stderr
, "\taltivec_padding_size= %5d\n",
9469 info
->altivec_padding_size
);
9471 if (info
->spe_padding_size
)
9472 fprintf (stderr
, "\tspe_padding_size = %5d\n",
9473 info
->spe_padding_size
);
9476 fprintf (stderr
, "\tlr_size = %5d\n", info
->lr_size
);
9479 fprintf (stderr
, "\tcr_size = %5d\n", info
->cr_size
);
9482 fprintf (stderr
, "\ttoc_size = %5d\n", info
->toc_size
);
9484 if (info
->save_size
)
9485 fprintf (stderr
, "\tsave_size = %5d\n", info
->save_size
);
9487 if (info
->reg_size
!= 4)
9488 fprintf (stderr
, "\treg_size = %5d\n", info
->reg_size
);
9490 fprintf (stderr
, "\n");
9494 rs6000_return_addr (count
, frame
)
9498 /* Currently we don't optimize very well between prolog and body
9499 code and for PIC code the code can be actually quite bad, so
9500 don't try to be too clever here. */
9501 if (count
!= 0 || flag_pic
!= 0)
9503 cfun
->machine
->ra_needs_full_frame
= 1;
9510 plus_constant (copy_to_reg
9511 (gen_rtx_MEM (Pmode
,
9512 memory_address (Pmode
, frame
))),
9513 RETURN_ADDRESS_OFFSET
)));
9516 return get_hard_reg_initial_val (Pmode
, LINK_REGISTER_REGNUM
);
9519 /* Say whether a function is a candidate for sibcall handling or not.
9520 We do not allow indirect calls to be optimized into sibling calls.
9521 Also, we can't do it if there are any vector parameters; there's
9522 nowhere to put the VRsave code so it works; note that functions with
9523 vector parameters are required to have a prototype, so the argument
9524 type info must be available here. (The tail recursion case can work
9525 with vector parameters, but there's no way to distinguish here.) */
9527 rs6000_function_ok_for_sibcall (decl
, exp
)
9529 tree exp ATTRIBUTE_UNUSED
;
9534 if (TARGET_ALTIVEC_VRSAVE
)
9536 for (type
= TYPE_ARG_TYPES (TREE_TYPE (decl
));
9537 type
; type
= TREE_CHAIN (type
))
9539 if (TREE_CODE (TREE_VALUE (type
)) == VECTOR_TYPE
)
9543 if (DEFAULT_ABI
== ABI_DARWIN
9544 || (*targetm
.binds_local_p
) (decl
))
9546 tree attr_list
= TYPE_ATTRIBUTES (TREE_TYPE (decl
));
9548 if (!lookup_attribute ("longcall", attr_list
)
9549 || lookup_attribute ("shortcall", attr_list
))
9557 rs6000_ra_ever_killed ()
9563 /* Irritatingly, there are two kinds of thunks -- those created with
9564 TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
9565 through the regular part of the compiler. This is a very hacky
9566 way to tell them apart. */
9567 if (current_function_is_thunk
&& !no_new_pseudos
)
9570 /* regs_ever_live has LR marked as used if any sibcalls are present,
9571 but this should not force saving and restoring in the
9572 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
9573 clobbers LR, so that is inappropriate. */
9575 /* Also, the prologue can generate a store into LR that
9576 doesn't really count, like this:
9579 bcl to set PIC register
9583 When we're called from the epilogue, we need to avoid counting
9586 push_topmost_sequence ();
9588 pop_topmost_sequence ();
9589 reg
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
9591 for (insn
= NEXT_INSN (top
); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
9595 if (FIND_REG_INC_NOTE (insn
, reg
))
9597 else if (GET_CODE (insn
) == CALL_INSN
9598 && !SIBLING_CALL_P (insn
))
9600 else if (set_of (reg
, insn
) != NULL_RTX
9601 && !prologue_epilogue_contains (insn
))
9608 /* Add a REG_MAYBE_DEAD note to the insn. */
9610 rs6000_maybe_dead (insn
)
9613 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
,
9618 /* Emit instructions needed to load the TOC register.
9619 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9620 a constant pool; or for SVR4 -fpic. */
9623 rs6000_emit_load_toc_table (fromprolog
)
9627 dest
= gen_rtx_REG (Pmode
, RS6000_PIC_OFFSET_TABLE_REGNUM
);
9629 if (TARGET_ELF
&& DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1)
9631 rtx temp
= (fromprolog
9632 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
9633 : gen_reg_rtx (Pmode
));
9634 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp
)));
9635 rs6000_maybe_dead (emit_move_insn (dest
, temp
));
9637 else if (TARGET_ELF
&& DEFAULT_ABI
!= ABI_AIX
&& flag_pic
== 2)
9640 rtx tempLR
= (fromprolog
9641 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
9642 : gen_reg_rtx (Pmode
));
9643 rtx temp0
= (fromprolog
9644 ? gen_rtx_REG (Pmode
, 0)
9645 : gen_reg_rtx (Pmode
));
9648 /* possibly create the toc section */
9649 if (! toc_initialized
)
9652 function_section (current_function_decl
);
9659 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCF", rs6000_pic_labelno
);
9660 symF
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9662 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCL", rs6000_pic_labelno
);
9663 symL
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9665 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR
,
9667 rs6000_maybe_dead (emit_move_insn (dest
, tempLR
));
9668 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0
, dest
,
9675 static int reload_toc_labelno
= 0;
9677 tocsym
= gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
);
9679 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCG", reload_toc_labelno
++);
9680 symF
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9682 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR
,
9685 rs6000_maybe_dead (emit_move_insn (dest
, tempLR
));
9686 rs6000_maybe_dead (emit_move_insn (temp0
,
9687 gen_rtx_MEM (Pmode
, dest
)));
9689 rs6000_maybe_dead (emit_insn (gen_addsi3 (dest
, temp0
, dest
)));
9691 else if (TARGET_ELF
&& !TARGET_AIX
&& flag_pic
== 0 && TARGET_MINIMAL_TOC
)
9693 /* This is for AIX code running in non-PIC ELF32. */
9696 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCTOC", 1);
9697 realsym
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9699 rs6000_maybe_dead (emit_insn (gen_elf_high (dest
, realsym
)));
9700 rs6000_maybe_dead (emit_insn (gen_elf_low (dest
, dest
, realsym
)));
9702 else if (DEFAULT_ABI
== ABI_AIX
)
9705 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest
)));
9707 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest
)));
9714 get_TOC_alias_set ()
9716 static int set
= -1;
9718 set
= new_alias_set ();
9722 /* This retuns nonzero if the current function uses the TOC. This is
9723 determined by the presence of (unspec ... 7), which is generated by
9724 the various load_toc_* patterns. */
9731 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
9734 rtx pat
= PATTERN (insn
);
9737 if (GET_CODE (pat
) == PARALLEL
)
9738 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
9739 if (GET_CODE (XVECEXP (PATTERN (insn
), 0, i
)) == UNSPEC
9740 && XINT (XVECEXP (PATTERN (insn
), 0, i
), 1) == 7)
9747 create_TOC_reference (symbol
)
9750 return gen_rtx_PLUS (Pmode
,
9751 gen_rtx_REG (Pmode
, TOC_REGISTER
),
9752 gen_rtx_CONST (Pmode
,
9753 gen_rtx_MINUS (Pmode
, symbol
,
9754 gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
))));
9758 /* __throw will restore its own return address to be the same as the
9759 return address of the function that the throw is being made to.
9760 This is unfortunate, because we want to check the original
9761 return address to see if we need to restore the TOC.
9762 So we have to squirrel it away here.
9763 This is used only in compiling __throw and __rethrow.
9765 Most of this code should be removed by CSE. */
9766 static rtx insn_after_throw
;
9768 /* This does the saving... */
9770 rs6000_aix_emit_builtin_unwind_init ()
9773 rtx stack_top
= gen_reg_rtx (Pmode
);
9774 rtx opcode_addr
= gen_reg_rtx (Pmode
);
9776 insn_after_throw
= gen_reg_rtx (SImode
);
9778 mem
= gen_rtx_MEM (Pmode
, hard_frame_pointer_rtx
);
9779 emit_move_insn (stack_top
, mem
);
9781 mem
= gen_rtx_MEM (Pmode
,
9782 gen_rtx_PLUS (Pmode
, stack_top
,
9783 GEN_INT (2 * GET_MODE_SIZE (Pmode
))));
9784 emit_move_insn (opcode_addr
, mem
);
9785 emit_move_insn (insn_after_throw
, gen_rtx_MEM (SImode
, opcode_addr
));
/* Emit insns to _restore_ the TOC register, at runtime (specifically
   in _eh.o).  Only used on AIX.

   The idea is that on AIX, function calls look like this:
	bl  somefunction-trampoline
	lwz r2,20(sp)

   and later,
	somefunction-trampoline:
	stw r2,20(sp)
	 ... load function address in the count register ...
	bctr
   or like this, if the linker determines that this is not a cross-module call
   and so the TOC need not be restored:
	bl  somefunction
	nop
   or like this, if the compiler could determine that this is not a
   cross-module call:
	bl  somefunction
   now, the tricky bit here is that register 2 is saved and restored
   by the _linker_, so we can't readily generate debugging information
   for it.  So we need to go back up the call chain looking at the
   insns at return addresses to see which calls saved the TOC register
   and so see where it gets restored from.

   Oh, and all this gets done in RTL inside the eh_epilogue pattern,
   just before the actual epilogue.

   On the bright side, this incurs no space or time overhead unless an
   exception is thrown, except for the extra code in libgcc.a.

   The parameter STACKSIZE is a register containing (at runtime)
   the amount to be popped off the stack in addition to the stack frame
   of this routine (which will be __throw or __rethrow, and so is
   guaranteed to have a stack frame).  */

void
rs6000_emit_eh_toc_restore (stacksize)
     rtx stacksize;
{
  rtx top_of_stack;
  rtx bottom_of_stack = gen_reg_rtx (Pmode);
  rtx tocompare = gen_reg_rtx (SImode);
  rtx opcode = gen_reg_rtx (SImode);
  rtx opcode_addr = gen_reg_rtx (Pmode);
  rtx mem;
  rtx loop_start = gen_label_rtx ();
  rtx no_toc_restore_needed = gen_label_rtx ();
  rtx loop_exit = gen_label_rtx ();

  /* Start the walk at our own frame's back chain.  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  /* The walk stops once we have popped STACKSIZE bytes beyond our own
     frame.  */
  top_of_stack = expand_binop (Pmode, add_optab,
			       bottom_of_stack, stacksize,
			       NULL_RTX, 1, OPTAB_WIDEN);

  /* Opcode of the TOC-restore insn that follows a cross-module call:
     "lwz r2,20(r1)" (32-bit) or "ld r2,40(r1)" (64-bit).
     NOTE(review): encoding meaning inferred from the AIX linkage
     convention -- verify against the ABI.  */
  emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
					   : 0xE8410028, SImode));

  /* rs6000_aix_emit_builtin_unwind_init must have run first.
     NOTE(review): the abort call was dropped by the extraction; restored
     from the surrounding control flow.  */
  if (insn_after_throw == NULL_RTX)
    abort ();
  emit_move_insn (opcode, insn_after_throw);

  emit_note (NULL, NOTE_INSN_LOOP_BEG);
  emit_label (loop_start);

  /* If the insn at the return address is not the TOC-restore opcode,
     this call did not clobber r2 -- skip the reload.  */
  do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
			   SImode, NULL_RTX, NULL_RTX,
			   no_toc_restore_needed);

  /* The TOC save slot is 5 pointer-words above the back chain.  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (5 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (gen_rtx_REG (Pmode, 2), mem);

  emit_label (no_toc_restore_needed);
  /* Stop once the walk reaches the precomputed top of stack.  */
  do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
			   Pmode, NULL_RTX, NULL_RTX,
			   loop_exit);

  /* Advance up the call chain: follow the back chain, then fetch the
     instruction at the next frame's saved return address.  */
  mem = gen_rtx_MEM (Pmode, bottom_of_stack);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));

  emit_note (NULL, NOTE_INSN_LOOP_CONT);
  emit_jump (loop_start);
  emit_note (NULL, NOTE_INSN_LOOP_END);
  emit_label (loop_exit);
}
9885 #endif /* TARGET_AIX */
/* This ties together stack memory (MEM with an alias set of
   rs6000_sr_alias_set) and the change to the stack pointer.
   Emits a stack_tie blockage insn on a BLKmode MEM at the stack pointer
   so the scheduler cannot move register saves/restores across stack
   pointer adjustments.  */
static void
rs6000_emit_stack_tie ()
{
  rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));

  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_insn (gen_stack_tie (mem));
}
/* Emit the correct code for allocating stack space, as insns.
   If COPY_R12, make sure a copy of the old frame is left in r12.
   The generated code may use hard register 0 as a temporary.

   SIZE is the number of bytes to allocate (the stack grows downward, so
   the stack pointer is decremented by SIZE).  Also emits a -fstack-limit
   trap check when requested, and attaches a REG_FRAME_RELATED_EXPR note
   describing the adjustment for the unwinder.  */

static void
rs6000_emit_allocate_stack (size, copy_r12)
     HOST_WIDE_INT size;
     int copy_r12;
{
  rtx insn;
  rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx tmp_reg = gen_rtx_REG (Pmode, 0);
  rtx todec = GEN_INT (-size);

  if (current_function_limit_stack)
    {
      if (REG_P (stack_limit_rtx)
	  && REGNO (stack_limit_rtx) > 1
	  && REGNO (stack_limit_rtx) <= 31)
	{
	  /* Limit is in a register: trap if sp - size < limit + size...
	     i.e. compute limit+size into r0 and trap on sp < r0.
	     NOTE(review): the operand lines of the add and the trap
	     comparison operand were dropped by the extraction; restored
	     from the parallel SYMBOL_REF branch below.  */
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size))
		     : gen_adddi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size)));

	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
	       && TARGET_32BIT
	       && DEFAULT_ABI == ABI_V4)
	{
	  /* Limit is a symbol: materialize symbol+size with lis/addi,
	     then trap on sp < limit.  */
	  rtx toload = gen_rtx_CONST (VOIDmode,
				      gen_rtx_PLUS (Pmode,
						    stack_limit_rtx,
						    GEN_INT (size)));

	  emit_insn (gen_elf_high (tmp_reg, toload));
	  emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else
	warning ("stack limit expression is not supported");
    }

  /* Keep the old stack pointer in r12 when the caller needs it, or when
     we cannot use a store-with-update and must store the back chain
     explicitly below.  */
  if (copy_r12 || ! TARGET_UPDATE)
    emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);

  /* NOTE(review): the TARGET_UPDATE / size > 32767 branch structure was
     dropped by the extraction and is restored here; the surviving
     try_split and mov*_update fragments fix its shape.  */
  if (TARGET_UPDATE)
    {
      if (size > 32767)
	{
	  /* Need a note here so that try_split doesn't get confused.  */
	  if (get_last_insn () == NULL_RTX)
	    emit_note (0, NOTE_INSN_DELETED);
	  insn = emit_move_insn (tmp_reg, todec);
	  try_split (PATTERN (insn), insn, 0);
	  todec = tmp_reg;
	}

      /* stwu/stdu: decrement sp and store the back chain in one insn.  */
      insn = emit_insn (TARGET_32BIT
			? gen_movsi_update (stack_reg, stack_reg,
					    todec, stack_reg)
			: gen_movdi_update (stack_reg, stack_reg,
					    todec, stack_reg));
    }
  else
    {
      /* No update form: adjust sp, then store the saved sp (r12) as the
	 back chain.  */
      insn = emit_insn (TARGET_32BIT
			? gen_addsi3 (stack_reg, stack_reg, todec)
			: gen_adddi3 (stack_reg, stack_reg, todec));
      emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
		      gen_rtx_REG (Pmode, 12));
    }

  /* Tell the unwinder this insn is "sp = sp - size".  */
  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) =
    gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
		       gen_rtx_SET (VOIDmode, stack_reg,
				    gen_rtx_PLUS (Pmode, stack_reg,
						  GEN_INT (-size))),
		       REG_NOTES (insn));
}
/* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
   knows that:

     (mem (plus (blah) (regXX)))

   is really:

     (mem (plus (blah) (const VALUE_OF_REGXX))).

   INSN is the AltiVec save insn, REG the index register it used, and
   VAL the constant offset that REG held at that point.  */

static void
altivec_frame_fixup (insn, reg, val)
     rtx insn, reg;
     HOST_WIDE_INT val;
{
  rtx real;

  real = copy_rtx (PATTERN (insn));

  /* Substitute the known constant for the index register so the unwind
     info records a fixed frame offset.  */
  real = replace_rtx (real, reg, GEN_INT (val));

  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
					real,
					REG_NOTES (insn));
}
/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
   with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
   is not NULL.  It would be nice if dwarf2out_frame_debug_expr could
   deduce these equivalences by itself so it wasn't necessary to hold
   its hand so much.  */

static void
rs6000_frame_related (insn, reg, val, reg2, rreg)
     rtx insn;
     rtx reg;
     HOST_WIDE_INT val;
     rtx reg2;
     rtx rreg;
{
  rtx real, temp;

  /* copy_rtx will not make unique copies of registers, so we need to
     ensure we don't have unwanted sharing here.
     NOTE(review): the equality guards around these two lines were lost
     in the extraction; restored as reg==reg2 / reg==rreg checks.  */
  if (reg == reg2)
    reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));

  if (reg == rreg)
    reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));

  real = copy_rtx (PATTERN (insn));

  if (reg2 != NULL_RTX)
    real = replace_rtx (real, reg2, rreg);

  /* Express the frame register in terms of the canonical stack
     pointer + offset, which is what the unwinder wants to see.  */
  real = replace_rtx (real, reg,
		      gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
							STACK_POINTER_REGNUM),
				    GEN_INT (val)));

  /* We expect that 'real' is either a SET or a PARALLEL containing
     SETs (and possibly other stuff).  In a PARALLEL, all the SETs
     are important so they all have to be marked RTX_FRAME_RELATED_P.  */

  if (GET_CODE (real) == SET)
    {
      rtx set = real;

      /* Fold the substituted addresses to canonical form; simplify_rtx
	 returns NULL when nothing changed, hence the guards.  */
      temp = simplify_rtx (SET_SRC (set));
      if (temp)
	SET_SRC (set) = temp;
      temp = simplify_rtx (SET_DEST (set));
      if (temp)
	SET_DEST (set) = temp;
      if (GET_CODE (SET_DEST (set)) == MEM)
	{
	  temp = simplify_rtx (XEXP (SET_DEST (set), 0));
	  if (temp)
	    XEXP (SET_DEST (set), 0) = temp;
	}
    }
  else if (GET_CODE (real) == PARALLEL)
    {
      int i;
      for (i = 0; i < XVECLEN (real, 0); i++)
	if (GET_CODE (XVECEXP (real, 0, i)) == SET)
	  {
	    rtx set = XVECEXP (real, 0, i);

	    temp = simplify_rtx (SET_SRC (set));
	    if (temp)
	      SET_SRC (set) = temp;
	    temp = simplify_rtx (SET_DEST (set));
	    if (temp)
	      SET_DEST (set) = temp;
	    if (GET_CODE (SET_DEST (set)) == MEM)
	      {
		temp = simplify_rtx (XEXP (SET_DEST (set), 0));
		if (temp)
		  XEXP (SET_DEST (set), 0) = temp;
	      }
	    RTX_FRAME_RELATED_P (set) = 1;
	  }
    }
  else
    abort ();

  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
					real,
					REG_NOTES (insn));
}
/* Returns an insn that has a vrsave set operation with the
   appropriate CLOBBERs.

   REG is a GPR holding the desired VRSAVE value, INFO the frame layout,
   and EPILOGUEP nonzero when generating the epilogue form (where
   call-saved AltiVec regs must be kept live rather than clobbered).  */

static rtx
generate_set_vrsave (reg, info, epiloguep)
     rtx reg;
     rs6000_stack_t *info;
     int epiloguep;
{
  int nclobs, i;
  rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
  rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);

  /* NOTE(review): the unspec_volatile number was not visible in the
     damaged source; 30 matches the gcc-3.3 *set_vrsave pattern --
     verify against rs6000.md.  */
  clobs[0]
    = gen_rtx_SET (VOIDmode,
		   vrsave,
		   gen_rtx_UNSPEC_VOLATILE (SImode,
					    gen_rtvec (2, reg, vrsave),
					    30));

  nclobs = 1;

  /* We need to clobber the registers in the mask so the scheduler
     does not move sets to VRSAVE before sets of AltiVec registers.

     However, if the function receives nonlocal gotos, reload will set
     all call saved registers live.  We will end up with:

     	(set (reg 999) (mem))
	(parallel [ (set (reg vrsave) (unspec blah))
		    (clobber (reg 999))])

     The clobber will cause the store into reg 999 to be dead, and
     flow will attempt to delete an epilogue insn.  In this case, we
     need an unspec use/set of the register.  */

  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
    if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
      {
	if (!epiloguep || call_used_regs[i])
	  clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
					     gen_rtx_REG (V4SImode, i));
	else
	  {
	    rtx reg = gen_rtx_REG (V4SImode, i);

	    /* Use/set the register via unspec 27 so it stays live
	       through the epilogue (see comment above).  */
	    clobs[nclobs++]
	      = gen_rtx_SET (VOIDmode,
			     reg,
			     gen_rtx_UNSPEC (V4SImode,
					     gen_rtvec (1, reg), 27));
	  }
      }

  insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));

  for (i = 0; i < nclobs; ++i)
    XVECEXP (insn, 0, i) = clobs[i];

  return insn;
}
/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
   Save REGNO into [FRAME_REG + OFFSET] in mode MODE.

   FRAME_PTR and TOTAL_SIZE are forwarded to rs6000_frame_related so the
   note can describe the slot relative to the canonical stack pointer.
   AltiVec and out-of-range SPE offsets need reg+reg addressing; in that
   case the offset goes through r11 and the note substitutes the
   constant back (REPLACEA/REPLACEB).  */

static void
emit_frame_save (frame_reg, frame_ptr, mode, regno, offset, total_size)
     rtx frame_reg;
     rtx frame_ptr;
     enum machine_mode mode;
     unsigned int regno;
     int offset;
     int total_size;
{
  rtx reg, offset_rtx, insn, mem, addr, int_rtx;
  rtx replacea, replaceb;

  int_rtx = GEN_INT (offset);

  /* Some cases that need register indexed addressing.  */
  if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
      || (TARGET_SPE_ABI
	  && SPE_VECTOR_MODE (mode)
	  && !SPE_CONST_OFFSET_OK (offset)))
    {
      /* Whomever calls us must make sure r11 is available in the
	 flow path of instructions in the prologue.  */
      offset_rtx = gen_rtx_REG (Pmode, 11);
      emit_move_insn (offset_rtx, int_rtx);

      replacea = offset_rtx;
      replaceb = int_rtx;
    }
  else
    {
      offset_rtx = int_rtx;
      replacea = NULL_RTX;
      replaceb = NULL_RTX;
    }

  reg = gen_rtx_REG (mode, regno);
  addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
  mem = gen_rtx_MEM (mode, addr);
  set_mem_alias_set (mem, rs6000_sr_alias_set);

  insn = emit_move_insn (mem, reg);

  rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
}
/* Emit an offset memory reference suitable for a frame store, while
   converting to a valid addressing mode.

   Returns (mem:MODE (plus REG offset)).  For SPE vector modes the
   constant offset may not fit the insn's offset field, so it is first
   moved into the fixed scratch register (emitting a move insn as a
   side effect).  */

static rtx
gen_frame_mem_offset (mode, reg, offset)
     enum machine_mode mode;
     rtx reg;
     int offset;
{
  rtx int_rtx, offset_rtx;

  int_rtx = GEN_INT (offset);

  if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
    {
      offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
      emit_move_insn (offset_rtx, int_rtx);
    }
  else
    offset_rtx = int_rtx;

  return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
}
/* Emit function prologue as insns.

   Allocates the stack frame and saves, as required by the frame layout
   computed by rs6000_stack_info: AltiVec registers, VRSAVE, FPRs, GPRs,
   EH data registers, LR, and CR; then sets up the frame pointer and the
   TOC/PIC register.  Frame-related notes are attached throughout for
   the DWARF unwinder.

   NOTE(review): reconstructed from a damaged extraction; lines marked
   below were not visible and were restored from the surrounding
   structure -- verify against gcc-3.3 rs6000.c.  */
void
rs6000_emit_prologue ()
{
  rs6000_stack_t *info = rs6000_stack_info ();
  enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
  int reg_size = TARGET_POWERPC64 ? 8 : 4;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
  rtx frame_reg_rtx = sp_reg_rtx;
  rtx cr_save_rtx = NULL;
  rtx insn;
  int saving_FPRs_inline;
  int using_store_multiple;
  HOST_WIDE_INT sp_offset = 0;

  /* SPE saves 64-bit GPR halves as V2SImode pairs.  */
  if (TARGET_SPE_ABI)
    {
      reg_mode = V2SImode;
      reg_size = 8;
    }

  using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			  && !TARGET_SPE_ABI
			  && info->first_gp_reg_save < 31);
  saving_FPRs_inline = (info->first_fp_reg_save == 64
			|| FP_SAVE_INLINE (info->first_fp_reg_save));

  /* For V.4, update stack before we do any saving and set back pointer.  */
  if (info->push_p && DEFAULT_ABI == ABI_V4)
    {
      if (info->total_size < 32767)
	sp_offset = info->total_size;
      else
	frame_reg_rtx = frame_ptr_rtx;
      rs6000_emit_allocate_stack (info->total_size,
				  (frame_reg_rtx != sp_reg_rtx
				   && (info->cr_save_p
				       || info->lr_save_p
				       || info->first_fp_reg_save < 64
				       || info->first_gp_reg_save < 32
				       )));
      if (frame_reg_rtx != sp_reg_rtx)
	rs6000_emit_stack_tie ();
    }

  /* Save AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      /* There should be a non inline version of this, for when we
	 are saving lots of vector registers.  */
      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx areg, savereg, mem;
	    int offset;

	    offset = info->altivec_save_offset + sp_offset
	      + 16 * (i - info->first_altivec_reg_save);

	    savereg = gen_rtx_REG (V4SImode, i);

	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn (areg, GEN_INT (offset));

	    /* AltiVec addressing mode is [reg+reg].  */
	    mem = gen_rtx_MEM (V4SImode,
			       gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    insn = emit_move_insn (mem, savereg);

	    /* Rewrite the unwind note to use the constant offset rather
	       than the scratch index register.  */
	    altivec_frame_fixup (insn, areg, offset);
	  }
    }

  /* VRSAVE is a bit vector representing which AltiVec registers
     are used.  The OS uses this to determine which vector
     registers to save on a context switch.  We need to save
     VRSAVE on the stack frame, add whatever AltiVec registers we
     used in this function, and do the corresponding magic in the
     epilogue.  */

  if (TARGET_ALTIVEC && info->vrsave_mask != 0)
    {
      rtx reg, mem, vrsave;
      int offset;

      /* Get VRSAVE onto a GPR.  */
      reg = gen_rtx_REG (SImode, 12);
      vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
      /* NOTE(review): the condition selecting between these two reads
	 was dropped; TARGET_MACHO matches the Darwin-only
	 get_vrsave_internal pattern -- verify.  */
      if (TARGET_MACHO)
	emit_insn (gen_get_vrsave_internal (reg));
      else
	emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));

      /* Save VRSAVE.  */
      offset = info->vrsave_save_offset + sp_offset;
      mem
	= gen_rtx_MEM (SImode,
		       gen_rtx_PLUS (Pmode, frame_reg_rtx,
				     GEN_INT (offset)));
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      insn = emit_move_insn (mem, reg);

      /* Include the registers in the mask.  */
      emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));

      insn = emit_insn (generate_set_vrsave (reg, info, 0));
    }

  /* If we use the link register, get it into r0.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, 0),
		    gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));

  /* If we need to save CR, put it into r12.  */
  if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
    {
      cr_save_rtx = gen_rtx_REG (SImode, 12);
      emit_insn (gen_movesi_from_cr (cr_save_rtx));
    }

  /* Do any required saving of fpr's.  If only one or two to save, do
     it ourselves.  Otherwise, call function.  */
  if (saving_FPRs_inline)
    {
      int i;
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	if ((regs_ever_live[info->first_fp_reg_save+i]
	     && ! call_used_regs[info->first_fp_reg_save+i]))
	  emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
			   info->first_fp_reg_save + i,
			   info->fp_save_offset + sp_offset + 8 * i,
			   info->total_size);
    }
  else if (info->first_fp_reg_save != 64)
    {
      /* Out-of-line save: one PARALLEL that calls _savefN and stores
	 all FPRs from first_fp_reg_save upward.  */
      int i;
      char rname[30];
      const char *alloc_rname;
      rtvec p;
      p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);

      RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
					  gen_rtx_REG (Pmode,
						       LINK_REGISTER_REGNUM));
      sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
	       info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
      alloc_rname = ggc_strdup (rname);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_SYMBOL_REF (Pmode,
							  alloc_rname));
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	{
	  rtx addr, reg, mem;
	  reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset + 8*i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
	}
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    NULL_RTX, NULL_RTX);
    }

  /* Save GPRs.  This is done as a PARALLEL if we are using
     the store-multiple instructions.  */
  if (using_store_multiple)
    {
      rtvec p;
      int i;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr, reg, mem;
	  reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->gp_save_offset
					+ sp_offset
					+ reg_size * i));
	  mem = gen_rtx_MEM (reg_mode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
	}
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    NULL_RTX, NULL_RTX);
    }
  else
    {
      int i;
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	if ((regs_ever_live[info->first_gp_reg_save+i]
	     && ! call_used_regs[info->first_gp_reg_save+i])
	    || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
		&& ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	  {
	    rtx addr, reg, mem;
	    reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);

	    if (TARGET_SPE_ABI)
	      {
		int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
		rtx b;

		if (!SPE_CONST_OFFSET_OK (offset))
		  {
		    b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
		    emit_move_insn (b, GEN_INT (offset));
		  }
		else
		  b = GEN_INT (offset);

		addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
		mem = gen_rtx_MEM (V2SImode, addr);
		set_mem_alias_set (mem, rs6000_sr_alias_set);
		insn = emit_move_insn (mem, reg);

		if (GET_CODE (b) == CONST_INT)
		  rs6000_frame_related (insn, frame_ptr_rtx,
					info->total_size,
					NULL_RTX, NULL_RTX);
		else
		  rs6000_frame_related (insn, frame_ptr_rtx,
					info->total_size,
					b, GEN_INT (offset));
	      }
	    else
	      {
		addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				     GEN_INT (info->gp_save_offset
					      + sp_offset
					      + reg_size * i));
		mem = gen_rtx_MEM (reg_mode, addr);
		set_mem_alias_set (mem, rs6000_sr_alias_set);

		insn = emit_move_insn (mem, reg);
		rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				      NULL_RTX, NULL_RTX);
	      }
	  }
    }

  /* ??? There's no need to emit actual instructions here, but it's the
     easiest way to get the frame unwind information emitted.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      for (i = 0; ; ++i)
	{
	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
			   info->ehrd_offset + sp_offset
			   + reg_size * (int) i,
			   info->total_size);
	}
    }

  /* Save lr if we used it.  */
  if (info->lr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->lr_save_offset + sp_offset));
      rtx reg = gen_rtx_REG (Pmode, 0);
      rtx mem = gen_rtx_MEM (Pmode, addr);
      /* This should not be of rs6000_sr_alias_set, because of
	 __builtin_return_address.  */

      insn = emit_move_insn (mem, reg);
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
    }

  /* Save CR if we use any that must be preserved.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      /* If r12 was used to hold the original sp, copy cr into r0 now
	 that it's free.  */
      if (REGNO (frame_reg_rtx) == 12)
	{
	  cr_save_rtx = gen_rtx_REG (SImode, 0);
	  emit_insn (gen_movesi_from_cr (cr_save_rtx));
	}
      insn = emit_move_insn (mem, cr_save_rtx);

      /* Now, there's no way that dwarf2out_frame_debug_expr is going
	 to understand '(unspec:SI [(reg:CC 68) ...] 19)'.  But that's
	 OK.  All we have to do is specify that _one_ condition code
	 register is saved in this stack slot.  The thrower's epilogue
	 will then restore all the call-saved registers.
	 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux.  */
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
    }

  /* Update stack and set back pointer unless this is V.4,
     for which it was done previously.  */
  if (info->push_p && DEFAULT_ABI != ABI_V4)
    rs6000_emit_allocate_stack (info->total_size, FALSE);

  /* Set frame pointer, if needed.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
			     sp_reg_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up.  */
  if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
      || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
	  && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
    {
      /* If emit_load_toc_table will use the link register, we need to save
	 it.  We use R11 for this purpose because emit_load_toc_table
	 can use register 0.  This allows us to use a plain 'blr' to return
	 from the procedure more often.  */
      int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
				      && ! info->lr_save_p
				      && EXIT_BLOCK_PTR->pred != NULL);
      if (save_LR_around_toc_setup)
	emit_move_insn (gen_rtx_REG (Pmode, 11),
			gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));

      rs6000_emit_load_toc_table (TRUE);

      if (save_LR_around_toc_setup)
	emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
			gen_rtx_REG (Pmode, 11));
    }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN
      && flag_pic && current_function_uses_pic_offset_table)
    {
      rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
      char *picbase = machopic_function_base_name ();
      rtx src = gen_rtx_SYMBOL_REF (Pmode, ggc_alloc_string (picbase, -1));

      rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));

      rs6000_maybe_dead (
	emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
			gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
    }
#endif
}
/* Write function prologue.

   Assembly-output hook: emits .extern directives for the out-of-line
   FPR save/restore routines and the AIX common-mode runtime helpers,
   and -- on targets without a prologue insn pattern -- expands and
   prints the prologue RTL directly via final().  */

static void
rs6000_output_function_prologue (file, size)
     FILE *file;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  rs6000_stack_t *info = rs6000_stack_info ();

  if (TARGET_DEBUG_STACK)
    debug_stack_info (info);

  /* Write .extern for any function we will call to save and restore
     fp values.  */
  if (info->first_fp_reg_save < 64
      && !FP_SAVE_INLINE (info->first_fp_reg_save))
    fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
	     SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
	     RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
	     RESTORE_FP_SUFFIX);

  /* Write .extern for AIX common mode routines, if needed.  */
  if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
    {
      fputs ("\t.extern __mulh\n", file);
      fputs ("\t.extern __mull\n", file);
      fputs ("\t.extern __divss\n", file);
      fputs ("\t.extern __divus\n", file);
      fputs ("\t.extern __quoss\n", file);
      fputs ("\t.extern __quous\n", file);
      /* Only emit the externs once per translation unit.  */
      common_mode_defined = 1;
    }

  if (! HAVE_prologue)
    {
      start_sequence ();

      /* A NOTE_INSN_DELETED is supposed to be at the start and end of
	 the "toplevel" insn chain.  */
      emit_note (0, NOTE_INSN_DELETED);
      rs6000_emit_prologue ();
      emit_note (0, NOTE_INSN_DELETED);

      /* Expand INSN_ADDRESSES so final() doesn't crash.
	 NOTE(review): the address-assignment loop body was partially
	 dropped by the extraction; restored as a fixed 4-byte step.  */
      {
	rtx insn;
	unsigned addr = 0;
	for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	  {
	    INSN_ADDRESSES_NEW (insn, addr);
	    addr += 4;
	  }
      }

      if (TARGET_DEBUG_STACK)
	debug_rtx_list (get_insns (), 100);
      final (get_insns (), file, FALSE, FALSE);
      end_sequence ();
    }

  rs6000_pic_labelno++;
}
/* Emit function epilogue as insns.

   At present, dwarf2out_frame_debug_expr doesn't understand
   register restores, so we don't bother setting RTX_FRAME_RELATED_P
   anywhere in the epilogue.  Most of the insns below would in any case
   need special notes to explain where r11 is in relation to the stack.

   SIBCALL is nonzero when this epilogue precedes a sibling call, in
   which case no return insn is emitted.  Mirrors rs6000_emit_prologue:
   restores AltiVec regs, VRSAVE, LR, CR, EH data regs, GPRs and FPRs,
   then unwinds the stack pointer and returns.

   NOTE(review): reconstructed from a damaged extraction; lines marked
   below were restored from the surrounding structure -- verify against
   gcc-3.3 rs6000.c.  */
void
rs6000_emit_epilogue (sibcall)
     int sibcall;
{
  rs6000_stack_t *info;
  int restoring_FPRs_inline;
  int using_load_multiple;
  int using_mfcr_multiple;
  int use_backchain_to_restore_sp;
  int sp_offset = 0;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
  rtx frame_reg_rtx = sp_reg_rtx;
  enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
  int reg_size = TARGET_POWERPC64 ? 8 : 4;
  int i;

  /* SPE restores 64-bit GPR halves as V2SImode pairs.  */
  if (TARGET_SPE_ABI)
    {
      reg_mode = V2SImode;
      reg_size = 8;
    }

  info = rs6000_stack_info ();
  using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			 && !TARGET_SPE_ABI
			 && info->first_gp_reg_save < 31);
  restoring_FPRs_inline = (sibcall
			   || current_function_calls_eh_return
			   || info->first_fp_reg_save == 64
			   || FP_SAVE_INLINE (info->first_fp_reg_save));
  use_backchain_to_restore_sp = (frame_pointer_needed
				 || current_function_calls_alloca
				 || info->total_size > 32767);
  /* mtcrf of multiple fields is cheap on these cores (and smaller).  */
  using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
			 || rs6000_cpu == PROCESSOR_PPC603
			 || rs6000_cpu == PROCESSOR_PPC750
			 || optimize_size);

  /* If we have a frame pointer, a call to alloca, or a large stack
     frame, restore the old stack pointer using the backchain.  Otherwise,
     we know what size to update it with.  */
  if (use_backchain_to_restore_sp)
    {
      /* Under V.4, don't reset the stack pointer until after we're done
	 loading the saved registers.  */
      if (DEFAULT_ABI == ABI_V4)
	frame_reg_rtx = gen_rtx_REG (Pmode, 11);

      emit_move_insn (frame_reg_rtx,
		      gen_rtx_MEM (Pmode, sp_reg_rtx));
    }
  else if (info->push_p)
    {
      if (DEFAULT_ABI == ABI_V4)
	sp_offset = info->total_size;
      else
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size)));
	}
    }

  /* Restore AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx addr, areg, mem;

	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn
	      (areg, GEN_INT (info->altivec_save_offset
			      + sp_offset
			      + 16 * (i - info->first_altivec_reg_save)));

	    /* AltiVec addressing mode is [reg+reg].  */
	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
	    mem = gen_rtx_MEM (V4SImode, addr);
	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
	  }
    }

  /* Restore VRSAVE if needed.  */
  if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
    {
      rtx addr, mem, reg;

      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			   GEN_INT (info->vrsave_save_offset + sp_offset));
      mem = gen_rtx_MEM (SImode, addr);
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      reg = gen_rtx_REG (SImode, 12);
      emit_move_insn (reg, mem);

      emit_insn (generate_set_vrsave (reg, info, 1));
    }

  /* Get the old lr if we saved it.  */
  if (info->lr_save_p)
    {
      rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
				      info->lr_save_offset + sp_offset);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
    }

  /* Get the old cr if we saved it.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      emit_move_insn (gen_rtx_REG (SImode, 12), mem);
    }

  /* Set LR here to try to overlap restores below.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
		    gen_rtx_REG (Pmode, 0));

  /* Load exception handler data registers, if needed.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      for (i = 0; ; ++i)
	{
	  rtx mem;

	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
				      info->ehrd_offset + sp_offset
				      + reg_size * (int) i);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
	}
    }

  /* Restore GPRs.  This is done as a PARALLEL if we are using
     the load-multiple instructions.  */
  if (using_load_multiple)
    {
      rtvec p;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) =
	    gen_rtx_SET (VOIDmode,
			 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
			 mem);
	}
      emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
  else
    for (i = 0; i < 32 - info->first_gp_reg_save; i++)
      if ((regs_ever_live[info->first_gp_reg_save+i]
	   && ! call_used_regs[info->first_gp_reg_save+i])
	  || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
	      && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
		  || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  /* Restore 64-bit quantities for SPE.  */
	  if (TARGET_SPE_ABI)
	    {
	      int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
	      rtx b;

	      if (!SPE_CONST_OFFSET_OK (offset))
		{
		  b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
		  emit_move_insn (b, GEN_INT (offset));
		}
	      else
		b = GEN_INT (offset);

	      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
	      mem = gen_rtx_MEM (V2SImode, addr);
	    }

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode,
				       info->first_gp_reg_save + i), mem);
	}

  /* Restore fpr's if we need to do it without calling a function.  */
  if (restoring_FPRs_inline)
    for (i = 0; i < 64 - info->first_fp_reg_save; i++)
      if ((regs_ever_live[info->first_fp_reg_save+i]
	   && ! call_used_regs[info->first_fp_reg_save+i]))
	{
	  rtx addr, mem;
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset
					+ 8 * i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (DFmode,
				       info->first_fp_reg_save + i),
			  mem);
	}

  /* If we saved cr, restore it here.  Just those that were used.  */
  if (info->cr_save_p)
    {
      rtx r12_rtx = gen_rtx_REG (SImode, 12);
      int count = 0;

      if (using_mfcr_multiple)
	{
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      count++;
	  if (count == 0)
	    abort ();
	}

      if (using_mfcr_multiple && count > 1)
	{
	  /* One PARALLEL: a single mtcrf restoring every live
	     call-saved CR field from r12.  */
	  rtvec p;
	  int ndx;

	  p = rtvec_alloc (count);

	  ndx = 0;
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      {
		rtvec r = rtvec_alloc (2);
		RTVEC_ELT (r, 0) = r12_rtx;
		/* Field select bit: CR field i is bit (7-i) of the
		   mtcrf mask.  */
		RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
		RTVEC_ELT (p, ndx) =
		  gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
			       gen_rtx_UNSPEC (CCmode, r, 20));
		ndx++;
	      }
	  emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
	  if (ndx != count)
	    abort ();
	}
      else
	for (i = 0; i < 8; i++)
	  if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	    {
	      emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
							   CR0_REGNO+i),
					      r12_rtx));
	    }
    }

  /* If this is V.4, unwind the stack pointer after all of the loads
     have been done.  We need to emit a block here so that sched
     doesn't decide to move the sp change before the register restores
     (which may not have any obvious dependency on the stack).  This
     doesn't hurt performance, because there is no scheduling that can
     be done after this point.  */
  if (DEFAULT_ABI == ABI_V4)
    {
      if (frame_reg_rtx != sp_reg_rtx)
	rs6000_emit_stack_tie ();

      if (use_backchain_to_restore_sp)
	{
	  emit_move_insn (sp_reg_rtx, frame_reg_rtx);
	}
      else if (sp_offset != 0)
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset)));
	}
    }

  if (current_function_calls_eh_return)
    {
      rtx sa = EH_RETURN_STACKADJ_RTX;
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
		 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
    }

  if (!sibcall)
    {
      rtvec p;
      if (! restoring_FPRs_inline)
	p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
      else
	p = rtvec_alloc (2);

      RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_REG (Pmode,
						   LINK_REGISTER_REGNUM));

      /* If we have to restore more than two FP registers, branch to the
	 restore function.  It will return to our caller.  */
      if (! restoring_FPRs_inline)
	{
	  int i;
	  char rname[30];
	  const char *alloc_rname;

	  sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
		   info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
	  alloc_rname = ggc_strdup (rname);
	  RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
					  gen_rtx_SYMBOL_REF (Pmode,
							      alloc_rname));

	  for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	    {
	      rtx addr, mem;
	      addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
				   GEN_INT (info->fp_save_offset + 8*i));
	      mem = gen_rtx_MEM (DFmode, addr);
	      set_mem_alias_set (mem, rs6000_sr_alias_set);

	      RTVEC_ELT (p, i+3) =
		gen_rtx_SET (VOIDmode,
			     gen_rtx_REG (DFmode,
					  info->first_fp_reg_save + i),
			     mem);
	    }
	}

      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
}
11034 /* Write function epilogue. */
/* NOTE(review): this chunk passed through an extraction that dropped many
   physical lines (braces, the `static void' header, some statements) and
   split others; consult the original GCC rs6000.c before editing code here.  */
/* Emits the epilogue assembly to FILE when there is no RTL epilogue
   expander, then (for AIX with traceback enabled) writes the xcoff
   traceback table described in /usr/include/sys/debug.h.  */
11037 rs6000_output_function_epilogue (file
, size
)
11039 HOST_WIDE_INT size ATTRIBUTE_UNUSED
;
11041 rs6000_stack_t
*info
= rs6000_stack_info ();
11043 if (! HAVE_epilogue
)
11045 rtx insn
= get_last_insn ();
11046 /* If the last insn was a BARRIER, we don't have to write anything except
11047 the trace table. */
11048 if (GET_CODE (insn
) == NOTE
)
11049 insn
= prev_nonnote_insn (insn
);
11050 if (insn
== 0 || GET_CODE (insn
) != BARRIER
)
11052 /* This is slightly ugly, but at least we don't have two
11053 copies of the epilogue-emitting code. */
11056 /* A NOTE_INSN_DELETED is supposed to be at the start
11057 and end of the "toplevel" insn chain. */
11058 emit_note (0, NOTE_INSN_DELETED
);
11059 rs6000_emit_epilogue (FALSE
);
11060 emit_note (0, NOTE_INSN_DELETED
);
11062 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11066 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
11068 INSN_ADDRESSES_NEW (insn
, addr
);
11073 if (TARGET_DEBUG_STACK
)
11074 debug_rtx_list (get_insns (), 100);
11075 final (get_insns (), file
, FALSE
, FALSE
);
11080 /* Output a traceback table here. See /usr/include/sys/debug.h for info
11083 We don't output a traceback table if -finhibit-size-directive was
11084 used. The documentation for -finhibit-size-directive reads
11085 ``don't output a @code{.size} assembler directive, or anything
11086 else that would cause trouble if the function is split in the
11087 middle, and the two halves are placed at locations far apart in
11088 memory.'' The traceback table has this property, since it
11089 includes the offset from the start of the function to the
11090 traceback table itself.
11092 System V.4 Powerpc's (and the embedded ABI derived from it) use a
11093 different traceback table. */
11094 if (DEFAULT_ABI
== ABI_AIX
&& ! flag_inhibit_size_directive
11095 && rs6000_traceback
!= traceback_none
)
11097 const char *fname
= NULL
;
11098 const char *language_string
= lang_hooks
.name
;
11099 int fixed_parms
= 0, float_parms
= 0, parm_info
= 0;
11101 int optional_tbtab
;
11103 if (rs6000_traceback
== traceback_full
)
11104 optional_tbtab
= 1;
11105 else if (rs6000_traceback
== traceback_part
)
11106 optional_tbtab
= 0;
11108 optional_tbtab
= !optimize_size
&& !TARGET_ELF
;
11110 if (optional_tbtab
)
11112 fname
= XSTR (XEXP (DECL_RTL (current_function_decl
), 0), 0);
11113 while (*fname
== '.') /* V.4 encodes . in the name */
11116 /* Need label immediately before tbtab, so we can compute
11117 its offset from the function start. */
11118 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LT");
11119 ASM_OUTPUT_LABEL (file
, fname
);
11122 /* The .tbtab pseudo-op can only be used for the first eight
11123 expressions, since it can't handle the possibly variable
11124 length fields that follow. However, if you omit the optional
11125 fields, the assembler outputs zeros for all optional fields
11126 anyways, giving each variable length field its minimum length
11127 (as defined in sys/debug.h). Thus we can not use the .tbtab
11128 pseudo-op at all. */
11130 /* An all-zero word flags the start of the tbtab, for debuggers
11131 that have to find it by searching forward from the entry
11132 point or from the current pc. */
11133 fputs ("\t.long 0\n", file
);
11135 /* Tbtab format type. Use format type 0. */
11136 fputs ("\t.byte 0,", file
);
11138 /* Language type. Unfortunately, there doesn't seem to be any
11139 official way to get this info, so we use language_string. C
11140 is 0. C++ is 9. No number defined for Obj-C, so use the
11141 value for C for now. There is no official value for Java,
11142 although IBM appears to be using 13. There is no official value
11143 for Chill, so we've chosen 44 pseudo-randomly. */
11144 if (! strcmp (language_string
, "GNU C")
11145 || ! strcmp (language_string
, "GNU Objective-C"))
11147 else if (! strcmp (language_string
, "GNU F77"))
11149 else if (! strcmp (language_string
, "GNU Ada"))
11151 else if (! strcmp (language_string
, "GNU Pascal"))
11153 else if (! strcmp (language_string
, "GNU C++"))
11155 else if (! strcmp (language_string
, "GNU Java"))
11157 else if (! strcmp (language_string
, "GNU CHILL"))
11161 fprintf (file
, "%d,", i
);
11163 /* 8 single bit fields: global linkage (not set for C extern linkage,
11164 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
11165 from start of procedure stored in tbtab, internal function, function
11166 has controlled storage, function has no toc, function uses fp,
11167 function logs/aborts fp operations. */
11168 /* Assume that fp operations are used if any fp reg must be saved. */
11169 fprintf (file
, "%d,",
11170 (optional_tbtab
<< 5) | ((info
->first_fp_reg_save
!= 64) << 1));
11172 /* 6 bitfields: function is interrupt handler, name present in
11173 proc table, function calls alloca, on condition directives
11174 (controls stack walks, 3 bits), saves condition reg, saves
11176 /* The `function calls alloca' bit seems to be set whenever reg 31 is
11177 set up as a frame pointer, even when there is no alloca call. */
11178 fprintf (file
, "%d,",
11179 ((optional_tbtab
<< 6)
11180 | ((optional_tbtab
& frame_pointer_needed
) << 5)
11181 | (info
->cr_save_p
<< 1)
11182 | (info
->lr_save_p
)));
11184 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
11186 fprintf (file
, "%d,",
11187 (info
->push_p
<< 7) | (64 - info
->first_fp_reg_save
));
11189 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
11190 fprintf (file
, "%d,", (32 - first_reg_to_save ()));
11192 if (optional_tbtab
)
11194 /* Compute the parameter info from the function decl argument
11197 int next_parm_info_bit
= 31;
11199 for (decl
= DECL_ARGUMENTS (current_function_decl
);
11200 decl
; decl
= TREE_CHAIN (decl
))
11202 rtx parameter
= DECL_INCOMING_RTL (decl
);
11203 enum machine_mode mode
= GET_MODE (parameter
);
11205 if (GET_CODE (parameter
) == REG
)
11207 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
)
11213 if (mode
== SFmode
)
11215 else if (mode
== DFmode
|| mode
== TFmode
)
11220 /* If only one bit will fit, don't or in this entry. */
11221 if (next_parm_info_bit
> 0)
11222 parm_info
|= (bits
<< (next_parm_info_bit
- 1));
11223 next_parm_info_bit
-= 2;
11227 fixed_parms
+= ((GET_MODE_SIZE (mode
)
11228 + (UNITS_PER_WORD
- 1))
11230 next_parm_info_bit
-= 1;
11236 /* Number of fixed point parameters. */
11237 /* This is actually the number of words of fixed point parameters; thus
11238 an 8 byte struct counts as 2; and thus the maximum value is 8. */
11239 fprintf (file
, "%d,", fixed_parms
);
11241 /* 2 bitfields: number of floating point parameters (7 bits), parameters
11243 /* This is actually the number of fp registers that hold parameters;
11244 and thus the maximum value is 13. */
11245 /* Set parameters on stack bit if parameters are not in their original
11246 registers, regardless of whether they are on the stack? Xlc
11247 seems to set the bit when not optimizing. */
11248 fprintf (file
, "%d\n", ((float_parms
<< 1) | (! optimize
)));
11250 if (! optional_tbtab
)
11253 /* Optional fields follow. Some are variable length. */
11255 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
11256 11 double float. */
11257 /* There is an entry for each parameter in a register, in the order that
11258 they occur in the parameter list. Any intervening arguments on the
11259 stack are ignored. If the list overflows a long (max possible length
11260 34 bits) then completely leave off all elements that don't fit. */
11261 /* Only emit this long if there was at least one parameter. */
11262 if (fixed_parms
|| float_parms
)
11263 fprintf (file
, "\t.long %d\n", parm_info
);
11265 /* Offset from start of code to tb table. */
11266 fputs ("\t.long ", file
);
11267 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LT");
11269 RS6000_OUTPUT_BASENAME (file
, fname
);
11271 assemble_name (file
, fname
);
11273 fputs ("-.", file
);
11275 RS6000_OUTPUT_BASENAME (file
, fname
);
11277 assemble_name (file
, fname
);
11281 /* Interrupt handler mask. */
11282 /* Omit this long, since we never set the interrupt handler bit
11285 /* Number of CTL (controlled storage) anchors. */
11286 /* Omit this long, since the has_ctl bit is never set above. */
11288 /* Displacement into stack of each CTL anchor. */
11289 /* Omit this list of longs, because there are no CTL anchors. */
11291 /* Length of function name. */
11294 fprintf (file
, "\t.short %d\n", (int) strlen (fname
));
11296 /* Function name. */
11297 assemble_string (fname
, strlen (fname
));
11299 /* Register for alloca automatic storage; this is always reg 31.
11300 Only emit this if the alloca bit was set above. */
11301 if (frame_pointer_needed
)
11302 fputs ("\t.byte 31\n", file
)
;
11304 fputs ("\t.align 2\n", file
);
11308 /* A C compound statement that outputs the assembler code for a thunk
11309 function, used to implement C++ virtual function calls with
11310 multiple inheritance. The thunk acts as a wrapper around a virtual
11311 function, adjusting the implicit object parameter before handing
11312 control off to the real function.
11314 First, emit code to add the integer DELTA to the location that
11315 contains the incoming first argument. Assume that this argument
11316 contains a pointer, and is the one used to pass the `this' pointer
11317 in C++. This is the incoming argument *before* the function
11318 prologue, e.g. `%o0' on a sparc. The addition must preserve the
11319 values of all other incoming arguments.
11321 After the addition, emit code to jump to FUNCTION, which is a
11322 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
11323 not touch the return address. Hence returning from FUNCTION will
11324 return to whoever called the current `thunk'.
11326 The effect must be as if FUNCTION had been called directly with the
11327 adjusted first argument. This macro is responsible for emitting
11328 all of the code for a thunk function; output_function_prologue()
11329 and output_function_epilogue() are not invoked.
11331 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
11332 been extracted from it.) It might possibly be useful on some
11333 targets, but probably not.
11335 If you do not define this macro, the target-independent code in the
11336 C++ frontend will generate a less efficient heavyweight thunk that
11337 calls FUNCTION instead of jumping to it. The generic approach does
11338 not support varargs. */
/* NOTE(review): lines were dropped by the extraction (missing `static void'
   header, braces, `if (delta)' / `if (vcall_offset)' guards implied by the
   comments); do not edit code here without the original file.  */
/* Builds the thunk as RTL (adjust `this' in r3/r4, optionally indirect
   through the vtable via r12) and runs a minimal final() pass over it.  */
11341 rs6000_output_mi_thunk (file
, thunk_fndecl
, delta
, vcall_offset
, function
)
11343 tree thunk_fndecl ATTRIBUTE_UNUSED
;
11344 HOST_WIDE_INT delta
;
11345 HOST_WIDE_INT vcall_offset
;
11348 rtx
this, insn
, funexp
;
11350 reload_completed
= 1;
11351 no_new_pseudos
= 1;
11353 /* Mark the end of the (empty) prologue. */
11354 emit_note (NULL
, NOTE_INSN_PROLOGUE_END
);
11356 /* Find the "this" pointer. If the function returns a structure,
11357 the structure return pointer is in r3. */
11358 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function
))))
11359 this = gen_rtx_REG (Pmode
, 4);
11361 this = gen_rtx_REG (Pmode
, 3);
11363 /* Apply the constant offset, if required. */
11366 rtx delta_rtx
= GEN_INT (delta
);
11367 emit_insn (TARGET_32BIT
11368 ? gen_addsi3 (this, this, delta_rtx
)
11369 : gen_adddi3 (this, this, delta_rtx
));
11372 /* Apply the offset from the vtable, if required. */
11375 rtx vcall_offset_rtx
= GEN_INT (vcall_offset
);
11376 rtx tmp
= gen_rtx_REG (Pmode
, 12);
11378 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, this));
11379 emit_insn (TARGET_32BIT
11380 ? gen_addsi3 (tmp
, tmp
, vcall_offset_rtx
)
11381 : gen_adddi3 (tmp
, tmp
, vcall_offset_rtx
));
11382 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, tmp
));
11383 emit_insn (TARGET_32BIT
11384 ? gen_addsi3 (this, this, tmp
)
11385 : gen_adddi3 (this, this, tmp
));
11388 /* Generate a tail call to the target function. */
11389 if (!TREE_USED (function
))
11391 assemble_external (function
);
11392 TREE_USED (function
) = 1;
11394 funexp
= XEXP (DECL_RTL (function
), 0);
11396 SYMBOL_REF_FLAG (funexp
) = 0;
11397 if (current_file_function_operand (funexp
, VOIDmode
)
11398 && (! lookup_attribute ("longcall",
11399 TYPE_ATTRIBUTES (TREE_TYPE (function
)))
11400 || lookup_attribute ("shortcall",
11401 TYPE_ATTRIBUTES (TREE_TYPE (function
)))))
11402 SYMBOL_REF_FLAG (funexp
) = 1;
11404 funexp
= gen_rtx_MEM (FUNCTION_MODE
, funexp
);
11408 funexp
= machopic_indirect_call_target (funexp
);
11411 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
11412 generate sibcall RTL explicitly to avoid constraint abort. */
11413 insn
= emit_call_insn (
11414 gen_rtx_PARALLEL (VOIDmode
,
11416 gen_rtx_CALL (VOIDmode
,
11417 funexp
, const0_rtx
),
11418 gen_rtx_USE (VOIDmode
, const0_rtx
),
11419 gen_rtx_USE (VOIDmode
,
11420 gen_rtx_REG (SImode
,
11421 LINK_REGISTER_REGNUM
)),
11422 gen_rtx_RETURN (VOIDmode
))));
11423 SIBLING_CALL_P (insn
) = 1;
11426 /* Run just enough of rest_of_compilation to get the insns emitted.
11427 There's not really enough bulk here to make other passes such as
11428 instruction scheduling worth while. Note that use_thunk calls
11429 assemble_start_function and assemble_end_function. */
11430 insn
= get_insns ();
11431 shorten_branches (insn
);
11432 final_start_function (insn
, file
, 1);
11433 final (insn
, file
, 1, 0);
11434 final_end_function ();
11436 reload_completed
= 0;
11437 no_new_pseudos
= 0;
11440 /* A quick summary of the various types of 'constant-pool tables'
11443 Target Flags Name One table per
11444 AIX (none) AIX TOC object file
11445 AIX -mfull-toc AIX TOC object file
11446 AIX -mminimal-toc AIX minimal TOC translation unit
11447 SVR4/EABI (none) SVR4 SDATA object file
11448 SVR4/EABI -fpic SVR4 pic object file
11449 SVR4/EABI -fPIC SVR4 PIC translation unit
11450 SVR4/EABI -mrelocatable EABI TOC function
11451 SVR4/EABI -maix AIX TOC object file
11452 SVR4/EABI -maix -mminimal-toc
11453 AIX minimal TOC translation unit
11455 Name Reg. Set by entries contains:
11456 made by addrs? fp? sum?
11458 AIX TOC 2 crt0 as Y option option
11459 AIX minimal TOC 30 prolog gcc Y Y option
11460 SVR4 SDATA 13 crt0 gcc N Y N
11461 SVR4 pic 30 prolog ld Y not yet N
11462 SVR4 PIC 30 prolog gcc Y option option
11463 EABI TOC 30 prolog gcc Y option option
11467 /* Hash functions for the hash table. */
/* Return a hash value for constant K, folding the rtx code, machine mode,
   and each operand (strings, sub-rtxes, ints, wide ints) into the
   accumulator with the multipliers 613/1231.  NOTE(review): extraction
   dropped lines here (return type, some case labels, declarations of
   `fidx'/`flen'/`len'/`i'); see the original rs6000.c before editing.  */
11470 rs6000_hash_constant (k
)
11473 enum rtx_code code
= GET_CODE (k
);
11474 enum machine_mode mode
= GET_MODE (k
);
11475 unsigned result
= (code
<< 3) ^ mode
;
11476 const char *format
;
11479 format
= GET_RTX_FORMAT (code
);
11480 flen
= strlen (format
);
/* Special cases precede the generic per-operand walk below.  */
11486 return result
* 1231 + (unsigned) INSN_UID (XEXP (k
, 0));
11489 if (mode
!= VOIDmode
)
11490 return real_hash (CONST_DOUBLE_REAL_VALUE (k
)) * result
;
/* Generic case: mix in every operand according to the rtx format string.  */
11502 for (; fidx
< flen
; fidx
++)
11503 switch (format
[fidx
])
11508 const char *str
= XSTR (k
, fidx
);
11509 len
= strlen (str
);
11510 result
= result
* 613 + len
;
11511 for (i
= 0; i
< len
; i
++)
11512 result
= result
* 613 + (unsigned) str
[i
];
11517 result
= result
* 1231 + rs6000_hash_constant (XEXP (k
, fidx
));
11521 result
= result
* 613 + (unsigned) XINT (k
, fidx
);
/* Wide ints are mixed in word-by-word when they are wider than
   `unsigned' on the host.  */
11524 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT
))
11525 result
= result
* 613 + (unsigned) XWINT (k
, fidx
);
11529 for (i
= 0; i
< sizeof(HOST_WIDE_INT
)/sizeof(unsigned); i
++)
11530 result
= result
* 613 + (unsigned) (XWINT (k
, fidx
)
/* htab hash callback for the TOC hash table: hash the wrapped constant
   rtx and fold in its key_mode so equal rtxes in different modes do not
   collide into the same slot.  */
11542 toc_hash_function (hash_entry
)
11543 const void * hash_entry
;
11545 const struct toc_hash_struct
*thc
=
11546 (const struct toc_hash_struct
*) hash_entry
;
11547 return rs6000_hash_constant (thc
->key
) ^ thc
->key_mode
;
11550 /* Compare H1 and H2 for equivalence. */
/* htab equality callback: two TOC entries match only when their key_mode
   fields agree and their key rtxes are rtx_equal_p.  */
11553 toc_hash_eq (h1
, h2
)
11557 rtx r1
= ((const struct toc_hash_struct
*) h1
)->key
;
11558 rtx r2
= ((const struct toc_hash_struct
*) h2
)->key
;
11560 if (((const struct toc_hash_struct
*) h1
)->key_mode
11561 != ((const struct toc_hash_struct
*) h2
)->key_mode
)
11564 return rtx_equal_p (r1
, r2
);
11567 /* These are the names given by the C++ front-end to vtables, and
11568 vtable-like objects. Ideally, this logic should not be here;
11569 instead, there should be some programmatic way of inquiring as
11570 to whether or not an object is a vtable. */
/* NOTE(review): the macro body tests the lowercase identifier `name', not
   its parameter NAME -- it only expands correctly where a local variable
   called `name' is in scope (as in rs6000_output_symbol_ref and output_toc
   below).  Confirm against the original file before tightening this.  */
11572 #define VTABLE_NAME_P(NAME) \
11573 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
11574 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
11575 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
11576 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
/* Print symbol X's name to FILE for a TOC reference.  Vtable symbols are
   emitted by base name only (see the comment in the body).  */
11579 rs6000_output_symbol_ref (file
, x
)
11583 /* Currently C++ toc references to vtables can be emitted before it
11584 is decided whether the vtable is public or private. If this is
11585 the case, then the linker will eventually complain that there is
11586 a reference to an unknown section. Thus, for vtables only,
11587 we emit the TOC reference to reference the symbol and not the
11589 const char *name
= XSTR (x
, 0);
11591 if (VTABLE_NAME_P (name
))
11593 RS6000_OUTPUT_BASENAME (file
, name
);
11596 assemble_name (file
, name
);
11599 /* Output a TOC entry. We derive the entry name from what is being
/* NOTE(review): many physical lines were dropped by the extraction
   (function header, braces, `else' arms, some declarations such as `buf',
   `k', `l', `base', `offset'); treat this text as a partial record of the
   original output_toc and consult GCC's rs6000.c before editing.  */
/* Writes one TOC entry for constant X under label LABELNO/mode MODE,
   deduplicating via toc_hash_table when the linker cannot, and emitting
   FP, integer, and symbolic constants in their AIX `.tc' spellings.  */
11603 output_toc (file
, x
, labelno
, mode
)
11607 enum machine_mode mode
;
11610 const char *name
= buf
;
11611 const char *real_name
;
11618 /* When the linker won't eliminate them, don't output duplicate
11619 TOC entries (this happens on AIX if there is any kind of TOC,
11620 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
11622 if (TARGET_TOC
&& GET_CODE (x
) != LABEL_REF
)
11624 struct toc_hash_struct
*h
;
11627 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
11628 time because GGC is not initialised at that point. */
11629 if (toc_hash_table
== NULL
)
11630 toc_hash_table
= htab_create_ggc (1021, toc_hash_function
,
11631 toc_hash_eq
, NULL
);
11633 h
= ggc_alloc (sizeof (*h
));
11635 h
->key_mode
= mode
;
11636 h
->labelno
= labelno
;
11638 found
= htab_find_slot (toc_hash_table
, h
, 1);
11639 if (*found
== NULL
)
11641 else /* This is indeed a duplicate.
11642 Set this label equal to that label. */
11644 fputs ("\t.set ", file
);
11645 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LC");
11646 fprintf (file
, "%d,", labelno
);
11647 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LC");
11648 fprintf (file
, "%d\n", ((*(const struct toc_hash_struct
**)
11654 /* If we're going to put a double constant in the TOC, make sure it's
11655 aligned properly when strict alignment is on. */
11656 if (GET_CODE (x
) == CONST_DOUBLE
11657 && STRICT_ALIGNMENT
11658 && GET_MODE_BITSIZE (mode
) >= 64
11659 && ! (TARGET_NO_FP_IN_TOC
&& ! TARGET_MINIMAL_TOC
)) {
11660 ASM_OUTPUT_ALIGN (file
, 3);
11663 (*targetm
.asm_out
.internal_label
) (file
, "LC", labelno
);
11665 /* Handle FP constants specially. Note that if we have a minimal
11666 TOC, things we put here aren't actually in the TOC, so we can allow
11668 if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == TFmode
)
11670 REAL_VALUE_TYPE rv
;
11673 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
11674 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv
, k
);
/* 64-bit spelling of a TFmode constant (two doublewords).  */
11678 if (TARGET_MINIMAL_TOC
)
11679 fputs (DOUBLE_INT_ASM_OP
, file
);
11681 fprintf (file
, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
11682 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
11683 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
11684 fprintf (file
, "0x%lx%08lx,0x%lx%08lx\n",
11685 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
11686 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
/* 32-bit spelling of the same constant (four words).  */
11691 if (TARGET_MINIMAL_TOC
)
11692 fputs ("\t.long ", file
);
11694 fprintf (file
, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
11695 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
11696 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
11697 fprintf (file
, "0x%lx,0x%lx,0x%lx,0x%lx\n",
11698 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
11699 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
11703 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == DFmode
)
11705 REAL_VALUE_TYPE rv
;
11708 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
11709 REAL_VALUE_TO_TARGET_DOUBLE (rv
, k
);
11713 if (TARGET_MINIMAL_TOC
)
11714 fputs (DOUBLE_INT_ASM_OP
, file
);
11716 fprintf (file
, "\t.tc FD_%lx_%lx[TC],",
11717 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
11718 fprintf (file
, "0x%lx%08lx\n",
11719 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
11724 if (TARGET_MINIMAL_TOC
)
11725 fputs ("\t.long ", file
);
11727 fprintf (file
, "\t.tc FD_%lx_%lx[TC],",
11728 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
11729 fprintf (file
, "0x%lx,0x%lx\n",
11730 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
11734 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == SFmode
)
11736 REAL_VALUE_TYPE rv
;
11739 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
11740 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
11744 if (TARGET_MINIMAL_TOC
)
11745 fputs (DOUBLE_INT_ASM_OP
, file
);
11747 fprintf (file
, "\t.tc FS_%lx[TC],", l
& 0xffffffff);
11748 fprintf (file
, "0x%lx00000000\n", l
& 0xffffffff);
11753 if (TARGET_MINIMAL_TOC
)
11754 fputs ("\t.long ", file
);
11756 fprintf (file
, "\t.tc FS_%lx[TC],", l
& 0xffffffff);
11757 fprintf (file
, "0x%lx\n", l
& 0xffffffff);
11761 else if (GET_MODE (x
) == VOIDmode
11762 && (GET_CODE (x
) == CONST_INT
|| GET_CODE (x
) == CONST_DOUBLE
))
11764 unsigned HOST_WIDE_INT low
;
11765 HOST_WIDE_INT high
;
11767 if (GET_CODE (x
) == CONST_DOUBLE
)
11769 low
= CONST_DOUBLE_LOW (x
);
11770 high
= CONST_DOUBLE_HIGH (x
);
11773 #if HOST_BITS_PER_WIDE_INT == 32
/* 32-bit host: sign-extend the CONST_INT by hand.  */
11776 high
= (low
& 0x80000000) ? ~0 : 0;
11780 low
= INTVAL (x
) & 0xffffffff;
11781 high
= (HOST_WIDE_INT
) INTVAL (x
) >> 32;
11785 /* TOC entries are always Pmode-sized, but since this
11786 is a bigendian machine then if we're putting smaller
11787 integer constants in the TOC we have to pad them.
11788 (This is still a win over putting the constants in
11789 a separate constant pool, because then we'd have
11790 to have both a TOC entry _and_ the actual constant.)
11792 For a 32-bit target, CONST_INT values are loaded and shifted
11793 entirely within `low' and can be stored in one TOC entry. */
11795 if (TARGET_64BIT
&& POINTER_SIZE
< GET_MODE_BITSIZE (mode
))
11796 abort ();/* It would be easy to make this work, but it doesn't now. */
11798 if (POINTER_SIZE
> GET_MODE_BITSIZE (mode
))
11800 #if HOST_BITS_PER_WIDE_INT == 32
11801 lshift_double (low
, high
, POINTER_SIZE
- GET_MODE_BITSIZE (mode
),
11802 POINTER_SIZE
, &low
, &high
, 0);
11805 low
<<= POINTER_SIZE
- GET_MODE_BITSIZE (mode
);
11806 high
= (HOST_WIDE_INT
) low
>> 32;
11813 if (TARGET_MINIMAL_TOC
)
11814 fputs (DOUBLE_INT_ASM_OP
, file
);
11816 fprintf (file
, "\t.tc ID_%lx_%lx[TC],",
11817 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
11818 fprintf (file
, "0x%lx%08lx\n",
11819 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
11824 if (POINTER_SIZE
< GET_MODE_BITSIZE (mode
))
11826 if (TARGET_MINIMAL_TOC
)
11827 fputs ("\t.long ", file
);
11829 fprintf (file
, "\t.tc ID_%lx_%lx[TC],",
11830 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
11831 fprintf (file
, "0x%lx,0x%lx\n",
11832 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
11836 if (TARGET_MINIMAL_TOC
)
11837 fputs ("\t.long ", file
);
11839 fprintf (file
, "\t.tc IS_%lx[TC],", (long) low
& 0xffffffff);
11840 fprintf (file
, "0x%lx\n", (long) low
& 0xffffffff);
/* Symbolic constants: peel an optional CONST (PLUS base offset) wrapper.  */
11846 if (GET_CODE (x
) == CONST
)
11848 if (GET_CODE (XEXP (x
, 0)) != PLUS
)
11851 base
= XEXP (XEXP (x
, 0), 0);
11852 offset
= INTVAL (XEXP (XEXP (x
, 0), 1));
11855 if (GET_CODE (base
) == SYMBOL_REF
)
11856 name
= XSTR (base
, 0);
11857 else if (GET_CODE (base
) == LABEL_REF
)
11858 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (XEXP (base
, 0)));
11859 else if (GET_CODE (base
) == CODE_LABEL
)
11860 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (base
));
11864 real_name
= (*targetm
.strip_name_encoding
) (name
);
11865 if (TARGET_MINIMAL_TOC
)
11866 fputs (TARGET_32BIT
? "\t.long " : DOUBLE_INT_ASM_OP
, file
);
11869 fprintf (file
, "\t.tc %s", real_name
);
11872 fprintf (file
, ".N%d", - offset
);
11874 fprintf (file
, ".P%d", offset
);
11876 fputs ("[TC],", file
);
11879 /* Currently C++ toc references to vtables can be emitted before it
11880 is decided whether the vtable is public or private. If this is
11881 the case, then the linker will eventually complain that there is
11882 a TOC reference to an unknown section. Thus, for vtables only,
11883 we emit the TOC reference to reference the symbol and not the
11885 if (VTABLE_NAME_P (name
))
11887 RS6000_OUTPUT_BASENAME (file
, name
);
11889 fprintf (file
, "%d", offset
);
11890 else if (offset
> 0)
11891 fprintf (file
, "+%d", offset
);
11894 output_addr_const (file
, x
);
11898 /* Output an assembler pseudo-op to write an ASCII string of N characters
11899 starting at P to FILE.
11901 On the RS/6000, we have to do this using the .byte operation and
11902 write out special characters outside the quoted string.
11903 Also, the assembler is broken; very long strings are truncated,
11904 so we must artificially break them up early. */
/* Quoted runs are flushed after 512 characters (see the count_string
   check) to dodge the assembler truncation mentioned above; `c' is the
   current character, declared on a line the extraction dropped.  */
11907 output_ascii (file
, p
, n
)
11913 int i
, count_string
;
11914 const char *for_string
= "\t.byte \"";
11915 const char *for_decimal
= "\t.byte ";
11916 const char *to_close
= NULL
;
11919 for (i
= 0; i
< n
; i
++)
/* Printable characters go inside a quoted ".byte" string...  */
11922 if (c
>= ' ' && c
< 0177)
11925 fputs (for_string
, file
);
11928 /* Write two quotes to get one. */
11936 for_decimal
= "\"\n\t.byte ";
11940 if (count_string
>= 512)
11942 fputs (to_close
, file
);
11944 for_string
= "\t.byte \"";
11945 for_decimal
= "\t.byte ";
/* ...while non-printable characters are emitted as decimal bytes.  */
11953 fputs (for_decimal
, file
);
11954 fprintf (file
, "%d", c
);
11956 for_string
= "\n\t.byte \"";
11957 for_decimal
= ", ";
11963 /* Now close the string if we have written one. Then end the line. */
11965 fputs (to_close
, file
);
11968 /* Generate a unique section name for FILENAME for a section type
11969 represented by SECTION_DESC. Output goes into BUF.
11971 SECTION_DESC can be any string, as long as it is different for each
11972 possible section type.
11974 We name the section in the same manner as xlc. The name begins with an
11975 underscore followed by the filename (after stripping any leading directory
11976 names) with the last period replaced by the string SECTION_DESC. If
11977 FILENAME does not contain a period, SECTION_DESC is appended to the end of
/* NOTE(review): *BUF receives a freshly xmalloc'd string; the caller owns
   it and must free it.  The `p' cursor into that buffer is declared on a
   line the extraction dropped.  */
11981 rs6000_gen_section_name (buf
, filename
, section_desc
)
11983 const char *filename
;
11984 const char *section_desc
;
11986 const char *q
, *after_last_slash
, *last_period
= 0;
/* First pass: find the basename start and remember the last '.'.  */
11990 after_last_slash
= filename
;
11991 for (q
= filename
; *q
; q
++)
11994 after_last_slash
= q
+ 1;
11995 else if (*q
== '.')
11999 len
= strlen (after_last_slash
) + strlen (section_desc
) + 2;
12000 *buf
= (char *) xmalloc (len
);
/* Second pass: copy the basename, splicing SECTION_DESC in place of the
   last period (alphanumerics copied, other characters handled on dropped
   lines).  */
12005 for (q
= after_last_slash
; *q
; q
++)
12007 if (q
== last_period
)
12009 strcpy (p
, section_desc
);
12010 p
+= strlen (section_desc
);
12013 else if (ISALNUM (*q
))
/* No period at all: append SECTION_DESC at the end.  */
12017 if (last_period
== 0)
12018 strcpy (p
, section_desc
);
12023 /* Emit profile function. */
/* Emits the RTL library call to RS6000_MCOUNT for the current ABI: AIX
   passes the address of a per-function "LP" counter label (unless
   NO_PROFILE_COUNTERS), Darwin passes the caller's address collected from
   the register the prologue stored it in.  */
12026 output_profile_hook (labelno
)
12027 int labelno ATTRIBUTE_UNUSED
;
12029 if (DEFAULT_ABI
== ABI_AIX
)
12031 #ifdef NO_PROFILE_COUNTERS
12032 emit_library_call (init_one_libfunc (RS6000_MCOUNT
), 0, VOIDmode
, 0);
12035 const char *label_name
;
12038 ASM_GENERATE_INTERNAL_LABEL (buf
, "LP", labelno
);
12039 label_name
= (*targetm
.strip_name_encoding
) (ggc_strdup (buf
));
12040 fun
= gen_rtx_SYMBOL_REF (Pmode
, label_name
);
12042 emit_library_call (init_one_libfunc (RS6000_MCOUNT
), 0, VOIDmode
, 1,
12046 else if (DEFAULT_ABI
== ABI_DARWIN
)
12048 const char *mcount_name
= RS6000_MCOUNT
;
12049 int caller_addr_regno
= LINK_REGISTER_REGNUM
;
12051 /* Be conservative and always set this, at least for now. */
12052 current_function_uses_pic_offset_table
= 1;
12055 /* For PIC code, set up a stub and collect the caller's address
12056 from r0, which is where the prologue puts it. */
12059 mcount_name
= machopic_stub_name (mcount_name
);
12060 if (current_function_uses_pic_offset_table
)
12061 caller_addr_regno
= 0;
12064 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, mcount_name
),
12066 gen_rtx_REG (Pmode
, caller_addr_regno
), Pmode
);
12070 /* Write function profiler code. */
/* Prints the -p/-pg profiler prologue directly as text to FILE for label
   number LABELNO; the sequence varies by ABI and, for V.4/eABI-style code,
   by flag_pic.  NOTE(review): several case labels, braces and an fprintf
   argument or two were dropped by the extraction -- consult the original
   rs6000.c before editing this switch.  */
12073 output_function_profiler (file
, labelno
)
12080 ASM_GENERATE_INTERNAL_LABEL (buf
, "LP", labelno
);
12081 switch (DEFAULT_ABI
)
12088 /* Fall through. */
12090 case ABI_AIX_NODESC
:
12093 warning ("no profiling of 64-bit code for this ABI");
/* Save LR, then materialize the address of the "LP" counter label in r0
   via one of three strategies: GOT load (small PIC), pc-relative literal
   (big PIC), or an absolute lis/la pair (non-PIC).  */
12096 fprintf (file
, "\tmflr %s\n", reg_names
[0]);
12099 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file
);
12100 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12101 reg_names
[0], save_lr
, reg_names
[1]);
12102 asm_fprintf (file
, "\tmflr %s\n", reg_names
[12]);
12103 asm_fprintf (file
, "\t{l|lwz} %s,", reg_names
[0]);
12104 assemble_name (file
, buf
);
12105 asm_fprintf (file
, "@got(%s)\n", reg_names
[12]);
12107 else if (flag_pic
> 1)
12109 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12110 reg_names
[0], save_lr
, reg_names
[1]);
12111 /* Now, we need to get the address of the label. */
12112 fputs ("\tbl 1f\n\t.long ", file
);
12113 assemble_name (file
, buf
);
12114 fputs ("-.\n1:", file
);
12115 asm_fprintf (file
, "\tmflr %s\n", reg_names
[11]);
12116 asm_fprintf (file
, "\t{l|lwz} %s,0(%s)\n",
12117 reg_names
[0], reg_names
[11]);
12118 asm_fprintf (file
, "\t{cax|add} %s,%s,%s\n",
12119 reg_names
[0], reg_names
[0], reg_names
[11]);
12123 asm_fprintf (file
, "\t{liu|lis} %s,", reg_names
[12]);
12124 assemble_name (file
, buf
);
12125 fputs ("@ha\n", file
);
12126 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12127 reg_names
[0], save_lr
, reg_names
[1]);
12128 asm_fprintf (file
, "\t{cal|la} %s,", reg_names
[0]);
12129 assemble_name (file
, buf
);
12130 asm_fprintf (file
, "@l(%s)\n", reg_names
[12]);
/* The static chain register must survive the mcount call on AIX_NODESC;
   spill it around the bl.  */
12133 if (current_function_needs_context
&& DEFAULT_ABI
== ABI_AIX_NODESC
)
12135 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12136 reg_names
[STATIC_CHAIN_REGNUM
],
12138 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
12139 asm_fprintf (file
, "\t{l|lwz} %s,%d(%s)\n",
12140 reg_names
[STATIC_CHAIN_REGNUM
],
12144 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
12145 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
12150 /* Don't do anything, done in output_profile_hook (). */
12155 /* Adjust the cost of a scheduling dependency. Return the new cost of
12156 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
/* TARGET_SCHED_ADJUST_COST hook.  NOTE(review): the early-return bodies
   and case labels (TYPE_JMPREG, the branch case) sit on lines the
   extraction dropped; the visible text alone does not compile.  */
12159 rs6000_adjust_cost (insn
, link
, dep_insn
, cost
)
12162 rtx dep_insn ATTRIBUTE_UNUSED
;
/* Unrecognizable insns keep the default cost.  */
12165 if (! recog_memoized (insn
))
))
/* Non-zero REG_NOTE_KIND means an anti/output dependency, handled before
   the true-dependency switch below.  */
12168 if (REG_NOTE_KIND (link
) != 0)
12171 if (REG_NOTE_KIND (link
) == 0)
12173 /* Data dependency; DEP_INSN writes a register that INSN reads
12174 some cycles later. */
12175 switch (get_attr_type (insn
))
12178 /* Tell the first scheduling pass about the latency between
12179 a mtctr and bctr (and mtlr and br/blr). The first
12180 scheduling pass will not know about this latency since
12181 the mtctr instruction, which has the latency associated
12182 to it, will be generated by reload. */
12183 return TARGET_POWER
? 5 : 4;
12185 /* Leave some extra cycles between a compare and its
12186 dependent branch, to inhibit expensive mispredicts. */
12187 if ((rs6000_cpu_attr
== CPU_PPC603
12188 || rs6000_cpu_attr
== CPU_PPC604
12189 || rs6000_cpu_attr
== CPU_PPC604E
12190 || rs6000_cpu_attr
== CPU_PPC620
12191 || rs6000_cpu_attr
== CPU_PPC630
12192 || rs6000_cpu_attr
== CPU_PPC750
12193 || rs6000_cpu_attr
== CPU_PPC7400
12194 || rs6000_cpu_attr
== CPU_PPC7450
12195 || rs6000_cpu_attr
== CPU_POWER4
)
12196 && recog_memoized (dep_insn
)
12197 && (INSN_CODE (dep_insn
) >= 0)
12198 && (get_attr_type (dep_insn
) == TYPE_COMPARE
12199 || get_attr_type (dep_insn
) == TYPE_DELAYED_COMPARE
12200 || get_attr_type (dep_insn
) == TYPE_FPCOMPARE
12201 || get_attr_type (dep_insn
) == TYPE_CR_LOGICAL
))
12206 /* Fall out to return default cost. */
12212 /* A C statement (sans semicolon) to update the integer scheduling
12213 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
12214 INSN earlier, increase the priority to execute INSN later. Do not
12215 define this macro if you do not need to adjust the scheduling
12216 priorities of insns. */
12219 rs6000_adjust_priority (insn
, priority
)
12220 rtx insn ATTRIBUTE_UNUSED
;
12223 /* On machines (like the 750) which have asymmetric integer units,
12224 where one integer unit can do multiply and divides and the other
12225 can't, reduce the priority of multiply/divide so it is scheduled
12226 before other integer operations. */
12229 if (! INSN_P (insn
))
12232 if (GET_CODE (PATTERN (insn
)) == USE
)
12235 switch (rs6000_cpu_attr
) {
12237 switch (get_attr_type (insn
))
12244 fprintf (stderr
, "priority was %#x (%d) before adjustment\n",
12245 priority
, priority
);
12246 if (priority
>= 0 && priority
< 0x01000000)
12256 /* Return how many instructions the machine can issue per cycle. */
12259 rs6000_issue_rate ()
12261 switch (rs6000_cpu_attr
) {
12262 case CPU_RIOS1
: /* ? */
12264 case CPU_PPC601
: /* ? */
12284 /* Length in units of the trampoline for entering a nested function. */
12287 rs6000_trampoline_size ()
12291 switch (DEFAULT_ABI
)
12297 ret
= (TARGET_32BIT
) ? 12 : 24;
12302 case ABI_AIX_NODESC
:
12303 ret
= (TARGET_32BIT
) ? 40 : 48;
12310 /* Emit RTL insns to initialize the variable parts of a trampoline.
12311 FNADDR is an RTX for the address of the function's pure code.
12312 CXT is an RTX for the static chain value for the function. */
12315 rs6000_initialize_trampoline (addr
, fnaddr
, cxt
)
12320 enum machine_mode pmode
= Pmode
;
12321 int regsize
= (TARGET_32BIT
) ? 4 : 8;
12322 rtx ctx_reg
= force_reg (pmode
, cxt
);
12324 switch (DEFAULT_ABI
)
12329 /* Macros to shorten the code expansions below. */
12330 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
12331 #define MEM_PLUS(addr,offset) \
12332 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
12334 /* Under AIX, just build the 3 word function descriptor */
12337 rtx fn_reg
= gen_reg_rtx (pmode
);
12338 rtx toc_reg
= gen_reg_rtx (pmode
);
12339 emit_move_insn (fn_reg
, MEM_DEREF (fnaddr
));
12340 emit_move_insn (toc_reg
, MEM_PLUS (fnaddr
, regsize
));
12341 emit_move_insn (MEM_DEREF (addr
), fn_reg
);
12342 emit_move_insn (MEM_PLUS (addr
, regsize
), toc_reg
);
12343 emit_move_insn (MEM_PLUS (addr
, 2*regsize
), ctx_reg
);
12347 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
12350 case ABI_AIX_NODESC
:
12351 emit_library_call (gen_rtx_SYMBOL_REF (SImode
, "__trampoline_setup"),
12352 FALSE
, VOIDmode
, 4,
12354 GEN_INT (rs6000_trampoline_size ()), SImode
,
12364 /* Table of valid machine attributes. */
12366 const struct attribute_spec rs6000_attribute_table
[] =
12368 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
12369 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute
},
12370 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute
},
12371 { NULL
, 0, 0, false, false, false, NULL
}
12374 /* Handle a "longcall" or "shortcall" attribute; arguments as in
12375 struct attribute_spec.handler. */
12378 rs6000_handle_longcall_attribute (node
, name
, args
, flags
, no_add_attrs
)
12381 tree args ATTRIBUTE_UNUSED
;
12382 int flags ATTRIBUTE_UNUSED
;
12383 bool *no_add_attrs
;
12385 if (TREE_CODE (*node
) != FUNCTION_TYPE
12386 && TREE_CODE (*node
) != FIELD_DECL
12387 && TREE_CODE (*node
) != TYPE_DECL
)
12389 warning ("`%s' attribute only applies to functions",
12390 IDENTIFIER_POINTER (name
));
12391 *no_add_attrs
= true;
12397 /* Set longcall attributes on all functions declared when
12398 rs6000_default_long_calls is true. */
12400 rs6000_set_default_type_attributes (type
)
12403 if (rs6000_default_long_calls
12404 && (TREE_CODE (type
) == FUNCTION_TYPE
12405 || TREE_CODE (type
) == METHOD_TYPE
))
12406 TYPE_ATTRIBUTES (type
) = tree_cons (get_identifier ("longcall"),
12408 TYPE_ATTRIBUTES (type
));
12411 /* Return a reference suitable for calling a function with the
12412 longcall attribute. */
12415 rs6000_longcall_ref (call_ref
)
12418 const char *call_name
;
12421 if (GET_CODE (call_ref
) != SYMBOL_REF
)
12424 /* System V adds '.' to the internal name, so skip them. */
12425 call_name
= XSTR (call_ref
, 0);
12426 if (*call_name
== '.')
12428 while (*call_name
== '.')
12431 node
= get_identifier (call_name
);
12432 call_ref
= gen_rtx_SYMBOL_REF (VOIDmode
, IDENTIFIER_POINTER (node
));
12435 return force_reg (Pmode
, call_ref
);
12439 #ifdef USING_ELFOS_H
12441 /* A C statement or statements to switch to the appropriate section
12442 for output of RTX in mode MODE. You can assume that RTX is some
12443 kind of constant in RTL. The argument MODE is redundant except in
12444 the case of a `const_int' rtx. Select the section by calling
12445 `text_section' or one of the alternatives for other sections.
12447 Do not define this macro if you put all constants in the read-only
12451 rs6000_elf_select_rtx_section (mode
, x
, align
)
12452 enum machine_mode mode
;
12454 unsigned HOST_WIDE_INT align
;
12456 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x
, mode
))
12459 default_elf_select_rtx_section (mode
, x
, align
);
12462 /* A C statement or statements to switch to the appropriate
12463 section for output of DECL. DECL is either a `VAR_DECL' node
12464 or a constant of some sort. RELOC indicates whether forming
12465 the initial value of DECL requires link-time relocations. */
12468 rs6000_elf_select_section (decl
, reloc
, align
)
12471 unsigned HOST_WIDE_INT align
;
12473 default_elf_select_section_1 (decl
, reloc
, align
,
12474 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
12477 /* A C statement to build up a unique section name, expressed as a
12478 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
12479 RELOC indicates whether the initial value of EXP requires
12480 link-time relocations. If you do not define this macro, GCC will use
12481 the symbol name prefixed by `.' as the section name. Note - this
12482 macro can now be called for uninitialized data items as well as
12483 initialized data and functions. */
12486 rs6000_elf_unique_section (decl
, reloc
)
12490 default_unique_section_1 (decl
, reloc
,
12491 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
12495 /* If we are referencing a function that is static or is known to be
12496 in this file, make the SYMBOL_REF special. We can use this to indicate
12497 that we can branch to this function without emitting a no-op after the
12498 call. For real AIX calling sequences, we also replace the
12499 function name with the real name (1 or 2 leading .'s), rather than
12500 the function descriptor name. This saves a lot of overriding code
12501 to read the prefixes. */
12504 rs6000_elf_encode_section_info (decl
, first
)
12511 if (TREE_CODE (decl
) == FUNCTION_DECL
)
12513 rtx sym_ref
= XEXP (DECL_RTL (decl
), 0);
12514 if ((*targetm
.binds_local_p
) (decl
))
12515 SYMBOL_REF_FLAG (sym_ref
) = 1;
12517 if (DEFAULT_ABI
== ABI_AIX
)
12519 size_t len1
= (DEFAULT_ABI
== ABI_AIX
) ? 1 : 2;
12520 size_t len2
= strlen (XSTR (sym_ref
, 0));
12521 char *str
= alloca (len1
+ len2
+ 1);
12524 memcpy (str
+ len1
, XSTR (sym_ref
, 0), len2
+ 1);
12526 XSTR (sym_ref
, 0) = ggc_alloc_string (str
, len1
+ len2
);
12529 else if (rs6000_sdata
!= SDATA_NONE
12530 && DEFAULT_ABI
== ABI_V4
12531 && TREE_CODE (decl
) == VAR_DECL
)
12533 rtx sym_ref
= XEXP (DECL_RTL (decl
), 0);
12534 int size
= int_size_in_bytes (TREE_TYPE (decl
));
12535 tree section_name
= DECL_SECTION_NAME (decl
);
12536 const char *name
= (char *)0;
12539 if ((*targetm
.binds_local_p
) (decl
))
12540 SYMBOL_REF_FLAG (sym_ref
) = 1;
12544 if (TREE_CODE (section_name
) == STRING_CST
)
12546 name
= TREE_STRING_POINTER (section_name
);
12547 len
= TREE_STRING_LENGTH (section_name
);
12553 if ((size
> 0 && size
<= g_switch_value
)
12555 && ((len
== sizeof (".sdata") - 1
12556 && strcmp (name
, ".sdata") == 0)
12557 || (len
== sizeof (".sdata2") - 1
12558 && strcmp (name
, ".sdata2") == 0)
12559 || (len
== sizeof (".sbss") - 1
12560 && strcmp (name
, ".sbss") == 0)
12561 || (len
== sizeof (".sbss2") - 1
12562 && strcmp (name
, ".sbss2") == 0)
12563 || (len
== sizeof (".PPC.EMB.sdata0") - 1
12564 && strcmp (name
, ".PPC.EMB.sdata0") == 0)
12565 || (len
== sizeof (".PPC.EMB.sbss0") - 1
12566 && strcmp (name
, ".PPC.EMB.sbss0") == 0))))
12568 size_t len
= strlen (XSTR (sym_ref
, 0));
12569 char *str
= alloca (len
+ 2);
12572 memcpy (str
+ 1, XSTR (sym_ref
, 0), len
+ 1);
12573 XSTR (sym_ref
, 0) = ggc_alloc_string (str
, len
+ 1);
12578 static const char *
12579 rs6000_elf_strip_name_encoding (str
)
12582 while (*str
== '*' || *str
== '@')
12588 rs6000_elf_in_small_data_p (decl
)
12591 if (rs6000_sdata
== SDATA_NONE
)
12594 if (TREE_CODE (decl
) == VAR_DECL
&& DECL_SECTION_NAME (decl
))
12596 const char *section
= TREE_STRING_POINTER (DECL_SECTION_NAME (decl
));
12597 if (strcmp (section
, ".sdata") == 0
12598 || strcmp (section
, ".sdata2") == 0
12599 || strcmp (section
, ".sbss") == 0)
12604 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (decl
));
12607 && size
<= g_switch_value
12608 && (rs6000_sdata
!= SDATA_DATA
|| TREE_PUBLIC (decl
)))
12615 #endif /* USING_ELFOS_H */
12618 /* Return a REG that occurs in ADDR with coefficient 1.
12619 ADDR can be effectively incremented by incrementing REG.
12621 r0 is special and we must not select it as an address
12622 register by this routine since our caller will try to
12623 increment the returned register via an "la" instruction. */
12626 find_addr_reg (addr
)
12629 while (GET_CODE (addr
) == PLUS
)
12631 if (GET_CODE (XEXP (addr
, 0)) == REG
12632 && REGNO (XEXP (addr
, 0)) != 0)
12633 addr
= XEXP (addr
, 0);
12634 else if (GET_CODE (XEXP (addr
, 1)) == REG
12635 && REGNO (XEXP (addr
, 1)) != 0)
12636 addr
= XEXP (addr
, 1);
12637 else if (CONSTANT_P (XEXP (addr
, 0)))
12638 addr
= XEXP (addr
, 1);
12639 else if (CONSTANT_P (XEXP (addr
, 1)))
12640 addr
= XEXP (addr
, 0);
12644 if (GET_CODE (addr
) == REG
&& REGNO (addr
) != 0)
12650 rs6000_fatal_bad_address (op
)
12653 fatal_insn ("bad address", op
);
12659 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
12660 reference and a constant. */
12663 symbolic_operand (op
)
12666 switch (GET_CODE (op
))
12673 return (GET_CODE (op
) == SYMBOL_REF
||
12674 (GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
12675 || GET_CODE (XEXP (op
, 0)) == LABEL_REF
)
12676 && GET_CODE (XEXP (op
, 1)) == CONST_INT
);
12683 #ifdef RS6000_LONG_BRANCH
12685 static tree stub_list
= 0;
12687 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
12688 procedure calls to the linked list. */
12691 add_compiler_stub (label_name
, function_name
, line_number
)
12693 tree function_name
;
12696 tree stub
= build_tree_list (function_name
, label_name
);
12697 TREE_TYPE (stub
) = build_int_2 (line_number
, 0);
12698 TREE_CHAIN (stub
) = stub_list
;
12702 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
12703 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
12704 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
12706 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
12707 handling procedure calls from the linked list and initializes the
12711 output_compiler_stub ()
12714 char label_buf
[256];
12718 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
12720 fprintf (asm_out_file
,
12721 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub
)));
12723 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
12724 if (write_symbols
== DBX_DEBUG
|| write_symbols
== XCOFF_DEBUG
)
12725 fprintf (asm_out_file
, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub
));
12726 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
12728 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
))[0] == '*')
12730 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
))+1);
12733 label_buf
[0] = '_';
12734 strcpy (label_buf
+1,
12735 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
)));
12738 strcpy (tmp_buf
, "lis r12,hi16(");
12739 strcat (tmp_buf
, label_buf
);
12740 strcat (tmp_buf
, ")\n\tori r12,r12,lo16(");
12741 strcat (tmp_buf
, label_buf
);
12742 strcat (tmp_buf
, ")\n\tmtctr r12\n\tbctr");
12743 output_asm_insn (tmp_buf
, 0);
12745 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
12746 if (write_symbols
== DBX_DEBUG
|| write_symbols
== XCOFF_DEBUG
)
12747 fprintf(asm_out_file
, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub
));
12748 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
12754 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
12755 already there or not. */
12758 no_previous_def (function_name
)
12759 tree function_name
;
12762 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
12763 if (function_name
== STUB_FUNCTION_NAME (stub
))
12768 /* GET_PREV_LABEL gets the label name from the previous definition of
12772 get_prev_label (function_name
)
12773 tree function_name
;
12776 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
12777 if (function_name
== STUB_FUNCTION_NAME (stub
))
12778 return STUB_LABEL_NAME (stub
);
12782 /* INSN is either a function call or a millicode call. It may have an
12783 unconditional jump in its delay slot.
12785 CALL_DEST is the routine we are calling. */
12788 output_call (insn
, call_dest
, operand_number
)
12791 int operand_number
;
12793 static char buf
[256];
12794 if (GET_CODE (call_dest
) == SYMBOL_REF
&& TARGET_LONG_BRANCH
&& !flag_pic
)
12797 tree funname
= get_identifier (XSTR (call_dest
, 0));
12799 if (no_previous_def (funname
))
12801 int line_number
= 0;
12802 rtx label_rtx
= gen_label_rtx ();
12803 char *label_buf
, temp_buf
[256];
12804 ASM_GENERATE_INTERNAL_LABEL (temp_buf
, "L",
12805 CODE_LABEL_NUMBER (label_rtx
));
12806 label_buf
= temp_buf
[0] == '*' ? temp_buf
+ 1 : temp_buf
;
12807 labelname
= get_identifier (label_buf
);
12808 for (; insn
&& GET_CODE (insn
) != NOTE
; insn
= PREV_INSN (insn
));
12810 line_number
= NOTE_LINE_NUMBER (insn
);
12811 add_compiler_stub (labelname
, funname
, line_number
);
12814 labelname
= get_prev_label (funname
);
12816 sprintf (buf
, "jbsr %%z%d,%.246s",
12817 operand_number
, IDENTIFIER_POINTER (labelname
));
12822 sprintf (buf
, "bl %%z%d", operand_number
);
12827 #endif /* RS6000_LONG_BRANCH */
12829 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
12831 const char *const symbol_ = (SYMBOL); \
12832 char *buffer_ = (BUF); \
12833 if (symbol_[0] == '"') \
12835 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
12837 else if (name_needs_quotes(symbol_)) \
12839 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
12843 sprintf(buffer_, "L%d$%s", (N), symbol_); \
12848 /* Generate PIC and indirect symbol stubs. */
12851 machopic_output_stub (file
, symb
, stub
)
12853 const char *symb
, *stub
;
12855 unsigned int length
;
12856 char *symbol_name
, *lazy_ptr_name
;
12857 char *local_label_0
;
12858 static int label
= 0;
12860 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
12861 symb
= (*targetm
.strip_name_encoding
) (symb
);
12865 length
= strlen (symb
);
12866 symbol_name
= alloca (length
+ 32);
12867 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name
, symb
, length
);
12869 lazy_ptr_name
= alloca (length
+ 32);
12870 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name
, symb
, length
);
12872 local_label_0
= alloca (length
+ 32);
12873 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0
, symb
, length
, 0);
12876 machopic_picsymbol_stub1_section ();
12878 machopic_symbol_stub1_section ();
12879 fprintf (file
, "\t.align 2\n");
12881 fprintf (file
, "%s:\n", stub
);
12882 fprintf (file
, "\t.indirect_symbol %s\n", symbol_name
);
12886 fprintf (file
, "\tmflr r0\n");
12887 fprintf (file
, "\tbcl 20,31,%s\n", local_label_0
);
12888 fprintf (file
, "%s:\n\tmflr r11\n", local_label_0
);
12889 fprintf (file
, "\taddis r11,r11,ha16(%s-%s)\n",
12890 lazy_ptr_name
, local_label_0
);
12891 fprintf (file
, "\tmtlr r0\n");
12892 fprintf (file
, "\tlwzu r12,lo16(%s-%s)(r11)\n",
12893 lazy_ptr_name
, local_label_0
);
12894 fprintf (file
, "\tmtctr r12\n");
12895 fprintf (file
, "\tbctr\n");
12898 fprintf (file
, "non-pure not supported\n");
12900 machopic_lazy_symbol_ptr_section ();
12901 fprintf (file
, "%s:\n", lazy_ptr_name
);
12902 fprintf (file
, "\t.indirect_symbol %s\n", symbol_name
);
12903 fprintf (file
, "\t.long dyld_stub_binding_helper\n");
12906 /* Legitimize PIC addresses. If the address is already
12907 position-independent, we return ORIG. Newly generated
12908 position-independent addresses go into a reg. This is REG if non
12909 zero, otherwise we allocate register(s) as necessary. */
12911 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
12914 rs6000_machopic_legitimize_pic_address (orig
, mode
, reg
)
12916 enum machine_mode mode
;
12921 if (reg
== NULL
&& ! reload_in_progress
&& ! reload_completed
)
12922 reg
= gen_reg_rtx (Pmode
);
12924 if (GET_CODE (orig
) == CONST
)
12926 if (GET_CODE (XEXP (orig
, 0)) == PLUS
12927 && XEXP (XEXP (orig
, 0), 0) == pic_offset_table_rtx
)
12930 if (GET_CODE (XEXP (orig
, 0)) == PLUS
)
12933 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig
, 0), 0),
12936 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig
, 0), 1),
12942 if (GET_CODE (offset
) == CONST_INT
)
12944 if (SMALL_INT (offset
))
12945 return plus_constant (base
, INTVAL (offset
));
12946 else if (! reload_in_progress
&& ! reload_completed
)
12947 offset
= force_reg (Pmode
, offset
);
12950 rtx mem
= force_const_mem (Pmode
, orig
);
12951 return machopic_legitimize_pic_address (mem
, Pmode
, reg
);
12954 return gen_rtx (PLUS
, Pmode
, base
, offset
);
12957 /* Fall back on generic machopic code. */
12958 return machopic_legitimize_pic_address (orig
, mode
, reg
);
12961 /* This is just a placeholder to make linking work without having to
12962 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
12963 ever needed for Darwin (not too likely!) this would have to get a
12964 real definition. */
12971 #endif /* TARGET_MACHO */
12974 static unsigned int
12975 rs6000_elf_section_type_flags (decl
, name
, reloc
)
12981 = default_section_type_flags_1 (decl
, name
, reloc
,
12982 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
12984 if (TARGET_RELOCATABLE
)
12985 flags
|= SECTION_WRITE
;
12990 /* Record an element in the table of global constructors. SYMBOL is
12991 a SYMBOL_REF of the function to be called; PRIORITY is a number
12992 between 0 and MAX_INIT_PRIORITY.
12994 This differs from default_named_section_asm_out_constructor in
12995 that we have special handling for -mrelocatable. */
12998 rs6000_elf_asm_out_constructor (symbol
, priority
)
13002 const char *section
= ".ctors";
13005 if (priority
!= DEFAULT_INIT_PRIORITY
)
13007 sprintf (buf
, ".ctors.%.5u",
13008 /* Invert the numbering so the linker puts us in the proper
13009 order; constructors are run from right to left, and the
13010 linker sorts in increasing order. */
13011 MAX_INIT_PRIORITY
- priority
);
13015 named_section_flags (section
, SECTION_WRITE
);
13016 assemble_align (POINTER_SIZE
);
13018 if (TARGET_RELOCATABLE
)
13020 fputs ("\t.long (", asm_out_file
);
13021 output_addr_const (asm_out_file
, symbol
);
13022 fputs (")@fixup\n", asm_out_file
);
13025 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
13029 rs6000_elf_asm_out_destructor (symbol
, priority
)
13033 const char *section
= ".dtors";
13036 if (priority
!= DEFAULT_INIT_PRIORITY
)
13038 sprintf (buf
, ".dtors.%.5u",
13039 /* Invert the numbering so the linker puts us in the proper
13040 order; constructors are run from right to left, and the
13041 linker sorts in increasing order. */
13042 MAX_INIT_PRIORITY
- priority
);
13046 named_section_flags (section
, SECTION_WRITE
);
13047 assemble_align (POINTER_SIZE
);
13049 if (TARGET_RELOCATABLE
)
13051 fputs ("\t.long (", asm_out_file
);
13052 output_addr_const (asm_out_file
, symbol
);
13053 fputs (")@fixup\n", asm_out_file
);
13056 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
13062 rs6000_xcoff_asm_globalize_label (stream
, name
)
13066 fputs (GLOBAL_ASM_OP
, stream
);
13067 RS6000_OUTPUT_BASENAME (stream
, name
);
13068 putc ('\n', stream
);
13072 rs6000_xcoff_asm_named_section (name
, flags
)
13074 unsigned int flags
;
13077 static const char * const suffix
[3] = { "PR", "RO", "RW" };
13079 if (flags
& SECTION_CODE
)
13081 else if (flags
& SECTION_WRITE
)
13086 fprintf (asm_out_file
, "\t.csect %s%s[%s],%u\n",
13087 (flags
& SECTION_CODE
) ? "." : "",
13088 name
, suffix
[smclass
], flags
& SECTION_ENTSIZE
);
13092 rs6000_xcoff_select_section (decl
, reloc
, align
)
13095 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
;
13097 if (decl_readonly_section_1 (decl
, reloc
, 1))
13099 if (TREE_PUBLIC (decl
))
13100 read_only_data_section ();
13102 read_only_private_data_section ();
13106 if (TREE_PUBLIC (decl
))
13109 private_data_section ();
13114 rs6000_xcoff_unique_section (decl
, reloc
)
13116 int reloc ATTRIBUTE_UNUSED
;
13120 /* Use select_section for private and uninitialized data. */
13121 if (!TREE_PUBLIC (decl
)
13122 || DECL_COMMON (decl
)
13123 || DECL_INITIAL (decl
) == NULL_TREE
13124 || DECL_INITIAL (decl
) == error_mark_node
13125 || (flag_zero_initialized_in_bss
13126 && initializer_zerop (DECL_INITIAL (decl
))))
13129 name
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
));
13130 name
= (*targetm
.strip_name_encoding
) (name
);
13131 DECL_SECTION_NAME (decl
) = build_string (strlen (name
), name
);
13134 /* Select section for constant in constant pool.
13136 On RS/6000, all constants are in the private read-only data area.
13137 However, if this is being placed in the TOC it must be output as a
13141 rs6000_xcoff_select_rtx_section (mode
, x
, align
)
13142 enum machine_mode mode
;
13144 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
;
13146 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x
, mode
))
13149 read_only_private_data_section ();
13152 /* Remove any trailing [DS] or the like from the symbol name. */
13154 static const char *
13155 rs6000_xcoff_strip_name_encoding (name
)
13161 len
= strlen (name
);
13162 if (name
[len
- 1] == ']')
13163 return ggc_alloc_string (name
, len
- 4);
13168 /* Section attributes. AIX is always PIC. */
13170 static unsigned int
13171 rs6000_xcoff_section_type_flags (decl
, name
, reloc
)
13176 unsigned int align
;
13177 unsigned int flags
= default_section_type_flags_1 (decl
, name
, reloc
, 1);
13179 /* Align to at least UNIT size. */
13180 if (flags
& SECTION_CODE
)
13181 align
= MIN_UNITS_PER_WORD
;
13183 /* Increase alignment of large objects if not already stricter. */
13184 align
= MAX ((DECL_ALIGN (decl
) / BITS_PER_UNIT
),
13185 int_size_in_bytes (TREE_TYPE (decl
)) > MIN_UNITS_PER_WORD
13186 ? UNITS_PER_FP_WORD
: MIN_UNITS_PER_WORD
);
13188 return flags
| (exact_log2 (align
) & SECTION_ENTSIZE
);
13191 #endif /* TARGET_XCOFF */
13193 /* Note that this is also used for PPC64 Linux. */
13196 rs6000_xcoff_encode_section_info (decl
, first
)
13198 int first ATTRIBUTE_UNUSED
;
13200 if (TREE_CODE (decl
) == FUNCTION_DECL
13201 && (*targetm
.binds_local_p
) (decl
))
13202 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl
), 0)) = 1;
13205 /* Cross-module name binding. For AIX and PPC64 Linux, which always are
13206 PIC, use private copy of flag_pic. */
13209 rs6000_binds_local_p (decl
)
13212 return default_binds_local_p_1 (decl
, flag_pic
|| rs6000_flag_pic
);
13215 /* A C expression returning the cost of moving data from a register of class
13216 CLASS1 to one of CLASS2. */
13219 rs6000_register_move_cost (mode
, from
, to
)
13220 enum machine_mode mode
;
13221 enum reg_class from
, to
;
13223 /* Moves from/to GENERAL_REGS. */
13224 if (reg_classes_intersect_p (to
, GENERAL_REGS
)
13225 || reg_classes_intersect_p (from
, GENERAL_REGS
))
13227 if (! reg_classes_intersect_p (to
, GENERAL_REGS
))
13230 if (from
== FLOAT_REGS
|| from
== ALTIVEC_REGS
)
13231 return (rs6000_memory_move_cost (mode
, from
, 0)
13232 + rs6000_memory_move_cost (mode
, GENERAL_REGS
, 0));
13234 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift...*/
13235 else if (from
== CR_REGS
)
13239 /* A move will cost one instruction per GPR moved. */
13240 return 2 * HARD_REGNO_NREGS (0, mode
);
13243 /* Moving between two similar registers is just one instruction. */
13244 else if (reg_classes_intersect_p (to
, from
))
13245 return mode
== TFmode
? 4 : 2;
13247 /* Everything else has to go through GENERAL_REGS. */
13249 return (rs6000_register_move_cost (mode
, GENERAL_REGS
, to
)
13250 + rs6000_register_move_cost (mode
, from
, GENERAL_REGS
));
13253 /* A C expressions returning the cost of moving data of MODE from a register to
13257 rs6000_memory_move_cost (mode
, class, in
)
13258 enum machine_mode mode
;
13259 enum reg_class
class;
13260 int in ATTRIBUTE_UNUSED
;
13262 if (reg_classes_intersect_p (class, GENERAL_REGS
))
13263 return 4 * HARD_REGNO_NREGS (0, mode
);
13264 else if (reg_classes_intersect_p (class, FLOAT_REGS
))
13265 return 4 * HARD_REGNO_NREGS (32, mode
);
13266 else if (reg_classes_intersect_p (class, ALTIVEC_REGS
))
13267 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO
, mode
);
13269 return 4 + rs6000_register_move_cost (mode
, class, GENERAL_REGS
);
13272 #include "gt-rs6000.h"