1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
41 #include "basic-block.h"
42 #include "integrate.h"
48 #include "target-def.h"
49 #include "langhooks.h"
52 #ifndef TARGET_NO_PROTOTYPE
53 #define TARGET_NO_PROTOTYPE 0
/* Simple min/max helpers.  NOTE: each argument is evaluated twice, so
   A and B must be free of side effects (no increments, no function
   calls with effects).  */
56 #define min(A,B) ((A) < (B) ? (A) : (B))
57 #define max(A,B) ((A) > (B) ? (A) : (B))
61 enum processor_type rs6000_cpu
;
62 struct rs6000_cpu_select rs6000_select
[3] =
64 /* switch name, tune arch */
65 { (const char *)0, "--with-cpu=", 1, 1 },
66 { (const char *)0, "-mcpu=", 1, 1 },
67 { (const char *)0, "-mtune=", 1, 0 },
70 /* Size of long double */
71 const char *rs6000_long_double_size_string
;
72 int rs6000_long_double_type_size
;
74 /* Whether -mabi=altivec has appeared */
75 int rs6000_altivec_abi
;
77 /* Whether VRSAVE instructions should be generated. */
78 int rs6000_altivec_vrsave
;
80 /* String from -mvrsave= option. */
81 const char *rs6000_altivec_vrsave_string
;
83 /* Nonzero if we want SPE ABI extensions. */
86 /* Whether isel instructions should be generated. */
89 /* Nonzero if we have FPRs. */
92 /* String from -misel=. */
93 const char *rs6000_isel_string
;
95 /* Set to nonzero once AIX common-mode calls have been defined. */
96 static int common_mode_defined
;
98 /* Private copy of original value of flag_pic for ABI_AIX. */
99 static int rs6000_flag_pic
;
101 /* Save information from a "cmpxx" operation until the branch or scc is
103 rtx rs6000_compare_op0
, rs6000_compare_op1
;
104 int rs6000_compare_fp_p
;
106 /* Label number of the label created for -mrelocatable, which we call
107 so that we can get the address of the GOT section */
108 int rs6000_pic_labelno
;
111 /* Which abi to adhere to */
112 const char *rs6000_abi_name
= RS6000_ABI_NAME
;
114 /* Semantics of the small data area */
115 enum rs6000_sdata_type rs6000_sdata
= SDATA_DATA
;
117 /* Which small data model to use */
118 const char *rs6000_sdata_name
= (char *)0;
120 /* Counter for labels which are to be placed in .fixup. */
121 int fixuplabelno
= 0;
124 /* ABI enumeration available for subtarget to use. */
125 enum rs6000_abi rs6000_current_abi
;
127 /* ABI string from -mabi= option. */
128 const char *rs6000_abi_string
;
131 const char *rs6000_debug_name
;
132 int rs6000_debug_stack
; /* debug stack applications */
133 int rs6000_debug_arg
; /* debug argument handling */
135 const char *rs6000_traceback_name
;
137 traceback_default
= 0,
143 /* Flag to say the TOC is initialized */
145 char toc_label_name
[10];
147 /* Alias set for saves and restores from the rs6000 stack. */
148 static int rs6000_sr_alias_set
;
150 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
151 The only place that looks at this is rs6000_set_default_type_attributes;
152 everywhere else should rely on the presence or absence of a longcall
153 attribute on the function declaration. */
154 int rs6000_default_long_calls
;
155 const char *rs6000_longcall_switch
;
157 struct builtin_description
159 /* mask is not const because we're going to alter it below. This
160 nonsense will go away when we rewrite the -march infrastructure
161 to give us more target flag bits. */
163 const enum insn_code icode
;
164 const char *const name
;
165 const enum rs6000_builtins code
;
168 static void rs6000_add_gc_roots
PARAMS ((void));
169 static int num_insns_constant_wide
PARAMS ((HOST_WIDE_INT
));
170 static void validate_condition_mode
171 PARAMS ((enum rtx_code
, enum machine_mode
));
172 static rtx rs6000_generate_compare
PARAMS ((enum rtx_code
));
173 static void rs6000_maybe_dead
PARAMS ((rtx
));
174 static void rs6000_emit_stack_tie
PARAMS ((void));
175 static void rs6000_frame_related
PARAMS ((rtx
, rtx
, HOST_WIDE_INT
, rtx
, rtx
));
176 static void emit_frame_save
PARAMS ((rtx
, rtx
, enum machine_mode
,
177 unsigned int, int, int));
178 static rtx gen_frame_mem_offset
PARAMS ((enum machine_mode
, rtx
, int));
179 static void rs6000_emit_allocate_stack
PARAMS ((HOST_WIDE_INT
, int));
180 static unsigned rs6000_hash_constant
PARAMS ((rtx
));
181 static unsigned toc_hash_function
PARAMS ((const void *));
182 static int toc_hash_eq
PARAMS ((const void *, const void *));
183 static int toc_hash_mark_entry
PARAMS ((void **, void *));
184 static void toc_hash_mark_table
PARAMS ((void *));
185 static int constant_pool_expr_1
PARAMS ((rtx
, int *, int *));
186 static struct machine_function
* rs6000_init_machine_status
PARAMS ((void));
187 static bool rs6000_assemble_integer
PARAMS ((rtx
, unsigned int, int));
188 #ifdef HAVE_GAS_HIDDEN
189 static void rs6000_assemble_visibility
PARAMS ((tree
, const char *));
191 static int rs6000_ra_ever_killed
PARAMS ((void));
192 static tree rs6000_handle_longcall_attribute
PARAMS ((tree
*, tree
, tree
, int, bool *));
193 const struct attribute_spec rs6000_attribute_table
[];
194 static void rs6000_set_default_type_attributes
PARAMS ((tree
));
195 static void rs6000_output_function_prologue
PARAMS ((FILE *, HOST_WIDE_INT
));
196 static void rs6000_output_function_epilogue
PARAMS ((FILE *, HOST_WIDE_INT
));
197 static rtx rs6000_emit_set_long_const
PARAMS ((rtx
,
198 HOST_WIDE_INT
, HOST_WIDE_INT
));
200 static unsigned int rs6000_elf_section_type_flags
PARAMS ((tree
, const char *,
202 static void rs6000_elf_asm_out_constructor
PARAMS ((rtx
, int));
203 static void rs6000_elf_asm_out_destructor
PARAMS ((rtx
, int));
204 static void rs6000_elf_select_section
PARAMS ((tree
, int,
205 unsigned HOST_WIDE_INT
));
206 static void rs6000_elf_unique_section
PARAMS ((tree
, int));
207 static void rs6000_elf_select_rtx_section
PARAMS ((enum machine_mode
, rtx
,
208 unsigned HOST_WIDE_INT
));
209 static void rs6000_elf_encode_section_info
PARAMS ((tree
, int))
211 static const char *rs6000_elf_strip_name_encoding
PARAMS ((const char *));
212 static bool rs6000_elf_in_small_data_p
PARAMS ((tree
));
215 static void rs6000_xcoff_asm_globalize_label
PARAMS ((FILE *, const char *));
216 static void rs6000_xcoff_asm_named_section
PARAMS ((const char *, unsigned int));
217 static void rs6000_xcoff_select_section
PARAMS ((tree
, int,
218 unsigned HOST_WIDE_INT
));
219 static void rs6000_xcoff_unique_section
PARAMS ((tree
, int));
220 static void rs6000_xcoff_select_rtx_section
PARAMS ((enum machine_mode
, rtx
,
221 unsigned HOST_WIDE_INT
));
222 static const char * rs6000_xcoff_strip_name_encoding
PARAMS ((const char *));
223 static unsigned int rs6000_xcoff_section_type_flags
PARAMS ((tree
, const char *, int));
225 static void rs6000_xcoff_encode_section_info
PARAMS ((tree
, int))
227 static bool rs6000_binds_local_p
PARAMS ((tree
));
228 static int rs6000_adjust_cost
PARAMS ((rtx
, rtx
, rtx
, int));
229 static int rs6000_adjust_priority
PARAMS ((rtx
, int));
230 static int rs6000_issue_rate
PARAMS ((void));
232 static void rs6000_init_builtins
PARAMS ((void));
233 static rtx rs6000_expand_unop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
234 static rtx rs6000_expand_binop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
235 static rtx rs6000_expand_ternop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
236 static rtx rs6000_expand_builtin
PARAMS ((tree
, rtx
, rtx
, enum machine_mode
, int));
237 static void altivec_init_builtins
PARAMS ((void));
238 static void rs6000_common_init_builtins
PARAMS ((void));
240 static void enable_mask_for_builtins
PARAMS ((struct builtin_description
*,
241 int, enum rs6000_builtins
,
242 enum rs6000_builtins
));
243 static void spe_init_builtins
PARAMS ((void));
244 static rtx spe_expand_builtin
PARAMS ((tree
, rtx
, bool *));
245 static rtx spe_expand_predicate_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
246 static rtx spe_expand_evsel_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
247 static int rs6000_emit_int_cmove
PARAMS ((rtx
, rtx
, rtx
, rtx
));
249 static rtx altivec_expand_builtin
PARAMS ((tree
, rtx
, bool *));
250 static rtx altivec_expand_ld_builtin
PARAMS ((tree
, rtx
, bool *));
251 static rtx altivec_expand_st_builtin
PARAMS ((tree
, rtx
, bool *));
252 static rtx altivec_expand_dst_builtin
PARAMS ((tree
, rtx
, bool *));
253 static rtx altivec_expand_abs_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
254 static rtx altivec_expand_predicate_builtin
PARAMS ((enum insn_code
, const char *, tree
, rtx
));
255 static rtx altivec_expand_stv_builtin
PARAMS ((enum insn_code
, tree
));
256 static void rs6000_parse_abi_options
PARAMS ((void));
257 static void rs6000_parse_vrsave_option
PARAMS ((void));
258 static void rs6000_parse_isel_option
PARAMS ((void));
259 static int first_altivec_reg_to_save
PARAMS ((void));
260 static unsigned int compute_vrsave_mask
PARAMS ((void));
261 static void is_altivec_return_reg
PARAMS ((rtx
, void *));
262 static rtx generate_set_vrsave
PARAMS ((rtx
, rs6000_stack_t
*, int));
263 static void altivec_frame_fixup
PARAMS ((rtx
, rtx
, HOST_WIDE_INT
));
264 static int easy_vector_constant
PARAMS ((rtx
));
266 /* Default register names. */
267 char rs6000_reg_names
[][8] =
269 "0", "1", "2", "3", "4", "5", "6", "7",
270 "8", "9", "10", "11", "12", "13", "14", "15",
271 "16", "17", "18", "19", "20", "21", "22", "23",
272 "24", "25", "26", "27", "28", "29", "30", "31",
273 "0", "1", "2", "3", "4", "5", "6", "7",
274 "8", "9", "10", "11", "12", "13", "14", "15",
275 "16", "17", "18", "19", "20", "21", "22", "23",
276 "24", "25", "26", "27", "28", "29", "30", "31",
277 "mq", "lr", "ctr","ap",
278 "0", "1", "2", "3", "4", "5", "6", "7",
280 /* AltiVec registers. */
281 "0", "1", "2", "3", "4", "5", "6", "7",
282 "8", "9", "10", "11", "12", "13", "14", "15",
283 "16", "17", "18", "19", "20", "21", "22", "23",
284 "24", "25", "26", "27", "28", "29", "30", "31",
288 #ifdef TARGET_REGNAMES
289 static const char alt_reg_names
[][8] =
291 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
292 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
293 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
294 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
295 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
296 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
297 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
298 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
299 "mq", "lr", "ctr", "ap",
300 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
302 /* AltiVec registers. */
303 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
304 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
305 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
306 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
311 #ifndef MASK_STRICT_ALIGN
312 #define MASK_STRICT_ALIGN 0
315 /* Initialize the GCC target structure. */
316 #undef TARGET_ATTRIBUTE_TABLE
317 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
318 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
319 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
321 #undef TARGET_ASM_ALIGNED_DI_OP
322 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
324 /* Default unaligned ops are only provided for ELF. Find the ops needed
325 for non-ELF systems. */
326 #ifndef OBJECT_FORMAT_ELF
328 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
330 #undef TARGET_ASM_UNALIGNED_HI_OP
331 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
332 #undef TARGET_ASM_UNALIGNED_SI_OP
333 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
334 #undef TARGET_ASM_UNALIGNED_DI_OP
335 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
338 #undef TARGET_ASM_UNALIGNED_HI_OP
339 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
340 #undef TARGET_ASM_UNALIGNED_SI_OP
341 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
345 /* This hook deals with fixups for relocatable code and DI-mode objects
347 #undef TARGET_ASM_INTEGER
348 #define TARGET_ASM_INTEGER rs6000_assemble_integer
350 #ifdef HAVE_GAS_HIDDEN
351 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
352 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
355 #undef TARGET_ASM_FUNCTION_PROLOGUE
356 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
357 #undef TARGET_ASM_FUNCTION_EPILOGUE
358 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
360 #undef TARGET_SCHED_ISSUE_RATE
361 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
362 #undef TARGET_SCHED_ADJUST_COST
363 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
364 #undef TARGET_SCHED_ADJUST_PRIORITY
365 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
367 #undef TARGET_INIT_BUILTINS
368 #define TARGET_INIT_BUILTINS rs6000_init_builtins
370 #undef TARGET_EXPAND_BUILTIN
371 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
373 #undef TARGET_BINDS_LOCAL_P
374 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
376 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
377 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
/* The table of target hooks for this back end, assembled from the
   TARGET_* macro overrides above by TARGET_INITIALIZER (target-def.h).  */
379 struct gcc_target targetm
= TARGET_INITIALIZER
;
381 /* Override command line options. Mostly we process the processor
382 type and sometimes adjust other TARGET_ options. */
385 rs6000_override_options (default_cpu
)
386 const char *default_cpu
;
389 struct rs6000_cpu_select
*ptr
;
391 /* Simplify the entries below by making a mask for any POWER
392 variant and any PowerPC variant. */
394 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
395 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
396 | MASK_PPC_GFXOPT | MASK_POWERPC64)
397 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
401 const char *const name
; /* Canonical processor name. */
402 const enum processor_type processor
; /* Processor type enum value. */
403 const int target_enable
; /* Target flags to enable. */
404 const int target_disable
; /* Target flags to disable. */
405 } const processor_target_table
[]
406 = {{"common", PROCESSOR_COMMON
, MASK_NEW_MNEMONICS
,
407 POWER_MASKS
| POWERPC_MASKS
},
408 {"power", PROCESSOR_POWER
,
409 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
410 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
411 {"power2", PROCESSOR_POWER
,
412 MASK_POWER
| MASK_POWER2
| MASK_MULTIPLE
| MASK_STRING
,
413 POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
414 {"power3", PROCESSOR_PPC630
,
415 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
416 POWER_MASKS
| MASK_PPC_GPOPT
},
417 {"power4", PROCESSOR_POWER4
,
418 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
419 POWER_MASKS
| MASK_PPC_GPOPT
},
420 {"powerpc", PROCESSOR_POWERPC
,
421 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
422 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
423 {"powerpc64", PROCESSOR_POWERPC64
,
424 MASK_POWERPC
| MASK_POWERPC64
| MASK_NEW_MNEMONICS
,
425 POWER_MASKS
| POWERPC_OPT_MASKS
},
426 {"rios", PROCESSOR_RIOS1
,
427 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
428 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
429 {"rios1", PROCESSOR_RIOS1
,
430 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
431 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
432 {"rsc", PROCESSOR_PPC601
,
433 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
434 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
435 {"rsc1", PROCESSOR_PPC601
,
436 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
437 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
438 {"rios2", PROCESSOR_RIOS2
,
439 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
| MASK_POWER2
,
440 POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
441 {"rs64a", PROCESSOR_RS64A
,
442 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
443 POWER_MASKS
| POWERPC_OPT_MASKS
},
444 {"401", PROCESSOR_PPC403
,
445 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
446 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
447 {"403", PROCESSOR_PPC403
,
448 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
| MASK_STRICT_ALIGN
,
449 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
450 {"405", PROCESSOR_PPC405
,
451 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
452 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
453 {"505", PROCESSOR_MPCCORE
,
454 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
455 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
456 {"601", PROCESSOR_PPC601
,
457 MASK_POWER
| MASK_POWERPC
| MASK_NEW_MNEMONICS
| MASK_MULTIPLE
| MASK_STRING
,
458 MASK_POWER2
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
459 {"602", PROCESSOR_PPC603
,
460 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
461 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
462 {"603", PROCESSOR_PPC603
,
463 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
464 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
465 {"603e", PROCESSOR_PPC603
,
466 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
467 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
468 {"ec603e", PROCESSOR_PPC603
,
469 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
470 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
471 {"604", PROCESSOR_PPC604
,
472 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
473 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
474 {"604e", PROCESSOR_PPC604e
,
475 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
476 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
477 {"620", PROCESSOR_PPC620
,
478 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
479 POWER_MASKS
| MASK_PPC_GPOPT
},
480 {"630", PROCESSOR_PPC630
,
481 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
482 POWER_MASKS
| MASK_PPC_GPOPT
},
483 {"740", PROCESSOR_PPC750
,
484 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
485 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
486 {"750", PROCESSOR_PPC750
,
487 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
488 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
489 {"7400", PROCESSOR_PPC7400
,
490 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
491 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
492 {"7450", PROCESSOR_PPC7450
,
493 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
494 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
495 {"8540", PROCESSOR_PPC8540
,
496 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
497 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
498 {"801", PROCESSOR_MPCCORE
,
499 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
500 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
501 {"821", PROCESSOR_MPCCORE
,
502 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
503 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
504 {"823", PROCESSOR_MPCCORE
,
505 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
506 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
507 {"860", PROCESSOR_MPCCORE
,
508 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
509 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
}};
511 const size_t ptt_size
= ARRAY_SIZE (processor_target_table
);
513 /* Save current -mmultiple/-mno-multiple status. */
514 int multiple
= TARGET_MULTIPLE
;
515 /* Save current -mstring/-mno-string status. */
516 int string
= TARGET_STRING
;
518 /* Identify the processor type. */
519 rs6000_select
[0].string
= default_cpu
;
520 rs6000_cpu
= TARGET_POWERPC64
? PROCESSOR_DEFAULT64
: PROCESSOR_DEFAULT
;
522 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
524 ptr
= &rs6000_select
[i
];
525 if (ptr
->string
!= (char *)0 && ptr
->string
[0] != '\0')
527 for (j
= 0; j
< ptt_size
; j
++)
528 if (! strcmp (ptr
->string
, processor_target_table
[j
].name
))
531 rs6000_cpu
= processor_target_table
[j
].processor
;
535 target_flags
|= processor_target_table
[j
].target_enable
;
536 target_flags
&= ~processor_target_table
[j
].target_disable
;
542 error ("bad value (%s) for %s switch", ptr
->string
, ptr
->name
);
546 if (rs6000_cpu
== PROCESSOR_PPC8540
)
549 /* If we are optimizing big endian systems for space, use the store
550 multiple instructions. */
551 if (BYTES_BIG_ENDIAN
&& optimize_size
)
552 target_flags
|= MASK_MULTIPLE
;
554 /* If -mmultiple or -mno-multiple was explicitly used, don't
555 override with the processor default */
556 if (TARGET_MULTIPLE_SET
)
557 target_flags
= (target_flags
& ~MASK_MULTIPLE
) | multiple
;
559 /* If -mstring or -mno-string was explicitly used, don't override
560 with the processor default. */
561 if (TARGET_STRING_SET
)
562 target_flags
= (target_flags
& ~MASK_STRING
) | string
;
564 /* Don't allow -mmultiple or -mstring on little endian systems
565 unless the cpu is a 750, because the hardware doesn't support the
566 instructions used in little endian mode, and causes an alignment
567 trap. The 750 does not cause an alignment trap (except when the
568 target is unaligned). */
570 if (! BYTES_BIG_ENDIAN
&& rs6000_cpu
!= PROCESSOR_PPC750
)
574 target_flags
&= ~MASK_MULTIPLE
;
575 if (TARGET_MULTIPLE_SET
)
576 warning ("-mmultiple is not supported on little endian systems");
581 target_flags
&= ~MASK_STRING
;
582 if (TARGET_STRING_SET
)
583 warning ("-mstring is not supported on little endian systems");
587 if (flag_pic
!= 0 && DEFAULT_ABI
== ABI_AIX
)
589 rs6000_flag_pic
= flag_pic
;
593 /* For Darwin, always silently make -fpic and -fPIC identical. */
594 if (flag_pic
== 1 && DEFAULT_ABI
== ABI_DARWIN
)
597 /* Set debug flags */
598 if (rs6000_debug_name
)
600 if (! strcmp (rs6000_debug_name
, "all"))
601 rs6000_debug_stack
= rs6000_debug_arg
= 1;
602 else if (! strcmp (rs6000_debug_name
, "stack"))
603 rs6000_debug_stack
= 1;
604 else if (! strcmp (rs6000_debug_name
, "arg"))
605 rs6000_debug_arg
= 1;
607 error ("unknown -mdebug-%s switch", rs6000_debug_name
);
610 if (rs6000_traceback_name
)
612 if (! strncmp (rs6000_traceback_name
, "full", 4))
613 rs6000_traceback
= traceback_full
;
614 else if (! strncmp (rs6000_traceback_name
, "part", 4))
615 rs6000_traceback
= traceback_part
;
616 else if (! strncmp (rs6000_traceback_name
, "no", 2))
617 rs6000_traceback
= traceback_none
;
619 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
620 rs6000_traceback_name
);
623 /* Set size of long double */
624 rs6000_long_double_type_size
= 64;
625 if (rs6000_long_double_size_string
)
628 int size
= strtol (rs6000_long_double_size_string
, &tail
, 10);
629 if (*tail
!= '\0' || (size
!= 64 && size
!= 128))
630 error ("Unknown switch -mlong-double-%s",
631 rs6000_long_double_size_string
);
633 rs6000_long_double_type_size
= size
;
636 /* Handle -mabi= options. */
637 rs6000_parse_abi_options ();
639 /* Handle -mvrsave= option. */
640 rs6000_parse_vrsave_option ();
642 /* Handle -misel= option. */
643 rs6000_parse_isel_option ();
645 #ifdef SUBTARGET_OVERRIDE_OPTIONS
646 SUBTARGET_OVERRIDE_OPTIONS
;
648 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
649 SUBSUBTARGET_OVERRIDE_OPTIONS
;
652 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
653 using TARGET_OPTIONS to handle a toggle switch, but we're out of
654 bits in target_flags so TARGET_SWITCHES cannot be used.
655 Assumption here is that rs6000_longcall_switch points into the
656 text of the complete option, rather than being a copy, so we can
657 scan back for the presence or absence of the no- modifier. */
658 if (rs6000_longcall_switch
)
660 const char *base
= rs6000_longcall_switch
;
661 while (base
[-1] != 'm') base
--;
663 if (*rs6000_longcall_switch
!= '\0')
664 error ("invalid option `%s'", base
);
665 rs6000_default_long_calls
= (base
[0] != 'n');
668 #ifdef TARGET_REGNAMES
669 /* If the user desires alternate register names, copy in the
670 alternate names now. */
672 memcpy (rs6000_reg_names
, alt_reg_names
, sizeof (rs6000_reg_names
));
675 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
676 If -maix-struct-return or -msvr4-struct-return was explicitly
677 used, don't override with the ABI default. */
678 if (!(target_flags
& MASK_AIX_STRUCT_RET_SET
))
680 if (DEFAULT_ABI
== ABI_V4
&& !DRAFT_V4_STRUCT_RET
)
681 target_flags
= (target_flags
& ~MASK_AIX_STRUCT_RET
);
683 target_flags
|= MASK_AIX_STRUCT_RET
;
686 /* Register global variables with the garbage collector. */
687 rs6000_add_gc_roots ();
689 /* Allocate an alias set for register saves & restores from stack. */
690 rs6000_sr_alias_set
= new_alias_set ();
693 ASM_GENERATE_INTERNAL_LABEL (toc_label_name
, "LCTOC", 1);
695 /* We can only guarantee the availability of DI pseudo-ops when
696 assembling for 64-bit targets. */
699 targetm
.asm_out
.aligned_op
.di
= NULL
;
700 targetm
.asm_out
.unaligned_op
.di
= NULL
;
703 /* Arrange to save and restore machine status around nested functions. */
704 init_machine_status
= rs6000_init_machine_status
;
707 /* Handle -misel= option. */
709 rs6000_parse_isel_option ()
711 if (rs6000_isel_string
== 0)
713 else if (! strcmp (rs6000_isel_string
, "yes"))
715 else if (! strcmp (rs6000_isel_string
, "no"))
718 error ("unknown -misel= option specified: '%s'",
722 /* Handle -mvrsave= options. */
724 rs6000_parse_vrsave_option ()
726 /* Generate VRSAVE instructions by default. */
727 if (rs6000_altivec_vrsave_string
== 0
728 || ! strcmp (rs6000_altivec_vrsave_string
, "yes"))
729 rs6000_altivec_vrsave
= 1;
730 else if (! strcmp (rs6000_altivec_vrsave_string
, "no"))
731 rs6000_altivec_vrsave
= 0;
733 error ("unknown -mvrsave= option specified: '%s'",
734 rs6000_altivec_vrsave_string
);
737 /* Handle -mabi= options. */
739 rs6000_parse_abi_options ()
741 if (rs6000_abi_string
== 0)
743 else if (! strcmp (rs6000_abi_string
, "altivec"))
744 rs6000_altivec_abi
= 1;
745 else if (! strcmp (rs6000_abi_string
, "no-altivec"))
746 rs6000_altivec_abi
= 0;
747 else if (! strcmp (rs6000_abi_string
, "spe"))
749 else if (! strcmp (rs6000_abi_string
, "no-spe"))
752 error ("unknown ABI specified: '%s'", rs6000_abi_string
);
756 optimization_options (level
, size
)
757 int level ATTRIBUTE_UNUSED
;
758 int size ATTRIBUTE_UNUSED
;
762 /* Do anything needed at the start of the asm file. */
765 rs6000_file_start (file
, default_cpu
)
767 const char *default_cpu
;
771 const char *start
= buffer
;
772 struct rs6000_cpu_select
*ptr
;
774 if (flag_verbose_asm
)
776 sprintf (buffer
, "\n%s rs6000/powerpc options:", ASM_COMMENT_START
);
777 rs6000_select
[0].string
= default_cpu
;
779 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
781 ptr
= &rs6000_select
[i
];
782 if (ptr
->string
!= (char *)0 && ptr
->string
[0] != '\0')
784 fprintf (file
, "%s %s%s", start
, ptr
->name
, ptr
->string
);
790 switch (rs6000_sdata
)
792 case SDATA_NONE
: fprintf (file
, "%s -msdata=none", start
); start
= ""; break;
793 case SDATA_DATA
: fprintf (file
, "%s -msdata=data", start
); start
= ""; break;
794 case SDATA_SYSV
: fprintf (file
, "%s -msdata=sysv", start
); start
= ""; break;
795 case SDATA_EABI
: fprintf (file
, "%s -msdata=eabi", start
); start
= ""; break;
798 if (rs6000_sdata
&& g_switch_value
)
800 fprintf (file
, "%s -G %d", start
, g_switch_value
);
810 /* Return nonzero if this function is known to have a null epilogue. */
815 if (reload_completed
)
817 rs6000_stack_t
*info
= rs6000_stack_info ();
819 if (info
->first_gp_reg_save
== 32
820 && info
->first_fp_reg_save
== 64
821 && info
->first_altivec_reg_save
== LAST_ALTIVEC_REGNO
+ 1
824 && info
->vrsave_mask
== 0
832 /* Returns 1 always. */
835 any_operand (op
, mode
)
836 rtx op ATTRIBUTE_UNUSED
;
837 enum machine_mode mode ATTRIBUTE_UNUSED
;
842 /* Returns 1 if op is the count register. */
844 count_register_operand (op
, mode
)
846 enum machine_mode mode ATTRIBUTE_UNUSED
;
848 if (GET_CODE (op
) != REG
)
851 if (REGNO (op
) == COUNT_REGISTER_REGNUM
)
854 if (REGNO (op
) > FIRST_PSEUDO_REGISTER
)
860 /* Returns 1 if op is an altivec register. */
862 altivec_register_operand (op
, mode
)
864 enum machine_mode mode ATTRIBUTE_UNUSED
;
867 return (register_operand (op
, mode
)
868 && (GET_CODE (op
) != REG
869 || REGNO (op
) > FIRST_PSEUDO_REGISTER
870 || ALTIVEC_REGNO_P (REGNO (op
))));
874 xer_operand (op
, mode
)
876 enum machine_mode mode ATTRIBUTE_UNUSED
;
878 if (GET_CODE (op
) != REG
)
881 if (XER_REGNO_P (REGNO (op
)))
887 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
888 by such constants completes more quickly. */
891 s8bit_cint_operand (op
, mode
)
893 enum machine_mode mode ATTRIBUTE_UNUSED
;
895 return ( GET_CODE (op
) == CONST_INT
896 && (INTVAL (op
) >= -128 && INTVAL (op
) <= 127));
899 /* Return 1 if OP is a constant that can fit in a D field. */
902 short_cint_operand (op
, mode
)
904 enum machine_mode mode ATTRIBUTE_UNUSED
;
906 return (GET_CODE (op
) == CONST_INT
907 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I'));
910 /* Similar for an unsigned D field. */
913 u_short_cint_operand (op
, mode
)
915 enum machine_mode mode ATTRIBUTE_UNUSED
;
917 return (GET_CODE (op
) == CONST_INT
918 && CONST_OK_FOR_LETTER_P (INTVAL (op
) & GET_MODE_MASK (mode
), 'K'));
921 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
924 non_short_cint_operand (op
, mode
)
926 enum machine_mode mode ATTRIBUTE_UNUSED
;
928 return (GET_CODE (op
) == CONST_INT
929 && (unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x8000) >= 0x10000);
932 /* Returns 1 if OP is a CONST_INT that is a positive value
933 and an exact power of 2. */
936 exact_log2_cint_operand (op
, mode
)
938 enum machine_mode mode ATTRIBUTE_UNUSED
;
940 return (GET_CODE (op
) == CONST_INT
942 && exact_log2 (INTVAL (op
)) >= 0);
945 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
949 gpc_reg_operand (op
, mode
)
951 enum machine_mode mode
;
953 return (register_operand (op
, mode
)
954 && (GET_CODE (op
) != REG
955 || (REGNO (op
) >= ARG_POINTER_REGNUM
956 && !XER_REGNO_P (REGNO (op
)))
957 || REGNO (op
) < MQ_REGNO
));
960 /* Returns 1 if OP is either a pseudo-register or a register denoting a
964 cc_reg_operand (op
, mode
)
966 enum machine_mode mode
;
968 return (register_operand (op
, mode
)
969 && (GET_CODE (op
) != REG
970 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
971 || CR_REGNO_P (REGNO (op
))));
974 /* Returns 1 if OP is either a pseudo-register or a register denoting a
975 CR field that isn't CR0. */
978 cc_reg_not_cr0_operand (op
, mode
)
980 enum machine_mode mode
;
982 return (register_operand (op
, mode
)
983 && (GET_CODE (op
) != REG
984 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
985 || CR_REGNO_NOT_CR0_P (REGNO (op
))));
988 /* Returns 1 if OP is either a constant integer valid for a D-field or
989 a non-special register. If a register, it must be in the proper
990 mode unless MODE is VOIDmode. */
993 reg_or_short_operand (op
, mode
)
995 enum machine_mode mode
;
997 return short_cint_operand (op
, mode
) || gpc_reg_operand (op
, mode
);
1000 /* Similar, except check if the negation of the constant would be
1001 valid for a D-field. */
1004 reg_or_neg_short_operand (op
, mode
)
1006 enum machine_mode mode
;
1008 if (GET_CODE (op
) == CONST_INT
)
1009 return CONST_OK_FOR_LETTER_P (INTVAL (op
), 'P');
1011 return gpc_reg_operand (op
, mode
);
1014 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1015 a non-special register. If a register, it must be in the proper
1016 mode unless MODE is VOIDmode. */
1019 reg_or_aligned_short_operand (op
, mode
)
1021 enum machine_mode mode
;
1023 if (gpc_reg_operand (op
, mode
))
1025 else if (short_cint_operand (op
, mode
) && !(INTVAL (op
) & 3))
1032 /* Return 1 if the operand is either a register or an integer whose
1033 high-order 16 bits are zero. */
1036 reg_or_u_short_operand (op
, mode
)
1038 enum machine_mode mode
;
1040 return u_short_cint_operand (op
, mode
) || gpc_reg_operand (op
, mode
);
1043 /* Return 1 is the operand is either a non-special register or ANY
1044 constant integer. */
1047 reg_or_cint_operand (op
, mode
)
1049 enum machine_mode mode
;
1051 return (GET_CODE (op
) == CONST_INT
|| gpc_reg_operand (op
, mode
));
1054 /* Return 1 is the operand is either a non-special register or ANY
1055 32-bit signed constant integer. */
1058 reg_or_arith_cint_operand (op
, mode
)
1060 enum machine_mode mode
;
1062 return (gpc_reg_operand (op
, mode
)
1063 || (GET_CODE (op
) == CONST_INT
1064 #if HOST_BITS_PER_WIDE_INT != 32
1065 && ((unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x80000000)
1066 < (unsigned HOST_WIDE_INT
) 0x100000000ll
)
1071 /* Return 1 is the operand is either a non-special register or a 32-bit
1072 signed constant integer valid for 64-bit addition. */
1075 reg_or_add_cint64_operand (op
, mode
)
1077 enum machine_mode mode
;
1079 return (gpc_reg_operand (op
, mode
)
1080 || (GET_CODE (op
) == CONST_INT
1081 #if HOST_BITS_PER_WIDE_INT == 32
1082 && INTVAL (op
) < 0x7fff8000
1084 && ((unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x80008000)
1090 /* Return 1 is the operand is either a non-special register or a 32-bit
1091 signed constant integer valid for 64-bit subtraction. */
1094 reg_or_sub_cint64_operand (op
, mode
)
1096 enum machine_mode mode
;
1098 return (gpc_reg_operand (op
, mode
)
1099 || (GET_CODE (op
) == CONST_INT
1100 #if HOST_BITS_PER_WIDE_INT == 32
1101 && (- INTVAL (op
)) < 0x7fff8000
1103 && ((unsigned HOST_WIDE_INT
) ((- INTVAL (op
)) + 0x80008000)
1109 /* Return 1 is the operand is either a non-special register or ANY
1110 32-bit unsigned constant integer. */
1113 reg_or_logical_cint_operand (op
, mode
)
1115 enum machine_mode mode
;
1117 if (GET_CODE (op
) == CONST_INT
)
1119 if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
)
1121 if (GET_MODE_BITSIZE (mode
) <= 32)
1124 if (INTVAL (op
) < 0)
1128 return ((INTVAL (op
) & GET_MODE_MASK (mode
)
1129 & (~ (unsigned HOST_WIDE_INT
) 0xffffffff)) == 0);
1131 else if (GET_CODE (op
) == CONST_DOUBLE
)
1133 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
1137 return CONST_DOUBLE_HIGH (op
) == 0;
1140 return gpc_reg_operand (op
, mode
);
1143 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1146 got_operand (op
, mode
)
1148 enum machine_mode mode ATTRIBUTE_UNUSED
;
1150 return (GET_CODE (op
) == SYMBOL_REF
1151 || GET_CODE (op
) == CONST
1152 || GET_CODE (op
) == LABEL_REF
);
1155 /* Return 1 if the operand is a simple references that can be loaded via
1156 the GOT (labels involving addition aren't allowed). */
1159 got_no_const_operand (op
, mode
)
1161 enum machine_mode mode ATTRIBUTE_UNUSED
;
1163 return (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
);
1166 /* Return the number of instructions it takes to form a constant in an
1167 integer register. */
1170 num_insns_constant_wide (value
)
1171 HOST_WIDE_INT value
;
1173 /* signed constant loadable with {cal|addi} */
1174 if (CONST_OK_FOR_LETTER_P (value
, 'I'))
1177 /* constant loadable with {cau|addis} */
1178 else if (CONST_OK_FOR_LETTER_P (value
, 'L'))
1181 #if HOST_BITS_PER_WIDE_INT == 64
1182 else if (TARGET_POWERPC64
)
1184 HOST_WIDE_INT low
= ((value
& 0xffffffff) ^ 0x80000000) - 0x80000000;
1185 HOST_WIDE_INT high
= value
>> 31;
1187 if (high
== 0 || high
== -1)
1193 return num_insns_constant_wide (high
) + 1;
1195 return (num_insns_constant_wide (high
)
1196 + num_insns_constant_wide (low
) + 1);
1205 num_insns_constant (op
, mode
)
1207 enum machine_mode mode
;
1209 if (GET_CODE (op
) == CONST_INT
)
1211 #if HOST_BITS_PER_WIDE_INT == 64
1212 if ((INTVAL (op
) >> 31) != 0 && (INTVAL (op
) >> 31) != -1
1213 && mask64_operand (op
, mode
))
1217 return num_insns_constant_wide (INTVAL (op
));
1220 else if (GET_CODE (op
) == CONST_DOUBLE
&& mode
== SFmode
)
1225 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1226 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
1227 return num_insns_constant_wide ((HOST_WIDE_INT
) l
);
1230 else if (GET_CODE (op
) == CONST_DOUBLE
)
1236 int endian
= (WORDS_BIG_ENDIAN
== 0);
1238 if (mode
== VOIDmode
|| mode
== DImode
)
1240 high
= CONST_DOUBLE_HIGH (op
);
1241 low
= CONST_DOUBLE_LOW (op
);
1245 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1246 REAL_VALUE_TO_TARGET_DOUBLE (rv
, l
);
1248 low
= l
[1 - endian
];
1252 return (num_insns_constant_wide (low
)
1253 + num_insns_constant_wide (high
));
1257 if (high
== 0 && low
>= 0)
1258 return num_insns_constant_wide (low
);
1260 else if (high
== -1 && low
< 0)
1261 return num_insns_constant_wide (low
);
1263 else if (mask64_operand (op
, mode
))
1267 return num_insns_constant_wide (high
) + 1;
1270 return (num_insns_constant_wide (high
)
1271 + num_insns_constant_wide (low
) + 1);
1279 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1280 register with one instruction per word. We only do this if we can
1281 safely read CONST_DOUBLE_{LOW,HIGH}. */
1284 easy_fp_constant (op
, mode
)
1286 enum machine_mode mode
;
1288 if (GET_CODE (op
) != CONST_DOUBLE
1289 || GET_MODE (op
) != mode
1290 || (GET_MODE_CLASS (mode
) != MODE_FLOAT
&& mode
!= DImode
))
1293 /* Consider all constants with -msoft-float to be easy. */
1294 if ((TARGET_SOFT_FLOAT
|| !TARGET_FPRS
)
1298 /* If we are using V.4 style PIC, consider all constants to be hard. */
1299 if (flag_pic
&& DEFAULT_ABI
== ABI_V4
)
1302 #ifdef TARGET_RELOCATABLE
1303 /* Similarly if we are using -mrelocatable, consider all constants
1305 if (TARGET_RELOCATABLE
)
1314 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1315 REAL_VALUE_TO_TARGET_DOUBLE (rv
, k
);
1317 return (num_insns_constant_wide ((HOST_WIDE_INT
) k
[0]) == 1
1318 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[1]) == 1);
1321 else if (mode
== SFmode
)
1326 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1327 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
1329 return num_insns_constant_wide (l
) == 1;
1332 else if (mode
== DImode
)
1333 return ((TARGET_POWERPC64
1334 && GET_CODE (op
) == CONST_DOUBLE
&& CONST_DOUBLE_LOW (op
) == 0)
1335 || (num_insns_constant (op
, DImode
) <= 2));
1337 else if (mode
== SImode
)
1343 /* Return 1 if the operand is a CONST_INT and can be put into a
1344 register with one instruction. */
1347 easy_vector_constant (op
)
1353 if (GET_CODE (op
) != CONST_VECTOR
)
1356 units
= CONST_VECTOR_NUNITS (op
);
1358 /* We can generate 0 easily. Look for that. */
1359 for (i
= 0; i
< units
; ++i
)
1361 elt
= CONST_VECTOR_ELT (op
, i
);
1363 /* We could probably simplify this by just checking for equality
1364 with CONST0_RTX for the current mode, but let's be safe
1367 switch (GET_CODE (elt
))
1370 if (INTVAL (elt
) != 0)
1374 if (CONST_DOUBLE_LOW (elt
) != 0 || CONST_DOUBLE_HIGH (elt
) != 0)
1382 /* We could probably generate a few other constants trivially, but
1383 gcc doesn't generate them yet. FIXME later. */
1387 /* Return 1 if the operand is the constant 0. This works for scalars
1388 as well as vectors. */
1390 zero_constant (op
, mode
)
1392 enum machine_mode mode
;
1394 return op
== CONST0_RTX (mode
);
1397 /* Return 1 if the operand is 0.0. */
1399 zero_fp_constant (op
, mode
)
1401 enum machine_mode mode
;
1403 return GET_MODE_CLASS (mode
) == MODE_FLOAT
&& op
== CONST0_RTX (mode
);
1406 /* Return 1 if the operand is in volatile memory. Note that during
1407 the RTL generation phase, memory_operand does not return TRUE for
1408 volatile memory references. So this function allows us to
1409 recognize volatile references where its safe. */
1412 volatile_mem_operand (op
, mode
)
1414 enum machine_mode mode
;
1416 if (GET_CODE (op
) != MEM
)
1419 if (!MEM_VOLATILE_P (op
))
1422 if (mode
!= GET_MODE (op
))
1425 if (reload_completed
)
1426 return memory_operand (op
, mode
);
1428 if (reload_in_progress
)
1429 return strict_memory_address_p (mode
, XEXP (op
, 0));
1431 return memory_address_p (mode
, XEXP (op
, 0));
1434 /* Return 1 if the operand is an offsettable memory operand. */
1437 offsettable_mem_operand (op
, mode
)
1439 enum machine_mode mode
;
1441 return ((GET_CODE (op
) == MEM
)
1442 && offsettable_address_p (reload_completed
|| reload_in_progress
,
1443 mode
, XEXP (op
, 0)));
1446 /* Return 1 if the operand is either an easy FP constant (see above) or
1450 mem_or_easy_const_operand (op
, mode
)
1452 enum machine_mode mode
;
1454 return memory_operand (op
, mode
) || easy_fp_constant (op
, mode
);
1457 /* Return 1 if the operand is either a non-special register or an item
1458 that can be used as the operand of a `mode' add insn. */
1461 add_operand (op
, mode
)
1463 enum machine_mode mode
;
1465 if (GET_CODE (op
) == CONST_INT
)
1466 return (CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I')
1467 || CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'));
1469 return gpc_reg_operand (op
, mode
);
1472 /* Return 1 if OP is a constant but not a valid add_operand. */
1475 non_add_cint_operand (op
, mode
)
1477 enum machine_mode mode ATTRIBUTE_UNUSED
;
1479 return (GET_CODE (op
) == CONST_INT
1480 && !CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I')
1481 && !CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'));
1484 /* Return 1 if the operand is a non-special register or a constant that
1485 can be used as the operand of an OR or XOR insn on the RS/6000. */
1488 logical_operand (op
, mode
)
1490 enum machine_mode mode
;
1492 HOST_WIDE_INT opl
, oph
;
1494 if (gpc_reg_operand (op
, mode
))
1497 if (GET_CODE (op
) == CONST_INT
)
1499 opl
= INTVAL (op
) & GET_MODE_MASK (mode
);
1501 #if HOST_BITS_PER_WIDE_INT <= 32
1502 if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
&& opl
< 0)
1506 else if (GET_CODE (op
) == CONST_DOUBLE
)
1508 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1511 opl
= CONST_DOUBLE_LOW (op
);
1512 oph
= CONST_DOUBLE_HIGH (op
);
1519 return ((opl
& ~ (unsigned HOST_WIDE_INT
) 0xffff) == 0
1520 || (opl
& ~ (unsigned HOST_WIDE_INT
) 0xffff0000) == 0);
1523 /* Return 1 if C is a constant that is not a logical operand (as
1524 above), but could be split into one. */
1527 non_logical_cint_operand (op
, mode
)
1529 enum machine_mode mode
;
1531 return ((GET_CODE (op
) == CONST_INT
|| GET_CODE (op
) == CONST_DOUBLE
)
1532 && ! logical_operand (op
, mode
)
1533 && reg_or_logical_cint_operand (op
, mode
));
1536 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1537 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1538 Reject all ones and all zeros, since these should have been optimized
1539 away and confuse the making of MB and ME. */
1542 mask_operand (op
, mode
)
1544 enum machine_mode mode ATTRIBUTE_UNUSED
;
1546 HOST_WIDE_INT c
, lsb
;
1548 if (GET_CODE (op
) != CONST_INT
)
1553 /* Fail in 64-bit mode if the mask wraps around because the upper
1554 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1555 if (TARGET_POWERPC64
&& (c
& 0x80000001) == 0x80000001)
1558 /* We don't change the number of transitions by inverting,
1559 so make sure we start with the LS bit zero. */
1563 /* Reject all zeros or all ones. */
1567 /* Find the first transition. */
1570 /* Invert to look for a second transition. */
1573 /* Erase first transition. */
1576 /* Find the second transition (if any). */
1579 /* Match if all the bits above are 1's (or c is zero). */
1583 /* Return 1 for the PowerPC64 rlwinm corner case. */
1586 mask_operand_wrap (op
, mode
)
1588 enum machine_mode mode ATTRIBUTE_UNUSED
;
1590 HOST_WIDE_INT c
, lsb
;
1592 if (GET_CODE (op
) != CONST_INT
)
1597 if ((c
& 0x80000001) != 0x80000001)
1611 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1612 It is if there are no more than one 1->0 or 0->1 transitions.
1613 Reject all zeros, since zero should have been optimized away and
1614 confuses the making of MB and ME. */
1617 mask64_operand (op
, mode
)
1619 enum machine_mode mode ATTRIBUTE_UNUSED
;
1621 if (GET_CODE (op
) == CONST_INT
)
1623 HOST_WIDE_INT c
, lsb
;
1627 /* Reject all zeros. */
1631 /* We don't change the number of transitions by inverting,
1632 so make sure we start with the LS bit zero. */
1636 /* Find the transition, and check that all bits above are 1's. */
1643 /* Like mask64_operand, but allow up to three transitions. This
1644 predicate is used by insn patterns that generate two rldicl or
1645 rldicr machine insns. */
1648 mask64_2_operand (op
, mode
)
1650 enum machine_mode mode ATTRIBUTE_UNUSED
;
1652 if (GET_CODE (op
) == CONST_INT
)
1654 HOST_WIDE_INT c
, lsb
;
1658 /* Disallow all zeros. */
1662 /* We don't change the number of transitions by inverting,
1663 so make sure we start with the LS bit zero. */
1667 /* Find the first transition. */
1670 /* Invert to look for a second transition. */
1673 /* Erase first transition. */
1676 /* Find the second transition. */
1679 /* Invert to look for a third transition. */
1682 /* Erase second transition. */
1685 /* Find the third transition (if any). */
1688 /* Match if all the bits above are 1's (or c is zero). */
1694 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
1695 implement ANDing by the mask IN. */
1697 build_mask64_2_operands (in
, out
)
1701 #if HOST_BITS_PER_WIDE_INT >= 64
1702 unsigned HOST_WIDE_INT c
, lsb
, m1
, m2
;
1705 if (GET_CODE (in
) != CONST_INT
)
1711 /* Assume c initially something like 0x00fff000000fffff. The idea
1712 is to rotate the word so that the middle ^^^^^^ group of zeros
1713 is at the MS end and can be cleared with an rldicl mask. We then
1714 rotate back and clear off the MS ^^ group of zeros with a
1716 c
= ~c
; /* c == 0xff000ffffff00000 */
1717 lsb
= c
& -c
; /* lsb == 0x0000000000100000 */
1718 m1
= -lsb
; /* m1 == 0xfffffffffff00000 */
1719 c
= ~c
; /* c == 0x00fff000000fffff */
1720 c
&= -lsb
; /* c == 0x00fff00000000000 */
1721 lsb
= c
& -c
; /* lsb == 0x0000100000000000 */
1722 c
= ~c
; /* c == 0xff000fffffffffff */
1723 c
&= -lsb
; /* c == 0xff00000000000000 */
1725 while ((lsb
>>= 1) != 0)
1726 shift
++; /* shift == 44 on exit from loop */
1727 m1
<<= 64 - shift
; /* m1 == 0xffffff0000000000 */
1728 m1
= ~m1
; /* m1 == 0x000000ffffffffff */
1729 m2
= ~c
; /* m2 == 0x00ffffffffffffff */
1733 /* Assume c initially something like 0xff000f0000000000. The idea
1734 is to rotate the word so that the ^^^ middle group of zeros
1735 is at the LS end and can be cleared with an rldicr mask. We then
1736 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
1738 lsb
= c
& -c
; /* lsb == 0x0000010000000000 */
1739 m2
= -lsb
; /* m2 == 0xffffff0000000000 */
1740 c
= ~c
; /* c == 0x00fff0ffffffffff */
1741 c
&= -lsb
; /* c == 0x00fff00000000000 */
1742 lsb
= c
& -c
; /* lsb == 0x0000100000000000 */
1743 c
= ~c
; /* c == 0xff000fffffffffff */
1744 c
&= -lsb
; /* c == 0xff00000000000000 */
1746 while ((lsb
>>= 1) != 0)
1747 shift
++; /* shift == 44 on exit from loop */
1748 m1
= ~c
; /* m1 == 0x00ffffffffffffff */
1749 m1
>>= shift
; /* m1 == 0x0000000000000fff */
1750 m1
= ~m1
; /* m1 == 0xfffffffffffff000 */
1753 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
1754 masks will be all 1's. We are guaranteed more than one transition. */
1755 out
[0] = GEN_INT (64 - shift
);
1756 out
[1] = GEN_INT (m1
);
1757 out
[2] = GEN_INT (shift
);
1758 out
[3] = GEN_INT (m2
);
1766 /* Return 1 if the operand is either a non-special register or a constant
1767 that can be used as the operand of a PowerPC64 logical AND insn. */
1770 and64_operand (op
, mode
)
1772 enum machine_mode mode
;
1774 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
1775 return (gpc_reg_operand (op
, mode
) || mask64_operand (op
, mode
));
1777 return (logical_operand (op
, mode
) || mask64_operand (op
, mode
));
1780 /* Like the above, but also match constants that can be implemented
1781 with two rldicl or rldicr insns. */
1784 and64_2_operand (op
, mode
)
1786 enum machine_mode mode
;
1788 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
1789 return gpc_reg_operand (op
, mode
) || mask64_2_operand (op
, mode
);
1791 return logical_operand (op
, mode
) || mask64_2_operand (op
, mode
);
1794 /* Return 1 if the operand is either a non-special register or a
1795 constant that can be used as the operand of an RS/6000 logical AND insn. */
1798 and_operand (op
, mode
)
1800 enum machine_mode mode
;
1802 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
1803 return (gpc_reg_operand (op
, mode
) || mask_operand (op
, mode
));
1805 return (logical_operand (op
, mode
) || mask_operand (op
, mode
));
1808 /* Return 1 if the operand is a general register or memory operand. */
1811 reg_or_mem_operand (op
, mode
)
1813 enum machine_mode mode
;
1815 return (gpc_reg_operand (op
, mode
)
1816 || memory_operand (op
, mode
)
1817 || volatile_mem_operand (op
, mode
));
1820 /* Return 1 if the operand is a general register or memory operand without
1821 pre_inc or pre_dec which produces invalid form of PowerPC lwa
1825 lwa_operand (op
, mode
)
1827 enum machine_mode mode
;
1831 if (reload_completed
&& GET_CODE (inner
) == SUBREG
)
1832 inner
= SUBREG_REG (inner
);
1834 return gpc_reg_operand (inner
, mode
)
1835 || (memory_operand (inner
, mode
)
1836 && GET_CODE (XEXP (inner
, 0)) != PRE_INC
1837 && GET_CODE (XEXP (inner
, 0)) != PRE_DEC
1838 && (GET_CODE (XEXP (inner
, 0)) != PLUS
1839 || GET_CODE (XEXP (XEXP (inner
, 0), 1)) != CONST_INT
1840 || INTVAL (XEXP (XEXP (inner
, 0), 1)) % 4 == 0));
1843 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
1846 symbol_ref_operand (op
, mode
)
1848 enum machine_mode mode
;
1850 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1853 return (GET_CODE (op
) == SYMBOL_REF
);
1856 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1857 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
1860 call_operand (op
, mode
)
1862 enum machine_mode mode
;
1864 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1867 return (GET_CODE (op
) == SYMBOL_REF
1868 || (GET_CODE (op
) == REG
1869 && (REGNO (op
) == LINK_REGISTER_REGNUM
1870 || REGNO (op
) == COUNT_REGISTER_REGNUM
1871 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
)));
1874 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1875 this file and the function is not weakly defined. */
1878 current_file_function_operand (op
, mode
)
1880 enum machine_mode mode ATTRIBUTE_UNUSED
;
1882 return (GET_CODE (op
) == SYMBOL_REF
1883 && (SYMBOL_REF_FLAG (op
)
1884 || (op
== XEXP (DECL_RTL (current_function_decl
), 0)
1885 && ! DECL_WEAK (current_function_decl
))));
1888 /* Return 1 if this operand is a valid input for a move insn. */
1891 input_operand (op
, mode
)
1893 enum machine_mode mode
;
1895 /* Memory is always valid. */
1896 if (memory_operand (op
, mode
))
1899 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
1900 if (GET_CODE (op
) == CONSTANT_P_RTX
)
1903 /* For floating-point, easy constants are valid. */
1904 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
1906 && easy_fp_constant (op
, mode
))
1909 /* Allow any integer constant. */
1910 if (GET_MODE_CLASS (mode
) == MODE_INT
1911 && (GET_CODE (op
) == CONST_INT
1912 || GET_CODE (op
) == CONST_DOUBLE
))
1915 /* For floating-point or multi-word mode, the only remaining valid type
1917 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
1918 || GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
1919 return register_operand (op
, mode
);
1921 /* The only cases left are integral modes one word or smaller (we
1922 do not get called for MODE_CC values). These can be in any
1924 if (register_operand (op
, mode
))
1927 /* A SYMBOL_REF referring to the TOC is valid. */
1928 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op
))
1931 /* A constant pool expression (relative to the TOC) is valid */
1932 if (TOC_RELATIVE_EXPR_P (op
))
1935 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
1937 if (DEFAULT_ABI
== ABI_V4
1938 && (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == CONST
)
1939 && small_data_operand (op
, Pmode
))
1945 /* Return 1 for an operand in small memory on V.4/eabi. */
1948 small_data_operand (op
, mode
)
1949 rtx op ATTRIBUTE_UNUSED
;
1950 enum machine_mode mode ATTRIBUTE_UNUSED
;
1955 if (rs6000_sdata
== SDATA_NONE
|| rs6000_sdata
== SDATA_DATA
)
1958 if (DEFAULT_ABI
!= ABI_V4
)
1961 if (GET_CODE (op
) == SYMBOL_REF
)
1964 else if (GET_CODE (op
) != CONST
1965 || GET_CODE (XEXP (op
, 0)) != PLUS
1966 || GET_CODE (XEXP (XEXP (op
, 0), 0)) != SYMBOL_REF
1967 || GET_CODE (XEXP (XEXP (op
, 0), 1)) != CONST_INT
)
1972 rtx sum
= XEXP (op
, 0);
1973 HOST_WIDE_INT summand
;
1975 /* We have to be careful here, because it is the referenced address
1976 that must be 32k from _SDA_BASE_, not just the symbol. */
1977 summand
= INTVAL (XEXP (sum
, 1));
1978 if (summand
< 0 || summand
> g_switch_value
)
1981 sym_ref
= XEXP (sum
, 0);
1984 if (*XSTR (sym_ref
, 0) != '@')
1995 constant_pool_expr_1 (op
, have_sym
, have_toc
)
2000 switch (GET_CODE(op
))
2003 if (CONSTANT_POOL_ADDRESS_P (op
))
2005 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op
), Pmode
))
2013 else if (! strcmp (XSTR (op
, 0), toc_label_name
))
2022 return (constant_pool_expr_1 (XEXP (op
, 0), have_sym
, have_toc
)
2023 && constant_pool_expr_1 (XEXP (op
, 1), have_sym
, have_toc
));
2025 return constant_pool_expr_1 (XEXP (op
, 0), have_sym
, have_toc
);
2034 constant_pool_expr_p (op
)
2039 return constant_pool_expr_1 (op
, &have_sym
, &have_toc
) && have_sym
;
2043 toc_relative_expr_p (op
)
2048 return constant_pool_expr_1 (op
, &have_sym
, &have_toc
) && have_toc
;
2051 /* Try machine-dependent ways of modifying an illegitimate address
2052 to be legitimate. If we find one, return the new, valid address.
2053 This is used from only one place: `memory_address' in explow.c.
2055 OLDX is the address as it was before break_out_memory_refs was
2056 called. In some cases it is useful to look at this to decide what
2059 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2061 It is always safe for this function to do nothing. It exists to
2062 recognize opportunities to optimize the output.
2064 On RS/6000, first check for the sum of a register with a constant
2065 integer that is out of range. If so, generate code to add the
2066 constant with the low-order 16 bits masked to the register and force
2067 this result into another register (this can be done with `cau').
2068 Then generate an address of REG+(CONST&0xffff), allowing for the
2069 possibility of bit 16 being a one.
2071 Then check for the sum of a register and something not constant, try to
2072 load the other things into a register and return the sum. */
2074 rs6000_legitimize_address (x
, oldx
, mode
)
2076 rtx oldx ATTRIBUTE_UNUSED
;
2077 enum machine_mode mode
;
2079 if (GET_CODE (x
) == PLUS
2080 && GET_CODE (XEXP (x
, 0)) == REG
2081 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2082 && (unsigned HOST_WIDE_INT
) (INTVAL (XEXP (x
, 1)) + 0x8000) >= 0x10000)
2084 HOST_WIDE_INT high_int
, low_int
;
2086 low_int
= ((INTVAL (XEXP (x
, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2087 high_int
= INTVAL (XEXP (x
, 1)) - low_int
;
2088 sum
= force_operand (gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
2089 GEN_INT (high_int
)), 0);
2090 return gen_rtx_PLUS (Pmode
, sum
, GEN_INT (low_int
));
2092 else if (GET_CODE (x
) == PLUS
2093 && GET_CODE (XEXP (x
, 0)) == REG
2094 && GET_CODE (XEXP (x
, 1)) != CONST_INT
2095 && GET_MODE_NUNITS (mode
) == 1
2096 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
)
2099 && (TARGET_POWERPC64
|| mode
!= DImode
)
2102 return gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
2103 force_reg (Pmode
, force_operand (XEXP (x
, 1), 0)));
2105 else if (ALTIVEC_VECTOR_MODE (mode
))
2109 /* Make sure both operands are registers. */
2110 if (GET_CODE (x
) == PLUS
)
2111 return gen_rtx_PLUS (Pmode
, force_reg (Pmode
, XEXP (x
, 0)),
2112 force_reg (Pmode
, XEXP (x
, 1)));
2114 reg
= force_reg (Pmode
, x
);
2117 else if (SPE_VECTOR_MODE (mode
))
2119 /* We accept [reg + reg] and [reg + OFFSET]. */
2121 if (GET_CODE (x
) == PLUS
)
2123 rtx op1
= XEXP (x
, 0);
2124 rtx op2
= XEXP (x
, 1);
2126 op1
= force_reg (Pmode
, op1
);
2128 if (GET_CODE (op2
) != REG
2129 && (GET_CODE (op2
) != CONST_INT
2130 || !SPE_CONST_OFFSET_OK (INTVAL (op2
))))
2131 op2
= force_reg (Pmode
, op2
);
2133 return gen_rtx_PLUS (Pmode
, op1
, op2
);
2136 return force_reg (Pmode
, x
);
2138 else if (TARGET_ELF
&& TARGET_32BIT
&& TARGET_NO_TOC
&& ! flag_pic
2139 && GET_CODE (x
) != CONST_INT
2140 && GET_CODE (x
) != CONST_DOUBLE
2142 && GET_MODE_NUNITS (mode
) == 1
2143 && (GET_MODE_BITSIZE (mode
) <= 32
2144 || ((TARGET_HARD_FLOAT
&& TARGET_FPRS
) && mode
== DFmode
)))
2146 rtx reg
= gen_reg_rtx (Pmode
);
2147 emit_insn (gen_elf_high (reg
, (x
)));
2148 return gen_rtx_LO_SUM (Pmode
, reg
, (x
));
2150 else if (TARGET_MACHO
&& TARGET_32BIT
&& TARGET_NO_TOC
2152 && GET_CODE (x
) != CONST_INT
2153 && GET_CODE (x
) != CONST_DOUBLE
2155 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
) || mode
!= DFmode
)
2159 rtx reg
= gen_reg_rtx (Pmode
);
2160 emit_insn (gen_macho_high (reg
, (x
)));
2161 return gen_rtx_LO_SUM (Pmode
, reg
, (x
));
2164 && CONSTANT_POOL_EXPR_P (x
)
2165 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x
), Pmode
))
2167 return create_TOC_reference (x
);
2173 /* The convention appears to be to define this wherever it is used.
2174 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2175 is now used here. */
2176 #ifndef REG_MODE_OK_FOR_BASE_P
2177 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2180 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2181 replace the input X, or the original X if no replacement is called for.
2182 The output parameter *WIN is 1 if the calling macro should goto WIN,
2185 For RS/6000, we wish to handle large displacements off a base
2186 register by splitting the addend across an addiu/addis and the mem insn.
2187 This cuts number of extra insns needed from 3 to 1.
2189 On Darwin, we use this to generate code for floating point constants.
2190 A movsf_low is generated so we wind up with 2 instructions rather than 3.
2191 The Darwin code is inside #if TARGET_MACHO because only then is
2192 machopic_function_base_name() defined. */
2194 rs6000_legitimize_reload_address (x
, mode
, opnum
, type
, ind_levels
, win
)
2196 enum machine_mode mode
;
2199 int ind_levels ATTRIBUTE_UNUSED
;
2202 /* We must recognize output that we have already generated ourselves. */
2203 if (GET_CODE (x
) == PLUS
2204 && GET_CODE (XEXP (x
, 0)) == PLUS
2205 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == REG
2206 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
2207 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2209 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2210 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
2211 opnum
, (enum reload_type
)type
);
2217 if (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
2218 && GET_CODE (x
) == LO_SUM
2219 && GET_CODE (XEXP (x
, 0)) == PLUS
2220 && XEXP (XEXP (x
, 0), 0) == pic_offset_table_rtx
2221 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == HIGH
2222 && GET_CODE (XEXP (XEXP (XEXP (x
, 0), 1), 0)) == CONST
2223 && XEXP (XEXP (XEXP (x
, 0), 1), 0) == XEXP (x
, 1)
2224 && GET_CODE (XEXP (XEXP (x
, 1), 0)) == MINUS
2225 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 0)) == SYMBOL_REF
2226 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 1)) == SYMBOL_REF
)
2228 /* Result of previous invocation of this function on Darwin
2229 floating point constant. */
2230 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2231 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
2232 opnum
, (enum reload_type
)type
);
2237 if (GET_CODE (x
) == PLUS
2238 && GET_CODE (XEXP (x
, 0)) == REG
2239 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
2240 && REG_MODE_OK_FOR_BASE_P (XEXP (x
, 0), mode
)
2241 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2242 && !SPE_VECTOR_MODE (mode
)
2243 && !ALTIVEC_VECTOR_MODE (mode
))
2245 HOST_WIDE_INT val
= INTVAL (XEXP (x
, 1));
2246 HOST_WIDE_INT low
= ((val
& 0xffff) ^ 0x8000) - 0x8000;
2248 = (((val
- low
) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2250 /* Check for 32-bit overflow. */
2251 if (high
+ low
!= val
)
2257 /* Reload the high part into a base reg; leave the low part
2258 in the mem directly. */
2260 x
= gen_rtx_PLUS (GET_MODE (x
),
2261 gen_rtx_PLUS (GET_MODE (x
), XEXP (x
, 0),
2265 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2266 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
2267 opnum
, (enum reload_type
)type
);
2272 if (GET_CODE (x
) == SYMBOL_REF
2273 && DEFAULT_ABI
== ABI_DARWIN
2274 && !ALTIVEC_VECTOR_MODE (mode
)
2277 /* Darwin load of floating point constant. */
2278 rtx offset
= gen_rtx (CONST
, Pmode
,
2279 gen_rtx (MINUS
, Pmode
, x
,
2280 gen_rtx (SYMBOL_REF
, Pmode
,
2281 machopic_function_base_name ())));
2282 x
= gen_rtx (LO_SUM
, GET_MODE (x
),
2283 gen_rtx (PLUS
, Pmode
, pic_offset_table_rtx
,
2284 gen_rtx (HIGH
, Pmode
, offset
)), offset
);
2285 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2286 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
2287 opnum
, (enum reload_type
)type
);
2293 && CONSTANT_POOL_EXPR_P (x
)
2294 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x
), mode
))
2296 (x
) = create_TOC_reference (x
);
2304 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2305 that is a valid memory address for an instruction.
2306 The MODE argument is the machine mode for the MEM expression
2307 that wants to use this address.
2309 On the RS/6000, there are four valid address: a SYMBOL_REF that
2310 refers to a constant pool entry of an address (or the sum of it
2311 plus a constant), a short (16-bit signed) constant plus a register,
2312 the sum of two registers, or a register indirect, possibly with an
2313 auto-increment. For DFmode and DImode with an constant plus register,
2314 we must ensure that both words are addressable or PowerPC64 with offset
2317 For modes spanning multiple registers (DFmode in 32-bit GPRs,
2318 32-bit DImode, TImode), indexed addressing cannot be used because
2319 adjacent memory cells are accessed by adding word-sized offsets
2320 during assembly output. */
2322 rs6000_legitimate_address (mode
, x
, reg_ok_strict
)
2323 enum machine_mode mode
;
2327 if (LEGITIMATE_INDIRECT_ADDRESS_P (x
, reg_ok_strict
))
2329 if ((GET_CODE (x
) == PRE_INC
|| GET_CODE (x
) == PRE_DEC
)
2330 && !ALTIVEC_VECTOR_MODE (mode
)
2331 && !SPE_VECTOR_MODE (mode
)
2333 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x
, 0), reg_ok_strict
))
2335 if (LEGITIMATE_SMALL_DATA_P (mode
, x
))
2337 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x
))
2339 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2341 && GET_CODE (x
) == PLUS
2342 && GET_CODE (XEXP (x
, 0)) == REG
2343 && XEXP (x
, 0) == virtual_stack_vars_rtx
2344 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2346 if (LEGITIMATE_OFFSET_ADDRESS_P (mode
, x
, reg_ok_strict
))
2349 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
)
2352 && (TARGET_POWERPC64
|| mode
!= DImode
)
2353 && LEGITIMATE_INDEXED_ADDRESS_P (x
, reg_ok_strict
))
2355 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode
, x
, reg_ok_strict
))
2360 /* Try to output insns to set TARGET equal to the constant C if it can
2361 be done in less than N insns. Do all computations in MODE.
2362 Returns the place where the output has been placed if it can be
2363 done and the insns have been emitted. If it would take more than N
2364 insns, zero is returned and no insns and emitted. */
2367 rs6000_emit_set_const (dest
, mode
, source
, n
)
2369 enum machine_mode mode
;
2370 int n ATTRIBUTE_UNUSED
;
2372 rtx result
, insn
, set
;
2373 HOST_WIDE_INT c0
, c1
;
2375 if (mode
== QImode
|| mode
== HImode
)
2378 dest
= gen_reg_rtx (mode
);
2379 emit_insn (gen_rtx_SET (VOIDmode
, dest
, source
));
2382 else if (mode
== SImode
)
2384 result
= no_new_pseudos
? dest
: gen_reg_rtx (SImode
);
2386 emit_insn (gen_rtx_SET (VOIDmode
, result
,
2387 GEN_INT (INTVAL (source
)
2388 & (~ (HOST_WIDE_INT
) 0xffff))));
2389 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
2390 gen_rtx_IOR (SImode
, result
,
2391 GEN_INT (INTVAL (source
) & 0xffff))));
2394 else if (mode
== DImode
)
2396 if (GET_CODE (source
) == CONST_INT
)
2398 c0
= INTVAL (source
);
2401 else if (GET_CODE (source
) == CONST_DOUBLE
)
2403 #if HOST_BITS_PER_WIDE_INT >= 64
2404 c0
= CONST_DOUBLE_LOW (source
);
2407 c0
= CONST_DOUBLE_LOW (source
);
2408 c1
= CONST_DOUBLE_HIGH (source
);
2414 result
= rs6000_emit_set_long_const (dest
, c0
, c1
);
2419 insn
= get_last_insn ();
2420 set
= single_set (insn
);
2421 if (! CONSTANT_P (SET_SRC (set
)))
2422 set_unique_reg_note (insn
, REG_EQUAL
, source
);
2427 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2428 fall back to a straight forward decomposition. We do this to avoid
2429 exponential run times encountered when looking for longer sequences
2430 with rs6000_emit_set_const. */
2432 rs6000_emit_set_long_const (dest
, c1
, c2
)
2434 HOST_WIDE_INT c1
, c2
;
2436 if (!TARGET_POWERPC64
)
2438 rtx operand1
, operand2
;
2440 operand1
= operand_subword_force (dest
, WORDS_BIG_ENDIAN
== 0,
2442 operand2
= operand_subword_force (dest
, WORDS_BIG_ENDIAN
!= 0,
2444 emit_move_insn (operand1
, GEN_INT (c1
));
2445 emit_move_insn (operand2
, GEN_INT (c2
));
2449 HOST_WIDE_INT ud1
, ud2
, ud3
, ud4
;
2452 ud2
= (c1
& 0xffff0000) >> 16;
2453 #if HOST_BITS_PER_WIDE_INT >= 64
2457 ud4
= (c2
& 0xffff0000) >> 16;
2459 if ((ud4
== 0xffff && ud3
== 0xffff && ud2
== 0xffff && (ud1
& 0x8000))
2460 || (ud4
== 0 && ud3
== 0 && ud2
== 0 && ! (ud1
& 0x8000)))
2463 emit_move_insn (dest
, GEN_INT (((ud1
^ 0x8000) - 0x8000)));
2465 emit_move_insn (dest
, GEN_INT (ud1
));
2468 else if ((ud4
== 0xffff && ud3
== 0xffff && (ud2
& 0x8000))
2469 || (ud4
== 0 && ud3
== 0 && ! (ud2
& 0x8000)))
2472 emit_move_insn (dest
, GEN_INT (((ud2
<< 16) ^ 0x80000000)
2475 emit_move_insn (dest
, GEN_INT (ud2
<< 16));
2477 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
2479 else if ((ud4
== 0xffff && (ud3
& 0x8000))
2480 || (ud4
== 0 && ! (ud3
& 0x8000)))
2483 emit_move_insn (dest
, GEN_INT (((ud3
<< 16) ^ 0x80000000)
2486 emit_move_insn (dest
, GEN_INT (ud3
<< 16));
2489 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud2
)));
2490 emit_move_insn (dest
, gen_rtx_ASHIFT (DImode
, dest
, GEN_INT (16)));
2492 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
2497 emit_move_insn (dest
, GEN_INT (((ud4
<< 16) ^ 0x80000000)
2500 emit_move_insn (dest
, GEN_INT (ud4
<< 16));
2503 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud3
)));
2505 emit_move_insn (dest
, gen_rtx_ASHIFT (DImode
, dest
, GEN_INT (32)));
2507 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
,
2508 GEN_INT (ud2
<< 16)));
2510 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
2516 /* Emit a move from SOURCE to DEST in mode MODE. */
2518 rs6000_emit_move (dest
, source
, mode
)
2521 enum machine_mode mode
;
2525 operands
[1] = source
;
2527 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
2528 if (GET_CODE (operands
[1]) == CONST_DOUBLE
2529 && ! FLOAT_MODE_P (mode
)
2530 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
2532 /* FIXME. This should never happen. */
2533 /* Since it seems that it does, do the safe thing and convert
2535 operands
[1] = gen_int_mode (CONST_DOUBLE_LOW (operands
[1]), mode
);
2537 if (GET_CODE (operands
[1]) == CONST_DOUBLE
2538 && ! FLOAT_MODE_P (mode
)
2539 && ((CONST_DOUBLE_HIGH (operands
[1]) == 0
2540 && CONST_DOUBLE_LOW (operands
[1]) >= 0)
2541 || (CONST_DOUBLE_HIGH (operands
[1]) == -1
2542 && CONST_DOUBLE_LOW (operands
[1]) < 0)))
2545 /* Check if GCC is setting up a block move that will end up using FP
2546 registers as temporaries. We must make sure this is acceptable. */
2547 if (GET_CODE (operands
[0]) == MEM
2548 && GET_CODE (operands
[1]) == MEM
2550 && (SLOW_UNALIGNED_ACCESS (DImode
, MEM_ALIGN (operands
[0]))
2551 || SLOW_UNALIGNED_ACCESS (DImode
, MEM_ALIGN (operands
[1])))
2552 && ! (SLOW_UNALIGNED_ACCESS (SImode
, (MEM_ALIGN (operands
[0]) > 32
2553 ? 32 : MEM_ALIGN (operands
[0])))
2554 || SLOW_UNALIGNED_ACCESS (SImode
, (MEM_ALIGN (operands
[1]) > 32
2556 : MEM_ALIGN (operands
[1]))))
2557 && ! MEM_VOLATILE_P (operands
[0])
2558 && ! MEM_VOLATILE_P (operands
[1]))
2560 emit_move_insn (adjust_address (operands
[0], SImode
, 0),
2561 adjust_address (operands
[1], SImode
, 0));
2562 emit_move_insn (adjust_address (operands
[0], SImode
, 4),
2563 adjust_address (operands
[1], SImode
, 4));
2567 if (!no_new_pseudos
)
2569 if (GET_CODE (operands
[1]) == MEM
&& optimize
> 0
2570 && (mode
== QImode
|| mode
== HImode
|| mode
== SImode
)
2571 && GET_MODE_SIZE (mode
) < GET_MODE_SIZE (word_mode
))
2573 rtx reg
= gen_reg_rtx (word_mode
);
2575 emit_insn (gen_rtx_SET (word_mode
, reg
,
2576 gen_rtx_ZERO_EXTEND (word_mode
,
2578 operands
[1] = gen_lowpart (mode
, reg
);
2580 if (GET_CODE (operands
[0]) != REG
)
2581 operands
[1] = force_reg (mode
, operands
[1]);
2584 if (mode
== SFmode
&& ! TARGET_POWERPC
2585 && TARGET_HARD_FLOAT
&& TARGET_FPRS
2586 && GET_CODE (operands
[0]) == MEM
)
2590 if (reload_in_progress
|| reload_completed
)
2591 regnum
= true_regnum (operands
[1]);
2592 else if (GET_CODE (operands
[1]) == REG
)
2593 regnum
= REGNO (operands
[1]);
2597 /* If operands[1] is a register, on POWER it may have
2598 double-precision data in it, so truncate it to single
2600 if (FP_REGNO_P (regnum
) || regnum
>= FIRST_PSEUDO_REGISTER
)
2603 newreg
= (no_new_pseudos
? operands
[1] : gen_reg_rtx (mode
));
2604 emit_insn (gen_aux_truncdfsf2 (newreg
, operands
[1]));
2605 operands
[1] = newreg
;
2609 /* Handle the case where reload calls us with an invalid address;
2610 and the case of CONSTANT_P_RTX. */
2611 if (!ALTIVEC_VECTOR_MODE (mode
)
2612 && (! general_operand (operands
[1], mode
)
2613 || ! nonimmediate_operand (operands
[0], mode
)
2614 || GET_CODE (operands
[1]) == CONSTANT_P_RTX
))
2616 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2620 /* FIXME: In the long term, this switch statement should go away
2621 and be replaced by a sequence of tests based on things like
2627 if (CONSTANT_P (operands
[1])
2628 && GET_CODE (operands
[1]) != CONST_INT
)
2629 operands
[1] = force_const_mem (mode
, operands
[1]);
2635 if (CONSTANT_P (operands
[1])
2636 && ! easy_fp_constant (operands
[1], mode
))
2637 operands
[1] = force_const_mem (mode
, operands
[1]);
2648 if (CONSTANT_P (operands
[1])
2649 && !easy_vector_constant (operands
[1]))
2650 operands
[1] = force_const_mem (mode
, operands
[1]);
2655 /* Use default pattern for address of ELF small data */
2658 && DEFAULT_ABI
== ABI_V4
2659 && (GET_CODE (operands
[1]) == SYMBOL_REF
2660 || GET_CODE (operands
[1]) == CONST
)
2661 && small_data_operand (operands
[1], mode
))
2663 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2667 if (DEFAULT_ABI
== ABI_V4
2668 && mode
== Pmode
&& mode
== SImode
2669 && flag_pic
== 1 && got_operand (operands
[1], mode
))
2671 emit_insn (gen_movsi_got (operands
[0], operands
[1]));
2675 if ((TARGET_ELF
|| DEFAULT_ABI
== ABI_DARWIN
)
2676 && TARGET_NO_TOC
&& ! flag_pic
2678 && CONSTANT_P (operands
[1])
2679 && GET_CODE (operands
[1]) != HIGH
2680 && GET_CODE (operands
[1]) != CONST_INT
)
2682 rtx target
= (no_new_pseudos
? operands
[0] : gen_reg_rtx (mode
));
2684 /* If this is a function address on -mcall-aixdesc,
2685 convert it to the address of the descriptor. */
2686 if (DEFAULT_ABI
== ABI_AIX
2687 && GET_CODE (operands
[1]) == SYMBOL_REF
2688 && XSTR (operands
[1], 0)[0] == '.')
2690 const char *name
= XSTR (operands
[1], 0);
2692 while (*name
== '.')
2694 new_ref
= gen_rtx_SYMBOL_REF (Pmode
, name
);
2695 CONSTANT_POOL_ADDRESS_P (new_ref
)
2696 = CONSTANT_POOL_ADDRESS_P (operands
[1]);
2697 SYMBOL_REF_FLAG (new_ref
) = SYMBOL_REF_FLAG (operands
[1]);
2698 SYMBOL_REF_USED (new_ref
) = SYMBOL_REF_USED (operands
[1]);
2699 operands
[1] = new_ref
;
2702 if (DEFAULT_ABI
== ABI_DARWIN
)
2704 emit_insn (gen_macho_high (target
, operands
[1]));
2705 emit_insn (gen_macho_low (operands
[0], target
, operands
[1]));
2709 emit_insn (gen_elf_high (target
, operands
[1]));
2710 emit_insn (gen_elf_low (operands
[0], target
, operands
[1]));
2714 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2715 and we have put it in the TOC, we just need to make a TOC-relative
2718 && GET_CODE (operands
[1]) == SYMBOL_REF
2719 && CONSTANT_POOL_EXPR_P (operands
[1])
2720 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands
[1]),
2721 get_pool_mode (operands
[1])))
2723 operands
[1] = create_TOC_reference (operands
[1]);
2725 else if (mode
== Pmode
2726 && CONSTANT_P (operands
[1])
2727 && ((GET_CODE (operands
[1]) != CONST_INT
2728 && ! easy_fp_constant (operands
[1], mode
))
2729 || (GET_CODE (operands
[1]) == CONST_INT
2730 && num_insns_constant (operands
[1], mode
) > 2)
2731 || (GET_CODE (operands
[0]) == REG
2732 && FP_REGNO_P (REGNO (operands
[0]))))
2733 && GET_CODE (operands
[1]) != HIGH
2734 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands
[1])
2735 && ! TOC_RELATIVE_EXPR_P (operands
[1]))
2737 /* Emit a USE operation so that the constant isn't deleted if
2738 expensive optimizations are turned on because nobody
2739 references it. This should only be done for operands that
2740 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2741 This should not be done for operands that contain LABEL_REFs.
2742 For now, we just handle the obvious case. */
2743 if (GET_CODE (operands
[1]) != LABEL_REF
)
2744 emit_insn (gen_rtx_USE (VOIDmode
, operands
[1]));
2747 /* Darwin uses a special PIC legitimizer. */
2748 if (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
)
2751 rs6000_machopic_legitimize_pic_address (operands
[1], mode
,
2753 if (operands
[0] != operands
[1])
2754 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2759 /* If we are to limit the number of things we put in the TOC and
2760 this is a symbol plus a constant we can add in one insn,
2761 just put the symbol in the TOC and add the constant. Don't do
2762 this if reload is in progress. */
2763 if (GET_CODE (operands
[1]) == CONST
2764 && TARGET_NO_SUM_IN_TOC
&& ! reload_in_progress
2765 && GET_CODE (XEXP (operands
[1], 0)) == PLUS
2766 && add_operand (XEXP (XEXP (operands
[1], 0), 1), mode
)
2767 && (GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == LABEL_REF
2768 || GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == SYMBOL_REF
)
2769 && ! side_effects_p (operands
[0]))
2772 force_const_mem (mode
, XEXP (XEXP (operands
[1], 0), 0));
2773 rtx other
= XEXP (XEXP (operands
[1], 0), 1);
2775 sym
= force_reg (mode
, sym
);
2777 emit_insn (gen_addsi3 (operands
[0], sym
, other
));
2779 emit_insn (gen_adddi3 (operands
[0], sym
, other
));
2783 operands
[1] = force_const_mem (mode
, operands
[1]);
2786 && CONSTANT_POOL_EXPR_P (XEXP (operands
[1], 0))
2787 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
2788 get_pool_constant (XEXP (operands
[1], 0)),
2789 get_pool_mode (XEXP (operands
[1], 0))))
2792 = gen_rtx_MEM (mode
,
2793 create_TOC_reference (XEXP (operands
[1], 0)));
2794 set_mem_alias_set (operands
[1], get_TOC_alias_set ());
2795 RTX_UNCHANGING_P (operands
[1]) = 1;
2801 if (GET_CODE (operands
[0]) == MEM
2802 && GET_CODE (XEXP (operands
[0], 0)) != REG
2803 && ! reload_in_progress
)
2805 = replace_equiv_address (operands
[0],
2806 copy_addr_to_reg (XEXP (operands
[0], 0)));
2808 if (GET_CODE (operands
[1]) == MEM
2809 && GET_CODE (XEXP (operands
[1], 0)) != REG
2810 && ! reload_in_progress
)
2812 = replace_equiv_address (operands
[1],
2813 copy_addr_to_reg (XEXP (operands
[1], 0)));
2820 /* Above, we may have called force_const_mem which may have returned
2821 an invalid address. If we can, fix this up; otherwise, reload will
2822 have to deal with it. */
2823 if (GET_CODE (operands
[1]) == MEM
2824 && ! memory_address_p (mode
, XEXP (operands
[1], 0))
2825 && ! reload_in_progress
)
2826 operands
[1] = adjust_address (operands
[1], mode
, 0);
2828 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2832 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2833 for a call to a function whose data type is FNTYPE.
2834 For a library call, FNTYPE is 0.
2836 For incoming args we set the number of arguments in the prototype large
2837 so we never return a PARALLEL. */
2840 init_cumulative_args (cum
, fntype
, libname
, incoming
)
2841 CUMULATIVE_ARGS
*cum
;
2843 rtx libname ATTRIBUTE_UNUSED
;
2846 static CUMULATIVE_ARGS zero_cumulative
;
2848 *cum
= zero_cumulative
;
2850 cum
->fregno
= FP_ARG_MIN_REG
;
2851 cum
->vregno
= ALTIVEC_ARG_MIN_REG
;
2852 cum
->prototype
= (fntype
&& TYPE_ARG_TYPES (fntype
));
2853 cum
->call_cookie
= CALL_NORMAL
;
2854 cum
->sysv_gregno
= GP_ARG_MIN_REG
;
2857 cum
->nargs_prototype
= 1000; /* don't return a PARALLEL */
2859 else if (cum
->prototype
)
2860 cum
->nargs_prototype
= (list_length (TYPE_ARG_TYPES (fntype
)) - 1
2861 + (TYPE_MODE (TREE_TYPE (fntype
)) == BLKmode
2862 || RETURN_IN_MEMORY (TREE_TYPE (fntype
))));
2865 cum
->nargs_prototype
= 0;
2867 cum
->orig_nargs
= cum
->nargs_prototype
;
2869 /* Check for a longcall attribute. */
2871 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype
))
2872 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype
)))
2873 cum
->call_cookie
= CALL_LONG
;
2875 if (TARGET_DEBUG_ARG
)
2877 fprintf (stderr
, "\ninit_cumulative_args:");
2880 tree ret_type
= TREE_TYPE (fntype
);
2881 fprintf (stderr
, " ret code = %s,",
2882 tree_code_name
[ (int)TREE_CODE (ret_type
) ]);
2885 if (cum
->call_cookie
& CALL_LONG
)
2886 fprintf (stderr
, " longcall,");
2888 fprintf (stderr
, " proto = %d, nargs = %d\n",
2889 cum
->prototype
, cum
->nargs_prototype
);
2893 /* If defined, a C expression which determines whether, and in which
2894 direction, to pad out an argument with extra space. The value
2895 should be of type `enum direction': either `upward' to pad above
2896 the argument, `downward' to pad below, or `none' to inhibit
2899 For the AIX ABI structs are always stored left shifted in their
2903 function_arg_padding (mode
, type
)
2904 enum machine_mode mode
;
2907 if (type
!= 0 && AGGREGATE_TYPE_P (type
))
2910 /* This is the default definition. */
2911 return (! BYTES_BIG_ENDIAN
2914 ? (type
&& TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
2915 && int_size_in_bytes (type
) < (PARM_BOUNDARY
/ BITS_PER_UNIT
))
2916 : GET_MODE_BITSIZE (mode
) < PARM_BOUNDARY
)
2917 ? downward
: upward
));
2920 /* If defined, a C expression that gives the alignment boundary, in bits,
2921 of an argument with the specified mode and type. If it is not defined,
2922 PARM_BOUNDARY is used for all arguments.
2924 V.4 wants long longs to be double word aligned. */
2927 function_arg_boundary (mode
, type
)
2928 enum machine_mode mode
;
2929 tree type ATTRIBUTE_UNUSED
;
2931 if (DEFAULT_ABI
== ABI_V4
&& (mode
== DImode
|| mode
== DFmode
))
2933 else if (SPE_VECTOR_MODE (mode
))
2935 else if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
2938 return PARM_BOUNDARY
;
2941 /* Update the data in CUM to advance over an argument
2942 of mode MODE and data type TYPE.
2943 (TYPE is null for libcalls where that information may not be available.) */
2946 function_arg_advance (cum
, mode
, type
, named
)
2947 CUMULATIVE_ARGS
*cum
;
2948 enum machine_mode mode
;
2952 cum
->nargs_prototype
--;
2954 if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
2956 if (cum
->vregno
<= ALTIVEC_ARG_MAX_REG
&& cum
->nargs_prototype
>= 0)
2959 cum
->words
+= RS6000_ARG_SIZE (mode
, type
);
2961 else if (TARGET_SPE_ABI
&& TARGET_SPE
&& SPE_VECTOR_MODE (mode
)
2962 && named
&& cum
->sysv_gregno
<= GP_ARG_MAX_REG
)
2964 else if (DEFAULT_ABI
== ABI_V4
)
2966 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
2967 && (mode
== SFmode
|| mode
== DFmode
))
2969 if (cum
->fregno
<= FP_ARG_V4_MAX_REG
)
2974 cum
->words
+= cum
->words
& 1;
2975 cum
->words
+= RS6000_ARG_SIZE (mode
, type
);
2981 int gregno
= cum
->sysv_gregno
;
2983 /* Aggregates and IEEE quad get passed by reference. */
2984 if ((type
&& AGGREGATE_TYPE_P (type
))
2988 n_words
= RS6000_ARG_SIZE (mode
, type
);
2990 /* Long long and SPE vectors are put in odd registers. */
2991 if (n_words
== 2 && (gregno
& 1) == 0)
2994 /* Long long and SPE vectors are not split between registers
2996 if (gregno
+ n_words
- 1 > GP_ARG_MAX_REG
)
2998 /* Long long is aligned on the stack. */
3000 cum
->words
+= cum
->words
& 1;
3001 cum
->words
+= n_words
;
3004 /* Note: continuing to accumulate gregno past when we've started
3005 spilling to the stack indicates the fact that we've started
3006 spilling to the stack to expand_builtin_saveregs. */
3007 cum
->sysv_gregno
= gregno
+ n_words
;
3010 if (TARGET_DEBUG_ARG
)
3012 fprintf (stderr
, "function_adv: words = %2d, fregno = %2d, ",
3013 cum
->words
, cum
->fregno
);
3014 fprintf (stderr
, "gregno = %2d, nargs = %4d, proto = %d, ",
3015 cum
->sysv_gregno
, cum
->nargs_prototype
, cum
->prototype
);
3016 fprintf (stderr
, "mode = %4s, named = %d\n",
3017 GET_MODE_NAME (mode
), named
);
3022 int align
= (TARGET_32BIT
&& (cum
->words
& 1) != 0
3023 && function_arg_boundary (mode
, type
) == 64) ? 1 : 0;
3025 cum
->words
+= align
+ RS6000_ARG_SIZE (mode
, type
);
3027 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
3028 && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3031 if (TARGET_DEBUG_ARG
)
3033 fprintf (stderr
, "function_adv: words = %2d, fregno = %2d, ",
3034 cum
->words
, cum
->fregno
);
3035 fprintf (stderr
, "nargs = %4d, proto = %d, mode = %4s, ",
3036 cum
->nargs_prototype
, cum
->prototype
, GET_MODE_NAME (mode
));
3037 fprintf (stderr
, "named = %d, align = %d\n", named
, align
);
3042 /* Determine where to put an argument to a function.
3043 Value is zero to push the argument on the stack,
3044 or a hard register in which to store the argument.
3046 MODE is the argument's machine mode.
3047 TYPE is the data type of the argument (as a tree).
3048 This is null for libcalls where that information may
3050 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3051 the preceding args and about the function being called.
3052 NAMED is nonzero if this argument is a named parameter
3053 (otherwise it is an extra parameter matching an ellipsis).
3055 On RS/6000 the first eight words of non-FP are normally in registers
3056 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3057 Under V.4, the first 8 FP args are in registers.
3059 If this is floating-point and no prototype is specified, we use
3060 both an FP and integer register (or possibly FP reg and stack). Library
3061 functions (when TYPE is zero) always have the proper types for args,
3062 so we can pass the FP value just in one register. emit_library_function
3063 doesn't support PARALLEL anyway. */
3066 function_arg (cum
, mode
, type
, named
)
3067 CUMULATIVE_ARGS
*cum
;
3068 enum machine_mode mode
;
3072 enum rs6000_abi abi
= DEFAULT_ABI
;
3074 /* Return a marker to indicate whether CR1 needs to set or clear the
3075 bit that V.4 uses to say fp args were passed in registers.
3076 Assume that we don't need the marker for software floating point,
3077 or compiler generated library calls. */
3078 if (mode
== VOIDmode
)
3081 && cum
->nargs_prototype
< 0
3082 && type
&& (cum
->prototype
|| TARGET_NO_PROTOTYPE
))
3084 /* For the SPE, we need to crxor CR6 always. */
3086 return GEN_INT (cum
->call_cookie
| CALL_V4_SET_FP_ARGS
);
3087 else if (TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3088 return GEN_INT (cum
->call_cookie
3089 | ((cum
->fregno
== FP_ARG_MIN_REG
)
3090 ? CALL_V4_SET_FP_ARGS
3091 : CALL_V4_CLEAR_FP_ARGS
));
3094 return GEN_INT (cum
->call_cookie
);
3097 if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
3099 if (named
&& cum
->vregno
<= ALTIVEC_ARG_MAX_REG
)
3100 return gen_rtx_REG (mode
, cum
->vregno
);
3104 else if (TARGET_SPE_ABI
&& TARGET_SPE
&& SPE_VECTOR_MODE (mode
) && named
)
3106 if (cum
->sysv_gregno
<= GP_ARG_MAX_REG
)
3107 return gen_rtx_REG (mode
, cum
->sysv_gregno
);
3111 else if (abi
== ABI_V4
)
3113 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
3114 && (mode
== SFmode
|| mode
== DFmode
))
3116 if (cum
->fregno
<= FP_ARG_V4_MAX_REG
)
3117 return gen_rtx_REG (mode
, cum
->fregno
);
3124 int gregno
= cum
->sysv_gregno
;
3126 /* Aggregates and IEEE quad get passed by reference. */
3127 if ((type
&& AGGREGATE_TYPE_P (type
))
3131 n_words
= RS6000_ARG_SIZE (mode
, type
);
3133 /* Long long and SPE vectors are put in odd registers. */
3134 if (n_words
== 2 && (gregno
& 1) == 0)
3137 /* Long long and SPE vectors are not split between registers
3139 if (gregno
+ n_words
- 1 <= GP_ARG_MAX_REG
)
3141 /* SPE vectors in ... get split into 2 registers. */
3142 if (TARGET_SPE
&& TARGET_SPE_ABI
3143 && SPE_VECTOR_MODE (mode
) && !named
)
3146 enum machine_mode m
= GET_MODE_INNER (mode
);
3148 r1
= gen_rtx_REG (m
, gregno
);
3149 r1
= gen_rtx_EXPR_LIST (m
, r1
, const0_rtx
);
3150 r2
= gen_rtx_REG (m
, gregno
+ 1);
3151 r2
= gen_rtx_EXPR_LIST (m
, r2
, GEN_INT (4));
3152 return gen_rtx_PARALLEL (mode
, gen_rtvec (2, r1
, r2
));
3154 return gen_rtx_REG (mode
, gregno
);
3162 int align
= (TARGET_32BIT
&& (cum
->words
& 1) != 0
3163 && function_arg_boundary (mode
, type
) == 64) ? 1 : 0;
3164 int align_words
= cum
->words
+ align
;
3166 if (type
&& TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
3169 if (USE_FP_FOR_ARG_P (*cum
, mode
, type
))
3172 || ((cum
->nargs_prototype
> 0)
3173 /* IBM AIX extended its linkage convention definition always
3174 to require FP args after register save area hole on the
3176 && (DEFAULT_ABI
!= ABI_AIX
3178 || (align_words
< GP_ARG_NUM_REG
))))
3179 return gen_rtx_REG (mode
, cum
->fregno
);
3181 return gen_rtx_PARALLEL (mode
,
3183 gen_rtx_EXPR_LIST (VOIDmode
,
3184 ((align_words
>= GP_ARG_NUM_REG
)
3187 + RS6000_ARG_SIZE (mode
, type
)
3189 /* If this is partially on the stack, then
3190 we only include the portion actually
3191 in registers here. */
3192 ? gen_rtx_REG (SImode
,
3193 GP_ARG_MIN_REG
+ align_words
)
3194 : gen_rtx_REG (mode
,
3195 GP_ARG_MIN_REG
+ align_words
))),
3197 gen_rtx_EXPR_LIST (VOIDmode
,
3198 gen_rtx_REG (mode
, cum
->fregno
),
3201 else if (align_words
< GP_ARG_NUM_REG
)
3202 return gen_rtx_REG (mode
, GP_ARG_MIN_REG
+ align_words
);
3208 /* For an arg passed partly in registers and partly in memory,
3209 this is the number of registers used.
3210 For args passed entirely in registers or entirely in memory, zero. */
3213 function_arg_partial_nregs (cum
, mode
, type
, named
)
3214 CUMULATIVE_ARGS
*cum
;
3215 enum machine_mode mode
;
3217 int named ATTRIBUTE_UNUSED
;
3219 if (DEFAULT_ABI
== ABI_V4
)
3222 if (USE_FP_FOR_ARG_P (*cum
, mode
, type
)
3223 || USE_ALTIVEC_FOR_ARG_P (*cum
, mode
, type
))
3225 if (cum
->nargs_prototype
>= 0)
3229 if (cum
->words
< GP_ARG_NUM_REG
3230 && GP_ARG_NUM_REG
< (cum
->words
+ RS6000_ARG_SIZE (mode
, type
)))
3232 int ret
= GP_ARG_NUM_REG
- cum
->words
;
3233 if (ret
&& TARGET_DEBUG_ARG
)
3234 fprintf (stderr
, "function_arg_partial_nregs: %d\n", ret
);
3242 /* A C expression that indicates when an argument must be passed by
3243 reference. If nonzero for an argument, a copy of that argument is
3244 made in memory and a pointer to the argument is passed instead of
3245 the argument itself. The pointer is passed in whatever way is
3246 appropriate for passing a pointer to that type.
3248 Under V.4, structures and unions are passed by reference. */
3251 function_arg_pass_by_reference (cum
, mode
, type
, named
)
3252 CUMULATIVE_ARGS
*cum ATTRIBUTE_UNUSED
;
3253 enum machine_mode mode ATTRIBUTE_UNUSED
;
3255 int named ATTRIBUTE_UNUSED
;
3257 if (DEFAULT_ABI
== ABI_V4
3258 && ((type
&& AGGREGATE_TYPE_P (type
))
3261 if (TARGET_DEBUG_ARG
)
3262 fprintf (stderr
, "function_arg_pass_by_reference: aggregate\n");
3270 /* Perform any needed actions needed for a function that is receiving a
3271 variable number of arguments.
3275 MODE and TYPE are the mode and type of the current parameter.
3277 PRETEND_SIZE is a variable that should be set to the amount of stack
3278 that must be pushed by the prolog to pretend that our caller pushed
3281 Normally, this macro will push all remaining incoming registers on the
3282 stack and set PRETEND_SIZE to the length of the registers pushed. */
3285 setup_incoming_varargs (cum
, mode
, type
, pretend_size
, no_rtl
)
3286 CUMULATIVE_ARGS
*cum
;
3287 enum machine_mode mode
;
3293 CUMULATIVE_ARGS next_cum
;
3294 int reg_size
= TARGET_32BIT
? 4 : 8;
3295 rtx save_area
= NULL_RTX
, mem
;
3296 int first_reg_offset
, set
;
3300 fntype
= TREE_TYPE (current_function_decl
);
3301 stdarg_p
= (TYPE_ARG_TYPES (fntype
) != 0
3302 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
3303 != void_type_node
));
3305 /* For varargs, we do not want to skip the dummy va_dcl argument.
3306 For stdargs, we do want to skip the last named argument. */
3309 function_arg_advance (&next_cum
, mode
, type
, 1);
3311 if (DEFAULT_ABI
== ABI_V4
)
3313 /* Indicate to allocate space on the stack for varargs save area. */
3314 cfun
->machine
->sysv_varargs_p
= 1;
3316 save_area
= plus_constant (virtual_stack_vars_rtx
,
3317 - RS6000_VARARGS_SIZE
);
3319 first_reg_offset
= next_cum
.sysv_gregno
- GP_ARG_MIN_REG
;
3323 first_reg_offset
= next_cum
.words
;
3324 save_area
= virtual_incoming_args_rtx
;
3325 cfun
->machine
->sysv_varargs_p
= 0;
3327 if (MUST_PASS_IN_STACK (mode
, type
))
3328 first_reg_offset
+= RS6000_ARG_SIZE (TYPE_MODE (type
), type
);
3331 set
= get_varargs_alias_set ();
3332 if (! no_rtl
&& first_reg_offset
< GP_ARG_NUM_REG
)
3334 mem
= gen_rtx_MEM (BLKmode
,
3335 plus_constant (save_area
,
3336 first_reg_offset
* reg_size
)),
3337 set_mem_alias_set (mem
, set
);
3338 set_mem_align (mem
, BITS_PER_WORD
);
3341 (GP_ARG_MIN_REG
+ first_reg_offset
, mem
,
3342 GP_ARG_NUM_REG
- first_reg_offset
,
3343 (GP_ARG_NUM_REG
- first_reg_offset
) * UNITS_PER_WORD
);
3345 /* ??? Does ABI_V4 need this at all? */
3346 *pretend_size
= (GP_ARG_NUM_REG
- first_reg_offset
) * UNITS_PER_WORD
;
3349 /* Save FP registers if needed. */
3350 if (DEFAULT_ABI
== ABI_V4
3351 && TARGET_HARD_FLOAT
&& TARGET_FPRS
3353 && next_cum
.fregno
<= FP_ARG_V4_MAX_REG
)
3355 int fregno
= next_cum
.fregno
;
3356 rtx cr1
= gen_rtx_REG (CCmode
, CR1_REGNO
);
3357 rtx lab
= gen_label_rtx ();
3358 int off
= (GP_ARG_NUM_REG
* reg_size
) + ((fregno
- FP_ARG_MIN_REG
) * 8);
3360 emit_jump_insn (gen_rtx_SET (VOIDmode
,
3362 gen_rtx_IF_THEN_ELSE (VOIDmode
,
3363 gen_rtx_NE (VOIDmode
, cr1
,
3365 gen_rtx_LABEL_REF (VOIDmode
, lab
),
3368 while (fregno
<= FP_ARG_V4_MAX_REG
)
3370 mem
= gen_rtx_MEM (DFmode
, plus_constant (save_area
, off
));
3371 set_mem_alias_set (mem
, set
);
3372 emit_move_insn (mem
, gen_rtx_REG (DFmode
, fregno
));
3381 /* Create the va_list data type. */
3384 rs6000_build_va_list ()
3386 tree f_gpr
, f_fpr
, f_ovf
, f_sav
, record
, type_decl
;
3388 /* For AIX, prefer 'char *' because that's what the system
3389 header files like. */
3390 if (DEFAULT_ABI
!= ABI_V4
)
3391 return build_pointer_type (char_type_node
);
3393 record
= (*lang_hooks
.types
.make_type
) (RECORD_TYPE
);
3394 type_decl
= build_decl (TYPE_DECL
, get_identifier ("__va_list_tag"), record
);
3396 f_gpr
= build_decl (FIELD_DECL
, get_identifier ("gpr"),
3397 unsigned_char_type_node
);
3398 f_fpr
= build_decl (FIELD_DECL
, get_identifier ("fpr"),
3399 unsigned_char_type_node
);
3400 f_ovf
= build_decl (FIELD_DECL
, get_identifier ("overflow_arg_area"),
3402 f_sav
= build_decl (FIELD_DECL
, get_identifier ("reg_save_area"),
3405 DECL_FIELD_CONTEXT (f_gpr
) = record
;
3406 DECL_FIELD_CONTEXT (f_fpr
) = record
;
3407 DECL_FIELD_CONTEXT (f_ovf
) = record
;
3408 DECL_FIELD_CONTEXT (f_sav
) = record
;
3410 TREE_CHAIN (record
) = type_decl
;
3411 TYPE_NAME (record
) = type_decl
;
3412 TYPE_FIELDS (record
) = f_gpr
;
3413 TREE_CHAIN (f_gpr
) = f_fpr
;
3414 TREE_CHAIN (f_fpr
) = f_ovf
;
3415 TREE_CHAIN (f_ovf
) = f_sav
;
3417 layout_type (record
);
3419 /* The correct type is an array type of one element. */
3420 return build_array_type (record
, build_index_type (size_zero_node
));
3423 /* Implement va_start. */
3426 rs6000_va_start (valist
, nextarg
)
3430 HOST_WIDE_INT words
, n_gpr
, n_fpr
;
3431 tree f_gpr
, f_fpr
, f_ovf
, f_sav
;
3432 tree gpr
, fpr
, ovf
, sav
, t
;
3434 /* Only SVR4 needs something special. */
3435 if (DEFAULT_ABI
!= ABI_V4
)
3437 std_expand_builtin_va_start (valist
, nextarg
);
3441 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
3442 f_fpr
= TREE_CHAIN (f_gpr
);
3443 f_ovf
= TREE_CHAIN (f_fpr
);
3444 f_sav
= TREE_CHAIN (f_ovf
);
3446 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
3447 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
);
3448 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
);
3449 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
);
3450 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
);
3452 /* Count number of gp and fp argument registers used. */
3453 words
= current_function_args_info
.words
;
3454 n_gpr
= current_function_args_info
.sysv_gregno
- GP_ARG_MIN_REG
;
3455 n_fpr
= current_function_args_info
.fregno
- FP_ARG_MIN_REG
;
3457 if (TARGET_DEBUG_ARG
)
3459 fputs ("va_start: words = ", stderr
);
3460 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, words
);
3461 fputs (", n_gpr = ", stderr
);
3462 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, n_gpr
);
3463 fputs (", n_fpr = ", stderr
);
3464 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, n_fpr
);
3465 putc ('\n', stderr
);
3468 t
= build (MODIFY_EXPR
, TREE_TYPE (gpr
), gpr
, build_int_2 (n_gpr
, 0));
3469 TREE_SIDE_EFFECTS (t
) = 1;
3470 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3472 t
= build (MODIFY_EXPR
, TREE_TYPE (fpr
), fpr
, build_int_2 (n_fpr
, 0));
3473 TREE_SIDE_EFFECTS (t
) = 1;
3474 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3476 /* Find the overflow area. */
3477 t
= make_tree (TREE_TYPE (ovf
), virtual_incoming_args_rtx
);
3479 t
= build (PLUS_EXPR
, TREE_TYPE (ovf
), t
,
3480 build_int_2 (words
* UNITS_PER_WORD
, 0));
3481 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
3482 TREE_SIDE_EFFECTS (t
) = 1;
3483 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3485 /* Find the register save area. */
3486 t
= make_tree (TREE_TYPE (sav
), virtual_stack_vars_rtx
);
3487 t
= build (PLUS_EXPR
, TREE_TYPE (sav
), t
,
3488 build_int_2 (-RS6000_VARARGS_SIZE
, -1));
3489 t
= build (MODIFY_EXPR
, TREE_TYPE (sav
), sav
, t
);
3490 TREE_SIDE_EFFECTS (t
) = 1;
3491 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3494 /* Implement va_arg. */
3497 rs6000_va_arg (valist
, type
)
3500 tree f_gpr
, f_fpr
, f_ovf
, f_sav
;
3501 tree gpr
, fpr
, ovf
, sav
, reg
, t
, u
;
3502 int indirect_p
, size
, rsize
, n_reg
, sav_ofs
, sav_scale
;
3503 rtx lab_false
, lab_over
, addr_rtx
, r
;
3505 if (DEFAULT_ABI
!= ABI_V4
)
3506 return std_expand_builtin_va_arg (valist
, type
);
3508 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
3509 f_fpr
= TREE_CHAIN (f_gpr
);
3510 f_ovf
= TREE_CHAIN (f_fpr
);
3511 f_sav
= TREE_CHAIN (f_ovf
);
3513 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
3514 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
);
3515 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
);
3516 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
);
3517 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
);
3519 size
= int_size_in_bytes (type
);
3520 rsize
= (size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
3522 if (AGGREGATE_TYPE_P (type
) || TYPE_MODE (type
) == TFmode
)
3524 /* Aggregates and long doubles are passed by reference. */
3530 size
= UNITS_PER_WORD
;
3533 else if (FLOAT_TYPE_P (type
) && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3535 /* FP args go in FP registers, if present. */
3544 /* Otherwise into GP registers. */
3552 /* Pull the value out of the saved registers ... */
3554 lab_false
= gen_label_rtx ();
3555 lab_over
= gen_label_rtx ();
3556 addr_rtx
= gen_reg_rtx (Pmode
);
3558 /* AltiVec vectors never go in registers. */
3559 if (!TARGET_ALTIVEC
|| TREE_CODE (type
) != VECTOR_TYPE
)
3561 TREE_THIS_VOLATILE (reg
) = 1;
3562 emit_cmp_and_jump_insns
3563 (expand_expr (reg
, NULL_RTX
, QImode
, EXPAND_NORMAL
),
3564 GEN_INT (8 - n_reg
+ 1), GE
, const1_rtx
, QImode
, 1,
3567 /* Long long is aligned in the registers. */
3570 u
= build (BIT_AND_EXPR
, TREE_TYPE (reg
), reg
,
3571 build_int_2 (n_reg
- 1, 0));
3572 u
= build (PLUS_EXPR
, TREE_TYPE (reg
), reg
, u
);
3573 u
= build (MODIFY_EXPR
, TREE_TYPE (reg
), reg
, u
);
3574 TREE_SIDE_EFFECTS (u
) = 1;
3575 expand_expr (u
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3579 t
= build (PLUS_EXPR
, ptr_type_node
, sav
, build_int_2 (sav_ofs
, 0));
3583 u
= build (POSTINCREMENT_EXPR
, TREE_TYPE (reg
), reg
,
3584 build_int_2 (n_reg
, 0));
3585 TREE_SIDE_EFFECTS (u
) = 1;
3587 u
= build1 (CONVERT_EXPR
, integer_type_node
, u
);
3588 TREE_SIDE_EFFECTS (u
) = 1;
3590 u
= build (MULT_EXPR
, integer_type_node
, u
, build_int_2 (sav_scale
, 0));
3591 TREE_SIDE_EFFECTS (u
) = 1;
3593 t
= build (PLUS_EXPR
, ptr_type_node
, t
, u
);
3594 TREE_SIDE_EFFECTS (t
) = 1;
3596 r
= expand_expr (t
, addr_rtx
, Pmode
, EXPAND_NORMAL
);
3598 emit_move_insn (addr_rtx
, r
);
3600 emit_jump_insn (gen_jump (lab_over
));
3604 emit_label (lab_false
);
3606 /* ... otherwise out of the overflow area. */
3608 /* Make sure we don't find reg 7 for the next int arg.
3610 All AltiVec vectors go in the overflow area. So in the AltiVec
3611 case we need to get the vectors from the overflow area, but
3612 remember where the GPRs and FPRs are. */
3613 if (n_reg
> 1 && (TREE_CODE (type
) != VECTOR_TYPE
3614 || !TARGET_ALTIVEC
))
3616 t
= build (MODIFY_EXPR
, TREE_TYPE (reg
), reg
, build_int_2 (8, 0));
3617 TREE_SIDE_EFFECTS (t
) = 1;
3618 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3621 /* Care for on-stack alignment if needed. */
3628 /* AltiVec vectors are 16 byte aligned. */
3629 if (TARGET_ALTIVEC
&& TREE_CODE (type
) == VECTOR_TYPE
)
3634 t
= build (PLUS_EXPR
, TREE_TYPE (ovf
), ovf
, build_int_2 (align
, 0));
3635 t
= build (BIT_AND_EXPR
, TREE_TYPE (t
), t
, build_int_2 (-align
-1, -1));
3639 r
= expand_expr (t
, addr_rtx
, Pmode
, EXPAND_NORMAL
);
3641 emit_move_insn (addr_rtx
, r
);
3643 t
= build (PLUS_EXPR
, TREE_TYPE (t
), t
, build_int_2 (size
, 0));
3644 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
3645 TREE_SIDE_EFFECTS (t
) = 1;
3646 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3648 emit_label (lab_over
);
3652 r
= gen_rtx_MEM (Pmode
, addr_rtx
);
3653 set_mem_alias_set (r
, get_varargs_alias_set ());
3654 emit_move_insn (addr_rtx
, r
);
/* Conditionally register one machine-specific builtin: NAME is only
   defined when the target_flags bits in MASK are enabled, so e.g.
   AltiVec builtins disappear without -maltivec.
   NOTE(review): the extraction dropped the macro's remaining
   continuation lines (the tail of the builtin_function argument list
   and the do/while wrapper); the visible text ends mid-call.  */
3662 #define def_builtin(MASK, NAME, TYPE, CODE) \
3664 if ((MASK) & target_flags) \
3665 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
3669 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3671 static const struct builtin_description bdesc_3arg
[] =
3673 { MASK_ALTIVEC
, CODE_FOR_altivec_vmaddfp
, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP
},
3674 { MASK_ALTIVEC
, CODE_FOR_altivec_vmhaddshs
, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS
},
3675 { MASK_ALTIVEC
, CODE_FOR_altivec_vmhraddshs
, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS
},
3676 { MASK_ALTIVEC
, CODE_FOR_altivec_vmladduhm
, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM
},
3677 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumubm
, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM
},
3678 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsummbm
, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM
},
3679 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumuhm
, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM
},
3680 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumshm
, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM
},
3681 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumuhs
, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS
},
3682 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumshs
, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS
},
3683 { MASK_ALTIVEC
, CODE_FOR_altivec_vnmsubfp
, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP
},
3684 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_4sf
, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF
},
3685 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_4si
, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI
},
3686 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_8hi
, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI
},
3687 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_16qi
, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI
},
3688 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_4sf
, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF
},
3689 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_4si
, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI
},
3690 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_8hi
, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI
},
3691 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_16qi
, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI
},
3692 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_16qi
, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI
},
3693 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_8hi
, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI
},
3694 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_4si
, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI
},
3695 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_4sf
, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF
},
3698 /* DST operations: void foo (void *, const int, const char). */
3700 static const struct builtin_description bdesc_dst
[] =
3702 { MASK_ALTIVEC
, CODE_FOR_altivec_dst
, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST
},
3703 { MASK_ALTIVEC
, CODE_FOR_altivec_dstt
, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT
},
3704 { MASK_ALTIVEC
, CODE_FOR_altivec_dstst
, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST
},
3705 { MASK_ALTIVEC
, CODE_FOR_altivec_dststt
, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT
}
3708 /* Simple binary operations: VECc = foo (VECa, VECb). */
3710 static struct builtin_description bdesc_2arg
[] =
3712 { MASK_ALTIVEC
, CODE_FOR_addv16qi3
, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM
},
3713 { MASK_ALTIVEC
, CODE_FOR_addv8hi3
, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM
},
3714 { MASK_ALTIVEC
, CODE_FOR_addv4si3
, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM
},
3715 { MASK_ALTIVEC
, CODE_FOR_addv4sf3
, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP
},
3716 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddcuw
, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW
},
3717 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddubs
, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS
},
3718 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddsbs
, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS
},
3719 { MASK_ALTIVEC
, CODE_FOR_altivec_vadduhs
, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS
},
3720 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddshs
, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS
},
3721 { MASK_ALTIVEC
, CODE_FOR_altivec_vadduws
, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS
},
3722 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddsws
, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS
},
3723 { MASK_ALTIVEC
, CODE_FOR_andv4si3
, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND
},
3724 { MASK_ALTIVEC
, CODE_FOR_altivec_vandc
, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC
},
3725 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgub
, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB
},
3726 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsb
, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB
},
3727 { MASK_ALTIVEC
, CODE_FOR_altivec_vavguh
, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH
},
3728 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsh
, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH
},
3729 { MASK_ALTIVEC
, CODE_FOR_altivec_vavguw
, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW
},
3730 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsw
, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW
},
3731 { MASK_ALTIVEC
, CODE_FOR_altivec_vcfux
, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX
},
3732 { MASK_ALTIVEC
, CODE_FOR_altivec_vcfsx
, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX
},
3733 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpbfp
, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP
},
3734 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequb
, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB
},
3735 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequh
, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH
},
3736 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequw
, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW
},
3737 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpeqfp
, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP
},
3738 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgefp
, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP
},
3739 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtub
, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB
},
3740 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsb
, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB
},
3741 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtuh
, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH
},
3742 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsh
, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH
},
3743 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtuw
, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW
},
3744 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsw
, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW
},
3745 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtfp
, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP
},
3746 { MASK_ALTIVEC
, CODE_FOR_altivec_vctsxs
, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS
},
3747 { MASK_ALTIVEC
, CODE_FOR_altivec_vctuxs
, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS
},
3748 { MASK_ALTIVEC
, CODE_FOR_umaxv16qi3
, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB
},
3749 { MASK_ALTIVEC
, CODE_FOR_smaxv16qi3
, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB
},
3750 { MASK_ALTIVEC
, CODE_FOR_umaxv8hi3
, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH
},
3751 { MASK_ALTIVEC
, CODE_FOR_smaxv8hi3
, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH
},
3752 { MASK_ALTIVEC
, CODE_FOR_umaxv4si3
, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW
},
3753 { MASK_ALTIVEC
, CODE_FOR_smaxv4si3
, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW
},
3754 { MASK_ALTIVEC
, CODE_FOR_smaxv4sf3
, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP
},
3755 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghb
, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB
},
3756 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghh
, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH
},
3757 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghw
, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW
},
3758 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglb
, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB
},
3759 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglh
, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH
},
3760 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglw
, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW
},
3761 { MASK_ALTIVEC
, CODE_FOR_uminv16qi3
, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB
},
3762 { MASK_ALTIVEC
, CODE_FOR_sminv16qi3
, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB
},
3763 { MASK_ALTIVEC
, CODE_FOR_uminv8hi3
, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH
},
3764 { MASK_ALTIVEC
, CODE_FOR_sminv8hi3
, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH
},
3765 { MASK_ALTIVEC
, CODE_FOR_uminv4si3
, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW
},
3766 { MASK_ALTIVEC
, CODE_FOR_sminv4si3
, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW
},
3767 { MASK_ALTIVEC
, CODE_FOR_sminv4sf3
, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP
},
3768 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuleub
, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB
},
3769 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulesb
, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB
},
3770 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuleuh
, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH
},
3771 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulesh
, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH
},
3772 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuloub
, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB
},
3773 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulosb
, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB
},
3774 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulouh
, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH
},
3775 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulosh
, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH
},
3776 { MASK_ALTIVEC
, CODE_FOR_altivec_vnor
, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR
},
3777 { MASK_ALTIVEC
, CODE_FOR_iorv4si3
, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR
},
3778 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhum
, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM
},
3779 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwum
, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM
},
3780 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkpx
, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX
},
3781 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhss
, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS
},
3782 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkshss
, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS
},
3783 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwss
, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS
},
3784 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkswss
, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS
},
3785 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhus
, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS
},
3786 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkshus
, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS
},
3787 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwus
, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS
},
3788 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkswus
, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS
},
3789 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlb
, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB
},
3790 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlh
, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH
},
3791 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlw
, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW
},
3792 { MASK_ALTIVEC
, CODE_FOR_altivec_vslb
, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB
},
3793 { MASK_ALTIVEC
, CODE_FOR_altivec_vslh
, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH
},
3794 { MASK_ALTIVEC
, CODE_FOR_altivec_vslw
, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW
},
3795 { MASK_ALTIVEC
, CODE_FOR_altivec_vsl
, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL
},
3796 { MASK_ALTIVEC
, CODE_FOR_altivec_vslo
, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO
},
3797 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltb
, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB
},
3798 { MASK_ALTIVEC
, CODE_FOR_altivec_vsplth
, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH
},
3799 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltw
, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW
},
3800 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrb
, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB
},
3801 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrh
, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH
},
3802 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrw
, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW
},
3803 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrab
, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB
},
3804 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrah
, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH
},
3805 { MASK_ALTIVEC
, CODE_FOR_altivec_vsraw
, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW
},
3806 { MASK_ALTIVEC
, CODE_FOR_altivec_vsr
, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR
},
3807 { MASK_ALTIVEC
, CODE_FOR_altivec_vsro
, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO
},
3808 { MASK_ALTIVEC
, CODE_FOR_subv16qi3
, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM
},
3809 { MASK_ALTIVEC
, CODE_FOR_subv8hi3
, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM
},
3810 { MASK_ALTIVEC
, CODE_FOR_subv4si3
, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM
},
3811 { MASK_ALTIVEC
, CODE_FOR_subv4sf3
, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP
},
3812 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubcuw
, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW
},
3813 { MASK_ALTIVEC
, CODE_FOR_altivec_vsububs
, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS
},
3814 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubsbs
, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS
},
3815 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubuhs
, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS
},
3816 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubshs
, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS
},
3817 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubuws
, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS
},
3818 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubsws
, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS
},
3819 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4ubs
, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS
},
3820 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4sbs
, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS
},
3821 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4shs
, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS
},
3822 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum2sws
, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS
},
3823 { MASK_ALTIVEC
, CODE_FOR_altivec_vsumsws
, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS
},
3824 { MASK_ALTIVEC
, CODE_FOR_xorv4si3
, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR
},
3826 /* Place holder, leave as first spe builtin. */
3827 { 0, CODE_FOR_spe_evaddw
, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW
},
3828 { 0, CODE_FOR_spe_evand
, "__builtin_spe_evand", SPE_BUILTIN_EVAND
},
3829 { 0, CODE_FOR_spe_evandc
, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC
},
3830 { 0, CODE_FOR_spe_evdivws
, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS
},
3831 { 0, CODE_FOR_spe_evdivwu
, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU
},
3832 { 0, CODE_FOR_spe_eveqv
, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV
},
3833 { 0, CODE_FOR_spe_evfsadd
, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD
},
3834 { 0, CODE_FOR_spe_evfsdiv
, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV
},
3835 { 0, CODE_FOR_spe_evfsmul
, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL
},
3836 { 0, CODE_FOR_spe_evfssub
, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB
},
3837 { 0, CODE_FOR_spe_evmergehi
, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI
},
3838 { 0, CODE_FOR_spe_evmergehilo
, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO
},
3839 { 0, CODE_FOR_spe_evmergelo
, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO
},
3840 { 0, CODE_FOR_spe_evmergelohi
, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI
},
3841 { 0, CODE_FOR_spe_evmhegsmfaa
, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA
},
3842 { 0, CODE_FOR_spe_evmhegsmfan
, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN
},
3843 { 0, CODE_FOR_spe_evmhegsmiaa
, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA
},
3844 { 0, CODE_FOR_spe_evmhegsmian
, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN
},
3845 { 0, CODE_FOR_spe_evmhegumiaa
, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA
},
3846 { 0, CODE_FOR_spe_evmhegumian
, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN
},
3847 { 0, CODE_FOR_spe_evmhesmf
, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF
},
3848 { 0, CODE_FOR_spe_evmhesmfa
, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA
},
3849 { 0, CODE_FOR_spe_evmhesmfaaw
, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW
},
3850 { 0, CODE_FOR_spe_evmhesmfanw
, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW
},
3851 { 0, CODE_FOR_spe_evmhesmi
, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI
},
3852 { 0, CODE_FOR_spe_evmhesmia
, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA
},
3853 { 0, CODE_FOR_spe_evmhesmiaaw
, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW
},
3854 { 0, CODE_FOR_spe_evmhesmianw
, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW
},
3855 { 0, CODE_FOR_spe_evmhessf
, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF
},
3856 { 0, CODE_FOR_spe_evmhessfa
, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA
},
3857 { 0, CODE_FOR_spe_evmhessfaaw
, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW
},
3858 { 0, CODE_FOR_spe_evmhessfanw
, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW
},
3859 { 0, CODE_FOR_spe_evmhessiaaw
, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW
},
3860 { 0, CODE_FOR_spe_evmhessianw
, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW
},
3861 { 0, CODE_FOR_spe_evmheumi
, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI
},
3862 { 0, CODE_FOR_spe_evmheumia
, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA
},
3863 { 0, CODE_FOR_spe_evmheumiaaw
, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW
},
3864 { 0, CODE_FOR_spe_evmheumianw
, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW
},
3865 { 0, CODE_FOR_spe_evmheusiaaw
, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW
},
3866 { 0, CODE_FOR_spe_evmheusianw
, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW
},
3867 { 0, CODE_FOR_spe_evmhogsmfaa
, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA
},
3868 { 0, CODE_FOR_spe_evmhogsmfan
, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN
},
3869 { 0, CODE_FOR_spe_evmhogsmiaa
, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA
},
3870 { 0, CODE_FOR_spe_evmhogsmian
, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN
},
3871 { 0, CODE_FOR_spe_evmhogumiaa
, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA
},
3872 { 0, CODE_FOR_spe_evmhogumian
, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN
},
3873 { 0, CODE_FOR_spe_evmhosmf
, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF
},
3874 { 0, CODE_FOR_spe_evmhosmfa
, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA
},
3875 { 0, CODE_FOR_spe_evmhosmfaaw
, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW
},
3876 { 0, CODE_FOR_spe_evmhosmfanw
, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW
},
3877 { 0, CODE_FOR_spe_evmhosmi
, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI
},
3878 { 0, CODE_FOR_spe_evmhosmia
, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA
},
3879 { 0, CODE_FOR_spe_evmhosmiaaw
, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW
},
3880 { 0, CODE_FOR_spe_evmhosmianw
, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW
},
3881 { 0, CODE_FOR_spe_evmhossf
, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF
},
3882 { 0, CODE_FOR_spe_evmhossfa
, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA
},
3883 { 0, CODE_FOR_spe_evmhossfaaw
, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW
},
3884 { 0, CODE_FOR_spe_evmhossfanw
, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW
},
3885 { 0, CODE_FOR_spe_evmhossiaaw
, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW
},
3886 { 0, CODE_FOR_spe_evmhossianw
, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW
},
3887 { 0, CODE_FOR_spe_evmhoumi
, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI
},
3888 { 0, CODE_FOR_spe_evmhoumia
, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA
},
3889 { 0, CODE_FOR_spe_evmhoumiaaw
, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW
},
3890 { 0, CODE_FOR_spe_evmhoumianw
, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW
},
3891 { 0, CODE_FOR_spe_evmhousiaaw
, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW
},
3892 { 0, CODE_FOR_spe_evmhousianw
, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW
},
3893 { 0, CODE_FOR_spe_evmwhsmf
, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF
},
3894 { 0, CODE_FOR_spe_evmwhsmfa
, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA
},
3895 { 0, CODE_FOR_spe_evmwhsmi
, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI
},
3896 { 0, CODE_FOR_spe_evmwhsmia
, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA
},
3897 { 0, CODE_FOR_spe_evmwhssf
, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF
},
3898 { 0, CODE_FOR_spe_evmwhssfa
, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA
},
3899 { 0, CODE_FOR_spe_evmwhumi
, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI
},
3900 { 0, CODE_FOR_spe_evmwhumia
, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA
},
3901 { 0, CODE_FOR_spe_evmwlsmf
, "__builtin_spe_evmwlsmf", SPE_BUILTIN_EVMWLSMF
},
3902 { 0, CODE_FOR_spe_evmwlsmfa
, "__builtin_spe_evmwlsmfa", SPE_BUILTIN_EVMWLSMFA
},
3903 { 0, CODE_FOR_spe_evmwlsmfaaw
, "__builtin_spe_evmwlsmfaaw", SPE_BUILTIN_EVMWLSMFAAW
},
3904 { 0, CODE_FOR_spe_evmwlsmfanw
, "__builtin_spe_evmwlsmfanw", SPE_BUILTIN_EVMWLSMFANW
},
3905 { 0, CODE_FOR_spe_evmwlsmiaaw
, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW
},
3906 { 0, CODE_FOR_spe_evmwlsmianw
, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW
},
3907 { 0, CODE_FOR_spe_evmwlssf
, "__builtin_spe_evmwlssf", SPE_BUILTIN_EVMWLSSF
},
3908 { 0, CODE_FOR_spe_evmwlssfa
, "__builtin_spe_evmwlssfa", SPE_BUILTIN_EVMWLSSFA
},
3909 { 0, CODE_FOR_spe_evmwlssfaaw
, "__builtin_spe_evmwlssfaaw", SPE_BUILTIN_EVMWLSSFAAW
},
3910 { 0, CODE_FOR_spe_evmwlssfanw
, "__builtin_spe_evmwlssfanw", SPE_BUILTIN_EVMWLSSFANW
},
3911 { 0, CODE_FOR_spe_evmwlssiaaw
, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW
},
3912 { 0, CODE_FOR_spe_evmwlssianw
, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW
},
3913 { 0, CODE_FOR_spe_evmwlumi
, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI
},
3914 { 0, CODE_FOR_spe_evmwlumia
, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA
},
3915 { 0, CODE_FOR_spe_evmwlumiaaw
, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW
},
3916 { 0, CODE_FOR_spe_evmwlumianw
, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW
},
3917 { 0, CODE_FOR_spe_evmwlusiaaw
, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW
},
3918 { 0, CODE_FOR_spe_evmwlusianw
, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW
},
3919 { 0, CODE_FOR_spe_evmwsmf
, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF
},
3920 { 0, CODE_FOR_spe_evmwsmfa
, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA
},
3921 { 0, CODE_FOR_spe_evmwsmfaa
, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA
},
3922 { 0, CODE_FOR_spe_evmwsmfan
, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN
},
3923 { 0, CODE_FOR_spe_evmwsmi
, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI
},
3924 { 0, CODE_FOR_spe_evmwsmia
, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA
},
3925 { 0, CODE_FOR_spe_evmwsmiaa
, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA
},
3926 { 0, CODE_FOR_spe_evmwsmian
, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN
},
3927 { 0, CODE_FOR_spe_evmwssf
, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF
},
3928 { 0, CODE_FOR_spe_evmwssfa
, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA
},
3929 { 0, CODE_FOR_spe_evmwssfaa
, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA
},
3930 { 0, CODE_FOR_spe_evmwssfan
, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN
},
3931 { 0, CODE_FOR_spe_evmwumi
, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI
},
3932 { 0, CODE_FOR_spe_evmwumia
, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA
},
3933 { 0, CODE_FOR_spe_evmwumiaa
, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA
},
3934 { 0, CODE_FOR_spe_evmwumian
, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN
},
3935 { 0, CODE_FOR_spe_evnand
, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND
},
3936 { 0, CODE_FOR_spe_evnor
, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR
},
3937 { 0, CODE_FOR_spe_evor
, "__builtin_spe_evor", SPE_BUILTIN_EVOR
},
3938 { 0, CODE_FOR_spe_evorc
, "__builtin_spe_evorc", SPE_BUILTIN_EVORC
},
3939 { 0, CODE_FOR_spe_evrlw
, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW
},
3940 { 0, CODE_FOR_spe_evslw
, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW
},
3941 { 0, CODE_FOR_spe_evsrws
, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS
},
3942 { 0, CODE_FOR_spe_evsrwu
, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU
},
3943 { 0, CODE_FOR_spe_evsubfw
, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW
},
3945 /* SPE binary operations expecting a 5-bit unsigned literal. */
3946 { 0, CODE_FOR_spe_evaddiw
, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW
},
3948 { 0, CODE_FOR_spe_evrlwi
, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI
},
3949 { 0, CODE_FOR_spe_evslwi
, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI
},
3950 { 0, CODE_FOR_spe_evsrwis
, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS
},
3951 { 0, CODE_FOR_spe_evsrwiu
, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU
},
3952 { 0, CODE_FOR_spe_evsubifw
, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW
},
3953 { 0, CODE_FOR_spe_evmwhssfaa
, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA
},
3954 { 0, CODE_FOR_spe_evmwhssmaa
, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA
},
3955 { 0, CODE_FOR_spe_evmwhsmfaa
, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA
},
3956 { 0, CODE_FOR_spe_evmwhsmiaa
, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA
},
3957 { 0, CODE_FOR_spe_evmwhusiaa
, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA
},
3958 { 0, CODE_FOR_spe_evmwhumiaa
, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA
},
3959 { 0, CODE_FOR_spe_evmwhssfan
, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN
},
3960 { 0, CODE_FOR_spe_evmwhssian
, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN
},
3961 { 0, CODE_FOR_spe_evmwhsmfan
, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN
},
3962 { 0, CODE_FOR_spe_evmwhsmian
, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN
},
3963 { 0, CODE_FOR_spe_evmwhusian
, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN
},
3964 { 0, CODE_FOR_spe_evmwhumian
, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN
},
3965 { 0, CODE_FOR_spe_evmwhgssfaa
, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA
},
3966 { 0, CODE_FOR_spe_evmwhgsmfaa
, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA
},
3967 { 0, CODE_FOR_spe_evmwhgsmiaa
, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA
},
3968 { 0, CODE_FOR_spe_evmwhgumiaa
, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA
},
3969 { 0, CODE_FOR_spe_evmwhgssfan
, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN
},
3970 { 0, CODE_FOR_spe_evmwhgsmfan
, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN
},
3971 { 0, CODE_FOR_spe_evmwhgsmian
, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN
},
3972 { 0, CODE_FOR_spe_evmwhgumian
, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN
},
3973 { 0, CODE_FOR_spe_brinc
, "__builtin_spe_brinc", SPE_BUILTIN_BRINC
},
3975 /* Place-holder. Leave as last binary SPE builtin. */
3976 { 0, CODE_FOR_spe_evxor
, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR
},
3979 /* AltiVec predicates. */
3981 struct builtin_description_predicates
3983 const unsigned int mask
;
3984 const enum insn_code icode
;
3986 const char *const name
;
3987 const enum rs6000_builtins code
;
3990 static const struct builtin_description_predicates bdesc_altivec_preds
[] =
3992 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P
},
3993 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P
},
3994 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P
},
3995 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P
},
3996 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P
},
3997 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P
},
3998 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P
},
3999 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P
},
4000 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P
},
4001 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P
},
4002 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P
},
4003 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P
},
4004 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P
}
4007 /* SPE predicates. */
4008 static struct builtin_description bdesc_spe_predicates
[] =
4010 /* Place-holder. Leave as first. */
4011 { 0, CODE_FOR_spe_evcmpeq
, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ
},
4012 { 0, CODE_FOR_spe_evcmpgts
, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS
},
4013 { 0, CODE_FOR_spe_evcmpgtu
, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU
},
4014 { 0, CODE_FOR_spe_evcmplts
, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS
},
4015 { 0, CODE_FOR_spe_evcmpltu
, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU
},
4016 { 0, CODE_FOR_spe_evfscmpeq
, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ
},
4017 { 0, CODE_FOR_spe_evfscmpgt
, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT
},
4018 { 0, CODE_FOR_spe_evfscmplt
, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT
},
4019 { 0, CODE_FOR_spe_evfststeq
, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ
},
4020 { 0, CODE_FOR_spe_evfststgt
, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT
},
4021 /* Place-holder. Leave as last. */
4022 { 0, CODE_FOR_spe_evfststlt
, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT
},
4025 /* SPE evsel predicates. */
4026 static struct builtin_description bdesc_spe_evsel
[] =
4028 /* Place-holder. Leave as first. */
4029 { 0, CODE_FOR_spe_evcmpgts
, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS
},
4030 { 0, CODE_FOR_spe_evcmpgtu
, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU
},
4031 { 0, CODE_FOR_spe_evcmplts
, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS
},
4032 { 0, CODE_FOR_spe_evcmpltu
, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU
},
4033 { 0, CODE_FOR_spe_evcmpeq
, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ
},
4034 { 0, CODE_FOR_spe_evfscmpgt
, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT
},
4035 { 0, CODE_FOR_spe_evfscmplt
, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT
},
4036 { 0, CODE_FOR_spe_evfscmpeq
, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ
},
4037 { 0, CODE_FOR_spe_evfststgt
, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT
},
4038 { 0, CODE_FOR_spe_evfststlt
, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT
},
4039 /* Place-holder. Leave as last. */
4040 { 0, CODE_FOR_spe_evfststeq
, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ
},
4043 /* ABS* opreations. */
4045 static const struct builtin_description bdesc_abs
[] =
4047 { MASK_ALTIVEC
, CODE_FOR_absv4si2
, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI
},
4048 { MASK_ALTIVEC
, CODE_FOR_absv8hi2
, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI
},
4049 { MASK_ALTIVEC
, CODE_FOR_absv4sf2
, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF
},
4050 { MASK_ALTIVEC
, CODE_FOR_absv16qi2
, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI
},
4051 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v4si
, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI
},
4052 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v8hi
, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI
},
4053 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v16qi
, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI
}
4056 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
4059 static struct builtin_description bdesc_1arg
[] =
4061 { MASK_ALTIVEC
, CODE_FOR_altivec_vexptefp
, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP
},
4062 { MASK_ALTIVEC
, CODE_FOR_altivec_vlogefp
, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP
},
4063 { MASK_ALTIVEC
, CODE_FOR_altivec_vrefp
, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP
},
4064 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfim
, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM
},
4065 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfin
, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN
},
4066 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfip
, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP
},
4067 { MASK_ALTIVEC
, CODE_FOR_ftruncv4sf2
, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ
},
4068 { MASK_ALTIVEC
, CODE_FOR_altivec_vrsqrtefp
, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP
},
4069 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltisb
, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB
},
4070 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltish
, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH
},
4071 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltisw
, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW
},
4072 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhsb
, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB
},
4073 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhpx
, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX
},
4074 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhsh
, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH
},
4075 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklsb
, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB
},
4076 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklpx
, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX
},
4077 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklsh
, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH
},
4079 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
4080 end with SPE_BUILTIN_EVSUBFUSIAAW. */
4081 { 0, CODE_FOR_spe_evabs
, "__builtin_spe_evabs", SPE_BUILTIN_EVABS
},
4082 { 0, CODE_FOR_spe_evaddsmiaaw
, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW
},
4083 { 0, CODE_FOR_spe_evaddssiaaw
, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW
},
4084 { 0, CODE_FOR_spe_evaddumiaaw
, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW
},
4085 { 0, CODE_FOR_spe_evaddusiaaw
, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW
},
4086 { 0, CODE_FOR_spe_evcntlsw
, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW
},
4087 { 0, CODE_FOR_spe_evcntlzw
, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW
},
4088 { 0, CODE_FOR_spe_evextsb
, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB
},
4089 { 0, CODE_FOR_spe_evextsh
, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH
},
4090 { 0, CODE_FOR_spe_evfsabs
, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS
},
4091 { 0, CODE_FOR_spe_evfscfsf
, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF
},
4092 { 0, CODE_FOR_spe_evfscfsi
, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI
},
4093 { 0, CODE_FOR_spe_evfscfuf
, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF
},
4094 { 0, CODE_FOR_spe_evfscfui
, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI
},
4095 { 0, CODE_FOR_spe_evfsctsf
, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF
},
4096 { 0, CODE_FOR_spe_evfsctsi
, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI
},
4097 { 0, CODE_FOR_spe_evfsctsiz
, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ
},
4098 { 0, CODE_FOR_spe_evfsctuf
, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF
},
4099 { 0, CODE_FOR_spe_evfsctui
, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI
},
4100 { 0, CODE_FOR_spe_evfsctuiz
, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ
},
4101 { 0, CODE_FOR_spe_evfsnabs
, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS
},
4102 { 0, CODE_FOR_spe_evfsneg
, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG
},
4103 { 0, CODE_FOR_spe_evmra
, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA
},
4104 { 0, CODE_FOR_spe_evneg
, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG
},
4105 { 0, CODE_FOR_spe_evrndw
, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW
},
4106 { 0, CODE_FOR_spe_evsubfsmiaaw
, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW
},
4107 { 0, CODE_FOR_spe_evsubfssiaaw
, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW
},
4108 { 0, CODE_FOR_spe_evsubfumiaaw
, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW
},
4109 { 0, CODE_FOR_spe_evsplatfi
, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI
},
4110 { 0, CODE_FOR_spe_evsplati
, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI
},
4112 /* Place-holder. Leave as last unary SPE builtin. */
4113 { 0, CODE_FOR_spe_evsubfusiaaw
, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW
},
4117 rs6000_expand_unop_builtin (icode
, arglist
, target
)
4118 enum insn_code icode
;
4123 tree arg0
= TREE_VALUE (arglist
);
4124 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4125 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4126 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4128 if (icode
== CODE_FOR_nothing
)
4129 /* Builtin not supported on this processor. */
4132 /* If we got invalid arguments bail out before generating bad rtl. */
4133 if (arg0
== error_mark_node
)
4136 if (icode
== CODE_FOR_altivec_vspltisb
4137 || icode
== CODE_FOR_altivec_vspltish
4138 || icode
== CODE_FOR_altivec_vspltisw
4139 || icode
== CODE_FOR_spe_evsplatfi
4140 || icode
== CODE_FOR_spe_evsplati
)
4142 /* Only allow 5-bit *signed* literals. */
4143 if (GET_CODE (op0
) != CONST_INT
4144 || INTVAL (op0
) > 0x1f
4145 || INTVAL (op0
) < -0x1f)
4147 error ("argument 1 must be a 5-bit signed literal");
4153 || GET_MODE (target
) != tmode
4154 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4155 target
= gen_reg_rtx (tmode
);
4157 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4158 op0
= copy_to_mode_reg (mode0
, op0
);
4160 pat
= GEN_FCN (icode
) (target
, op0
);
4169 altivec_expand_abs_builtin (icode
, arglist
, target
)
4170 enum insn_code icode
;
4174 rtx pat
, scratch1
, scratch2
;
4175 tree arg0
= TREE_VALUE (arglist
);
4176 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4177 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4178 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4180 /* If we have invalid arguments, bail out before generating bad rtl. */
4181 if (arg0
== error_mark_node
)
4185 || GET_MODE (target
) != tmode
4186 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4187 target
= gen_reg_rtx (tmode
);
4189 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4190 op0
= copy_to_mode_reg (mode0
, op0
);
4192 scratch1
= gen_reg_rtx (mode0
);
4193 scratch2
= gen_reg_rtx (mode0
);
4195 pat
= GEN_FCN (icode
) (target
, op0
, scratch1
, scratch2
);
4204 rs6000_expand_binop_builtin (icode
, arglist
, target
)
4205 enum insn_code icode
;
4210 tree arg0
= TREE_VALUE (arglist
);
4211 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4212 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4213 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4214 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4215 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4216 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
4218 if (icode
== CODE_FOR_nothing
)
4219 /* Builtin not supported on this processor. */
4222 /* If we got invalid arguments bail out before generating bad rtl. */
4223 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
4226 if (icode
== CODE_FOR_altivec_vcfux
4227 || icode
== CODE_FOR_altivec_vcfsx
4228 || icode
== CODE_FOR_altivec_vctsxs
4229 || icode
== CODE_FOR_altivec_vctuxs
4230 || icode
== CODE_FOR_altivec_vspltb
4231 || icode
== CODE_FOR_altivec_vsplth
4232 || icode
== CODE_FOR_altivec_vspltw
4233 || icode
== CODE_FOR_spe_evaddiw
4234 || icode
== CODE_FOR_spe_evldd
4235 || icode
== CODE_FOR_spe_evldh
4236 || icode
== CODE_FOR_spe_evldw
4237 || icode
== CODE_FOR_spe_evlhhesplat
4238 || icode
== CODE_FOR_spe_evlhhossplat
4239 || icode
== CODE_FOR_spe_evlhhousplat
4240 || icode
== CODE_FOR_spe_evlwhe
4241 || icode
== CODE_FOR_spe_evlwhos
4242 || icode
== CODE_FOR_spe_evlwhou
4243 || icode
== CODE_FOR_spe_evlwhsplat
4244 || icode
== CODE_FOR_spe_evlwwsplat
4245 || icode
== CODE_FOR_spe_evrlwi
4246 || icode
== CODE_FOR_spe_evslwi
4247 || icode
== CODE_FOR_spe_evsrwis
4248 || icode
== CODE_FOR_spe_evsrwiu
)
4250 /* Only allow 5-bit unsigned literals. */
4251 if (TREE_CODE (arg1
) != INTEGER_CST
4252 || TREE_INT_CST_LOW (arg1
) & ~0x1f)
4254 error ("argument 2 must be a 5-bit unsigned literal");
4260 || GET_MODE (target
) != tmode
4261 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4262 target
= gen_reg_rtx (tmode
);
4264 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4265 op0
= copy_to_mode_reg (mode0
, op0
);
4266 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
4267 op1
= copy_to_mode_reg (mode1
, op1
);
4269 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
4278 altivec_expand_predicate_builtin (icode
, opcode
, arglist
, target
)
4279 enum insn_code icode
;
4285 tree cr6_form
= TREE_VALUE (arglist
);
4286 tree arg0
= TREE_VALUE (TREE_CHAIN (arglist
));
4287 tree arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4288 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4289 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4290 enum machine_mode tmode
= SImode
;
4291 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4292 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
4295 if (TREE_CODE (cr6_form
) != INTEGER_CST
)
4297 error ("argument 1 of __builtin_altivec_predicate must be a constant");
4301 cr6_form_int
= TREE_INT_CST_LOW (cr6_form
);
4306 /* If we have invalid arguments, bail out before generating bad rtl. */
4307 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
4311 || GET_MODE (target
) != tmode
4312 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4313 target
= gen_reg_rtx (tmode
);
4315 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4316 op0
= copy_to_mode_reg (mode0
, op0
);
4317 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
4318 op1
= copy_to_mode_reg (mode1
, op1
);
4320 scratch
= gen_reg_rtx (mode0
);
4322 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
,
4323 gen_rtx (SYMBOL_REF
, Pmode
, opcode
));
4328 /* The vec_any* and vec_all* predicates use the same opcodes for two
4329 different operations, but the bits in CR6 will be different
4330 depending on what information we want. So we have to play tricks
4331 with CR6 to get the right bits out.
4333 If you think this is disgusting, look at the specs for the
4334 AltiVec predicates. */
4336 switch (cr6_form_int
)
4339 emit_insn (gen_cr6_test_for_zero (target
));
4342 emit_insn (gen_cr6_test_for_zero_reverse (target
));
4345 emit_insn (gen_cr6_test_for_lt (target
));
4348 emit_insn (gen_cr6_test_for_lt_reverse (target
));
4351 error ("argument 1 of __builtin_altivec_predicate is out of range");
4359 altivec_expand_stv_builtin (icode
, arglist
)
4360 enum insn_code icode
;
4363 tree arg0
= TREE_VALUE (arglist
);
4364 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4365 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4366 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4367 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4368 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
4370 enum machine_mode mode0
= insn_data
[icode
].operand
[0].mode
;
4371 enum machine_mode mode1
= insn_data
[icode
].operand
[1].mode
;
4372 enum machine_mode mode2
= insn_data
[icode
].operand
[2].mode
;
4374 /* Invalid arguments. Bail before doing anything stoopid! */
4375 if (arg0
== error_mark_node
4376 || arg1
== error_mark_node
4377 || arg2
== error_mark_node
)
4380 if (! (*insn_data
[icode
].operand
[2].predicate
) (op0
, mode2
))
4381 op0
= copy_to_mode_reg (mode2
, op0
);
4382 if (! (*insn_data
[icode
].operand
[0].predicate
) (op1
, mode0
))
4383 op1
= copy_to_mode_reg (mode0
, op1
);
4384 if (! (*insn_data
[icode
].operand
[1].predicate
) (op2
, mode1
))
4385 op2
= copy_to_mode_reg (mode1
, op2
);
4387 pat
= GEN_FCN (icode
) (op1
, op2
, op0
);
4394 rs6000_expand_ternop_builtin (icode
, arglist
, target
)
4395 enum insn_code icode
;
4400 tree arg0
= TREE_VALUE (arglist
);
4401 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4402 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4403 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4404 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4405 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
4406 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4407 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4408 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
4409 enum machine_mode mode2
= insn_data
[icode
].operand
[3].mode
;
4411 if (icode
== CODE_FOR_nothing
)
4412 /* Builtin not supported on this processor. */
4415 /* If we got invalid arguments bail out before generating bad rtl. */
4416 if (arg0
== error_mark_node
4417 || arg1
== error_mark_node
4418 || arg2
== error_mark_node
)
4421 if (icode
== CODE_FOR_altivec_vsldoi_4sf
4422 || icode
== CODE_FOR_altivec_vsldoi_4si
4423 || icode
== CODE_FOR_altivec_vsldoi_8hi
4424 || icode
== CODE_FOR_altivec_vsldoi_16qi
)
4426 /* Only allow 4-bit unsigned literals. */
4427 if (TREE_CODE (arg2
) != INTEGER_CST
4428 || TREE_INT_CST_LOW (arg2
) & ~0xf)
4430 error ("argument 3 must be a 4-bit unsigned literal");
4436 || GET_MODE (target
) != tmode
4437 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4438 target
= gen_reg_rtx (tmode
);
4440 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4441 op0
= copy_to_mode_reg (mode0
, op0
);
4442 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
4443 op1
= copy_to_mode_reg (mode1
, op1
);
4444 if (! (*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
))
4445 op2
= copy_to_mode_reg (mode2
, op2
);
4447 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
);
4455 /* Expand the lvx builtins. */
4457 altivec_expand_ld_builtin (exp
, target
, expandedp
)
4462 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4463 tree arglist
= TREE_OPERAND (exp
, 1);
4464 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4466 enum machine_mode tmode
, mode0
;
4468 enum insn_code icode
;
4472 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi
:
4473 icode
= CODE_FOR_altivec_lvx_16qi
;
4475 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi
:
4476 icode
= CODE_FOR_altivec_lvx_8hi
;
4478 case ALTIVEC_BUILTIN_LD_INTERNAL_4si
:
4479 icode
= CODE_FOR_altivec_lvx_4si
;
4481 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf
:
4482 icode
= CODE_FOR_altivec_lvx_4sf
;
4491 arg0
= TREE_VALUE (arglist
);
4492 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4493 tmode
= insn_data
[icode
].operand
[0].mode
;
4494 mode0
= insn_data
[icode
].operand
[1].mode
;
4497 || GET_MODE (target
) != tmode
4498 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4499 target
= gen_reg_rtx (tmode
);
4501 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4502 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
4504 pat
= GEN_FCN (icode
) (target
, op0
);
4511 /* Expand the stvx builtins. */
4513 altivec_expand_st_builtin (exp
, target
, expandedp
)
4515 rtx target ATTRIBUTE_UNUSED
;
4518 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4519 tree arglist
= TREE_OPERAND (exp
, 1);
4520 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4522 enum machine_mode mode0
, mode1
;
4524 enum insn_code icode
;
4528 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi
:
4529 icode
= CODE_FOR_altivec_stvx_16qi
;
4531 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi
:
4532 icode
= CODE_FOR_altivec_stvx_8hi
;
4534 case ALTIVEC_BUILTIN_ST_INTERNAL_4si
:
4535 icode
= CODE_FOR_altivec_stvx_4si
;
4537 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf
:
4538 icode
= CODE_FOR_altivec_stvx_4sf
;
4545 arg0
= TREE_VALUE (arglist
);
4546 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4547 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4548 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4549 mode0
= insn_data
[icode
].operand
[0].mode
;
4550 mode1
= insn_data
[icode
].operand
[1].mode
;
4552 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
4553 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
4554 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
4555 op1
= copy_to_mode_reg (mode1
, op1
);
4557 pat
= GEN_FCN (icode
) (op0
, op1
);
4565 /* Expand the dst builtins. */
4567 altivec_expand_dst_builtin (exp
, target
, expandedp
)
4569 rtx target ATTRIBUTE_UNUSED
;
4572 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4573 tree arglist
= TREE_OPERAND (exp
, 1);
4574 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4575 tree arg0
, arg1
, arg2
;
4576 enum machine_mode mode0
, mode1
, mode2
;
4577 rtx pat
, op0
, op1
, op2
;
4578 struct builtin_description
*d
;
4583 /* Handle DST variants. */
4584 d
= (struct builtin_description
*) bdesc_dst
;
4585 for (i
= 0; i
< ARRAY_SIZE (bdesc_dst
); i
++, d
++)
4586 if (d
->code
== fcode
)
4588 arg0
= TREE_VALUE (arglist
);
4589 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4590 arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4591 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4592 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4593 op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
4594 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
4595 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
4596 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
4598 /* Invalid arguments, bail out before generating bad rtl. */
4599 if (arg0
== error_mark_node
4600 || arg1
== error_mark_node
4601 || arg2
== error_mark_node
)
4604 if (TREE_CODE (arg2
) != INTEGER_CST
4605 || TREE_INT_CST_LOW (arg2
) & ~0x3)
4607 error ("argument to `%s' must be a 2-bit unsigned literal", d
->name
);
4611 if (! (*insn_data
[d
->icode
].operand
[0].predicate
) (op0
, mode0
))
4612 op0
= copy_to_mode_reg (mode0
, op0
);
4613 if (! (*insn_data
[d
->icode
].operand
[1].predicate
) (op1
, mode1
))
4614 op1
= copy_to_mode_reg (mode1
, op1
);
4616 pat
= GEN_FCN (d
->icode
) (op0
, op1
, op2
);
4627 /* Expand the builtin in EXP and store the result in TARGET. Store
4628 true in *EXPANDEDP if we found a builtin to expand. */
4630 altivec_expand_builtin (exp
, target
, expandedp
)
4635 struct builtin_description
*d
;
4636 struct builtin_description_predicates
*dp
;
4638 enum insn_code icode
;
4639 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4640 tree arglist
= TREE_OPERAND (exp
, 1);
4643 enum machine_mode tmode
, mode0
;
4644 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4646 target
= altivec_expand_ld_builtin (exp
, target
, expandedp
);
4650 target
= altivec_expand_st_builtin (exp
, target
, expandedp
);
4654 target
= altivec_expand_dst_builtin (exp
, target
, expandedp
);
4662 case ALTIVEC_BUILTIN_STVX
:
4663 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx
, arglist
);
4664 case ALTIVEC_BUILTIN_STVEBX
:
4665 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx
, arglist
);
4666 case ALTIVEC_BUILTIN_STVEHX
:
4667 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx
, arglist
);
4668 case ALTIVEC_BUILTIN_STVEWX
:
4669 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx
, arglist
);
4670 case ALTIVEC_BUILTIN_STVXL
:
4671 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl
, arglist
);
4673 case ALTIVEC_BUILTIN_MFVSCR
:
4674 icode
= CODE_FOR_altivec_mfvscr
;
4675 tmode
= insn_data
[icode
].operand
[0].mode
;
4678 || GET_MODE (target
) != tmode
4679 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4680 target
= gen_reg_rtx (tmode
);
4682 pat
= GEN_FCN (icode
) (target
);
4688 case ALTIVEC_BUILTIN_MTVSCR
:
4689 icode
= CODE_FOR_altivec_mtvscr
;
4690 arg0
= TREE_VALUE (arglist
);
4691 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4692 mode0
= insn_data
[icode
].operand
[0].mode
;
4694 /* If we got invalid arguments bail out before generating bad rtl. */
4695 if (arg0
== error_mark_node
)
4698 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
4699 op0
= copy_to_mode_reg (mode0
, op0
);
4701 pat
= GEN_FCN (icode
) (op0
);
4706 case ALTIVEC_BUILTIN_DSSALL
:
4707 emit_insn (gen_altivec_dssall ());
4710 case ALTIVEC_BUILTIN_DSS
:
4711 icode
= CODE_FOR_altivec_dss
;
4712 arg0
= TREE_VALUE (arglist
);
4713 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4714 mode0
= insn_data
[icode
].operand
[0].mode
;
4716 /* If we got invalid arguments bail out before generating bad rtl. */
4717 if (arg0
== error_mark_node
)
4720 if (TREE_CODE (arg0
) != INTEGER_CST
4721 || TREE_INT_CST_LOW (arg0
) & ~0x3)
4723 error ("argument to dss must be a 2-bit unsigned literal");
4727 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
4728 op0
= copy_to_mode_reg (mode0
, op0
);
4730 emit_insn (gen_altivec_dss (op0
));
4734 /* Expand abs* operations. */
4735 d
= (struct builtin_description
*) bdesc_abs
;
4736 for (i
= 0; i
< ARRAY_SIZE (bdesc_abs
); i
++, d
++)
4737 if (d
->code
== fcode
)
4738 return altivec_expand_abs_builtin (d
->icode
, arglist
, target
);
4740 /* Expand the AltiVec predicates. */
4741 dp
= (struct builtin_description_predicates
*) bdesc_altivec_preds
;
4742 for (i
= 0; i
< ARRAY_SIZE (bdesc_altivec_preds
); i
++, dp
++)
4743 if (dp
->code
== fcode
)
4744 return altivec_expand_predicate_builtin (dp
->icode
, dp
->opcode
, arglist
, target
);
4746 /* LV* are funky. We initialized them differently. */
4749 case ALTIVEC_BUILTIN_LVSL
:
4750 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl
,
4752 case ALTIVEC_BUILTIN_LVSR
:
4753 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr
,
4755 case ALTIVEC_BUILTIN_LVEBX
:
4756 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx
,
4758 case ALTIVEC_BUILTIN_LVEHX
:
4759 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx
,
4761 case ALTIVEC_BUILTIN_LVEWX
:
4762 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx
,
4764 case ALTIVEC_BUILTIN_LVXL
:
4765 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl
,
4767 case ALTIVEC_BUILTIN_LVX
:
4768 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx
,
4779 /* Binops that need to be initialized manually, but can be expanded
4780 automagically by rs6000_expand_binop_builtin. */
4781 static struct builtin_description bdesc_2arg_spe
[] =
4783 { 0, CODE_FOR_spe_evlddx
, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX
},
4784 { 0, CODE_FOR_spe_evldwx
, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX
},
4785 { 0, CODE_FOR_spe_evldhx
, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX
},
4786 { 0, CODE_FOR_spe_evlwhex
, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX
},
4787 { 0, CODE_FOR_spe_evlwhoux
, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX
},
4788 { 0, CODE_FOR_spe_evlwhosx
, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX
},
4789 { 0, CODE_FOR_spe_evlwwsplatx
, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX
},
4790 { 0, CODE_FOR_spe_evlwhsplatx
, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX
},
4791 { 0, CODE_FOR_spe_evlhhesplatx
, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX
},
4792 { 0, CODE_FOR_spe_evlhhousplatx
, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX
},
4793 { 0, CODE_FOR_spe_evlhhossplatx
, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX
},
4794 { 0, CODE_FOR_spe_evldd
, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD
},
4795 { 0, CODE_FOR_spe_evldw
, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW
},
4796 { 0, CODE_FOR_spe_evldh
, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH
},
4797 { 0, CODE_FOR_spe_evlwhe
, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE
},
4798 { 0, CODE_FOR_spe_evlwhou
, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU
},
4799 { 0, CODE_FOR_spe_evlwhos
, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS
},
4800 { 0, CODE_FOR_spe_evlwwsplat
, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT
},
4801 { 0, CODE_FOR_spe_evlwhsplat
, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT
},
4802 { 0, CODE_FOR_spe_evlhhesplat
, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT
},
4803 { 0, CODE_FOR_spe_evlhhousplat
, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT
},
4804 { 0, CODE_FOR_spe_evlhhossplat
, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT
}
4807 /* Expand the builtin in EXP and store the result in TARGET. Store
4808 true in *EXPANDEDP if we found a builtin to expand.
4810 This expands the SPE builtins that are not simple unary and binary
4813 spe_expand_builtin (exp
, target
, expandedp
)
4818 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4819 tree arglist
= TREE_OPERAND (exp
, 1);
4821 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4822 enum insn_code icode
;
4823 enum machine_mode tmode
, mode0
;
4825 struct builtin_description
*d
;
4830 /* Syntax check for a 5-bit unsigned immediate. */
4833 case SPE_BUILTIN_EVSTDD
:
4834 case SPE_BUILTIN_EVSTDH
:
4835 case SPE_BUILTIN_EVSTDW
:
4836 case SPE_BUILTIN_EVSTWHE
:
4837 case SPE_BUILTIN_EVSTWHO
:
4838 case SPE_BUILTIN_EVSTWWE
:
4839 case SPE_BUILTIN_EVSTWWO
:
4840 arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4841 if (TREE_CODE (arg1
) != INTEGER_CST
4842 || TREE_INT_CST_LOW (arg1
) & ~0x1f)
4844 error ("argument 2 must be a 5-bit unsigned literal");
4852 d
= (struct builtin_description
*) bdesc_2arg_spe
;
4853 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg_spe
); ++i
, ++d
)
4854 if (d
->code
== fcode
)
4855 return rs6000_expand_binop_builtin (d
->icode
, arglist
, target
);
4857 d
= (struct builtin_description
*) bdesc_spe_predicates
;
4858 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_predicates
); ++i
, ++d
)
4859 if (d
->code
== fcode
)
4860 return spe_expand_predicate_builtin (d
->icode
, arglist
, target
);
4862 d
= (struct builtin_description
*) bdesc_spe_evsel
;
4863 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_evsel
); ++i
, ++d
)
4864 if (d
->code
== fcode
)
4865 return spe_expand_evsel_builtin (d
->icode
, arglist
, target
);
4869 case SPE_BUILTIN_EVSTDDX
:
4870 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx
, arglist
);
4871 case SPE_BUILTIN_EVSTDHX
:
4872 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx
, arglist
);
4873 case SPE_BUILTIN_EVSTDWX
:
4874 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx
, arglist
);
4875 case SPE_BUILTIN_EVSTWHEX
:
4876 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex
, arglist
);
4877 case SPE_BUILTIN_EVSTWHOX
:
4878 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox
, arglist
);
4879 case SPE_BUILTIN_EVSTWWEX
:
4880 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex
, arglist
);
4881 case SPE_BUILTIN_EVSTWWOX
:
4882 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox
, arglist
);
4883 case SPE_BUILTIN_EVSTDD
:
4884 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd
, arglist
);
4885 case SPE_BUILTIN_EVSTDH
:
4886 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh
, arglist
);
4887 case SPE_BUILTIN_EVSTDW
:
4888 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw
, arglist
);
4889 case SPE_BUILTIN_EVSTWHE
:
4890 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe
, arglist
);
4891 case SPE_BUILTIN_EVSTWHO
:
4892 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho
, arglist
);
4893 case SPE_BUILTIN_EVSTWWE
:
4894 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe
, arglist
);
4895 case SPE_BUILTIN_EVSTWWO
:
4896 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo
, arglist
);
4897 case SPE_BUILTIN_MFSPEFSCR
:
4898 icode
= CODE_FOR_spe_mfspefscr
;
4899 tmode
= insn_data
[icode
].operand
[0].mode
;
4902 || GET_MODE (target
) != tmode
4903 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4904 target
= gen_reg_rtx (tmode
);
4906 pat
= GEN_FCN (icode
) (target
);
4911 case SPE_BUILTIN_MTSPEFSCR
:
4912 icode
= CODE_FOR_spe_mtspefscr
;
4913 arg0
= TREE_VALUE (arglist
);
4914 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4915 mode0
= insn_data
[icode
].operand
[0].mode
;
4917 if (arg0
== error_mark_node
)
4920 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
4921 op0
= copy_to_mode_reg (mode0
, op0
);
4923 pat
= GEN_FCN (icode
) (op0
);
4936 spe_expand_predicate_builtin (icode
, arglist
, target
)
4937 enum insn_code icode
;
4941 rtx pat
, scratch
, tmp
;
4942 tree form
= TREE_VALUE (arglist
);
4943 tree arg0
= TREE_VALUE (TREE_CHAIN (arglist
));
4944 tree arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4945 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4946 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4947 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4948 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
4952 if (TREE_CODE (form
) != INTEGER_CST
)
4954 error ("argument 1 of __builtin_spe_predicate must be a constant");
4958 form_int
= TREE_INT_CST_LOW (form
);
4963 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
4967 || GET_MODE (target
) != SImode
4968 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, SImode
))
4969 target
= gen_reg_rtx (SImode
);
4971 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4972 op0
= copy_to_mode_reg (mode0
, op0
);
4973 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
4974 op1
= copy_to_mode_reg (mode1
, op1
);
4976 scratch
= gen_reg_rtx (CCmode
);
4978 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
);
4983 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
4984 _lower_. We use one compare, but look in different bits of the
4985 CR for each variant.
4987 There are 2 elements in each SPE simd type (upper/lower). The CR
4988 bits are set as follows:
4990 BIT0 | BIT 1 | BIT 2 | BIT 3
4991 U | L | (U | L) | (U & L)
4993 So, for an "all" relationship, BIT 3 would be set.
4994 For an "any" relationship, BIT 2 would be set. Etc.
4996 Following traditional nomenclature, these bits map to:
4998 BIT0 | BIT 1 | BIT 2 | BIT 3
5001 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
5006 /* All variant. OV bit. */
5008 /* We need to get to the OV bit, which is the ORDERED bit. We
5009 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
5010 that's ugly and will trigger a validate_condition_mode abort.
5011 So let's just use another pattern. */
5012 emit_insn (gen_move_from_CR_ov_bit (target
, scratch
));
5014 /* Any variant. EQ bit. */
5018 /* Upper variant. LT bit. */
5022 /* Lower variant. GT bit. */
5027 error ("argument 1 of __builtin_spe_predicate is out of range");
5031 tmp
= gen_rtx_fmt_ee (code
, SImode
, scratch
, const0_rtx
);
5032 emit_move_insn (target
, tmp
);
5037 /* The evsel builtins look like this:
5039 e = __builtin_spe_evsel_OP (a, b, c, d);
5043 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
5044 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
5048 spe_expand_evsel_builtin (icode
, arglist
, target
)
5049 enum insn_code icode
;
5054 tree arg0
= TREE_VALUE (arglist
);
5055 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
5056 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5057 tree arg3
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist
))));
5058 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5059 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5060 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
5061 rtx op3
= expand_expr (arg3
, NULL_RTX
, VOIDmode
, 0);
5062 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5063 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
5068 if (arg0
== error_mark_node
|| arg1
== error_mark_node
5069 || arg2
== error_mark_node
|| arg3
== error_mark_node
)
5073 || GET_MODE (target
) != mode0
5074 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, mode0
))
5075 target
= gen_reg_rtx (mode0
);
5077 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5078 op0
= copy_to_mode_reg (mode0
, op0
);
5079 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
5080 op1
= copy_to_mode_reg (mode0
, op1
);
5081 if (! (*insn_data
[icode
].operand
[1].predicate
) (op2
, mode1
))
5082 op2
= copy_to_mode_reg (mode0
, op2
);
5083 if (! (*insn_data
[icode
].operand
[1].predicate
) (op3
, mode1
))
5084 op3
= copy_to_mode_reg (mode0
, op3
);
5086 /* Generate the compare. */
5087 scratch
= gen_reg_rtx (CCmode
);
5088 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
);
5093 if (mode0
== V2SImode
)
5094 emit_insn (gen_spe_evsel (target
, op2
, op3
, scratch
));
5096 emit_insn (gen_spe_evsel_fs (target
, op2
, op3
, scratch
));
5101 /* Expand an expression EXP that calls a built-in function,
5102 with result going to TARGET if that's convenient
5103 (and in mode MODE if that's convenient).
5104 SUBTARGET may be used as the target for computing one of EXP's operands.
5105 IGNORE is nonzero if the value is to be ignored. */
5108 rs6000_expand_builtin (exp
, target
, subtarget
, mode
, ignore
)
5111 rtx subtarget ATTRIBUTE_UNUSED
;
5112 enum machine_mode mode ATTRIBUTE_UNUSED
;
5113 int ignore ATTRIBUTE_UNUSED
;
5115 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5116 tree arglist
= TREE_OPERAND (exp
, 1);
5117 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
5118 struct builtin_description
*d
;
5125 ret
= altivec_expand_builtin (exp
, target
, &success
);
5132 ret
= spe_expand_builtin (exp
, target
, &success
);
5138 if (TARGET_ALTIVEC
|| TARGET_SPE
)
5140 /* Handle simple unary operations. */
5141 d
= (struct builtin_description
*) bdesc_1arg
;
5142 for (i
= 0; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
5143 if (d
->code
== fcode
)
5144 return rs6000_expand_unop_builtin (d
->icode
, arglist
, target
);
5146 /* Handle simple binary operations. */
5147 d
= (struct builtin_description
*) bdesc_2arg
;
5148 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
5149 if (d
->code
== fcode
)
5150 return rs6000_expand_binop_builtin (d
->icode
, arglist
, target
);
5152 /* Handle simple ternary operations. */
5153 d
= (struct builtin_description
*) bdesc_3arg
;
5154 for (i
= 0; i
< ARRAY_SIZE (bdesc_3arg
); i
++, d
++)
5155 if (d
->code
== fcode
)
5156 return rs6000_expand_ternop_builtin (d
->icode
, arglist
, target
);
5164 rs6000_init_builtins ()
5167 spe_init_builtins ();
5169 altivec_init_builtins ();
5170 if (TARGET_ALTIVEC
|| TARGET_SPE
)
5171 rs6000_common_init_builtins ();
5174 /* Search through a set of builtins and enable the mask bits.
5175 DESC is an array of builtins.
5176 SIZE is the totaly number of builtins.
5177 START is the builtin enum at which to start.
5178 END is the builtin enum at which to end. */
5180 enable_mask_for_builtins (desc
, size
, start
, end
)
5181 struct builtin_description
*desc
;
5183 enum rs6000_builtins start
, end
;
5187 for (i
= 0; i
< size
; ++i
)
5188 if (desc
[i
].code
== start
)
5194 for (; i
< size
; ++i
)
5196 /* Flip all the bits on. */
5197 desc
[i
].mask
= target_flags
;
5198 if (desc
[i
].code
== end
)
5204 spe_init_builtins ()
5206 tree endlink
= void_list_node
;
5207 tree puint_type_node
= build_pointer_type (unsigned_type_node
);
5208 tree pushort_type_node
= build_pointer_type (short_unsigned_type_node
);
5209 tree pv2si_type_node
= build_pointer_type (V2SI_type_node
);
5210 struct builtin_description
*d
;
5213 tree v2si_ftype_4_v2si
5214 = build_function_type
5216 tree_cons (NULL_TREE
, V2SI_type_node
,
5217 tree_cons (NULL_TREE
, V2SI_type_node
,
5218 tree_cons (NULL_TREE
, V2SI_type_node
,
5219 tree_cons (NULL_TREE
, V2SI_type_node
,
5222 tree v2sf_ftype_4_v2sf
5223 = build_function_type
5225 tree_cons (NULL_TREE
, V2SF_type_node
,
5226 tree_cons (NULL_TREE
, V2SF_type_node
,
5227 tree_cons (NULL_TREE
, V2SF_type_node
,
5228 tree_cons (NULL_TREE
, V2SF_type_node
,
5231 tree int_ftype_int_v2si_v2si
5232 = build_function_type
5234 tree_cons (NULL_TREE
, integer_type_node
,
5235 tree_cons (NULL_TREE
, V2SI_type_node
,
5236 tree_cons (NULL_TREE
, V2SI_type_node
,
5239 tree int_ftype_int_v2sf_v2sf
5240 = build_function_type
5242 tree_cons (NULL_TREE
, integer_type_node
,
5243 tree_cons (NULL_TREE
, V2SF_type_node
,
5244 tree_cons (NULL_TREE
, V2SF_type_node
,
5247 tree void_ftype_v2si_puint_int
5248 = build_function_type (void_type_node
,
5249 tree_cons (NULL_TREE
, V2SI_type_node
,
5250 tree_cons (NULL_TREE
, puint_type_node
,
5251 tree_cons (NULL_TREE
,
5255 tree void_ftype_v2si_puint_char
5256 = build_function_type (void_type_node
,
5257 tree_cons (NULL_TREE
, V2SI_type_node
,
5258 tree_cons (NULL_TREE
, puint_type_node
,
5259 tree_cons (NULL_TREE
,
5263 tree void_ftype_v2si_pv2si_int
5264 = build_function_type (void_type_node
,
5265 tree_cons (NULL_TREE
, V2SI_type_node
,
5266 tree_cons (NULL_TREE
, pv2si_type_node
,
5267 tree_cons (NULL_TREE
,
5271 tree void_ftype_v2si_pv2si_char
5272 = build_function_type (void_type_node
,
5273 tree_cons (NULL_TREE
, V2SI_type_node
,
5274 tree_cons (NULL_TREE
, pv2si_type_node
,
5275 tree_cons (NULL_TREE
,
5280 = build_function_type (void_type_node
,
5281 tree_cons (NULL_TREE
, integer_type_node
, endlink
));
5284 = build_function_type (integer_type_node
,
5285 tree_cons (NULL_TREE
, void_type_node
, endlink
));
5287 tree v2si_ftype_pv2si_int
5288 = build_function_type (V2SI_type_node
,
5289 tree_cons (NULL_TREE
, pv2si_type_node
,
5290 tree_cons (NULL_TREE
, integer_type_node
,
5293 tree v2si_ftype_puint_int
5294 = build_function_type (V2SI_type_node
,
5295 tree_cons (NULL_TREE
, puint_type_node
,
5296 tree_cons (NULL_TREE
, integer_type_node
,
5299 tree v2si_ftype_pushort_int
5300 = build_function_type (V2SI_type_node
,
5301 tree_cons (NULL_TREE
, pushort_type_node
,
5302 tree_cons (NULL_TREE
, integer_type_node
,
5305 /* The initialization of the simple binary and unary builtins is
5306 done in rs6000_common_init_builtins, but we have to enable the
5307 mask bits here manually because we have run out of `target_flags'
5308 bits. We really need to redesign this mask business. */
5310 enable_mask_for_builtins ((struct builtin_description
*) bdesc_2arg
,
5311 ARRAY_SIZE (bdesc_2arg
),
5314 enable_mask_for_builtins ((struct builtin_description
*) bdesc_1arg
,
5315 ARRAY_SIZE (bdesc_1arg
),
5317 SPE_BUILTIN_EVSUBFUSIAAW
);
5318 enable_mask_for_builtins ((struct builtin_description
*) bdesc_spe_predicates
,
5319 ARRAY_SIZE (bdesc_spe_predicates
),
5320 SPE_BUILTIN_EVCMPEQ
,
5321 SPE_BUILTIN_EVFSTSTLT
);
5322 enable_mask_for_builtins ((struct builtin_description
*) bdesc_spe_evsel
,
5323 ARRAY_SIZE (bdesc_spe_evsel
),
5324 SPE_BUILTIN_EVSEL_CMPGTS
,
5325 SPE_BUILTIN_EVSEL_FSTSTEQ
);
5327 /* Initialize irregular SPE builtins. */
5329 def_builtin (target_flags
, "__builtin_spe_mtspefscr", void_ftype_int
, SPE_BUILTIN_MTSPEFSCR
);
5330 def_builtin (target_flags
, "__builtin_spe_mfspefscr", int_ftype_void
, SPE_BUILTIN_MFSPEFSCR
);
5331 def_builtin (target_flags
, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDDX
);
5332 def_builtin (target_flags
, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDHX
);
5333 def_builtin (target_flags
, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDWX
);
5334 def_builtin (target_flags
, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWHEX
);
5335 def_builtin (target_flags
, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWHOX
);
5336 def_builtin (target_flags
, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWWEX
);
5337 def_builtin (target_flags
, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWWOX
);
5338 def_builtin (target_flags
, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDD
);
5339 def_builtin (target_flags
, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDH
);
5340 def_builtin (target_flags
, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDW
);
5341 def_builtin (target_flags
, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWHE
);
5342 def_builtin (target_flags
, "__builtin_spe_evstwho", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWHO
);
5343 def_builtin (target_flags
, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWWE
);
5344 def_builtin (target_flags
, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWWO
);
5347 def_builtin (target_flags
, "__builtin_spe_evlddx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDDX
);
5348 def_builtin (target_flags
, "__builtin_spe_evldwx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDWX
);
5349 def_builtin (target_flags
, "__builtin_spe_evldhx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDHX
);
5350 def_builtin (target_flags
, "__builtin_spe_evlwhex", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHEX
);
5351 def_builtin (target_flags
, "__builtin_spe_evlwhoux", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOUX
);
5352 def_builtin (target_flags
, "__builtin_spe_evlwhosx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOSX
);
5353 def_builtin (target_flags
, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWWSPLATX
);
5354 def_builtin (target_flags
, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHSPLATX
);
5355 def_builtin (target_flags
, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHESPLATX
);
5356 def_builtin (target_flags
, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOUSPLATX
);
5357 def_builtin (target_flags
, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOSSPLATX
);
5358 def_builtin (target_flags
, "__builtin_spe_evldd", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDD
);
5359 def_builtin (target_flags
, "__builtin_spe_evldw", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDW
);
5360 def_builtin (target_flags
, "__builtin_spe_evldh", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDH
);
5361 def_builtin (target_flags
, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHESPLAT
);
5362 def_builtin (target_flags
, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOSSPLAT
);
5363 def_builtin (target_flags
, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOUSPLAT
);
5364 def_builtin (target_flags
, "__builtin_spe_evlwhe", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHE
);
5365 def_builtin (target_flags
, "__builtin_spe_evlwhos", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOS
);
5366 def_builtin (target_flags
, "__builtin_spe_evlwhou", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOU
);
5367 def_builtin (target_flags
, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHSPLAT
);
5368 def_builtin (target_flags
, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWWSPLAT
);
5371 d
= (struct builtin_description
*) bdesc_spe_predicates
;
5372 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_predicates
); ++i
, d
++)
5376 switch (insn_data
[d
->icode
].operand
[1].mode
)
5379 type
= int_ftype_int_v2si_v2si
;
5382 type
= int_ftype_int_v2sf_v2sf
;
5388 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
5391 /* Evsel predicates. */
5392 d
= (struct builtin_description
*) bdesc_spe_evsel
;
5393 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_evsel
); ++i
, d
++)
5397 switch (insn_data
[d
->icode
].operand
[1].mode
)
5400 type
= v2si_ftype_4_v2si
;
5403 type
= v2sf_ftype_4_v2sf
;
5409 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
5414 altivec_init_builtins ()
5416 struct builtin_description
*d
;
5417 struct builtin_description_predicates
*dp
;
5419 tree pfloat_type_node
= build_pointer_type (float_type_node
);
5420 tree pint_type_node
= build_pointer_type (integer_type_node
);
5421 tree pshort_type_node
= build_pointer_type (short_integer_type_node
);
5422 tree pchar_type_node
= build_pointer_type (char_type_node
);
5424 tree pvoid_type_node
= build_pointer_type (void_type_node
);
5426 tree int_ftype_int_v4si_v4si
5427 = build_function_type_list (integer_type_node
,
5428 integer_type_node
, V4SI_type_node
,
5429 V4SI_type_node
, NULL_TREE
);
5430 tree v4sf_ftype_pfloat
5431 = build_function_type_list (V4SF_type_node
, pfloat_type_node
, NULL_TREE
);
5432 tree void_ftype_pfloat_v4sf
5433 = build_function_type_list (void_type_node
,
5434 pfloat_type_node
, V4SF_type_node
, NULL_TREE
);
5435 tree v4si_ftype_pint
5436 = build_function_type_list (V4SI_type_node
, pint_type_node
, NULL_TREE
); tree void_ftype_pint_v4si
5437 = build_function_type_list (void_type_node
,
5438 pint_type_node
, V4SI_type_node
, NULL_TREE
);
5439 tree v8hi_ftype_pshort
5440 = build_function_type_list (V8HI_type_node
, pshort_type_node
, NULL_TREE
);
5441 tree void_ftype_pshort_v8hi
5442 = build_function_type_list (void_type_node
,
5443 pshort_type_node
, V8HI_type_node
, NULL_TREE
);
5444 tree v16qi_ftype_pchar
5445 = build_function_type_list (V16QI_type_node
, pchar_type_node
, NULL_TREE
);
5446 tree void_ftype_pchar_v16qi
5447 = build_function_type_list (void_type_node
,
5448 pchar_type_node
, V16QI_type_node
, NULL_TREE
);
5449 tree void_ftype_v4si
5450 = build_function_type_list (void_type_node
, V4SI_type_node
, NULL_TREE
);
5451 tree v8hi_ftype_void
5452 = build_function_type (V8HI_type_node
, void_list_node
);
5453 tree void_ftype_void
5454 = build_function_type (void_type_node
, void_list_node
);
5456 = build_function_type_list (void_type_node
, char_type_node
, NULL_TREE
);
5457 tree v16qi_ftype_int_pvoid
5458 = build_function_type_list (V16QI_type_node
,
5459 integer_type_node
, pvoid_type_node
, NULL_TREE
);
5460 tree v8hi_ftype_int_pvoid
5461 = build_function_type_list (V8HI_type_node
,
5462 integer_type_node
, pvoid_type_node
, NULL_TREE
);
5463 tree v4si_ftype_int_pvoid
5464 = build_function_type_list (V4SI_type_node
,
5465 integer_type_node
, pvoid_type_node
, NULL_TREE
);
5466 tree void_ftype_v4si_int_pvoid
5467 = build_function_type_list (void_type_node
,
5468 V4SI_type_node
, integer_type_node
,
5469 pvoid_type_node
, NULL_TREE
);
5470 tree void_ftype_v16qi_int_pvoid
5471 = build_function_type_list (void_type_node
,
5472 V16QI_type_node
, integer_type_node
,
5473 pvoid_type_node
, NULL_TREE
);
5474 tree void_ftype_v8hi_int_pvoid
5475 = build_function_type_list (void_type_node
,
5476 V8HI_type_node
, integer_type_node
,
5477 pvoid_type_node
, NULL_TREE
);
5478 tree int_ftype_int_v8hi_v8hi
5479 = build_function_type_list (integer_type_node
,
5480 integer_type_node
, V8HI_type_node
,
5481 V8HI_type_node
, NULL_TREE
);
5482 tree int_ftype_int_v16qi_v16qi
5483 = build_function_type_list (integer_type_node
,
5484 integer_type_node
, V16QI_type_node
,
5485 V16QI_type_node
, NULL_TREE
);
5486 tree int_ftype_int_v4sf_v4sf
5487 = build_function_type_list (integer_type_node
,
5488 integer_type_node
, V4SF_type_node
,
5489 V4SF_type_node
, NULL_TREE
);
5490 tree v4si_ftype_v4si
5491 = build_function_type_list (V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
5492 tree v8hi_ftype_v8hi
5493 = build_function_type_list (V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5494 tree v16qi_ftype_v16qi
5495 = build_function_type_list (V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5496 tree v4sf_ftype_v4sf
5497 = build_function_type_list (V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5498 tree void_ftype_pvoid_int_char
5499 = build_function_type_list (void_type_node
,
5500 pvoid_type_node
, integer_type_node
,
5501 char_type_node
, NULL_TREE
);
5503 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat
, ALTIVEC_BUILTIN_LD_INTERNAL_4sf
);
5504 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf
, ALTIVEC_BUILTIN_ST_INTERNAL_4sf
);
5505 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint
, ALTIVEC_BUILTIN_LD_INTERNAL_4si
);
5506 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si
, ALTIVEC_BUILTIN_ST_INTERNAL_4si
);
5507 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort
, ALTIVEC_BUILTIN_LD_INTERNAL_8hi
);
5508 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi
, ALTIVEC_BUILTIN_ST_INTERNAL_8hi
);
5509 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar
, ALTIVEC_BUILTIN_LD_INTERNAL_16qi
);
5510 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi
, ALTIVEC_BUILTIN_ST_INTERNAL_16qi
);
5511 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_mtvscr", void_ftype_v4si
, ALTIVEC_BUILTIN_MTVSCR
);
5512 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_mfvscr", v8hi_ftype_void
, ALTIVEC_BUILTIN_MFVSCR
);
5513 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_dssall", void_ftype_void
, ALTIVEC_BUILTIN_DSSALL
);
5514 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_dss", void_ftype_qi
, ALTIVEC_BUILTIN_DSS
);
5515 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvsl", v16qi_ftype_int_pvoid
, ALTIVEC_BUILTIN_LVSL
);
5516 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvsr", v16qi_ftype_int_pvoid
, ALTIVEC_BUILTIN_LVSR
);
5517 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvebx", v16qi_ftype_int_pvoid
, ALTIVEC_BUILTIN_LVEBX
);
5518 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvehx", v8hi_ftype_int_pvoid
, ALTIVEC_BUILTIN_LVEHX
);
5519 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvewx", v4si_ftype_int_pvoid
, ALTIVEC_BUILTIN_LVEWX
);
5520 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvxl", v4si_ftype_int_pvoid
, ALTIVEC_BUILTIN_LVXL
);
5521 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvx", v4si_ftype_int_pvoid
, ALTIVEC_BUILTIN_LVX
);
5522 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVX
);
5523 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVEWX
);
5524 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVXL
);
5525 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid
, ALTIVEC_BUILTIN_STVEBX
);
5526 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid
, ALTIVEC_BUILTIN_STVEHX
);
5528 /* Add the DST variants. */
5529 d
= (struct builtin_description
*) bdesc_dst
;
5530 for (i
= 0; i
< ARRAY_SIZE (bdesc_dst
); i
++, d
++)
5531 def_builtin (d
->mask
, d
->name
, void_ftype_pvoid_int_char
, d
->code
);
5533 /* Initialize the predicates. */
5534 dp
= (struct builtin_description_predicates
*) bdesc_altivec_preds
;
5535 for (i
= 0; i
< ARRAY_SIZE (bdesc_altivec_preds
); i
++, dp
++)
5537 enum machine_mode mode1
;
5540 mode1
= insn_data
[dp
->icode
].operand
[1].mode
;
5545 type
= int_ftype_int_v4si_v4si
;
5548 type
= int_ftype_int_v8hi_v8hi
;
5551 type
= int_ftype_int_v16qi_v16qi
;
5554 type
= int_ftype_int_v4sf_v4sf
;
5560 def_builtin (dp
->mask
, dp
->name
, type
, dp
->code
);
5563 /* Initialize the abs* operators. */
5564 d
= (struct builtin_description
*) bdesc_abs
;
5565 for (i
= 0; i
< ARRAY_SIZE (bdesc_abs
); i
++, d
++)
5567 enum machine_mode mode0
;
5570 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
5575 type
= v4si_ftype_v4si
;
5578 type
= v8hi_ftype_v8hi
;
5581 type
= v16qi_ftype_v16qi
;
5584 type
= v4sf_ftype_v4sf
;
5590 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
5595 rs6000_common_init_builtins ()
5597 struct builtin_description
*d
;
5600 tree v4sf_ftype_v4sf_v4sf_v16qi
5601 = build_function_type_list (V4SF_type_node
,
5602 V4SF_type_node
, V4SF_type_node
,
5603 V16QI_type_node
, NULL_TREE
);
5604 tree v4si_ftype_v4si_v4si_v16qi
5605 = build_function_type_list (V4SI_type_node
,
5606 V4SI_type_node
, V4SI_type_node
,
5607 V16QI_type_node
, NULL_TREE
);
5608 tree v8hi_ftype_v8hi_v8hi_v16qi
5609 = build_function_type_list (V8HI_type_node
,
5610 V8HI_type_node
, V8HI_type_node
,
5611 V16QI_type_node
, NULL_TREE
);
5612 tree v16qi_ftype_v16qi_v16qi_v16qi
5613 = build_function_type_list (V16QI_type_node
,
5614 V16QI_type_node
, V16QI_type_node
,
5615 V16QI_type_node
, NULL_TREE
);
5616 tree v4si_ftype_char
5617 = build_function_type_list (V4SI_type_node
, char_type_node
, NULL_TREE
);
5618 tree v8hi_ftype_char
5619 = build_function_type_list (V8HI_type_node
, char_type_node
, NULL_TREE
);
5620 tree v16qi_ftype_char
5621 = build_function_type_list (V16QI_type_node
, char_type_node
, NULL_TREE
);
5622 tree v8hi_ftype_v16qi
5623 = build_function_type_list (V8HI_type_node
, V16QI_type_node
, NULL_TREE
);
5624 tree v4sf_ftype_v4sf
5625 = build_function_type_list (V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5627 tree v2si_ftype_v2si_v2si
5628 = build_function_type_list (V2SI_type_node
,
5629 V2SI_type_node
, V2SI_type_node
, NULL_TREE
);
5631 tree v2sf_ftype_v2sf_v2sf
5632 = build_function_type_list (V2SF_type_node
,
5633 V2SF_type_node
, V2SF_type_node
, NULL_TREE
);
5635 tree v2si_ftype_int_int
5636 = build_function_type_list (V2SI_type_node
,
5637 integer_type_node
, integer_type_node
,
5640 tree v2si_ftype_v2si
5641 = build_function_type_list (V2SI_type_node
, V2SI_type_node
, NULL_TREE
);
5643 tree v2sf_ftype_v2sf
5644 = build_function_type_list (V2SF_type_node
,
5645 V2SF_type_node
, NULL_TREE
);
5647 tree v2sf_ftype_v2si
5648 = build_function_type_list (V2SF_type_node
,
5649 V2SI_type_node
, NULL_TREE
);
5651 tree v2si_ftype_v2sf
5652 = build_function_type_list (V2SI_type_node
,
5653 V2SF_type_node
, NULL_TREE
);
5655 tree v2si_ftype_v2si_char
5656 = build_function_type_list (V2SI_type_node
,
5657 V2SI_type_node
, char_type_node
, NULL_TREE
);
5659 tree v2si_ftype_int_char
5660 = build_function_type_list (V2SI_type_node
,
5661 integer_type_node
, char_type_node
, NULL_TREE
);
5663 tree v2si_ftype_char
5664 = build_function_type_list (V2SI_type_node
, char_type_node
, NULL_TREE
);
5666 tree int_ftype_int_int
5667 = build_function_type_list (integer_type_node
,
5668 integer_type_node
, integer_type_node
,
5671 tree v4si_ftype_v4si_v4si
5672 = build_function_type_list (V4SI_type_node
,
5673 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
5674 tree v4sf_ftype_v4si_char
5675 = build_function_type_list (V4SF_type_node
,
5676 V4SI_type_node
, char_type_node
, NULL_TREE
);
5677 tree v4si_ftype_v4sf_char
5678 = build_function_type_list (V4SI_type_node
,
5679 V4SF_type_node
, char_type_node
, NULL_TREE
);
5680 tree v4si_ftype_v4si_char
5681 = build_function_type_list (V4SI_type_node
,
5682 V4SI_type_node
, char_type_node
, NULL_TREE
);
5683 tree v8hi_ftype_v8hi_char
5684 = build_function_type_list (V8HI_type_node
,
5685 V8HI_type_node
, char_type_node
, NULL_TREE
);
5686 tree v16qi_ftype_v16qi_char
5687 = build_function_type_list (V16QI_type_node
,
5688 V16QI_type_node
, char_type_node
, NULL_TREE
);
5689 tree v16qi_ftype_v16qi_v16qi_char
5690 = build_function_type_list (V16QI_type_node
,
5691 V16QI_type_node
, V16QI_type_node
,
5692 char_type_node
, NULL_TREE
);
5693 tree v8hi_ftype_v8hi_v8hi_char
5694 = build_function_type_list (V8HI_type_node
,
5695 V8HI_type_node
, V8HI_type_node
,
5696 char_type_node
, NULL_TREE
);
5697 tree v4si_ftype_v4si_v4si_char
5698 = build_function_type_list (V4SI_type_node
,
5699 V4SI_type_node
, V4SI_type_node
,
5700 char_type_node
, NULL_TREE
);
5701 tree v4sf_ftype_v4sf_v4sf_char
5702 = build_function_type_list (V4SF_type_node
,
5703 V4SF_type_node
, V4SF_type_node
,
5704 char_type_node
, NULL_TREE
);
5705 tree v4sf_ftype_v4sf_v4sf
5706 = build_function_type_list (V4SF_type_node
,
5707 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5708 tree v4sf_ftype_v4sf_v4sf_v4si
5709 = build_function_type_list (V4SF_type_node
,
5710 V4SF_type_node
, V4SF_type_node
,
5711 V4SI_type_node
, NULL_TREE
);
5712 tree v4sf_ftype_v4sf_v4sf_v4sf
5713 = build_function_type_list (V4SF_type_node
,
5714 V4SF_type_node
, V4SF_type_node
,
5715 V4SF_type_node
, NULL_TREE
);
5716 tree v4si_ftype_v4si_v4si_v4si
5717 = build_function_type_list (V4SI_type_node
,
5718 V4SI_type_node
, V4SI_type_node
,
5719 V4SI_type_node
, NULL_TREE
);
5720 tree v8hi_ftype_v8hi_v8hi
5721 = build_function_type_list (V8HI_type_node
,
5722 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5723 tree v8hi_ftype_v8hi_v8hi_v8hi
5724 = build_function_type_list (V8HI_type_node
,
5725 V8HI_type_node
, V8HI_type_node
,
5726 V8HI_type_node
, NULL_TREE
);
5727 tree v4si_ftype_v8hi_v8hi_v4si
5728 = build_function_type_list (V4SI_type_node
,
5729 V8HI_type_node
, V8HI_type_node
,
5730 V4SI_type_node
, NULL_TREE
);
5731 tree v4si_ftype_v16qi_v16qi_v4si
5732 = build_function_type_list (V4SI_type_node
,
5733 V16QI_type_node
, V16QI_type_node
,
5734 V4SI_type_node
, NULL_TREE
);
5735 tree v16qi_ftype_v16qi_v16qi
5736 = build_function_type_list (V16QI_type_node
,
5737 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5738 tree v4si_ftype_v4sf_v4sf
5739 = build_function_type_list (V4SI_type_node
,
5740 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5741 tree v8hi_ftype_v16qi_v16qi
5742 = build_function_type_list (V8HI_type_node
,
5743 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5744 tree v4si_ftype_v8hi_v8hi
5745 = build_function_type_list (V4SI_type_node
,
5746 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5747 tree v8hi_ftype_v4si_v4si
5748 = build_function_type_list (V8HI_type_node
,
5749 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
5750 tree v16qi_ftype_v8hi_v8hi
5751 = build_function_type_list (V16QI_type_node
,
5752 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5753 tree v4si_ftype_v16qi_v4si
5754 = build_function_type_list (V4SI_type_node
,
5755 V16QI_type_node
, V4SI_type_node
, NULL_TREE
);
5756 tree v4si_ftype_v16qi_v16qi
5757 = build_function_type_list (V4SI_type_node
,
5758 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5759 tree v4si_ftype_v8hi_v4si
5760 = build_function_type_list (V4SI_type_node
,
5761 V8HI_type_node
, V4SI_type_node
, NULL_TREE
);
5762 tree v4si_ftype_v8hi
5763 = build_function_type_list (V4SI_type_node
, V8HI_type_node
, NULL_TREE
);
5764 tree int_ftype_v4si_v4si
5765 = build_function_type_list (integer_type_node
,
5766 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
5767 tree int_ftype_v4sf_v4sf
5768 = build_function_type_list (integer_type_node
,
5769 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5770 tree int_ftype_v16qi_v16qi
5771 = build_function_type_list (integer_type_node
,
5772 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5773 tree int_ftype_v8hi_v8hi
5774 = build_function_type_list (integer_type_node
,
5775 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5777 /* Add the simple ternary operators. */
5778 d
= (struct builtin_description
*) bdesc_3arg
;
5779 for (i
= 0; i
< ARRAY_SIZE (bdesc_3arg
); i
++, d
++)
5782 enum machine_mode mode0
, mode1
, mode2
, mode3
;
5785 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
5788 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
5789 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
5790 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
5791 mode3
= insn_data
[d
->icode
].operand
[3].mode
;
5793 /* When all four are of the same mode. */
5794 if (mode0
== mode1
&& mode1
== mode2
&& mode2
== mode3
)
5799 type
= v4si_ftype_v4si_v4si_v4si
;
5802 type
= v4sf_ftype_v4sf_v4sf_v4sf
;
5805 type
= v8hi_ftype_v8hi_v8hi_v8hi
;
5808 type
= v16qi_ftype_v16qi_v16qi_v16qi
;
5814 else if (mode0
== mode1
&& mode1
== mode2
&& mode3
== V16QImode
)
5819 type
= v4si_ftype_v4si_v4si_v16qi
;
5822 type
= v4sf_ftype_v4sf_v4sf_v16qi
;
5825 type
= v8hi_ftype_v8hi_v8hi_v16qi
;
5828 type
= v16qi_ftype_v16qi_v16qi_v16qi
;
5834 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V16QImode
5835 && mode3
== V4SImode
)
5836 type
= v4si_ftype_v16qi_v16qi_v4si
;
5837 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V8HImode
5838 && mode3
== V4SImode
)
5839 type
= v4si_ftype_v8hi_v8hi_v4si
;
5840 else if (mode0
== V4SFmode
&& mode1
== V4SFmode
&& mode2
== V4SFmode
5841 && mode3
== V4SImode
)
5842 type
= v4sf_ftype_v4sf_v4sf_v4si
;
5844 /* vchar, vchar, vchar, 4 bit literal. */
5845 else if (mode0
== V16QImode
&& mode1
== mode0
&& mode2
== mode0
5847 type
= v16qi_ftype_v16qi_v16qi_char
;
5849 /* vshort, vshort, vshort, 4 bit literal. */
5850 else if (mode0
== V8HImode
&& mode1
== mode0
&& mode2
== mode0
5852 type
= v8hi_ftype_v8hi_v8hi_char
;
5854 /* vint, vint, vint, 4 bit literal. */
5855 else if (mode0
== V4SImode
&& mode1
== mode0
&& mode2
== mode0
5857 type
= v4si_ftype_v4si_v4si_char
;
5859 /* vfloat, vfloat, vfloat, 4 bit literal. */
5860 else if (mode0
== V4SFmode
&& mode1
== mode0
&& mode2
== mode0
5862 type
= v4sf_ftype_v4sf_v4sf_char
;
5867 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
5870 /* Add the simple binary operators. */
5871 d
= (struct builtin_description
*) bdesc_2arg
;
5872 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
5874 enum machine_mode mode0
, mode1
, mode2
;
5877 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
5880 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
5881 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
5882 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
5884 /* When all three operands are of the same mode. */
5885 if (mode0
== mode1
&& mode1
== mode2
)
5890 type
= v4sf_ftype_v4sf_v4sf
;
5893 type
= v4si_ftype_v4si_v4si
;
5896 type
= v16qi_ftype_v16qi_v16qi
;
5899 type
= v8hi_ftype_v8hi_v8hi
;
5902 type
= v2si_ftype_v2si_v2si
;
5905 type
= v2sf_ftype_v2sf_v2sf
;
5908 type
= int_ftype_int_int
;
5915 /* A few other combos we really don't want to do manually. */
5917 /* vint, vfloat, vfloat. */
5918 else if (mode0
== V4SImode
&& mode1
== V4SFmode
&& mode2
== V4SFmode
)
5919 type
= v4si_ftype_v4sf_v4sf
;
5921 /* vshort, vchar, vchar. */
5922 else if (mode0
== V8HImode
&& mode1
== V16QImode
&& mode2
== V16QImode
)
5923 type
= v8hi_ftype_v16qi_v16qi
;
5925 /* vint, vshort, vshort. */
5926 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V8HImode
)
5927 type
= v4si_ftype_v8hi_v8hi
;
5929 /* vshort, vint, vint. */
5930 else if (mode0
== V8HImode
&& mode1
== V4SImode
&& mode2
== V4SImode
)
5931 type
= v8hi_ftype_v4si_v4si
;
5933 /* vchar, vshort, vshort. */
5934 else if (mode0
== V16QImode
&& mode1
== V8HImode
&& mode2
== V8HImode
)
5935 type
= v16qi_ftype_v8hi_v8hi
;
5937 /* vint, vchar, vint. */
5938 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V4SImode
)
5939 type
= v4si_ftype_v16qi_v4si
;
5941 /* vint, vchar, vchar. */
5942 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V16QImode
)
5943 type
= v4si_ftype_v16qi_v16qi
;
5945 /* vint, vshort, vint. */
5946 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V4SImode
)
5947 type
= v4si_ftype_v8hi_v4si
;
5949 /* vint, vint, 5 bit literal. */
5950 else if (mode0
== V4SImode
&& mode1
== V4SImode
&& mode2
== QImode
)
5951 type
= v4si_ftype_v4si_char
;
5953 /* vshort, vshort, 5 bit literal. */
5954 else if (mode0
== V8HImode
&& mode1
== V8HImode
&& mode2
== QImode
)
5955 type
= v8hi_ftype_v8hi_char
;
5957 /* vchar, vchar, 5 bit literal. */
5958 else if (mode0
== V16QImode
&& mode1
== V16QImode
&& mode2
== QImode
)
5959 type
= v16qi_ftype_v16qi_char
;
5961 /* vfloat, vint, 5 bit literal. */
5962 else if (mode0
== V4SFmode
&& mode1
== V4SImode
&& mode2
== QImode
)
5963 type
= v4sf_ftype_v4si_char
;
5965 /* vint, vfloat, 5 bit literal. */
5966 else if (mode0
== V4SImode
&& mode1
== V4SFmode
&& mode2
== QImode
)
5967 type
= v4si_ftype_v4sf_char
;
5969 else if (mode0
== V2SImode
&& mode1
== SImode
&& mode2
== SImode
)
5970 type
= v2si_ftype_int_int
;
5972 else if (mode0
== V2SImode
&& mode1
== V2SImode
&& mode2
== QImode
)
5973 type
= v2si_ftype_v2si_char
;
5975 else if (mode0
== V2SImode
&& mode1
== SImode
&& mode2
== QImode
)
5976 type
= v2si_ftype_int_char
;
5979 else if (mode0
== SImode
)
5984 type
= int_ftype_v4si_v4si
;
5987 type
= int_ftype_v4sf_v4sf
;
5990 type
= int_ftype_v16qi_v16qi
;
5993 type
= int_ftype_v8hi_v8hi
;
6003 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
6006 /* Add the simple unary operators. */
6007 d
= (struct builtin_description
*) bdesc_1arg
;
6008 for (i
= 0; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
6010 enum machine_mode mode0
, mode1
;
6013 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
6016 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
6017 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
6019 if (mode0
== V4SImode
&& mode1
== QImode
)
6020 type
= v4si_ftype_char
;
6021 else if (mode0
== V8HImode
&& mode1
== QImode
)
6022 type
= v8hi_ftype_char
;
6023 else if (mode0
== V16QImode
&& mode1
== QImode
)
6024 type
= v16qi_ftype_char
;
6025 else if (mode0
== V4SFmode
&& mode1
== V4SFmode
)
6026 type
= v4sf_ftype_v4sf
;
6027 else if (mode0
== V8HImode
&& mode1
== V16QImode
)
6028 type
= v8hi_ftype_v16qi
;
6029 else if (mode0
== V4SImode
&& mode1
== V8HImode
)
6030 type
= v4si_ftype_v8hi
;
6031 else if (mode0
== V2SImode
&& mode1
== V2SImode
)
6032 type
= v2si_ftype_v2si
;
6033 else if (mode0
== V2SFmode
&& mode1
== V2SFmode
)
6034 type
= v2sf_ftype_v2sf
;
6035 else if (mode0
== V2SFmode
&& mode1
== V2SImode
)
6036 type
= v2sf_ftype_v2si
;
6037 else if (mode0
== V2SImode
&& mode1
== V2SFmode
)
6038 type
= v2si_ftype_v2sf
;
6039 else if (mode0
== V2SImode
&& mode1
== QImode
)
6040 type
= v2si_ftype_char
;
6044 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
6049 /* Expand a block move operation, and return 1 if successful. Return 0
6050 if we should let the compiler generate normal code.
6052 operands[0] is the destination
6053 operands[1] is the source
6054 operands[2] is the length
6055 operands[3] is the alignment */
6057 #define MAX_MOVE_REG 4
6060 expand_block_move (operands
)
6063 rtx orig_dest
= operands
[0];
6064 rtx orig_src
= operands
[1];
6065 rtx bytes_rtx
= operands
[2];
6066 rtx align_rtx
= operands
[3];
6067 int constp
= (GET_CODE (bytes_rtx
) == CONST_INT
);
6073 /* If this is not a fixed size move, just call memcpy */
6077 /* If this is not a fixed size alignment, abort */
6078 if (GET_CODE (align_rtx
) != CONST_INT
)
6080 align
= INTVAL (align_rtx
);
6082 /* Anything to move? */
6083 bytes
= INTVAL (bytes_rtx
);
6087 /* store_one_arg depends on expand_block_move to handle at least the size of
6088 reg_parm_stack_space. */
6089 if (bytes
> (TARGET_POWERPC64
? 64 : 32))
6092 if (TARGET_STRING
) /* string instructions are available */
6094 for (offset
= 0; bytes
> 0; offset
+= move_bytes
, bytes
-= move_bytes
)
6097 rtx (*movstrsi
) PARAMS ((rtx
, rtx
, rtx
, rtx
));
6098 rtx (*mov
) PARAMS ((rtx
, rtx
));
6100 enum machine_mode mode
= BLKmode
;
6103 if (bytes
> 24 /* move up to 32 bytes at a time */
6111 && ! fixed_regs
[12])
6113 move_bytes
= (bytes
> 32) ? 32 : bytes
;
6114 gen_func
.movstrsi
= gen_movstrsi_8reg
;
6116 else if (bytes
> 16 /* move up to 24 bytes at a time */
6122 && ! fixed_regs
[10])
6124 move_bytes
= (bytes
> 24) ? 24 : bytes
;
6125 gen_func
.movstrsi
= gen_movstrsi_6reg
;
6127 else if (bytes
> 8 /* move up to 16 bytes at a time */
6133 move_bytes
= (bytes
> 16) ? 16 : bytes
;
6134 gen_func
.movstrsi
= gen_movstrsi_4reg
;
6136 else if (bytes
>= 8 && TARGET_POWERPC64
6137 /* 64-bit loads and stores require word-aligned
6139 && (align
>= 8 || (! STRICT_ALIGNMENT
&& align
>= 4)))
6143 gen_func
.mov
= gen_movdi
;
6145 else if (bytes
> 4 && !TARGET_POWERPC64
)
6146 { /* move up to 8 bytes at a time */
6147 move_bytes
= (bytes
> 8) ? 8 : bytes
;
6148 gen_func
.movstrsi
= gen_movstrsi_2reg
;
6150 else if (bytes
>= 4 && (align
>= 4 || ! STRICT_ALIGNMENT
))
6151 { /* move 4 bytes */
6154 gen_func
.mov
= gen_movsi
;
6156 else if (bytes
== 2 && (align
>= 2 || ! STRICT_ALIGNMENT
))
6157 { /* move 2 bytes */
6160 gen_func
.mov
= gen_movhi
;
6162 else if (bytes
== 1) /* move 1 byte */
6166 gen_func
.mov
= gen_movqi
;
6169 { /* move up to 4 bytes at a time */
6170 move_bytes
= (bytes
> 4) ? 4 : bytes
;
6171 gen_func
.movstrsi
= gen_movstrsi_1reg
;
6174 src
= adjust_address (orig_src
, mode
, offset
);
6175 dest
= adjust_address (orig_dest
, mode
, offset
);
6177 if (mode
== BLKmode
)
6179 /* Move the address into scratch registers. The movstrsi
6180 patterns require zero offset. */
6181 if (!REG_P (XEXP (src
, 0)))
6183 rtx src_reg
= copy_addr_to_reg (XEXP (src
, 0));
6184 src
= replace_equiv_address (src
, src_reg
);
6186 set_mem_size (src
, GEN_INT (move_bytes
));
6188 if (!REG_P (XEXP (dest
, 0)))
6190 rtx dest_reg
= copy_addr_to_reg (XEXP (dest
, 0));
6191 dest
= replace_equiv_address (dest
, dest_reg
);
6193 set_mem_size (dest
, GEN_INT (move_bytes
));
6195 emit_insn ((*gen_func
.movstrsi
) (dest
, src
,
6196 GEN_INT (move_bytes
& 31),
6201 rtx tmp_reg
= gen_reg_rtx (mode
);
6203 emit_insn ((*gen_func
.mov
) (tmp_reg
, src
));
6204 emit_insn ((*gen_func
.mov
) (dest
, tmp_reg
));
6209 else /* string instructions not available */
6211 rtx stores
[MAX_MOVE_REG
];
6215 for (offset
= 0; bytes
> 0; offset
+= move_bytes
, bytes
-= move_bytes
)
6217 rtx (*gen_mov_func
) PARAMS ((rtx
, rtx
));
6218 enum machine_mode mode
;
6219 rtx src
, dest
, tmp_reg
;
6221 /* Generate the appropriate load and store, saving the stores
6223 if (bytes
>= 8 && TARGET_POWERPC64
6224 /* 64-bit loads and stores require word-aligned
6226 && (align
>= 8 || (! STRICT_ALIGNMENT
&& align
>= 4)))
6230 gen_mov_func
= gen_movdi
;
6232 else if (bytes
>= 4 && (align
>= 4 || ! STRICT_ALIGNMENT
))
6236 gen_mov_func
= gen_movsi
;
6238 else if (bytes
>= 2 && (align
>= 2 || ! STRICT_ALIGNMENT
))
6242 gen_mov_func
= gen_movhi
;
6248 gen_mov_func
= gen_movqi
;
6251 src
= adjust_address (orig_src
, mode
, offset
);
6252 dest
= adjust_address (orig_dest
, mode
, offset
);
6253 tmp_reg
= gen_reg_rtx (mode
);
6255 emit_insn ((*gen_mov_func
) (tmp_reg
, src
));
6256 stores
[num_reg
++] = (*gen_mov_func
) (dest
, tmp_reg
);
6258 if (num_reg
>= MAX_MOVE_REG
)
6260 for (i
= 0; i
< num_reg
; i
++)
6261 emit_insn (stores
[i
]);
6266 for (i
= 0; i
< num_reg
; i
++)
6267 emit_insn (stores
[i
]);
6274 /* Return 1 if OP is a load multiple operation. It is known to be a
6275 PARALLEL and the first section will be tested. */
6278 load_multiple_operation (op
, mode
)
6280 enum machine_mode mode ATTRIBUTE_UNUSED
;
6282 int count
= XVECLEN (op
, 0);
6283 unsigned int dest_regno
;
6287 /* Perform a quick check so we don't blow up below. */
6289 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6290 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
6291 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != MEM
)
6294 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
6295 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0);
6297 for (i
= 1; i
< count
; i
++)
6299 rtx elt
= XVECEXP (op
, 0, i
);
6301 if (GET_CODE (elt
) != SET
6302 || GET_CODE (SET_DEST (elt
)) != REG
6303 || GET_MODE (SET_DEST (elt
)) != SImode
6304 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
6305 || GET_CODE (SET_SRC (elt
)) != MEM
6306 || GET_MODE (SET_SRC (elt
)) != SImode
6307 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != PLUS
6308 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt
), 0), 0), src_addr
)
6309 || GET_CODE (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != CONST_INT
6310 || INTVAL (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != i
* 4)
6317 /* Similar, but tests for store multiple. Here, the second vector element
6318 is a CLOBBER. It will be tested later. */
6321 store_multiple_operation (op
, mode
)
6323 enum machine_mode mode ATTRIBUTE_UNUSED
;
6325 int count
= XVECLEN (op
, 0) - 1;
6326 unsigned int src_regno
;
6330 /* Perform a quick check so we don't blow up below. */
6332 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6333 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != MEM
6334 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != REG
)
6337 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
6338 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, 0)), 0);
6340 for (i
= 1; i
< count
; i
++)
6342 rtx elt
= XVECEXP (op
, 0, i
+ 1);
6344 if (GET_CODE (elt
) != SET
6345 || GET_CODE (SET_SRC (elt
)) != REG
6346 || GET_MODE (SET_SRC (elt
)) != SImode
6347 || REGNO (SET_SRC (elt
)) != src_regno
+ i
6348 || GET_CODE (SET_DEST (elt
)) != MEM
6349 || GET_MODE (SET_DEST (elt
)) != SImode
6350 || GET_CODE (XEXP (SET_DEST (elt
), 0)) != PLUS
6351 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt
), 0), 0), dest_addr
)
6352 || GET_CODE (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != CONST_INT
6353 || INTVAL (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != i
* 4)
6360 /* Return 1 for a parallel vrsave operation. */
6363 vrsave_operation (op
, mode
)
6365 enum machine_mode mode ATTRIBUTE_UNUSED
;
6367 int count
= XVECLEN (op
, 0);
6368 unsigned int dest_regno
, src_regno
;
6372 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6373 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
6374 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != UNSPEC_VOLATILE
)
6377 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
6378 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
6380 if (dest_regno
!= VRSAVE_REGNO
6381 && src_regno
!= VRSAVE_REGNO
)
6384 for (i
= 1; i
< count
; i
++)
6386 rtx elt
= XVECEXP (op
, 0, i
);
6388 if (GET_CODE (elt
) != CLOBBER
6389 && GET_CODE (elt
) != SET
)
6396 /* Return 1 for an PARALLEL suitable for mtcrf. */
6399 mtcrf_operation (op
, mode
)
6401 enum machine_mode mode ATTRIBUTE_UNUSED
;
6403 int count
= XVECLEN (op
, 0);
6407 /* Perform a quick check so we don't blow up below. */
6409 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6410 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != UNSPEC
6411 || XVECLEN (SET_SRC (XVECEXP (op
, 0, 0)), 0) != 2)
6413 src_reg
= XVECEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0, 0);
6415 if (GET_CODE (src_reg
) != REG
6416 || GET_MODE (src_reg
) != SImode
6417 || ! INT_REGNO_P (REGNO (src_reg
)))
6420 for (i
= 0; i
< count
; i
++)
6422 rtx exp
= XVECEXP (op
, 0, i
);
6426 if (GET_CODE (exp
) != SET
6427 || GET_CODE (SET_DEST (exp
)) != REG
6428 || GET_MODE (SET_DEST (exp
)) != CCmode
6429 || ! CR_REGNO_P (REGNO (SET_DEST (exp
))))
6431 unspec
= SET_SRC (exp
);
6432 maskval
= 1 << (MAX_CR_REGNO
- REGNO (SET_DEST (exp
)));
6434 if (GET_CODE (unspec
) != UNSPEC
6435 || XINT (unspec
, 1) != 20
6436 || XVECLEN (unspec
, 0) != 2
6437 || XVECEXP (unspec
, 0, 0) != src_reg
6438 || GET_CODE (XVECEXP (unspec
, 0, 1)) != CONST_INT
6439 || INTVAL (XVECEXP (unspec
, 0, 1)) != maskval
)
6445 /* Return 1 for an PARALLEL suitable for lmw. */
6448 lmw_operation (op
, mode
)
6450 enum machine_mode mode ATTRIBUTE_UNUSED
;
6452 int count
= XVECLEN (op
, 0);
6453 unsigned int dest_regno
;
6455 unsigned int base_regno
;
6456 HOST_WIDE_INT offset
;
6459 /* Perform a quick check so we don't blow up below. */
6461 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6462 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
6463 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != MEM
)
6466 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
6467 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0);
6470 || count
!= 32 - (int) dest_regno
)
6473 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr
, 0))
6476 base_regno
= REGNO (src_addr
);
6477 if (base_regno
== 0)
6480 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, src_addr
, 0))
6482 offset
= INTVAL (XEXP (src_addr
, 1));
6483 base_regno
= REGNO (XEXP (src_addr
, 0));
6488 for (i
= 0; i
< count
; i
++)
6490 rtx elt
= XVECEXP (op
, 0, i
);
6493 HOST_WIDE_INT newoffset
;
6495 if (GET_CODE (elt
) != SET
6496 || GET_CODE (SET_DEST (elt
)) != REG
6497 || GET_MODE (SET_DEST (elt
)) != SImode
6498 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
6499 || GET_CODE (SET_SRC (elt
)) != MEM
6500 || GET_MODE (SET_SRC (elt
)) != SImode
)
6502 newaddr
= XEXP (SET_SRC (elt
), 0);
6503 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr
, 0))
6508 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, newaddr
, 0))
6510 addr_reg
= XEXP (newaddr
, 0);
6511 newoffset
= INTVAL (XEXP (newaddr
, 1));
6515 if (REGNO (addr_reg
) != base_regno
6516 || newoffset
!= offset
+ 4 * i
)
6523 /* Return 1 for an PARALLEL suitable for stmw. */
6526 stmw_operation (op
, mode
)
6528 enum machine_mode mode ATTRIBUTE_UNUSED
;
6530 int count
= XVECLEN (op
, 0);
6531 unsigned int src_regno
;
6533 unsigned int base_regno
;
6534 HOST_WIDE_INT offset
;
6537 /* Perform a quick check so we don't blow up below. */
6539 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6540 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != MEM
6541 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != REG
)
6544 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
6545 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, 0)), 0);
6548 || count
!= 32 - (int) src_regno
)
6551 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr
, 0))
6554 base_regno
= REGNO (dest_addr
);
6555 if (base_regno
== 0)
6558 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, dest_addr
, 0))
6560 offset
= INTVAL (XEXP (dest_addr
, 1));
6561 base_regno
= REGNO (XEXP (dest_addr
, 0));
6566 for (i
= 0; i
< count
; i
++)
6568 rtx elt
= XVECEXP (op
, 0, i
);
6571 HOST_WIDE_INT newoffset
;
6573 if (GET_CODE (elt
) != SET
6574 || GET_CODE (SET_SRC (elt
)) != REG
6575 || GET_MODE (SET_SRC (elt
)) != SImode
6576 || REGNO (SET_SRC (elt
)) != src_regno
+ i
6577 || GET_CODE (SET_DEST (elt
)) != MEM
6578 || GET_MODE (SET_DEST (elt
)) != SImode
)
6580 newaddr
= XEXP (SET_DEST (elt
), 0);
6581 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr
, 0))
6586 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, newaddr
, 0))
6588 addr_reg
= XEXP (newaddr
, 0);
6589 newoffset
= INTVAL (XEXP (newaddr
, 1));
6593 if (REGNO (addr_reg
) != base_regno
6594 || newoffset
!= offset
+ 4 * i
)
6601 /* A validation routine: say whether CODE, a condition code, and MODE
6602 match. The other alternatives either don't make sense or should
6603 never be generated. */
6606 validate_condition_mode (code
, mode
)
6608 enum machine_mode mode
;
6610 if (GET_RTX_CLASS (code
) != '<'
6611 || GET_MODE_CLASS (mode
) != MODE_CC
)
6614 /* These don't make sense. */
6615 if ((code
== GT
|| code
== LT
|| code
== GE
|| code
== LE
)
6616 && mode
== CCUNSmode
)
6619 if ((code
== GTU
|| code
== LTU
|| code
== GEU
|| code
== LEU
)
6620 && mode
!= CCUNSmode
)
6623 if (mode
!= CCFPmode
6624 && (code
== ORDERED
|| code
== UNORDERED
6625 || code
== UNEQ
|| code
== LTGT
6626 || code
== UNGT
|| code
== UNLT
6627 || code
== UNGE
|| code
== UNLE
))
6630 /* These should never be generated except for
6631 flag_unsafe_math_optimizations and flag_finite_math_only. */
6632 if (mode
== CCFPmode
6633 && ! flag_unsafe_math_optimizations
6634 && ! flag_finite_math_only
6635 && (code
== LE
|| code
== GE
6636 || code
== UNEQ
|| code
== LTGT
6637 || code
== UNGT
|| code
== UNLT
))
6640 /* These are invalid; the information is not there. */
6641 if (mode
== CCEQmode
6642 && code
!= EQ
&& code
!= NE
)
6646 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
6647 We only check the opcode against the mode of the CC value here. */
6650 branch_comparison_operator (op
, mode
)
6652 enum machine_mode mode ATTRIBUTE_UNUSED
;
6654 enum rtx_code code
= GET_CODE (op
);
6655 enum machine_mode cc_mode
;
6657 if (GET_RTX_CLASS (code
) != '<')
6660 cc_mode
= GET_MODE (XEXP (op
, 0));
6661 if (GET_MODE_CLASS (cc_mode
) != MODE_CC
)
6664 validate_condition_mode (code
, cc_mode
);
6669 /* Return 1 if OP is a comparison operation that is valid for a branch
6670 insn and which is true if the corresponding bit in the CC register
6674 branch_positive_comparison_operator (op
, mode
)
6676 enum machine_mode mode
;
6680 if (! branch_comparison_operator (op
, mode
))
6683 code
= GET_CODE (op
);
6684 return (code
== EQ
|| code
== LT
|| code
== GT
6685 || (TARGET_SPE
&& TARGET_HARD_FLOAT
&& !TARGET_FPRS
&& code
== NE
)
6686 || code
== LTU
|| code
== GTU
6687 || code
== UNORDERED
);
6690 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
6691 We check the opcode against the mode of the CC value and disallow EQ or
6692 NE comparisons for integers. */
6695 scc_comparison_operator (op
, mode
)
6697 enum machine_mode mode
;
6699 enum rtx_code code
= GET_CODE (op
);
6700 enum machine_mode cc_mode
;
6702 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
6705 if (GET_RTX_CLASS (code
) != '<')
6708 cc_mode
= GET_MODE (XEXP (op
, 0));
6709 if (GET_MODE_CLASS (cc_mode
) != MODE_CC
)
6712 validate_condition_mode (code
, cc_mode
);
6714 if (code
== NE
&& cc_mode
!= CCFPmode
)
6721 trap_comparison_operator (op
, mode
)
6723 enum machine_mode mode
;
6725 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
6727 return GET_RTX_CLASS (GET_CODE (op
)) == '<';
6731 boolean_operator (op
, mode
)
6733 enum machine_mode mode ATTRIBUTE_UNUSED
;
6735 enum rtx_code code
= GET_CODE (op
);
6736 return (code
== AND
|| code
== IOR
|| code
== XOR
);
6740 boolean_or_operator (op
, mode
)
6742 enum machine_mode mode ATTRIBUTE_UNUSED
;
6744 enum rtx_code code
= GET_CODE (op
);
6745 return (code
== IOR
|| code
== XOR
);
6749 min_max_operator (op
, mode
)
6751 enum machine_mode mode ATTRIBUTE_UNUSED
;
6753 enum rtx_code code
= GET_CODE (op
);
6754 return (code
== SMIN
|| code
== SMAX
|| code
== UMIN
|| code
== UMAX
);
6757 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
6758 mask required to convert the result of a rotate insn into a shift
6759 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
6762 includes_lshift_p (shiftop
, andop
)
6766 unsigned HOST_WIDE_INT shift_mask
= ~(unsigned HOST_WIDE_INT
) 0;
6768 shift_mask
<<= INTVAL (shiftop
);
6770 return (INTVAL (andop
) & 0xffffffff & ~shift_mask
) == 0;
6773 /* Similar, but for right shift. */
6776 includes_rshift_p (shiftop
, andop
)
6780 unsigned HOST_WIDE_INT shift_mask
= ~(unsigned HOST_WIDE_INT
) 0;
6782 shift_mask
>>= INTVAL (shiftop
);
6784 return (INTVAL (andop
) & 0xffffffff & ~shift_mask
) == 0;
6787 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
6788 to perform a left shift. It must have exactly SHIFTOP least
6789 signifigant 0's, then one or more 1's, then zero or more 0's. */
6792 includes_rldic_lshift_p (shiftop
, andop
)
6796 if (GET_CODE (andop
) == CONST_INT
)
6798 HOST_WIDE_INT c
, lsb
, shift_mask
;
6801 if (c
== 0 || c
== ~0)
6805 shift_mask
<<= INTVAL (shiftop
);
6807 /* Find the least signifigant one bit. */
6810 /* It must coincide with the LSB of the shift mask. */
6811 if (-lsb
!= shift_mask
)
6814 /* Invert to look for the next transition (if any). */
6817 /* Remove the low group of ones (originally low group of zeros). */
6820 /* Again find the lsb, and check we have all 1's above. */
6824 else if (GET_CODE (andop
) == CONST_DOUBLE
6825 && (GET_MODE (andop
) == VOIDmode
|| GET_MODE (andop
) == DImode
))
6827 HOST_WIDE_INT low
, high
, lsb
;
6828 HOST_WIDE_INT shift_mask_low
, shift_mask_high
;
6830 low
= CONST_DOUBLE_LOW (andop
);
6831 if (HOST_BITS_PER_WIDE_INT
< 64)
6832 high
= CONST_DOUBLE_HIGH (andop
);
6834 if ((low
== 0 && (HOST_BITS_PER_WIDE_INT
>= 64 || high
== 0))
6835 || (low
== ~0 && (HOST_BITS_PER_WIDE_INT
>= 64 || high
== ~0)))
6838 if (HOST_BITS_PER_WIDE_INT
< 64 && low
== 0)
6840 shift_mask_high
= ~0;
6841 if (INTVAL (shiftop
) > 32)
6842 shift_mask_high
<<= INTVAL (shiftop
) - 32;
6846 if (-lsb
!= shift_mask_high
|| INTVAL (shiftop
) < 32)
6853 return high
== -lsb
;
6856 shift_mask_low
= ~0;
6857 shift_mask_low
<<= INTVAL (shiftop
);
6861 if (-lsb
!= shift_mask_low
)
6864 if (HOST_BITS_PER_WIDE_INT
< 64)
6869 if (HOST_BITS_PER_WIDE_INT
< 64 && low
== 0)
6872 return high
== -lsb
;
6876 return low
== -lsb
&& (HOST_BITS_PER_WIDE_INT
>= 64 || high
== ~0);
6882 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
6883 to perform a left shift. It must have SHIFTOP or more least
6884 signifigant 0's, with the remainder of the word 1's. */
6887 includes_rldicr_lshift_p (shiftop
, andop
)
6891 if (GET_CODE (andop
) == CONST_INT
)
6893 HOST_WIDE_INT c
, lsb
, shift_mask
;
6896 shift_mask
<<= INTVAL (shiftop
);
6899 /* Find the least signifigant one bit. */
6902 /* It must be covered by the shift mask.
6903 This test also rejects c == 0. */
6904 if ((lsb
& shift_mask
) == 0)
6907 /* Check we have all 1's above the transition, and reject all 1's. */
6908 return c
== -lsb
&& lsb
!= 1;
6910 else if (GET_CODE (andop
) == CONST_DOUBLE
6911 && (GET_MODE (andop
) == VOIDmode
|| GET_MODE (andop
) == DImode
))
6913 HOST_WIDE_INT low
, lsb
, shift_mask_low
;
6915 low
= CONST_DOUBLE_LOW (andop
);
6917 if (HOST_BITS_PER_WIDE_INT
< 64)
6919 HOST_WIDE_INT high
, shift_mask_high
;
6921 high
= CONST_DOUBLE_HIGH (andop
);
6925 shift_mask_high
= ~0;
6926 if (INTVAL (shiftop
) > 32)
6927 shift_mask_high
<<= INTVAL (shiftop
) - 32;
6931 if ((lsb
& shift_mask_high
) == 0)
6934 return high
== -lsb
;
6940 shift_mask_low
= ~0;
6941 shift_mask_low
<<= INTVAL (shiftop
);
6945 if ((lsb
& shift_mask_low
) == 0)
6948 return low
== -lsb
&& lsb
!= 1;
6954 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
6955 for lfq and stfq insns.
6957 Note reg1 and reg2 *must* be hard registers. To be sure we will
6958 abort if we are passed pseudo registers. */
6961 registers_ok_for_quad_peep (reg1
, reg2
)
6964 /* We might have been passed a SUBREG. */
6965 if (GET_CODE (reg1
) != REG
|| GET_CODE (reg2
) != REG
)
6968 return (REGNO (reg1
) == REGNO (reg2
) - 1);
6971 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
6972 addr1 and addr2 must be in consecutive memory locations
6973 (addr2 == addr1 + 8). */
6976 addrs_ok_for_quad_peep (addr1
, addr2
)
6983 /* Extract an offset (if used) from the first addr. */
6984 if (GET_CODE (addr1
) == PLUS
)
6986 /* If not a REG, return zero. */
6987 if (GET_CODE (XEXP (addr1
, 0)) != REG
)
6991 reg1
= REGNO (XEXP (addr1
, 0));
6992 /* The offset must be constant! */
6993 if (GET_CODE (XEXP (addr1
, 1)) != CONST_INT
)
6995 offset1
= INTVAL (XEXP (addr1
, 1));
6998 else if (GET_CODE (addr1
) != REG
)
7002 reg1
= REGNO (addr1
);
7003 /* This was a simple (mem (reg)) expression. Offset is 0. */
7007 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
7008 if (GET_CODE (addr2
) != PLUS
)
7011 if (GET_CODE (XEXP (addr2
, 0)) != REG
7012 || GET_CODE (XEXP (addr2
, 1)) != CONST_INT
)
7015 if (reg1
!= REGNO (XEXP (addr2
, 0)))
7018 /* The offset for the second addr must be 8 more than the first addr. */
7019 if (INTVAL (XEXP (addr2
, 1)) != offset1
+ 8)
7022 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7027 /* Return the register class of a scratch register needed to copy IN into
7028 or out of a register in CLASS in MODE. If it can be done directly,
7029 NO_REGS is returned. */
7032 secondary_reload_class (class, mode
, in
)
7033 enum reg_class
class;
7034 enum machine_mode mode ATTRIBUTE_UNUSED
;
7039 if (TARGET_ELF
|| (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))
7041 /* We cannot copy a symbolic operand directly into anything
7042 other than BASE_REGS for TARGET_ELF. So indicate that a
7043 register from BASE_REGS is needed as an intermediate
7046 On Darwin, pic addresses require a load from memory, which
7047 needs a base register. */
7048 if (class != BASE_REGS
7049 && (GET_CODE (in
) == SYMBOL_REF
7050 || GET_CODE (in
) == HIGH
7051 || GET_CODE (in
) == LABEL_REF
7052 || GET_CODE (in
) == CONST
))
7056 if (GET_CODE (in
) == REG
)
7059 if (regno
>= FIRST_PSEUDO_REGISTER
)
7061 regno
= true_regnum (in
);
7062 if (regno
>= FIRST_PSEUDO_REGISTER
)
7066 else if (GET_CODE (in
) == SUBREG
)
7068 regno
= true_regnum (in
);
7069 if (regno
>= FIRST_PSEUDO_REGISTER
)
7075 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
7077 if (class == GENERAL_REGS
|| class == BASE_REGS
7078 || (regno
>= 0 && INT_REGNO_P (regno
)))
7081 /* Constants, memory, and FP registers can go into FP registers. */
7082 if ((regno
== -1 || FP_REGNO_P (regno
))
7083 && (class == FLOAT_REGS
|| class == NON_SPECIAL_REGS
))
7086 /* Memory, and AltiVec registers can go into AltiVec registers. */
7087 if ((regno
== -1 || ALTIVEC_REGNO_P (regno
))
7088 && class == ALTIVEC_REGS
)
7091 /* We can copy among the CR registers. */
7092 if ((class == CR_REGS
|| class == CR0_REGS
)
7093 && regno
>= 0 && CR_REGNO_P (regno
))
7096 /* Otherwise, we need GENERAL_REGS. */
7097 return GENERAL_REGS
;
7100 /* Given a comparison operation, return the bit number in CCR to test. We
7101 know this is a valid comparison.
7103 SCC_P is 1 if this is for an scc. That means that %D will have been
7104 used instead of %C, so the bits will be in different places.
7106 Return -1 if OP isn't a valid comparison for some reason. */
7113 enum rtx_code code
= GET_CODE (op
);
7114 enum machine_mode cc_mode
;
7119 if (GET_RTX_CLASS (code
) != '<')
7124 if (GET_CODE (reg
) != REG
7125 || ! CR_REGNO_P (REGNO (reg
)))
7128 cc_mode
= GET_MODE (reg
);
7129 cc_regnum
= REGNO (reg
);
7130 base_bit
= 4 * (cc_regnum
- CR0_REGNO
);
7132 validate_condition_mode (code
, cc_mode
);
7137 if (TARGET_SPE
&& TARGET_HARD_FLOAT
&& cc_mode
== CCFPmode
)
7138 return base_bit
+ 1;
7139 return scc_p
? base_bit
+ 3 : base_bit
+ 2;
7141 if (TARGET_SPE
&& TARGET_HARD_FLOAT
&& cc_mode
== CCFPmode
)
7142 return base_bit
+ 1;
7143 return base_bit
+ 2;
7144 case GT
: case GTU
: case UNLE
:
7145 return base_bit
+ 1;
7146 case LT
: case LTU
: case UNGE
:
7148 case ORDERED
: case UNORDERED
:
7149 return base_bit
+ 3;
7152 /* If scc, we will have done a cror to put the bit in the
7153 unordered position. So test that bit. For integer, this is ! LT
7154 unless this is an scc insn. */
7155 return scc_p
? base_bit
+ 3 : base_bit
;
7158 return scc_p
? base_bit
+ 3 : base_bit
+ 1;
7165 /* Return the GOT register. */
7168 rs6000_got_register (value
)
7169 rtx value ATTRIBUTE_UNUSED
;
7171 /* The second flow pass currently (June 1999) can't update
7172 regs_ever_live without disturbing other parts of the compiler, so
7173 update it here to make the prolog/epilogue code happy. */
7174 if (no_new_pseudos
&& ! regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
])
7175 regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
] = 1;
7177 current_function_uses_pic_offset_table
= 1;
7179 return pic_offset_table_rtx
;
7182 /* Function to init struct machine_function.
7183 This will be called, via a pointer variable,
7184 from push_function_context. */
7186 static struct machine_function
*
7187 rs6000_init_machine_status ()
7189 return ggc_alloc_cleared (sizeof (machine_function
));
7192 /* These macros test for integers and extract the low-order bits. */
7194 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
7195 && GET_MODE (X) == VOIDmode)
7197 #define INT_LOWPART(X) \
7198 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
7205 unsigned long val
= INT_LOWPART (op
);
7207 /* If the high bit is zero, the value is the first 1 bit we find
7209 if ((val
& 0x80000000) == 0)
7211 if ((val
& 0xffffffff) == 0)
7215 while (((val
<<= 1) & 0x80000000) == 0)
7220 /* If the high bit is set and the low bit is not, or the mask is all
7221 1's, the value is zero. */
7222 if ((val
& 1) == 0 || (val
& 0xffffffff) == 0xffffffff)
7225 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7228 while (((val
>>= 1) & 1) != 0)
7239 unsigned long val
= INT_LOWPART (op
);
7241 /* If the low bit is zero, the value is the first 1 bit we find from
7245 if ((val
& 0xffffffff) == 0)
7249 while (((val
>>= 1) & 1) == 0)
7255 /* If the low bit is set and the high bit is not, or the mask is all
7256 1's, the value is 31. */
7257 if ((val
& 0x80000000) == 0 || (val
& 0xffffffff) == 0xffffffff)
7260 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7263 while (((val
<<= 1) & 0x80000000) != 0)
7269 /* Print an operand. Recognize special options, documented below. */
7272 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
7273 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
7275 #define SMALL_DATA_RELOC "sda21"
7276 #define SMALL_DATA_REG 0
7280 print_operand (file
, x
, code
)
7287 unsigned HOST_WIDE_INT uval
;
7292 /* Write out an instruction after the call which may be replaced
7293 with glue code by the loader. This depends on the AIX version. */
7294 asm_fprintf (file
, RS6000_CALL_GLUE
);
7297 /* %a is output_address. */
7300 /* If X is a constant integer whose low-order 5 bits are zero,
7301 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
7302 in the AIX assembler where "sri" with a zero shift count
7303 writes a trash instruction. */
7304 if (GET_CODE (x
) == CONST_INT
&& (INTVAL (x
) & 31) == 0)
7311 /* If constant, low-order 16 bits of constant, unsigned.
7312 Otherwise, write normally. */
7314 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 0xffff);
7316 print_operand (file
, x
, 0);
7320 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
7321 for 64-bit mask direction. */
7322 putc (((INT_LOWPART(x
) & 1) == 0 ? 'r' : 'l'), file
);
7325 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
7329 /* There used to be a comment for 'C' reading "This is an
7330 optional cror needed for certain floating-point
7331 comparisons. Otherwise write nothing." */
7333 /* Similar, except that this is for an scc, so we must be able to
7334 encode the test in a single bit that is one. We do the above
7335 for any LE, GE, GEU, or LEU and invert the bit for NE. */
7336 if (GET_CODE (x
) == LE
|| GET_CODE (x
) == GE
7337 || GET_CODE (x
) == LEU
|| GET_CODE (x
) == GEU
)
7339 int base_bit
= 4 * (REGNO (XEXP (x
, 0)) - CR0_REGNO
);
7341 fprintf (file
, "cror %d,%d,%d\n\t", base_bit
+ 3,
7343 base_bit
+ (GET_CODE (x
) == GE
|| GET_CODE (x
) == GEU
));
7346 else if (GET_CODE (x
) == NE
)
7348 int base_bit
= 4 * (REGNO (XEXP (x
, 0)) - CR0_REGNO
);
7350 fprintf (file
, "crnor %d,%d,%d\n\t", base_bit
+ 3,
7351 base_bit
+ 2, base_bit
+ 2);
7353 else if (TARGET_SPE
&& TARGET_HARD_FLOAT
7354 && GET_CODE (x
) == EQ
7355 && GET_MODE (XEXP (x
, 0)) == CCFPmode
)
7357 int base_bit
= 4 * (REGNO (XEXP (x
, 0)) - CR0_REGNO
);
7359 fprintf (file
, "crnor %d,%d,%d\n\t", base_bit
+ 1,
7360 base_bit
+ 1, base_bit
+ 1);
7365 /* X is a CR register. Print the number of the EQ bit of the CR */
7366 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7367 output_operand_lossage ("invalid %%E value");
7369 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
) + 2);
7373 /* X is a CR register. Print the shift count needed to move it
7374 to the high-order four bits. */
7375 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7376 output_operand_lossage ("invalid %%f value");
7378 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
));
7382 /* Similar, but print the count for the rotate in the opposite
7384 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7385 output_operand_lossage ("invalid %%F value");
7387 fprintf (file
, "%d", 32 - 4 * (REGNO (x
) - CR0_REGNO
));
7391 /* X is a constant integer. If it is negative, print "m",
7392 otherwise print "z". This is to make an aze or ame insn. */
7393 if (GET_CODE (x
) != CONST_INT
)
7394 output_operand_lossage ("invalid %%G value");
7395 else if (INTVAL (x
) >= 0)
7402 /* If constant, output low-order five bits. Otherwise, write
7405 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 31);
7407 print_operand (file
, x
, 0);
7411 /* If constant, output low-order six bits. Otherwise, write
7414 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 63);
7416 print_operand (file
, x
, 0);
7420 /* Print `i' if this is a constant, else nothing. */
7426 /* Write the bit number in CCR for jump. */
7429 output_operand_lossage ("invalid %%j code");
7431 fprintf (file
, "%d", i
);
7435 /* Similar, but add one for shift count in rlinm for scc and pass
7436 scc flag to `ccr_bit'. */
7439 output_operand_lossage ("invalid %%J code");
7441 /* If we want bit 31, write a shift count of zero, not 32. */
7442 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
7446 /* X must be a constant. Write the 1's complement of the
7449 output_operand_lossage ("invalid %%k value");
7451 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ~ INT_LOWPART (x
));
7455 /* X must be a symbolic constant on ELF. Write an
7456 expression suitable for an 'addi' that adds in the low 16
7458 if (GET_CODE (x
) != CONST
)
7460 print_operand_address (file
, x
);
7465 if (GET_CODE (XEXP (x
, 0)) != PLUS
7466 || (GET_CODE (XEXP (XEXP (x
, 0), 0)) != SYMBOL_REF
7467 && GET_CODE (XEXP (XEXP (x
, 0), 0)) != LABEL_REF
)
7468 || GET_CODE (XEXP (XEXP (x
, 0), 1)) != CONST_INT
)
7469 output_operand_lossage ("invalid %%K value");
7470 print_operand_address (file
, XEXP (XEXP (x
, 0), 0));
7472 /* For GNU as, there must be a non-alphanumeric character
7473 between 'l' and the number. The '-' is added by
7474 print_operand() already. */
7475 if (INTVAL (XEXP (XEXP (x
, 0), 1)) >= 0)
7477 print_operand (file
, XEXP (XEXP (x
, 0), 1), 0);
7481 /* %l is output_asm_label. */
7484 /* Write second word of DImode or DFmode reference. Works on register
7485 or non-indexed memory only. */
7486 if (GET_CODE (x
) == REG
)
7487 fprintf (file
, "%s", reg_names
[REGNO (x
) + 1]);
7488 else if (GET_CODE (x
) == MEM
)
7490 /* Handle possible auto-increment. Since it is pre-increment and
7491 we have already done it, we can just use an offset of word. */
7492 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
7493 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
7494 output_address (plus_constant (XEXP (XEXP (x
, 0), 0),
7497 output_address (XEXP (adjust_address_nv (x
, SImode
,
7501 if (small_data_operand (x
, GET_MODE (x
)))
7502 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
7503 reg_names
[SMALL_DATA_REG
]);
7508 /* MB value for a mask operand. */
7509 if (! mask_operand (x
, SImode
))
7510 output_operand_lossage ("invalid %%m value");
7512 fprintf (file
, "%d", extract_MB (x
));
7516 /* ME value for a mask operand. */
7517 if (! mask_operand (x
, SImode
))
7518 output_operand_lossage ("invalid %%M value");
7520 fprintf (file
, "%d", extract_ME (x
));
7523 /* %n outputs the negative of its operand. */
7526 /* Write the number of elements in the vector times 4. */
7527 if (GET_CODE (x
) != PARALLEL
)
7528 output_operand_lossage ("invalid %%N value");
7530 fprintf (file
, "%d", XVECLEN (x
, 0) * 4);
7534 /* Similar, but subtract 1 first. */
7535 if (GET_CODE (x
) != PARALLEL
)
7536 output_operand_lossage ("invalid %%O value");
7538 fprintf (file
, "%d", (XVECLEN (x
, 0) - 1) * 4);
7542 /* X is a CONST_INT that is a power of two. Output the logarithm. */
7544 || INT_LOWPART (x
) < 0
7545 || (i
= exact_log2 (INT_LOWPART (x
))) < 0)
7546 output_operand_lossage ("invalid %%p value");
7548 fprintf (file
, "%d", i
);
7552 /* The operand must be an indirect memory reference. The result
7553 is the register number. */
7554 if (GET_CODE (x
) != MEM
|| GET_CODE (XEXP (x
, 0)) != REG
7555 || REGNO (XEXP (x
, 0)) >= 32)
7556 output_operand_lossage ("invalid %%P value");
7558 fprintf (file
, "%d", REGNO (XEXP (x
, 0)));
7562 /* This outputs the logical code corresponding to a boolean
7563 expression. The expression may have one or both operands
7564 negated (if one, only the first one). For condition register
7565 logical operations, it will also treat the negated
7566 CR codes as NOTs, but not handle NOTs of them. */
7568 const char *const *t
= 0;
7570 enum rtx_code code
= GET_CODE (x
);
7571 static const char * const tbl
[3][3] = {
7572 { "and", "andc", "nor" },
7573 { "or", "orc", "nand" },
7574 { "xor", "eqv", "xor" } };
7578 else if (code
== IOR
)
7580 else if (code
== XOR
)
7583 output_operand_lossage ("invalid %%q value");
7585 if (GET_CODE (XEXP (x
, 0)) != NOT
)
7589 if (GET_CODE (XEXP (x
, 1)) == NOT
)
7600 /* X is a CR register. Print the mask for `mtcrf'. */
7601 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7602 output_operand_lossage ("invalid %%R value");
7604 fprintf (file
, "%d", 128 >> (REGNO (x
) - CR0_REGNO
));
7608 /* Low 5 bits of 32 - value */
7610 output_operand_lossage ("invalid %%s value");
7612 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, (32 - INT_LOWPART (x
)) & 31);
7616 /* PowerPC64 mask position. All 0's is excluded.
7617 CONST_INT 32-bit mask is considered sign-extended so any
7618 transition must occur within the CONST_INT, not on the boundary. */
7619 if (! mask64_operand (x
, DImode
))
7620 output_operand_lossage ("invalid %%S value");
7622 uval
= INT_LOWPART (x
);
7624 if (uval
& 1) /* Clear Left */
7626 uval
&= ((unsigned HOST_WIDE_INT
) 1 << 63 << 1) - 1;
7629 else /* Clear Right */
7632 uval
&= ((unsigned HOST_WIDE_INT
) 1 << 63 << 1) - 1;
7639 fprintf (file
, "%d", i
);
7643 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
7644 if (GET_CODE (x
) != REG
|| GET_MODE (x
) != CCmode
)
7647 /* Bit 3 is OV bit. */
7648 i
= 4 * (REGNO (x
) - CR0_REGNO
) + 3;
7650 /* If we want bit 31, write a shift count of zero, not 32. */
7651 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
7655 /* Print the symbolic name of a branch target register. */
7656 if (GET_CODE (x
) != REG
|| (REGNO (x
) != LINK_REGISTER_REGNUM
7657 && REGNO (x
) != COUNT_REGISTER_REGNUM
))
7658 output_operand_lossage ("invalid %%T value");
7659 else if (REGNO (x
) == LINK_REGISTER_REGNUM
)
7660 fputs (TARGET_NEW_MNEMONICS
? "lr" : "r", file
);
7662 fputs ("ctr", file
);
7666 /* High-order 16 bits of constant for use in unsigned operand. */
7668 output_operand_lossage ("invalid %%u value");
7670 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
7671 (INT_LOWPART (x
) >> 16) & 0xffff);
7675 /* High-order 16 bits of constant for use in signed operand. */
7677 output_operand_lossage ("invalid %%v value");
7679 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
7680 (INT_LOWPART (x
) >> 16) & 0xffff);
7684 /* Print `u' if this has an auto-increment or auto-decrement. */
7685 if (GET_CODE (x
) == MEM
7686 && (GET_CODE (XEXP (x
, 0)) == PRE_INC
7687 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
))
7692 /* Print the trap code for this operand. */
7693 switch (GET_CODE (x
))
7696 fputs ("eq", file
); /* 4 */
7699 fputs ("ne", file
); /* 24 */
7702 fputs ("lt", file
); /* 16 */
7705 fputs ("le", file
); /* 20 */
7708 fputs ("gt", file
); /* 8 */
7711 fputs ("ge", file
); /* 12 */
7714 fputs ("llt", file
); /* 2 */
7717 fputs ("lle", file
); /* 6 */
7720 fputs ("lgt", file
); /* 1 */
7723 fputs ("lge", file
); /* 5 */
7731 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
7734 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
,
7735 ((INT_LOWPART (x
) & 0xffff) ^ 0x8000) - 0x8000);
7737 print_operand (file
, x
, 0);
7741 /* MB value for a PowerPC64 rldic operand. */
7742 val
= (GET_CODE (x
) == CONST_INT
7743 ? INTVAL (x
) : CONST_DOUBLE_HIGH (x
));
7748 for (i
= 0; i
< HOST_BITS_PER_WIDE_INT
; i
++)
7749 if ((val
<<= 1) < 0)
7752 #if HOST_BITS_PER_WIDE_INT == 32
7753 if (GET_CODE (x
) == CONST_INT
&& i
>= 0)
7754 i
+= 32; /* zero-extend high-part was all 0's */
7755 else if (GET_CODE (x
) == CONST_DOUBLE
&& i
== 32)
7757 val
= CONST_DOUBLE_LOW (x
);
7764 for ( ; i
< 64; i
++)
7765 if ((val
<<= 1) < 0)
7770 fprintf (file
, "%d", i
+ 1);
7774 if (GET_CODE (x
) == MEM
7775 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x
, 0), 0))
7780 /* Like 'L', for third word of TImode */
7781 if (GET_CODE (x
) == REG
)
7782 fprintf (file
, "%s", reg_names
[REGNO (x
) + 2]);
7783 else if (GET_CODE (x
) == MEM
)
7785 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
7786 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
7787 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 8));
7789 output_address (XEXP (adjust_address_nv (x
, SImode
, 8), 0));
7790 if (small_data_operand (x
, GET_MODE (x
)))
7791 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
7792 reg_names
[SMALL_DATA_REG
]);
7797 /* X is a SYMBOL_REF. Write out the name preceded by a
7798 period and without any trailing data in brackets. Used for function
7799 names. If we are configured for System V (or the embedded ABI) on
7800 the PowerPC, do not emit the period, since those systems do not use
7801 TOCs and the like. */
7802 if (GET_CODE (x
) != SYMBOL_REF
)
7805 if (XSTR (x
, 0)[0] != '.')
7807 switch (DEFAULT_ABI
)
7817 case ABI_AIX_NODESC
:
7823 RS6000_OUTPUT_BASENAME (file
, XSTR (x
, 0));
7825 assemble_name (file
, XSTR (x
, 0));
7830 /* Like 'L', for last word of TImode. */
7831 if (GET_CODE (x
) == REG
)
7832 fprintf (file
, "%s", reg_names
[REGNO (x
) + 3]);
7833 else if (GET_CODE (x
) == MEM
)
7835 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
7836 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
7837 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 12));
7839 output_address (XEXP (adjust_address_nv (x
, SImode
, 12), 0));
7840 if (small_data_operand (x
, GET_MODE (x
)))
7841 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
7842 reg_names
[SMALL_DATA_REG
]);
7846 /* Print AltiVec or SPE memory operand. */
7851 if (GET_CODE (x
) != MEM
)
7859 if (GET_CODE (tmp
) == REG
)
7861 fprintf (file
, "0(%s)", reg_names
[REGNO (tmp
)]);
7864 /* Handle [reg+UIMM]. */
7865 else if (GET_CODE (tmp
) == PLUS
&&
7866 GET_CODE (XEXP (tmp
, 1)) == CONST_INT
)
7870 if (GET_CODE (XEXP (tmp
, 0)) != REG
)
7873 x
= INTVAL (XEXP (tmp
, 1));
7874 fprintf (file
, "%d(%s)", x
, reg_names
[REGNO (XEXP (tmp
, 0))]);
7878 /* Fall through. Must be [reg+reg]. */
7880 if (GET_CODE (tmp
) == REG
)
7881 fprintf (file
, "0,%s", reg_names
[REGNO (tmp
)]);
7882 else if (GET_CODE (tmp
) == PLUS
&& GET_CODE (XEXP (tmp
, 1)) == REG
)
7884 if (REGNO (XEXP (tmp
, 0)) == 0)
7885 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (tmp
, 1)) ],
7886 reg_names
[ REGNO (XEXP (tmp
, 0)) ]);
7888 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (tmp
, 0)) ],
7889 reg_names
[ REGNO (XEXP (tmp
, 1)) ]);
7897 if (GET_CODE (x
) == REG
)
7898 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
7899 else if (GET_CODE (x
) == MEM
)
7901 /* We need to handle PRE_INC and PRE_DEC here, since we need to
7902 know the width from the mode. */
7903 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
)
7904 fprintf (file
, "%d(%s)", GET_MODE_SIZE (GET_MODE (x
)),
7905 reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))]);
7906 else if (GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
7907 fprintf (file
, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x
)),
7908 reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))]);
7910 output_address (XEXP (x
, 0));
7913 output_addr_const (file
, x
);
7917 output_operand_lossage ("invalid %%xn code");
7921 /* Print the address of an operand. */
7924 print_operand_address (file
, x
)
7928 if (GET_CODE (x
) == REG
)
7929 fprintf (file
, "0(%s)", reg_names
[ REGNO (x
) ]);
7930 else if (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == CONST
7931 || GET_CODE (x
) == LABEL_REF
)
7933 output_addr_const (file
, x
);
7934 if (small_data_operand (x
, GET_MODE (x
)))
7935 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
7936 reg_names
[SMALL_DATA_REG
]);
7937 else if (TARGET_TOC
)
7940 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == REG
)
7942 if (REGNO (XEXP (x
, 0)) == 0)
7943 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (x
, 1)) ],
7944 reg_names
[ REGNO (XEXP (x
, 0)) ]);
7946 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (x
, 0)) ],
7947 reg_names
[ REGNO (XEXP (x
, 1)) ]);
7949 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
)
7951 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (XEXP (x
, 1)));
7952 fprintf (file
, "(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
7955 else if (GET_CODE (x
) == LO_SUM
&& GET_CODE (XEXP (x
, 0)) == REG
7956 && CONSTANT_P (XEXP (x
, 1)))
7958 output_addr_const (file
, XEXP (x
, 1));
7959 fprintf (file
, "@l(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
7963 else if (GET_CODE (x
) == LO_SUM
&& GET_CODE (XEXP (x
, 0)) == REG
7964 && CONSTANT_P (XEXP (x
, 1)))
7966 fprintf (file
, "lo16(");
7967 output_addr_const (file
, XEXP (x
, 1));
7968 fprintf (file
, ")(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
7971 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x
))
7973 if (TARGET_AIX
&& (!TARGET_ELF
|| !TARGET_MINIMAL_TOC
))
7975 rtx contains_minus
= XEXP (x
, 1);
7979 /* Find the (minus (sym) (toc)) buried in X, and temporarily
7980 turn it into (sym) for output_addr_const. */
7981 while (GET_CODE (XEXP (contains_minus
, 0)) != MINUS
)
7982 contains_minus
= XEXP (contains_minus
, 0);
7984 minus
= XEXP (contains_minus
, 0);
7985 symref
= XEXP (minus
, 0);
7986 XEXP (contains_minus
, 0) = symref
;
7991 name
= XSTR (symref
, 0);
7992 newname
= alloca (strlen (name
) + sizeof ("@toc"));
7993 strcpy (newname
, name
);
7994 strcat (newname
, "@toc");
7995 XSTR (symref
, 0) = newname
;
7997 output_addr_const (file
, XEXP (x
, 1));
7999 XSTR (symref
, 0) = name
;
8000 XEXP (contains_minus
, 0) = minus
;
8003 output_addr_const (file
, XEXP (x
, 1));
8005 fprintf (file
, "(%s)", reg_names
[REGNO (XEXP (x
, 0))]);
8011 /* Target hook for assembling integer objects. The PowerPC version has
8012 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
8013 is defined. It also needs to handle DI-mode objects on 64-bit
8017 rs6000_assemble_integer (x
, size
, aligned_p
)
8022 #ifdef RELOCATABLE_NEEDS_FIXUP
8023 /* Special handling for SI values. */
8024 if (size
== 4 && aligned_p
)
8026 extern int in_toc_section
PARAMS ((void));
8027 static int recurse
= 0;
8029 /* For -mrelocatable, we mark all addresses that need to be fixed up
8030 in the .fixup section. */
8031 if (TARGET_RELOCATABLE
8032 && !in_toc_section ()
8033 && !in_text_section ()
8035 && GET_CODE (x
) != CONST_INT
8036 && GET_CODE (x
) != CONST_DOUBLE
8042 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCP", fixuplabelno
);
8044 ASM_OUTPUT_LABEL (asm_out_file
, buf
);
8045 fprintf (asm_out_file
, "\t.long\t(");
8046 output_addr_const (asm_out_file
, x
);
8047 fprintf (asm_out_file
, ")@fixup\n");
8048 fprintf (asm_out_file
, "\t.section\t\".fixup\",\"aw\"\n");
8049 ASM_OUTPUT_ALIGN (asm_out_file
, 2);
8050 fprintf (asm_out_file
, "\t.long\t");
8051 assemble_name (asm_out_file
, buf
);
8052 fprintf (asm_out_file
, "\n\t.previous\n");
8056 /* Remove initial .'s to turn a -mcall-aixdesc function
8057 address into the address of the descriptor, not the function
8059 else if (GET_CODE (x
) == SYMBOL_REF
8060 && XSTR (x
, 0)[0] == '.'
8061 && DEFAULT_ABI
== ABI_AIX
)
8063 const char *name
= XSTR (x
, 0);
8064 while (*name
== '.')
8067 fprintf (asm_out_file
, "\t.long\t%s\n", name
);
8071 #endif /* RELOCATABLE_NEEDS_FIXUP */
8072 return default_assemble_integer (x
, size
, aligned_p
);
#ifdef HAVE_GAS_HIDDEN
/* Emit an assembler directive to set symbol visibility for DECL to
   VISIBILITY_TYPE.  */

static void
rs6000_assemble_visibility (decl, visibility_type)
     tree decl;
     const char *visibility_type;
{
  default_assemble_visibility (decl, visibility_type);

  /* Functions need to have their entry point symbol visibility set as
     well as their descriptor symbol visibility.  */
  if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
    {
      const char *name;

      name = ((* targetm.strip_name_encoding)
	      (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));

      /* The ".name" entry-point symbol gets the same directive as the
	 descriptor symbol emitted by the default hook above.  */
      fprintf (asm_out_file, "\t.%s\t.%s\n", visibility_type, name);
    }
}
#endif
8101 rs6000_reverse_condition (mode
, code
)
8102 enum machine_mode mode
;
8105 /* Reversal of FP compares takes care -- an ordered compare
8106 becomes an unordered compare and vice versa. */
8107 if (mode
== CCFPmode
)
8108 return reverse_condition_maybe_unordered (code
);
8110 return reverse_condition (code
);
8113 /* Generate a compare for CODE. Return a brand-new rtx that
8114 represents the result of the compare. */
8117 rs6000_generate_compare (code
)
8120 enum machine_mode comp_mode
;
8123 if (rs6000_compare_fp_p
)
8124 comp_mode
= CCFPmode
;
8125 else if (code
== GTU
|| code
== LTU
8126 || code
== GEU
|| code
== LEU
)
8127 comp_mode
= CCUNSmode
;
8131 /* First, the compare. */
8132 compare_result
= gen_reg_rtx (comp_mode
);
8134 /* SPE FP compare instructions on the GPRs. Yuck! */
8135 if ((TARGET_SPE
&& TARGET_HARD_FLOAT
) && rs6000_compare_fp_p
)
8137 rtx cmp
, or1
, or2
, or_result
, compare_result2
;
8145 cmp
= flag_unsafe_math_optimizations
8146 ? gen_tstsfeq_gpr (compare_result
, rs6000_compare_op0
,
8148 : gen_cmpsfeq_gpr (compare_result
, rs6000_compare_op0
,
8149 rs6000_compare_op1
);
8157 cmp
= flag_unsafe_math_optimizations
8158 ? gen_tstsfgt_gpr (compare_result
, rs6000_compare_op0
,
8160 : gen_cmpsfgt_gpr (compare_result
, rs6000_compare_op0
,
8161 rs6000_compare_op1
);
8169 cmp
= flag_unsafe_math_optimizations
8170 ? gen_tstsflt_gpr (compare_result
, rs6000_compare_op0
,
8172 : gen_cmpsflt_gpr (compare_result
, rs6000_compare_op0
,
8173 rs6000_compare_op1
);
8179 /* Synthesize LE and GE from LT/GT || EQ. */
8180 if (code
== LE
|| code
== GE
|| code
== LEU
|| code
== GEU
)
8182 /* Synthesize GE/LE frome GT/LT || EQ. */
8188 case LE
: code
= LT
; break;
8189 case GE
: code
= GT
; break;
8190 case LEU
: code
= LT
; break;
8191 case GEU
: code
= GT
; break;
8195 or1
= gen_reg_rtx (SImode
);
8196 or2
= gen_reg_rtx (SImode
);
8197 or_result
= gen_reg_rtx (CCEQmode
);
8198 compare_result2
= gen_reg_rtx (CCFPmode
);
8201 cmp
= flag_unsafe_math_optimizations
8202 ? gen_tstsfeq_gpr (compare_result2
, rs6000_compare_op0
,
8204 : gen_cmpsfeq_gpr (compare_result2
, rs6000_compare_op0
,
8205 rs6000_compare_op1
);
8208 /* The MC8540 FP compare instructions set the CR bits
8209 differently than other PPC compare instructions. For
8210 that matter, there is no generic test instruction, but a
8211 testgt, testlt, and testeq. For a true condition, bit 2
8212 is set (x1xx) in the CR. Following the traditional CR
8218 ... bit 2 would be a GT CR alias, so later on we
8219 look in the GT bits for the branch instructins.
8220 However, we must be careful to emit correct RTL in
8221 the meantime, so optimizations don't get confused. */
8223 or1
= gen_rtx (NE
, SImode
, compare_result
, const0_rtx
);
8224 or2
= gen_rtx (NE
, SImode
, compare_result2
, const0_rtx
);
8226 /* OR them together. */
8227 cmp
= gen_rtx_SET (VOIDmode
, or_result
,
8228 gen_rtx_COMPARE (CCEQmode
,
8229 gen_rtx_IOR (SImode
, or1
, or2
),
8231 compare_result
= or_result
;
8236 /* We only care about 1 bit (x1xx), so map everything to NE to
8237 maintain rtl sanity. We'll get to the right bit (x1xx) at
8238 code output time. */
8239 if (code
== NE
|| code
== LTGT
)
8240 /* Do the inverse here because we have no cmpne
8241 instruction. We use the cmpeq instruction and expect
8242 to get a 0 instead. */
8251 emit_insn (gen_rtx_SET (VOIDmode
, compare_result
,
8252 gen_rtx_COMPARE (comp_mode
,
8254 rs6000_compare_op1
)));
8256 /* Some kinds of FP comparisons need an OR operation;
8257 except for flag_unsafe_math_optimizations we don't bother. */
8258 if (rs6000_compare_fp_p
8259 && ! flag_unsafe_math_optimizations
8260 && ! (TARGET_HARD_FLOAT
&& TARGET_SPE
)
8261 && (code
== LE
|| code
== GE
8262 || code
== UNEQ
|| code
== LTGT
8263 || code
== UNGT
|| code
== UNLT
))
8265 enum rtx_code or1
, or2
;
8266 rtx or1_rtx
, or2_rtx
, compare2_rtx
;
8267 rtx or_result
= gen_reg_rtx (CCEQmode
);
8271 case LE
: or1
= LT
; or2
= EQ
; break;
8272 case GE
: or1
= GT
; or2
= EQ
; break;
8273 case UNEQ
: or1
= UNORDERED
; or2
= EQ
; break;
8274 case LTGT
: or1
= LT
; or2
= GT
; break;
8275 case UNGT
: or1
= UNORDERED
; or2
= GT
; break;
8276 case UNLT
: or1
= UNORDERED
; or2
= LT
; break;
8279 validate_condition_mode (or1
, comp_mode
);
8280 validate_condition_mode (or2
, comp_mode
);
8281 or1_rtx
= gen_rtx (or1
, SImode
, compare_result
, const0_rtx
);
8282 or2_rtx
= gen_rtx (or2
, SImode
, compare_result
, const0_rtx
);
8283 compare2_rtx
= gen_rtx_COMPARE (CCEQmode
,
8284 gen_rtx_IOR (SImode
, or1_rtx
, or2_rtx
),
8286 emit_insn (gen_rtx_SET (VOIDmode
, or_result
, compare2_rtx
));
8288 compare_result
= or_result
;
8292 validate_condition_mode (code
, GET_MODE (compare_result
));
8294 return gen_rtx (code
, VOIDmode
, compare_result
, const0_rtx
);
8298 /* Emit the RTL for an sCOND pattern. */
8301 rs6000_emit_sCOND (code
, result
)
8306 enum machine_mode op_mode
;
8308 condition_rtx
= rs6000_generate_compare (code
);
8310 op_mode
= GET_MODE (rs6000_compare_op0
);
8311 if (op_mode
== VOIDmode
)
8312 op_mode
= GET_MODE (rs6000_compare_op1
);
8314 if (TARGET_POWERPC64
&& (op_mode
== DImode
|| rs6000_compare_fp_p
))
8316 PUT_MODE (condition_rtx
, DImode
);
8317 convert_move (result
, condition_rtx
, 0);
8321 PUT_MODE (condition_rtx
, SImode
);
8322 emit_insn (gen_rtx_SET (VOIDmode
, result
, condition_rtx
));
8326 /* Emit a branch of kind CODE to location LOC. */
8329 rs6000_emit_cbranch (code
, loc
)
8333 rtx condition_rtx
, loc_ref
;
8335 condition_rtx
= rs6000_generate_compare (code
);
8336 loc_ref
= gen_rtx_LABEL_REF (VOIDmode
, loc
);
8337 emit_jump_insn (gen_rtx_SET (VOIDmode
, pc_rtx
,
8338 gen_rtx_IF_THEN_ELSE (VOIDmode
, condition_rtx
,
8342 /* Return the string to output a conditional branch to LABEL, which is
8343 the operand number of the label, or -1 if the branch is really a
8346 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
8347 condition code register and its mode specifies what kind of
8350 REVERSED is nonzero if we should reverse the sense of the comparison.
8352 INSN is the insn. */
8355 output_cbranch (op
, label
, reversed
, insn
)
8361 static char string
[64];
8362 enum rtx_code code
= GET_CODE (op
);
8363 rtx cc_reg
= XEXP (op
, 0);
8364 enum machine_mode mode
= GET_MODE (cc_reg
);
8365 int cc_regno
= REGNO (cc_reg
) - CR0_REGNO
;
8366 int need_longbranch
= label
!= NULL
&& get_attr_length (insn
) == 8;
8367 int really_reversed
= reversed
^ need_longbranch
;
8373 validate_condition_mode (code
, mode
);
8375 /* Work out which way this really branches. We could use
8376 reverse_condition_maybe_unordered here always but this
8377 makes the resulting assembler clearer. */
8378 if (really_reversed
)
8379 code
= rs6000_reverse_condition (mode
, code
);
8381 if ((TARGET_SPE
&& TARGET_HARD_FLOAT
) && mode
== CCFPmode
)
8383 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
8386 /* Opposite of GT. */
8388 else if (code
== NE
)
8396 /* Not all of these are actually distinct opcodes, but
8397 we distinguish them for clarity of the resulting assembler. */
8399 ccode
= "ne"; break;
8401 ccode
= "eq"; break;
8403 ccode
= "ge"; break;
8404 case GT
: case GTU
: case UNGT
:
8405 ccode
= "gt"; break;
8407 ccode
= "le"; break;
8408 case LT
: case LTU
: case UNLT
:
8409 ccode
= "lt"; break;
8410 case UNORDERED
: ccode
= "un"; break;
8411 case ORDERED
: ccode
= "nu"; break;
8412 case UNGE
: ccode
= "nl"; break;
8413 case UNLE
: ccode
= "ng"; break;
8418 /* Maybe we have a guess as to how likely the branch is.
8419 The old mnemonics don't have a way to specify this information. */
8421 note
= find_reg_note (insn
, REG_BR_PROB
, NULL_RTX
);
8422 if (note
!= NULL_RTX
)
8424 /* PROB is the difference from 50%. */
8425 int prob
= INTVAL (XEXP (note
, 0)) - REG_BR_PROB_BASE
/ 2;
8426 bool always_hint
= rs6000_cpu
!= PROCESSOR_POWER4
;
8428 /* Only hint for highly probable/improbable branches on newer
8429 cpus as static prediction overrides processor dynamic
8430 prediction. For older cpus we may as well always hint, but
8431 assume not taken for branches that are very close to 50% as a
8432 mispredicted taken branch is more expensive than a
8433 mispredicted not-taken branch. */
8435 || abs (prob
) > REG_BR_PROB_BASE
/ 100 * 48)
8437 if (abs (prob
) > REG_BR_PROB_BASE
/ 20
8438 && ((prob
> 0) ^ need_longbranch
))
8446 s
+= sprintf (s
, "{b%sr|b%slr%s} ", ccode
, ccode
, pred
);
8448 s
+= sprintf (s
, "{b%s|b%s%s} ", ccode
, ccode
, pred
);
8450 /* We need to escape any '%' characters in the reg_names string.
8451 Assume they'd only be the first character... */
8452 if (reg_names
[cc_regno
+ CR0_REGNO
][0] == '%')
8454 s
+= sprintf (s
, "%s", reg_names
[cc_regno
+ CR0_REGNO
]);
8458 /* If the branch distance was too far, we may have to use an
8459 unconditional branch to go the distance. */
8460 if (need_longbranch
)
8461 s
+= sprintf (s
, ",$+8\n\tb %s", label
);
8463 s
+= sprintf (s
, ",%s", label
);
8469 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
8470 operands of the last comparison is nonzero/true, FALSE_COND if it
8471 is zero/false. Return 0 if the hardware has no such operation. */
8474 rs6000_emit_cmove (dest
, op
, true_cond
, false_cond
)
8480 enum rtx_code code
= GET_CODE (op
);
8481 rtx op0
= rs6000_compare_op0
;
8482 rtx op1
= rs6000_compare_op1
;
8484 enum machine_mode compare_mode
= GET_MODE (op0
);
8485 enum machine_mode result_mode
= GET_MODE (dest
);
8488 /* These modes should always match. */
8489 if (GET_MODE (op1
) != compare_mode
8490 /* In the isel case however, we can use a compare immediate, so
8491 op1 may be a small constant. */
8492 && (!TARGET_ISEL
|| !short_cint_operand (op1
, VOIDmode
)))
8494 if (GET_MODE (true_cond
) != result_mode
)
8496 if (GET_MODE (false_cond
) != result_mode
)
8499 /* First, work out if the hardware can do this at all, or
8500 if it's too slow... */
8501 if (! rs6000_compare_fp_p
)
8504 return rs6000_emit_int_cmove (dest
, op
, true_cond
, false_cond
);
8508 /* Eliminate half of the comparisons by switching operands, this
8509 makes the remaining code simpler. */
8510 if (code
== UNLT
|| code
== UNGT
|| code
== UNORDERED
|| code
== NE
8511 || code
== LTGT
|| code
== LT
)
8513 code
= reverse_condition_maybe_unordered (code
);
8515 true_cond
= false_cond
;
8519 /* UNEQ and LTGT take four instructions for a comparison with zero,
8520 it'll probably be faster to use a branch here too. */
8524 if (GET_CODE (op1
) == CONST_DOUBLE
)
8525 REAL_VALUE_FROM_CONST_DOUBLE (c1
, op1
);
8527 /* We're going to try to implement comparions by performing
8528 a subtract, then comparing against zero. Unfortunately,
8529 Inf - Inf is NaN which is not zero, and so if we don't
8530 know that the operand is finite and the comparison
8531 would treat EQ different to UNORDERED, we can't do it. */
8532 if (! flag_unsafe_math_optimizations
8533 && code
!= GT
&& code
!= UNGE
8534 && (GET_CODE (op1
) != CONST_DOUBLE
|| real_isinf (&c1
))
8535 /* Constructs of the form (a OP b ? a : b) are safe. */
8536 && ((! rtx_equal_p (op0
, false_cond
) && ! rtx_equal_p (op1
, false_cond
))
8537 || (! rtx_equal_p (op0
, true_cond
)
8538 && ! rtx_equal_p (op1
, true_cond
))))
8540 /* At this point we know we can use fsel. */
8542 /* Reduce the comparison to a comparison against zero. */
8543 temp
= gen_reg_rtx (compare_mode
);
8544 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8545 gen_rtx_MINUS (compare_mode
, op0
, op1
)));
8547 op1
= CONST0_RTX (compare_mode
);
8549 /* If we don't care about NaNs we can reduce some of the comparisons
8550 down to faster ones. */
8551 if (flag_unsafe_math_optimizations
)
8557 true_cond
= false_cond
;
8570 /* Now, reduce everything down to a GE. */
8577 temp
= gen_reg_rtx (compare_mode
);
8578 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
8583 temp
= gen_reg_rtx (compare_mode
);
8584 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_ABS (compare_mode
, op0
)));
8589 temp
= gen_reg_rtx (compare_mode
);
8590 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8591 gen_rtx_NEG (compare_mode
,
8592 gen_rtx_ABS (compare_mode
, op0
))));
8597 temp
= gen_reg_rtx (result_mode
);
8598 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8599 gen_rtx_IF_THEN_ELSE (result_mode
,
8600 gen_rtx_GE (VOIDmode
,
8602 true_cond
, false_cond
)));
8604 true_cond
= false_cond
;
8606 temp
= gen_reg_rtx (compare_mode
);
8607 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
8612 temp
= gen_reg_rtx (result_mode
);
8613 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8614 gen_rtx_IF_THEN_ELSE (result_mode
,
8615 gen_rtx_GE (VOIDmode
,
8617 true_cond
, false_cond
)));
8619 false_cond
= true_cond
;
8621 temp
= gen_reg_rtx (compare_mode
);
8622 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
8630 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
8631 gen_rtx_IF_THEN_ELSE (result_mode
,
8632 gen_rtx_GE (VOIDmode
,
8634 true_cond
, false_cond
)));
8638 /* Same as above, but for ints (isel). */
8641 rs6000_emit_int_cmove (dest
, op
, true_cond
, false_cond
)
8647 rtx condition_rtx
, cr
;
8649 /* All isel implementations thus far are 32-bits. */
8650 if (GET_MODE (rs6000_compare_op0
) != SImode
)
8653 /* We still have to do the compare, because isel doesn't do a
8654 compare, it just looks at the CRx bits set by a previous compare
8656 condition_rtx
= rs6000_generate_compare (GET_CODE (op
));
8657 cr
= XEXP (condition_rtx
, 0);
8659 if (GET_MODE (cr
) == CCmode
)
8660 emit_insn (gen_isel_signed (dest
, condition_rtx
,
8661 true_cond
, false_cond
, cr
));
8663 emit_insn (gen_isel_unsigned (dest
, condition_rtx
,
8664 true_cond
, false_cond
, cr
));
8670 output_isel (operands
)
8675 code
= GET_CODE (operands
[1]);
8676 if (code
== GE
|| code
== GEU
|| code
== LE
|| code
== LEU
|| code
== NE
)
8678 PUT_CODE (operands
[1], reverse_condition (code
));
8679 return "isel %0,%3,%2,%j1";
8682 return "isel %0,%2,%3,%j1";
8686 rs6000_emit_minmax (dest
, code
, op0
, op1
)
8692 enum machine_mode mode
= GET_MODE (op0
);
8696 if (code
== SMAX
|| code
== SMIN
)
8701 if (code
== SMAX
|| code
== UMAX
)
8702 target
= emit_conditional_move (dest
, c
, op0
, op1
, mode
,
8705 target
= emit_conditional_move (dest
, c
, op0
, op1
, mode
,
8707 if (target
== NULL_RTX
)
8710 emit_move_insn (dest
, target
);
8713 /* This page contains routines that are used to determine what the
8714 function prologue and epilogue code will do and write them out. */
8716 /* Return the first fixed-point register that is required to be
8717 saved. 32 if none. */
8720 first_reg_to_save ()
8724 /* Find lowest numbered live register. */
8725 for (first_reg
= 13; first_reg
<= 31; first_reg
++)
8726 if (regs_ever_live
[first_reg
]
8727 && (! call_used_regs
[first_reg
]
8728 || (first_reg
== RS6000_PIC_OFFSET_TABLE_REGNUM
8729 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1)
8730 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
)))))
8735 && current_function_uses_pic_offset_table
8736 && first_reg
> RS6000_PIC_OFFSET_TABLE_REGNUM
)
8737 return RS6000_PIC_OFFSET_TABLE_REGNUM
;
8743 /* Similar, for FP regs. */
8746 first_fp_reg_to_save ()
8750 /* Find lowest numbered live register. */
8751 for (first_reg
= 14 + 32; first_reg
<= 63; first_reg
++)
8752 if (regs_ever_live
[first_reg
])
8758 /* Similar, for AltiVec regs. */
8761 first_altivec_reg_to_save ()
8765 /* Stack frame remains as is unless we are in AltiVec ABI. */
8766 if (! TARGET_ALTIVEC_ABI
)
8767 return LAST_ALTIVEC_REGNO
+ 1;
8769 /* Find lowest numbered live register. */
8770 for (i
= FIRST_ALTIVEC_REGNO
+ 20; i
<= LAST_ALTIVEC_REGNO
; ++i
)
8771 if (regs_ever_live
[i
])
8777 /* Return a 32-bit mask of the AltiVec registers we need to set in
8778 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
8779 the 32-bit word is 0. */
8782 compute_vrsave_mask ()
8784 unsigned int i
, mask
= 0;
8786 /* First, find out if we use _any_ altivec registers. */
8787 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
8788 if (regs_ever_live
[i
])
8789 mask
|= ALTIVEC_REG_BIT (i
);
8794 /* Next, add all registers that are call-clobbered. We do this
8795 because post-reload register optimizers such as regrename_optimize
8796 may choose to use them. They never change the register class
8797 chosen by reload, so cannot create new uses of altivec registers
8798 if there were none before, so the early exit above is safe. */
8799 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
8800 altivec registers not saved in the mask, which might well make the
8801 adjustments below more effective in eliding the save/restore of
8802 VRSAVE in small functions. */
8803 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
8804 if (call_used_regs
[i
])
8805 mask
|= ALTIVEC_REG_BIT (i
);
8807 /* Next, remove the argument registers from the set. These must
8808 be in the VRSAVE mask set by the caller, so we don't need to add
8809 them in again. More importantly, the mask we compute here is
8810 used to generate CLOBBERs in the set_vrsave insn, and we do not
8811 wish the argument registers to die. */
8812 for (i
= cfun
->args_info
.vregno
; i
>= ALTIVEC_ARG_MIN_REG
; --i
)
8813 mask
&= ~ALTIVEC_REG_BIT (i
);
8815 /* Similarly, remove the return value from the set. */
8818 diddle_return_value (is_altivec_return_reg
, &yes
);
8820 mask
&= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN
);
8827 is_altivec_return_reg (reg
, xyes
)
8831 bool *yes
= (bool *) xyes
;
8832 if (REGNO (reg
) == ALTIVEC_ARG_RETURN
)
8837 /* Calculate the stack information for the current function. This is
8838 complicated by having two separate calling sequences, the AIX calling
8839 sequence and the V.4 calling sequence.
8841 AIX (and Darwin/Mac OS X) stack frames look like:
8843 SP----> +---------------------------------------+
8844 | back chain to caller | 0 0
8845 +---------------------------------------+
8846 | saved CR | 4 8 (8-11)
8847 +---------------------------------------+
8849 +---------------------------------------+
8850 | reserved for compilers | 12 24
8851 +---------------------------------------+
8852 | reserved for binders | 16 32
8853 +---------------------------------------+
8854 | saved TOC pointer | 20 40
8855 +---------------------------------------+
8856 | Parameter save area (P) | 24 48
8857 +---------------------------------------+
8858 | Alloca space (A) | 24+P etc.
8859 +---------------------------------------+
8860 | Local variable space (L) | 24+P+A
8861 +---------------------------------------+
8862 | Float/int conversion temporary (X) | 24+P+A+L
8863 +---------------------------------------+
8864 | Save area for AltiVec registers (W) | 24+P+A+L+X
8865 +---------------------------------------+
8866 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
8867 +---------------------------------------+
8868 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
8869 +---------------------------------------+
8870 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
8871 +---------------------------------------+
8872 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
8873 +---------------------------------------+
8874 old SP->| back chain to caller's caller |
8875 +---------------------------------------+
8877 The required alignment for AIX configurations is two words (i.e., 8
8881 V.4 stack frames look like:
8883 SP----> +---------------------------------------+
8884 | back chain to caller | 0
8885 +---------------------------------------+
8886 | caller's saved LR | 4
8887 +---------------------------------------+
8888 | Parameter save area (P) | 8
8889 +---------------------------------------+
8890 | Alloca space (A) | 8+P
8891 +---------------------------------------+
8892 | Varargs save area (V) | 8+P+A
8893 +---------------------------------------+
8894 | Local variable space (L) | 8+P+A+V
8895 +---------------------------------------+
8896 | Float/int conversion temporary (X) | 8+P+A+V+L
8897 +---------------------------------------+
8898 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
8899 +---------------------------------------+
8900 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
8901 +---------------------------------------+
8902 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
8903 +---------------------------------------+
8904 | SPE: area for 64-bit GP registers |
8905 +---------------------------------------+
8906 | SPE alignment padding |
8907 +---------------------------------------+
8908 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
8909 +---------------------------------------+
8910 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
8911 +---------------------------------------+
8912 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
8913 +---------------------------------------+
8914 old SP->| back chain to caller's caller |
8915 +---------------------------------------+
8917 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
8918 given. (But note below and in sysv4.h that we require only 8 and
8919 may round up the size of our stack frame anyways. The historical
8920 reason is early versions of powerpc-linux which didn't properly
8921 align the stack at program startup. A happy side-effect is that
8922 -mno-eabi libraries can be used with -meabi programs.)
8924 The EABI configuration defaults to the V.4 layout, unless
8925 -mcall-aix is used, in which case the AIX layout is used. However,
8926 the stack alignment requirements may differ. If -mno-eabi is not
8927 given, the required stack alignment is 8 bytes; if -mno-eabi is
8928 given, the required alignment is 16 bytes. (But see V.4 comment
8931 #ifndef ABI_STACK_BOUNDARY
8932 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
8936 rs6000_stack_info ()
8938 static rs6000_stack_t info
, zero_info
;
8939 rs6000_stack_t
*info_ptr
= &info
;
8940 int reg_size
= TARGET_POWERPC64
? 8 : 4;
8941 enum rs6000_abi abi
;
8945 /* Zero all fields portably. */
8948 /* Select which calling sequence. */
8949 info_ptr
->abi
= abi
= DEFAULT_ABI
;
8951 /* Calculate which registers need to be saved & save area size. */
8952 info_ptr
->first_gp_reg_save
= first_reg_to_save ();
8953 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
8954 even if it currently looks like we won't. */
8955 if (((TARGET_TOC
&& TARGET_MINIMAL_TOC
)
8956 || (flag_pic
== 1 && abi
== ABI_V4
)
8957 || (flag_pic
&& abi
== ABI_DARWIN
))
8958 && info_ptr
->first_gp_reg_save
> RS6000_PIC_OFFSET_TABLE_REGNUM
)
8959 info_ptr
->gp_size
= reg_size
* (32 - RS6000_PIC_OFFSET_TABLE_REGNUM
);
8961 info_ptr
->gp_size
= reg_size
* (32 - info_ptr
->first_gp_reg_save
);
8963 /* For the SPE, we have an additional upper 32-bits on each GPR.
8964 Ideally we should save the entire 64-bits only when the upper
8965 half is used in SIMD instructions. Since we only record
8966 registers live (not the size they are used in), this proves
8967 difficult because we'd have to traverse the instruction chain at
8968 the right time, taking reload into account. This is a real pain,
8969 so we opt to save the GPRs in 64-bits always. Anyone overly
8970 concerned with frame size can fix this. ;-).
8972 So... since we save all GPRs (except the SP) in 64-bits, the
8973 traditional GP save area will be empty. */
8975 info_ptr
->gp_size
= 0;
8977 info_ptr
->first_fp_reg_save
= first_fp_reg_to_save ();
8978 info_ptr
->fp_size
= 8 * (64 - info_ptr
->first_fp_reg_save
);
8980 info_ptr
->first_altivec_reg_save
= first_altivec_reg_to_save ();
8981 info_ptr
->altivec_size
= 16 * (LAST_ALTIVEC_REGNO
+ 1
8982 - info_ptr
->first_altivec_reg_save
);
8984 /* Does this function call anything? */
8985 info_ptr
->calls_p
= (! current_function_is_leaf
8986 || cfun
->machine
->ra_needs_full_frame
);
8988 /* Determine if we need to save the link register. */
8989 if (rs6000_ra_ever_killed ()
8990 || (DEFAULT_ABI
== ABI_AIX
&& current_function_profile
)
8991 #ifdef TARGET_RELOCATABLE
8992 || (TARGET_RELOCATABLE
&& (get_pool_size () != 0))
8994 || (info_ptr
->first_fp_reg_save
!= 64
8995 && !FP_SAVE_INLINE (info_ptr
->first_fp_reg_save
))
8996 || info_ptr
->first_altivec_reg_save
<= LAST_ALTIVEC_REGNO
8997 || (abi
== ABI_V4
&& current_function_calls_alloca
)
8998 || (DEFAULT_ABI
== ABI_DARWIN
9000 && current_function_uses_pic_offset_table
)
9001 || info_ptr
->calls_p
)
9003 info_ptr
->lr_save_p
= 1;
9004 regs_ever_live
[LINK_REGISTER_REGNUM
] = 1;
9007 /* Determine if we need to save the condition code registers. */
9008 if (regs_ever_live
[CR2_REGNO
]
9009 || regs_ever_live
[CR3_REGNO
]
9010 || regs_ever_live
[CR4_REGNO
])
9012 info_ptr
->cr_save_p
= 1;
9014 info_ptr
->cr_size
= reg_size
;
9017 /* If the current function calls __builtin_eh_return, then we need
9018 to allocate stack space for registers that will hold data for
9019 the exception handler. */
9020 if (current_function_calls_eh_return
)
9023 for (i
= 0; EH_RETURN_DATA_REGNO (i
) != INVALID_REGNUM
; ++i
)
9026 /* SPE saves EH registers in 64-bits. */
9027 ehrd_size
= i
* (TARGET_SPE_ABI
? UNITS_PER_SPE_WORD
: UNITS_PER_WORD
);
9032 /* Determine various sizes. */
9033 info_ptr
->reg_size
= reg_size
;
9034 info_ptr
->fixed_size
= RS6000_SAVE_AREA
;
9035 info_ptr
->varargs_size
= RS6000_VARARGS_AREA
;
9036 info_ptr
->vars_size
= RS6000_ALIGN (get_frame_size (), 8);
9037 info_ptr
->parm_size
= RS6000_ALIGN (current_function_outgoing_args_size
,
9041 info_ptr
->spe_gp_size
= 8 * (32 - info_ptr
->first_gp_reg_save
);
9043 info_ptr
->spe_gp_size
= 0;
9045 if (TARGET_ALTIVEC_ABI
&& TARGET_ALTIVEC_VRSAVE
)
9047 info_ptr
->vrsave_mask
= compute_vrsave_mask ();
9048 info_ptr
->vrsave_size
= info_ptr
->vrsave_mask
? 4 : 0;
9052 info_ptr
->vrsave_mask
= 0;
9053 info_ptr
->vrsave_size
= 0;
9056 /* Calculate the offsets. */
9064 case ABI_AIX_NODESC
:
9066 info_ptr
->fp_save_offset
= - info_ptr
->fp_size
;
9067 info_ptr
->gp_save_offset
= info_ptr
->fp_save_offset
- info_ptr
->gp_size
;
9069 if (TARGET_ALTIVEC_ABI
)
9071 info_ptr
->vrsave_save_offset
9072 = info_ptr
->gp_save_offset
- info_ptr
->vrsave_size
;
9074 /* Align stack so vector save area is on a quadword boundary. */
9075 if (info_ptr
->altivec_size
!= 0)
9076 info_ptr
->altivec_padding_size
9077 = 16 - (-info_ptr
->vrsave_save_offset
% 16);
9079 info_ptr
->altivec_padding_size
= 0;
9081 info_ptr
->altivec_save_offset
9082 = info_ptr
->vrsave_save_offset
9083 - info_ptr
->altivec_padding_size
9084 - info_ptr
->altivec_size
;
9086 /* Adjust for AltiVec case. */
9087 info_ptr
->ehrd_offset
= info_ptr
->altivec_save_offset
- ehrd_size
;
9090 info_ptr
->ehrd_offset
= info_ptr
->gp_save_offset
- ehrd_size
;
9091 info_ptr
->cr_save_offset
= reg_size
; /* first word when 64-bit. */
9092 info_ptr
->lr_save_offset
= 2*reg_size
;
9096 info_ptr
->fp_save_offset
= - info_ptr
->fp_size
;
9097 info_ptr
->gp_save_offset
= info_ptr
->fp_save_offset
- info_ptr
->gp_size
;
9098 info_ptr
->cr_save_offset
= info_ptr
->gp_save_offset
- info_ptr
->cr_size
;
9102 /* Align stack so SPE GPR save area is aligned on a
9103 double-word boundary. */
9104 if (info_ptr
->spe_gp_size
!= 0)
9105 info_ptr
->spe_padding_size
9106 = 8 - (-info_ptr
->cr_save_offset
% 8);
9108 info_ptr
->spe_padding_size
= 0;
9110 info_ptr
->spe_gp_save_offset
9111 = info_ptr
->cr_save_offset
9112 - info_ptr
->spe_padding_size
9113 - info_ptr
->spe_gp_size
;
9115 /* Adjust for SPE case. */
9116 info_ptr
->toc_save_offset
9117 = info_ptr
->spe_gp_save_offset
- info_ptr
->toc_size
;
9119 else if (TARGET_ALTIVEC_ABI
)
9121 info_ptr
->vrsave_save_offset
9122 = info_ptr
->cr_save_offset
- info_ptr
->vrsave_size
;
9124 /* Align stack so vector save area is on a quadword boundary. */
9125 if (info_ptr
->altivec_size
!= 0)
9126 info_ptr
->altivec_padding_size
9127 = 16 - (-info_ptr
->vrsave_save_offset
% 16);
9129 info_ptr
->altivec_padding_size
= 0;
9131 info_ptr
->altivec_save_offset
9132 = info_ptr
->vrsave_save_offset
9133 - info_ptr
->altivec_padding_size
9134 - info_ptr
->altivec_size
;
9136 /* Adjust for AltiVec case. */
9137 info_ptr
->toc_save_offset
9138 = info_ptr
->altivec_save_offset
- info_ptr
->toc_size
;
9141 info_ptr
->toc_save_offset
= info_ptr
->cr_save_offset
- info_ptr
->toc_size
;
9142 info_ptr
->ehrd_offset
= info_ptr
->toc_save_offset
- ehrd_size
;
9143 info_ptr
->lr_save_offset
= reg_size
;
9147 info_ptr
->save_size
= RS6000_ALIGN (info_ptr
->fp_size
9149 + info_ptr
->altivec_size
9150 + info_ptr
->altivec_padding_size
9151 + info_ptr
->vrsave_size
9152 + info_ptr
->spe_gp_size
9153 + info_ptr
->spe_padding_size
9157 + info_ptr
->vrsave_size
9158 + info_ptr
->toc_size
,
9159 (TARGET_ALTIVEC_ABI
|| ABI_DARWIN
)
9162 total_raw_size
= (info_ptr
->vars_size
9163 + info_ptr
->parm_size
9164 + info_ptr
->save_size
9165 + info_ptr
->varargs_size
9166 + info_ptr
->fixed_size
);
9168 info_ptr
->total_size
=
9169 RS6000_ALIGN (total_raw_size
, ABI_STACK_BOUNDARY
/ BITS_PER_UNIT
);
9171 /* Determine if we need to allocate any stack frame:
9173 For AIX we need to push the stack if a frame pointer is needed
9174 (because the stack might be dynamically adjusted), if we are
9175 debugging, if we make calls, or if the sum of fp_save, gp_save,
9176 and local variables are more than the space needed to save all
9177 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
9178 + 18*8 = 288 (GPR13 reserved).
9180 For V.4 we don't have the stack cushion that AIX uses, but assume
9181 that the debugger can handle stackless frames. */
9183 if (info_ptr
->calls_p
)
9184 info_ptr
->push_p
= 1;
9186 else if (abi
== ABI_V4
)
9187 info_ptr
->push_p
= total_raw_size
> info_ptr
->fixed_size
;
9190 info_ptr
->push_p
= (frame_pointer_needed
9191 || (abi
!= ABI_DARWIN
&& write_symbols
!= NO_DEBUG
)
9192 || ((total_raw_size
- info_ptr
->fixed_size
)
9193 > (TARGET_32BIT
? 220 : 288)));
9195 /* Zero offsets if we're not saving those registers. */
9196 if (info_ptr
->fp_size
== 0)
9197 info_ptr
->fp_save_offset
= 0;
9199 if (info_ptr
->gp_size
== 0)
9200 info_ptr
->gp_save_offset
= 0;
9202 if (! TARGET_ALTIVEC_ABI
|| info_ptr
->altivec_size
== 0)
9203 info_ptr
->altivec_save_offset
= 0;
9205 if (! TARGET_ALTIVEC_ABI
|| info_ptr
->vrsave_mask
== 0)
9206 info_ptr
->vrsave_save_offset
= 0;
9208 if (! TARGET_SPE_ABI
|| info_ptr
->spe_gp_size
== 0)
9209 info_ptr
->spe_gp_save_offset
= 0;
9211 if (! info_ptr
->lr_save_p
)
9212 info_ptr
->lr_save_offset
= 0;
9214 if (! info_ptr
->cr_save_p
)
9215 info_ptr
->cr_save_offset
= 0;
9217 if (! info_ptr
->toc_save_p
)
9218 info_ptr
->toc_save_offset
= 0;
9224 debug_stack_info (info
)
9225 rs6000_stack_t
*info
;
9227 const char *abi_string
;
9230 info
= rs6000_stack_info ();
9232 fprintf (stderr
, "\nStack information for function %s:\n",
9233 ((current_function_decl
&& DECL_NAME (current_function_decl
))
9234 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl
))
9239 default: abi_string
= "Unknown"; break;
9240 case ABI_NONE
: abi_string
= "NONE"; break;
9242 case ABI_AIX_NODESC
: abi_string
= "AIX"; break;
9243 case ABI_DARWIN
: abi_string
= "Darwin"; break;
9244 case ABI_V4
: abi_string
= "V.4"; break;
9247 fprintf (stderr
, "\tABI = %5s\n", abi_string
);
9249 if (TARGET_ALTIVEC_ABI
)
9250 fprintf (stderr
, "\tALTIVEC ABI extensions enabled.\n");
9253 fprintf (stderr
, "\tSPE ABI extensions enabled.\n");
9255 if (info
->first_gp_reg_save
!= 32)
9256 fprintf (stderr
, "\tfirst_gp_reg_save = %5d\n", info
->first_gp_reg_save
);
9258 if (info
->first_fp_reg_save
!= 64)
9259 fprintf (stderr
, "\tfirst_fp_reg_save = %5d\n", info
->first_fp_reg_save
);
9261 if (info
->first_altivec_reg_save
<= LAST_ALTIVEC_REGNO
)
9262 fprintf (stderr
, "\tfirst_altivec_reg_save = %5d\n",
9263 info
->first_altivec_reg_save
);
9265 if (info
->lr_save_p
)
9266 fprintf (stderr
, "\tlr_save_p = %5d\n", info
->lr_save_p
);
9268 if (info
->cr_save_p
)
9269 fprintf (stderr
, "\tcr_save_p = %5d\n", info
->cr_save_p
);
9271 if (info
->toc_save_p
)
9272 fprintf (stderr
, "\ttoc_save_p = %5d\n", info
->toc_save_p
);
9274 if (info
->vrsave_mask
)
9275 fprintf (stderr
, "\tvrsave_mask = 0x%x\n", info
->vrsave_mask
);
9278 fprintf (stderr
, "\tpush_p = %5d\n", info
->push_p
);
9281 fprintf (stderr
, "\tcalls_p = %5d\n", info
->calls_p
);
9283 if (info
->gp_save_offset
)
9284 fprintf (stderr
, "\tgp_save_offset = %5d\n", info
->gp_save_offset
);
9286 if (info
->fp_save_offset
)
9287 fprintf (stderr
, "\tfp_save_offset = %5d\n", info
->fp_save_offset
);
9289 if (info
->altivec_save_offset
)
9290 fprintf (stderr
, "\taltivec_save_offset = %5d\n",
9291 info
->altivec_save_offset
);
9293 if (info
->spe_gp_save_offset
)
9294 fprintf (stderr
, "\tspe_gp_save_offset = %5d\n",
9295 info
->spe_gp_save_offset
);
9297 if (info
->vrsave_save_offset
)
9298 fprintf (stderr
, "\tvrsave_save_offset = %5d\n",
9299 info
->vrsave_save_offset
);
9301 if (info
->lr_save_offset
)
9302 fprintf (stderr
, "\tlr_save_offset = %5d\n", info
->lr_save_offset
);
9304 if (info
->cr_save_offset
)
9305 fprintf (stderr
, "\tcr_save_offset = %5d\n", info
->cr_save_offset
);
9307 if (info
->toc_save_offset
)
9308 fprintf (stderr
, "\ttoc_save_offset = %5d\n", info
->toc_save_offset
);
9310 if (info
->varargs_save_offset
)
9311 fprintf (stderr
, "\tvarargs_save_offset = %5d\n", info
->varargs_save_offset
);
9313 if (info
->total_size
)
9314 fprintf (stderr
, "\ttotal_size = %5d\n", info
->total_size
);
9316 if (info
->varargs_size
)
9317 fprintf (stderr
, "\tvarargs_size = %5d\n", info
->varargs_size
);
9319 if (info
->vars_size
)
9320 fprintf (stderr
, "\tvars_size = %5d\n", info
->vars_size
);
9322 if (info
->parm_size
)
9323 fprintf (stderr
, "\tparm_size = %5d\n", info
->parm_size
);
9325 if (info
->fixed_size
)
9326 fprintf (stderr
, "\tfixed_size = %5d\n", info
->fixed_size
);
9329 fprintf (stderr
, "\tgp_size = %5d\n", info
->gp_size
);
9331 if (info
->spe_gp_size
)
9332 fprintf (stderr
, "\tspe_gp_size = %5d\n", info
->spe_gp_size
);
9335 fprintf (stderr
, "\tfp_size = %5d\n", info
->fp_size
);
9337 if (info
->altivec_size
)
9338 fprintf (stderr
, "\taltivec_size = %5d\n", info
->altivec_size
);
9340 if (info
->vrsave_size
)
9341 fprintf (stderr
, "\tvrsave_size = %5d\n", info
->vrsave_size
);
9343 if (info
->altivec_padding_size
)
9344 fprintf (stderr
, "\taltivec_padding_size= %5d\n",
9345 info
->altivec_padding_size
);
9347 if (info
->spe_padding_size
)
9348 fprintf (stderr
, "\tspe_padding_size = %5d\n",
9349 info
->spe_padding_size
);
9352 fprintf (stderr
, "\tlr_size = %5d\n", info
->lr_size
);
9355 fprintf (stderr
, "\tcr_size = %5d\n", info
->cr_size
);
9358 fprintf (stderr
, "\ttoc_size = %5d\n", info
->toc_size
);
9360 if (info
->save_size
)
9361 fprintf (stderr
, "\tsave_size = %5d\n", info
->save_size
);
9363 if (info
->reg_size
!= 4)
9364 fprintf (stderr
, "\treg_size = %5d\n", info
->reg_size
);
9366 fprintf (stderr
, "\n");
9370 rs6000_return_addr (count
, frame
)
9374 /* Currently we don't optimize very well between prolog and body
9375 code and for PIC code the code can be actually quite bad, so
9376 don't try to be too clever here. */
9377 if (count
!= 0 || flag_pic
!= 0)
9379 cfun
->machine
->ra_needs_full_frame
= 1;
9386 plus_constant (copy_to_reg
9387 (gen_rtx_MEM (Pmode
,
9388 memory_address (Pmode
, frame
))),
9389 RETURN_ADDRESS_OFFSET
)));
9392 return get_hard_reg_initial_val (Pmode
, LINK_REGISTER_REGNUM
);
9395 /* Say whether a function is a candidate for sibcall handling or not.
9396 We do not allow indirect calls to be optimized into sibling calls.
9397 Also, we can't do it if there are any vector parameters; there's
9398 nowhere to put the VRsave code so it works; note that functions with
9399 vector parameters are required to have a prototype, so the argument
9400 type info must be available here. (The tail recursion case can work
9401 with vector parameters, but there's no way to distinguish here.) */
9403 function_ok_for_sibcall (fndecl
)
9409 if (TARGET_ALTIVEC_VRSAVE
)
9411 for (type
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
9412 type
; type
= TREE_CHAIN (type
))
9414 if (TREE_CODE (TREE_VALUE (type
)) == VECTOR_TYPE
)
9418 if (DEFAULT_ABI
== ABI_DARWIN
9419 || (*targetm
.binds_local_p
) (fndecl
))
9421 tree attr_list
= TYPE_ATTRIBUTES (TREE_TYPE (fndecl
));
9423 if (!lookup_attribute ("longcall", attr_list
)
9424 || lookup_attribute ("shortcall", attr_list
))
9431 /* function rewritten to handle sibcalls */
9433 rs6000_ra_ever_killed ()
9439 #ifdef ASM_OUTPUT_MI_THUNK
9440 if (current_function_is_thunk
)
9443 /* regs_ever_live has LR marked as used if any sibcalls
9444 are present. Which it is, but this should not force
9445 saving and restoring in the prologue/epilog. Likewise,
9446 reg_set_between_p thinks a sibcall clobbers LR, so
9447 that is inappropriate. */
9448 /* Also, the prologue can generate a store into LR that
9449 doesn't really count, like this:
9451 bcl to set PIC register
9454 When we're called from the epilog, we need to avoid counting
9455 this as a store; thus we ignore any insns with a REG_MAYBE_DEAD note. */
9457 push_topmost_sequence ();
9459 pop_topmost_sequence ();
9460 reg
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
9462 for (insn
= NEXT_INSN (top
); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
9466 if (FIND_REG_INC_NOTE (insn
, reg
))
9468 else if (GET_CODE (insn
) == CALL_INSN
9469 && !SIBLING_CALL_P (insn
))
9471 else if (set_of (reg
, insn
) != NULL_RTX
9472 && find_reg_note (insn
, REG_MAYBE_DEAD
, NULL_RTX
) == 0)
9479 /* Add a REG_MAYBE_DEAD note to the insn. */
9481 rs6000_maybe_dead (insn
)
9484 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
,
9489 /* Emit instructions needed to load the TOC register.
9490 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9491 a constant pool; or for SVR4 -fpic. */
9494 rs6000_emit_load_toc_table (fromprolog
)
9498 dest
= gen_rtx_REG (Pmode
, RS6000_PIC_OFFSET_TABLE_REGNUM
);
9500 if (TARGET_ELF
&& DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1)
9502 rtx temp
= (fromprolog
9503 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
9504 : gen_reg_rtx (Pmode
));
9505 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp
)));
9506 rs6000_maybe_dead (emit_move_insn (dest
, temp
));
9508 else if (TARGET_ELF
&& DEFAULT_ABI
!= ABI_AIX
&& flag_pic
== 2)
9511 rtx tempLR
= (fromprolog
9512 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
9513 : gen_reg_rtx (Pmode
));
9514 rtx temp0
= (fromprolog
9515 ? gen_rtx_REG (Pmode
, 0)
9516 : gen_reg_rtx (Pmode
));
9519 /* possibly create the toc section */
9520 if (! toc_initialized
)
9523 function_section (current_function_decl
);
9530 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCF", rs6000_pic_labelno
);
9531 symF
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9533 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCL", rs6000_pic_labelno
);
9534 symL
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9536 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR
,
9538 rs6000_maybe_dead (emit_move_insn (dest
, tempLR
));
9539 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0
, dest
,
9546 static int reload_toc_labelno
= 0;
9548 tocsym
= gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
);
9550 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCG", reload_toc_labelno
++);
9551 symF
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9553 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR
,
9556 rs6000_maybe_dead (emit_move_insn (dest
, tempLR
));
9557 rs6000_maybe_dead (emit_move_insn (temp0
,
9558 gen_rtx_MEM (Pmode
, dest
)));
9560 rs6000_maybe_dead (emit_insn (gen_addsi3 (dest
, temp0
, dest
)));
9562 else if (TARGET_ELF
&& !TARGET_AIX
&& flag_pic
== 0 && TARGET_MINIMAL_TOC
)
9564 /* This is for AIX code running in non-PIC ELF32. */
9567 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCTOC", 1);
9568 realsym
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9570 rs6000_maybe_dead (emit_insn (gen_elf_high (dest
, realsym
)));
9571 rs6000_maybe_dead (emit_insn (gen_elf_low (dest
, dest
, realsym
)));
9573 else if (DEFAULT_ABI
== ABI_AIX
)
9576 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest
)));
9578 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest
)));
9585 get_TOC_alias_set ()
9587 static int set
= -1;
9589 set
= new_alias_set ();
9593 /* This retuns nonzero if the current function uses the TOC. This is
9594 determined by the presence of (unspec ... 7), which is generated by
9595 the various load_toc_* patterns. */
9602 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
9605 rtx pat
= PATTERN (insn
);
9608 if (GET_CODE (pat
) == PARALLEL
)
9609 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
9610 if (GET_CODE (XVECEXP (PATTERN (insn
), 0, i
)) == UNSPEC
9611 && XINT (XVECEXP (PATTERN (insn
), 0, i
), 1) == 7)
9618 create_TOC_reference (symbol
)
9621 return gen_rtx_PLUS (Pmode
,
9622 gen_rtx_REG (Pmode
, TOC_REGISTER
),
9623 gen_rtx_CONST (Pmode
,
9624 gen_rtx_MINUS (Pmode
, symbol
,
9625 gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
))));
9629 /* __throw will restore its own return address to be the same as the
9630 return address of the function that the throw is being made to.
9631 This is unfortunate, because we want to check the original
9632 return address to see if we need to restore the TOC.
9633 So we have to squirrel it away here.
9634 This is used only in compiling __throw and __rethrow.
9636 Most of this code should be removed by CSE. */
9637 static rtx insn_after_throw
;
9639 /* This does the saving... */
9641 rs6000_aix_emit_builtin_unwind_init ()
9644 rtx stack_top
= gen_reg_rtx (Pmode
);
9645 rtx opcode_addr
= gen_reg_rtx (Pmode
);
9647 insn_after_throw
= gen_reg_rtx (SImode
);
9649 mem
= gen_rtx_MEM (Pmode
, hard_frame_pointer_rtx
);
9650 emit_move_insn (stack_top
, mem
);
9652 mem
= gen_rtx_MEM (Pmode
,
9653 gen_rtx_PLUS (Pmode
, stack_top
,
9654 GEN_INT (2 * GET_MODE_SIZE (Pmode
))));
9655 emit_move_insn (opcode_addr
, mem
);
9656 emit_move_insn (insn_after_throw
, gen_rtx_MEM (SImode
, opcode_addr
));
9659 /* Emit insns to _restore_ the TOC register, at runtime (specifically
9660 in _eh.o). Only used on AIX.
9662 The idea is that on AIX, function calls look like this:
9663 bl somefunction-trampoline
9667 somefunction-trampoline:
9669 ... load function address in the count register ...
9671 or like this, if the linker determines that this is not a cross-module call
9672 and so the TOC need not be restored:
9675 or like this, if the compiler could determine that this is not a
9678 now, the tricky bit here is that register 2 is saved and restored
9679 by the _linker_, so we can't readily generate debugging information
9680 for it. So we need to go back up the call chain looking at the
9681 insns at return addresses to see which calls saved the TOC register
9682 and so see where it gets restored from.
9684 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
9685 just before the actual epilogue.
9687 On the bright side, this incurs no space or time overhead unless an
9688 exception is thrown, except for the extra code in libgcc.a.
9690 The parameter STACKSIZE is a register containing (at runtime)
9691 the amount to be popped off the stack in addition to the stack frame
9692 of this routine (which will be __throw or __rethrow, and so is
9693 guaranteed to have a stack frame). */
9696 rs6000_emit_eh_toc_restore (stacksize
)
9700 rtx bottom_of_stack
= gen_reg_rtx (Pmode
);
9701 rtx tocompare
= gen_reg_rtx (SImode
);
9702 rtx opcode
= gen_reg_rtx (SImode
);
9703 rtx opcode_addr
= gen_reg_rtx (Pmode
);
9705 rtx loop_start
= gen_label_rtx ();
9706 rtx no_toc_restore_needed
= gen_label_rtx ();
9707 rtx loop_exit
= gen_label_rtx ();
9709 mem
= gen_rtx_MEM (Pmode
, hard_frame_pointer_rtx
);
9710 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
9711 emit_move_insn (bottom_of_stack
, mem
);
9713 top_of_stack
= expand_binop (Pmode
, add_optab
,
9714 bottom_of_stack
, stacksize
,
9715 NULL_RTX
, 1, OPTAB_WIDEN
);
9717 emit_move_insn (tocompare
, gen_int_mode (TARGET_32BIT
? 0x80410014
9718 : 0xE8410028, SImode
));
9720 if (insn_after_throw
== NULL_RTX
)
9722 emit_move_insn (opcode
, insn_after_throw
);
9724 emit_note (NULL
, NOTE_INSN_LOOP_BEG
);
9725 emit_label (loop_start
);
9727 do_compare_rtx_and_jump (opcode
, tocompare
, NE
, 1,
9728 SImode
, NULL_RTX
, NULL_RTX
,
9729 no_toc_restore_needed
);
9731 mem
= gen_rtx_MEM (Pmode
,
9732 gen_rtx_PLUS (Pmode
, bottom_of_stack
,
9733 GEN_INT (5 * GET_MODE_SIZE (Pmode
))));
9734 emit_move_insn (gen_rtx_REG (Pmode
, 2), mem
);
9736 emit_label (no_toc_restore_needed
);
9737 do_compare_rtx_and_jump (top_of_stack
, bottom_of_stack
, EQ
, 1,
9738 Pmode
, NULL_RTX
, NULL_RTX
,
9741 mem
= gen_rtx_MEM (Pmode
, bottom_of_stack
);
9742 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
9743 emit_move_insn (bottom_of_stack
, mem
);
9745 mem
= gen_rtx_MEM (Pmode
,
9746 gen_rtx_PLUS (Pmode
, bottom_of_stack
,
9747 GEN_INT (2 * GET_MODE_SIZE (Pmode
))));
9748 emit_move_insn (opcode_addr
, mem
);
9749 emit_move_insn (opcode
, gen_rtx_MEM (SImode
, opcode_addr
));
9751 emit_note (NULL
, NOTE_INSN_LOOP_CONT
);
9752 emit_jump (loop_start
);
9753 emit_note (NULL
, NOTE_INSN_LOOP_END
);
9754 emit_label (loop_exit
);
9756 #endif /* TARGET_AIX */
9758 /* This ties together stack memory (MEM with an alias set of
9759 rs6000_sr_alias_set) and the change to the stack pointer. */
9762 rs6000_emit_stack_tie ()
9764 rtx mem
= gen_rtx_MEM (BLKmode
, gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
));
9766 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
9767 emit_insn (gen_stack_tie (mem
));
9770 /* Emit the correct code for allocating stack space, as insns.
9771 If COPY_R12, make sure a copy of the old frame is left in r12.
9772 The generated code may use hard register 0 as a temporary. */
/* NOTE(review): line-mangled extraction; several original lines of
   this function (braces, some call arguments, `else' keywords) were
   dropped, so comments below hedge where the missing text matters.  */
9775 rs6000_emit_allocate_stack (size
, copy_r12
)
/* r1 is the stack pointer; r0 is used as a scratch; TODEC is the
   (negative) adjustment to apply to the stack pointer.  */
9780 rtx stack_reg
= gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
9781 rtx tmp_reg
= gen_rtx_REG (Pmode
, 0);
9782 rtx todec
= GEN_INT (-size
);
/* -fstack-limit support: trap before the allocation would cross the
   limit.  Two forms are handled -- a limit held in a fixed GPR
   (r2..r31), and, on V.4 ELF, a SYMBOL_REF limit materialized with
   elf_high/elf_low.  Anything else only gets a warning.  */
9784 if (current_function_limit_stack
)
9786 if (REG_P (stack_limit_rtx
)
9787 && REGNO (stack_limit_rtx
) > 1
9788 && REGNO (stack_limit_rtx
) <= 31)
/* tmp_reg = stack_limit + size (argument tail lost in extraction),
   then trap if the current SP would fall below it.  */
9790 emit_insn (Pmode
== SImode
9791 ? gen_addsi3 (tmp_reg
,
9794 : gen_adddi3 (tmp_reg
,
9798 emit_insn (gen_cond_trap (LTU
, stack_reg
, tmp_reg
,
9801 else if (GET_CODE (stack_limit_rtx
) == SYMBOL_REF
9803 && DEFAULT_ABI
== ABI_V4
)
/* Build (const (plus limit_symbol size)) -- the PLUS operands were
   lost in extraction -- and load it via elf_high/elf_low.  */
9805 rtx toload
= gen_rtx_CONST (VOIDmode
,
9806 gen_rtx_PLUS (Pmode
,
9810 emit_insn (gen_elf_high (tmp_reg
, toload
));
9811 emit_insn (gen_elf_low (tmp_reg
, tmp_reg
, toload
));
9812 emit_insn (gen_cond_trap (LTU
, stack_reg
, tmp_reg
,
9816 warning ("stack limit expression is not supported");
/* Keep a copy of the incoming SP in r12 when the caller asked for it,
   or when no store-with-update insn is available (the back-chain must
   then be stored explicitly below, orig line 9847).  */
9819 if (copy_r12
|| ! TARGET_UPDATE
)
9820 emit_move_insn (gen_rtx_REG (Pmode
, 12), stack_reg
);
9826 /* Need a note here so that try_split doesn't get confused. */
9827 if (get_last_insn() == NULL_RTX
)
9828 emit_note (0, NOTE_INSN_DELETED
)
;
/* Presumably this path is for a TODEC too large for an immediate:
   load it into r0 and split the move -- TODO confirm, the guarding
   condition was lost in extraction.  */
9829 insn
= emit_move_insn (tmp_reg
, todec
);
9830 try_split (PATTERN (insn
), insn
, 0);
/* With TARGET_UPDATE: single store-with-update both stores the
   back chain and decrements r1 atomically.  */
9834 if (Pmode
== SImode
)
9835 insn
= emit_insn (gen_movsi_update (stack_reg
, stack_reg
,
9838 insn
= emit_insn (gen_movdi_update (stack_reg
, stack_reg
,
/* Without TARGET_UPDATE: adjust r1, then store the saved SP (r12)
   as the back chain at the new top of stack.  */
9843 if (Pmode
== SImode
)
9844 insn
= emit_insn (gen_addsi3 (stack_reg
, stack_reg
, todec
));
9846 insn
= emit_insn (gen_adddi3 (stack_reg
, stack_reg
, todec
));
9847 emit_move_insn (gen_rtx_MEM (Pmode
, stack_reg
),
9848 gen_rtx_REG (Pmode
, 12));
/* Mark the SP adjustment frame-related and attach a
   REG_FRAME_RELATED_EXPR note (sp = sp + offset) so dwarf2out can
   describe the CFA change; the EXPR_LIST tail was lost in
   extraction.  */
9851 RTX_FRAME_RELATED_P (insn
) = 1;
9853 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
9854 gen_rtx_SET (VOIDmode
, stack_reg
,
9855 gen_rtx_PLUS (Pmode
, stack_reg
,
9860 /* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
9863 (mem (plus (blah) (regXX)))
9867 (mem (plus (blah) (const VALUE_OF_REGXX))). */
/* NOTE(review): line-mangled extraction; the K&R parameter
   declarations and braces of this function were dropped.  INSN is an
   AltiVec register save emitted with a [reg+reg] address; REG is the
   index register and VAL the constant offset it holds.  */
9870 altivec_frame_fixup (insn
, reg
, val
)
/* Copy the insn pattern and substitute the known constant for the
   index register, so the unwind note shows a constant address.  */
9876 real
= copy_rtx (PATTERN (insn
));
9878 real
= replace_rtx (real
, reg
, GEN_INT (val
));
/* Attach the rewritten pattern as a REG_FRAME_RELATED_EXPR note
   (note tail lost in extraction) and mark the insn frame-related.  */
9880 RTX_FRAME_RELATED_P (insn
) = 1;
9881 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
9886 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
9887 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
9888 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
9889 deduce these equivalences by itself so it wasn't necessary to hold
9890 its hand so much. */
/* NOTE(review): line-mangled extraction; the parameter declarations,
   braces and a few statements of this function were dropped.  */
9893 rs6000_frame_related (insn
, reg
, val
, reg2
, rreg
)
9902 /* copy_rtx will not make unique copies of registers, so we need to
9903 ensure we don't have unwanted sharing here. */
/* Replace REG (and, in the second occurrence -- presumably guarded by
   a condition lost in extraction -- REG2/RREG) with fresh raw REG
   copies before pattern surgery.  TODO confirm against upstream.  */
9905 reg
= gen_raw_REG (GET_MODE (reg
), REGNO (reg
));
9908 reg
= gen_raw_REG (GET_MODE (reg
), REGNO (reg
));
9910 real
= copy_rtx (PATTERN (insn
));
9912 if (reg2
!= NULL_RTX
)
9913 real
= replace_rtx (real
, reg2
, rreg
);
/* Substitute (plus:P (reg 1) VAL) for REG -- the VAL operand of the
   PLUS was lost in extraction.  */
9915 real
= replace_rtx (real
, reg
,
9916 gen_rtx_PLUS (Pmode
, gen_rtx_REG (Pmode
,
9917 STACK_POINTER_REGNUM
),
9920 /* We expect that 'real' is either a SET or a PARALLEL containing
9921 SETs (and possibly other stuff). In a PARALLEL, all the SETs
9922 are important so they all have to be marked RTX_FRAME_RELATED_P. */
/* Single-SET case: simplify source, destination, and (for a MEM
   destination) its address.  The simplify results are only stored
   when non-NULL -- the guarding `if (temp)' lines were dropped by
   the extraction.  */
9924 if (GET_CODE (real
) == SET
)
9928 temp
= simplify_rtx (SET_SRC (set
));
9930 SET_SRC (set
) = temp
;
9931 temp
= simplify_rtx (SET_DEST (set
));
9933 SET_DEST (set
) = temp
;
9934 if (GET_CODE (SET_DEST (set
)) == MEM
)
9936 temp
= simplify_rtx (XEXP (SET_DEST (set
), 0));
9938 XEXP (SET_DEST (set
), 0) = temp
;
/* PARALLEL case: the same simplification applied to every SET
   element, each of which is then marked RTX_FRAME_RELATED_P.  */
9941 else if (GET_CODE (real
) == PARALLEL
)
9944 for (i
= 0; i
< XVECLEN (real
, 0); i
++)
9945 if (GET_CODE (XVECEXP (real
, 0, i
)) == SET
)
9947 rtx set
= XVECEXP (real
, 0, i
);
9949 temp
= simplify_rtx (SET_SRC (set
));
9951 SET_SRC (set
) = temp
;
9952 temp
= simplify_rtx (SET_DEST (set
));
9954 SET_DEST (set
) = temp
;
9955 if (GET_CODE (SET_DEST (set
)) == MEM
)
9957 temp
= simplify_rtx (XEXP (SET_DEST (set
), 0));
9959 XEXP (SET_DEST (set
), 0) = temp
;
9961 RTX_FRAME_RELATED_P (set
) = 1;
/* Finally attach the rewritten pattern as a REG_FRAME_RELATED_EXPR
   note (note tail lost in extraction) and mark the insn itself.  */
9967 RTX_FRAME_RELATED_P (insn
) = 1;
9968 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
9973 /* Returns an insn that has a vrsave set operation with the
9974 appropriate CLOBBERs. */
/* NOTE(review): line-mangled extraction; braces, the return
   statement and several operands were dropped.  REG holds the new
   VRSAVE value; EPILOGUEP is nonzero when called from the epilogue.  */
9977 generate_set_vrsave (reg
, info
, epiloguep
)
9979 rs6000_stack_t
*info
;
/* clobs[] collects the set-VRSAVE element plus one CLOBBER or
   USE/SET per AltiVec register in the mask.  */
9983 rtx insn
, clobs
[TOTAL_ALTIVEC_REGS
+ 1];
9984 rtx vrsave
= gen_rtx_REG (SImode
, VRSAVE_REGNO
);
/* First element: (set vrsave (unspec_volatile [reg vrsave] ...));
   the destination and the unspec number were lost in extraction.  */
9987 = gen_rtx_SET (VOIDmode
,
9989 gen_rtx_UNSPEC_VOLATILE (SImode
,
9990 gen_rtvec (2, reg
, vrsave
),
9995 /* We need to clobber the registers in the mask so the scheduler
9996 does not move sets to VRSAVE before sets of AltiVec registers.
9998 However, if the function receives nonlocal gotos, reload will set
9999 all call saved registers live. We will end up with:
10001 (set (reg 999) (mem))
10002 (parallel [ (set (reg vrsave) (unspec blah))
10003 (clobber (reg 999))])
10005 The clobber will cause the store into reg 999 to be dead, and
10006 flow will attempt to delete an epilogue insn. In this case, we
10007 need an unspec use/set of the register. */
10009 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
10010 if (info
->vrsave_mask
!= 0 && ALTIVEC_REG_BIT (i
) != 0)
/* In the prologue, or for call-used registers, a plain CLOBBER
   suffices...  */
10012 if (!epiloguep
|| call_used_regs
[i
])
10013 clobs
[nclobs
++] = gen_rtx_CLOBBER (VOIDmode
,
10014 gen_rtx_REG (V4SImode
, i
));
/* ...otherwise (epilogue, call-saved register) emit the unspec
   self-set described above; the SET destination was lost in
   extraction.  */
10017 rtx reg
= gen_rtx_REG (V4SImode
, i
);
10020 = gen_rtx_SET (VOIDmode
,
10022 gen_rtx_UNSPEC (V4SImode
,
10023 gen_rtvec (1, reg
), 27));
/* Wrap everything in a PARALLEL and copy the collected elements in;
   presumably the PARALLEL is returned -- the return line was lost.  */
10027 insn
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (nclobs
));
10029 for (i
= 0; i
< nclobs
; ++i
)
10030 XVECEXP (insn
, 0, i
) = clobs
[i
];
10035 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
10036 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
/* NOTE(review): line-mangled extraction; braces and some K&R
   parameter declarations were dropped.  TOTAL_SIZE is the frame size
   passed through to rs6000_frame_related for the unwind note.  */
10039 emit_frame_save (frame_reg
, frame_ptr
, mode
, regno
, offset
, total_size
)
10042 enum machine_mode mode
;
10043 unsigned int regno
;
10047 rtx reg
, offset_rtx
, insn
, mem
, addr
, int_rtx
;
10048 rtx replacea
, replaceb
;
10050 int_rtx
= GEN_INT (offset
);
10052 /* Some cases that need register indexed addressing. */
/* AltiVec vectors always, and SPE vectors whose offset does not fit
   SPE's constant-offset form, must use [reg+reg] addressing; the
   condition's SPE half lost its leading `|| (TARGET_SPE_ABI' text in
   extraction.  */
10053 if ((TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
10055 && SPE_VECTOR_MODE (mode
)
10056 && !SPE_CONST_OFFSET_OK (offset
)))
10058 /* Whomever calls us must make sure r11 is available in the
10059 flow path of instructions in the prologue. */
/* Load the offset into r11 and remember to substitute the constant
   back for r11 in the frame-related note (replacea -> replaceb).  */
10060 offset_rtx
= gen_rtx_REG (Pmode
, 11);
10061 emit_move_insn (offset_rtx
, int_rtx
);
10063 replacea
= offset_rtx
;
10064 replaceb
= int_rtx
;
/* Plain case: constant offset directly, no substitution needed.  */
10068 offset_rtx
= int_rtx
;
10069 replacea
= NULL_RTX
;
10070 replaceb
= NULL_RTX
;
/* Do the store [frame_reg + offset] = reg, tagged with the
   stack-save alias set, and emit the unwind bookkeeping.  */
10073 reg
= gen_rtx_REG (mode
, regno
);
10074 addr
= gen_rtx_PLUS (Pmode
, frame_reg
, offset_rtx
);
10075 mem
= gen_rtx_MEM (mode
, addr
);
10076 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10078 insn
= emit_move_insn (mem
, reg
);
10080 rs6000_frame_related (insn
, frame_ptr
, total_size
, replacea
, replaceb
);
10083 /* Emit an offset memory reference suitable for a frame store, while
10084 converting to a valid addressing mode. */
/* NOTE(review): line-mangled extraction; the `rtx reg; int offset;'
   declarations and braces were dropped.  Returns (mem (plus reg
   offset)), with the offset forced into FIXED_SCRATCH for SPE vector
   modes, which require register-indexed addressing here.  */
10087 gen_frame_mem_offset (mode
, reg
, offset
)
10088 enum machine_mode mode
;
10092 rtx int_rtx
, offset_rtx
;
10094 int_rtx
= GEN_INT (offset
);
/* SPE vector mode: materialize the offset in the scratch register
   (this emits a move as a side effect).  */
10096 if (TARGET_SPE_ABI
&& SPE_VECTOR_MODE (mode
))
10098 offset_rtx
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
10099 emit_move_insn (offset_rtx
, int_rtx
);
/* Otherwise the constant offset is used directly.  */
10102 offset_rtx
= int_rtx
;
10104 return gen_rtx_MEM (mode
, gen_rtx_PLUS (Pmode
, reg
, offset_rtx
));
10107 /* Emit function prologue as insns. */
10110 rs6000_emit_prologue ()
10112 rs6000_stack_t
*info
= rs6000_stack_info ();
10113 enum machine_mode reg_mode
= TARGET_POWERPC64
? DImode
: SImode
;
10114 int reg_size
= TARGET_POWERPC64
? 8 : 4;
10115 rtx sp_reg_rtx
= gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
10116 rtx frame_ptr_rtx
= gen_rtx_REG (Pmode
, 12);
10117 rtx frame_reg_rtx
= sp_reg_rtx
;
10118 rtx cr_save_rtx
= NULL
;
10120 int saving_FPRs_inline
;
10121 int using_store_multiple
;
10122 HOST_WIDE_INT sp_offset
= 0;
10124 if (TARGET_SPE_ABI
)
10126 reg_mode
= V2SImode
;
10130 using_store_multiple
= (TARGET_MULTIPLE
&& ! TARGET_POWERPC64
10132 && info
->first_gp_reg_save
< 31);
10133 saving_FPRs_inline
= (info
->first_fp_reg_save
== 64
10134 || FP_SAVE_INLINE (info
->first_fp_reg_save
));
10136 /* For V.4, update stack before we do any saving and set back pointer. */
10137 if (info
->push_p
&& DEFAULT_ABI
== ABI_V4
)
10139 if (info
->total_size
< 32767)
10140 sp_offset
= info
->total_size
;
10142 frame_reg_rtx
= frame_ptr_rtx
;
10143 rs6000_emit_allocate_stack (info
->total_size
,
10144 (frame_reg_rtx
!= sp_reg_rtx
10145 && (info
->cr_save_p
10147 || info
->first_fp_reg_save
< 64
10148 || info
->first_gp_reg_save
< 32
10150 if (frame_reg_rtx
!= sp_reg_rtx
)
10151 rs6000_emit_stack_tie ();
10154 /* Save AltiVec registers if needed. */
10155 if (TARGET_ALTIVEC_ABI
&& info
->altivec_size
!= 0)
10159 /* There should be a non inline version of this, for when we
10160 are saving lots of vector registers. */
10161 for (i
= info
->first_altivec_reg_save
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
10162 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
10164 rtx areg
, savereg
, mem
;
10167 offset
= info
->altivec_save_offset
+ sp_offset
10168 + 16 * (i
- info
->first_altivec_reg_save
);
10170 savereg
= gen_rtx_REG (V4SImode
, i
);
10172 areg
= gen_rtx_REG (Pmode
, 0);
10173 emit_move_insn (areg
, GEN_INT (offset
));
10175 /* AltiVec addressing mode is [reg+reg]. */
10176 mem
= gen_rtx_MEM (V4SImode
,
10177 gen_rtx_PLUS (Pmode
, frame_reg_rtx
, areg
));
10179 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10181 insn
= emit_move_insn (mem
, savereg
);
10183 altivec_frame_fixup (insn
, areg
, offset
);
10187 /* VRSAVE is a bit vector representing which AltiVec registers
10188 are used. The OS uses this to determine which vector
10189 registers to save on a context switch. We need to save
10190 VRSAVE on the stack frame, add whatever AltiVec registers we
10191 used in this function, and do the corresponding magic in the
10194 if (TARGET_ALTIVEC
&& info
->vrsave_mask
!= 0)
10196 rtx reg
, mem
, vrsave
;
10199 /* Get VRSAVE onto a GPR. */
10200 reg
= gen_rtx_REG (SImode
, 12);
10201 vrsave
= gen_rtx_REG (SImode
, VRSAVE_REGNO
);
10203 emit_insn (gen_get_vrsave_internal (reg
));
10205 emit_insn (gen_rtx_SET (VOIDmode
, reg
, vrsave
));
10208 offset
= info
->vrsave_save_offset
+ sp_offset
;
10210 = gen_rtx_MEM (SImode
,
10211 gen_rtx_PLUS (Pmode
, frame_reg_rtx
, GEN_INT (offset
)));
10212 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10213 insn
= emit_move_insn (mem
, reg
);
10215 /* Include the registers in the mask. */
10216 emit_insn (gen_iorsi3 (reg
, reg
, GEN_INT ((int) info
->vrsave_mask
)));
10218 insn
= emit_insn (generate_set_vrsave (reg
, info
, 0));
10221 /* If we use the link register, get it into r0. */
10222 if (info
->lr_save_p
)
10223 emit_move_insn (gen_rtx_REG (Pmode
, 0),
10224 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
10226 /* If we need to save CR, put it into r12. */
10227 if (info
->cr_save_p
&& frame_reg_rtx
!= frame_ptr_rtx
)
10229 cr_save_rtx
= gen_rtx_REG (SImode
, 12);
10230 emit_insn (gen_movesi_from_cr (cr_save_rtx
));
10233 /* Do any required saving of fpr's. If only one or two to save, do
10234 it ourselves. Otherwise, call function. */
10235 if (saving_FPRs_inline
)
10238 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
10239 if ((regs_ever_live
[info
->first_fp_reg_save
+i
]
10240 && ! call_used_regs
[info
->first_fp_reg_save
+i
]))
10241 emit_frame_save (frame_reg_rtx
, frame_ptr_rtx
, DFmode
,
10242 info
->first_fp_reg_save
+ i
,
10243 info
->fp_save_offset
+ sp_offset
+ 8 * i
,
10246 else if (info
->first_fp_reg_save
!= 64)
10250 const char *alloc_rname
;
10252 p
= rtvec_alloc (2 + 64 - info
->first_fp_reg_save
);
10254 RTVEC_ELT (p
, 0) = gen_rtx_CLOBBER (VOIDmode
,
10255 gen_rtx_REG (Pmode
,
10256 LINK_REGISTER_REGNUM
));
10257 sprintf (rname
, "%s%d%s", SAVE_FP_PREFIX
,
10258 info
->first_fp_reg_save
- 32, SAVE_FP_SUFFIX
);
10259 alloc_rname
= ggc_strdup (rname
);
10260 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
,
10261 gen_rtx_SYMBOL_REF (Pmode
,
10263 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
10265 rtx addr
, reg
, mem
;
10266 reg
= gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
);
10267 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10268 GEN_INT (info
->fp_save_offset
10269 + sp_offset
+ 8*i
));
10270 mem
= gen_rtx_MEM (DFmode
, addr
);
10271 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10273 RTVEC_ELT (p
, i
+ 2) = gen_rtx_SET (VOIDmode
, mem
, reg
);
10275 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
10276 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10277 NULL_RTX
, NULL_RTX
);
10280 /* Save GPRs. This is done as a PARALLEL if we are using
10281 the store-multiple instructions. */
10282 if (using_store_multiple
)
10286 p
= rtvec_alloc (32 - info
->first_gp_reg_save
);
10287 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
10289 rtx addr
, reg
, mem
;
10290 reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
10291 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10292 GEN_INT (info
->gp_save_offset
10295 mem
= gen_rtx_MEM (reg_mode
, addr
);
10296 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10298 RTVEC_ELT (p
, i
) = gen_rtx_SET (VOIDmode
, mem
, reg
);
10300 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
10301 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10302 NULL_RTX
, NULL_RTX
);
10307 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
10308 if ((regs_ever_live
[info
->first_gp_reg_save
+i
]
10309 && ! call_used_regs
[info
->first_gp_reg_save
+i
])
10310 || (i
+info
->first_gp_reg_save
== RS6000_PIC_OFFSET_TABLE_REGNUM
10311 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1)
10312 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))))
10314 rtx addr
, reg
, mem
;
10315 reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
10317 if (TARGET_SPE_ABI
)
10319 int offset
= info
->spe_gp_save_offset
+ sp_offset
+ 8 * i
;
10322 if (!SPE_CONST_OFFSET_OK (offset
))
10324 b
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
10325 emit_move_insn (b
, GEN_INT (offset
));
10328 b
= GEN_INT (offset
);
10330 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, b
);
10331 mem
= gen_rtx_MEM (V2SImode
, addr
);
10332 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10333 insn
= emit_move_insn (mem
, reg
);
10335 if (GET_CODE (b
) == CONST_INT
)
10336 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10337 NULL_RTX
, NULL_RTX
);
10339 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10340 b
, GEN_INT (offset
));
10344 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10345 GEN_INT (info
->gp_save_offset
10348 mem
= gen_rtx_MEM (reg_mode
, addr
);
10349 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10351 insn
= emit_move_insn (mem
, reg
);
10352 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10353 NULL_RTX
, NULL_RTX
);
10358 /* ??? There's no need to emit actual instructions here, but it's the
10359 easiest way to get the frame unwind information emitted. */
10360 if (current_function_calls_eh_return
)
10362 unsigned int i
, regno
;
10366 regno
= EH_RETURN_DATA_REGNO (i
);
10367 if (regno
== INVALID_REGNUM
)
10370 emit_frame_save (frame_reg_rtx
, frame_ptr_rtx
, reg_mode
, regno
,
10371 info
->ehrd_offset
+ sp_offset
10372 + reg_size
* (int) i
,
10377 /* Save lr if we used it. */
10378 if (info
->lr_save_p
)
10380 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10381 GEN_INT (info
->lr_save_offset
+ sp_offset
));
10382 rtx reg
= gen_rtx_REG (Pmode
, 0);
10383 rtx mem
= gen_rtx_MEM (Pmode
, addr
);
10384 /* This should not be of rs6000_sr_alias_set, because of
10385 __builtin_return_address. */
10387 insn
= emit_move_insn (mem
, reg
);
10388 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10389 reg
, gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
10392 /* Save CR if we use any that must be preserved. */
10393 if (info
->cr_save_p
)
10395 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10396 GEN_INT (info
->cr_save_offset
+ sp_offset
));
10397 rtx mem
= gen_rtx_MEM (SImode
, addr
);
10399 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10401 /* If r12 was used to hold the original sp, copy cr into r0 now
10403 if (REGNO (frame_reg_rtx
) == 12)
10405 cr_save_rtx
= gen_rtx_REG (SImode
, 0);
10406 emit_insn (gen_movesi_from_cr (cr_save_rtx
));
10408 insn
= emit_move_insn (mem
, cr_save_rtx
);
10410 /* Now, there's no way that dwarf2out_frame_debug_expr is going
10411 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
10412 OK. All we have to do is specify that _one_ condition code
10413 register is saved in this stack slot. The thrower's epilogue
10414 will then restore all the call-saved registers.
10415 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
10416 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10417 cr_save_rtx
, gen_rtx_REG (SImode
, CR2_REGNO
));
10420 /* Update stack and set back pointer unless this is V.4,
10421 for which it was done previously. */
10422 if (info
->push_p
&& DEFAULT_ABI
!= ABI_V4
)
10423 rs6000_emit_allocate_stack (info
->total_size
, FALSE
);
10425 /* Set frame pointer, if needed. */
10426 if (frame_pointer_needed
)
10428 insn
= emit_move_insn (gen_rtx_REG (Pmode
, FRAME_POINTER_REGNUM
),
10430 RTX_FRAME_RELATED_P (insn
) = 1;
10433 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
10434 if ((TARGET_TOC
&& TARGET_MINIMAL_TOC
&& get_pool_size () != 0)
10435 || (DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1
10436 && regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
]))
10438 /* If emit_load_toc_table will use the link register, we need to save
10439 it. We use R11 for this purpose because emit_load_toc_table
10440 can use register 0. This allows us to use a plain 'blr' to return
10441 from the procedure more often. */
10442 int save_LR_around_toc_setup
= (TARGET_ELF
&& flag_pic
!= 0
10443 && ! info
->lr_save_p
10444 && EXIT_BLOCK_PTR
->pred
!= NULL
);
10445 if (save_LR_around_toc_setup
)
10446 emit_move_insn (gen_rtx_REG (Pmode
, 11),
10447 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
10449 rs6000_emit_load_toc_table (TRUE
);
10451 if (save_LR_around_toc_setup
)
10452 emit_move_insn (gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
),
10453 gen_rtx_REG (Pmode
, 11));
10456 if (DEFAULT_ABI
== ABI_DARWIN
10457 && flag_pic
&& current_function_uses_pic_offset_table
)
10459 rtx dest
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
10461 char *picbase
= machopic_function_base_name ();
10462 rtx src
= gen_rtx_SYMBOL_REF (Pmode
, ggc_alloc_string (picbase
, -1));
10464 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest
, src
)));
10467 rs6000_maybe_dead (
10468 emit_move_insn (gen_rtx_REG (Pmode
, RS6000_PIC_OFFSET_TABLE_REGNUM
),
10469 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)));
10473 /* Write function prologue. */
/* NOTE(review): line-mangled extraction; braces, the FILE *file
   parameter declaration and a few lines were dropped.  This is the
   TARGET_ASM_FUNCTION_PROLOGUE hook: it prints .extern directives
   and, when no `prologue' insn pattern exists, emits and prints the
   prologue RTL directly.  */
10476 rs6000_output_function_prologue (file
, size
)
10478 HOST_WIDE_INT size ATTRIBUTE_UNUSED
;
10480 rs6000_stack_t
*info
= rs6000_stack_info ();
10482 if (TARGET_DEBUG_STACK
)
10483 debug_stack_info (info
);
10485 /* Write .extern for any function we will call to save and restore
/* ...fp registers (comment tail lost): out-of-line FPR save/restore
   helpers are used when the saves are not inlined.  */
10487 if (info
->first_fp_reg_save
< 64
10488 && !FP_SAVE_INLINE (info
->first_fp_reg_save
))
10489 fprintf (file
, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
10490 SAVE_FP_PREFIX
, info
->first_fp_reg_save
- 32, SAVE_FP_SUFFIX
,
10491 RESTORE_FP_PREFIX
, info
->first_fp_reg_save
- 32,
10492 RESTORE_FP_SUFFIX
);
10494 /* Write .extern for AIX common mode routines, if needed. */
/* Emitted once per file; common_mode_defined latches that.  */
10495 if (! TARGET_POWER
&& ! TARGET_POWERPC
&& ! common_mode_defined
)
10497 fputs ("\t.extern __mulh\n", file
);
10498 fputs ("\t.extern __mull\n", file
);
10499 fputs ("\t.extern __divss\n", file
);
10500 fputs ("\t.extern __divus\n", file
);
10501 fputs ("\t.extern __quoss\n", file
);
10502 fputs ("\t.extern __quous\n", file
);
10503 common_mode_defined
= 1;
/* Fallback path when the machine description has no `prologue'
   expander: generate the prologue insns here and run final() over
   them so they are printed into the assembly now.  */
10506 if (! HAVE_prologue
)
10510 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
10511 the "toplevel" insn chain. */
10512 emit_note (0, NOTE_INSN_DELETED
);
10513 rs6000_emit_prologue ();
10514 emit_note (0, NOTE_INSN_DELETED
);
10516 /* Expand INSN_ADDRESSES so final() doesn't crash. */
10520 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
10522 INSN_ADDRESSES_NEW (insn
, addr
);
10527 if (TARGET_DEBUG_STACK
)
10528 debug_rtx_list (get_insns (), 100);
10529 final (get_insns (), file
, FALSE
, FALSE
);
/* Advance the counter used to generate unique PIC base labels.  */
10533 rs6000_pic_labelno
++;
10536 /* Emit function epilogue as insns.
10538 At present, dwarf2out_frame_debug_expr doesn't understand
10539 register restores, so we don't bother setting RTX_FRAME_RELATED_P
10540 anywhere in the epilogue. Most of the insns below would in any case
10541 need special notes to explain where r11 is in relation to the stack. */
10544 rs6000_emit_epilogue (sibcall
)
10547 rs6000_stack_t
*info
;
10548 int restoring_FPRs_inline
;
10549 int using_load_multiple
;
10550 int using_mfcr_multiple
;
10551 int use_backchain_to_restore_sp
;
10553 rtx sp_reg_rtx
= gen_rtx_REG (Pmode
, 1);
10554 rtx frame_reg_rtx
= sp_reg_rtx
;
10555 enum machine_mode reg_mode
= TARGET_POWERPC64
? DImode
: SImode
;
10556 int reg_size
= TARGET_POWERPC64
? 8 : 4;
10559 if (TARGET_SPE_ABI
)
10561 reg_mode
= V2SImode
;
10565 info
= rs6000_stack_info ();
10566 using_load_multiple
= (TARGET_MULTIPLE
&& ! TARGET_POWERPC64
10568 && info
->first_gp_reg_save
< 31);
10569 restoring_FPRs_inline
= (sibcall
10570 || current_function_calls_eh_return
10571 || info
->first_fp_reg_save
== 64
10572 || FP_SAVE_INLINE (info
->first_fp_reg_save
));
10573 use_backchain_to_restore_sp
= (frame_pointer_needed
10574 || current_function_calls_alloca
10575 || info
->total_size
> 32767);
10576 using_mfcr_multiple
= (rs6000_cpu
== PROCESSOR_PPC601
10577 || rs6000_cpu
== PROCESSOR_PPC603
10578 || rs6000_cpu
== PROCESSOR_PPC750
10581 /* If we have a frame pointer, a call to alloca, or a large stack
10582 frame, restore the old stack pointer using the backchain. Otherwise,
10583 we know what size to update it with. */
10584 if (use_backchain_to_restore_sp
)
10586 /* Under V.4, don't reset the stack pointer until after we're done
10587 loading the saved registers. */
10588 if (DEFAULT_ABI
== ABI_V4
)
10589 frame_reg_rtx
= gen_rtx_REG (Pmode
, 11);
10591 emit_move_insn (frame_reg_rtx
,
10592 gen_rtx_MEM (Pmode
, sp_reg_rtx
));
10595 else if (info
->push_p
)
10597 if (DEFAULT_ABI
== ABI_V4
)
10598 sp_offset
= info
->total_size
;
10601 emit_insn (TARGET_32BIT
10602 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
,
10603 GEN_INT (info
->total_size
))
10604 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
,
10605 GEN_INT (info
->total_size
)));
10609 /* Restore AltiVec registers if needed. */
10610 if (TARGET_ALTIVEC_ABI
&& info
->altivec_size
!= 0)
10614 for (i
= info
->first_altivec_reg_save
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
10615 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
10617 rtx addr
, areg
, mem
;
10619 areg
= gen_rtx_REG (Pmode
, 0);
10621 (areg
, GEN_INT (info
->altivec_save_offset
10623 + 16 * (i
- info
->first_altivec_reg_save
)));
10625 /* AltiVec addressing mode is [reg+reg]. */
10626 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, areg
);
10627 mem
= gen_rtx_MEM (V4SImode
, addr
);
10628 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10630 emit_move_insn (gen_rtx_REG (V4SImode
, i
), mem
);
10634 /* Restore VRSAVE if needed. */
10635 if (TARGET_ALTIVEC_ABI
&& info
->vrsave_mask
!= 0)
10637 rtx addr
, mem
, reg
;
10639 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10640 GEN_INT (info
->vrsave_save_offset
+ sp_offset
));
10641 mem
= gen_rtx_MEM (SImode
, addr
);
10642 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10643 reg
= gen_rtx_REG (SImode
, 12);
10644 emit_move_insn (reg
, mem
);
10646 emit_insn (generate_set_vrsave (reg
, info
, 1));
10649 /* Get the old lr if we saved it. */
10650 if (info
->lr_save_p
)
10652 rtx mem
= gen_frame_mem_offset (Pmode
, frame_reg_rtx
,
10653 info
->lr_save_offset
+ sp_offset
);
10655 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10657 emit_move_insn (gen_rtx_REG (Pmode
, 0), mem
);
10660 /* Get the old cr if we saved it. */
10661 if (info
->cr_save_p
)
10663 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10664 GEN_INT (info
->cr_save_offset
+ sp_offset
));
10665 rtx mem
= gen_rtx_MEM (SImode
, addr
);
10667 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10669 emit_move_insn (gen_rtx_REG (SImode
, 12), mem
);
10672 /* Set LR here to try to overlap restores below. */
10673 if (info
->lr_save_p
)
10674 emit_move_insn (gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
),
10675 gen_rtx_REG (Pmode
, 0));
10677 /* Load exception handler data registers, if needed. */
10678 if (current_function_calls_eh_return
)
10680 unsigned int i
, regno
;
10686 regno
= EH_RETURN_DATA_REGNO (i
);
10687 if (regno
== INVALID_REGNUM
)
10690 mem
= gen_frame_mem_offset (reg_mode
, frame_reg_rtx
,
10691 info
->ehrd_offset
+ sp_offset
10692 + reg_size
* (int) i
);
10693 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10695 emit_move_insn (gen_rtx_REG (reg_mode
, regno
), mem
);
10699 /* Restore GPRs. This is done as a PARALLEL if we are using
10700 the load-multiple instructions. */
10701 if (using_load_multiple
)
10704 p
= rtvec_alloc (32 - info
->first_gp_reg_save
);
10705 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
10707 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10708 GEN_INT (info
->gp_save_offset
10711 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
10713 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10716 gen_rtx_SET (VOIDmode
,
10717 gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
),
10720 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
10723 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
10724 if ((regs_ever_live
[info
->first_gp_reg_save
+i
]
10725 && ! call_used_regs
[info
->first_gp_reg_save
+i
])
10726 || (i
+info
->first_gp_reg_save
== RS6000_PIC_OFFSET_TABLE_REGNUM
10727 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1)
10728 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))))
10730 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10731 GEN_INT (info
->gp_save_offset
10734 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
10736 /* Restore 64-bit quantities for SPE. */
10737 if (TARGET_SPE_ABI
)
10739 int offset
= info
->spe_gp_save_offset
+ sp_offset
+ 8 * i
;
10742 if (!SPE_CONST_OFFSET_OK (offset
))
10744 b
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
10745 emit_move_insn (b
, GEN_INT (offset
));
10748 b
= GEN_INT (offset
);
10750 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, b
);
10751 mem
= gen_rtx_MEM (V2SImode
, addr
);
10754 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10756 emit_move_insn (gen_rtx_REG (reg_mode
,
10757 info
->first_gp_reg_save
+ i
), mem
);
10760 /* Restore fpr's if we need to do it without calling a function. */
10761 if (restoring_FPRs_inline
)
10762 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
10763 if ((regs_ever_live
[info
->first_fp_reg_save
+i
]
10764 && ! call_used_regs
[info
->first_fp_reg_save
+i
]))
10767 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10768 GEN_INT (info
->fp_save_offset
10771 mem
= gen_rtx_MEM (DFmode
, addr
);
10772 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10774 emit_move_insn (gen_rtx_REG (DFmode
,
10775 info
->first_fp_reg_save
+ i
),
10779 /* If we saved cr, restore it here. Just those that were used. */
10780 if (info
->cr_save_p
)
10782 rtx r12_rtx
= gen_rtx_REG (SImode
, 12);
10785 if (using_mfcr_multiple
)
10787 for (i
= 0; i
< 8; i
++)
10788 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
10794 if (using_mfcr_multiple
&& count
> 1)
10799 p
= rtvec_alloc (count
);
10802 for (i
= 0; i
< 8; i
++)
10803 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
10805 rtvec r
= rtvec_alloc (2);
10806 RTVEC_ELT (r
, 0) = r12_rtx
;
10807 RTVEC_ELT (r
, 1) = GEN_INT (1 << (7-i
));
10808 RTVEC_ELT (p
, ndx
) =
10809 gen_rtx_SET (VOIDmode
, gen_rtx_REG (CCmode
, CR0_REGNO
+i
),
10810 gen_rtx_UNSPEC (CCmode
, r
, 20));
10813 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
10818 for (i
= 0; i
< 8; i
++)
10819 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
10821 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode
,
10827 /* If this is V.4, unwind the stack pointer after all of the loads
10828 have been done. We need to emit a block here so that sched
10829 doesn't decide to move the sp change before the register restores
10830 (which may not have any obvious dependency on the stack). This
10831 doesn't hurt performance, because there is no scheduling that can
10832 be done after this point. */
10833 if (DEFAULT_ABI
== ABI_V4
)
10835 if (frame_reg_rtx
!= sp_reg_rtx
)
10836 rs6000_emit_stack_tie ();
10838 if (use_backchain_to_restore_sp
)
10840 emit_move_insn (sp_reg_rtx
, frame_reg_rtx
);
10842 else if (sp_offset
!= 0)
10844 emit_insn (Pmode
== SImode
10845 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
,
10846 GEN_INT (sp_offset
))
10847 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
,
10848 GEN_INT (sp_offset
)));
10852 if (current_function_calls_eh_return
)
10854 rtx sa
= EH_RETURN_STACKADJ_RTX
;
10855 emit_insn (Pmode
== SImode
10856 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
, sa
)
10857 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
, sa
));
10863 if (! restoring_FPRs_inline
)
10864 p
= rtvec_alloc (3 + 64 - info
->first_fp_reg_save
);
10866 p
= rtvec_alloc (2);
10868 RTVEC_ELT (p
, 0) = gen_rtx_RETURN (VOIDmode
);
10869 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
,
10870 gen_rtx_REG (Pmode
,
10871 LINK_REGISTER_REGNUM
));
10873 /* If we have to restore more than two FP registers, branch to the
10874 restore function. It will return to our caller. */
10875 if (! restoring_FPRs_inline
)
10879 const char *alloc_rname
;
10881 sprintf (rname
, "%s%d%s", RESTORE_FP_PREFIX
,
10882 info
->first_fp_reg_save
- 32, RESTORE_FP_SUFFIX
);
10883 alloc_rname
= ggc_strdup (rname
);
10884 RTVEC_ELT (p
, 2) = gen_rtx_USE (VOIDmode
,
10885 gen_rtx_SYMBOL_REF (Pmode
,
10888 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
10891 addr
= gen_rtx_PLUS (Pmode
, sp_reg_rtx
,
10892 GEN_INT (info
->fp_save_offset
+ 8*i
));
10893 mem
= gen_rtx_MEM (DFmode
, addr
);
10894 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10896 RTVEC_ELT (p
, i
+3) =
10897 gen_rtx_SET (VOIDmode
,
10898 gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
),
10903 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
10907 /* Write function epilogue. */
10910 rs6000_output_function_epilogue (file
, size
)
10912 HOST_WIDE_INT size ATTRIBUTE_UNUSED
;
10914 rs6000_stack_t
*info
= rs6000_stack_info ();
10916 if (! HAVE_epilogue
)
10918 rtx insn
= get_last_insn ();
10919 /* If the last insn was a BARRIER, we don't have to write anything except
10920 the trace table. */
10921 if (GET_CODE (insn
) == NOTE
)
10922 insn
= prev_nonnote_insn (insn
);
10923 if (insn
== 0 || GET_CODE (insn
) != BARRIER
)
10925 /* This is slightly ugly, but at least we don't have two
10926 copies of the epilogue-emitting code. */
10929 /* A NOTE_INSN_DELETED is supposed to be at the start
10930 and end of the "toplevel" insn chain. */
10931 emit_note (0, NOTE_INSN_DELETED
);
10932 rs6000_emit_epilogue (FALSE
);
10933 emit_note (0, NOTE_INSN_DELETED
);
10935 /* Expand INSN_ADDRESSES so final() doesn't crash. */
10939 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
10941 INSN_ADDRESSES_NEW (insn
, addr
);
10946 if (TARGET_DEBUG_STACK
)
10947 debug_rtx_list (get_insns (), 100);
10948 final (get_insns (), file
, FALSE
, FALSE
);
10953 /* Output a traceback table here. See /usr/include/sys/debug.h for info
10956 We don't output a traceback table if -finhibit-size-directive was
10957 used. The documentation for -finhibit-size-directive reads
10958 ``don't output a @code{.size} assembler directive, or anything
10959 else that would cause trouble if the function is split in the
10960 middle, and the two halves are placed at locations far apart in
10961 memory.'' The traceback table has this property, since it
10962 includes the offset from the start of the function to the
10963 traceback table itself.
10965 System V.4 Powerpc's (and the embedded ABI derived from it) use a
10966 different traceback table. */
10967 if (DEFAULT_ABI
== ABI_AIX
&& ! flag_inhibit_size_directive
10968 && rs6000_traceback
!= traceback_none
)
10970 const char *fname
= XSTR (XEXP (DECL_RTL (current_function_decl
), 0), 0);
10971 const char *language_string
= lang_hooks
.name
;
10972 int fixed_parms
= 0, float_parms
= 0, parm_info
= 0;
10974 int optional_tbtab
;
10976 if (rs6000_traceback
== traceback_full
)
10977 optional_tbtab
= 1;
10978 else if (rs6000_traceback
== traceback_part
)
10979 optional_tbtab
= 0;
10981 optional_tbtab
= !optimize_size
&& !TARGET_ELF
;
10983 while (*fname
== '.') /* V.4 encodes . in the name */
10986 /* Need label immediately before tbtab, so we can compute its offset
10987 from the function start. */
10990 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LT");
10991 ASM_OUTPUT_LABEL (file
, fname
);
10993 /* The .tbtab pseudo-op can only be used for the first eight
10994 expressions, since it can't handle the possibly variable
10995 length fields that follow. However, if you omit the optional
10996 fields, the assembler outputs zeros for all optional fields
10997 anyways, giving each variable length field is minimum length
10998 (as defined in sys/debug.h). Thus we can not use the .tbtab
10999 pseudo-op at all. */
11001 /* An all-zero word flags the start of the tbtab, for debuggers
11002 that have to find it by searching forward from the entry
11003 point or from the current pc. */
11004 fputs ("\t.long 0\n", file
);
11006 /* Tbtab format type. Use format type 0. */
11007 fputs ("\t.byte 0,", file
);
11009 /* Language type. Unfortunately, there doesn't seem to be any
11010 official way to get this info, so we use language_string. C
11011 is 0. C++ is 9. No number defined for Obj-C, so use the
11012 value for C for now. There is no official value for Java,
11013 although IBM appears to be using 13. There is no official value
11014 for Chill, so we've chosen 44 pseudo-randomly. */
11015 if (! strcmp (language_string
, "GNU C")
11016 || ! strcmp (language_string
, "GNU Objective-C"))
11018 else if (! strcmp (language_string
, "GNU F77"))
11020 else if (! strcmp (language_string
, "GNU Ada"))
11022 else if (! strcmp (language_string
, "GNU Pascal"))
11024 else if (! strcmp (language_string
, "GNU C++"))
11026 else if (! strcmp (language_string
, "GNU Java"))
11028 else if (! strcmp (language_string
, "GNU CHILL"))
11032 fprintf (file
, "%d,", i
);
11034 /* 8 single bit fields: global linkage (not set for C extern linkage,
11035 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
11036 from start of procedure stored in tbtab, internal function, function
11037 has controlled storage, function has no toc, function uses fp,
11038 function logs/aborts fp operations. */
11039 /* Assume that fp operations are used if any fp reg must be saved. */
11040 fprintf (file
, "%d,",
11041 (optional_tbtab
<< 5) | ((info
->first_fp_reg_save
!= 64) << 1));
11043 /* 6 bitfields: function is interrupt handler, name present in
11044 proc table, function calls alloca, on condition directives
11045 (controls stack walks, 3 bits), saves condition reg, saves
11047 /* The `function calls alloca' bit seems to be set whenever reg 31 is
11048 set up as a frame pointer, even when there is no alloca call. */
11049 fprintf (file
, "%d,",
11050 ((optional_tbtab
<< 6)
11051 | ((optional_tbtab
& frame_pointer_needed
) << 5)
11052 | (info
->cr_save_p
<< 1)
11053 | (info
->lr_save_p
)));
11055 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
11057 fprintf (file
, "%d,",
11058 (info
->push_p
<< 7) | (64 - info
->first_fp_reg_save
));
11060 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
11061 fprintf (file
, "%d,", (32 - first_reg_to_save ()));
11063 if (optional_tbtab
)
11065 /* Compute the parameter info from the function decl argument
11068 int next_parm_info_bit
= 31;
11070 for (decl
= DECL_ARGUMENTS (current_function_decl
);
11071 decl
; decl
= TREE_CHAIN (decl
))
11073 rtx parameter
= DECL_INCOMING_RTL (decl
);
11074 enum machine_mode mode
= GET_MODE (parameter
);
11076 if (GET_CODE (parameter
) == REG
)
11078 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
)
11084 if (mode
== SFmode
)
11086 else if (mode
== DFmode
)
11091 /* If only one bit will fit, don't or in this entry. */
11092 if (next_parm_info_bit
> 0)
11093 parm_info
|= (bits
<< (next_parm_info_bit
- 1));
11094 next_parm_info_bit
-= 2;
11098 fixed_parms
+= ((GET_MODE_SIZE (mode
)
11099 + (UNITS_PER_WORD
- 1))
11101 next_parm_info_bit
-= 1;
11107 /* Number of fixed point parameters. */
11108 /* This is actually the number of words of fixed point parameters; thus
11109 an 8 byte struct counts as 2; and thus the maximum value is 8. */
11110 fprintf (file
, "%d,", fixed_parms
);
11112 /* 2 bitfields: number of floating point parameters (7 bits), parameters
11114 /* This is actually the number of fp registers that hold parameters;
11115 and thus the maximum value is 13. */
11116 /* Set parameters on stack bit if parameters are not in their original
11117 registers, regardless of whether they are on the stack? Xlc
11118 seems to set the bit when not optimizing. */
11119 fprintf (file
, "%d\n", ((float_parms
<< 1) | (! optimize
)));
11121 if (! optional_tbtab
)
11124 /* Optional fields follow. Some are variable length. */
11126 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
11127 11 double float. */
11128 /* There is an entry for each parameter in a register, in the order that
11129 they occur in the parameter list. Any intervening arguments on the
11130 stack are ignored. If the list overflows a long (max possible length
11131 34 bits) then completely leave off all elements that don't fit. */
11132 /* Only emit this long if there was at least one parameter. */
11133 if (fixed_parms
|| float_parms
)
11134 fprintf (file
, "\t.long %d\n", parm_info
);
11136 /* Offset from start of code to tb table. */
11137 fputs ("\t.long ", file
);
11138 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LT");
11140 RS6000_OUTPUT_BASENAME (file
, fname
);
11142 assemble_name (file
, fname
);
11144 fputs ("-.", file
);
11146 RS6000_OUTPUT_BASENAME (file
, fname
);
11148 assemble_name (file
, fname
);
11152 /* Interrupt handler mask. */
11153 /* Omit this long, since we never set the interrupt handler bit
11156 /* Number of CTL (controlled storage) anchors. */
11157 /* Omit this long, since the has_ctl bit is never set above. */
11159 /* Displacement into stack of each CTL anchor. */
11160 /* Omit this list of longs, because there are no CTL anchors. */
11162 /* Length of function name. */
11163 fprintf (file
, "\t.short %d\n", (int) strlen (fname
));
11165 /* Function name. */
11166 assemble_string (fname
, strlen (fname
));
11168 /* Register for alloca automatic storage; this is always reg 31.
11169 Only emit this if the alloca bit was set above. */
11170 if (frame_pointer_needed
)
11171 fputs ("\t.byte 31\n", file
);
11173 fputs ("\t.align 2\n", file
);
11177 /* A C compound statement that outputs the assembler code for a thunk
11178 function, used to implement C++ virtual function calls with
11179 multiple inheritance. The thunk acts as a wrapper around a virtual
11180 function, adjusting the implicit object parameter before handing
11181 control off to the real function.
11183 First, emit code to add the integer DELTA to the location that
11184 contains the incoming first argument. Assume that this argument
11185 contains a pointer, and is the one used to pass the `this' pointer
11186 in C++. This is the incoming argument *before* the function
11187 prologue, e.g. `%o0' on a sparc. The addition must preserve the
11188 values of all other incoming arguments.
11190 After the addition, emit code to jump to FUNCTION, which is a
11191 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
11192 not touch the return address. Hence returning from FUNCTION will
11193 return to whoever called the current `thunk'.
11195 The effect must be as if FUNCTION had been called directly with the
11196 adjusted first argument. This macro is responsible for emitting
11197 all of the code for a thunk function; output_function_prologue()
11198 and output_function_epilogue() are not invoked.
11200 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
11201 been extracted from it.) It might possibly be useful on some
11202 targets, but probably not.
11204 If you do not define this macro, the target-independent code in the
11205 C++ frontend will generate a less efficient heavyweight thunk that
11206 calls FUNCTION instead of jumping to it. The generic approach does
11207 not support varargs. */
11210 output_mi_thunk (file
, thunk_fndecl
, delta
, function
)
11212 tree thunk_fndecl ATTRIBUTE_UNUSED
;
11216 const char *this_reg
=
11217 reg_names
[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function
))) ? 4 : 3 ];
11218 const char *prefix
;
11220 const char *r0
= reg_names
[0];
11221 const char *toc
= reg_names
[2];
11222 const char *schain
= reg_names
[11];
11223 const char *r12
= reg_names
[12];
11225 static int labelno
= 0;
11227 /* Small constants that can be done by one add instruction. */
11228 if (delta
>= -32768 && delta
<= 32767)
11230 if (! TARGET_NEW_MNEMONICS
)
11231 fprintf (file
, "\tcal %s,%d(%s)\n", this_reg
, delta
, this_reg
);
11233 fprintf (file
, "\taddi %s,%s,%d\n", this_reg
, this_reg
, delta
);
11236 /* 64-bit constants. If "int" is 32 bits, we'll never hit this abort. */
11237 else if (TARGET_64BIT
&& (delta
< -2147483647 - 1 || delta
> 2147483647))
11240 /* Large constants that can be done by one addis instruction. */
11241 else if ((delta
& 0xffff) == 0)
11242 asm_fprintf (file
, "\t{cau|addis} %s,%s,%d\n", this_reg
, this_reg
,
11245 /* 32-bit constants that can be done by an add and addis instruction. */
11248 /* Break into two pieces, propagating the sign bit from the low
11249 word to the upper word. */
11250 int delta_low
= ((delta
& 0xffff) ^ 0x8000) - 0x8000;
11251 int delta_high
= (delta
- delta_low
) >> 16;
11253 asm_fprintf (file
, "\t{cau|addis} %s,%s,%d\n", this_reg
, this_reg
,
11256 if (! TARGET_NEW_MNEMONICS
)
11257 fprintf (file
, "\tcal %s,%d(%s)\n", this_reg
, delta_low
, this_reg
);
11259 fprintf (file
, "\taddi %s,%s,%d\n", this_reg
, this_reg
, delta_low
);
11262 /* Get the prefix in front of the names. */
11263 switch (DEFAULT_ABI
)
11273 case ABI_AIX_NODESC
:
11278 /* If the function is compiled in this module, jump to it directly.
11279 Otherwise, load up its address and jump to it. */
11281 fname
= XSTR (XEXP (DECL_RTL (function
), 0), 0);
11283 if (current_file_function_operand (XEXP (DECL_RTL (function
), 0), VOIDmode
)
11284 && (! lookup_attribute ("longcall",
11285 TYPE_ATTRIBUTES (TREE_TYPE (function
)))
11286 || lookup_attribute ("shortcall",
11287 TYPE_ATTRIBUTES (TREE_TYPE (function
)))))
11290 fprintf (file
, "\tb %s", prefix
);
11291 assemble_name (file
, fname
);
11292 if (DEFAULT_ABI
== ABI_V4
&& flag_pic
) fputs ("@local", file
);
11298 switch (DEFAULT_ABI
)
11304 /* Set up a TOC entry for the function. */
11305 ASM_GENERATE_INTERNAL_LABEL (buf
, "Lthunk", labelno
);
11307 ASM_OUTPUT_INTERNAL_LABEL (file
, "Lthunk", labelno
);
11310 if (TARGET_MINIMAL_TOC
)
11311 fputs (TARGET_32BIT
? "\t.long " : DOUBLE_INT_ASM_OP
, file
);
11314 fputs ("\t.tc ", file
);
11315 assemble_name (file
, fname
);
11316 fputs ("[TC],", file
);
11318 assemble_name (file
, fname
);
11321 function_section (current_function_decl
);
11324 if (TARGET_MINIMAL_TOC
)
11325 asm_fprintf (file
, (TARGET_32BIT
)
11326 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12
,
11327 TARGET_ELF
? ".LCTOC0@toc" : ".LCTOC..1", toc
);
11328 asm_fprintf (file
, (TARGET_32BIT
) ? "\t{l|lwz} %s," : "\tld %s,", r12
);
11329 assemble_name (file
, buf
);
11330 if (TARGET_ELF
&& TARGET_MINIMAL_TOC
)
11331 fputs ("-(.LCTOC1)", file
);
11332 asm_fprintf (file
, "(%s)\n", TARGET_MINIMAL_TOC
? r12
: toc
);
11334 (TARGET_32BIT
) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
11338 (TARGET_32BIT
) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
11341 asm_fprintf (file
, "\tmtctr %s\n", r0
);
11343 (TARGET_32BIT
) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
11346 asm_fprintf (file
, "\tbctr\n");
11349 case ABI_AIX_NODESC
:
11351 fprintf (file
, "\tb %s", prefix
);
11352 assemble_name (file
, fname
);
11353 if (flag_pic
) fputs ("@plt", file
);
11359 fprintf (file
, "\tb %s", prefix
);
11360 if (flag_pic
&& !machopic_name_defined_p (fname
))
11361 assemble_name (file
, machopic_stub_name (fname
));
11363 assemble_name (file
, fname
);
11372 /* A quick summary of the various types of 'constant-pool tables'
11375 Target Flags Name One table per
11376 AIX (none) AIX TOC object file
11377 AIX -mfull-toc AIX TOC object file
11378 AIX -mminimal-toc AIX minimal TOC translation unit
11379 SVR4/EABI (none) SVR4 SDATA object file
11380 SVR4/EABI -fpic SVR4 pic object file
11381 SVR4/EABI -fPIC SVR4 PIC translation unit
11382 SVR4/EABI -mrelocatable EABI TOC function
11383 SVR4/EABI -maix AIX TOC object file
11384 SVR4/EABI -maix -mminimal-toc
11385 AIX minimal TOC translation unit
11387 Name Reg. Set by entries contains:
11388 made by addrs? fp? sum?
11390 AIX TOC 2 crt0 as Y option option
11391 AIX minimal TOC 30 prolog gcc Y Y option
11392 SVR4 SDATA 13 crt0 gcc N Y N
11393 SVR4 pic 30 prolog ld Y not yet N
11394 SVR4 PIC 30 prolog gcc Y option option
11395 EABI TOC 30 prolog gcc Y option option
11399 /* Hash table stuff for keeping track of TOC entries. */
11401 struct toc_hash_struct
11403 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
11404 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
11406 enum machine_mode key_mode
;
11410 static htab_t toc_hash_table
;
11412 /* Hash functions for the hash table. */
11415 rs6000_hash_constant (k
)
11418 enum rtx_code code
= GET_CODE (k
);
11419 enum machine_mode mode
= GET_MODE (k
);
11420 unsigned result
= (code
<< 3) ^ mode
;
11421 const char *format
;
11424 format
= GET_RTX_FORMAT (code
);
11425 flen
= strlen (format
);
11431 return result
* 1231 + (unsigned) INSN_UID (XEXP (k
, 0));
11434 if (mode
!= VOIDmode
)
11435 return real_hash (CONST_DOUBLE_REAL_VALUE (k
)) * result
;
11447 for (; fidx
< flen
; fidx
++)
11448 switch (format
[fidx
])
11453 const char *str
= XSTR (k
, fidx
);
11454 len
= strlen (str
);
11455 result
= result
* 613 + len
;
11456 for (i
= 0; i
< len
; i
++)
11457 result
= result
* 613 + (unsigned) str
[i
];
11462 result
= result
* 1231 + rs6000_hash_constant (XEXP (k
, fidx
));
11466 result
= result
* 613 + (unsigned) XINT (k
, fidx
);
11469 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT
))
11470 result
= result
* 613 + (unsigned) XWINT (k
, fidx
);
11474 for (i
= 0; i
< sizeof(HOST_WIDE_INT
)/sizeof(unsigned); i
++)
11475 result
= result
* 613 + (unsigned) (XWINT (k
, fidx
)
11487 toc_hash_function (hash_entry
)
11488 const void * hash_entry
;
11490 const struct toc_hash_struct
*thc
=
11491 (const struct toc_hash_struct
*) hash_entry
;
11492 return rs6000_hash_constant (thc
->key
) ^ thc
->key_mode
;
11495 /* Compare H1 and H2 for equivalence. */
11498 toc_hash_eq (h1
, h2
)
11502 rtx r1
= ((const struct toc_hash_struct
*) h1
)->key
;
11503 rtx r2
= ((const struct toc_hash_struct
*) h2
)->key
;
11505 if (((const struct toc_hash_struct
*) h1
)->key_mode
11506 != ((const struct toc_hash_struct
*) h2
)->key_mode
)
11509 return rtx_equal_p (r1
, r2
);
11512 /* Mark the hash table-entry HASH_ENTRY. */
11515 toc_hash_mark_entry (hash_slot
, unused
)
11517 void * unused ATTRIBUTE_UNUSED
;
11519 const struct toc_hash_struct
* hash_entry
=
11520 *(const struct toc_hash_struct
**) hash_slot
;
11521 rtx r
= hash_entry
->key
;
11522 ggc_set_mark (hash_entry
);
11523 /* For CODE_LABELS, we don't want to drag in the whole insn chain... */
11524 if (GET_CODE (r
) == LABEL_REF
)
11527 ggc_set_mark (XEXP (r
, 0));
11534 /* Mark all the elements of the TOC hash-table *HT. */
11537 toc_hash_mark_table (vht
)
11542 htab_traverse (*ht
, toc_hash_mark_entry
, (void *)0);
11545 /* These are the names given by the C++ front-end to vtables, and
11546 vtable-like objects. Ideally, this logic should not be here;
11547 instead, there should be some programmatic way of inquiring as
11548 to whether or not an object is a vtable. */
11550 #define VTABLE_NAME_P(NAME) \
11551 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
11552 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
11553 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
11554 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
11557 rs6000_output_symbol_ref (file
, x
)
11561 /* Currently C++ toc references to vtables can be emitted before it
11562 is decided whether the vtable is public or private. If this is
11563 the case, then the linker will eventually complain that there is
11564 a reference to an unknown section. Thus, for vtables only,
11565 we emit the TOC reference to reference the symbol and not the
11567 const char *name
= XSTR (x
, 0);
11569 if (VTABLE_NAME_P (name
))
11571 RS6000_OUTPUT_BASENAME (file
, name
);
11574 assemble_name (file
, name
);
11577 /* Output a TOC entry. We derive the entry name from what is being
11581 output_toc (file
, x
, labelno
, mode
)
11585 enum machine_mode mode
;
11588 const char *name
= buf
;
11589 const char *real_name
;
11596 /* When the linker won't eliminate them, don't output duplicate
11597 TOC entries (this happens on AIX if there is any kind of TOC,
11598 and on SVR4 under -fPIC or -mrelocatable). */
11601 struct toc_hash_struct
*h
;
11604 h
= ggc_alloc (sizeof (*h
));
11606 h
->key_mode
= mode
;
11607 h
->labelno
= labelno
;
11609 found
= htab_find_slot (toc_hash_table
, h
, 1);
11610 if (*found
== NULL
)
11612 else /* This is indeed a duplicate.
11613 Set this label equal to that label. */
11615 fputs ("\t.set ", file
);
11616 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LC");
11617 fprintf (file
, "%d,", labelno
);
11618 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LC");
11619 fprintf (file
, "%d\n", ((*(const struct toc_hash_struct
**)
11625 /* If we're going to put a double constant in the TOC, make sure it's
11626 aligned properly when strict alignment is on. */
11627 if (GET_CODE (x
) == CONST_DOUBLE
11628 && STRICT_ALIGNMENT
11629 && GET_MODE_BITSIZE (mode
) >= 64
11630 && ! (TARGET_NO_FP_IN_TOC
&& ! TARGET_MINIMAL_TOC
)) {
11631 ASM_OUTPUT_ALIGN (file
, 3);
11634 ASM_OUTPUT_INTERNAL_LABEL (file
, "LC", labelno
);
11636 /* Handle FP constants specially. Note that if we have a minimal
11637 TOC, things we put here aren't actually in the TOC, so we can allow
11639 if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == DFmode
)
11641 REAL_VALUE_TYPE rv
;
11644 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
11645 REAL_VALUE_TO_TARGET_DOUBLE (rv
, k
);
11649 if (TARGET_MINIMAL_TOC
)
11650 fputs (DOUBLE_INT_ASM_OP
, file
);
11652 fprintf (file
, "\t.tc FD_%lx_%lx[TC],",
11653 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
11654 fprintf (file
, "0x%lx%08lx\n",
11655 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
11660 if (TARGET_MINIMAL_TOC
)
11661 fputs ("\t.long ", file
);
11663 fprintf (file
, "\t.tc FD_%lx_%lx[TC],",
11664 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
11665 fprintf (file
, "0x%lx,0x%lx\n",
11666 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
11670 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == SFmode
)
11672 REAL_VALUE_TYPE rv
;
11675 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
11676 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
11680 if (TARGET_MINIMAL_TOC
)
11681 fputs (DOUBLE_INT_ASM_OP
, file
);
11683 fprintf (file
, "\t.tc FS_%lx[TC],", l
& 0xffffffff);
11684 fprintf (file
, "0x%lx00000000\n", l
& 0xffffffff);
11689 if (TARGET_MINIMAL_TOC
)
11690 fputs ("\t.long ", file
);
11692 fprintf (file
, "\t.tc FS_%lx[TC],", l
& 0xffffffff);
11693 fprintf (file
, "0x%lx\n", l
& 0xffffffff);
11697 else if (GET_MODE (x
) == VOIDmode
11698 && (GET_CODE (x
) == CONST_INT
|| GET_CODE (x
) == CONST_DOUBLE
))
11700 unsigned HOST_WIDE_INT low
;
11701 HOST_WIDE_INT high
;
11703 if (GET_CODE (x
) == CONST_DOUBLE
)
11705 low
= CONST_DOUBLE_LOW (x
);
11706 high
= CONST_DOUBLE_HIGH (x
);
11709 #if HOST_BITS_PER_WIDE_INT == 32
11712 high
= (low
& 0x80000000) ? ~0 : 0;
11716 low
= INTVAL (x
) & 0xffffffff;
11717 high
= (HOST_WIDE_INT
) INTVAL (x
) >> 32;
11721 /* TOC entries are always Pmode-sized, but since this
11722 is a bigendian machine then if we're putting smaller
11723 integer constants in the TOC we have to pad them.
11724 (This is still a win over putting the constants in
11725 a separate constant pool, because then we'd have
11726 to have both a TOC entry _and_ the actual constant.)
11728 For a 32-bit target, CONST_INT values are loaded and shifted
11729 entirely within `low' and can be stored in one TOC entry. */
11731 if (TARGET_64BIT
&& POINTER_SIZE
< GET_MODE_BITSIZE (mode
))
11732 abort ();/* It would be easy to make this work, but it doesn't now. */
11734 if (POINTER_SIZE
> GET_MODE_BITSIZE (mode
))
11736 #if HOST_BITS_PER_WIDE_INT == 32
11737 lshift_double (low
, high
, POINTER_SIZE
- GET_MODE_BITSIZE (mode
),
11738 POINTER_SIZE
, &low
, &high
, 0);
11741 low
<<= POINTER_SIZE
- GET_MODE_BITSIZE (mode
);
11742 high
= (HOST_WIDE_INT
) low
>> 32;
11749 if (TARGET_MINIMAL_TOC
)
11750 fputs (DOUBLE_INT_ASM_OP
, file
);
11752 fprintf (file
, "\t.tc ID_%lx_%lx[TC],",
11753 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
11754 fprintf (file
, "0x%lx%08lx\n",
11755 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
11760 if (POINTER_SIZE
< GET_MODE_BITSIZE (mode
))
11762 if (TARGET_MINIMAL_TOC
)
11763 fputs ("\t.long ", file
);
11765 fprintf (file
, "\t.tc ID_%lx_%lx[TC],",
11766 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
11767 fprintf (file
, "0x%lx,0x%lx\n",
11768 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
11772 if (TARGET_MINIMAL_TOC
)
11773 fputs ("\t.long ", file
);
11775 fprintf (file
, "\t.tc IS_%lx[TC],", (long) low
& 0xffffffff);
11776 fprintf (file
, "0x%lx\n", (long) low
& 0xffffffff);
11782 if (GET_CODE (x
) == CONST
)
11784 if (GET_CODE (XEXP (x
, 0)) != PLUS
)
11787 base
= XEXP (XEXP (x
, 0), 0);
11788 offset
= INTVAL (XEXP (XEXP (x
, 0), 1));
11791 if (GET_CODE (base
) == SYMBOL_REF
)
11792 name
= XSTR (base
, 0);
11793 else if (GET_CODE (base
) == LABEL_REF
)
11794 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (XEXP (base
, 0)));
11795 else if (GET_CODE (base
) == CODE_LABEL
)
11796 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (base
));
11800 real_name
= (*targetm
.strip_name_encoding
) (name
);
11801 if (TARGET_MINIMAL_TOC
)
11802 fputs (TARGET_32BIT
? "\t.long " : DOUBLE_INT_ASM_OP
, file
);
11805 fprintf (file
, "\t.tc %s", real_name
);
11808 fprintf (file
, ".N%d", - offset
);
11810 fprintf (file
, ".P%d", offset
);
11812 fputs ("[TC],", file
);
11815 /* Currently C++ toc references to vtables can be emitted before it
11816 is decided whether the vtable is public or private. If this is
11817 the case, then the linker will eventually complain that there is
11818 a TOC reference to an unknown section. Thus, for vtables only,
11819 we emit the TOC reference to reference the symbol and not the
11821 if (VTABLE_NAME_P (name
))
11823 RS6000_OUTPUT_BASENAME (file
, name
);
11825 fprintf (file
, "%d", offset
);
11826 else if (offset
> 0)
11827 fprintf (file
, "+%d", offset
);
11830 output_addr_const (file
, x
);
11834 /* Output an assembler pseudo-op to write an ASCII string of N characters
11835 starting at P to FILE.
11837 On the RS/6000, we have to do this using the .byte operation and
11838 write out special characters outside the quoted string.
11839 Also, the assembler is broken; very long strings are truncated,
11840 so we must artificially break them up early. */
11843 output_ascii (file
, p
, n
)
11849 int i
, count_string
;
11850 const char *for_string
= "\t.byte \"";
11851 const char *for_decimal
= "\t.byte ";
11852 const char *to_close
= NULL
;
11855 for (i
= 0; i
< n
; i
++)
11858 if (c
>= ' ' && c
< 0177)
11861 fputs (for_string
, file
);
11864 /* Write two quotes to get one. */
11872 for_decimal
= "\"\n\t.byte ";
11876 if (count_string
>= 512)
11878 fputs (to_close
, file
);
11880 for_string
= "\t.byte \"";
11881 for_decimal
= "\t.byte ";
11889 fputs (for_decimal
, file
);
11890 fprintf (file
, "%d", c
);
11892 for_string
= "\n\t.byte \"";
11893 for_decimal
= ", ";
11899 /* Now close the string if we have written one. Then end the line. */
11901 fputs (to_close
, file
);
11904 /* Generate a unique section name for FILENAME for a section type
11905 represented by SECTION_DESC. Output goes into BUF.
11907 SECTION_DESC can be any string, as long as it is different for each
11908 possible section type.
11910 We name the section in the same manner as xlc. The name begins with an
11911 underscore followed by the filename (after stripping any leading directory
11912 names) with the last period replaced by the string SECTION_DESC. If
11913 FILENAME does not contain a period, SECTION_DESC is appended to the end of
11917 rs6000_gen_section_name (buf
, filename
, section_desc
)
11919 const char *filename
;
11920 const char *section_desc
;
11922 const char *q
, *after_last_slash
, *last_period
= 0;
11926 after_last_slash
= filename
;
11927 for (q
= filename
; *q
; q
++)
11930 after_last_slash
= q
+ 1;
11931 else if (*q
== '.')
11935 len
= strlen (after_last_slash
) + strlen (section_desc
) + 2;
11936 *buf
= (char *) xmalloc (len
);
11941 for (q
= after_last_slash
; *q
; q
++)
11943 if (q
== last_period
)
11945 strcpy (p
, section_desc
);
11946 p
+= strlen (section_desc
);
11949 else if (ISALNUM (*q
))
11953 if (last_period
== 0)
11954 strcpy (p
, section_desc
);
11959 /* Emit profile function. */
11962 output_profile_hook (labelno
)
11963 int labelno ATTRIBUTE_UNUSED
;
11965 if (DEFAULT_ABI
== ABI_AIX
)
11967 #ifdef NO_PROFILE_COUNTERS
11968 emit_library_call (init_one_libfunc (RS6000_MCOUNT
), 0, VOIDmode
, 0);
11971 const char *label_name
;
11974 ASM_GENERATE_INTERNAL_LABEL (buf
, "LP", labelno
);
11975 label_name
= (*targetm
.strip_name_encoding
) (ggc_strdup (buf
));
11976 fun
= gen_rtx_SYMBOL_REF (Pmode
, label_name
);
11978 emit_library_call (init_one_libfunc (RS6000_MCOUNT
), 0, VOIDmode
, 1,
11982 else if (DEFAULT_ABI
== ABI_DARWIN
)
11984 const char *mcount_name
= RS6000_MCOUNT
;
11985 int caller_addr_regno
= LINK_REGISTER_REGNUM
;
11987 /* Be conservative and always set this, at least for now. */
11988 current_function_uses_pic_offset_table
= 1;
11991 /* For PIC code, set up a stub and collect the caller's address
11992 from r0, which is where the prologue puts it. */
11995 mcount_name
= machopic_stub_name (mcount_name
);
11996 if (current_function_uses_pic_offset_table
)
11997 caller_addr_regno
= 0;
12000 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, mcount_name
),
12002 gen_rtx_REG (Pmode
, caller_addr_regno
), Pmode
);
12006 /* Write function profiler code. */
12009 output_function_profiler (file
, labelno
)
12016 ASM_GENERATE_INTERNAL_LABEL (buf
, "LP", labelno
);
12017 switch (DEFAULT_ABI
)
12024 /* Fall through. */
12026 case ABI_AIX_NODESC
:
12029 warning ("no profiling of 64-bit code for this ABI");
12032 fprintf (file
, "\tmflr %s\n", reg_names
[0]);
12035 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file
);
12036 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12037 reg_names
[0], save_lr
, reg_names
[1]);
12038 asm_fprintf (file
, "\tmflr %s\n", reg_names
[12]);
12039 asm_fprintf (file
, "\t{l|lwz} %s,", reg_names
[0]);
12040 assemble_name (file
, buf
);
12041 asm_fprintf (file
, "@got(%s)\n", reg_names
[12]);
12043 else if (flag_pic
> 1)
12045 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12046 reg_names
[0], save_lr
, reg_names
[1]);
12047 /* Now, we need to get the address of the label. */
12048 fputs ("\tbl 1f\n\t.long ", file
);
12049 assemble_name (file
, buf
);
12050 fputs ("-.\n1:", file
);
12051 asm_fprintf (file
, "\tmflr %s\n", reg_names
[11]);
12052 asm_fprintf (file
, "\t{l|lwz} %s,0(%s)\n",
12053 reg_names
[0], reg_names
[11]);
12054 asm_fprintf (file
, "\t{cax|add} %s,%s,%s\n",
12055 reg_names
[0], reg_names
[0], reg_names
[11]);
12059 asm_fprintf (file
, "\t{liu|lis} %s,", reg_names
[12]);
12060 assemble_name (file
, buf
);
12061 fputs ("@ha\n", file
);
12062 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12063 reg_names
[0], save_lr
, reg_names
[1]);
12064 asm_fprintf (file
, "\t{cal|la} %s,", reg_names
[0]);
12065 assemble_name (file
, buf
);
12066 asm_fprintf (file
, "@l(%s)\n", reg_names
[12]);
12069 if (current_function_needs_context
&& DEFAULT_ABI
== ABI_AIX_NODESC
)
12071 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12072 reg_names
[STATIC_CHAIN_REGNUM
],
12074 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
12075 asm_fprintf (file
, "\t{l|lwz} %s,%d(%s)\n",
12076 reg_names
[STATIC_CHAIN_REGNUM
],
12080 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
12081 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
12086 /* Don't do anything, done in output_profile_hook (). */
12091 /* Adjust the cost of a scheduling dependency. Return the new cost of
12092 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
12095 rs6000_adjust_cost (insn
, link
, dep_insn
, cost
)
12098 rtx dep_insn ATTRIBUTE_UNUSED
;
12101 if (! recog_memoized (insn
))
12104 if (REG_NOTE_KIND (link
) != 0)
12107 if (REG_NOTE_KIND (link
) == 0)
12109 /* Data dependency; DEP_INSN writes a register that INSN reads
12110 some cycles later. */
12111 switch (get_attr_type (insn
))
12114 /* Tell the first scheduling pass about the latency between
12115 a mtctr and bctr (and mtlr and br/blr). The first
12116 scheduling pass will not know about this latency since
12117 the mtctr instruction, which has the latency associated
12118 to it, will be generated by reload. */
12119 return TARGET_POWER
? 5 : 4;
12121 /* Leave some extra cycles between a compare and its
12122 dependent branch, to inhibit expensive mispredicts. */
12123 if ((rs6000_cpu_attr
== CPU_PPC603
12124 || rs6000_cpu_attr
== CPU_PPC604
12125 || rs6000_cpu_attr
== CPU_PPC604E
12126 || rs6000_cpu_attr
== CPU_PPC620
12127 || rs6000_cpu_attr
== CPU_PPC630
12128 || rs6000_cpu_attr
== CPU_PPC750
12129 || rs6000_cpu_attr
== CPU_PPC7400
12130 || rs6000_cpu_attr
== CPU_PPC7450
12131 || rs6000_cpu_attr
== CPU_POWER4
)
12132 && recog_memoized (dep_insn
)
12133 && (INSN_CODE (dep_insn
) >= 0)
12134 && (get_attr_type (dep_insn
) == TYPE_COMPARE
12135 || get_attr_type (dep_insn
) == TYPE_DELAYED_COMPARE
12136 || get_attr_type (dep_insn
) == TYPE_FPCOMPARE
12137 || get_attr_type (dep_insn
) == TYPE_CR_LOGICAL
))
12142 /* Fall out to return default cost. */
12148 /* A C statement (sans semicolon) to update the integer scheduling
12149 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
12150 INSN earlier, increase the priority to execute INSN later. Do not
12151 define this macro if you do not need to adjust the scheduling
12152 priorities of insns. */
12155 rs6000_adjust_priority (insn
, priority
)
12156 rtx insn ATTRIBUTE_UNUSED
;
12159 /* On machines (like the 750) which have asymmetric integer units,
12160 where one integer unit can do multiply and divides and the other
12161 can't, reduce the priority of multiply/divide so it is scheduled
12162 before other integer operations. */
12165 if (! INSN_P (insn
))
12168 if (GET_CODE (PATTERN (insn
)) == USE
)
12171 switch (rs6000_cpu_attr
) {
12173 switch (get_attr_type (insn
))
12180 fprintf (stderr
, "priority was %#x (%d) before adjustment\n",
12181 priority
, priority
);
12182 if (priority
>= 0 && priority
< 0x01000000)
12192 /* Return how many instructions the machine can issue per cycle. */
12195 rs6000_issue_rate ()
12197 switch (rs6000_cpu_attr
) {
12198 case CPU_RIOS1
: /* ? */
12200 case CPU_PPC601
: /* ? */
12220 /* Length in units of the trampoline for entering a nested function. */
12223 rs6000_trampoline_size ()
12227 switch (DEFAULT_ABI
)
12233 ret
= (TARGET_32BIT
) ? 12 : 24;
12238 case ABI_AIX_NODESC
:
12239 ret
= (TARGET_32BIT
) ? 40 : 48;
12246 /* Emit RTL insns to initialize the variable parts of a trampoline.
12247 FNADDR is an RTX for the address of the function's pure code.
12248 CXT is an RTX for the static chain value for the function. */
12251 rs6000_initialize_trampoline (addr
, fnaddr
, cxt
)
12256 enum machine_mode pmode
= Pmode
;
12257 int regsize
= (TARGET_32BIT
) ? 4 : 8;
12258 rtx ctx_reg
= force_reg (pmode
, cxt
);
12260 switch (DEFAULT_ABI
)
12265 /* Macros to shorten the code expansions below. */
12266 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
12267 #define MEM_PLUS(addr,offset) \
12268 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
12270 /* Under AIX, just build the 3 word function descriptor */
12273 rtx fn_reg
= gen_reg_rtx (pmode
);
12274 rtx toc_reg
= gen_reg_rtx (pmode
);
12275 emit_move_insn (fn_reg
, MEM_DEREF (fnaddr
));
12276 emit_move_insn (toc_reg
, MEM_PLUS (fnaddr
, regsize
));
12277 emit_move_insn (MEM_DEREF (addr
), fn_reg
);
12278 emit_move_insn (MEM_PLUS (addr
, regsize
), toc_reg
);
12279 emit_move_insn (MEM_PLUS (addr
, 2*regsize
), ctx_reg
);
12283 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
12286 case ABI_AIX_NODESC
:
12287 emit_library_call (gen_rtx_SYMBOL_REF (SImode
, "__trampoline_setup"),
12288 FALSE
, VOIDmode
, 4,
12290 GEN_INT (rs6000_trampoline_size ()), SImode
,
12300 /* Table of valid machine attributes. */
12302 const struct attribute_spec rs6000_attribute_table
[] =
12304 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
12305 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute
},
12306 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute
},
12307 { NULL
, 0, 0, false, false, false, NULL
}
12310 /* Handle a "longcall" or "shortcall" attribute; arguments as in
12311 struct attribute_spec.handler. */
12314 rs6000_handle_longcall_attribute (node
, name
, args
, flags
, no_add_attrs
)
12317 tree args ATTRIBUTE_UNUSED
;
12318 int flags ATTRIBUTE_UNUSED
;
12319 bool *no_add_attrs
;
12321 if (TREE_CODE (*node
) != FUNCTION_TYPE
12322 && TREE_CODE (*node
) != FIELD_DECL
12323 && TREE_CODE (*node
) != TYPE_DECL
)
12325 warning ("`%s' attribute only applies to functions",
12326 IDENTIFIER_POINTER (name
));
12327 *no_add_attrs
= true;
12333 /* Set longcall attributes on all functions declared when
12334 rs6000_default_long_calls is true. */
12336 rs6000_set_default_type_attributes (type
)
12339 if (rs6000_default_long_calls
12340 && (TREE_CODE (type
) == FUNCTION_TYPE
12341 || TREE_CODE (type
) == METHOD_TYPE
))
12342 TYPE_ATTRIBUTES (type
) = tree_cons (get_identifier ("longcall"),
12344 TYPE_ATTRIBUTES (type
));
12347 /* Return a reference suitable for calling a function with the
12348 longcall attribute. */
12351 rs6000_longcall_ref (call_ref
)
12354 const char *call_name
;
12357 if (GET_CODE (call_ref
) != SYMBOL_REF
)
12360 /* System V adds '.' to the internal name, so skip them. */
12361 call_name
= XSTR (call_ref
, 0);
12362 if (*call_name
== '.')
12364 while (*call_name
== '.')
12367 node
= get_identifier (call_name
);
12368 call_ref
= gen_rtx_SYMBOL_REF (VOIDmode
, IDENTIFIER_POINTER (node
));
12371 return force_reg (Pmode
, call_ref
);
12375 #ifdef USING_ELFOS_H
12377 /* A C statement or statements to switch to the appropriate section
12378 for output of RTX in mode MODE. You can assume that RTX is some
12379 kind of constant in RTL. The argument MODE is redundant except in
12380 the case of a `const_int' rtx. Select the section by calling
12381 `text_section' or one of the alternatives for other sections.
12383 Do not define this macro if you put all constants in the read-only
12387 rs6000_elf_select_rtx_section (mode
, x
, align
)
12388 enum machine_mode mode
;
12390 unsigned HOST_WIDE_INT align
;
12392 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x
, mode
))
12395 default_elf_select_rtx_section (mode
, x
, align
);
12398 /* A C statement or statements to switch to the appropriate
12399 section for output of DECL. DECL is either a `VAR_DECL' node
12400 or a constant of some sort. RELOC indicates whether forming
12401 the initial value of DECL requires link-time relocations. */
12404 rs6000_elf_select_section (decl
, reloc
, align
)
12407 unsigned HOST_WIDE_INT align
;
12409 default_elf_select_section_1 (decl
, reloc
, align
,
12410 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
12413 /* A C statement to build up a unique section name, expressed as a
12414 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
12415 RELOC indicates whether the initial value of EXP requires
12416 link-time relocations. If you do not define this macro, GCC will use
12417 the symbol name prefixed by `.' as the section name. Note - this
12418 macro can now be called for uninitialized data items as well as
12419 initialized data and functions. */
12422 rs6000_elf_unique_section (decl
, reloc
)
12426 default_unique_section_1 (decl
, reloc
,
12427 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
12431 /* If we are referencing a function that is static or is known to be
12432 in this file, make the SYMBOL_REF special. We can use this to indicate
12433 that we can branch to this function without emitting a no-op after the
12434 call. For real AIX calling sequences, we also replace the
12435 function name with the real name (1 or 2 leading .'s), rather than
12436 the function descriptor name. This saves a lot of overriding code
12437 to read the prefixes. */
12440 rs6000_elf_encode_section_info (decl
, first
)
12447 if (TREE_CODE (decl
) == FUNCTION_DECL
)
12449 rtx sym_ref
= XEXP (DECL_RTL (decl
), 0);
12450 if ((*targetm
.binds_local_p
) (decl
))
12451 SYMBOL_REF_FLAG (sym_ref
) = 1;
12453 if (DEFAULT_ABI
== ABI_AIX
)
12455 size_t len1
= (DEFAULT_ABI
== ABI_AIX
) ? 1 : 2;
12456 size_t len2
= strlen (XSTR (sym_ref
, 0));
12457 char *str
= alloca (len1
+ len2
+ 1);
12460 memcpy (str
+ len1
, XSTR (sym_ref
, 0), len2
+ 1);
12462 XSTR (sym_ref
, 0) = ggc_alloc_string (str
, len1
+ len2
);
12465 else if (rs6000_sdata
!= SDATA_NONE
12466 && DEFAULT_ABI
== ABI_V4
12467 && TREE_CODE (decl
) == VAR_DECL
)
12469 int size
= int_size_in_bytes (TREE_TYPE (decl
));
12470 tree section_name
= DECL_SECTION_NAME (decl
);
12471 const char *name
= (char *)0;
12476 if (TREE_CODE (section_name
) == STRING_CST
)
12478 name
= TREE_STRING_POINTER (section_name
);
12479 len
= TREE_STRING_LENGTH (section_name
);
12485 if ((size
> 0 && size
<= g_switch_value
)
12487 && ((len
== sizeof (".sdata") - 1
12488 && strcmp (name
, ".sdata") == 0)
12489 || (len
== sizeof (".sdata2") - 1
12490 && strcmp (name
, ".sdata2") == 0)
12491 || (len
== sizeof (".sbss") - 1
12492 && strcmp (name
, ".sbss") == 0)
12493 || (len
== sizeof (".sbss2") - 1
12494 && strcmp (name
, ".sbss2") == 0)
12495 || (len
== sizeof (".PPC.EMB.sdata0") - 1
12496 && strcmp (name
, ".PPC.EMB.sdata0") == 0)
12497 || (len
== sizeof (".PPC.EMB.sbss0") - 1
12498 && strcmp (name
, ".PPC.EMB.sbss0") == 0))))
12500 rtx sym_ref
= XEXP (DECL_RTL (decl
), 0);
12501 size_t len
= strlen (XSTR (sym_ref
, 0));
12502 char *str
= alloca (len
+ 2);
12505 memcpy (str
+ 1, XSTR (sym_ref
, 0), len
+ 1);
12506 XSTR (sym_ref
, 0) = ggc_alloc_string (str
, len
+ 1);
/* Strip the '*' (no-prefix) and '@' (small-data) encoding characters
   added by rs6000_elf_encode_section_info, returning the bare name.  */

static const char *
rs6000_elf_strip_name_encoding (str)
     const char *str;
{
  while (*str == '*' || *str == '@')
    str++;
  return str;
}
12521 rs6000_elf_in_small_data_p (decl
)
12524 if (rs6000_sdata
== SDATA_NONE
)
12527 if (TREE_CODE (decl
) == VAR_DECL
&& DECL_SECTION_NAME (decl
))
12529 const char *section
= TREE_STRING_POINTER (DECL_SECTION_NAME (decl
));
12530 if (strcmp (section
, ".sdata") == 0
12531 || strcmp (section
, ".sdata2") == 0
12532 || strcmp (section
, ".sbss") == 0)
12537 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (decl
));
12540 && size
<= g_switch_value
12541 && (rs6000_sdata
!= SDATA_DATA
|| TREE_PUBLIC (decl
)))
12548 #endif /* USING_ELFOS_H */
12551 /* Return a REG that occurs in ADDR with coefficient 1.
12552 ADDR can be effectively incremented by incrementing REG.
12554 r0 is special and we must not select it as an address
12555 register by this routine since our caller will try to
12556 increment the returned register via an "la" instruction. */
12559 find_addr_reg (addr
)
12562 while (GET_CODE (addr
) == PLUS
)
12564 if (GET_CODE (XEXP (addr
, 0)) == REG
12565 && REGNO (XEXP (addr
, 0)) != 0)
12566 addr
= XEXP (addr
, 0);
12567 else if (GET_CODE (XEXP (addr
, 1)) == REG
12568 && REGNO (XEXP (addr
, 1)) != 0)
12569 addr
= XEXP (addr
, 1);
12570 else if (CONSTANT_P (XEXP (addr
, 0)))
12571 addr
= XEXP (addr
, 1);
12572 else if (CONSTANT_P (XEXP (addr
, 1)))
12573 addr
= XEXP (addr
, 0);
12577 if (GET_CODE (addr
) == REG
&& REGNO (addr
) != 0)
12583 rs6000_fatal_bad_address (op
)
12586 fatal_insn ("bad address", op
);
12589 /* Called to register all of our global variables with the garbage
12593 rs6000_add_gc_roots ()
12595 toc_hash_table
= htab_create (1021, toc_hash_function
, toc_hash_eq
, NULL
);
12596 ggc_add_root (&toc_hash_table
, 1, sizeof (toc_hash_table
),
12597 toc_hash_mark_table
);
12603 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
12604 reference and a constant. */
12607 symbolic_operand (op
)
12610 switch (GET_CODE (op
))
12617 return (GET_CODE (op
) == SYMBOL_REF
||
12618 (GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
12619 || GET_CODE (XEXP (op
, 0)) == LABEL_REF
)
12620 && GET_CODE (XEXP (op
, 1)) == CONST_INT
);
12627 #ifdef RS6000_LONG_BRANCH
12629 static tree stub_list
= 0;
12631 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
12632 procedure calls to the linked list. */
12635 add_compiler_stub (label_name
, function_name
, line_number
)
12637 tree function_name
;
12640 tree stub
= build_tree_list (function_name
, label_name
);
12641 TREE_TYPE (stub
) = build_int_2 (line_number
, 0);
12642 TREE_CHAIN (stub
) = stub_list
;
12646 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
12647 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
12648 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
12650 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
12651 handling procedure calls from the linked list and initializes the
12655 output_compiler_stub ()
12658 char label_buf
[256];
12662 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
12664 fprintf (asm_out_file
,
12665 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub
)));
12667 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
12668 if (write_symbols
== DBX_DEBUG
|| write_symbols
== XCOFF_DEBUG
)
12669 fprintf (asm_out_file
, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub
));
12670 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
12672 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
))[0] == '*')
12674 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
))+1);
12677 label_buf
[0] = '_';
12678 strcpy (label_buf
+1,
12679 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
)));
12682 strcpy (tmp_buf
, "lis r12,hi16(");
12683 strcat (tmp_buf
, label_buf
);
12684 strcat (tmp_buf
, ")\n\tori r12,r12,lo16(");
12685 strcat (tmp_buf
, label_buf
);
12686 strcat (tmp_buf
, ")\n\tmtctr r12\n\tbctr");
12687 output_asm_insn (tmp_buf
, 0);
12689 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
12690 if (write_symbols
== DBX_DEBUG
|| write_symbols
== XCOFF_DEBUG
)
12691 fprintf(asm_out_file
, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub
));
12692 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
12698 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
12699 already there or not. */
12702 no_previous_def (function_name
)
12703 tree function_name
;
12706 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
12707 if (function_name
== STUB_FUNCTION_NAME (stub
))
12712 /* GET_PREV_LABEL gets the label name from the previous definition of
12716 get_prev_label (function_name
)
12717 tree function_name
;
12720 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
12721 if (function_name
== STUB_FUNCTION_NAME (stub
))
12722 return STUB_LABEL_NAME (stub
);
12726 /* INSN is either a function call or a millicode call. It may have an
12727 unconditional jump in its delay slot.
12729 CALL_DEST is the routine we are calling. */
12732 output_call (insn
, call_dest
, operand_number
)
12735 int operand_number
;
12737 static char buf
[256];
12738 if (GET_CODE (call_dest
) == SYMBOL_REF
&& TARGET_LONG_BRANCH
&& !flag_pic
)
12741 tree funname
= get_identifier (XSTR (call_dest
, 0));
12743 if (no_previous_def (funname
))
12745 int line_number
= 0;
12746 rtx label_rtx
= gen_label_rtx ();
12747 char *label_buf
, temp_buf
[256];
12748 ASM_GENERATE_INTERNAL_LABEL (temp_buf
, "L",
12749 CODE_LABEL_NUMBER (label_rtx
));
12750 label_buf
= temp_buf
[0] == '*' ? temp_buf
+ 1 : temp_buf
;
12751 labelname
= get_identifier (label_buf
);
12752 for (; insn
&& GET_CODE (insn
) != NOTE
; insn
= PREV_INSN (insn
));
12754 line_number
= NOTE_LINE_NUMBER (insn
);
12755 add_compiler_stub (labelname
, funname
, line_number
);
12758 labelname
= get_prev_label (funname
);
12760 sprintf (buf
, "jbsr %%z%d,%.246s",
12761 operand_number
, IDENTIFIER_POINTER (labelname
));
12766 sprintf (buf
, "bl %%z%d", operand_number
);
12771 #endif /* RS6000_LONG_BRANCH */
/* Build into BUF a local label "L<N>$<SYMBOL>", preserving/adding
   quoting when the symbol is or needs to be quoted.  */
#define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N)		\
  do {								\
    const char *const symbol_ = (SYMBOL);			\
    char *buffer_ = (BUF);					\
    if (symbol_[0] == '"')					\
      {								\
	sprintf(buffer_, "\"L%d$%s", (N), symbol_+1);		\
      }								\
    else if (name_needs_quotes(symbol_))			\
      {								\
	sprintf(buffer_, "\"L%d$%s\"", (N), symbol_);		\
      }								\
    else							\
      {								\
	sprintf(buffer_, "L%d$%s", (N), symbol_);		\
      }								\
  } while (0)
12792 /* Generate PIC and indirect symbol stubs. */
12795 machopic_output_stub (file
, symb
, stub
)
12797 const char *symb
, *stub
;
12799 unsigned int length
;
12800 char *symbol_name
, *lazy_ptr_name
;
12801 char *local_label_0
;
12802 static int label
= 0;
12804 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
12805 symb
= (*targetm
.strip_name_encoding
) (symb
);
12809 length
= strlen (symb
);
12810 symbol_name
= alloca (length
+ 32);
12811 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name
, symb
, length
);
12813 lazy_ptr_name
= alloca (length
+ 32);
12814 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name
, symb
, length
);
12816 local_label_0
= alloca (length
+ 32);
12817 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0
, symb
, length
, 0);
12820 machopic_picsymbol_stub_section ();
12822 machopic_symbol_stub_section ();
12824 fprintf (file
, "%s:\n", stub
);
12825 fprintf (file
, "\t.indirect_symbol %s\n", symbol_name
);
12829 fprintf (file
, "\tmflr r0\n");
12830 fprintf (file
, "\tbcl 20,31,%s\n", local_label_0
);
12831 fprintf (file
, "%s:\n\tmflr r11\n", local_label_0
);
12832 fprintf (file
, "\taddis r11,r11,ha16(%s-%s)\n",
12833 lazy_ptr_name
, local_label_0
);
12834 fprintf (file
, "\tmtlr r0\n");
12835 fprintf (file
, "\tlwz r12,lo16(%s-%s)(r11)\n",
12836 lazy_ptr_name
, local_label_0
);
12837 fprintf (file
, "\tmtctr r12\n");
12838 fprintf (file
, "\taddi r11,r11,lo16(%s-%s)\n",
12839 lazy_ptr_name
, local_label_0
);
12840 fprintf (file
, "\tbctr\n");
12843 fprintf (file
, "non-pure not supported\n");
12845 machopic_lazy_symbol_ptr_section ();
12846 fprintf (file
, "%s:\n", lazy_ptr_name
);
12847 fprintf (file
, "\t.indirect_symbol %s\n", symbol_name
);
12848 fprintf (file
, "\t.long dyld_stub_binding_helper\n");
12851 /* Legitimize PIC addresses. If the address is already
12852 position-independent, we return ORIG. Newly generated
12853 position-independent addresses go into a reg. This is REG if non
12854 zero, otherwise we allocate register(s) as necessary. */
12856 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
12859 rs6000_machopic_legitimize_pic_address (orig
, mode
, reg
)
12861 enum machine_mode mode
;
12866 if (reg
== NULL
&& ! reload_in_progress
&& ! reload_completed
)
12867 reg
= gen_reg_rtx (Pmode
);
12869 if (GET_CODE (orig
) == CONST
)
12871 if (GET_CODE (XEXP (orig
, 0)) == PLUS
12872 && XEXP (XEXP (orig
, 0), 0) == pic_offset_table_rtx
)
12875 if (GET_CODE (XEXP (orig
, 0)) == PLUS
)
12878 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig
, 0), 0),
12881 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig
, 0), 1),
12887 if (GET_CODE (offset
) == CONST_INT
)
12889 if (SMALL_INT (offset
))
12890 return plus_constant (base
, INTVAL (offset
));
12891 else if (! reload_in_progress
&& ! reload_completed
)
12892 offset
= force_reg (Pmode
, offset
);
12895 rtx mem
= force_const_mem (Pmode
, orig
);
12896 return machopic_legitimize_pic_address (mem
, Pmode
, reg
);
12899 return gen_rtx (PLUS
, Pmode
, base
, offset
);
12902 /* Fall back on generic machopic code. */
12903 return machopic_legitimize_pic_address (orig
, mode
, reg
);
12906 /* This is just a placeholder to make linking work without having to
12907 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
12908 ever needed for Darwin (not too likely!) this would have to get a
12909 real definition. */
12916 #endif /* TARGET_MACHO */
12919 static unsigned int
12920 rs6000_elf_section_type_flags (decl
, name
, reloc
)
12926 = default_section_type_flags_1 (decl
, name
, reloc
,
12927 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
12929 if (TARGET_RELOCATABLE
)
12930 flags
|= SECTION_WRITE
;
12935 /* Record an element in the table of global constructors. SYMBOL is
12936 a SYMBOL_REF of the function to be called; PRIORITY is a number
12937 between 0 and MAX_INIT_PRIORITY.
12939 This differs from default_named_section_asm_out_constructor in
12940 that we have special handling for -mrelocatable. */
12943 rs6000_elf_asm_out_constructor (symbol
, priority
)
12947 const char *section
= ".ctors";
12950 if (priority
!= DEFAULT_INIT_PRIORITY
)
12952 sprintf (buf
, ".ctors.%.5u",
12953 /* Invert the numbering so the linker puts us in the proper
12954 order; constructors are run from right to left, and the
12955 linker sorts in increasing order. */
12956 MAX_INIT_PRIORITY
- priority
);
12960 named_section_flags (section
, SECTION_WRITE
);
12961 assemble_align (POINTER_SIZE
);
12963 if (TARGET_RELOCATABLE
)
12965 fputs ("\t.long (", asm_out_file
);
12966 output_addr_const (asm_out_file
, symbol
);
12967 fputs (")@fixup\n", asm_out_file
);
12970 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
12974 rs6000_elf_asm_out_destructor (symbol
, priority
)
12978 const char *section
= ".dtors";
12981 if (priority
!= DEFAULT_INIT_PRIORITY
)
12983 sprintf (buf
, ".dtors.%.5u",
12984 /* Invert the numbering so the linker puts us in the proper
12985 order; constructors are run from right to left, and the
12986 linker sorts in increasing order. */
12987 MAX_INIT_PRIORITY
- priority
);
12991 named_section_flags (section
, SECTION_WRITE
);
12992 assemble_align (POINTER_SIZE
);
12994 if (TARGET_RELOCATABLE
)
12996 fputs ("\t.long (", asm_out_file
);
12997 output_addr_const (asm_out_file
, symbol
);
12998 fputs (")@fixup\n", asm_out_file
);
13001 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
13007 rs6000_xcoff_asm_globalize_label (stream
, name
)
13011 fputs (GLOBAL_ASM_OP
, stream
);
13012 RS6000_OUTPUT_BASENAME (stream
, name
);
13013 putc ('\n', stream
);
13017 rs6000_xcoff_asm_named_section (name
, flags
)
13019 unsigned int flags
;
13022 static const char * const suffix
[3] = { "PR", "RO", "RW" };
13024 if (flags
& SECTION_CODE
)
13026 else if (flags
& SECTION_WRITE
)
13031 fprintf (asm_out_file
, "\t.csect %s%s[%s],%u\n",
13032 (flags
& SECTION_CODE
) ? "." : "",
13033 name
, suffix
[smclass
], flags
& SECTION_ENTSIZE
);
13037 rs6000_xcoff_select_section (decl
, reloc
, align
)
13040 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
;
13042 if (decl_readonly_section_1 (decl
, reloc
, 1))
13044 if (TREE_PUBLIC (decl
))
13045 read_only_data_section ();
13047 read_only_private_data_section ();
13051 if (TREE_PUBLIC (decl
))
13054 private_data_section ();
13059 rs6000_xcoff_unique_section (decl
, reloc
)
13061 int reloc ATTRIBUTE_UNUSED
;
13065 /* Use select_section for private and uninitialized data. */
13066 if (!TREE_PUBLIC (decl
)
13067 || DECL_COMMON (decl
)
13068 || DECL_INITIAL (decl
) == NULL_TREE
13069 || DECL_INITIAL (decl
) == error_mark_node
13070 || (flag_zero_initialized_in_bss
13071 && initializer_zerop (DECL_INITIAL (decl
))))
13074 name
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
));
13075 name
= (*targetm
.strip_name_encoding
) (name
);
13076 DECL_SECTION_NAME (decl
) = build_string (strlen (name
), name
);
13079 /* Select section for constant in constant pool.
13081 On RS/6000, all constants are in the private read-only data area.
13082 However, if this is being placed in the TOC it must be output as a
13086 rs6000_xcoff_select_rtx_section (mode
, x
, align
)
13087 enum machine_mode mode
;
13089 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
;
13091 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x
, mode
))
13094 read_only_private_data_section ();
/* Remove any trailing [DS] or the like from the symbol name.  */

static const char *
rs6000_xcoff_strip_name_encoding (name)
     const char *name;
{
  size_t len;
  if (*name == '*')
    name++;
  len = strlen (name);
  /* A trailing ']' means a 4-character "[XX]" mapping-class suffix.  */
  if (name[len - 1] == ']')
    return ggc_alloc_string (name, len - 4);
  else
    return name;
}
13113 /* Section attributes. AIX is always PIC. */
13115 static unsigned int
13116 rs6000_xcoff_section_type_flags (decl
, name
, reloc
)
13121 unsigned int align
;
13122 unsigned int flags
= default_section_type_flags_1 (decl
, name
, reloc
, 1);
13124 /* Align to at least UNIT size. */
13125 if (flags
& SECTION_CODE
)
13126 align
= MIN_UNITS_PER_WORD
;
13128 /* Increase alignment of large objects if not already stricter. */
13129 align
= MAX ((DECL_ALIGN (decl
) / BITS_PER_UNIT
),
13130 int_size_in_bytes (TREE_TYPE (decl
)) > MIN_UNITS_PER_WORD
13131 ? UNITS_PER_FP_WORD
: MIN_UNITS_PER_WORD
);
13133 return flags
| (exact_log2 (align
) & SECTION_ENTSIZE
);
13136 #endif /* TARGET_XCOFF */
13138 /* Note that this is also used for PPC64 Linux. */
13141 rs6000_xcoff_encode_section_info (decl
, first
)
13143 int first ATTRIBUTE_UNUSED
;
13145 if (TREE_CODE (decl
) == FUNCTION_DECL
13146 && (*targetm
.binds_local_p
) (decl
))
13147 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl
), 0)) = 1;
13150 /* Cross-module name binding. For AIX and PPC64 Linux, which always are
13151 PIC, use private copy of flag_pic. */
13154 rs6000_binds_local_p (decl
)
13157 return default_binds_local_p_1 (decl
, flag_pic
|| rs6000_flag_pic
);