1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
41 #include "basic-block.h"
42 #include "integrate.h"
48 #include "target-def.h"
49 #include "langhooks.h"
#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

/* Minimum/maximum of two values.  NOTE: each argument is evaluated
   more than once, so pass only side-effect-free expressions.  */
#define min(A,B)	((A) < (B) ? (A) : (B))
#define max(A,B)	((A) > (B) ? (A) : (B))
61 enum processor_type rs6000_cpu
;
62 struct rs6000_cpu_select rs6000_select
[3] =
64 /* switch name, tune arch */
65 { (const char *)0, "--with-cpu=", 1, 1 },
66 { (const char *)0, "-mcpu=", 1, 1 },
67 { (const char *)0, "-mtune=", 1, 0 },
70 /* Size of long double */
71 const char *rs6000_long_double_size_string
;
72 int rs6000_long_double_type_size
;
74 /* Whether -mabi=altivec has appeared */
75 int rs6000_altivec_abi
;
77 /* Whether VRSAVE instructions should be generated. */
78 int rs6000_altivec_vrsave
;
80 /* String from -mvrsave= option. */
81 const char *rs6000_altivec_vrsave_string
;
83 /* Nonzero if we want SPE ABI extensions. */
86 /* Whether isel instructions should be generated. */
89 /* Nonzero if we have FPRs. */
92 /* String from -misel=. */
93 const char *rs6000_isel_string
;
95 /* Set to non-zero once AIX common-mode calls have been defined. */
96 static int common_mode_defined
;
98 /* Private copy of original value of flag_pic for ABI_AIX. */
99 static int rs6000_flag_pic
;
101 /* Save information from a "cmpxx" operation until the branch or scc is
103 rtx rs6000_compare_op0
, rs6000_compare_op1
;
104 int rs6000_compare_fp_p
;
106 /* Label number of label created for -mrelocatable, to call to so we can
107 get the address of the GOT section */
108 int rs6000_pic_labelno
;
111 /* Which abi to adhere to */
112 const char *rs6000_abi_name
= RS6000_ABI_NAME
;
114 /* Semantics of the small data area */
115 enum rs6000_sdata_type rs6000_sdata
= SDATA_DATA
;
117 /* Which small data model to use */
118 const char *rs6000_sdata_name
= (char *)0;
120 /* Counter for labels which are to be placed in .fixup. */
121 int fixuplabelno
= 0;
124 /* ABI enumeration available for subtarget to use. */
125 enum rs6000_abi rs6000_current_abi
;
127 /* ABI string from -mabi= option. */
128 const char *rs6000_abi_string
;
131 const char *rs6000_debug_name
;
132 int rs6000_debug_stack
; /* debug stack applications */
133 int rs6000_debug_arg
; /* debug argument handling */
135 const char *rs6000_traceback_name
;
137 traceback_default
= 0,
143 /* Flag to say the TOC is initialized */
145 char toc_label_name
[10];
147 /* Alias set for saves and restores from the rs6000 stack. */
148 static int rs6000_sr_alias_set
;
150 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
151 The only place that looks at this is rs6000_set_default_type_attributes;
152 everywhere else should rely on the presence or absence of a longcall
153 attribute on the function declaration. */
154 int rs6000_default_long_calls
;
155 const char *rs6000_longcall_switch
;
157 struct builtin_description
159 /* mask is not const because we're going to alter it below. This
160 nonsense will go away when we rewrite the -march infrastructure
161 to give us more target flag bits. */
163 const enum insn_code icode
;
164 const char *const name
;
165 const enum rs6000_builtins code
;
168 static void rs6000_add_gc_roots
PARAMS ((void));
169 static int num_insns_constant_wide
PARAMS ((HOST_WIDE_INT
));
170 static void validate_condition_mode
171 PARAMS ((enum rtx_code
, enum machine_mode
));
172 static rtx rs6000_generate_compare
PARAMS ((enum rtx_code
));
173 static void rs6000_maybe_dead
PARAMS ((rtx
));
174 static void rs6000_emit_stack_tie
PARAMS ((void));
175 static void rs6000_frame_related
PARAMS ((rtx
, rtx
, HOST_WIDE_INT
, rtx
, rtx
));
176 static void emit_frame_save
PARAMS ((rtx
, rtx
, enum machine_mode
,
177 unsigned int, int, int));
178 static rtx gen_frame_mem_offset
PARAMS ((enum machine_mode
, rtx
, int));
179 static void rs6000_emit_allocate_stack
PARAMS ((HOST_WIDE_INT
, int));
180 static unsigned rs6000_hash_constant
PARAMS ((rtx
));
181 static unsigned toc_hash_function
PARAMS ((const void *));
182 static int toc_hash_eq
PARAMS ((const void *, const void *));
183 static int toc_hash_mark_entry
PARAMS ((void **, void *));
184 static void toc_hash_mark_table
PARAMS ((void *));
185 static int constant_pool_expr_1
PARAMS ((rtx
, int *, int *));
186 static struct machine_function
* rs6000_init_machine_status
PARAMS ((void));
187 static bool rs6000_assemble_integer
PARAMS ((rtx
, unsigned int, int));
188 #ifdef HAVE_GAS_HIDDEN
189 static void rs6000_assemble_visibility
PARAMS ((tree
, const char *));
191 static int rs6000_ra_ever_killed
PARAMS ((void));
192 static tree rs6000_handle_longcall_attribute
PARAMS ((tree
*, tree
, tree
, int, bool *));
193 const struct attribute_spec rs6000_attribute_table
[];
194 static void rs6000_set_default_type_attributes
PARAMS ((tree
));
195 static void rs6000_output_function_prologue
PARAMS ((FILE *, HOST_WIDE_INT
));
196 static void rs6000_output_function_epilogue
PARAMS ((FILE *, HOST_WIDE_INT
));
197 static rtx rs6000_emit_set_long_const
PARAMS ((rtx
,
198 HOST_WIDE_INT
, HOST_WIDE_INT
));
200 static unsigned int rs6000_elf_section_type_flags
PARAMS ((tree
, const char *,
202 static void rs6000_elf_asm_out_constructor
PARAMS ((rtx
, int));
203 static void rs6000_elf_asm_out_destructor
PARAMS ((rtx
, int));
204 static void rs6000_elf_select_section
PARAMS ((tree
, int,
205 unsigned HOST_WIDE_INT
));
206 static void rs6000_elf_unique_section
PARAMS ((tree
, int));
207 static void rs6000_elf_select_rtx_section
PARAMS ((enum machine_mode
, rtx
,
208 unsigned HOST_WIDE_INT
));
209 static void rs6000_elf_encode_section_info
PARAMS ((tree
, int))
211 static const char *rs6000_elf_strip_name_encoding
PARAMS ((const char *));
212 static bool rs6000_elf_in_small_data_p
PARAMS ((tree
));
215 static void rs6000_xcoff_asm_globalize_label
PARAMS ((FILE *, const char *));
216 static void rs6000_xcoff_asm_named_section
PARAMS ((const char *, unsigned int));
217 static void rs6000_xcoff_select_section
PARAMS ((tree
, int,
218 unsigned HOST_WIDE_INT
));
219 static void rs6000_xcoff_unique_section
PARAMS ((tree
, int));
220 static void rs6000_xcoff_select_rtx_section
PARAMS ((enum machine_mode
, rtx
,
221 unsigned HOST_WIDE_INT
));
222 static const char * rs6000_xcoff_strip_name_encoding
PARAMS ((const char *));
223 static unsigned int rs6000_xcoff_section_type_flags
PARAMS ((tree
, const char *, int));
225 static void rs6000_xcoff_encode_section_info
PARAMS ((tree
, int))
227 static bool rs6000_binds_local_p
PARAMS ((tree
));
228 static int rs6000_adjust_cost
PARAMS ((rtx
, rtx
, rtx
, int));
229 static int rs6000_adjust_priority
PARAMS ((rtx
, int));
230 static int rs6000_issue_rate
PARAMS ((void));
232 static void rs6000_init_builtins
PARAMS ((void));
233 static rtx rs6000_expand_unop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
234 static rtx rs6000_expand_binop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
235 static rtx rs6000_expand_ternop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
236 static rtx rs6000_expand_builtin
PARAMS ((tree
, rtx
, rtx
, enum machine_mode
, int));
237 static void altivec_init_builtins
PARAMS ((void));
238 static void rs6000_common_init_builtins
PARAMS ((void));
240 static void enable_mask_for_builtins
PARAMS ((struct builtin_description
*,
241 int, enum rs6000_builtins
,
242 enum rs6000_builtins
));
243 static void spe_init_builtins
PARAMS ((void));
244 static rtx spe_expand_builtin
PARAMS ((tree
, rtx
, bool *));
245 static rtx spe_expand_predicate_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
246 static rtx spe_expand_evsel_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
247 static int rs6000_emit_int_cmove
PARAMS ((rtx
, rtx
, rtx
, rtx
));
249 static rtx altivec_expand_builtin
PARAMS ((tree
, rtx
, bool *));
250 static rtx altivec_expand_ld_builtin
PARAMS ((tree
, rtx
, bool *));
251 static rtx altivec_expand_st_builtin
PARAMS ((tree
, rtx
, bool *));
252 static rtx altivec_expand_dst_builtin
PARAMS ((tree
, rtx
, bool *));
253 static rtx altivec_expand_abs_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
254 static rtx altivec_expand_predicate_builtin
PARAMS ((enum insn_code
, const char *, tree
, rtx
));
255 static rtx altivec_expand_stv_builtin
PARAMS ((enum insn_code
, tree
));
256 static void rs6000_parse_abi_options
PARAMS ((void));
257 static void rs6000_parse_vrsave_option
PARAMS ((void));
258 static void rs6000_parse_isel_option
PARAMS ((void));
259 static int first_altivec_reg_to_save
PARAMS ((void));
260 static unsigned int compute_vrsave_mask
PARAMS ((void));
261 static void is_altivec_return_reg
PARAMS ((rtx
, void *));
262 static rtx generate_set_vrsave
PARAMS ((rtx
, rs6000_stack_t
*, int));
263 static void altivec_frame_fixup
PARAMS ((rtx
, rtx
, HOST_WIDE_INT
));
264 static int easy_vector_constant
PARAMS ((rtx
));
266 /* Default register names. */
267 char rs6000_reg_names
[][8] =
269 "0", "1", "2", "3", "4", "5", "6", "7",
270 "8", "9", "10", "11", "12", "13", "14", "15",
271 "16", "17", "18", "19", "20", "21", "22", "23",
272 "24", "25", "26", "27", "28", "29", "30", "31",
273 "0", "1", "2", "3", "4", "5", "6", "7",
274 "8", "9", "10", "11", "12", "13", "14", "15",
275 "16", "17", "18", "19", "20", "21", "22", "23",
276 "24", "25", "26", "27", "28", "29", "30", "31",
277 "mq", "lr", "ctr","ap",
278 "0", "1", "2", "3", "4", "5", "6", "7",
280 /* AltiVec registers. */
281 "0", "1", "2", "3", "4", "5", "6", "7",
282 "8", "9", "10", "11", "12", "13", "14", "15",
283 "16", "17", "18", "19", "20", "21", "22", "23",
284 "24", "25", "26", "27", "28", "29", "30", "31",
288 #ifdef TARGET_REGNAMES
289 static const char alt_reg_names
[][8] =
291 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
292 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
293 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
294 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
295 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
296 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
297 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
298 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
299 "mq", "lr", "ctr", "ap",
300 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
302 /* AltiVec registers. */
303 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
304 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
305 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
306 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
311 #ifndef MASK_STRICT_ALIGN
312 #define MASK_STRICT_ALIGN 0
315 /* Initialize the GCC target structure. */
316 #undef TARGET_ATTRIBUTE_TABLE
317 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
318 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
319 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
321 #undef TARGET_ASM_ALIGNED_DI_OP
322 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
324 /* Default unaligned ops are only provided for ELF. Find the ops needed
325 for non-ELF systems. */
326 #ifndef OBJECT_FORMAT_ELF
328 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
330 #undef TARGET_ASM_UNALIGNED_HI_OP
331 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
332 #undef TARGET_ASM_UNALIGNED_SI_OP
333 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
334 #undef TARGET_ASM_UNALIGNED_DI_OP
335 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
338 #undef TARGET_ASM_UNALIGNED_HI_OP
339 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
340 #undef TARGET_ASM_UNALIGNED_SI_OP
341 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
345 /* This hook deals with fixups for relocatable code and DI-mode objects
347 #undef TARGET_ASM_INTEGER
348 #define TARGET_ASM_INTEGER rs6000_assemble_integer
350 #ifdef HAVE_GAS_HIDDEN
351 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
352 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
355 #undef TARGET_ASM_FUNCTION_PROLOGUE
356 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
357 #undef TARGET_ASM_FUNCTION_EPILOGUE
358 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
360 #undef TARGET_SCHED_ISSUE_RATE
361 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
362 #undef TARGET_SCHED_ADJUST_COST
363 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
364 #undef TARGET_SCHED_ADJUST_PRIORITY
365 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
367 #undef TARGET_INIT_BUILTINS
368 #define TARGET_INIT_BUILTINS rs6000_init_builtins
370 #undef TARGET_EXPAND_BUILTIN
371 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
373 #undef TARGET_BINDS_LOCAL_P
374 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
376 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
377 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
379 struct gcc_target targetm
= TARGET_INITIALIZER
;
381 /* Override command line options. Mostly we process the processor
382 type and sometimes adjust other TARGET_ options. */
385 rs6000_override_options (default_cpu
)
386 const char *default_cpu
;
389 struct rs6000_cpu_select
*ptr
;
391 /* Simplify the entries below by making a mask for any POWER
392 variant and any PowerPC variant. */
394 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
395 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
396 | MASK_PPC_GFXOPT | MASK_POWERPC64)
397 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
401 const char *const name
; /* Canonical processor name. */
402 const enum processor_type processor
; /* Processor type enum value. */
403 const int target_enable
; /* Target flags to enable. */
404 const int target_disable
; /* Target flags to disable. */
405 } const processor_target_table
[]
406 = {{"common", PROCESSOR_COMMON
, MASK_NEW_MNEMONICS
,
407 POWER_MASKS
| POWERPC_MASKS
},
408 {"power", PROCESSOR_POWER
,
409 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
410 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
411 {"power2", PROCESSOR_POWER
,
412 MASK_POWER
| MASK_POWER2
| MASK_MULTIPLE
| MASK_STRING
,
413 POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
414 {"power3", PROCESSOR_PPC630
,
415 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
416 POWER_MASKS
| MASK_PPC_GPOPT
},
417 {"power4", PROCESSOR_POWER4
,
418 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
419 POWER_MASKS
| MASK_PPC_GPOPT
},
420 {"powerpc", PROCESSOR_POWERPC
,
421 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
422 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
423 {"powerpc64", PROCESSOR_POWERPC64
,
424 MASK_POWERPC
| MASK_POWERPC64
| MASK_NEW_MNEMONICS
,
425 POWER_MASKS
| POWERPC_OPT_MASKS
},
426 {"rios", PROCESSOR_RIOS1
,
427 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
428 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
429 {"rios1", PROCESSOR_RIOS1
,
430 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
431 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
432 {"rsc", PROCESSOR_PPC601
,
433 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
434 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
435 {"rsc1", PROCESSOR_PPC601
,
436 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
437 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
438 {"rios2", PROCESSOR_RIOS2
,
439 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
| MASK_POWER2
,
440 POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
441 {"rs64a", PROCESSOR_RS64A
,
442 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
443 POWER_MASKS
| POWERPC_OPT_MASKS
},
444 {"401", PROCESSOR_PPC403
,
445 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
446 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
447 {"403", PROCESSOR_PPC403
,
448 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
| MASK_STRICT_ALIGN
,
449 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
450 {"405", PROCESSOR_PPC405
,
451 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
452 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
453 {"505", PROCESSOR_MPCCORE
,
454 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
455 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
456 {"601", PROCESSOR_PPC601
,
457 MASK_POWER
| MASK_POWERPC
| MASK_NEW_MNEMONICS
| MASK_MULTIPLE
| MASK_STRING
,
458 MASK_POWER2
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
459 {"602", PROCESSOR_PPC603
,
460 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
461 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
462 {"603", PROCESSOR_PPC603
,
463 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
464 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
465 {"603e", PROCESSOR_PPC603
,
466 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
467 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
468 {"ec603e", PROCESSOR_PPC603
,
469 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
470 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
471 {"604", PROCESSOR_PPC604
,
472 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
473 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
474 {"604e", PROCESSOR_PPC604e
,
475 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
476 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
477 {"620", PROCESSOR_PPC620
,
478 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
479 POWER_MASKS
| MASK_PPC_GPOPT
},
480 {"630", PROCESSOR_PPC630
,
481 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
482 POWER_MASKS
| MASK_PPC_GPOPT
},
483 {"740", PROCESSOR_PPC750
,
484 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
485 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
486 {"750", PROCESSOR_PPC750
,
487 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
488 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
489 {"7400", PROCESSOR_PPC7400
,
490 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
491 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
492 {"7450", PROCESSOR_PPC7450
,
493 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
494 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
495 {"8540", PROCESSOR_PPC8540
,
496 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
497 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
498 {"801", PROCESSOR_MPCCORE
,
499 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
500 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
501 {"821", PROCESSOR_MPCCORE
,
502 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
503 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
504 {"823", PROCESSOR_MPCCORE
,
505 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
506 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
507 {"860", PROCESSOR_MPCCORE
,
508 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
509 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
}};
511 const size_t ptt_size
= ARRAY_SIZE (processor_target_table
);
513 /* Save current -mmultiple/-mno-multiple status. */
514 int multiple
= TARGET_MULTIPLE
;
515 /* Save current -mstring/-mno-string status. */
516 int string
= TARGET_STRING
;
518 /* Identify the processor type. */
519 rs6000_select
[0].string
= default_cpu
;
520 rs6000_cpu
= TARGET_POWERPC64
? PROCESSOR_DEFAULT64
: PROCESSOR_DEFAULT
;
522 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
524 ptr
= &rs6000_select
[i
];
525 if (ptr
->string
!= (char *)0 && ptr
->string
[0] != '\0')
527 for (j
= 0; j
< ptt_size
; j
++)
528 if (! strcmp (ptr
->string
, processor_target_table
[j
].name
))
531 rs6000_cpu
= processor_target_table
[j
].processor
;
535 target_flags
|= processor_target_table
[j
].target_enable
;
536 target_flags
&= ~processor_target_table
[j
].target_disable
;
542 error ("bad value (%s) for %s switch", ptr
->string
, ptr
->name
);
546 if (rs6000_cpu
== PROCESSOR_PPC8540
)
549 /* If we are optimizing big endian systems for space, use the store
550 multiple instructions. */
551 if (BYTES_BIG_ENDIAN
&& optimize_size
)
552 target_flags
|= MASK_MULTIPLE
;
554 /* If -mmultiple or -mno-multiple was explicitly used, don't
555 override with the processor default */
556 if (TARGET_MULTIPLE_SET
)
557 target_flags
= (target_flags
& ~MASK_MULTIPLE
) | multiple
;
559 /* If -mstring or -mno-string was explicitly used, don't override
560 with the processor default. */
561 if (TARGET_STRING_SET
)
562 target_flags
= (target_flags
& ~MASK_STRING
) | string
;
564 /* Don't allow -mmultiple or -mstring on little endian systems
565 unless the cpu is a 750, because the hardware doesn't support the
566 instructions used in little endian mode, and causes an alignment
567 trap. The 750 does not cause an alignment trap (except when the
568 target is unaligned). */
570 if (! BYTES_BIG_ENDIAN
&& rs6000_cpu
!= PROCESSOR_PPC750
)
574 target_flags
&= ~MASK_MULTIPLE
;
575 if (TARGET_MULTIPLE_SET
)
576 warning ("-mmultiple is not supported on little endian systems");
581 target_flags
&= ~MASK_STRING
;
582 if (TARGET_STRING_SET
)
583 warning ("-mstring is not supported on little endian systems");
587 if (flag_pic
!= 0 && DEFAULT_ABI
== ABI_AIX
)
589 rs6000_flag_pic
= flag_pic
;
593 #ifdef XCOFF_DEBUGGING_INFO
594 if (flag_function_sections
&& (write_symbols
!= NO_DEBUG
)
595 && DEFAULT_ABI
== ABI_AIX
)
597 warning ("-ffunction-sections disabled on AIX when debugging");
598 flag_function_sections
= 0;
601 if (flag_data_sections
&& (DEFAULT_ABI
== ABI_AIX
))
603 warning ("-fdata-sections not supported on AIX");
604 flag_data_sections
= 0;
608 /* For Darwin, always silently make -fpic and -fPIC identical. */
609 if (flag_pic
== 1 && DEFAULT_ABI
== ABI_DARWIN
)
612 /* Set debug flags */
613 if (rs6000_debug_name
)
615 if (! strcmp (rs6000_debug_name
, "all"))
616 rs6000_debug_stack
= rs6000_debug_arg
= 1;
617 else if (! strcmp (rs6000_debug_name
, "stack"))
618 rs6000_debug_stack
= 1;
619 else if (! strcmp (rs6000_debug_name
, "arg"))
620 rs6000_debug_arg
= 1;
622 error ("unknown -mdebug-%s switch", rs6000_debug_name
);
625 if (rs6000_traceback_name
)
627 if (! strncmp (rs6000_traceback_name
, "full", 4))
628 rs6000_traceback
= traceback_full
;
629 else if (! strncmp (rs6000_traceback_name
, "part", 4))
630 rs6000_traceback
= traceback_part
;
631 else if (! strncmp (rs6000_traceback_name
, "no", 2))
632 rs6000_traceback
= traceback_none
;
634 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
635 rs6000_traceback_name
);
638 /* Set size of long double */
639 rs6000_long_double_type_size
= 64;
640 if (rs6000_long_double_size_string
)
643 int size
= strtol (rs6000_long_double_size_string
, &tail
, 10);
644 if (*tail
!= '\0' || (size
!= 64 && size
!= 128))
645 error ("Unknown switch -mlong-double-%s",
646 rs6000_long_double_size_string
);
648 rs6000_long_double_type_size
= size
;
651 /* Handle -mabi= options. */
652 rs6000_parse_abi_options ();
654 /* Handle -mvrsave= option. */
655 rs6000_parse_vrsave_option ();
657 /* Handle -misel= option. */
658 rs6000_parse_isel_option ();
660 #ifdef SUBTARGET_OVERRIDE_OPTIONS
661 SUBTARGET_OVERRIDE_OPTIONS
;
663 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
664 SUBSUBTARGET_OVERRIDE_OPTIONS
;
667 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
668 using TARGET_OPTIONS to handle a toggle switch, but we're out of
669 bits in target_flags so TARGET_SWITCHES cannot be used.
670 Assumption here is that rs6000_longcall_switch points into the
671 text of the complete option, rather than being a copy, so we can
672 scan back for the presence or absence of the no- modifier. */
673 if (rs6000_longcall_switch
)
675 const char *base
= rs6000_longcall_switch
;
676 while (base
[-1] != 'm') base
--;
678 if (*rs6000_longcall_switch
!= '\0')
679 error ("invalid option `%s'", base
);
680 rs6000_default_long_calls
= (base
[0] != 'n');
683 #ifdef TARGET_REGNAMES
684 /* If the user desires alternate register names, copy in the
685 alternate names now. */
687 memcpy (rs6000_reg_names
, alt_reg_names
, sizeof (rs6000_reg_names
));
690 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
691 If -maix-struct-return or -msvr4-struct-return was explicitly
692 used, don't override with the ABI default. */
693 if (!(target_flags
& MASK_AIX_STRUCT_RET_SET
))
695 if (DEFAULT_ABI
== ABI_V4
&& !DRAFT_V4_STRUCT_RET
)
696 target_flags
= (target_flags
& ~MASK_AIX_STRUCT_RET
);
698 target_flags
|= MASK_AIX_STRUCT_RET
;
701 /* Register global variables with the garbage collector. */
702 rs6000_add_gc_roots ();
704 /* Allocate an alias set for register saves & restores from stack. */
705 rs6000_sr_alias_set
= new_alias_set ();
708 ASM_GENERATE_INTERNAL_LABEL (toc_label_name
, "LCTOC", 1);
710 /* We can only guarantee the availability of DI pseudo-ops when
711 assembling for 64-bit targets. */
714 targetm
.asm_out
.aligned_op
.di
= NULL
;
715 targetm
.asm_out
.unaligned_op
.di
= NULL
;
718 /* Arrange to save and restore machine status around nested functions. */
719 init_machine_status
= rs6000_init_machine_status
;
722 /* Handle -misel= option. */
724 rs6000_parse_isel_option ()
726 if (rs6000_isel_string
== 0)
728 else if (! strcmp (rs6000_isel_string
, "yes"))
730 else if (! strcmp (rs6000_isel_string
, "no"))
733 error ("unknown -misel= option specified: '%s'",
737 /* Handle -mvrsave= options. */
739 rs6000_parse_vrsave_option ()
741 /* Generate VRSAVE instructions by default. */
742 if (rs6000_altivec_vrsave_string
== 0
743 || ! strcmp (rs6000_altivec_vrsave_string
, "yes"))
744 rs6000_altivec_vrsave
= 1;
745 else if (! strcmp (rs6000_altivec_vrsave_string
, "no"))
746 rs6000_altivec_vrsave
= 0;
748 error ("unknown -mvrsave= option specified: '%s'",
749 rs6000_altivec_vrsave_string
);
752 /* Handle -mabi= options. */
754 rs6000_parse_abi_options ()
756 if (rs6000_abi_string
== 0)
758 else if (! strcmp (rs6000_abi_string
, "altivec"))
759 rs6000_altivec_abi
= 1;
760 else if (! strcmp (rs6000_abi_string
, "no-altivec"))
761 rs6000_altivec_abi
= 0;
762 else if (! strcmp (rs6000_abi_string
, "spe"))
764 else if (! strcmp (rs6000_abi_string
, "no-spe"))
767 error ("unknown ABI specified: '%s'", rs6000_abi_string
);
771 optimization_options (level
, size
)
772 int level ATTRIBUTE_UNUSED
;
773 int size ATTRIBUTE_UNUSED
;
777 /* Do anything needed at the start of the asm file. */
780 rs6000_file_start (file
, default_cpu
)
782 const char *default_cpu
;
786 const char *start
= buffer
;
787 struct rs6000_cpu_select
*ptr
;
789 if (flag_verbose_asm
)
791 sprintf (buffer
, "\n%s rs6000/powerpc options:", ASM_COMMENT_START
);
792 rs6000_select
[0].string
= default_cpu
;
794 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
796 ptr
= &rs6000_select
[i
];
797 if (ptr
->string
!= (char *)0 && ptr
->string
[0] != '\0')
799 fprintf (file
, "%s %s%s", start
, ptr
->name
, ptr
->string
);
805 switch (rs6000_sdata
)
807 case SDATA_NONE
: fprintf (file
, "%s -msdata=none", start
); start
= ""; break;
808 case SDATA_DATA
: fprintf (file
, "%s -msdata=data", start
); start
= ""; break;
809 case SDATA_SYSV
: fprintf (file
, "%s -msdata=sysv", start
); start
= ""; break;
810 case SDATA_EABI
: fprintf (file
, "%s -msdata=eabi", start
); start
= ""; break;
813 if (rs6000_sdata
&& g_switch_value
)
815 fprintf (file
, "%s -G %d", start
, g_switch_value
);
825 /* Return non-zero if this function is known to have a null epilogue. */
830 if (reload_completed
)
832 rs6000_stack_t
*info
= rs6000_stack_info ();
834 if (info
->first_gp_reg_save
== 32
835 && info
->first_fp_reg_save
== 64
836 && info
->first_altivec_reg_save
== LAST_ALTIVEC_REGNO
+ 1
839 && info
->vrsave_mask
== 0
847 /* Returns 1 always. */
850 any_operand (op
, mode
)
851 rtx op ATTRIBUTE_UNUSED
;
852 enum machine_mode mode ATTRIBUTE_UNUSED
;
857 /* Returns 1 if op is the count register. */
859 count_register_operand (op
, mode
)
861 enum machine_mode mode ATTRIBUTE_UNUSED
;
863 if (GET_CODE (op
) != REG
)
866 if (REGNO (op
) == COUNT_REGISTER_REGNUM
)
869 if (REGNO (op
) > FIRST_PSEUDO_REGISTER
)
875 /* Returns 1 if op is an altivec register. */
877 altivec_register_operand (op
, mode
)
879 enum machine_mode mode ATTRIBUTE_UNUSED
;
882 return (register_operand (op
, mode
)
883 && (GET_CODE (op
) != REG
884 || REGNO (op
) > FIRST_PSEUDO_REGISTER
885 || ALTIVEC_REGNO_P (REGNO (op
))));
889 xer_operand (op
, mode
)
891 enum machine_mode mode ATTRIBUTE_UNUSED
;
893 if (GET_CODE (op
) != REG
)
896 if (XER_REGNO_P (REGNO (op
)))
902 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
903 by such constants completes more quickly. */
906 s8bit_cint_operand (op
, mode
)
908 enum machine_mode mode ATTRIBUTE_UNUSED
;
910 return ( GET_CODE (op
) == CONST_INT
911 && (INTVAL (op
) >= -128 && INTVAL (op
) <= 127));
914 /* Return 1 if OP is a constant that can fit in a D field. */
917 short_cint_operand (op
, mode
)
919 enum machine_mode mode ATTRIBUTE_UNUSED
;
921 return (GET_CODE (op
) == CONST_INT
922 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I'));
925 /* Similar for an unsigned D field. */
928 u_short_cint_operand (op
, mode
)
930 enum machine_mode mode ATTRIBUTE_UNUSED
;
932 return (GET_CODE (op
) == CONST_INT
933 && CONST_OK_FOR_LETTER_P (INTVAL (op
) & GET_MODE_MASK (mode
), 'K'));
936 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
939 non_short_cint_operand (op
, mode
)
941 enum machine_mode mode ATTRIBUTE_UNUSED
;
943 return (GET_CODE (op
) == CONST_INT
944 && (unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x8000) >= 0x10000);
947 /* Returns 1 if OP is a CONST_INT that is a positive value
948 and an exact power of 2. */
951 exact_log2_cint_operand (op
, mode
)
953 enum machine_mode mode ATTRIBUTE_UNUSED
;
955 return (GET_CODE (op
) == CONST_INT
957 && exact_log2 (INTVAL (op
)) >= 0);
960 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
964 gpc_reg_operand (op
, mode
)
966 enum machine_mode mode
;
968 return (register_operand (op
, mode
)
969 && (GET_CODE (op
) != REG
970 || (REGNO (op
) >= ARG_POINTER_REGNUM
971 && !XER_REGNO_P (REGNO (op
)))
972 || REGNO (op
) < MQ_REGNO
));
975 /* Returns 1 if OP is either a pseudo-register or a register denoting a
979 cc_reg_operand (op
, mode
)
981 enum machine_mode mode
;
983 return (register_operand (op
, mode
)
984 && (GET_CODE (op
) != REG
985 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
986 || CR_REGNO_P (REGNO (op
))));
989 /* Returns 1 if OP is either a pseudo-register or a register denoting a
990 CR field that isn't CR0. */
993 cc_reg_not_cr0_operand (op
, mode
)
995 enum machine_mode mode
;
997 return (register_operand (op
, mode
)
998 && (GET_CODE (op
) != REG
999 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
1000 || CR_REGNO_NOT_CR0_P (REGNO (op
))));
1003 /* Returns 1 if OP is either a constant integer valid for a D-field or
1004 a non-special register. If a register, it must be in the proper
1005 mode unless MODE is VOIDmode. */
1008 reg_or_short_operand (op
, mode
)
1010 enum machine_mode mode
;
1012 return short_cint_operand (op
, mode
) || gpc_reg_operand (op
, mode
);
1015 /* Similar, except check if the negation of the constant would be
1016 valid for a D-field. */
1019 reg_or_neg_short_operand (op
, mode
)
1021 enum machine_mode mode
;
1023 if (GET_CODE (op
) == CONST_INT
)
1024 return CONST_OK_FOR_LETTER_P (INTVAL (op
), 'P');
1026 return gpc_reg_operand (op
, mode
);
1029 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1030 a non-special register. If a register, it must be in the proper
1031 mode unless MODE is VOIDmode. */
1034 reg_or_aligned_short_operand (op
, mode
)
1036 enum machine_mode mode
;
1038 if (gpc_reg_operand (op
, mode
))
1040 else if (short_cint_operand (op
, mode
) && !(INTVAL (op
) & 3))
1047 /* Return 1 if the operand is either a register or an integer whose
1048 high-order 16 bits are zero. */
1051 reg_or_u_short_operand (op
, mode
)
1053 enum machine_mode mode
;
1055 return u_short_cint_operand (op
, mode
) || gpc_reg_operand (op
, mode
);
1058 /* Return 1 is the operand is either a non-special register or ANY
1059 constant integer. */
1062 reg_or_cint_operand (op
, mode
)
1064 enum machine_mode mode
;
1066 return (GET_CODE (op
) == CONST_INT
|| gpc_reg_operand (op
, mode
));
1069 /* Return 1 is the operand is either a non-special register or ANY
1070 32-bit signed constant integer. */
1073 reg_or_arith_cint_operand (op
, mode
)
1075 enum machine_mode mode
;
1077 return (gpc_reg_operand (op
, mode
)
1078 || (GET_CODE (op
) == CONST_INT
1079 #if HOST_BITS_PER_WIDE_INT != 32
1080 && ((unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x80000000)
1081 < (unsigned HOST_WIDE_INT
) 0x100000000ll
)
1086 /* Return 1 is the operand is either a non-special register or a 32-bit
1087 signed constant integer valid for 64-bit addition. */
1090 reg_or_add_cint64_operand (op
, mode
)
1092 enum machine_mode mode
;
1094 return (gpc_reg_operand (op
, mode
)
1095 || (GET_CODE (op
) == CONST_INT
1096 #if HOST_BITS_PER_WIDE_INT == 32
1097 && INTVAL (op
) < 0x7fff8000
1099 && ((unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x80008000)
1105 /* Return 1 is the operand is either a non-special register or a 32-bit
1106 signed constant integer valid for 64-bit subtraction. */
1109 reg_or_sub_cint64_operand (op
, mode
)
1111 enum machine_mode mode
;
1113 return (gpc_reg_operand (op
, mode
)
1114 || (GET_CODE (op
) == CONST_INT
1115 #if HOST_BITS_PER_WIDE_INT == 32
1116 && (- INTVAL (op
)) < 0x7fff8000
1118 && ((unsigned HOST_WIDE_INT
) ((- INTVAL (op
)) + 0x80008000)
1124 /* Return 1 is the operand is either a non-special register or ANY
1125 32-bit unsigned constant integer. */
1128 reg_or_logical_cint_operand (op
, mode
)
1130 enum machine_mode mode
;
1132 if (GET_CODE (op
) == CONST_INT
)
1134 if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
)
1136 if (GET_MODE_BITSIZE (mode
) <= 32)
1139 if (INTVAL (op
) < 0)
1143 return ((INTVAL (op
) & GET_MODE_MASK (mode
)
1144 & (~ (unsigned HOST_WIDE_INT
) 0xffffffff)) == 0);
1146 else if (GET_CODE (op
) == CONST_DOUBLE
)
1148 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
1152 return CONST_DOUBLE_HIGH (op
) == 0;
1155 return gpc_reg_operand (op
, mode
);
1158 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1161 got_operand (op
, mode
)
1163 enum machine_mode mode ATTRIBUTE_UNUSED
;
1165 return (GET_CODE (op
) == SYMBOL_REF
1166 || GET_CODE (op
) == CONST
1167 || GET_CODE (op
) == LABEL_REF
);
1170 /* Return 1 if the operand is a simple references that can be loaded via
1171 the GOT (labels involving addition aren't allowed). */
1174 got_no_const_operand (op
, mode
)
1176 enum machine_mode mode ATTRIBUTE_UNUSED
;
1178 return (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
);
1181 /* Return the number of instructions it takes to form a constant in an
1182 integer register. */
1185 num_insns_constant_wide (value
)
1186 HOST_WIDE_INT value
;
1188 /* signed constant loadable with {cal|addi} */
1189 if (CONST_OK_FOR_LETTER_P (value
, 'I'))
1192 /* constant loadable with {cau|addis} */
1193 else if (CONST_OK_FOR_LETTER_P (value
, 'L'))
1196 #if HOST_BITS_PER_WIDE_INT == 64
1197 else if (TARGET_POWERPC64
)
1199 HOST_WIDE_INT low
= ((value
& 0xffffffff) ^ 0x80000000) - 0x80000000;
1200 HOST_WIDE_INT high
= value
>> 31;
1202 if (high
== 0 || high
== -1)
1208 return num_insns_constant_wide (high
) + 1;
1210 return (num_insns_constant_wide (high
)
1211 + num_insns_constant_wide (low
) + 1);
1220 num_insns_constant (op
, mode
)
1222 enum machine_mode mode
;
1224 if (GET_CODE (op
) == CONST_INT
)
1226 #if HOST_BITS_PER_WIDE_INT == 64
1227 if ((INTVAL (op
) >> 31) != 0 && (INTVAL (op
) >> 31) != -1
1228 && mask64_operand (op
, mode
))
1232 return num_insns_constant_wide (INTVAL (op
));
1235 else if (GET_CODE (op
) == CONST_DOUBLE
&& mode
== SFmode
)
1240 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1241 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
1242 return num_insns_constant_wide ((HOST_WIDE_INT
) l
);
1245 else if (GET_CODE (op
) == CONST_DOUBLE
)
1251 int endian
= (WORDS_BIG_ENDIAN
== 0);
1253 if (mode
== VOIDmode
|| mode
== DImode
)
1255 high
= CONST_DOUBLE_HIGH (op
);
1256 low
= CONST_DOUBLE_LOW (op
);
1260 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1261 REAL_VALUE_TO_TARGET_DOUBLE (rv
, l
);
1263 low
= l
[1 - endian
];
1267 return (num_insns_constant_wide (low
)
1268 + num_insns_constant_wide (high
));
1272 if (high
== 0 && low
>= 0)
1273 return num_insns_constant_wide (low
);
1275 else if (high
== -1 && low
< 0)
1276 return num_insns_constant_wide (low
);
1278 else if (mask64_operand (op
, mode
))
1282 return num_insns_constant_wide (high
) + 1;
1285 return (num_insns_constant_wide (high
)
1286 + num_insns_constant_wide (low
) + 1);
1294 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1295 register with one instruction per word. We only do this if we can
1296 safely read CONST_DOUBLE_{LOW,HIGH}. */
1299 easy_fp_constant (op
, mode
)
1301 enum machine_mode mode
;
1303 if (GET_CODE (op
) != CONST_DOUBLE
1304 || GET_MODE (op
) != mode
1305 || (GET_MODE_CLASS (mode
) != MODE_FLOAT
&& mode
!= DImode
))
1308 /* Consider all constants with -msoft-float to be easy. */
1309 if ((TARGET_SOFT_FLOAT
|| !TARGET_FPRS
)
1313 /* If we are using V.4 style PIC, consider all constants to be hard. */
1314 if (flag_pic
&& DEFAULT_ABI
== ABI_V4
)
1317 #ifdef TARGET_RELOCATABLE
1318 /* Similarly if we are using -mrelocatable, consider all constants
1320 if (TARGET_RELOCATABLE
)
1329 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1330 REAL_VALUE_TO_TARGET_DOUBLE (rv
, k
);
1332 return (num_insns_constant_wide ((HOST_WIDE_INT
) k
[0]) == 1
1333 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[1]) == 1);
1336 else if (mode
== SFmode
)
1341 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1342 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
1344 return num_insns_constant_wide (l
) == 1;
1347 else if (mode
== DImode
)
1348 return ((TARGET_POWERPC64
1349 && GET_CODE (op
) == CONST_DOUBLE
&& CONST_DOUBLE_LOW (op
) == 0)
1350 || (num_insns_constant (op
, DImode
) <= 2));
1352 else if (mode
== SImode
)
1358 /* Return 1 if the operand is a CONST_INT and can be put into a
1359 register with one instruction. */
1362 easy_vector_constant (op
)
1368 if (GET_CODE (op
) != CONST_VECTOR
)
1371 units
= CONST_VECTOR_NUNITS (op
);
1373 /* We can generate 0 easily. Look for that. */
1374 for (i
= 0; i
< units
; ++i
)
1376 elt
= CONST_VECTOR_ELT (op
, i
);
1378 /* We could probably simplify this by just checking for equality
1379 with CONST0_RTX for the current mode, but let's be safe
1382 switch (GET_CODE (elt
))
1385 if (INTVAL (elt
) != 0)
1389 if (CONST_DOUBLE_LOW (elt
) != 0 || CONST_DOUBLE_HIGH (elt
) != 0)
1397 /* We could probably generate a few other constants trivially, but
1398 gcc doesn't generate them yet. FIXME later. */
1402 /* Return 1 if the operand is the constant 0. This works for scalars
1403 as well as vectors. */
1405 zero_constant (op
, mode
)
1407 enum machine_mode mode
;
1409 return op
== CONST0_RTX (mode
);
1412 /* Return 1 if the operand is 0.0. */
1414 zero_fp_constant (op
, mode
)
1416 enum machine_mode mode
;
1418 return GET_MODE_CLASS (mode
) == MODE_FLOAT
&& op
== CONST0_RTX (mode
);
1421 /* Return 1 if the operand is in volatile memory. Note that during
1422 the RTL generation phase, memory_operand does not return TRUE for
1423 volatile memory references. So this function allows us to
1424 recognize volatile references where its safe. */
1427 volatile_mem_operand (op
, mode
)
1429 enum machine_mode mode
;
1431 if (GET_CODE (op
) != MEM
)
1434 if (!MEM_VOLATILE_P (op
))
1437 if (mode
!= GET_MODE (op
))
1440 if (reload_completed
)
1441 return memory_operand (op
, mode
);
1443 if (reload_in_progress
)
1444 return strict_memory_address_p (mode
, XEXP (op
, 0));
1446 return memory_address_p (mode
, XEXP (op
, 0));
1449 /* Return 1 if the operand is an offsettable memory operand. */
1452 offsettable_mem_operand (op
, mode
)
1454 enum machine_mode mode
;
1456 return ((GET_CODE (op
) == MEM
)
1457 && offsettable_address_p (reload_completed
|| reload_in_progress
,
1458 mode
, XEXP (op
, 0)));
1461 /* Return 1 if the operand is either an easy FP constant (see above) or
1465 mem_or_easy_const_operand (op
, mode
)
1467 enum machine_mode mode
;
1469 return memory_operand (op
, mode
) || easy_fp_constant (op
, mode
);
1472 /* Return 1 if the operand is either a non-special register or an item
1473 that can be used as the operand of a `mode' add insn. */
1476 add_operand (op
, mode
)
1478 enum machine_mode mode
;
1480 if (GET_CODE (op
) == CONST_INT
)
1481 return (CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I')
1482 || CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'));
1484 return gpc_reg_operand (op
, mode
);
1487 /* Return 1 if OP is a constant but not a valid add_operand. */
1490 non_add_cint_operand (op
, mode
)
1492 enum machine_mode mode ATTRIBUTE_UNUSED
;
1494 return (GET_CODE (op
) == CONST_INT
1495 && !CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I')
1496 && !CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'));
1499 /* Return 1 if the operand is a non-special register or a constant that
1500 can be used as the operand of an OR or XOR insn on the RS/6000. */
1503 logical_operand (op
, mode
)
1505 enum machine_mode mode
;
1507 HOST_WIDE_INT opl
, oph
;
1509 if (gpc_reg_operand (op
, mode
))
1512 if (GET_CODE (op
) == CONST_INT
)
1514 opl
= INTVAL (op
) & GET_MODE_MASK (mode
);
1516 #if HOST_BITS_PER_WIDE_INT <= 32
1517 if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
&& opl
< 0)
1521 else if (GET_CODE (op
) == CONST_DOUBLE
)
1523 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1526 opl
= CONST_DOUBLE_LOW (op
);
1527 oph
= CONST_DOUBLE_HIGH (op
);
1534 return ((opl
& ~ (unsigned HOST_WIDE_INT
) 0xffff) == 0
1535 || (opl
& ~ (unsigned HOST_WIDE_INT
) 0xffff0000) == 0);
1538 /* Return 1 if C is a constant that is not a logical operand (as
1539 above), but could be split into one. */
1542 non_logical_cint_operand (op
, mode
)
1544 enum machine_mode mode
;
1546 return ((GET_CODE (op
) == CONST_INT
|| GET_CODE (op
) == CONST_DOUBLE
)
1547 && ! logical_operand (op
, mode
)
1548 && reg_or_logical_cint_operand (op
, mode
));
1551 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1552 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1553 Reject all ones and all zeros, since these should have been optimized
1554 away and confuse the making of MB and ME. */
1557 mask_operand (op
, mode
)
1559 enum machine_mode mode ATTRIBUTE_UNUSED
;
1561 HOST_WIDE_INT c
, lsb
;
1563 if (GET_CODE (op
) != CONST_INT
)
1568 /* Fail in 64-bit mode if the mask wraps around because the upper
1569 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1570 if (TARGET_POWERPC64
&& (c
& 0x80000001) == 0x80000001)
1573 /* We don't change the number of transitions by inverting,
1574 so make sure we start with the LS bit zero. */
1578 /* Reject all zeros or all ones. */
1582 /* Find the first transition. */
1585 /* Invert to look for a second transition. */
1588 /* Erase first transition. */
1591 /* Find the second transition (if any). */
1594 /* Match if all the bits above are 1's (or c is zero). */
1598 /* Return 1 for the PowerPC64 rlwinm corner case. */
1601 mask_operand_wrap (op
, mode
)
1603 enum machine_mode mode ATTRIBUTE_UNUSED
;
1605 HOST_WIDE_INT c
, lsb
;
1607 if (GET_CODE (op
) != CONST_INT
)
1612 if ((c
& 0x80000001) != 0x80000001)
1626 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1627 It is if there are no more than one 1->0 or 0->1 transitions.
1628 Reject all zeros, since zero should have been optimized away and
1629 confuses the making of MB and ME. */
1632 mask64_operand (op
, mode
)
1634 enum machine_mode mode ATTRIBUTE_UNUSED
;
1636 if (GET_CODE (op
) == CONST_INT
)
1638 HOST_WIDE_INT c
, lsb
;
1642 /* Reject all zeros. */
1646 /* We don't change the number of transitions by inverting,
1647 so make sure we start with the LS bit zero. */
1651 /* Find the transition, and check that all bits above are 1's. */
1658 /* Like mask64_operand, but allow up to three transitions. This
1659 predicate is used by insn patterns that generate two rldicl or
1660 rldicr machine insns. */
1663 mask64_2_operand (op
, mode
)
1665 enum machine_mode mode ATTRIBUTE_UNUSED
;
1667 if (GET_CODE (op
) == CONST_INT
)
1669 HOST_WIDE_INT c
, lsb
;
1673 /* Disallow all zeros. */
1677 /* We don't change the number of transitions by inverting,
1678 so make sure we start with the LS bit zero. */
1682 /* Find the first transition. */
1685 /* Invert to look for a second transition. */
1688 /* Erase first transition. */
1691 /* Find the second transition. */
1694 /* Invert to look for a third transition. */
1697 /* Erase second transition. */
1700 /* Find the third transition (if any). */
1703 /* Match if all the bits above are 1's (or c is zero). */
1709 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
1710 implement ANDing by the mask IN. */
1712 build_mask64_2_operands (in
, out
)
1716 #if HOST_BITS_PER_WIDE_INT >= 64
1717 unsigned HOST_WIDE_INT c
, lsb
, m1
, m2
;
1720 if (GET_CODE (in
) != CONST_INT
)
1726 /* Assume c initially something like 0x00fff000000fffff. The idea
1727 is to rotate the word so that the middle ^^^^^^ group of zeros
1728 is at the MS end and can be cleared with an rldicl mask. We then
1729 rotate back and clear off the MS ^^ group of zeros with a
1731 c
= ~c
; /* c == 0xff000ffffff00000 */
1732 lsb
= c
& -c
; /* lsb == 0x0000000000100000 */
1733 m1
= -lsb
; /* m1 == 0xfffffffffff00000 */
1734 c
= ~c
; /* c == 0x00fff000000fffff */
1735 c
&= -lsb
; /* c == 0x00fff00000000000 */
1736 lsb
= c
& -c
; /* lsb == 0x0000100000000000 */
1737 c
= ~c
; /* c == 0xff000fffffffffff */
1738 c
&= -lsb
; /* c == 0xff00000000000000 */
1740 while ((lsb
>>= 1) != 0)
1741 shift
++; /* shift == 44 on exit from loop */
1742 m1
<<= 64 - shift
; /* m1 == 0xffffff0000000000 */
1743 m1
= ~m1
; /* m1 == 0x000000ffffffffff */
1744 m2
= ~c
; /* m2 == 0x00ffffffffffffff */
1748 /* Assume c initially something like 0xff000f0000000000. The idea
1749 is to rotate the word so that the ^^^ middle group of zeros
1750 is at the LS end and can be cleared with an rldicr mask. We then
1751 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
1753 lsb
= c
& -c
; /* lsb == 0x0000010000000000 */
1754 m2
= -lsb
; /* m2 == 0xffffff0000000000 */
1755 c
= ~c
; /* c == 0x00fff0ffffffffff */
1756 c
&= -lsb
; /* c == 0x00fff00000000000 */
1757 lsb
= c
& -c
; /* lsb == 0x0000100000000000 */
1758 c
= ~c
; /* c == 0xff000fffffffffff */
1759 c
&= -lsb
; /* c == 0xff00000000000000 */
1761 while ((lsb
>>= 1) != 0)
1762 shift
++; /* shift == 44 on exit from loop */
1763 m1
= ~c
; /* m1 == 0x00ffffffffffffff */
1764 m1
>>= shift
; /* m1 == 0x0000000000000fff */
1765 m1
= ~m1
; /* m1 == 0xfffffffffffff000 */
1768 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
1769 masks will be all 1's. We are guaranteed more than one transition. */
1770 out
[0] = GEN_INT (64 - shift
);
1771 out
[1] = GEN_INT (m1
);
1772 out
[2] = GEN_INT (shift
);
1773 out
[3] = GEN_INT (m2
);
1779 /* Return 1 if the operand is either a non-special register or a constant
1780 that can be used as the operand of a PowerPC64 logical AND insn. */
1783 and64_operand (op
, mode
)
1785 enum machine_mode mode
;
1787 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
1788 return (gpc_reg_operand (op
, mode
) || mask64_operand (op
, mode
));
1790 return (logical_operand (op
, mode
) || mask64_operand (op
, mode
));
1793 /* Like the above, but also match constants that can be implemented
1794 with two rldicl or rldicr insns. */
1797 and64_2_operand (op
, mode
)
1799 enum machine_mode mode
;
1801 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
1802 return gpc_reg_operand (op
, mode
) || mask64_2_operand (op
, mode
);
1804 return logical_operand (op
, mode
) || mask64_2_operand (op
, mode
);
1807 /* Return 1 if the operand is either a non-special register or a
1808 constant that can be used as the operand of an RS/6000 logical AND insn. */
1811 and_operand (op
, mode
)
1813 enum machine_mode mode
;
1815 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
1816 return (gpc_reg_operand (op
, mode
) || mask_operand (op
, mode
));
1818 return (logical_operand (op
, mode
) || mask_operand (op
, mode
));
1821 /* Return 1 if the operand is a general register or memory operand. */
1824 reg_or_mem_operand (op
, mode
)
1826 enum machine_mode mode
;
1828 return (gpc_reg_operand (op
, mode
)
1829 || memory_operand (op
, mode
)
1830 || volatile_mem_operand (op
, mode
));
1833 /* Return 1 if the operand is a general register or memory operand without
1834 pre_inc or pre_dec which produces invalid form of PowerPC lwa
1838 lwa_operand (op
, mode
)
1840 enum machine_mode mode
;
1844 if (reload_completed
&& GET_CODE (inner
) == SUBREG
)
1845 inner
= SUBREG_REG (inner
);
1847 return gpc_reg_operand (inner
, mode
)
1848 || (memory_operand (inner
, mode
)
1849 && GET_CODE (XEXP (inner
, 0)) != PRE_INC
1850 && GET_CODE (XEXP (inner
, 0)) != PRE_DEC
1851 && (GET_CODE (XEXP (inner
, 0)) != PLUS
1852 || GET_CODE (XEXP (XEXP (inner
, 0), 1)) != CONST_INT
1853 || INTVAL (XEXP (XEXP (inner
, 0), 1)) % 4 == 0));
1856 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
1859 symbol_ref_operand (op
, mode
)
1861 enum machine_mode mode
;
1863 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1866 return (GET_CODE (op
) == SYMBOL_REF
);
1869 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1870 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
1873 call_operand (op
, mode
)
1875 enum machine_mode mode
;
1877 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1880 return (GET_CODE (op
) == SYMBOL_REF
1881 || (GET_CODE (op
) == REG
1882 && (REGNO (op
) == LINK_REGISTER_REGNUM
1883 || REGNO (op
) == COUNT_REGISTER_REGNUM
1884 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
)));
1887 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1888 this file and the function is not weakly defined. */
1891 current_file_function_operand (op
, mode
)
1893 enum machine_mode mode ATTRIBUTE_UNUSED
;
1895 return (GET_CODE (op
) == SYMBOL_REF
1896 && (SYMBOL_REF_FLAG (op
)
1897 || (op
== XEXP (DECL_RTL (current_function_decl
), 0)
1898 && ! DECL_WEAK (current_function_decl
))));
1901 /* Return 1 if this operand is a valid input for a move insn. */
1904 input_operand (op
, mode
)
1906 enum machine_mode mode
;
1908 /* Memory is always valid. */
1909 if (memory_operand (op
, mode
))
1912 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
1913 if (GET_CODE (op
) == CONSTANT_P_RTX
)
1916 /* For floating-point, easy constants are valid. */
1917 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
1919 && easy_fp_constant (op
, mode
))
1922 /* Allow any integer constant. */
1923 if (GET_MODE_CLASS (mode
) == MODE_INT
1924 && (GET_CODE (op
) == CONST_INT
1925 || GET_CODE (op
) == CONST_DOUBLE
))
1928 /* For floating-point or multi-word mode, the only remaining valid type
1930 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
1931 || GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
1932 return register_operand (op
, mode
);
1934 /* The only cases left are integral modes one word or smaller (we
1935 do not get called for MODE_CC values). These can be in any
1937 if (register_operand (op
, mode
))
1940 /* A SYMBOL_REF referring to the TOC is valid. */
1941 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op
))
1944 /* A constant pool expression (relative to the TOC) is valid */
1945 if (TOC_RELATIVE_EXPR_P (op
))
1948 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
1950 if (DEFAULT_ABI
== ABI_V4
1951 && (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == CONST
)
1952 && small_data_operand (op
, Pmode
))
1958 /* Return 1 for an operand in small memory on V.4/eabi. */
1961 small_data_operand (op
, mode
)
1962 rtx op ATTRIBUTE_UNUSED
;
1963 enum machine_mode mode ATTRIBUTE_UNUSED
;
1968 if (rs6000_sdata
== SDATA_NONE
|| rs6000_sdata
== SDATA_DATA
)
1971 if (DEFAULT_ABI
!= ABI_V4
)
1974 if (GET_CODE (op
) == SYMBOL_REF
)
1977 else if (GET_CODE (op
) != CONST
1978 || GET_CODE (XEXP (op
, 0)) != PLUS
1979 || GET_CODE (XEXP (XEXP (op
, 0), 0)) != SYMBOL_REF
1980 || GET_CODE (XEXP (XEXP (op
, 0), 1)) != CONST_INT
)
1985 rtx sum
= XEXP (op
, 0);
1986 HOST_WIDE_INT summand
;
1988 /* We have to be careful here, because it is the referenced address
1989 that must be 32k from _SDA_BASE_, not just the symbol. */
1990 summand
= INTVAL (XEXP (sum
, 1));
1991 if (summand
< 0 || summand
> g_switch_value
)
1994 sym_ref
= XEXP (sum
, 0);
1997 if (*XSTR (sym_ref
, 0) != '@')
2008 constant_pool_expr_1 (op
, have_sym
, have_toc
)
2013 switch (GET_CODE(op
))
2016 if (CONSTANT_POOL_ADDRESS_P (op
))
2018 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op
), Pmode
))
2026 else if (! strcmp (XSTR (op
, 0), toc_label_name
))
2035 return (constant_pool_expr_1 (XEXP (op
, 0), have_sym
, have_toc
)
2036 && constant_pool_expr_1 (XEXP (op
, 1), have_sym
, have_toc
));
2038 return constant_pool_expr_1 (XEXP (op
, 0), have_sym
, have_toc
);
2047 constant_pool_expr_p (op
)
2052 return constant_pool_expr_1 (op
, &have_sym
, &have_toc
) && have_sym
;
2056 toc_relative_expr_p (op
)
2061 return constant_pool_expr_1 (op
, &have_sym
, &have_toc
) && have_toc
;
2064 /* Try machine-dependent ways of modifying an illegitimate address
2065 to be legitimate. If we find one, return the new, valid address.
2066 This is used from only one place: `memory_address' in explow.c.
2068 OLDX is the address as it was before break_out_memory_refs was
2069 called. In some cases it is useful to look at this to decide what
2072 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2074 It is always safe for this function to do nothing. It exists to
2075 recognize opportunities to optimize the output.
2077 On RS/6000, first check for the sum of a register with a constant
2078 integer that is out of range. If so, generate code to add the
2079 constant with the low-order 16 bits masked to the register and force
2080 this result into another register (this can be done with `cau').
2081 Then generate an address of REG+(CONST&0xffff), allowing for the
2082 possibility of bit 16 being a one.
2084 Then check for the sum of a register and something not constant, try to
2085 load the other things into a register and return the sum. */
2087 rs6000_legitimize_address (x
, oldx
, mode
)
2089 rtx oldx ATTRIBUTE_UNUSED
;
2090 enum machine_mode mode
;
2092 if (GET_CODE (x
) == PLUS
2093 && GET_CODE (XEXP (x
, 0)) == REG
2094 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2095 && (unsigned HOST_WIDE_INT
) (INTVAL (XEXP (x
, 1)) + 0x8000) >= 0x10000)
2097 HOST_WIDE_INT high_int
, low_int
;
2099 low_int
= ((INTVAL (XEXP (x
, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2100 high_int
= INTVAL (XEXP (x
, 1)) - low_int
;
2101 sum
= force_operand (gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
2102 GEN_INT (high_int
)), 0);
2103 return gen_rtx_PLUS (Pmode
, sum
, GEN_INT (low_int
));
2105 else if (GET_CODE (x
) == PLUS
2106 && GET_CODE (XEXP (x
, 0)) == REG
2107 && GET_CODE (XEXP (x
, 1)) != CONST_INT
2108 && GET_MODE_NUNITS (mode
) == 1
2109 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
)
2112 && (TARGET_POWERPC64
|| mode
!= DImode
)
2115 return gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
2116 force_reg (Pmode
, force_operand (XEXP (x
, 1), 0)));
2118 else if (ALTIVEC_VECTOR_MODE (mode
))
2122 /* Make sure both operands are registers. */
2123 if (GET_CODE (x
) == PLUS
)
2124 return gen_rtx_PLUS (Pmode
, force_reg (Pmode
, XEXP (x
, 0)),
2125 force_reg (Pmode
, XEXP (x
, 1)));
2127 reg
= force_reg (Pmode
, x
);
2130 else if (SPE_VECTOR_MODE (mode
))
2132 /* We accept [reg + reg] and [reg + OFFSET]. */
2134 if (GET_CODE (x
) == PLUS
)
2136 rtx op1
= XEXP (x
, 0);
2137 rtx op2
= XEXP (x
, 1);
2139 op1
= force_reg (Pmode
, op1
);
2141 if (GET_CODE (op2
) != REG
2142 && (GET_CODE (op2
) != CONST_INT
2143 || !SPE_CONST_OFFSET_OK (INTVAL (op2
))))
2144 op2
= force_reg (Pmode
, op2
);
2146 return gen_rtx_PLUS (Pmode
, op1
, op2
);
2149 return force_reg (Pmode
, x
);
2151 else if (TARGET_ELF
&& TARGET_32BIT
&& TARGET_NO_TOC
&& ! flag_pic
2152 && GET_CODE (x
) != CONST_INT
2153 && GET_CODE (x
) != CONST_DOUBLE
2155 && GET_MODE_NUNITS (mode
) == 1
2156 && (GET_MODE_BITSIZE (mode
) <= 32
2157 || ((TARGET_HARD_FLOAT
&& TARGET_FPRS
) && mode
== DFmode
)))
2159 rtx reg
= gen_reg_rtx (Pmode
);
2160 emit_insn (gen_elf_high (reg
, (x
)));
2161 return gen_rtx_LO_SUM (Pmode
, reg
, (x
));
2163 else if (TARGET_MACHO
&& TARGET_32BIT
&& TARGET_NO_TOC
2165 && GET_CODE (x
) != CONST_INT
2166 && GET_CODE (x
) != CONST_DOUBLE
2168 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
) || mode
!= DFmode
)
2172 rtx reg
= gen_reg_rtx (Pmode
);
2173 emit_insn (gen_macho_high (reg
, (x
)));
2174 return gen_rtx_LO_SUM (Pmode
, reg
, (x
));
2177 && CONSTANT_POOL_EXPR_P (x
)
2178 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x
), Pmode
))
2180 return create_TOC_reference (x
);
2186 /* The convention appears to be to define this wherever it is used.
2187 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2188 is now used here. */
2189 #ifndef REG_MODE_OK_FOR_BASE_P
2190 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2193 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2194 replace the input X, or the original X if no replacement is called for.
2195 The output parameter *WIN is 1 if the calling macro should goto WIN,
2198 For RS/6000, we wish to handle large displacements off a base
2199 register by splitting the addend across an addiu/addis and the mem insn.
2200 This cuts number of extra insns needed from 3 to 1.
2202 On Darwin, we use this to generate code for floating point constants.
2203 A movsf_low is generated so we wind up with 2 instructions rather than 3.
2204 The Darwin code is inside #if TARGET_MACHO because only then is
2205 machopic_function_base_name() defined. */
2207 rs6000_legitimize_reload_address (x
, mode
, opnum
, type
, ind_levels
, win
)
2209 enum machine_mode mode
;
2212 int ind_levels ATTRIBUTE_UNUSED
;
2215 /* We must recognize output that we have already generated ourselves. */
2216 if (GET_CODE (x
) == PLUS
2217 && GET_CODE (XEXP (x
, 0)) == PLUS
2218 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == REG
2219 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
2220 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2222 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2223 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
2224 opnum
, (enum reload_type
)type
);
2230 if (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
2231 && GET_CODE (x
) == LO_SUM
2232 && GET_CODE (XEXP (x
, 0)) == PLUS
2233 && XEXP (XEXP (x
, 0), 0) == pic_offset_table_rtx
2234 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == HIGH
2235 && GET_CODE (XEXP (XEXP (XEXP (x
, 0), 1), 0)) == CONST
2236 && XEXP (XEXP (XEXP (x
, 0), 1), 0) == XEXP (x
, 1)
2237 && GET_CODE (XEXP (XEXP (x
, 1), 0)) == MINUS
2238 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 0)) == SYMBOL_REF
2239 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 1)) == SYMBOL_REF
)
2241 /* Result of previous invocation of this function on Darwin
2242 floating point constant. */
2243 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2244 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
2245 opnum
, (enum reload_type
)type
);
2250 if (GET_CODE (x
) == PLUS
2251 && GET_CODE (XEXP (x
, 0)) == REG
2252 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
2253 && REG_MODE_OK_FOR_BASE_P (XEXP (x
, 0), mode
)
2254 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2255 && !SPE_VECTOR_MODE (mode
)
2256 && !ALTIVEC_VECTOR_MODE (mode
))
2258 HOST_WIDE_INT val
= INTVAL (XEXP (x
, 1));
2259 HOST_WIDE_INT low
= ((val
& 0xffff) ^ 0x8000) - 0x8000;
2261 = (((val
- low
) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2263 /* Check for 32-bit overflow. */
2264 if (high
+ low
!= val
)
2270 /* Reload the high part into a base reg; leave the low part
2271 in the mem directly. */
2273 x
= gen_rtx_PLUS (GET_MODE (x
),
2274 gen_rtx_PLUS (GET_MODE (x
), XEXP (x
, 0),
2278 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2279 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
2280 opnum
, (enum reload_type
)type
);
2285 if (GET_CODE (x
) == SYMBOL_REF
2286 && DEFAULT_ABI
== ABI_DARWIN
2287 && !ALTIVEC_VECTOR_MODE (mode
)
2290 /* Darwin load of floating point constant. */
2291 rtx offset
= gen_rtx (CONST
, Pmode
,
2292 gen_rtx (MINUS
, Pmode
, x
,
2293 gen_rtx (SYMBOL_REF
, Pmode
,
2294 machopic_function_base_name ())));
2295 x
= gen_rtx (LO_SUM
, GET_MODE (x
),
2296 gen_rtx (PLUS
, Pmode
, pic_offset_table_rtx
,
2297 gen_rtx (HIGH
, Pmode
, offset
)), offset
);
2298 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2299 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
2300 opnum
, (enum reload_type
)type
);
2306 && CONSTANT_POOL_EXPR_P (x
)
2307 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x
), mode
))
2309 (x
) = create_TOC_reference (x
);
2317 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2318 that is a valid memory address for an instruction.
2319 The MODE argument is the machine mode for the MEM expression
2320 that wants to use this address.
2322 On the RS/6000, there are four valid address: a SYMBOL_REF that
2323 refers to a constant pool entry of an address (or the sum of it
2324 plus a constant), a short (16-bit signed) constant plus a register,
2325 the sum of two registers, or a register indirect, possibly with an
2326 auto-increment. For DFmode and DImode with an constant plus register,
2327 we must ensure that both words are addressable or PowerPC64 with offset
2330 For modes spanning multiple registers (DFmode in 32-bit GPRs,
2331 32-bit DImode, TImode), indexed addressing cannot be used because
2332 adjacent memory cells are accessed by adding word-sized offsets
2333 during assembly output. */
2335 rs6000_legitimate_address (mode
, x
, reg_ok_strict
)
2336 enum machine_mode mode
;
2340 if (LEGITIMATE_INDIRECT_ADDRESS_P (x
, reg_ok_strict
))
2342 if ((GET_CODE (x
) == PRE_INC
|| GET_CODE (x
) == PRE_DEC
)
2343 && !ALTIVEC_VECTOR_MODE (mode
)
2344 && !SPE_VECTOR_MODE (mode
)
2346 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x
, 0), reg_ok_strict
))
2348 if (LEGITIMATE_SMALL_DATA_P (mode
, x
))
2350 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x
))
2352 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2354 && GET_CODE (x
) == PLUS
2355 && GET_CODE (XEXP (x
, 0)) == REG
2356 && XEXP (x
, 0) == virtual_stack_vars_rtx
2357 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2359 if (LEGITIMATE_OFFSET_ADDRESS_P (mode
, x
, reg_ok_strict
))
2362 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
)
2365 && (TARGET_POWERPC64
|| mode
!= DImode
)
2366 && LEGITIMATE_INDEXED_ADDRESS_P (x
, reg_ok_strict
))
2368 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode
, x
, reg_ok_strict
))
/* NOTE(review): extraction-garbled fragment -- statements are split across
   display lines and interior source lines are missing; not compilable as
   shown.  Tokens byte-identical; comments only.
   Visible logic: emits a constant load into DEST by mode -- QI/HImode as a
   direct SET, SImode as high-half SET plus IOR of the low 16 bits, DImode
   via rs6000_emit_set_long_const; then attaches a REG_EQUAL note.  */
2373 /* Try to output insns to set TARGET equal to the constant C if it can
2374 be done in less than N insns. Do all computations in MODE.
2375 Returns the place where the output has been placed if it can be
2376 done and the insns have been emitted. If it would take more than N
2377 insns, zero is returned and no insns and emitted. */
2380 rs6000_emit_set_const (dest
, mode
, source
, n
)
2382 enum machine_mode mode
;
2383 int n ATTRIBUTE_UNUSED
;
2385 rtx result
, insn
, set
;
2386 HOST_WIDE_INT c0
, c1
;
2388 if (mode
== QImode
|| mode
== HImode
)
2391 dest
= gen_reg_rtx (mode
);
2392 emit_insn (gen_rtx_SET (VOIDmode
, dest
, source
));
2395 else if (mode
== SImode
)
2397 result
= no_new_pseudos
? dest
: gen_reg_rtx (SImode
);
/* High 16 bits first, then IOR in the low 16 bits.  */
2399 emit_insn (gen_rtx_SET (VOIDmode
, result
,
2400 GEN_INT (INTVAL (source
)
2401 & (~ (HOST_WIDE_INT
) 0xffff))));
2402 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
2403 gen_rtx_IOR (SImode
, result
,
2404 GEN_INT (INTVAL (source
) & 0xffff))));
2407 else if (mode
== DImode
)
2409 if (GET_CODE (source
) == CONST_INT
)
2411 c0
= INTVAL (source
);
2414 else if (GET_CODE (source
) == CONST_DOUBLE
)
2416 #if HOST_BITS_PER_WIDE_INT >= 64
2417 c0
= CONST_DOUBLE_LOW (source
);
2420 c0
= CONST_DOUBLE_LOW (source
);
2421 c1
= CONST_DOUBLE_HIGH (source
);
2427 result
= rs6000_emit_set_long_const (dest
, c0
, c1
);
/* Record the constant as a REG_EQUAL note on the last emitted insn,
   unless its SET_SRC is already constant.  */
2432 insn
= get_last_insn ();
2433 set
= single_set (insn
);
2434 if (! CONSTANT_P (SET_SRC (set
)))
2435 set_unique_reg_note (insn
, REG_EQUAL
, source
);
/* NOTE(review): extraction-garbled fragment -- interior source lines are
   missing and statements are split; not compilable as shown.  Tokens
   byte-identical; comments only.
   Visible logic: 64-bit constant load decomposition.  On !TARGET_POWERPC64
   the two word halves are moved independently; on 64-bit targets the
   constant is split into four 16-bit chunks ud1..ud4 and materialized with
   sign-extended immediates, shifts, and IORs, choosing the shortest form by
   which high chunks are all-zero/all-one.  */
2440 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2441 fall back to a straight forward decomposition. We do this to avoid
2442 exponential run times encountered when looking for longer sequences
2443 with rs6000_emit_set_const. */
2445 rs6000_emit_set_long_const (dest
, c1
, c2
)
2447 HOST_WIDE_INT c1
, c2
;
2449 if (!TARGET_POWERPC64
)
2451 rtx operand1
, operand2
;
2453 operand1
= operand_subword_force (dest
, WORDS_BIG_ENDIAN
== 0,
2455 operand2
= operand_subword_force (dest
, WORDS_BIG_ENDIAN
!= 0,
2457 emit_move_insn (operand1
, GEN_INT (c1
));
2458 emit_move_insn (operand2
, GEN_INT (c2
));
2462 HOST_WIDE_INT ud1
, ud2
, ud3
, ud4
;
2465 ud2
= (c1
& 0xffff0000) >> 16;
2466 #if HOST_BITS_PER_WIDE_INT >= 64
2470 ud4
= (c2
& 0xffff0000) >> 16;
/* Constant fits in a (sign-extended) 16-bit immediate.  */
2472 if ((ud4
== 0xffff && ud3
== 0xffff && ud2
== 0xffff && (ud1
& 0x8000))
2473 || (ud4
== 0 && ud3
== 0 && ud2
== 0 && ! (ud1
& 0x8000)))
2476 emit_move_insn (dest
, GEN_INT (((ud1
^ 0x8000) - 0x8000)));
2478 emit_move_insn (dest
, GEN_INT (ud1
));
/* Constant fits in 32 bits (sign-extended).  */
2481 else if ((ud4
== 0xffff && ud3
== 0xffff && (ud2
& 0x8000))
2482 || (ud4
== 0 && ud3
== 0 && ! (ud2
& 0x8000)))
2485 emit_move_insn (dest
, GEN_INT (((ud2
<< 16) ^ 0x80000000)
2488 emit_move_insn (dest
, GEN_INT (ud2
<< 16));
2490 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
/* Constant fits in 48 bits: build ud3:ud2, shift left 16, IOR ud1.  */
2492 else if ((ud4
== 0xffff && (ud3
& 0x8000))
2493 || (ud4
== 0 && ! (ud3
& 0x8000)))
2496 emit_move_insn (dest
, GEN_INT (((ud3
<< 16) ^ 0x80000000)
2499 emit_move_insn (dest
, GEN_INT (ud3
<< 16));
2502 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud2
)));
2503 emit_move_insn (dest
, gen_rtx_ASHIFT (DImode
, dest
, GEN_INT (16)));
2505 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
/* Full 64-bit case: ud4:ud3, shift left 32, IOR ud2<<16 and ud1.  */
2510 emit_move_insn (dest
, GEN_INT (((ud4
<< 16) ^ 0x80000000)
2513 emit_move_insn (dest
, GEN_INT (ud4
<< 16));
2516 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud3
)));
2518 emit_move_insn (dest
, gen_rtx_ASHIFT (DImode
, dest
, GEN_INT (32)));
2520 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
,
2521 GEN_INT (ud2
<< 16)));
2523 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
/* NOTE(review): extraction-garbled fragment -- many interior source lines
   are missing (declarations, braces, case labels of a mode switch) and
   statements are split across display lines; not compilable as shown.
   Tokens byte-identical; comments only.
   Visible logic: the movMM expander worker.  Normalizes CONST_DOUBLE
   sources, special-cases a DImode-aligned block move, truncates DF data for
   SFmode stores on POWER, forces constants to memory or the TOC as
   appropriate per ABI (V.4 small data, GOT, ELF/Darwin high/low pairs),
   legitimizes addresses, and finally emits the SET.  */
2529 /* Emit a move from SOURCE to DEST in mode MODE. */
2531 rs6000_emit_move (dest
, source
, mode
)
2534 enum machine_mode mode
;
2538 operands
[1] = source
;
2540 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
2541 if (GET_CODE (operands
[1]) == CONST_DOUBLE
2542 && ! FLOAT_MODE_P (mode
)
2543 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
2545 /* FIXME. This should never happen. */
2546 /* Since it seems that it does, do the safe thing and convert
2548 operands
[1] = gen_int_mode (CONST_DOUBLE_LOW (operands
[1]), mode
);
2550 if (GET_CODE (operands
[1]) == CONST_DOUBLE
2551 && ! FLOAT_MODE_P (mode
)
2552 && ((CONST_DOUBLE_HIGH (operands
[1]) == 0
2553 && CONST_DOUBLE_LOW (operands
[1]) >= 0)
2554 || (CONST_DOUBLE_HIGH (operands
[1]) == -1
2555 && CONST_DOUBLE_LOW (operands
[1]) < 0)))
2558 /* Check if GCC is setting up a block move that will end up using FP
2559 registers as temporaries. We must make sure this is acceptable. */
2560 if (GET_CODE (operands
[0]) == MEM
2561 && GET_CODE (operands
[1]) == MEM
2563 && (SLOW_UNALIGNED_ACCESS (DImode
, MEM_ALIGN (operands
[0]))
2564 || SLOW_UNALIGNED_ACCESS (DImode
, MEM_ALIGN (operands
[1])))
2565 && ! (SLOW_UNALIGNED_ACCESS (SImode
, (MEM_ALIGN (operands
[0]) > 32
2566 ? 32 : MEM_ALIGN (operands
[0])))
2567 || SLOW_UNALIGNED_ACCESS (SImode
, (MEM_ALIGN (operands
[1]) > 32
2569 : MEM_ALIGN (operands
[1]))))
2570 && ! MEM_VOLATILE_P (operands
[0])
2571 && ! MEM_VOLATILE_P (operands
[1]))
/* Split the DImode block move into two SImode moves.  */
2573 emit_move_insn (adjust_address (operands
[0], SImode
, 0),
2574 adjust_address (operands
[1], SImode
, 0));
2575 emit_move_insn (adjust_address (operands
[0], SImode
, 4),
2576 adjust_address (operands
[1], SImode
, 4));
2580 if (! no_new_pseudos
&& GET_CODE (operands
[0]) != REG
)
2581 operands
[1] = force_reg (mode
, operands
[1]);
/* SFmode store on POWER (not PowerPC): FPRs may hold double-precision
   data, so round to single via gen_aux_truncdfsf2 first.  */
2583 if (mode
== SFmode
&& ! TARGET_POWERPC
2584 && TARGET_HARD_FLOAT
&& TARGET_FPRS
2585 && GET_CODE (operands
[0]) == MEM
)
2589 if (reload_in_progress
|| reload_completed
)
2590 regnum
= true_regnum (operands
[1]);
2591 else if (GET_CODE (operands
[1]) == REG
)
2592 regnum
= REGNO (operands
[1]);
2596 /* If operands[1] is a register, on POWER it may have
2597 double-precision data in it, so truncate it to single
2599 if (FP_REGNO_P (regnum
) || regnum
>= FIRST_PSEUDO_REGISTER
)
2602 newreg
= (no_new_pseudos
? operands
[1] : gen_reg_rtx (mode
));
2603 emit_insn (gen_aux_truncdfsf2 (newreg
, operands
[1]));
2604 operands
[1] = newreg
;
2608 /* Handle the case where reload calls us with an invalid address;
2609 and the case of CONSTANT_P_RTX. */
2610 if (!ALTIVEC_VECTOR_MODE (mode
)
2611 && (! general_operand (operands
[1], mode
)
2612 || ! nonimmediate_operand (operands
[0], mode
)
2613 || GET_CODE (operands
[1]) == CONSTANT_P_RTX
))
2615 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2619 /* FIXME: In the long term, this switch statement should go away
2620 and be replaced by a sequence of tests based on things like
/* NOTE(review): the mode-switch case labels are among the missing lines;
   the following clusters presumably correspond to integer, floating, and
   vector cases -- confirm against a pristine copy.  */
2626 if (CONSTANT_P (operands
[1])
2627 && GET_CODE (operands
[1]) != CONST_INT
)
2628 operands
[1] = force_const_mem (mode
, operands
[1]);
2634 if (CONSTANT_P (operands
[1])
2635 && ! easy_fp_constant (operands
[1], mode
))
2636 operands
[1] = force_const_mem (mode
, operands
[1]);
2646 if (CONSTANT_P (operands
[1])
2647 && !easy_vector_constant (operands
[1]))
2648 operands
[1] = force_const_mem (mode
, operands
[1]);
2653 /* Use default pattern for address of ELF small data */
2656 && DEFAULT_ABI
== ABI_V4
2657 && (GET_CODE (operands
[1]) == SYMBOL_REF
2658 || GET_CODE (operands
[1]) == CONST
)
2659 && small_data_operand (operands
[1], mode
))
2661 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2665 if (DEFAULT_ABI
== ABI_V4
2666 && mode
== Pmode
&& mode
== SImode
2667 && flag_pic
== 1 && got_operand (operands
[1], mode
))
2669 emit_insn (gen_movsi_got (operands
[0], operands
[1]));
2673 if ((TARGET_ELF
|| DEFAULT_ABI
== ABI_DARWIN
)
2674 && TARGET_NO_TOC
&& ! flag_pic
2676 && CONSTANT_P (operands
[1])
2677 && GET_CODE (operands
[1]) != HIGH
2678 && GET_CODE (operands
[1]) != CONST_INT
)
2680 rtx target
= (no_new_pseudos
? operands
[0] : gen_reg_rtx (mode
));
2682 /* If this is a function address on -mcall-aixdesc,
2683 convert it to the address of the descriptor. */
2684 if (DEFAULT_ABI
== ABI_AIX
2685 && GET_CODE (operands
[1]) == SYMBOL_REF
2686 && XSTR (operands
[1], 0)[0] == '.')
2688 const char *name
= XSTR (operands
[1], 0);
2690 while (*name
== '.')
2692 new_ref
= gen_rtx_SYMBOL_REF (Pmode
, name
);
2693 CONSTANT_POOL_ADDRESS_P (new_ref
)
2694 = CONSTANT_POOL_ADDRESS_P (operands
[1]);
2695 SYMBOL_REF_FLAG (new_ref
) = SYMBOL_REF_FLAG (operands
[1]);
2696 SYMBOL_REF_USED (new_ref
) = SYMBOL_REF_USED (operands
[1]);
2697 operands
[1] = new_ref
;
2700 if (DEFAULT_ABI
== ABI_DARWIN
)
2702 emit_insn (gen_macho_high (target
, operands
[1]));
2703 emit_insn (gen_macho_low (operands
[0], target
, operands
[1]));
2707 emit_insn (gen_elf_high (target
, operands
[1]));
2708 emit_insn (gen_elf_low (operands
[0], target
, operands
[1]));
2712 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2713 and we have put it in the TOC, we just need to make a TOC-relative
2716 && GET_CODE (operands
[1]) == SYMBOL_REF
2717 && CONSTANT_POOL_EXPR_P (operands
[1])
2718 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands
[1]),
2719 get_pool_mode (operands
[1])))
2721 operands
[1] = create_TOC_reference (operands
[1]);
2723 else if (mode
== Pmode
2724 && CONSTANT_P (operands
[1])
2725 && ((GET_CODE (operands
[1]) != CONST_INT
2726 && ! easy_fp_constant (operands
[1], mode
))
2727 || (GET_CODE (operands
[1]) == CONST_INT
2728 && num_insns_constant (operands
[1], mode
) > 2)
2729 || (GET_CODE (operands
[0]) == REG
2730 && FP_REGNO_P (REGNO (operands
[0]))))
2731 && GET_CODE (operands
[1]) != HIGH
2732 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands
[1])
2733 && ! TOC_RELATIVE_EXPR_P (operands
[1]))
2735 /* Emit a USE operation so that the constant isn't deleted if
2736 expensive optimizations are turned on because nobody
2737 references it. This should only be done for operands that
2738 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2739 This should not be done for operands that contain LABEL_REFs.
2740 For now, we just handle the obvious case. */
2741 if (GET_CODE (operands
[1]) != LABEL_REF
)
2742 emit_insn (gen_rtx_USE (VOIDmode
, operands
[1]));
2745 /* Darwin uses a special PIC legitimizer. */
2746 if (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
)
2749 rs6000_machopic_legitimize_pic_address (operands
[1], mode
,
2751 if (operands
[0] != operands
[1])
2752 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2757 /* If we are to limit the number of things we put in the TOC and
2758 this is a symbol plus a constant we can add in one insn,
2759 just put the symbol in the TOC and add the constant. Don't do
2760 this if reload is in progress. */
2761 if (GET_CODE (operands
[1]) == CONST
2762 && TARGET_NO_SUM_IN_TOC
&& ! reload_in_progress
2763 && GET_CODE (XEXP (operands
[1], 0)) == PLUS
2764 && add_operand (XEXP (XEXP (operands
[1], 0), 1), mode
)
2765 && (GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == LABEL_REF
2766 || GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == SYMBOL_REF
)
2767 && ! side_effects_p (operands
[0]))
2770 force_const_mem (mode
, XEXP (XEXP (operands
[1], 0), 0));
2771 rtx other
= XEXP (XEXP (operands
[1], 0), 1);
2773 sym
= force_reg (mode
, sym
);
2775 emit_insn (gen_addsi3 (operands
[0], sym
, other
));
2777 emit_insn (gen_adddi3 (operands
[0], sym
, other
));
2781 operands
[1] = force_const_mem (mode
, operands
[1]);
2784 && CONSTANT_POOL_EXPR_P (XEXP (operands
[1], 0))
2785 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
2786 get_pool_constant (XEXP (operands
[1], 0)),
2787 get_pool_mode (XEXP (operands
[1], 0))))
2790 = gen_rtx_MEM (mode
,
2791 create_TOC_reference (XEXP (operands
[1], 0)));
2792 set_mem_alias_set (operands
[1], get_TOC_alias_set ());
2793 RTX_UNCHANGING_P (operands
[1]) = 1;
/* Force non-register addresses into registers (except during reload,
   which must not create new pseudos).  */
2799 if (GET_CODE (operands
[0]) == MEM
2800 && GET_CODE (XEXP (operands
[0], 0)) != REG
2801 && ! reload_in_progress
)
2803 = replace_equiv_address (operands
[0],
2804 copy_addr_to_reg (XEXP (operands
[0], 0)));
2806 if (GET_CODE (operands
[1]) == MEM
2807 && GET_CODE (XEXP (operands
[1], 0)) != REG
2808 && ! reload_in_progress
)
2810 = replace_equiv_address (operands
[1],
2811 copy_addr_to_reg (XEXP (operands
[1], 0)));
2818 /* Above, we may have called force_const_mem which may have returned
2819 an invalid address. If we can, fix this up; otherwise, reload will
2820 have to deal with it. */
2821 if (GET_CODE (operands
[1]) == MEM
2822 && ! memory_address_p (mode
, XEXP (operands
[1], 0))
2823 && ! reload_in_progress
)
2824 operands
[1] = adjust_address (operands
[1], mode
, 0);
2826 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
/* NOTE(review): extraction-garbled fragment -- interior lines missing and
   statements split; not compilable as shown.  Tokens byte-identical;
   comments only.
   Visible logic: zeroes *CUM, seeds the FP/AltiVec/GP argument register
   cursors, computes nargs_prototype from FNTYPE, records a longcall cookie,
   and optionally dumps debug state.  */
2830 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2831 for a call to a function whose data type is FNTYPE.
2832 For a library call, FNTYPE is 0.
2834 For incoming args we set the number of arguments in the prototype large
2835 so we never return a PARALLEL. */
2838 init_cumulative_args (cum
, fntype
, libname
, incoming
)
2839 CUMULATIVE_ARGS
*cum
;
2841 rtx libname ATTRIBUTE_UNUSED
;
2844 static CUMULATIVE_ARGS zero_cumulative
;
2846 *cum
= zero_cumulative
;
2848 cum
->fregno
= FP_ARG_MIN_REG
;
2849 cum
->vregno
= ALTIVEC_ARG_MIN_REG
;
2850 cum
->prototype
= (fntype
&& TYPE_ARG_TYPES (fntype
));
2851 cum
->call_cookie
= CALL_NORMAL
;
2852 cum
->sysv_gregno
= GP_ARG_MIN_REG
;
2855 cum
->nargs_prototype
= 1000; /* don't return a PARALLEL */
2857 else if (cum
->prototype
)
2858 cum
->nargs_prototype
= (list_length (TYPE_ARG_TYPES (fntype
)) - 1
2859 + (TYPE_MODE (TREE_TYPE (fntype
)) == BLKmode
2860 || RETURN_IN_MEMORY (TREE_TYPE (fntype
))));
2863 cum
->nargs_prototype
= 0;
2865 cum
->orig_nargs
= cum
->nargs_prototype
;
2867 /* Check for a longcall attribute. */
2869 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype
))
2870 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype
)))
2871 cum
->call_cookie
= CALL_LONG
;
2873 if (TARGET_DEBUG_ARG
)
2875 fprintf (stderr
, "\ninit_cumulative_args:");
2878 tree ret_type
= TREE_TYPE (fntype
);
2879 fprintf (stderr
, " ret code = %s,",
2880 tree_code_name
[ (int)TREE_CODE (ret_type
) ]);
2883 if (cum
->call_cookie
& CALL_LONG
)
2884 fprintf (stderr
, " longcall,");
2886 fprintf (stderr
, " proto = %d, nargs = %d\n",
2887 cum
->prototype
, cum
->nargs_prototype
);
/* NOTE(review): extraction-garbled fragment; tokens byte-identical,
   comments only.  Returns an `enum direction' for argument padding:
   aggregates (AIX rule visible in the comment) get one answer, otherwise
   the standard big/little-endian size test decides downward vs upward.  */
2891 /* If defined, a C expression which determines whether, and in which
2892 direction, to pad out an argument with extra space. The value
2893 should be of type `enum direction': either `upward' to pad above
2894 the argument, `downward' to pad below, or `none' to inhibit
2897 For the AIX ABI structs are always stored left shifted in their
2901 function_arg_padding (mode
, type
)
2902 enum machine_mode mode
;
2905 if (type
!= 0 && AGGREGATE_TYPE_P (type
))
2908 /* This is the default definition. */
2909 return (! BYTES_BIG_ENDIAN
2912 ? (type
&& TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
2913 && int_size_in_bytes (type
) < (PARM_BOUNDARY
/ BITS_PER_UNIT
))
2914 : GET_MODE_BITSIZE (mode
) < PARM_BOUNDARY
)
2915 ? downward
: upward
));
/* NOTE(review): extraction-garbled fragment; tokens byte-identical,
   comments only.  Returns the argument alignment boundary in bits:
   special cases for V.4 DImode/DFmode, SPE vector modes, and AltiVec
   vector modes (returned values are on lines missing from this chunk),
   else PARM_BOUNDARY.  */
2918 /* If defined, a C expression that gives the alignment boundary, in bits,
2919 of an argument with the specified mode and type. If it is not defined,
2920 PARM_BOUNDARY is used for all arguments.
2922 V.4 wants long longs to be double word aligned. */
2925 function_arg_boundary (mode
, type
)
2926 enum machine_mode mode
;
2927 tree type ATTRIBUTE_UNUSED
;
2929 if (DEFAULT_ABI
== ABI_V4
&& (mode
== DImode
|| mode
== DFmode
))
2931 else if (SPE_VECTOR_MODE (mode
))
2933 else if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
2936 return PARM_BOUNDARY
;
/* NOTE(review): extraction-garbled fragment -- interior lines missing and
   statements split; not compilable as shown.  Tokens byte-identical;
   comments only.
   Visible logic: advances CUM past one argument: AltiVec vregs, SPE vector
   gregs, the V.4 scheme (FPRs for SF/DF, odd-aligned GPR pairs for long
   long, stack alignment), and the default word-count scheme with optional
   debug dump.  */
2939 /* Update the data in CUM to advance over an argument
2940 of mode MODE and data type TYPE.
2941 (TYPE is null for libcalls where that information may not be available.) */
2944 function_arg_advance (cum
, mode
, type
, named
)
2945 CUMULATIVE_ARGS
*cum
;
2946 enum machine_mode mode
;
2950 cum
->nargs_prototype
--;
2952 if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
2954 if (cum
->vregno
<= ALTIVEC_ARG_MAX_REG
&& cum
->nargs_prototype
>= 0)
2957 cum
->words
+= RS6000_ARG_SIZE (mode
, type
);
2959 else if (TARGET_SPE_ABI
&& TARGET_SPE
&& SPE_VECTOR_MODE (mode
)
2960 && named
&& cum
->sysv_gregno
<= GP_ARG_MAX_REG
)
2962 else if (DEFAULT_ABI
== ABI_V4
)
2964 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
2965 && (mode
== SFmode
|| mode
== DFmode
))
2967 if (cum
->fregno
<= FP_ARG_V4_MAX_REG
)
2972 cum
->words
+= cum
->words
& 1;
2973 cum
->words
+= RS6000_ARG_SIZE (mode
, type
);
2979 int gregno
= cum
->sysv_gregno
;
2981 /* Aggregates and IEEE quad get passed by reference. */
2982 if ((type
&& AGGREGATE_TYPE_P (type
))
2986 n_words
= RS6000_ARG_SIZE (mode
, type
);
2988 /* Long long and SPE vectors are put in odd registers. */
2989 if (n_words
== 2 && (gregno
& 1) == 0)
2992 /* Long long and SPE vectors are not split between registers
2994 if (gregno
+ n_words
- 1 > GP_ARG_MAX_REG
)
2996 /* Long long is aligned on the stack. */
2998 cum
->words
+= cum
->words
& 1;
2999 cum
->words
+= n_words
;
3002 /* Note: continuing to accumulate gregno past when we've started
3003 spilling to the stack indicates the fact that we've started
3004 spilling to the stack to expand_builtin_saveregs. */
3005 cum
->sysv_gregno
= gregno
+ n_words
;
3008 if (TARGET_DEBUG_ARG
)
3010 fprintf (stderr
, "function_adv: words = %2d, fregno = %2d, ",
3011 cum
->words
, cum
->fregno
);
3012 fprintf (stderr
, "gregno = %2d, nargs = %4d, proto = %d, ",
3013 cum
->sysv_gregno
, cum
->nargs_prototype
, cum
->prototype
);
3014 fprintf (stderr
, "mode = %4s, named = %d\n",
3015 GET_MODE_NAME (mode
), named
);
/* Default (AIX-style) scheme: count words with doubleword alignment.  */
3020 int align
= (TARGET_32BIT
&& (cum
->words
& 1) != 0
3021 && function_arg_boundary (mode
, type
) == 64) ? 1 : 0;
3023 cum
->words
+= align
+ RS6000_ARG_SIZE (mode
, type
);
3025 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
3026 && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3029 if (TARGET_DEBUG_ARG
)
3031 fprintf (stderr
, "function_adv: words = %2d, fregno = %2d, ",
3032 cum
->words
, cum
->fregno
);
3033 fprintf (stderr
, "nargs = %4d, proto = %d, mode = %4s, ",
3034 cum
->nargs_prototype
, cum
->prototype
, GET_MODE_NAME (mode
));
3035 fprintf (stderr
, "named = %d, align = %d\n", named
, align
);
/* NOTE(review): extraction-garbled fragment -- interior lines missing and
   statements split; not compilable as shown.  Tokens byte-identical;
   comments only.
   Visible logic: returns the rtx (REG, PARALLEL, or stack marker) for one
   argument: a VOIDmode CR1 cookie for V.4 fp-args tracking, AltiVec vregs,
   SPE vector gregs (split PARALLEL for unnamed), the V.4 FPR/odd-GPR-pair
   scheme, and the default FPR-with-GPR-shadow PARALLEL case.  */
3040 /* Determine where to put an argument to a function.
3041 Value is zero to push the argument on the stack,
3042 or a hard register in which to store the argument.
3044 MODE is the argument's machine mode.
3045 TYPE is the data type of the argument (as a tree).
3046 This is null for libcalls where that information may not
3048 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3049 the preceding args and about the function being called.
3050 NAMED is nonzero if this argument is a named parameter
3051 (otherwise it is an extra parameter matching an ellipsis).
3053 On RS/6000 the first eight words of non-FP are normally in registers
3054 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3055 Under V.4, the first 8 FP args are in registers.
3057 If this is floating-point and no prototype is specified, we use
3058 both an FP and integer register (or possibly FP reg and stack). Library
3059 functions (when TYPE is zero) always have the proper types for args,
3060 so we can pass the FP value just in one register. emit_library_function
3061 doesn't support PARALLEL anyway. */
3064 function_arg (cum
, mode
, type
, named
)
3065 CUMULATIVE_ARGS
*cum
;
3066 enum machine_mode mode
;
3070 enum rs6000_abi abi
= DEFAULT_ABI
;
3072 /* Return a marker to indicate whether CR1 needs to set or clear the
3073 bit that V.4 uses to say fp args were passed in registers.
3074 Assume that we don't need the marker for software floating point,
3075 or compiler generated library calls. */
3076 if (mode
== VOIDmode
)
3079 && cum
->nargs_prototype
< 0
3080 && type
&& (cum
->prototype
|| TARGET_NO_PROTOTYPE
))
3082 /* For the SPE, we need to crxor CR6 always. */
3084 return GEN_INT (cum
->call_cookie
| CALL_V4_SET_FP_ARGS
);
3085 else if (TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3086 return GEN_INT (cum
->call_cookie
3087 | ((cum
->fregno
== FP_ARG_MIN_REG
)
3088 ? CALL_V4_SET_FP_ARGS
3089 : CALL_V4_CLEAR_FP_ARGS
));
3092 return GEN_INT (cum
->call_cookie
);
3095 if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
3097 if (named
&& cum
->vregno
<= ALTIVEC_ARG_MAX_REG
)
3098 return gen_rtx_REG (mode
, cum
->vregno
);
3102 else if (TARGET_SPE_ABI
&& TARGET_SPE
&& SPE_VECTOR_MODE (mode
) && named
)
3104 if (cum
->sysv_gregno
<= GP_ARG_MAX_REG
)
3105 return gen_rtx_REG (mode
, cum
->sysv_gregno
);
3109 else if (abi
== ABI_V4
)
3111 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
3112 && (mode
== SFmode
|| mode
== DFmode
))
3114 if (cum
->fregno
<= FP_ARG_V4_MAX_REG
)
3115 return gen_rtx_REG (mode
, cum
->fregno
);
3122 int gregno
= cum
->sysv_gregno
;
3124 /* Aggregates and IEEE quad get passed by reference. */
3125 if ((type
&& AGGREGATE_TYPE_P (type
))
3129 n_words
= RS6000_ARG_SIZE (mode
, type
);
3131 /* Long long and SPE vectors are put in odd registers. */
3132 if (n_words
== 2 && (gregno
& 1) == 0)
3135 /* Long long and SPE vectors are not split between registers
3137 if (gregno
+ n_words
- 1 <= GP_ARG_MAX_REG
)
3139 /* SPE vectors in ... get split into 2 registers. */
3140 if (TARGET_SPE
&& TARGET_SPE_ABI
3141 && SPE_VECTOR_MODE (mode
) && !named
)
3144 enum machine_mode m
= GET_MODE_INNER (mode
);
3146 r1
= gen_rtx_REG (m
, gregno
);
3147 r1
= gen_rtx_EXPR_LIST (m
, r1
, const0_rtx
);
3148 r2
= gen_rtx_REG (m
, gregno
+ 1);
3149 r2
= gen_rtx_EXPR_LIST (m
, r2
, GEN_INT (4));
3150 return gen_rtx_PARALLEL (mode
, gen_rtvec (2, r1
, r2
));
3152 return gen_rtx_REG (mode
, gregno
);
/* Default (AIX-style) case: doubleword alignment, then FPR and/or
   GPR-relative placement.  */
3160 int align
= (TARGET_32BIT
&& (cum
->words
& 1) != 0
3161 && function_arg_boundary (mode
, type
) == 64) ? 1 : 0;
3162 int align_words
= cum
->words
+ align
;
3164 if (type
&& TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
3167 if (USE_FP_FOR_ARG_P (*cum
, mode
, type
))
3170 || ((cum
->nargs_prototype
> 0)
3171 /* IBM AIX extended its linkage convention definition always
3172 to require FP args after register save area hole on the
3174 && (DEFAULT_ABI
!= ABI_AIX
3176 || (align_words
< GP_ARG_NUM_REG
))))
3177 return gen_rtx_REG (mode
, cum
->fregno
);
3179 return gen_rtx_PARALLEL (mode
,
3181 gen_rtx_EXPR_LIST (VOIDmode
,
3182 ((align_words
>= GP_ARG_NUM_REG
)
3185 + RS6000_ARG_SIZE (mode
, type
)
3187 /* If this is partially on the stack, then
3188 we only include the portion actually
3189 in registers here. */
3190 ? gen_rtx_REG (SImode
,
3191 GP_ARG_MIN_REG
+ align_words
)
3192 : gen_rtx_REG (mode
,
3193 GP_ARG_MIN_REG
+ align_words
))),
3195 gen_rtx_EXPR_LIST (VOIDmode
,
3196 gen_rtx_REG (mode
, cum
->fregno
),
3199 else if (align_words
< GP_ARG_NUM_REG
)
3200 return gen_rtx_REG (mode
, GP_ARG_MIN_REG
+ align_words
);
/* NOTE(review): extraction-garbled fragment; tokens byte-identical,
   comments only.  Returns the number of registers used for an argument
   that straddles the register/stack boundary (GP_ARG_NUM_REG - words),
   zero otherwise; V.4 and FP/AltiVec cases short-circuit earlier.  */
3206 /* For an arg passed partly in registers and partly in memory,
3207 this is the number of registers used.
3208 For args passed entirely in registers or entirely in memory, zero. */
3211 function_arg_partial_nregs (cum
, mode
, type
, named
)
3212 CUMULATIVE_ARGS
*cum
;
3213 enum machine_mode mode
;
3215 int named ATTRIBUTE_UNUSED
;
3217 if (DEFAULT_ABI
== ABI_V4
)
3220 if (USE_FP_FOR_ARG_P (*cum
, mode
, type
)
3221 || USE_ALTIVEC_FOR_ARG_P (*cum
, mode
, type
))
3223 if (cum
->nargs_prototype
>= 0)
3227 if (cum
->words
< GP_ARG_NUM_REG
3228 && GP_ARG_NUM_REG
< (cum
->words
+ RS6000_ARG_SIZE (mode
, type
)))
3230 int ret
= GP_ARG_NUM_REG
- cum
->words
;
3231 if (ret
&& TARGET_DEBUG_ARG
)
3232 fprintf (stderr
, "function_arg_partial_nregs: %d\n", ret
);
/* NOTE(review): extraction-garbled fragment; tokens byte-identical,
   comments only.  Nonzero when an argument must be passed by reference:
   under the V.4 ABI, aggregates (and, per the missing condition line,
   presumably IEEE quad/TFmode -- TODO confirm) qualify.  */
3240 /* A C expression that indicates when an argument must be passed by
3241 reference. If nonzero for an argument, a copy of that argument is
3242 made in memory and a pointer to the argument is passed instead of
3243 the argument itself. The pointer is passed in whatever way is
3244 appropriate for passing a pointer to that type.
3246 Under V.4, structures and unions are passed by reference. */
3249 function_arg_pass_by_reference (cum
, mode
, type
, named
)
3250 CUMULATIVE_ARGS
*cum ATTRIBUTE_UNUSED
;
3251 enum machine_mode mode ATTRIBUTE_UNUSED
;
3253 int named ATTRIBUTE_UNUSED
;
3255 if (DEFAULT_ABI
== ABI_V4
3256 && ((type
&& AGGREGATE_TYPE_P (type
))
3259 if (TARGET_DEBUG_ARG
)
3260 fprintf (stderr
, "function_arg_pass_by_reference: aggregate\n");
/* NOTE(review): extraction-garbled fragment -- interior lines missing and
   statements split; not compilable as shown.  Tokens byte-identical;
   comments only.
   Visible logic: the varargs prologue hook.  Advances a copy of CUM past
   the last named arg, picks the save area (V.4 varargs area vs incoming
   args), spills remaining GPRs with move_block_from_reg-style code, and
   for V.4 conditionally (on CR1) spills the FP argument registers.  */
3268 /* Perform any needed actions needed for a function that is receiving a
3269 variable number of arguments.
3273 MODE and TYPE are the mode and type of the current parameter.
3275 PRETEND_SIZE is a variable that should be set to the amount of stack
3276 that must be pushed by the prolog to pretend that our caller pushed
3279 Normally, this macro will push all remaining incoming registers on the
3280 stack and set PRETEND_SIZE to the length of the registers pushed. */
3283 setup_incoming_varargs (cum
, mode
, type
, pretend_size
, no_rtl
)
3284 CUMULATIVE_ARGS
*cum
;
3285 enum machine_mode mode
;
3291 CUMULATIVE_ARGS next_cum
;
3292 int reg_size
= TARGET_32BIT
? 4 : 8;
3293 rtx save_area
= NULL_RTX
, mem
;
3294 int first_reg_offset
, set
;
3298 fntype
= TREE_TYPE (current_function_decl
);
3299 stdarg_p
= (TYPE_ARG_TYPES (fntype
) != 0
3300 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
3301 != void_type_node
));
3303 /* For varargs, we do not want to skip the dummy va_dcl argument.
3304 For stdargs, we do want to skip the last named argument. */
3307 function_arg_advance (&next_cum
, mode
, type
, 1);
3309 if (DEFAULT_ABI
== ABI_V4
)
3311 /* Indicate to allocate space on the stack for varargs save area. */
3312 cfun
->machine
->sysv_varargs_p
= 1;
3314 save_area
= plus_constant (virtual_stack_vars_rtx
,
3315 - RS6000_VARARGS_SIZE
);
3317 first_reg_offset
= next_cum
.sysv_gregno
- GP_ARG_MIN_REG
;
3321 first_reg_offset
= next_cum
.words
;
3322 save_area
= virtual_incoming_args_rtx
;
3323 cfun
->machine
->sysv_varargs_p
= 0;
3325 if (MUST_PASS_IN_STACK (mode
, type
))
3326 first_reg_offset
+= RS6000_ARG_SIZE (TYPE_MODE (type
), type
);
3329 set
= get_varargs_alias_set ();
3330 if (! no_rtl
&& first_reg_offset
< GP_ARG_NUM_REG
)
3332 mem
= gen_rtx_MEM (BLKmode
,
3333 plus_constant (save_area
,
3334 first_reg_offset
* reg_size
)),
3335 set_mem_alias_set (mem
, set
);
3336 set_mem_align (mem
, BITS_PER_WORD
);
3339 (GP_ARG_MIN_REG
+ first_reg_offset
, mem
,
3340 GP_ARG_NUM_REG
- first_reg_offset
,
3341 (GP_ARG_NUM_REG
- first_reg_offset
) * UNITS_PER_WORD
);
3343 /* ??? Does ABI_V4 need this at all? */
3344 *pretend_size
= (GP_ARG_NUM_REG
- first_reg_offset
) * UNITS_PER_WORD
;
3347 /* Save FP registers if needed. */
3348 if (DEFAULT_ABI
== ABI_V4
3349 && TARGET_HARD_FLOAT
&& TARGET_FPRS
3351 && next_cum
.fregno
<= FP_ARG_V4_MAX_REG
)
3353 int fregno
= next_cum
.fregno
;
3354 rtx cr1
= gen_rtx_REG (CCmode
, CR1_REGNO
);
3355 rtx lab
= gen_label_rtx ();
3356 int off
= (GP_ARG_NUM_REG
* reg_size
) + ((fregno
- FP_ARG_MIN_REG
) * 8);
/* Conditional branch around the FP spills, keyed on CR1.  */
3358 emit_jump_insn (gen_rtx_SET (VOIDmode
,
3360 gen_rtx_IF_THEN_ELSE (VOIDmode
,
3361 gen_rtx_NE (VOIDmode
, cr1
,
3363 gen_rtx_LABEL_REF (VOIDmode
, lab
),
3366 while (fregno
<= FP_ARG_V4_MAX_REG
)
3368 mem
= gen_rtx_MEM (DFmode
, plus_constant (save_area
, off
));
3369 set_mem_alias_set (mem
, set
);
3370 emit_move_insn (mem
, gen_rtx_REG (DFmode
, fregno
));
/* NOTE(review): extraction-garbled fragment -- some interior lines missing;
   tokens byte-identical, comments only.
   Visible logic: builds the va_list type.  Non-V.4 ABIs use `char *';
   V.4 builds a one-element array of a record with fields gpr, fpr,
   overflow_arg_area, reg_save_area.  */
3379 /* Create the va_list data type. */
3382 rs6000_build_va_list ()
3384 tree f_gpr
, f_fpr
, f_ovf
, f_sav
, record
, type_decl
;
3386 /* For AIX, prefer 'char *' because that's what the system
3387 header files like. */
3388 if (DEFAULT_ABI
!= ABI_V4
)
3389 return build_pointer_type (char_type_node
);
3391 record
= (*lang_hooks
.types
.make_type
) (RECORD_TYPE
);
3392 type_decl
= build_decl (TYPE_DECL
, get_identifier ("__va_list_tag"), record
);
3394 f_gpr
= build_decl (FIELD_DECL
, get_identifier ("gpr"),
3395 unsigned_char_type_node
);
3396 f_fpr
= build_decl (FIELD_DECL
, get_identifier ("fpr"),
3397 unsigned_char_type_node
);
3398 f_ovf
= build_decl (FIELD_DECL
, get_identifier ("overflow_arg_area"),
3400 f_sav
= build_decl (FIELD_DECL
, get_identifier ("reg_save_area"),
3403 DECL_FIELD_CONTEXT (f_gpr
) = record
;
3404 DECL_FIELD_CONTEXT (f_fpr
) = record
;
3405 DECL_FIELD_CONTEXT (f_ovf
) = record
;
3406 DECL_FIELD_CONTEXT (f_sav
) = record
;
3408 TREE_CHAIN (record
) = type_decl
;
3409 TYPE_NAME (record
) = type_decl
;
3410 TYPE_FIELDS (record
) = f_gpr
;
3411 TREE_CHAIN (f_gpr
) = f_fpr
;
3412 TREE_CHAIN (f_fpr
) = f_ovf
;
3413 TREE_CHAIN (f_ovf
) = f_sav
;
3415 layout_type (record
);
3417 /* The correct type is an array type of one element. */
3418 return build_array_type (record
, build_index_type (size_zero_node
));
/* NOTE(review): extraction-garbled fragment -- statements split across
   lines; tokens byte-identical, comments only.
   Visible logic: va_start expander.  Non-V.4 defers to the standard
   implementation; V.4 fills in gpr/fpr counts from
   current_function_args_info and computes the overflow and register-save
   area pointers as trees, expanding each MODIFY_EXPR.  */
3421 /* Implement va_start. */
3424 rs6000_va_start (valist
, nextarg
)
3428 HOST_WIDE_INT words
, n_gpr
, n_fpr
;
3429 tree f_gpr
, f_fpr
, f_ovf
, f_sav
;
3430 tree gpr
, fpr
, ovf
, sav
, t
;
3432 /* Only SVR4 needs something special. */
3433 if (DEFAULT_ABI
!= ABI_V4
)
3435 std_expand_builtin_va_start (valist
, nextarg
);
3439 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
3440 f_fpr
= TREE_CHAIN (f_gpr
);
3441 f_ovf
= TREE_CHAIN (f_fpr
);
3442 f_sav
= TREE_CHAIN (f_ovf
);
3444 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
3445 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
);
3446 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
);
3447 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
);
3448 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
);
3450 /* Count number of gp and fp argument registers used. */
3451 words
= current_function_args_info
.words
;
3452 n_gpr
= current_function_args_info
.sysv_gregno
- GP_ARG_MIN_REG
;
3453 n_fpr
= current_function_args_info
.fregno
- FP_ARG_MIN_REG
;
3455 if (TARGET_DEBUG_ARG
)
3457 fputs ("va_start: words = ", stderr
);
3458 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, words
);
3459 fputs (", n_gpr = ", stderr
);
3460 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, n_gpr
);
3461 fputs (", n_fpr = ", stderr
);
3462 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, n_fpr
);
3463 putc ('\n', stderr
);
3466 t
= build (MODIFY_EXPR
, TREE_TYPE (gpr
), gpr
, build_int_2 (n_gpr
, 0));
3467 TREE_SIDE_EFFECTS (t
) = 1;
3468 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3470 t
= build (MODIFY_EXPR
, TREE_TYPE (fpr
), fpr
, build_int_2 (n_fpr
, 0));
3471 TREE_SIDE_EFFECTS (t
) = 1;
3472 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3474 /* Find the overflow area. */
3475 t
= make_tree (TREE_TYPE (ovf
), virtual_incoming_args_rtx
);
3477 t
= build (PLUS_EXPR
, TREE_TYPE (ovf
), t
,
3478 build_int_2 (words
* UNITS_PER_WORD
, 0));
3479 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
3480 TREE_SIDE_EFFECTS (t
) = 1;
3481 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3483 /* Find the register save area. */
3484 t
= make_tree (TREE_TYPE (sav
), virtual_stack_vars_rtx
);
3485 t
= build (PLUS_EXPR
, TREE_TYPE (sav
), t
,
3486 build_int_2 (-RS6000_VARARGS_SIZE
, -1));
3487 t
= build (MODIFY_EXPR
, TREE_TYPE (sav
), sav
, t
);
3488 TREE_SIDE_EFFECTS (t
) = 1;
3489 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
/* Implement va_arg. */
/* NOTE(review): this extraction dropped the function's return-type line,
   its K&R parameter declarations, the enclosing braces, and several runs
   of interior statements (the fused original line numbers jump, e.g.
   3522->3528, 3533->3542).  Only comments were added below; every code
   token that IS visible is reproduced unchanged, and each elision is
   marked.  Confirm against the upstream rs6000.c before relying on this
   text.  */
rs6000_va_arg (valist, type)
  /* NOTE(review): parameter declarations and opening brace elided.  */
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
  rtx lab_false, lab_over, addr_rtx, r;

  /* Only the V.4 ABI lays va_list out as a record; every other ABI uses
     the generic expander.  */
  if (DEFAULT_ABI != ABI_V4)
    return std_expand_builtin_va_arg (valist, type);

  /* The four fields of the V.4 va_list record, in declaration order:
     gpr count, fpr count, overflow-area pointer, save-area pointer.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  /* Build COMPONENT_REFs for each va_list field.  */
  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Size of the argument in bytes, and rounded up to words.  */
  size = int_size_in_bytes (type);
  rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
    /* Aggregates and long doubles are passed by reference. */
    /* NOTE(review): the assignments for this case (presumably setting
       indirect_p/reg/n_reg/sav_ofs/sav_scale) were elided; only this
       one survives.  */
    size = UNITS_PER_WORD;
  else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
    /* FP args go in FP registers, if present. */
    /* NOTE(review): branch body elided by the extraction.  */
    /* Otherwise into GP registers. */
    /* NOTE(review): the else-branch body was elided as well.  */

  /* Pull the value out of the saved registers ... */

  lab_false = gen_label_rtx ();
  lab_over = gen_label_rtx ();
  addr_rtx = gen_reg_rtx (Pmode);

  /* AltiVec vectors never go in registers. */
  if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
      /* NOTE(review): opening brace of this guarded region elided.  */
      TREE_THIS_VOLATILE (reg) = 1;
      /* Jump to lab_false when the register count says the value has
	 spilled to the overflow area.  */
      emit_cmp_and_jump_insns
	(expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
	 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
	 /* NOTE(review): trailing arguments of this call were elided.  */

      /* Long long is aligned in the registers. */
      /* NOTE(review): the conditional wrapping this alignment fixup was
	 elided; these statements round the register counter up to a
	 multiple of n_reg.  */
      u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
		 build_int_2 (n_reg - 1, 0));
      u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
      u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
      TREE_SIDE_EFFECTS (u) = 1;
      expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);

      /* Address = save area base + fixed offset + counter * scale,
	 post-incrementing the counter by n_reg.  */
      t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));

      u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
		 build_int_2 (n_reg, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      u = build1 (CONVERT_EXPR, integer_type_node, u);
      TREE_SIDE_EFFECTS (u) = 1;

      u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      t = build (PLUS_EXPR, ptr_type_node, t, u);
      TREE_SIDE_EFFECTS (t) = 1;

      r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
      /* NOTE(review): a guard line between these two statements
	 (original line 3595) was elided.  */
	emit_move_insn (addr_rtx, r);

      emit_jump_insn (gen_jump (lab_over));

  emit_label (lab_false);

  /* ... otherwise out of the overflow area. */

  /* Make sure we don't find reg 7 for the next int arg.
     All AltiVec vectors go in the overflow area.  So in the AltiVec
     case we need to get the vectors from the overflow area, but
     remember where the GPRs and FPRs are. */
  if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
		    || !TARGET_ALTIVEC))
      /* NOTE(review): opening brace elided.  Force the register counter
	 past the last register so later args go to the stack.  */
      t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Care for on-stack alignment if needed. */
  /* NOTE(review): the default computation of `align` (original lines
     3620-3625) was elided; `align` is used below but its setup is not
     visible here.  */
      /* AltiVec vectors are 16 byte aligned. */
      if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
	/* NOTE(review): body elided (presumably align = 15).  */
      /* Round the overflow pointer up to the required alignment.  */
      t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
      t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  /* NOTE(review): guard line elided here as well (original 3638).  */
    emit_move_insn (addr_rtx, r);

  /* Advance the overflow pointer past this argument.  */
  t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_over);

  /* NOTE(review): the conditional wrapping the indirect (by-reference)
     dereference below was elided, and the remainder of the function --
     including its return statement -- is missing from this
     extraction.  */
      r = gen_rtx_MEM (Pmode, addr_rtx);
      set_mem_alias_set (r, get_varargs_alias_set ());
      emit_move_insn (addr_rtx, r);
/* Register NAME as a machine-dependent builtin with the given TYPE and
   CODE, but only when MASK is enabled in target_flags.
   NOTE(review): the extraction truncated this macro -- the wrapper
   around the body and the tail of the builtin_function argument list
   (original lines 3661, 3664+) are missing, so the definition below
   visibly ends mid-call with a line continuation.  Restore from the
   upstream source.  */
#define def_builtin(MASK, NAME, TYPE, CODE) \
if ((MASK) & target_flags) \
builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
3667 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3669 static const struct builtin_description bdesc_3arg
[] =
3671 { MASK_ALTIVEC
, CODE_FOR_altivec_vmaddfp
, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP
},
3672 { MASK_ALTIVEC
, CODE_FOR_altivec_vmhaddshs
, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS
},
3673 { MASK_ALTIVEC
, CODE_FOR_altivec_vmhraddshs
, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS
},
3674 { MASK_ALTIVEC
, CODE_FOR_altivec_vmladduhm
, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM
},
3675 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumubm
, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM
},
3676 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsummbm
, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM
},
3677 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumuhm
, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM
},
3678 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumshm
, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM
},
3679 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumuhs
, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS
},
3680 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumshs
, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS
},
3681 { MASK_ALTIVEC
, CODE_FOR_altivec_vnmsubfp
, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP
},
3682 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_4sf
, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF
},
3683 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_4si
, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI
},
3684 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_8hi
, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI
},
3685 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_16qi
, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI
},
3686 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_4sf
, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF
},
3687 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_4si
, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI
},
3688 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_8hi
, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI
},
3689 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_16qi
, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI
},
3690 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_16qi
, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI
},
3691 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_8hi
, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI
},
3692 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_4si
, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI
},
3693 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_4sf
, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF
},
3696 /* DST operations: void foo (void *, const int, const char). */
3698 static const struct builtin_description bdesc_dst
[] =
3700 { MASK_ALTIVEC
, CODE_FOR_altivec_dst
, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST
},
3701 { MASK_ALTIVEC
, CODE_FOR_altivec_dstt
, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT
},
3702 { MASK_ALTIVEC
, CODE_FOR_altivec_dstst
, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST
},
3703 { MASK_ALTIVEC
, CODE_FOR_altivec_dststt
, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT
}
3706 /* Simple binary operations: VECc = foo (VECa, VECb). */
3708 static struct builtin_description bdesc_2arg
[] =
3710 { MASK_ALTIVEC
, CODE_FOR_addv16qi3
, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM
},
3711 { MASK_ALTIVEC
, CODE_FOR_addv8hi3
, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM
},
3712 { MASK_ALTIVEC
, CODE_FOR_addv4si3
, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM
},
3713 { MASK_ALTIVEC
, CODE_FOR_addv4sf3
, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP
},
3714 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddcuw
, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW
},
3715 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddubs
, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS
},
3716 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddsbs
, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS
},
3717 { MASK_ALTIVEC
, CODE_FOR_altivec_vadduhs
, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS
},
3718 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddshs
, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS
},
3719 { MASK_ALTIVEC
, CODE_FOR_altivec_vadduws
, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS
},
3720 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddsws
, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS
},
3721 { MASK_ALTIVEC
, CODE_FOR_andv4si3
, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND
},
3722 { MASK_ALTIVEC
, CODE_FOR_altivec_vandc
, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC
},
3723 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgub
, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB
},
3724 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsb
, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB
},
3725 { MASK_ALTIVEC
, CODE_FOR_altivec_vavguh
, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH
},
3726 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsh
, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH
},
3727 { MASK_ALTIVEC
, CODE_FOR_altivec_vavguw
, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW
},
3728 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsw
, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW
},
3729 { MASK_ALTIVEC
, CODE_FOR_altivec_vcfux
, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX
},
3730 { MASK_ALTIVEC
, CODE_FOR_altivec_vcfsx
, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX
},
3731 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpbfp
, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP
},
3732 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequb
, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB
},
3733 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequh
, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH
},
3734 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequw
, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW
},
3735 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpeqfp
, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP
},
3736 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgefp
, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP
},
3737 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtub
, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB
},
3738 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsb
, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB
},
3739 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtuh
, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH
},
3740 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsh
, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH
},
3741 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtuw
, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW
},
3742 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsw
, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW
},
3743 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtfp
, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP
},
3744 { MASK_ALTIVEC
, CODE_FOR_altivec_vctsxs
, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS
},
3745 { MASK_ALTIVEC
, CODE_FOR_altivec_vctuxs
, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS
},
3746 { MASK_ALTIVEC
, CODE_FOR_umaxv16qi3
, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB
},
3747 { MASK_ALTIVEC
, CODE_FOR_smaxv16qi3
, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB
},
3748 { MASK_ALTIVEC
, CODE_FOR_umaxv8hi3
, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH
},
3749 { MASK_ALTIVEC
, CODE_FOR_smaxv8hi3
, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH
},
3750 { MASK_ALTIVEC
, CODE_FOR_umaxv4si3
, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW
},
3751 { MASK_ALTIVEC
, CODE_FOR_smaxv4si3
, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW
},
3752 { MASK_ALTIVEC
, CODE_FOR_smaxv4sf3
, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP
},
3753 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghb
, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB
},
3754 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghh
, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH
},
3755 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghw
, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW
},
3756 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglb
, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB
},
3757 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglh
, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH
},
3758 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglw
, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW
},
3759 { MASK_ALTIVEC
, CODE_FOR_uminv16qi3
, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB
},
3760 { MASK_ALTIVEC
, CODE_FOR_sminv16qi3
, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB
},
3761 { MASK_ALTIVEC
, CODE_FOR_uminv8hi3
, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH
},
3762 { MASK_ALTIVEC
, CODE_FOR_sminv8hi3
, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH
},
3763 { MASK_ALTIVEC
, CODE_FOR_uminv4si3
, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW
},
3764 { MASK_ALTIVEC
, CODE_FOR_sminv4si3
, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW
},
3765 { MASK_ALTIVEC
, CODE_FOR_sminv4sf3
, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP
},
3766 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuleub
, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB
},
3767 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulesb
, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB
},
3768 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuleuh
, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH
},
3769 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulesh
, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH
},
3770 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuloub
, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB
},
3771 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulosb
, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB
},
3772 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulouh
, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH
},
3773 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulosh
, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH
},
3774 { MASK_ALTIVEC
, CODE_FOR_altivec_vnor
, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR
},
3775 { MASK_ALTIVEC
, CODE_FOR_iorv4si3
, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR
},
3776 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhum
, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM
},
3777 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwum
, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM
},
3778 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkpx
, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX
},
3779 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhss
, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS
},
3780 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkshss
, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS
},
3781 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwss
, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS
},
3782 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkswss
, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS
},
3783 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhus
, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS
},
3784 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkshus
, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS
},
3785 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwus
, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS
},
3786 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkswus
, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS
},
3787 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlb
, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB
},
3788 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlh
, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH
},
3789 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlw
, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW
},
3790 { MASK_ALTIVEC
, CODE_FOR_altivec_vslb
, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB
},
3791 { MASK_ALTIVEC
, CODE_FOR_altivec_vslh
, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH
},
3792 { MASK_ALTIVEC
, CODE_FOR_altivec_vslw
, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW
},
3793 { MASK_ALTIVEC
, CODE_FOR_altivec_vsl
, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL
},
3794 { MASK_ALTIVEC
, CODE_FOR_altivec_vslo
, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO
},
3795 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltb
, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB
},
3796 { MASK_ALTIVEC
, CODE_FOR_altivec_vsplth
, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH
},
3797 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltw
, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW
},
3798 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrb
, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB
},
3799 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrh
, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH
},
3800 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrw
, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW
},
3801 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrab
, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB
},
3802 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrah
, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH
},
3803 { MASK_ALTIVEC
, CODE_FOR_altivec_vsraw
, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW
},
3804 { MASK_ALTIVEC
, CODE_FOR_altivec_vsr
, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR
},
3805 { MASK_ALTIVEC
, CODE_FOR_altivec_vsro
, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO
},
3806 { MASK_ALTIVEC
, CODE_FOR_subv16qi3
, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM
},
3807 { MASK_ALTIVEC
, CODE_FOR_subv8hi3
, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM
},
3808 { MASK_ALTIVEC
, CODE_FOR_subv4si3
, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM
},
3809 { MASK_ALTIVEC
, CODE_FOR_subv4sf3
, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP
},
3810 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubcuw
, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW
},
3811 { MASK_ALTIVEC
, CODE_FOR_altivec_vsububs
, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS
},
3812 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubsbs
, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS
},
3813 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubuhs
, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS
},
3814 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubshs
, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS
},
3815 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubuws
, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS
},
3816 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubsws
, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS
},
3817 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4ubs
, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS
},
3818 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4sbs
, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS
},
3819 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4shs
, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS
},
3820 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum2sws
, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS
},
3821 { MASK_ALTIVEC
, CODE_FOR_altivec_vsumsws
, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS
},
3822 { MASK_ALTIVEC
, CODE_FOR_xorv4si3
, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR
},
3824 /* Place holder, leave as first spe builtin. */
3825 { 0, CODE_FOR_spe_evaddw
, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW
},
3826 { 0, CODE_FOR_spe_evand
, "__builtin_spe_evand", SPE_BUILTIN_EVAND
},
3827 { 0, CODE_FOR_spe_evandc
, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC
},
3828 { 0, CODE_FOR_spe_evdivws
, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS
},
3829 { 0, CODE_FOR_spe_evdivwu
, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU
},
3830 { 0, CODE_FOR_spe_eveqv
, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV
},
3831 { 0, CODE_FOR_spe_evfsadd
, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD
},
3832 { 0, CODE_FOR_spe_evfsdiv
, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV
},
3833 { 0, CODE_FOR_spe_evfsmul
, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL
},
3834 { 0, CODE_FOR_spe_evfssub
, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB
},
3835 { 0, CODE_FOR_spe_evmergehi
, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI
},
3836 { 0, CODE_FOR_spe_evmergehilo
, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO
},
3837 { 0, CODE_FOR_spe_evmergelo
, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO
},
3838 { 0, CODE_FOR_spe_evmergelohi
, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI
},
3839 { 0, CODE_FOR_spe_evmhegsmfaa
, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA
},
3840 { 0, CODE_FOR_spe_evmhegsmfan
, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN
},
3841 { 0, CODE_FOR_spe_evmhegsmiaa
, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA
},
3842 { 0, CODE_FOR_spe_evmhegsmian
, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN
},
3843 { 0, CODE_FOR_spe_evmhegumiaa
, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA
},
3844 { 0, CODE_FOR_spe_evmhegumian
, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN
},
3845 { 0, CODE_FOR_spe_evmhesmf
, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF
},
3846 { 0, CODE_FOR_spe_evmhesmfa
, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA
},
3847 { 0, CODE_FOR_spe_evmhesmfaaw
, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW
},
3848 { 0, CODE_FOR_spe_evmhesmfanw
, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW
},
3849 { 0, CODE_FOR_spe_evmhesmi
, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI
},
3850 { 0, CODE_FOR_spe_evmhesmia
, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA
},
3851 { 0, CODE_FOR_spe_evmhesmiaaw
, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW
},
3852 { 0, CODE_FOR_spe_evmhesmianw
, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW
},
3853 { 0, CODE_FOR_spe_evmhessf
, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF
},
3854 { 0, CODE_FOR_spe_evmhessfa
, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA
},
3855 { 0, CODE_FOR_spe_evmhessfaaw
, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW
},
3856 { 0, CODE_FOR_spe_evmhessfanw
, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW
},
3857 { 0, CODE_FOR_spe_evmhessiaaw
, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW
},
3858 { 0, CODE_FOR_spe_evmhessianw
, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW
},
3859 { 0, CODE_FOR_spe_evmheumi
, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI
},
3860 { 0, CODE_FOR_spe_evmheumia
, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA
},
3861 { 0, CODE_FOR_spe_evmheumiaaw
, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW
},
3862 { 0, CODE_FOR_spe_evmheumianw
, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW
},
3863 { 0, CODE_FOR_spe_evmheusiaaw
, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW
},
3864 { 0, CODE_FOR_spe_evmheusianw
, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW
},
3865 { 0, CODE_FOR_spe_evmhogsmfaa
, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA
},
3866 { 0, CODE_FOR_spe_evmhogsmfan
, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN
},
3867 { 0, CODE_FOR_spe_evmhogsmiaa
, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA
},
3868 { 0, CODE_FOR_spe_evmhogsmian
, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN
},
3869 { 0, CODE_FOR_spe_evmhogumiaa
, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA
},
3870 { 0, CODE_FOR_spe_evmhogumian
, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN
},
3871 { 0, CODE_FOR_spe_evmhosmf
, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF
},
3872 { 0, CODE_FOR_spe_evmhosmfa
, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA
},
3873 { 0, CODE_FOR_spe_evmhosmfaaw
, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW
},
3874 { 0, CODE_FOR_spe_evmhosmfanw
, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW
},
3875 { 0, CODE_FOR_spe_evmhosmi
, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI
},
3876 { 0, CODE_FOR_spe_evmhosmia
, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA
},
3877 { 0, CODE_FOR_spe_evmhosmiaaw
, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW
},
3878 { 0, CODE_FOR_spe_evmhosmianw
, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW
},
3879 { 0, CODE_FOR_spe_evmhossf
, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF
},
3880 { 0, CODE_FOR_spe_evmhossfa
, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA
},
3881 { 0, CODE_FOR_spe_evmhossfaaw
, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW
},
3882 { 0, CODE_FOR_spe_evmhossfanw
, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW
},
3883 { 0, CODE_FOR_spe_evmhossiaaw
, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW
},
3884 { 0, CODE_FOR_spe_evmhossianw
, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW
},
3885 { 0, CODE_FOR_spe_evmhoumi
, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI
},
3886 { 0, CODE_FOR_spe_evmhoumia
, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA
},
3887 { 0, CODE_FOR_spe_evmhoumiaaw
, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW
},
3888 { 0, CODE_FOR_spe_evmhoumianw
, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW
},
3889 { 0, CODE_FOR_spe_evmhousiaaw
, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW
},
3890 { 0, CODE_FOR_spe_evmhousianw
, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW
},
3891 { 0, CODE_FOR_spe_evmwhsmf
, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF
},
3892 { 0, CODE_FOR_spe_evmwhsmfa
, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA
},
3893 { 0, CODE_FOR_spe_evmwhsmi
, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI
},
3894 { 0, CODE_FOR_spe_evmwhsmia
, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA
},
3895 { 0, CODE_FOR_spe_evmwhssf
, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF
},
3896 { 0, CODE_FOR_spe_evmwhssfa
, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA
},
3897 { 0, CODE_FOR_spe_evmwhumi
, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI
},
3898 { 0, CODE_FOR_spe_evmwhumia
, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA
},
3899 { 0, CODE_FOR_spe_evmwlsmf
, "__builtin_spe_evmwlsmf", SPE_BUILTIN_EVMWLSMF
},
3900 { 0, CODE_FOR_spe_evmwlsmfa
, "__builtin_spe_evmwlsmfa", SPE_BUILTIN_EVMWLSMFA
},
3901 { 0, CODE_FOR_spe_evmwlsmfaaw
, "__builtin_spe_evmwlsmfaaw", SPE_BUILTIN_EVMWLSMFAAW
},
3902 { 0, CODE_FOR_spe_evmwlsmfanw
, "__builtin_spe_evmwlsmfanw", SPE_BUILTIN_EVMWLSMFANW
},
3903 { 0, CODE_FOR_spe_evmwlsmiaaw
, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW
},
3904 { 0, CODE_FOR_spe_evmwlsmianw
, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW
},
3905 { 0, CODE_FOR_spe_evmwlssf
, "__builtin_spe_evmwlssf", SPE_BUILTIN_EVMWLSSF
},
3906 { 0, CODE_FOR_spe_evmwlssfa
, "__builtin_spe_evmwlssfa", SPE_BUILTIN_EVMWLSSFA
},
3907 { 0, CODE_FOR_spe_evmwlssfaaw
, "__builtin_spe_evmwlssfaaw", SPE_BUILTIN_EVMWLSSFAAW
},
3908 { 0, CODE_FOR_spe_evmwlssfanw
, "__builtin_spe_evmwlssfanw", SPE_BUILTIN_EVMWLSSFANW
},
3909 { 0, CODE_FOR_spe_evmwlssiaaw
, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW
},
3910 { 0, CODE_FOR_spe_evmwlssianw
, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW
},
3911 { 0, CODE_FOR_spe_evmwlumi
, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI
},
3912 { 0, CODE_FOR_spe_evmwlumia
, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA
},
3913 { 0, CODE_FOR_spe_evmwlumiaaw
, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW
},
3914 { 0, CODE_FOR_spe_evmwlumianw
, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW
},
3915 { 0, CODE_FOR_spe_evmwlusiaaw
, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW
},
3916 { 0, CODE_FOR_spe_evmwlusianw
, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW
},
3917 { 0, CODE_FOR_spe_evmwsmf
, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF
},
3918 { 0, CODE_FOR_spe_evmwsmfa
, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA
},
3919 { 0, CODE_FOR_spe_evmwsmfaa
, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA
},
3920 { 0, CODE_FOR_spe_evmwsmfan
, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN
},
3921 { 0, CODE_FOR_spe_evmwsmi
, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI
},
3922 { 0, CODE_FOR_spe_evmwsmia
, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA
},
3923 { 0, CODE_FOR_spe_evmwsmiaa
, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA
},
3924 { 0, CODE_FOR_spe_evmwsmian
, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN
},
3925 { 0, CODE_FOR_spe_evmwssf
, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF
},
3926 { 0, CODE_FOR_spe_evmwssfa
, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA
},
3927 { 0, CODE_FOR_spe_evmwssfaa
, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA
},
3928 { 0, CODE_FOR_spe_evmwssfan
, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN
},
3929 { 0, CODE_FOR_spe_evmwumi
, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI
},
3930 { 0, CODE_FOR_spe_evmwumia
, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA
},
3931 { 0, CODE_FOR_spe_evmwumiaa
, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA
},
3932 { 0, CODE_FOR_spe_evmwumian
, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN
},
3933 { 0, CODE_FOR_spe_evnand
, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND
},
3934 { 0, CODE_FOR_spe_evnor
, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR
},
3935 { 0, CODE_FOR_spe_evor
, "__builtin_spe_evor", SPE_BUILTIN_EVOR
},
3936 { 0, CODE_FOR_spe_evorc
, "__builtin_spe_evorc", SPE_BUILTIN_EVORC
},
3937 { 0, CODE_FOR_spe_evrlw
, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW
},
3938 { 0, CODE_FOR_spe_evslw
, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW
},
3939 { 0, CODE_FOR_spe_evsrws
, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS
},
3940 { 0, CODE_FOR_spe_evsrwu
, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU
},
3941 { 0, CODE_FOR_spe_evsubfw
, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW
},
3943 /* SPE binary operations expecting a 5-bit unsigned literal. */
3944 { 0, CODE_FOR_spe_evaddiw
, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW
},
3946 { 0, CODE_FOR_spe_evrlwi
, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI
},
3947 { 0, CODE_FOR_spe_evslwi
, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI
},
3948 { 0, CODE_FOR_spe_evsrwis
, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS
},
3949 { 0, CODE_FOR_spe_evsrwiu
, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU
},
3950 { 0, CODE_FOR_spe_evsubifw
, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW
},
3951 { 0, CODE_FOR_spe_evmwhssfaa
, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA
},
3952 { 0, CODE_FOR_spe_evmwhssmaa
, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA
},
3953 { 0, CODE_FOR_spe_evmwhsmfaa
, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA
},
3954 { 0, CODE_FOR_spe_evmwhsmiaa
, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA
},
3955 { 0, CODE_FOR_spe_evmwhusiaa
, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA
},
3956 { 0, CODE_FOR_spe_evmwhumiaa
, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA
},
3957 { 0, CODE_FOR_spe_evmwhssfan
, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN
},
3958 { 0, CODE_FOR_spe_evmwhssian
, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN
},
3959 { 0, CODE_FOR_spe_evmwhsmfan
, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN
},
3960 { 0, CODE_FOR_spe_evmwhsmian
, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN
},
3961 { 0, CODE_FOR_spe_evmwhusian
, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN
},
3962 { 0, CODE_FOR_spe_evmwhumian
, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN
},
3963 { 0, CODE_FOR_spe_evmwhgssfaa
, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA
},
3964 { 0, CODE_FOR_spe_evmwhgsmfaa
, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA
},
3965 { 0, CODE_FOR_spe_evmwhgsmiaa
, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA
},
3966 { 0, CODE_FOR_spe_evmwhgumiaa
, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA
},
3967 { 0, CODE_FOR_spe_evmwhgssfan
, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN
},
3968 { 0, CODE_FOR_spe_evmwhgsmfan
, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN
},
3969 { 0, CODE_FOR_spe_evmwhgsmian
, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN
},
3970 { 0, CODE_FOR_spe_evmwhgumian
, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN
},
3971 { 0, CODE_FOR_spe_brinc
, "__builtin_spe_brinc", SPE_BUILTIN_BRINC
},
3973 /* Place-holder. Leave as last binary SPE builtin. */
3974 { 0, CODE_FOR_spe_evxor
, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR
},
3977 /* AltiVec predicates. */
3979 struct builtin_description_predicates
3981 const unsigned int mask
;
3982 const enum insn_code icode
;
3984 const char *const name
;
3985 const enum rs6000_builtins code
;
3988 static const struct builtin_description_predicates bdesc_altivec_preds
[] =
3990 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P
},
3991 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P
},
3992 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P
},
3993 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P
},
3994 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P
},
3995 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P
},
3996 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P
},
3997 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P
},
3998 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P
},
3999 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P
},
4000 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P
},
4001 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P
},
4002 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P
}
4005 /* SPE predicates. */
4006 static struct builtin_description bdesc_spe_predicates
[] =
4008 /* Place-holder. Leave as first. */
4009 { 0, CODE_FOR_spe_evcmpeq
, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ
},
4010 { 0, CODE_FOR_spe_evcmpgts
, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS
},
4011 { 0, CODE_FOR_spe_evcmpgtu
, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU
},
4012 { 0, CODE_FOR_spe_evcmplts
, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS
},
4013 { 0, CODE_FOR_spe_evcmpltu
, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU
},
4014 { 0, CODE_FOR_spe_evfscmpeq
, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ
},
4015 { 0, CODE_FOR_spe_evfscmpgt
, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT
},
4016 { 0, CODE_FOR_spe_evfscmplt
, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT
},
4017 { 0, CODE_FOR_spe_evfststeq
, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ
},
4018 { 0, CODE_FOR_spe_evfststgt
, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT
},
4019 /* Place-holder. Leave as last. */
4020 { 0, CODE_FOR_spe_evfststlt
, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT
},
4023 /* SPE evsel predicates. */
4024 static struct builtin_description bdesc_spe_evsel
[] =
4026 /* Place-holder. Leave as first. */
4027 { 0, CODE_FOR_spe_evcmpgts
, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS
},
4028 { 0, CODE_FOR_spe_evcmpgtu
, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU
},
4029 { 0, CODE_FOR_spe_evcmplts
, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS
},
4030 { 0, CODE_FOR_spe_evcmpltu
, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU
},
4031 { 0, CODE_FOR_spe_evcmpeq
, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ
},
4032 { 0, CODE_FOR_spe_evfscmpgt
, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT
},
4033 { 0, CODE_FOR_spe_evfscmplt
, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT
},
4034 { 0, CODE_FOR_spe_evfscmpeq
, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ
},
4035 { 0, CODE_FOR_spe_evfststgt
, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT
},
4036 { 0, CODE_FOR_spe_evfststlt
, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT
},
4037 /* Place-holder. Leave as last. */
4038 { 0, CODE_FOR_spe_evfststeq
, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ
},
4041 /* ABS* opreations. */
4043 static const struct builtin_description bdesc_abs
[] =
4045 { MASK_ALTIVEC
, CODE_FOR_absv4si2
, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI
},
4046 { MASK_ALTIVEC
, CODE_FOR_absv8hi2
, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI
},
4047 { MASK_ALTIVEC
, CODE_FOR_absv4sf2
, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF
},
4048 { MASK_ALTIVEC
, CODE_FOR_absv16qi2
, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI
},
4049 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v4si
, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI
},
4050 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v8hi
, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI
},
4051 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v16qi
, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI
}
4054 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
4057 static struct builtin_description bdesc_1arg
[] =
4059 { MASK_ALTIVEC
, CODE_FOR_altivec_vexptefp
, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP
},
4060 { MASK_ALTIVEC
, CODE_FOR_altivec_vlogefp
, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP
},
4061 { MASK_ALTIVEC
, CODE_FOR_altivec_vrefp
, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP
},
4062 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfim
, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM
},
4063 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfin
, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN
},
4064 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfip
, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP
},
4065 { MASK_ALTIVEC
, CODE_FOR_ftruncv4sf2
, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ
},
4066 { MASK_ALTIVEC
, CODE_FOR_altivec_vrsqrtefp
, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP
},
4067 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltisb
, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB
},
4068 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltish
, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH
},
4069 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltisw
, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW
},
4070 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhsb
, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB
},
4071 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhpx
, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX
},
4072 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhsh
, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH
},
4073 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklsb
, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB
},
4074 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklpx
, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX
},
4075 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklsh
, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH
},
4077 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
4078 end with SPE_BUILTIN_EVSUBFUSIAAW. */
4079 { 0, CODE_FOR_spe_evabs
, "__builtin_spe_evabs", SPE_BUILTIN_EVABS
},
4080 { 0, CODE_FOR_spe_evaddsmiaaw
, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW
},
4081 { 0, CODE_FOR_spe_evaddssiaaw
, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW
},
4082 { 0, CODE_FOR_spe_evaddumiaaw
, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW
},
4083 { 0, CODE_FOR_spe_evaddusiaaw
, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW
},
4084 { 0, CODE_FOR_spe_evcntlsw
, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW
},
4085 { 0, CODE_FOR_spe_evcntlzw
, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW
},
4086 { 0, CODE_FOR_spe_evextsb
, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB
},
4087 { 0, CODE_FOR_spe_evextsh
, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH
},
4088 { 0, CODE_FOR_spe_evfsabs
, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS
},
4089 { 0, CODE_FOR_spe_evfscfsf
, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF
},
4090 { 0, CODE_FOR_spe_evfscfsi
, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI
},
4091 { 0, CODE_FOR_spe_evfscfuf
, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF
},
4092 { 0, CODE_FOR_spe_evfscfui
, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI
},
4093 { 0, CODE_FOR_spe_evfsctsf
, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF
},
4094 { 0, CODE_FOR_spe_evfsctsi
, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI
},
4095 { 0, CODE_FOR_spe_evfsctsiz
, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ
},
4096 { 0, CODE_FOR_spe_evfsctuf
, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF
},
4097 { 0, CODE_FOR_spe_evfsctui
, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI
},
4098 { 0, CODE_FOR_spe_evfsctuiz
, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ
},
4099 { 0, CODE_FOR_spe_evfsnabs
, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS
},
4100 { 0, CODE_FOR_spe_evfsneg
, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG
},
4101 { 0, CODE_FOR_spe_evmra
, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA
},
4102 { 0, CODE_FOR_spe_evneg
, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG
},
4103 { 0, CODE_FOR_spe_evrndw
, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW
},
4104 { 0, CODE_FOR_spe_evsubfsmiaaw
, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW
},
4105 { 0, CODE_FOR_spe_evsubfssiaaw
, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW
},
4106 { 0, CODE_FOR_spe_evsubfumiaaw
, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW
},
4107 { 0, CODE_FOR_spe_evsplatfi
, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI
},
4108 { 0, CODE_FOR_spe_evsplati
, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI
},
4110 /* Place-holder. Leave as last unary SPE builtin. */
4111 { 0, CODE_FOR_spe_evsubfusiaaw
, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW
},
4115 rs6000_expand_unop_builtin (icode
, arglist
, target
)
4116 enum insn_code icode
;
4121 tree arg0
= TREE_VALUE (arglist
);
4122 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4123 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4124 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4126 if (icode
== CODE_FOR_nothing
)
4127 /* Builtin not supported on this processor. */
4130 /* If we got invalid arguments bail out before generating bad rtl. */
4131 if (arg0
== error_mark_node
)
4134 if (icode
== CODE_FOR_altivec_vspltisb
4135 || icode
== CODE_FOR_altivec_vspltish
4136 || icode
== CODE_FOR_altivec_vspltisw
4137 || icode
== CODE_FOR_spe_evsplatfi
4138 || icode
== CODE_FOR_spe_evsplati
)
4140 /* Only allow 5-bit *signed* literals. */
4141 if (GET_CODE (op0
) != CONST_INT
4142 || INTVAL (op0
) > 0x1f
4143 || INTVAL (op0
) < -0x1f)
4145 error ("argument 1 must be a 5-bit signed literal");
4151 || GET_MODE (target
) != tmode
4152 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4153 target
= gen_reg_rtx (tmode
);
4155 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4156 op0
= copy_to_mode_reg (mode0
, op0
);
4158 pat
= GEN_FCN (icode
) (target
, op0
);
4167 altivec_expand_abs_builtin (icode
, arglist
, target
)
4168 enum insn_code icode
;
4172 rtx pat
, scratch1
, scratch2
;
4173 tree arg0
= TREE_VALUE (arglist
);
4174 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4175 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4176 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4178 /* If we have invalid arguments, bail out before generating bad rtl. */
4179 if (arg0
== error_mark_node
)
4183 || GET_MODE (target
) != tmode
4184 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4185 target
= gen_reg_rtx (tmode
);
4187 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4188 op0
= copy_to_mode_reg (mode0
, op0
);
4190 scratch1
= gen_reg_rtx (mode0
);
4191 scratch2
= gen_reg_rtx (mode0
);
4193 pat
= GEN_FCN (icode
) (target
, op0
, scratch1
, scratch2
);
4202 rs6000_expand_binop_builtin (icode
, arglist
, target
)
4203 enum insn_code icode
;
4208 tree arg0
= TREE_VALUE (arglist
);
4209 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4210 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4211 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4212 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4213 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4214 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
4216 if (icode
== CODE_FOR_nothing
)
4217 /* Builtin not supported on this processor. */
4220 /* If we got invalid arguments bail out before generating bad rtl. */
4221 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
4224 if (icode
== CODE_FOR_altivec_vcfux
4225 || icode
== CODE_FOR_altivec_vcfsx
4226 || icode
== CODE_FOR_altivec_vctsxs
4227 || icode
== CODE_FOR_altivec_vctuxs
4228 || icode
== CODE_FOR_altivec_vspltb
4229 || icode
== CODE_FOR_altivec_vsplth
4230 || icode
== CODE_FOR_altivec_vspltw
4231 || icode
== CODE_FOR_spe_evaddiw
4232 || icode
== CODE_FOR_spe_evldd
4233 || icode
== CODE_FOR_spe_evldh
4234 || icode
== CODE_FOR_spe_evldw
4235 || icode
== CODE_FOR_spe_evlhhesplat
4236 || icode
== CODE_FOR_spe_evlhhossplat
4237 || icode
== CODE_FOR_spe_evlhhousplat
4238 || icode
== CODE_FOR_spe_evlwhe
4239 || icode
== CODE_FOR_spe_evlwhos
4240 || icode
== CODE_FOR_spe_evlwhou
4241 || icode
== CODE_FOR_spe_evlwhsplat
4242 || icode
== CODE_FOR_spe_evlwwsplat
4243 || icode
== CODE_FOR_spe_evrlwi
4244 || icode
== CODE_FOR_spe_evslwi
4245 || icode
== CODE_FOR_spe_evsrwis
4246 || icode
== CODE_FOR_spe_evsrwiu
)
4248 /* Only allow 5-bit unsigned literals. */
4249 if (TREE_CODE (arg1
) != INTEGER_CST
4250 || TREE_INT_CST_LOW (arg1
) & ~0x1f)
4252 error ("argument 2 must be a 5-bit unsigned literal");
4258 || GET_MODE (target
) != tmode
4259 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4260 target
= gen_reg_rtx (tmode
);
4262 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4263 op0
= copy_to_mode_reg (mode0
, op0
);
4264 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
4265 op1
= copy_to_mode_reg (mode1
, op1
);
4267 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
4276 altivec_expand_predicate_builtin (icode
, opcode
, arglist
, target
)
4277 enum insn_code icode
;
4283 tree cr6_form
= TREE_VALUE (arglist
);
4284 tree arg0
= TREE_VALUE (TREE_CHAIN (arglist
));
4285 tree arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4286 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4287 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4288 enum machine_mode tmode
= SImode
;
4289 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4290 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
4293 if (TREE_CODE (cr6_form
) != INTEGER_CST
)
4295 error ("argument 1 of __builtin_altivec_predicate must be a constant");
4299 cr6_form_int
= TREE_INT_CST_LOW (cr6_form
);
4304 /* If we have invalid arguments, bail out before generating bad rtl. */
4305 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
4309 || GET_MODE (target
) != tmode
4310 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4311 target
= gen_reg_rtx (tmode
);
4313 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4314 op0
= copy_to_mode_reg (mode0
, op0
);
4315 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
4316 op1
= copy_to_mode_reg (mode1
, op1
);
4318 scratch
= gen_reg_rtx (mode0
);
4320 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
,
4321 gen_rtx (SYMBOL_REF
, Pmode
, opcode
));
4326 /* The vec_any* and vec_all* predicates use the same opcodes for two
4327 different operations, but the bits in CR6 will be different
4328 depending on what information we want. So we have to play tricks
4329 with CR6 to get the right bits out.
4331 If you think this is disgusting, look at the specs for the
4332 AltiVec predicates. */
4334 switch (cr6_form_int
)
4337 emit_insn (gen_cr6_test_for_zero (target
));
4340 emit_insn (gen_cr6_test_for_zero_reverse (target
));
4343 emit_insn (gen_cr6_test_for_lt (target
));
4346 emit_insn (gen_cr6_test_for_lt_reverse (target
));
4349 error ("argument 1 of __builtin_altivec_predicate is out of range");
4357 altivec_expand_stv_builtin (icode
, arglist
)
4358 enum insn_code icode
;
4361 tree arg0
= TREE_VALUE (arglist
);
4362 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4363 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4364 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4365 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4366 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
4368 enum machine_mode mode0
= insn_data
[icode
].operand
[0].mode
;
4369 enum machine_mode mode1
= insn_data
[icode
].operand
[1].mode
;
4370 enum machine_mode mode2
= insn_data
[icode
].operand
[2].mode
;
4372 /* Invalid arguments. Bail before doing anything stoopid! */
4373 if (arg0
== error_mark_node
4374 || arg1
== error_mark_node
4375 || arg2
== error_mark_node
)
4378 if (! (*insn_data
[icode
].operand
[2].predicate
) (op0
, mode2
))
4379 op0
= copy_to_mode_reg (mode2
, op0
);
4380 if (! (*insn_data
[icode
].operand
[0].predicate
) (op1
, mode0
))
4381 op1
= copy_to_mode_reg (mode0
, op1
);
4382 if (! (*insn_data
[icode
].operand
[1].predicate
) (op2
, mode1
))
4383 op2
= copy_to_mode_reg (mode1
, op2
);
4385 pat
= GEN_FCN (icode
) (op1
, op2
, op0
);
4392 rs6000_expand_ternop_builtin (icode
, arglist
, target
)
4393 enum insn_code icode
;
4398 tree arg0
= TREE_VALUE (arglist
);
4399 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4400 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4401 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4402 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4403 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
4404 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4405 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4406 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
4407 enum machine_mode mode2
= insn_data
[icode
].operand
[3].mode
;
4409 if (icode
== CODE_FOR_nothing
)
4410 /* Builtin not supported on this processor. */
4413 /* If we got invalid arguments bail out before generating bad rtl. */
4414 if (arg0
== error_mark_node
4415 || arg1
== error_mark_node
4416 || arg2
== error_mark_node
)
4419 if (icode
== CODE_FOR_altivec_vsldoi_4sf
4420 || icode
== CODE_FOR_altivec_vsldoi_4si
4421 || icode
== CODE_FOR_altivec_vsldoi_8hi
4422 || icode
== CODE_FOR_altivec_vsldoi_16qi
)
4424 /* Only allow 4-bit unsigned literals. */
4425 if (TREE_CODE (arg2
) != INTEGER_CST
4426 || TREE_INT_CST_LOW (arg2
) & ~0xf)
4428 error ("argument 3 must be a 4-bit unsigned literal");
4434 || GET_MODE (target
) != tmode
4435 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4436 target
= gen_reg_rtx (tmode
);
4438 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4439 op0
= copy_to_mode_reg (mode0
, op0
);
4440 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
4441 op1
= copy_to_mode_reg (mode1
, op1
);
4442 if (! (*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
))
4443 op2
= copy_to_mode_reg (mode2
, op2
);
4445 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
);
4453 /* Expand the lvx builtins. */
4455 altivec_expand_ld_builtin (exp
, target
, expandedp
)
4460 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4461 tree arglist
= TREE_OPERAND (exp
, 1);
4462 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4464 enum machine_mode tmode
, mode0
;
4466 enum insn_code icode
;
4470 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi
:
4471 icode
= CODE_FOR_altivec_lvx_16qi
;
4473 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi
:
4474 icode
= CODE_FOR_altivec_lvx_8hi
;
4476 case ALTIVEC_BUILTIN_LD_INTERNAL_4si
:
4477 icode
= CODE_FOR_altivec_lvx_4si
;
4479 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf
:
4480 icode
= CODE_FOR_altivec_lvx_4sf
;
4489 arg0
= TREE_VALUE (arglist
);
4490 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4491 tmode
= insn_data
[icode
].operand
[0].mode
;
4492 mode0
= insn_data
[icode
].operand
[1].mode
;
4495 || GET_MODE (target
) != tmode
4496 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4497 target
= gen_reg_rtx (tmode
);
4499 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4500 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
4502 pat
= GEN_FCN (icode
) (target
, op0
);
4509 /* Expand the stvx builtins. */
4511 altivec_expand_st_builtin (exp
, target
, expandedp
)
4513 rtx target ATTRIBUTE_UNUSED
;
4516 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4517 tree arglist
= TREE_OPERAND (exp
, 1);
4518 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4520 enum machine_mode mode0
, mode1
;
4522 enum insn_code icode
;
4526 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi
:
4527 icode
= CODE_FOR_altivec_stvx_16qi
;
4529 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi
:
4530 icode
= CODE_FOR_altivec_stvx_8hi
;
4532 case ALTIVEC_BUILTIN_ST_INTERNAL_4si
:
4533 icode
= CODE_FOR_altivec_stvx_4si
;
4535 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf
:
4536 icode
= CODE_FOR_altivec_stvx_4sf
;
4543 arg0
= TREE_VALUE (arglist
);
4544 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4545 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4546 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4547 mode0
= insn_data
[icode
].operand
[0].mode
;
4548 mode1
= insn_data
[icode
].operand
[1].mode
;
4550 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
4551 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
4552 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
4553 op1
= copy_to_mode_reg (mode1
, op1
);
4555 pat
= GEN_FCN (icode
) (op0
, op1
);
4563 /* Expand the dst builtins. */
4565 altivec_expand_dst_builtin (exp
, target
, expandedp
)
4567 rtx target ATTRIBUTE_UNUSED
;
4570 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4571 tree arglist
= TREE_OPERAND (exp
, 1);
4572 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4573 tree arg0
, arg1
, arg2
;
4574 enum machine_mode mode0
, mode1
, mode2
;
4575 rtx pat
, op0
, op1
, op2
;
4576 struct builtin_description
*d
;
4581 /* Handle DST variants. */
4582 d
= (struct builtin_description
*) bdesc_dst
;
4583 for (i
= 0; i
< ARRAY_SIZE (bdesc_dst
); i
++, d
++)
4584 if (d
->code
== fcode
)
4586 arg0
= TREE_VALUE (arglist
);
4587 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4588 arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4589 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4590 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4591 op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
4592 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
4593 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
4594 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
4596 /* Invalid arguments, bail out before generating bad rtl. */
4597 if (arg0
== error_mark_node
4598 || arg1
== error_mark_node
4599 || arg2
== error_mark_node
)
4602 if (TREE_CODE (arg2
) != INTEGER_CST
4603 || TREE_INT_CST_LOW (arg2
) & ~0x3)
4605 error ("argument to `%s' must be a 2-bit unsigned literal", d
->name
);
4609 if (! (*insn_data
[d
->icode
].operand
[0].predicate
) (op0
, mode0
))
4610 op0
= copy_to_mode_reg (mode0
, op0
);
4611 if (! (*insn_data
[d
->icode
].operand
[1].predicate
) (op1
, mode1
))
4612 op1
= copy_to_mode_reg (mode1
, op1
);
4614 pat
= GEN_FCN (d
->icode
) (op0
, op1
, op2
);
4625 /* Expand the builtin in EXP and store the result in TARGET. Store
4626 true in *EXPANDEDP if we found a builtin to expand. */
4628 altivec_expand_builtin (exp
, target
, expandedp
)
4633 struct builtin_description
*d
;
4634 struct builtin_description_predicates
*dp
;
4636 enum insn_code icode
;
4637 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4638 tree arglist
= TREE_OPERAND (exp
, 1);
4641 enum machine_mode tmode
, mode0
;
4642 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4644 target
= altivec_expand_ld_builtin (exp
, target
, expandedp
);
4648 target
= altivec_expand_st_builtin (exp
, target
, expandedp
);
4652 target
= altivec_expand_dst_builtin (exp
, target
, expandedp
);
4660 case ALTIVEC_BUILTIN_STVX
:
4661 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx
, arglist
);
4662 case ALTIVEC_BUILTIN_STVEBX
:
4663 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx
, arglist
);
4664 case ALTIVEC_BUILTIN_STVEHX
:
4665 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx
, arglist
);
4666 case ALTIVEC_BUILTIN_STVEWX
:
4667 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx
, arglist
);
4668 case ALTIVEC_BUILTIN_STVXL
:
4669 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl
, arglist
);
4671 case ALTIVEC_BUILTIN_MFVSCR
:
4672 icode
= CODE_FOR_altivec_mfvscr
;
4673 tmode
= insn_data
[icode
].operand
[0].mode
;
4676 || GET_MODE (target
) != tmode
4677 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4678 target
= gen_reg_rtx (tmode
);
4680 pat
= GEN_FCN (icode
) (target
);
4686 case ALTIVEC_BUILTIN_MTVSCR
:
4687 icode
= CODE_FOR_altivec_mtvscr
;
4688 arg0
= TREE_VALUE (arglist
);
4689 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4690 mode0
= insn_data
[icode
].operand
[0].mode
;
4692 /* If we got invalid arguments bail out before generating bad rtl. */
4693 if (arg0
== error_mark_node
)
4696 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
4697 op0
= copy_to_mode_reg (mode0
, op0
);
4699 pat
= GEN_FCN (icode
) (op0
);
4704 case ALTIVEC_BUILTIN_DSSALL
:
4705 emit_insn (gen_altivec_dssall ());
4708 case ALTIVEC_BUILTIN_DSS
:
4709 icode
= CODE_FOR_altivec_dss
;
4710 arg0
= TREE_VALUE (arglist
);
4711 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4712 mode0
= insn_data
[icode
].operand
[0].mode
;
4714 /* If we got invalid arguments bail out before generating bad rtl. */
4715 if (arg0
== error_mark_node
)
4718 if (TREE_CODE (arg0
) != INTEGER_CST
4719 || TREE_INT_CST_LOW (arg0
) & ~0x3)
4721 error ("argument to dss must be a 2-bit unsigned literal");
4725 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
4726 op0
= copy_to_mode_reg (mode0
, op0
);
4728 emit_insn (gen_altivec_dss (op0
));
4732 /* Expand abs* operations. */
4733 d
= (struct builtin_description
*) bdesc_abs
;
4734 for (i
= 0; i
< ARRAY_SIZE (bdesc_abs
); i
++, d
++)
4735 if (d
->code
== fcode
)
4736 return altivec_expand_abs_builtin (d
->icode
, arglist
, target
);
4738 /* Expand the AltiVec predicates. */
4739 dp
= (struct builtin_description_predicates
*) bdesc_altivec_preds
;
4740 for (i
= 0; i
< ARRAY_SIZE (bdesc_altivec_preds
); i
++, dp
++)
4741 if (dp
->code
== fcode
)
4742 return altivec_expand_predicate_builtin (dp
->icode
, dp
->opcode
, arglist
, target
);
4744 /* LV* are funky. We initialized them differently. */
4747 case ALTIVEC_BUILTIN_LVSL
:
4748 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl
,
4750 case ALTIVEC_BUILTIN_LVSR
:
4751 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr
,
4753 case ALTIVEC_BUILTIN_LVEBX
:
4754 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx
,
4756 case ALTIVEC_BUILTIN_LVEHX
:
4757 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx
,
4759 case ALTIVEC_BUILTIN_LVEWX
:
4760 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx
,
4762 case ALTIVEC_BUILTIN_LVXL
:
4763 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl
,
4765 case ALTIVEC_BUILTIN_LVX
:
4766 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx
,
4777 /* Binops that need to be initialized manually, but can be expanded
4778 automagically by rs6000_expand_binop_builtin. */
4779 static struct builtin_description bdesc_2arg_spe
[] =
4781 { 0, CODE_FOR_spe_evlddx
, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX
},
4782 { 0, CODE_FOR_spe_evldwx
, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX
},
4783 { 0, CODE_FOR_spe_evldhx
, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX
},
4784 { 0, CODE_FOR_spe_evlwhex
, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX
},
4785 { 0, CODE_FOR_spe_evlwhoux
, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX
},
4786 { 0, CODE_FOR_spe_evlwhosx
, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX
},
4787 { 0, CODE_FOR_spe_evlwwsplatx
, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX
},
4788 { 0, CODE_FOR_spe_evlwhsplatx
, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX
},
4789 { 0, CODE_FOR_spe_evlhhesplatx
, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX
},
4790 { 0, CODE_FOR_spe_evlhhousplatx
, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX
},
4791 { 0, CODE_FOR_spe_evlhhossplatx
, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX
},
4792 { 0, CODE_FOR_spe_evldd
, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD
},
4793 { 0, CODE_FOR_spe_evldw
, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW
},
4794 { 0, CODE_FOR_spe_evldh
, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH
},
4795 { 0, CODE_FOR_spe_evlwhe
, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE
},
4796 { 0, CODE_FOR_spe_evlwhou
, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU
},
4797 { 0, CODE_FOR_spe_evlwhos
, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS
},
4798 { 0, CODE_FOR_spe_evlwwsplat
, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT
},
4799 { 0, CODE_FOR_spe_evlwhsplat
, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT
},
4800 { 0, CODE_FOR_spe_evlhhesplat
, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT
},
4801 { 0, CODE_FOR_spe_evlhhousplat
, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT
},
4802 { 0, CODE_FOR_spe_evlhhossplat
, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT
}
4805 /* Expand the builtin in EXP and store the result in TARGET. Store
4806 true in *EXPANDEDP if we found a builtin to expand.
4808 This expands the SPE builtins that are not simple unary and binary
4811 spe_expand_builtin (exp
, target
, expandedp
)
4816 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4817 tree arglist
= TREE_OPERAND (exp
, 1);
4819 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4820 enum insn_code icode
;
4821 enum machine_mode tmode
, mode0
;
4823 struct builtin_description
*d
;
4828 /* Syntax check for a 5-bit unsigned immediate. */
4831 case SPE_BUILTIN_EVSTDD
:
4832 case SPE_BUILTIN_EVSTDH
:
4833 case SPE_BUILTIN_EVSTDW
:
4834 case SPE_BUILTIN_EVSTWHE
:
4835 case SPE_BUILTIN_EVSTWHO
:
4836 case SPE_BUILTIN_EVSTWWE
:
4837 case SPE_BUILTIN_EVSTWWO
:
4838 arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4839 if (TREE_CODE (arg1
) != INTEGER_CST
4840 || TREE_INT_CST_LOW (arg1
) & ~0x1f)
4842 error ("argument 2 must be a 5-bit unsigned literal");
4850 d
= (struct builtin_description
*) bdesc_2arg_spe
;
4851 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg_spe
); ++i
, ++d
)
4852 if (d
->code
== fcode
)
4853 return rs6000_expand_binop_builtin (d
->icode
, arglist
, target
);
4855 d
= (struct builtin_description
*) bdesc_spe_predicates
;
4856 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_predicates
); ++i
, ++d
)
4857 if (d
->code
== fcode
)
4858 return spe_expand_predicate_builtin (d
->icode
, arglist
, target
);
4860 d
= (struct builtin_description
*) bdesc_spe_evsel
;
4861 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_evsel
); ++i
, ++d
)
4862 if (d
->code
== fcode
)
4863 return spe_expand_evsel_builtin (d
->icode
, arglist
, target
);
4867 case SPE_BUILTIN_EVSTDDX
:
4868 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx
, arglist
);
4869 case SPE_BUILTIN_EVSTDHX
:
4870 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx
, arglist
);
4871 case SPE_BUILTIN_EVSTDWX
:
4872 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx
, arglist
);
4873 case SPE_BUILTIN_EVSTWHEX
:
4874 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex
, arglist
);
4875 case SPE_BUILTIN_EVSTWHOX
:
4876 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox
, arglist
);
4877 case SPE_BUILTIN_EVSTWWEX
:
4878 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex
, arglist
);
4879 case SPE_BUILTIN_EVSTWWOX
:
4880 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox
, arglist
);
4881 case SPE_BUILTIN_EVSTDD
:
4882 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd
, arglist
);
4883 case SPE_BUILTIN_EVSTDH
:
4884 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh
, arglist
);
4885 case SPE_BUILTIN_EVSTDW
:
4886 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw
, arglist
);
4887 case SPE_BUILTIN_EVSTWHE
:
4888 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe
, arglist
);
4889 case SPE_BUILTIN_EVSTWHO
:
4890 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho
, arglist
);
4891 case SPE_BUILTIN_EVSTWWE
:
4892 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe
, arglist
);
4893 case SPE_BUILTIN_EVSTWWO
:
4894 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo
, arglist
);
4895 case SPE_BUILTIN_MFSPEFSCR
:
4896 icode
= CODE_FOR_spe_mfspefscr
;
4897 tmode
= insn_data
[icode
].operand
[0].mode
;
4900 || GET_MODE (target
) != tmode
4901 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4902 target
= gen_reg_rtx (tmode
);
4904 pat
= GEN_FCN (icode
) (target
);
4909 case SPE_BUILTIN_MTSPEFSCR
:
4910 icode
= CODE_FOR_spe_mtspefscr
;
4911 arg0
= TREE_VALUE (arglist
);
4912 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4913 mode0
= insn_data
[icode
].operand
[0].mode
;
4915 if (arg0
== error_mark_node
)
4918 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
4919 op0
= copy_to_mode_reg (mode0
, op0
);
4921 pat
= GEN_FCN (icode
) (op0
);
4934 spe_expand_predicate_builtin (icode
, arglist
, target
)
4935 enum insn_code icode
;
4939 rtx pat
, scratch
, tmp
;
4940 tree form
= TREE_VALUE (arglist
);
4941 tree arg0
= TREE_VALUE (TREE_CHAIN (arglist
));
4942 tree arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4943 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4944 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4945 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4946 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
4950 if (TREE_CODE (form
) != INTEGER_CST
)
4952 error ("argument 1 of __builtin_spe_predicate must be a constant");
4956 form_int
= TREE_INT_CST_LOW (form
);
4961 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
4965 || GET_MODE (target
) != SImode
4966 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, SImode
))
4967 target
= gen_reg_rtx (SImode
);
4969 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4970 op0
= copy_to_mode_reg (mode0
, op0
);
4971 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
4972 op1
= copy_to_mode_reg (mode1
, op1
);
4974 scratch
= gen_reg_rtx (CCmode
);
4976 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
);
4981 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
4982 _lower_. We use one compare, but look in different bits of the
4983 CR for each variant.
4985 There are 2 elements in each SPE simd type (upper/lower). The CR
4986 bits are set as follows:
4988 BIT0 | BIT 1 | BIT 2 | BIT 3
4989 U | L | (U | L) | (U & L)
4991 So, for an "all" relationship, BIT 3 would be set.
4992 For an "any" relationship, BIT 2 would be set. Etc.
4994 Following traditional nomenclature, these bits map to:
4996 BIT0 | BIT 1 | BIT 2 | BIT 3
4999 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
5004 /* All variant. OV bit. */
5006 /* We need to get to the OV bit, which is the ORDERED bit. We
5007 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
5008 that's ugly and will trigger a validate_condition_mode abort.
5009 So let's just use another pattern. */
5010 emit_insn (gen_move_from_CR_ov_bit (target
, scratch
));
5012 /* Any variant. EQ bit. */
5016 /* Upper variant. LT bit. */
5020 /* Lower variant. GT bit. */
5025 error ("argument 1 of __builtin_spe_predicate is out of range");
5029 tmp
= gen_rtx_fmt_ee (code
, SImode
, scratch
, const0_rtx
);
5030 emit_move_insn (target
, tmp
);
5035 /* The evsel builtins look like this:
5037 e = __builtin_spe_evsel_OP (a, b, c, d);
5041 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
5042 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
5046 spe_expand_evsel_builtin (icode
, arglist
, target
)
5047 enum insn_code icode
;
5052 tree arg0
= TREE_VALUE (arglist
);
5053 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
5054 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5055 tree arg3
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist
))));
5056 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5057 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5058 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
5059 rtx op3
= expand_expr (arg3
, NULL_RTX
, VOIDmode
, 0);
5060 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5061 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
5066 if (arg0
== error_mark_node
|| arg1
== error_mark_node
5067 || arg2
== error_mark_node
|| arg3
== error_mark_node
)
5071 || GET_MODE (target
) != mode0
5072 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, mode0
))
5073 target
= gen_reg_rtx (mode0
);
5075 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5076 op0
= copy_to_mode_reg (mode0
, op0
);
5077 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
5078 op1
= copy_to_mode_reg (mode0
, op1
);
5079 if (! (*insn_data
[icode
].operand
[1].predicate
) (op2
, mode1
))
5080 op2
= copy_to_mode_reg (mode0
, op2
);
5081 if (! (*insn_data
[icode
].operand
[1].predicate
) (op3
, mode1
))
5082 op3
= copy_to_mode_reg (mode0
, op3
);
5084 /* Generate the compare. */
5085 scratch
= gen_reg_rtx (CCmode
);
5086 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
);
5091 if (mode0
== V2SImode
)
5092 emit_insn (gen_spe_evsel (target
, op2
, op3
, scratch
));
5094 emit_insn (gen_spe_evsel_fs (target
, op2
, op3
, scratch
));
5099 /* Expand an expression EXP that calls a built-in function,
5100 with result going to TARGET if that's convenient
5101 (and in mode MODE if that's convenient).
5102 SUBTARGET may be used as the target for computing one of EXP's operands.
5103 IGNORE is nonzero if the value is to be ignored. */
5106 rs6000_expand_builtin (exp
, target
, subtarget
, mode
, ignore
)
5109 rtx subtarget ATTRIBUTE_UNUSED
;
5110 enum machine_mode mode ATTRIBUTE_UNUSED
;
5111 int ignore ATTRIBUTE_UNUSED
;
5113 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5114 tree arglist
= TREE_OPERAND (exp
, 1);
5115 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
5116 struct builtin_description
*d
;
5123 ret
= altivec_expand_builtin (exp
, target
, &success
);
5130 ret
= spe_expand_builtin (exp
, target
, &success
);
5136 if (TARGET_ALTIVEC
|| TARGET_SPE
)
5138 /* Handle simple unary operations. */
5139 d
= (struct builtin_description
*) bdesc_1arg
;
5140 for (i
= 0; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
5141 if (d
->code
== fcode
)
5142 return rs6000_expand_unop_builtin (d
->icode
, arglist
, target
);
5144 /* Handle simple binary operations. */
5145 d
= (struct builtin_description
*) bdesc_2arg
;
5146 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
5147 if (d
->code
== fcode
)
5148 return rs6000_expand_binop_builtin (d
->icode
, arglist
, target
);
5150 /* Handle simple ternary operations. */
5151 d
= (struct builtin_description
*) bdesc_3arg
;
5152 for (i
= 0; i
< ARRAY_SIZE (bdesc_3arg
); i
++, d
++)
5153 if (d
->code
== fcode
)
5154 return rs6000_expand_ternop_builtin (d
->icode
, arglist
, target
);
5162 rs6000_init_builtins ()
5165 spe_init_builtins ();
5167 altivec_init_builtins ();
5168 if (TARGET_ALTIVEC
|| TARGET_SPE
)
5169 rs6000_common_init_builtins ();
5172 /* Search through a set of builtins and enable the mask bits.
5173 DESC is an array of builtins.
5174 SIZE is the totaly number of builtins.
5175 START is the builtin enum at which to start.
5176 END is the builtin enum at which to end. */
5178 enable_mask_for_builtins (desc
, size
, start
, end
)
5179 struct builtin_description
*desc
;
5181 enum rs6000_builtins start
, end
;
5185 for (i
= 0; i
< size
; ++i
)
5186 if (desc
[i
].code
== start
)
5192 for (; i
< size
; ++i
)
5194 /* Flip all the bits on. */
5195 desc
[i
].mask
= target_flags
;
5196 if (desc
[i
].code
== end
)
5202 spe_init_builtins ()
5204 tree endlink
= void_list_node
;
5205 tree puint_type_node
= build_pointer_type (unsigned_type_node
);
5206 tree pushort_type_node
= build_pointer_type (short_unsigned_type_node
);
5207 tree pv2si_type_node
= build_pointer_type (V2SI_type_node
);
5208 struct builtin_description
*d
;
5211 tree v2si_ftype_4_v2si
5212 = build_function_type
5214 tree_cons (NULL_TREE
, V2SI_type_node
,
5215 tree_cons (NULL_TREE
, V2SI_type_node
,
5216 tree_cons (NULL_TREE
, V2SI_type_node
,
5217 tree_cons (NULL_TREE
, V2SI_type_node
,
5220 tree v2sf_ftype_4_v2sf
5221 = build_function_type
5223 tree_cons (NULL_TREE
, V2SF_type_node
,
5224 tree_cons (NULL_TREE
, V2SF_type_node
,
5225 tree_cons (NULL_TREE
, V2SF_type_node
,
5226 tree_cons (NULL_TREE
, V2SF_type_node
,
5229 tree int_ftype_int_v2si_v2si
5230 = build_function_type
5232 tree_cons (NULL_TREE
, integer_type_node
,
5233 tree_cons (NULL_TREE
, V2SI_type_node
,
5234 tree_cons (NULL_TREE
, V2SI_type_node
,
5237 tree int_ftype_int_v2sf_v2sf
5238 = build_function_type
5240 tree_cons (NULL_TREE
, integer_type_node
,
5241 tree_cons (NULL_TREE
, V2SF_type_node
,
5242 tree_cons (NULL_TREE
, V2SF_type_node
,
5245 tree void_ftype_v2si_puint_int
5246 = build_function_type (void_type_node
,
5247 tree_cons (NULL_TREE
, V2SI_type_node
,
5248 tree_cons (NULL_TREE
, puint_type_node
,
5249 tree_cons (NULL_TREE
,
5253 tree void_ftype_v2si_puint_char
5254 = build_function_type (void_type_node
,
5255 tree_cons (NULL_TREE
, V2SI_type_node
,
5256 tree_cons (NULL_TREE
, puint_type_node
,
5257 tree_cons (NULL_TREE
,
5261 tree void_ftype_v2si_pv2si_int
5262 = build_function_type (void_type_node
,
5263 tree_cons (NULL_TREE
, V2SI_type_node
,
5264 tree_cons (NULL_TREE
, pv2si_type_node
,
5265 tree_cons (NULL_TREE
,
5269 tree void_ftype_v2si_pv2si_char
5270 = build_function_type (void_type_node
,
5271 tree_cons (NULL_TREE
, V2SI_type_node
,
5272 tree_cons (NULL_TREE
, pv2si_type_node
,
5273 tree_cons (NULL_TREE
,
5278 = build_function_type (void_type_node
,
5279 tree_cons (NULL_TREE
, integer_type_node
, endlink
));
5282 = build_function_type (integer_type_node
,
5283 tree_cons (NULL_TREE
, void_type_node
, endlink
));
5285 tree v2si_ftype_pv2si_int
5286 = build_function_type (V2SI_type_node
,
5287 tree_cons (NULL_TREE
, pv2si_type_node
,
5288 tree_cons (NULL_TREE
, integer_type_node
,
5291 tree v2si_ftype_puint_int
5292 = build_function_type (V2SI_type_node
,
5293 tree_cons (NULL_TREE
, puint_type_node
,
5294 tree_cons (NULL_TREE
, integer_type_node
,
5297 tree v2si_ftype_pushort_int
5298 = build_function_type (V2SI_type_node
,
5299 tree_cons (NULL_TREE
, pushort_type_node
,
5300 tree_cons (NULL_TREE
, integer_type_node
,
5303 /* The initialization of the simple binary and unary builtins is
5304 done in rs6000_common_init_builtins, but we have to enable the
5305 mask bits here manually because we have run out of `target_flags'
5306 bits. We really need to redesign this mask business. */
5308 enable_mask_for_builtins ((struct builtin_description
*) bdesc_2arg
,
5309 ARRAY_SIZE (bdesc_2arg
),
5312 enable_mask_for_builtins ((struct builtin_description
*) bdesc_1arg
,
5313 ARRAY_SIZE (bdesc_1arg
),
5315 SPE_BUILTIN_EVSUBFUSIAAW
);
5316 enable_mask_for_builtins ((struct builtin_description
*) bdesc_spe_predicates
,
5317 ARRAY_SIZE (bdesc_spe_predicates
),
5318 SPE_BUILTIN_EVCMPEQ
,
5319 SPE_BUILTIN_EVFSTSTLT
);
5320 enable_mask_for_builtins ((struct builtin_description
*) bdesc_spe_evsel
,
5321 ARRAY_SIZE (bdesc_spe_evsel
),
5322 SPE_BUILTIN_EVSEL_CMPGTS
,
5323 SPE_BUILTIN_EVSEL_FSTSTEQ
);
5325 /* Initialize irregular SPE builtins. */
5327 def_builtin (target_flags
, "__builtin_spe_mtspefscr", void_ftype_int
, SPE_BUILTIN_MTSPEFSCR
);
5328 def_builtin (target_flags
, "__builtin_spe_mfspefscr", int_ftype_void
, SPE_BUILTIN_MFSPEFSCR
);
5329 def_builtin (target_flags
, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDDX
);
5330 def_builtin (target_flags
, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDHX
);
5331 def_builtin (target_flags
, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDWX
);
5332 def_builtin (target_flags
, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWHEX
);
5333 def_builtin (target_flags
, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWHOX
);
5334 def_builtin (target_flags
, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWWEX
);
5335 def_builtin (target_flags
, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWWOX
);
5336 def_builtin (target_flags
, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDD
);
5337 def_builtin (target_flags
, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDH
);
5338 def_builtin (target_flags
, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDW
);
5339 def_builtin (target_flags
, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWHE
);
5340 def_builtin (target_flags
, "__builtin_spe_evstwho", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWHO
);
5341 def_builtin (target_flags
, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWWE
);
5342 def_builtin (target_flags
, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWWO
);
5345 def_builtin (target_flags
, "__builtin_spe_evlddx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDDX
);
5346 def_builtin (target_flags
, "__builtin_spe_evldwx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDWX
);
5347 def_builtin (target_flags
, "__builtin_spe_evldhx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDHX
);
5348 def_builtin (target_flags
, "__builtin_spe_evlwhex", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHEX
);
5349 def_builtin (target_flags
, "__builtin_spe_evlwhoux", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOUX
);
5350 def_builtin (target_flags
, "__builtin_spe_evlwhosx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOSX
);
5351 def_builtin (target_flags
, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWWSPLATX
);
5352 def_builtin (target_flags
, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHSPLATX
);
5353 def_builtin (target_flags
, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHESPLATX
);
5354 def_builtin (target_flags
, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOUSPLATX
);
5355 def_builtin (target_flags
, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOSSPLATX
);
5356 def_builtin (target_flags
, "__builtin_spe_evldd", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDD
);
5357 def_builtin (target_flags
, "__builtin_spe_evldw", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDW
);
5358 def_builtin (target_flags
, "__builtin_spe_evldh", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDH
);
5359 def_builtin (target_flags
, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHESPLAT
);
5360 def_builtin (target_flags
, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOSSPLAT
);
5361 def_builtin (target_flags
, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOUSPLAT
);
5362 def_builtin (target_flags
, "__builtin_spe_evlwhe", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHE
);
5363 def_builtin (target_flags
, "__builtin_spe_evlwhos", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOS
);
5364 def_builtin (target_flags
, "__builtin_spe_evlwhou", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOU
);
5365 def_builtin (target_flags
, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHSPLAT
);
5366 def_builtin (target_flags
, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWWSPLAT
);
5369 d
= (struct builtin_description
*) bdesc_spe_predicates
;
5370 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_predicates
); ++i
, d
++)
5374 switch (insn_data
[d
->icode
].operand
[1].mode
)
5377 type
= int_ftype_int_v2si_v2si
;
5380 type
= int_ftype_int_v2sf_v2sf
;
5386 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
5389 /* Evsel predicates. */
5390 d
= (struct builtin_description
*) bdesc_spe_evsel
;
5391 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_evsel
); ++i
, d
++)
5395 switch (insn_data
[d
->icode
].operand
[1].mode
)
5398 type
= v2si_ftype_4_v2si
;
5401 type
= v2sf_ftype_4_v2sf
;
5407 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
5412 altivec_init_builtins ()
5414 struct builtin_description
*d
;
5415 struct builtin_description_predicates
*dp
;
5417 tree pfloat_type_node
= build_pointer_type (float_type_node
);
5418 tree pint_type_node
= build_pointer_type (integer_type_node
);
5419 tree pshort_type_node
= build_pointer_type (short_integer_type_node
);
5420 tree pchar_type_node
= build_pointer_type (char_type_node
);
5422 tree pvoid_type_node
= build_pointer_type (void_type_node
);
5424 tree int_ftype_int_v4si_v4si
5425 = build_function_type_list (integer_type_node
,
5426 integer_type_node
, V4SI_type_node
,
5427 V4SI_type_node
, NULL_TREE
);
5428 tree v4sf_ftype_pfloat
5429 = build_function_type_list (V4SF_type_node
, pfloat_type_node
, NULL_TREE
);
5430 tree void_ftype_pfloat_v4sf
5431 = build_function_type_list (void_type_node
,
5432 pfloat_type_node
, V4SF_type_node
, NULL_TREE
);
5433 tree v4si_ftype_pint
5434 = build_function_type_list (V4SI_type_node
, pint_type_node
, NULL_TREE
); tree void_ftype_pint_v4si
5435 = build_function_type_list (void_type_node
,
5436 pint_type_node
, V4SI_type_node
, NULL_TREE
);
5437 tree v8hi_ftype_pshort
5438 = build_function_type_list (V8HI_type_node
, pshort_type_node
, NULL_TREE
);
5439 tree void_ftype_pshort_v8hi
5440 = build_function_type_list (void_type_node
,
5441 pshort_type_node
, V8HI_type_node
, NULL_TREE
);
5442 tree v16qi_ftype_pchar
5443 = build_function_type_list (V16QI_type_node
, pchar_type_node
, NULL_TREE
);
5444 tree void_ftype_pchar_v16qi
5445 = build_function_type_list (void_type_node
,
5446 pchar_type_node
, V16QI_type_node
, NULL_TREE
);
5447 tree void_ftype_v4si
5448 = build_function_type_list (void_type_node
, V4SI_type_node
, NULL_TREE
);
5449 tree v8hi_ftype_void
5450 = build_function_type (V8HI_type_node
, void_list_node
);
5451 tree void_ftype_void
5452 = build_function_type (void_type_node
, void_list_node
);
5454 = build_function_type_list (void_type_node
, char_type_node
, NULL_TREE
);
5455 tree v16qi_ftype_int_pvoid
5456 = build_function_type_list (V16QI_type_node
,
5457 integer_type_node
, pvoid_type_node
, NULL_TREE
);
5458 tree v8hi_ftype_int_pvoid
5459 = build_function_type_list (V8HI_type_node
,
5460 integer_type_node
, pvoid_type_node
, NULL_TREE
);
5461 tree v4si_ftype_int_pvoid
5462 = build_function_type_list (V4SI_type_node
,
5463 integer_type_node
, pvoid_type_node
, NULL_TREE
);
5464 tree void_ftype_v4si_int_pvoid
5465 = build_function_type_list (void_type_node
,
5466 V4SI_type_node
, integer_type_node
,
5467 pvoid_type_node
, NULL_TREE
);
5468 tree void_ftype_v16qi_int_pvoid
5469 = build_function_type_list (void_type_node
,
5470 V16QI_type_node
, integer_type_node
,
5471 pvoid_type_node
, NULL_TREE
);
5472 tree void_ftype_v8hi_int_pvoid
5473 = build_function_type_list (void_type_node
,
5474 V8HI_type_node
, integer_type_node
,
5475 pvoid_type_node
, NULL_TREE
);
5476 tree int_ftype_int_v8hi_v8hi
5477 = build_function_type_list (integer_type_node
,
5478 integer_type_node
, V8HI_type_node
,
5479 V8HI_type_node
, NULL_TREE
);
5480 tree int_ftype_int_v16qi_v16qi
5481 = build_function_type_list (integer_type_node
,
5482 integer_type_node
, V16QI_type_node
,
5483 V16QI_type_node
, NULL_TREE
);
5484 tree int_ftype_int_v4sf_v4sf
5485 = build_function_type_list (integer_type_node
,
5486 integer_type_node
, V4SF_type_node
,
5487 V4SF_type_node
, NULL_TREE
);
5488 tree v4si_ftype_v4si
5489 = build_function_type_list (V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
5490 tree v8hi_ftype_v8hi
5491 = build_function_type_list (V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5492 tree v16qi_ftype_v16qi
5493 = build_function_type_list (V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5494 tree v4sf_ftype_v4sf
5495 = build_function_type_list (V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5496 tree void_ftype_pvoid_int_char
5497 = build_function_type_list (void_type_node
,
5498 pvoid_type_node
, integer_type_node
,
5499 char_type_node
, NULL_TREE
);
5501 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat
, ALTIVEC_BUILTIN_LD_INTERNAL_4sf
);
5502 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf
, ALTIVEC_BUILTIN_ST_INTERNAL_4sf
);
5503 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint
, ALTIVEC_BUILTIN_LD_INTERNAL_4si
);
5504 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si
, ALTIVEC_BUILTIN_ST_INTERNAL_4si
);
5505 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort
, ALTIVEC_BUILTIN_LD_INTERNAL_8hi
);
5506 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi
, ALTIVEC_BUILTIN_ST_INTERNAL_8hi
);
5507 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar
, ALTIVEC_BUILTIN_LD_INTERNAL_16qi
);
5508 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi
, ALTIVEC_BUILTIN_ST_INTERNAL_16qi
);
5509 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_mtvscr", void_ftype_v4si
, ALTIVEC_BUILTIN_MTVSCR
);
5510 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_mfvscr", v8hi_ftype_void
, ALTIVEC_BUILTIN_MFVSCR
);
5511 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_dssall", void_ftype_void
, ALTIVEC_BUILTIN_DSSALL
);
5512 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_dss", void_ftype_qi
, ALTIVEC_BUILTIN_DSS
);
5513 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvsl", v16qi_ftype_int_pvoid
, ALTIVEC_BUILTIN_LVSL
);
5514 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvsr", v16qi_ftype_int_pvoid
, ALTIVEC_BUILTIN_LVSR
);
5515 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvebx", v16qi_ftype_int_pvoid
, ALTIVEC_BUILTIN_LVEBX
);
5516 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvehx", v8hi_ftype_int_pvoid
, ALTIVEC_BUILTIN_LVEHX
);
5517 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvewx", v4si_ftype_int_pvoid
, ALTIVEC_BUILTIN_LVEWX
);
5518 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvxl", v4si_ftype_int_pvoid
, ALTIVEC_BUILTIN_LVXL
);
5519 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvx", v4si_ftype_int_pvoid
, ALTIVEC_BUILTIN_LVX
);
5520 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVX
);
5521 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVEWX
);
5522 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVXL
);
5523 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid
, ALTIVEC_BUILTIN_STVEBX
);
5524 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid
, ALTIVEC_BUILTIN_STVEHX
);
5526 /* Add the DST variants. */
5527 d
= (struct builtin_description
*) bdesc_dst
;
5528 for (i
= 0; i
< ARRAY_SIZE (bdesc_dst
); i
++, d
++)
5529 def_builtin (d
->mask
, d
->name
, void_ftype_pvoid_int_char
, d
->code
);
5531 /* Initialize the predicates. */
5532 dp
= (struct builtin_description_predicates
*) bdesc_altivec_preds
;
5533 for (i
= 0; i
< ARRAY_SIZE (bdesc_altivec_preds
); i
++, dp
++)
5535 enum machine_mode mode1
;
5538 mode1
= insn_data
[dp
->icode
].operand
[1].mode
;
5543 type
= int_ftype_int_v4si_v4si
;
5546 type
= int_ftype_int_v8hi_v8hi
;
5549 type
= int_ftype_int_v16qi_v16qi
;
5552 type
= int_ftype_int_v4sf_v4sf
;
5558 def_builtin (dp
->mask
, dp
->name
, type
, dp
->code
);
5561 /* Initialize the abs* operators. */
5562 d
= (struct builtin_description
*) bdesc_abs
;
5563 for (i
= 0; i
< ARRAY_SIZE (bdesc_abs
); i
++, d
++)
5565 enum machine_mode mode0
;
5568 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
5573 type
= v4si_ftype_v4si
;
5576 type
= v8hi_ftype_v8hi
;
5579 type
= v16qi_ftype_v16qi
;
5582 type
= v4sf_ftype_v4sf
;
5588 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
5593 rs6000_common_init_builtins ()
5595 struct builtin_description
*d
;
5598 tree v4sf_ftype_v4sf_v4sf_v16qi
5599 = build_function_type_list (V4SF_type_node
,
5600 V4SF_type_node
, V4SF_type_node
,
5601 V16QI_type_node
, NULL_TREE
);
5602 tree v4si_ftype_v4si_v4si_v16qi
5603 = build_function_type_list (V4SI_type_node
,
5604 V4SI_type_node
, V4SI_type_node
,
5605 V16QI_type_node
, NULL_TREE
);
5606 tree v8hi_ftype_v8hi_v8hi_v16qi
5607 = build_function_type_list (V8HI_type_node
,
5608 V8HI_type_node
, V8HI_type_node
,
5609 V16QI_type_node
, NULL_TREE
);
5610 tree v16qi_ftype_v16qi_v16qi_v16qi
5611 = build_function_type_list (V16QI_type_node
,
5612 V16QI_type_node
, V16QI_type_node
,
5613 V16QI_type_node
, NULL_TREE
);
5614 tree v4si_ftype_char
5615 = build_function_type_list (V4SI_type_node
, char_type_node
, NULL_TREE
);
5616 tree v8hi_ftype_char
5617 = build_function_type_list (V8HI_type_node
, char_type_node
, NULL_TREE
);
5618 tree v16qi_ftype_char
5619 = build_function_type_list (V16QI_type_node
, char_type_node
, NULL_TREE
);
5620 tree v8hi_ftype_v16qi
5621 = build_function_type_list (V8HI_type_node
, V16QI_type_node
, NULL_TREE
);
5622 tree v4sf_ftype_v4sf
5623 = build_function_type_list (V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5625 tree v2si_ftype_v2si_v2si
5626 = build_function_type_list (V2SI_type_node
,
5627 V2SI_type_node
, V2SI_type_node
, NULL_TREE
);
5629 tree v2sf_ftype_v2sf_v2sf
5630 = build_function_type_list (V2SF_type_node
,
5631 V2SF_type_node
, V2SF_type_node
, NULL_TREE
);
5633 tree v2si_ftype_int_int
5634 = build_function_type_list (V2SI_type_node
,
5635 integer_type_node
, integer_type_node
,
5638 tree v2si_ftype_v2si
5639 = build_function_type_list (V2SI_type_node
, V2SI_type_node
, NULL_TREE
);
5641 tree v2sf_ftype_v2sf
5642 = build_function_type_list (V2SF_type_node
,
5643 V2SF_type_node
, NULL_TREE
);
5645 tree v2sf_ftype_v2si
5646 = build_function_type_list (V2SF_type_node
,
5647 V2SI_type_node
, NULL_TREE
);
5649 tree v2si_ftype_v2sf
5650 = build_function_type_list (V2SI_type_node
,
5651 V2SF_type_node
, NULL_TREE
);
5653 tree v2si_ftype_v2si_char
5654 = build_function_type_list (V2SI_type_node
,
5655 V2SI_type_node
, char_type_node
, NULL_TREE
);
5657 tree v2si_ftype_int_char
5658 = build_function_type_list (V2SI_type_node
,
5659 integer_type_node
, char_type_node
, NULL_TREE
);
5661 tree v2si_ftype_char
5662 = build_function_type_list (V2SI_type_node
, char_type_node
, NULL_TREE
);
5664 tree int_ftype_int_int
5665 = build_function_type_list (integer_type_node
,
5666 integer_type_node
, integer_type_node
,
5669 tree v4si_ftype_v4si_v4si
5670 = build_function_type_list (V4SI_type_node
,
5671 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
5672 tree v4sf_ftype_v4si_char
5673 = build_function_type_list (V4SF_type_node
,
5674 V4SI_type_node
, char_type_node
, NULL_TREE
);
5675 tree v4si_ftype_v4sf_char
5676 = build_function_type_list (V4SI_type_node
,
5677 V4SF_type_node
, char_type_node
, NULL_TREE
);
5678 tree v4si_ftype_v4si_char
5679 = build_function_type_list (V4SI_type_node
,
5680 V4SI_type_node
, char_type_node
, NULL_TREE
);
5681 tree v8hi_ftype_v8hi_char
5682 = build_function_type_list (V8HI_type_node
,
5683 V8HI_type_node
, char_type_node
, NULL_TREE
);
5684 tree v16qi_ftype_v16qi_char
5685 = build_function_type_list (V16QI_type_node
,
5686 V16QI_type_node
, char_type_node
, NULL_TREE
);
5687 tree v16qi_ftype_v16qi_v16qi_char
5688 = build_function_type_list (V16QI_type_node
,
5689 V16QI_type_node
, V16QI_type_node
,
5690 char_type_node
, NULL_TREE
);
5691 tree v8hi_ftype_v8hi_v8hi_char
5692 = build_function_type_list (V8HI_type_node
,
5693 V8HI_type_node
, V8HI_type_node
,
5694 char_type_node
, NULL_TREE
);
5695 tree v4si_ftype_v4si_v4si_char
5696 = build_function_type_list (V4SI_type_node
,
5697 V4SI_type_node
, V4SI_type_node
,
5698 char_type_node
, NULL_TREE
);
5699 tree v4sf_ftype_v4sf_v4sf_char
5700 = build_function_type_list (V4SF_type_node
,
5701 V4SF_type_node
, V4SF_type_node
,
5702 char_type_node
, NULL_TREE
);
5703 tree v4sf_ftype_v4sf_v4sf
5704 = build_function_type_list (V4SF_type_node
,
5705 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5706 tree v4sf_ftype_v4sf_v4sf_v4si
5707 = build_function_type_list (V4SF_type_node
,
5708 V4SF_type_node
, V4SF_type_node
,
5709 V4SI_type_node
, NULL_TREE
);
5710 tree v4sf_ftype_v4sf_v4sf_v4sf
5711 = build_function_type_list (V4SF_type_node
,
5712 V4SF_type_node
, V4SF_type_node
,
5713 V4SF_type_node
, NULL_TREE
);
5714 tree v4si_ftype_v4si_v4si_v4si
5715 = build_function_type_list (V4SI_type_node
,
5716 V4SI_type_node
, V4SI_type_node
,
5717 V4SI_type_node
, NULL_TREE
);
5718 tree v8hi_ftype_v8hi_v8hi
5719 = build_function_type_list (V8HI_type_node
,
5720 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5721 tree v8hi_ftype_v8hi_v8hi_v8hi
5722 = build_function_type_list (V8HI_type_node
,
5723 V8HI_type_node
, V8HI_type_node
,
5724 V8HI_type_node
, NULL_TREE
);
5725 tree v4si_ftype_v8hi_v8hi_v4si
5726 = build_function_type_list (V4SI_type_node
,
5727 V8HI_type_node
, V8HI_type_node
,
5728 V4SI_type_node
, NULL_TREE
);
5729 tree v4si_ftype_v16qi_v16qi_v4si
5730 = build_function_type_list (V4SI_type_node
,
5731 V16QI_type_node
, V16QI_type_node
,
5732 V4SI_type_node
, NULL_TREE
);
5733 tree v16qi_ftype_v16qi_v16qi
5734 = build_function_type_list (V16QI_type_node
,
5735 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5736 tree v4si_ftype_v4sf_v4sf
5737 = build_function_type_list (V4SI_type_node
,
5738 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5739 tree v8hi_ftype_v16qi_v16qi
5740 = build_function_type_list (V8HI_type_node
,
5741 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5742 tree v4si_ftype_v8hi_v8hi
5743 = build_function_type_list (V4SI_type_node
,
5744 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5745 tree v8hi_ftype_v4si_v4si
5746 = build_function_type_list (V8HI_type_node
,
5747 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
5748 tree v16qi_ftype_v8hi_v8hi
5749 = build_function_type_list (V16QI_type_node
,
5750 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5751 tree v4si_ftype_v16qi_v4si
5752 = build_function_type_list (V4SI_type_node
,
5753 V16QI_type_node
, V4SI_type_node
, NULL_TREE
);
5754 tree v4si_ftype_v16qi_v16qi
5755 = build_function_type_list (V4SI_type_node
,
5756 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5757 tree v4si_ftype_v8hi_v4si
5758 = build_function_type_list (V4SI_type_node
,
5759 V8HI_type_node
, V4SI_type_node
, NULL_TREE
);
5760 tree v4si_ftype_v8hi
5761 = build_function_type_list (V4SI_type_node
, V8HI_type_node
, NULL_TREE
);
5762 tree int_ftype_v4si_v4si
5763 = build_function_type_list (integer_type_node
,
5764 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
5765 tree int_ftype_v4sf_v4sf
5766 = build_function_type_list (integer_type_node
,
5767 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5768 tree int_ftype_v16qi_v16qi
5769 = build_function_type_list (integer_type_node
,
5770 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5771 tree int_ftype_v8hi_v8hi
5772 = build_function_type_list (integer_type_node
,
5773 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5775 /* Add the simple ternary operators. */
5776 d
= (struct builtin_description
*) bdesc_3arg
;
5777 for (i
= 0; i
< ARRAY_SIZE (bdesc_3arg
); i
++, d
++)
5780 enum machine_mode mode0
, mode1
, mode2
, mode3
;
5783 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
5786 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
5787 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
5788 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
5789 mode3
= insn_data
[d
->icode
].operand
[3].mode
;
5791 /* When all four are of the same mode. */
5792 if (mode0
== mode1
&& mode1
== mode2
&& mode2
== mode3
)
5797 type
= v4si_ftype_v4si_v4si_v4si
;
5800 type
= v4sf_ftype_v4sf_v4sf_v4sf
;
5803 type
= v8hi_ftype_v8hi_v8hi_v8hi
;
5806 type
= v16qi_ftype_v16qi_v16qi_v16qi
;
5812 else if (mode0
== mode1
&& mode1
== mode2
&& mode3
== V16QImode
)
5817 type
= v4si_ftype_v4si_v4si_v16qi
;
5820 type
= v4sf_ftype_v4sf_v4sf_v16qi
;
5823 type
= v8hi_ftype_v8hi_v8hi_v16qi
;
5826 type
= v16qi_ftype_v16qi_v16qi_v16qi
;
5832 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V16QImode
5833 && mode3
== V4SImode
)
5834 type
= v4si_ftype_v16qi_v16qi_v4si
;
5835 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V8HImode
5836 && mode3
== V4SImode
)
5837 type
= v4si_ftype_v8hi_v8hi_v4si
;
5838 else if (mode0
== V4SFmode
&& mode1
== V4SFmode
&& mode2
== V4SFmode
5839 && mode3
== V4SImode
)
5840 type
= v4sf_ftype_v4sf_v4sf_v4si
;
5842 /* vchar, vchar, vchar, 4 bit literal. */
5843 else if (mode0
== V16QImode
&& mode1
== mode0
&& mode2
== mode0
5845 type
= v16qi_ftype_v16qi_v16qi_char
;
5847 /* vshort, vshort, vshort, 4 bit literal. */
5848 else if (mode0
== V8HImode
&& mode1
== mode0
&& mode2
== mode0
5850 type
= v8hi_ftype_v8hi_v8hi_char
;
5852 /* vint, vint, vint, 4 bit literal. */
5853 else if (mode0
== V4SImode
&& mode1
== mode0
&& mode2
== mode0
5855 type
= v4si_ftype_v4si_v4si_char
;
5857 /* vfloat, vfloat, vfloat, 4 bit literal. */
5858 else if (mode0
== V4SFmode
&& mode1
== mode0
&& mode2
== mode0
5860 type
= v4sf_ftype_v4sf_v4sf_char
;
5865 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
5868 /* Add the simple binary operators. */
5869 d
= (struct builtin_description
*) bdesc_2arg
;
5870 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
5872 enum machine_mode mode0
, mode1
, mode2
;
5875 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
5878 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
5879 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
5880 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
5882 /* When all three operands are of the same mode. */
5883 if (mode0
== mode1
&& mode1
== mode2
)
5888 type
= v4sf_ftype_v4sf_v4sf
;
5891 type
= v4si_ftype_v4si_v4si
;
5894 type
= v16qi_ftype_v16qi_v16qi
;
5897 type
= v8hi_ftype_v8hi_v8hi
;
5900 type
= v2si_ftype_v2si_v2si
;
5903 type
= v2sf_ftype_v2sf_v2sf
;
5906 type
= int_ftype_int_int
;
5913 /* A few other combos we really don't want to do manually. */
5915 /* vint, vfloat, vfloat. */
5916 else if (mode0
== V4SImode
&& mode1
== V4SFmode
&& mode2
== V4SFmode
)
5917 type
= v4si_ftype_v4sf_v4sf
;
5919 /* vshort, vchar, vchar. */
5920 else if (mode0
== V8HImode
&& mode1
== V16QImode
&& mode2
== V16QImode
)
5921 type
= v8hi_ftype_v16qi_v16qi
;
5923 /* vint, vshort, vshort. */
5924 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V8HImode
)
5925 type
= v4si_ftype_v8hi_v8hi
;
5927 /* vshort, vint, vint. */
5928 else if (mode0
== V8HImode
&& mode1
== V4SImode
&& mode2
== V4SImode
)
5929 type
= v8hi_ftype_v4si_v4si
;
5931 /* vchar, vshort, vshort. */
5932 else if (mode0
== V16QImode
&& mode1
== V8HImode
&& mode2
== V8HImode
)
5933 type
= v16qi_ftype_v8hi_v8hi
;
5935 /* vint, vchar, vint. */
5936 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V4SImode
)
5937 type
= v4si_ftype_v16qi_v4si
;
5939 /* vint, vchar, vchar. */
5940 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V16QImode
)
5941 type
= v4si_ftype_v16qi_v16qi
;
5943 /* vint, vshort, vint. */
5944 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V4SImode
)
5945 type
= v4si_ftype_v8hi_v4si
;
5947 /* vint, vint, 5 bit literal. */
5948 else if (mode0
== V4SImode
&& mode1
== V4SImode
&& mode2
== QImode
)
5949 type
= v4si_ftype_v4si_char
;
5951 /* vshort, vshort, 5 bit literal. */
5952 else if (mode0
== V8HImode
&& mode1
== V8HImode
&& mode2
== QImode
)
5953 type
= v8hi_ftype_v8hi_char
;
5955 /* vchar, vchar, 5 bit literal. */
5956 else if (mode0
== V16QImode
&& mode1
== V16QImode
&& mode2
== QImode
)
5957 type
= v16qi_ftype_v16qi_char
;
5959 /* vfloat, vint, 5 bit literal. */
5960 else if (mode0
== V4SFmode
&& mode1
== V4SImode
&& mode2
== QImode
)
5961 type
= v4sf_ftype_v4si_char
;
5963 /* vint, vfloat, 5 bit literal. */
5964 else if (mode0
== V4SImode
&& mode1
== V4SFmode
&& mode2
== QImode
)
5965 type
= v4si_ftype_v4sf_char
;
5967 else if (mode0
== V2SImode
&& mode1
== SImode
&& mode2
== SImode
)
5968 type
= v2si_ftype_int_int
;
5970 else if (mode0
== V2SImode
&& mode1
== V2SImode
&& mode2
== QImode
)
5971 type
= v2si_ftype_v2si_char
;
5973 else if (mode0
== V2SImode
&& mode1
== SImode
&& mode2
== QImode
)
5974 type
= v2si_ftype_int_char
;
5977 else if (mode0
== SImode
)
5982 type
= int_ftype_v4si_v4si
;
5985 type
= int_ftype_v4sf_v4sf
;
5988 type
= int_ftype_v16qi_v16qi
;
5991 type
= int_ftype_v8hi_v8hi
;
6001 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
6004 /* Add the simple unary operators. */
6005 d
= (struct builtin_description
*) bdesc_1arg
;
6006 for (i
= 0; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
6008 enum machine_mode mode0
, mode1
;
6011 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
6014 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
6015 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
6017 if (mode0
== V4SImode
&& mode1
== QImode
)
6018 type
= v4si_ftype_char
;
6019 else if (mode0
== V8HImode
&& mode1
== QImode
)
6020 type
= v8hi_ftype_char
;
6021 else if (mode0
== V16QImode
&& mode1
== QImode
)
6022 type
= v16qi_ftype_char
;
6023 else if (mode0
== V4SFmode
&& mode1
== V4SFmode
)
6024 type
= v4sf_ftype_v4sf
;
6025 else if (mode0
== V8HImode
&& mode1
== V16QImode
)
6026 type
= v8hi_ftype_v16qi
;
6027 else if (mode0
== V4SImode
&& mode1
== V8HImode
)
6028 type
= v4si_ftype_v8hi
;
6029 else if (mode0
== V2SImode
&& mode1
== V2SImode
)
6030 type
= v2si_ftype_v2si
;
6031 else if (mode0
== V2SFmode
&& mode1
== V2SFmode
)
6032 type
= v2sf_ftype_v2sf
;
6033 else if (mode0
== V2SFmode
&& mode1
== V2SImode
)
6034 type
= v2sf_ftype_v2si
;
6035 else if (mode0
== V2SImode
&& mode1
== V2SFmode
)
6036 type
= v2si_ftype_v2sf
;
6037 else if (mode0
== V2SImode
&& mode1
== QImode
)
6038 type
= v2si_ftype_char
;
6042 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
6047 /* Expand a block move operation, and return 1 if successful. Return 0
6048 if we should let the compiler generate normal code.
6050 operands[0] is the destination
6051 operands[1] is the source
6052 operands[2] is the length
6053 operands[3] is the alignment */
6055 #define MAX_MOVE_REG 4
6058 expand_block_move (operands
)
6061 rtx orig_dest
= operands
[0];
6062 rtx orig_src
= operands
[1];
6063 rtx bytes_rtx
= operands
[2];
6064 rtx align_rtx
= operands
[3];
6065 int constp
= (GET_CODE (bytes_rtx
) == CONST_INT
);
6071 /* If this is not a fixed size move, just call memcpy */
6075 /* If this is not a fixed size alignment, abort */
6076 if (GET_CODE (align_rtx
) != CONST_INT
)
6078 align
= INTVAL (align_rtx
);
6080 /* Anything to move? */
6081 bytes
= INTVAL (bytes_rtx
);
6085 /* store_one_arg depends on expand_block_move to handle at least the size of
6086 reg_parm_stack_space. */
6087 if (bytes
> (TARGET_POWERPC64
? 64 : 32))
6090 if (TARGET_STRING
) /* string instructions are available */
6092 for (offset
= 0; bytes
> 0; offset
+= move_bytes
, bytes
-= move_bytes
)
6095 rtx (*movstrsi
) PARAMS ((rtx
, rtx
, rtx
, rtx
));
6096 rtx (*mov
) PARAMS ((rtx
, rtx
));
6098 enum machine_mode mode
= BLKmode
;
6101 if (bytes
> 24 /* move up to 32 bytes at a time */
6109 && ! fixed_regs
[12])
6111 move_bytes
= (bytes
> 32) ? 32 : bytes
;
6112 gen_func
.movstrsi
= gen_movstrsi_8reg
;
6114 else if (bytes
> 16 /* move up to 24 bytes at a time */
6120 && ! fixed_regs
[10])
6122 move_bytes
= (bytes
> 24) ? 24 : bytes
;
6123 gen_func
.movstrsi
= gen_movstrsi_6reg
;
6125 else if (bytes
> 8 /* move up to 16 bytes at a time */
6131 move_bytes
= (bytes
> 16) ? 16 : bytes
;
6132 gen_func
.movstrsi
= gen_movstrsi_4reg
;
6134 else if (bytes
>= 8 && TARGET_POWERPC64
6135 /* 64-bit loads and stores require word-aligned
6137 && (align
>= 8 || (! STRICT_ALIGNMENT
&& align
>= 4)))
6141 gen_func
.mov
= gen_movdi
;
6143 else if (bytes
> 4 && !TARGET_POWERPC64
)
6144 { /* move up to 8 bytes at a time */
6145 move_bytes
= (bytes
> 8) ? 8 : bytes
;
6146 gen_func
.movstrsi
= gen_movstrsi_2reg
;
6148 else if (bytes
>= 4 && (align
>= 4 || ! STRICT_ALIGNMENT
))
6149 { /* move 4 bytes */
6152 gen_func
.mov
= gen_movsi
;
6154 else if (bytes
== 2 && (align
>= 2 || ! STRICT_ALIGNMENT
))
6155 { /* move 2 bytes */
6158 gen_func
.mov
= gen_movhi
;
6160 else if (bytes
== 1) /* move 1 byte */
6164 gen_func
.mov
= gen_movqi
;
6167 { /* move up to 4 bytes at a time */
6168 move_bytes
= (bytes
> 4) ? 4 : bytes
;
6169 gen_func
.movstrsi
= gen_movstrsi_1reg
;
6172 src
= adjust_address (orig_src
, mode
, offset
);
6173 dest
= adjust_address (orig_dest
, mode
, offset
);
6175 if (mode
== BLKmode
)
6177 /* Move the address into scratch registers. The movstrsi
6178 patterns require zero offset. */
6179 if (!REG_P (XEXP (src
, 0)))
6181 rtx src_reg
= copy_addr_to_reg (XEXP (src
, 0));
6182 src
= replace_equiv_address (src
, src_reg
);
6184 set_mem_size (src
, GEN_INT (move_bytes
));
6186 if (!REG_P (XEXP (dest
, 0)))
6188 rtx dest_reg
= copy_addr_to_reg (XEXP (dest
, 0));
6189 dest
= replace_equiv_address (dest
, dest_reg
);
6191 set_mem_size (dest
, GEN_INT (move_bytes
));
6193 emit_insn ((*gen_func
.movstrsi
) (dest
, src
,
6194 GEN_INT (move_bytes
& 31),
6199 rtx tmp_reg
= gen_reg_rtx (mode
);
6201 emit_insn ((*gen_func
.mov
) (tmp_reg
, src
));
6202 emit_insn ((*gen_func
.mov
) (dest
, tmp_reg
));
6207 else /* string instructions not available */
6209 rtx stores
[MAX_MOVE_REG
];
6213 for (offset
= 0; bytes
> 0; offset
+= move_bytes
, bytes
-= move_bytes
)
6215 rtx (*gen_mov_func
) PARAMS ((rtx
, rtx
));
6216 enum machine_mode mode
;
6217 rtx src
, dest
, tmp_reg
;
6219 /* Generate the appropriate load and store, saving the stores
6221 if (bytes
>= 8 && TARGET_POWERPC64
6222 /* 64-bit loads and stores require word-aligned
6224 && (align
>= 8 || (! STRICT_ALIGNMENT
&& align
>= 4)))
6228 gen_mov_func
= gen_movdi
;
6230 else if (bytes
>= 4 && (align
>= 4 || ! STRICT_ALIGNMENT
))
6234 gen_mov_func
= gen_movsi
;
6236 else if (bytes
>= 2 && (align
>= 2 || ! STRICT_ALIGNMENT
))
6240 gen_mov_func
= gen_movhi
;
6246 gen_mov_func
= gen_movqi
;
6249 src
= adjust_address (orig_src
, mode
, offset
);
6250 dest
= adjust_address (orig_dest
, mode
, offset
);
6251 tmp_reg
= gen_reg_rtx (mode
);
6253 emit_insn ((*gen_mov_func
) (tmp_reg
, src
));
6254 stores
[num_reg
++] = (*gen_mov_func
) (dest
, tmp_reg
);
6256 if (num_reg
>= MAX_MOVE_REG
)
6258 for (i
= 0; i
< num_reg
; i
++)
6259 emit_insn (stores
[i
]);
6264 for (i
= 0; i
< num_reg
; i
++)
6265 emit_insn (stores
[i
]);
6272 /* Return 1 if OP is a load multiple operation. It is known to be a
6273 PARALLEL and the first section will be tested. */
6276 load_multiple_operation (op
, mode
)
6278 enum machine_mode mode ATTRIBUTE_UNUSED
;
6280 int count
= XVECLEN (op
, 0);
6281 unsigned int dest_regno
;
6285 /* Perform a quick check so we don't blow up below. */
6287 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6288 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
6289 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != MEM
)
6292 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
6293 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0);
6295 for (i
= 1; i
< count
; i
++)
6297 rtx elt
= XVECEXP (op
, 0, i
);
6299 if (GET_CODE (elt
) != SET
6300 || GET_CODE (SET_DEST (elt
)) != REG
6301 || GET_MODE (SET_DEST (elt
)) != SImode
6302 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
6303 || GET_CODE (SET_SRC (elt
)) != MEM
6304 || GET_MODE (SET_SRC (elt
)) != SImode
6305 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != PLUS
6306 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt
), 0), 0), src_addr
)
6307 || GET_CODE (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != CONST_INT
6308 || INTVAL (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != i
* 4)
6315 /* Similar, but tests for store multiple. Here, the second vector element
6316 is a CLOBBER. It will be tested later. */
6319 store_multiple_operation (op
, mode
)
6321 enum machine_mode mode ATTRIBUTE_UNUSED
;
6323 int count
= XVECLEN (op
, 0) - 1;
6324 unsigned int src_regno
;
6328 /* Perform a quick check so we don't blow up below. */
6330 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6331 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != MEM
6332 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != REG
)
6335 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
6336 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, 0)), 0);
6338 for (i
= 1; i
< count
; i
++)
6340 rtx elt
= XVECEXP (op
, 0, i
+ 1);
6342 if (GET_CODE (elt
) != SET
6343 || GET_CODE (SET_SRC (elt
)) != REG
6344 || GET_MODE (SET_SRC (elt
)) != SImode
6345 || REGNO (SET_SRC (elt
)) != src_regno
+ i
6346 || GET_CODE (SET_DEST (elt
)) != MEM
6347 || GET_MODE (SET_DEST (elt
)) != SImode
6348 || GET_CODE (XEXP (SET_DEST (elt
), 0)) != PLUS
6349 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt
), 0), 0), dest_addr
)
6350 || GET_CODE (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != CONST_INT
6351 || INTVAL (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != i
* 4)
6358 /* Return 1 for a parallel vrsave operation. */
6361 vrsave_operation (op
, mode
)
6363 enum machine_mode mode ATTRIBUTE_UNUSED
;
6365 int count
= XVECLEN (op
, 0);
6366 unsigned int dest_regno
, src_regno
;
6370 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6371 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
6372 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != UNSPEC_VOLATILE
)
6375 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
6376 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
6378 if (dest_regno
!= VRSAVE_REGNO
6379 && src_regno
!= VRSAVE_REGNO
)
6382 for (i
= 1; i
< count
; i
++)
6384 rtx elt
= XVECEXP (op
, 0, i
);
6386 if (GET_CODE (elt
) != CLOBBER
6387 && GET_CODE (elt
) != SET
)
6394 /* Return 1 for an PARALLEL suitable for mtcrf. */
6397 mtcrf_operation (op
, mode
)
6399 enum machine_mode mode ATTRIBUTE_UNUSED
;
6401 int count
= XVECLEN (op
, 0);
6405 /* Perform a quick check so we don't blow up below. */
6407 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6408 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != UNSPEC
6409 || XVECLEN (SET_SRC (XVECEXP (op
, 0, 0)), 0) != 2)
6411 src_reg
= XVECEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0, 0);
6413 if (GET_CODE (src_reg
) != REG
6414 || GET_MODE (src_reg
) != SImode
6415 || ! INT_REGNO_P (REGNO (src_reg
)))
6418 for (i
= 0; i
< count
; i
++)
6420 rtx exp
= XVECEXP (op
, 0, i
);
6424 if (GET_CODE (exp
) != SET
6425 || GET_CODE (SET_DEST (exp
)) != REG
6426 || GET_MODE (SET_DEST (exp
)) != CCmode
6427 || ! CR_REGNO_P (REGNO (SET_DEST (exp
))))
6429 unspec
= SET_SRC (exp
);
6430 maskval
= 1 << (MAX_CR_REGNO
- REGNO (SET_DEST (exp
)));
6432 if (GET_CODE (unspec
) != UNSPEC
6433 || XINT (unspec
, 1) != 20
6434 || XVECLEN (unspec
, 0) != 2
6435 || XVECEXP (unspec
, 0, 0) != src_reg
6436 || GET_CODE (XVECEXP (unspec
, 0, 1)) != CONST_INT
6437 || INTVAL (XVECEXP (unspec
, 0, 1)) != maskval
)
6443 /* Return 1 for an PARALLEL suitable for lmw. */
6446 lmw_operation (op
, mode
)
6448 enum machine_mode mode ATTRIBUTE_UNUSED
;
6450 int count
= XVECLEN (op
, 0);
6451 unsigned int dest_regno
;
6453 unsigned int base_regno
;
6454 HOST_WIDE_INT offset
;
6457 /* Perform a quick check so we don't blow up below. */
6459 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6460 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
6461 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != MEM
)
6464 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
6465 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0);
6468 || count
!= 32 - (int) dest_regno
)
6471 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr
, 0))
6474 base_regno
= REGNO (src_addr
);
6475 if (base_regno
== 0)
6478 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, src_addr
, 0))
6480 offset
= INTVAL (XEXP (src_addr
, 1));
6481 base_regno
= REGNO (XEXP (src_addr
, 0));
6486 for (i
= 0; i
< count
; i
++)
6488 rtx elt
= XVECEXP (op
, 0, i
);
6491 HOST_WIDE_INT newoffset
;
6493 if (GET_CODE (elt
) != SET
6494 || GET_CODE (SET_DEST (elt
)) != REG
6495 || GET_MODE (SET_DEST (elt
)) != SImode
6496 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
6497 || GET_CODE (SET_SRC (elt
)) != MEM
6498 || GET_MODE (SET_SRC (elt
)) != SImode
)
6500 newaddr
= XEXP (SET_SRC (elt
), 0);
6501 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr
, 0))
6506 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, newaddr
, 0))
6508 addr_reg
= XEXP (newaddr
, 0);
6509 newoffset
= INTVAL (XEXP (newaddr
, 1));
6513 if (REGNO (addr_reg
) != base_regno
6514 || newoffset
!= offset
+ 4 * i
)
6521 /* Return 1 for an PARALLEL suitable for stmw. */
6524 stmw_operation (op
, mode
)
6526 enum machine_mode mode ATTRIBUTE_UNUSED
;
6528 int count
= XVECLEN (op
, 0);
6529 unsigned int src_regno
;
6531 unsigned int base_regno
;
6532 HOST_WIDE_INT offset
;
6535 /* Perform a quick check so we don't blow up below. */
6537 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6538 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != MEM
6539 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != REG
)
6542 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
6543 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, 0)), 0);
6546 || count
!= 32 - (int) src_regno
)
6549 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr
, 0))
6552 base_regno
= REGNO (dest_addr
);
6553 if (base_regno
== 0)
6556 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, dest_addr
, 0))
6558 offset
= INTVAL (XEXP (dest_addr
, 1));
6559 base_regno
= REGNO (XEXP (dest_addr
, 0));
6564 for (i
= 0; i
< count
; i
++)
6566 rtx elt
= XVECEXP (op
, 0, i
);
6569 HOST_WIDE_INT newoffset
;
6571 if (GET_CODE (elt
) != SET
6572 || GET_CODE (SET_SRC (elt
)) != REG
6573 || GET_MODE (SET_SRC (elt
)) != SImode
6574 || REGNO (SET_SRC (elt
)) != src_regno
+ i
6575 || GET_CODE (SET_DEST (elt
)) != MEM
6576 || GET_MODE (SET_DEST (elt
)) != SImode
)
6578 newaddr
= XEXP (SET_DEST (elt
), 0);
6579 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr
, 0))
6584 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, newaddr
, 0))
6586 addr_reg
= XEXP (newaddr
, 0);
6587 newoffset
= INTVAL (XEXP (newaddr
, 1));
6591 if (REGNO (addr_reg
) != base_regno
6592 || newoffset
!= offset
+ 4 * i
)
6599 /* A validation routine: say whether CODE, a condition code, and MODE
6600 match. The other alternatives either don't make sense or should
6601 never be generated. */
6604 validate_condition_mode (code
, mode
)
6606 enum machine_mode mode
;
6608 if (GET_RTX_CLASS (code
) != '<'
6609 || GET_MODE_CLASS (mode
) != MODE_CC
)
6612 /* These don't make sense. */
6613 if ((code
== GT
|| code
== LT
|| code
== GE
|| code
== LE
)
6614 && mode
== CCUNSmode
)
6617 if ((code
== GTU
|| code
== LTU
|| code
== GEU
|| code
== LEU
)
6618 && mode
!= CCUNSmode
)
6621 if (mode
!= CCFPmode
6622 && (code
== ORDERED
|| code
== UNORDERED
6623 || code
== UNEQ
|| code
== LTGT
6624 || code
== UNGT
|| code
== UNLT
6625 || code
== UNGE
|| code
== UNLE
))
6628 /* These should never be generated except for
6629 flag_unsafe_math_optimizations and flag_finite_math_only. */
6630 if (mode
== CCFPmode
6631 && ! flag_unsafe_math_optimizations
6632 && ! flag_finite_math_only
6633 && (code
== LE
|| code
== GE
6634 || code
== UNEQ
|| code
== LTGT
6635 || code
== UNGT
|| code
== UNLT
))
6638 /* These are invalid; the information is not there. */
6639 if (mode
== CCEQmode
6640 && code
!= EQ
&& code
!= NE
)
6644 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
6645 We only check the opcode against the mode of the CC value here. */
6648 branch_comparison_operator (op
, mode
)
6650 enum machine_mode mode ATTRIBUTE_UNUSED
;
6652 enum rtx_code code
= GET_CODE (op
);
6653 enum machine_mode cc_mode
;
6655 if (GET_RTX_CLASS (code
) != '<')
6658 cc_mode
= GET_MODE (XEXP (op
, 0));
6659 if (GET_MODE_CLASS (cc_mode
) != MODE_CC
)
6662 validate_condition_mode (code
, cc_mode
);
6667 /* Return 1 if OP is a comparison operation that is valid for a branch
6668 insn and which is true if the corresponding bit in the CC register
6672 branch_positive_comparison_operator (op
, mode
)
6674 enum machine_mode mode
;
6678 if (! branch_comparison_operator (op
, mode
))
6681 code
= GET_CODE (op
);
6682 return (code
== EQ
|| code
== LT
|| code
== GT
6683 || (TARGET_SPE
&& TARGET_HARD_FLOAT
&& !TARGET_FPRS
&& code
== NE
)
6684 || code
== LTU
|| code
== GTU
6685 || code
== UNORDERED
);
6688 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
6689 We check the opcode against the mode of the CC value and disallow EQ or
6690 NE comparisons for integers. */
6693 scc_comparison_operator (op
, mode
)
6695 enum machine_mode mode
;
6697 enum rtx_code code
= GET_CODE (op
);
6698 enum machine_mode cc_mode
;
6700 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
6703 if (GET_RTX_CLASS (code
) != '<')
6706 cc_mode
= GET_MODE (XEXP (op
, 0));
6707 if (GET_MODE_CLASS (cc_mode
) != MODE_CC
)
6710 validate_condition_mode (code
, cc_mode
);
6712 if (code
== NE
&& cc_mode
!= CCFPmode
)
6719 trap_comparison_operator (op
, mode
)
6721 enum machine_mode mode
;
6723 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
6725 return GET_RTX_CLASS (GET_CODE (op
)) == '<';
6729 boolean_operator (op
, mode
)
6731 enum machine_mode mode ATTRIBUTE_UNUSED
;
6733 enum rtx_code code
= GET_CODE (op
);
6734 return (code
== AND
|| code
== IOR
|| code
== XOR
);
6738 boolean_or_operator (op
, mode
)
6740 enum machine_mode mode ATTRIBUTE_UNUSED
;
6742 enum rtx_code code
= GET_CODE (op
);
6743 return (code
== IOR
|| code
== XOR
);
6747 min_max_operator (op
, mode
)
6749 enum machine_mode mode ATTRIBUTE_UNUSED
;
6751 enum rtx_code code
= GET_CODE (op
);
6752 return (code
== SMIN
|| code
== SMAX
|| code
== UMIN
|| code
== UMAX
);
6755 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
6756 mask required to convert the result of a rotate insn into a shift
6757 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
6760 includes_lshift_p (shiftop
, andop
)
6764 unsigned HOST_WIDE_INT shift_mask
= ~(unsigned HOST_WIDE_INT
) 0;
6766 shift_mask
<<= INTVAL (shiftop
);
6768 return (INTVAL (andop
) & 0xffffffff & ~shift_mask
) == 0;
6771 /* Similar, but for right shift. */
6774 includes_rshift_p (shiftop
, andop
)
6778 unsigned HOST_WIDE_INT shift_mask
= ~(unsigned HOST_WIDE_INT
) 0;
6780 shift_mask
>>= INTVAL (shiftop
);
6782 return (INTVAL (andop
) & 0xffffffff & ~shift_mask
) == 0;
6785 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
6786 to perform a left shift. It must have exactly SHIFTOP least
6787 signifigant 0's, then one or more 1's, then zero or more 0's. */
6790 includes_rldic_lshift_p (shiftop
, andop
)
6794 if (GET_CODE (andop
) == CONST_INT
)
6796 HOST_WIDE_INT c
, lsb
, shift_mask
;
6799 if (c
== 0 || c
== ~0)
6803 shift_mask
<<= INTVAL (shiftop
);
6805 /* Find the least signifigant one bit. */
6808 /* It must coincide with the LSB of the shift mask. */
6809 if (-lsb
!= shift_mask
)
6812 /* Invert to look for the next transition (if any). */
6815 /* Remove the low group of ones (originally low group of zeros). */
6818 /* Again find the lsb, and check we have all 1's above. */
6822 else if (GET_CODE (andop
) == CONST_DOUBLE
6823 && (GET_MODE (andop
) == VOIDmode
|| GET_MODE (andop
) == DImode
))
6825 HOST_WIDE_INT low
, high
, lsb
;
6826 HOST_WIDE_INT shift_mask_low
, shift_mask_high
;
6828 low
= CONST_DOUBLE_LOW (andop
);
6829 if (HOST_BITS_PER_WIDE_INT
< 64)
6830 high
= CONST_DOUBLE_HIGH (andop
);
6832 if ((low
== 0 && (HOST_BITS_PER_WIDE_INT
>= 64 || high
== 0))
6833 || (low
== ~0 && (HOST_BITS_PER_WIDE_INT
>= 64 || high
== ~0)))
6836 if (HOST_BITS_PER_WIDE_INT
< 64 && low
== 0)
6838 shift_mask_high
= ~0;
6839 if (INTVAL (shiftop
) > 32)
6840 shift_mask_high
<<= INTVAL (shiftop
) - 32;
6844 if (-lsb
!= shift_mask_high
|| INTVAL (shiftop
) < 32)
6851 return high
== -lsb
;
6854 shift_mask_low
= ~0;
6855 shift_mask_low
<<= INTVAL (shiftop
);
6859 if (-lsb
!= shift_mask_low
)
6862 if (HOST_BITS_PER_WIDE_INT
< 64)
6867 if (HOST_BITS_PER_WIDE_INT
< 64 && low
== 0)
6870 return high
== -lsb
;
6874 return low
== -lsb
&& (HOST_BITS_PER_WIDE_INT
>= 64 || high
== ~0);
6880 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
6881 to perform a left shift. It must have SHIFTOP or more least
6882 signifigant 0's, with the remainder of the word 1's. */
6885 includes_rldicr_lshift_p (shiftop
, andop
)
6889 if (GET_CODE (andop
) == CONST_INT
)
6891 HOST_WIDE_INT c
, lsb
, shift_mask
;
6894 shift_mask
<<= INTVAL (shiftop
);
6897 /* Find the least signifigant one bit. */
6900 /* It must be covered by the shift mask.
6901 This test also rejects c == 0. */
6902 if ((lsb
& shift_mask
) == 0)
6905 /* Check we have all 1's above the transition, and reject all 1's. */
6906 return c
== -lsb
&& lsb
!= 1;
6908 else if (GET_CODE (andop
) == CONST_DOUBLE
6909 && (GET_MODE (andop
) == VOIDmode
|| GET_MODE (andop
) == DImode
))
6911 HOST_WIDE_INT low
, lsb
, shift_mask_low
;
6913 low
= CONST_DOUBLE_LOW (andop
);
6915 if (HOST_BITS_PER_WIDE_INT
< 64)
6917 HOST_WIDE_INT high
, shift_mask_high
;
6919 high
= CONST_DOUBLE_HIGH (andop
);
6923 shift_mask_high
= ~0;
6924 if (INTVAL (shiftop
) > 32)
6925 shift_mask_high
<<= INTVAL (shiftop
) - 32;
6929 if ((lsb
& shift_mask_high
) == 0)
6932 return high
== -lsb
;
6938 shift_mask_low
= ~0;
6939 shift_mask_low
<<= INTVAL (shiftop
);
6943 if ((lsb
& shift_mask_low
) == 0)
6946 return low
== -lsb
&& lsb
!= 1;
6952 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
6953 for lfq and stfq insns.
6955 Note reg1 and reg2 *must* be hard registers. To be sure we will
6956 abort if we are passed pseudo registers. */
6959 registers_ok_for_quad_peep (reg1
, reg2
)
6962 /* We might have been passed a SUBREG. */
6963 if (GET_CODE (reg1
) != REG
|| GET_CODE (reg2
) != REG
)
6966 return (REGNO (reg1
) == REGNO (reg2
) - 1);
6969 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
6970 addr1 and addr2 must be in consecutive memory locations
6971 (addr2 == addr1 + 8). */
6974 addrs_ok_for_quad_peep (addr1
, addr2
)
6981 /* Extract an offset (if used) from the first addr. */
6982 if (GET_CODE (addr1
) == PLUS
)
6984 /* If not a REG, return zero. */
6985 if (GET_CODE (XEXP (addr1
, 0)) != REG
)
6989 reg1
= REGNO (XEXP (addr1
, 0));
6990 /* The offset must be constant! */
6991 if (GET_CODE (XEXP (addr1
, 1)) != CONST_INT
)
6993 offset1
= INTVAL (XEXP (addr1
, 1));
6996 else if (GET_CODE (addr1
) != REG
)
7000 reg1
= REGNO (addr1
);
7001 /* This was a simple (mem (reg)) expression. Offset is 0. */
7005 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
7006 if (GET_CODE (addr2
) != PLUS
)
7009 if (GET_CODE (XEXP (addr2
, 0)) != REG
7010 || GET_CODE (XEXP (addr2
, 1)) != CONST_INT
)
7013 if (reg1
!= REGNO (XEXP (addr2
, 0)))
7016 /* The offset for the second addr must be 8 more than the first addr. */
7017 if (INTVAL (XEXP (addr2
, 1)) != offset1
+ 8)
7020 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7025 /* Return the register class of a scratch register needed to copy IN into
7026 or out of a register in CLASS in MODE. If it can be done directly,
7027 NO_REGS is returned. */
7030 secondary_reload_class (class, mode
, in
)
7031 enum reg_class
class;
7032 enum machine_mode mode ATTRIBUTE_UNUSED
;
7037 if (TARGET_ELF
|| (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))
7039 /* We cannot copy a symbolic operand directly into anything
7040 other than BASE_REGS for TARGET_ELF. So indicate that a
7041 register from BASE_REGS is needed as an intermediate
7044 On Darwin, pic addresses require a load from memory, which
7045 needs a base register. */
7046 if (class != BASE_REGS
7047 && (GET_CODE (in
) == SYMBOL_REF
7048 || GET_CODE (in
) == HIGH
7049 || GET_CODE (in
) == LABEL_REF
7050 || GET_CODE (in
) == CONST
))
7054 if (GET_CODE (in
) == REG
)
7057 if (regno
>= FIRST_PSEUDO_REGISTER
)
7059 regno
= true_regnum (in
);
7060 if (regno
>= FIRST_PSEUDO_REGISTER
)
7064 else if (GET_CODE (in
) == SUBREG
)
7066 regno
= true_regnum (in
);
7067 if (regno
>= FIRST_PSEUDO_REGISTER
)
7073 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
7075 if (class == GENERAL_REGS
|| class == BASE_REGS
7076 || (regno
>= 0 && INT_REGNO_P (regno
)))
7079 /* Constants, memory, and FP registers can go into FP registers. */
7080 if ((regno
== -1 || FP_REGNO_P (regno
))
7081 && (class == FLOAT_REGS
|| class == NON_SPECIAL_REGS
))
7084 /* Memory, and AltiVec registers can go into AltiVec registers. */
7085 if ((regno
== -1 || ALTIVEC_REGNO_P (regno
))
7086 && class == ALTIVEC_REGS
)
7089 /* We can copy among the CR registers. */
7090 if ((class == CR_REGS
|| class == CR0_REGS
)
7091 && regno
>= 0 && CR_REGNO_P (regno
))
7094 /* Otherwise, we need GENERAL_REGS. */
7095 return GENERAL_REGS
;
7098 /* Given a comparison operation, return the bit number in CCR to test. We
7099 know this is a valid comparison.
7101 SCC_P is 1 if this is for an scc. That means that %D will have been
7102 used instead of %C, so the bits will be in different places.
7104 Return -1 if OP isn't a valid comparison for some reason. */
7111 enum rtx_code code
= GET_CODE (op
);
7112 enum machine_mode cc_mode
;
7117 if (GET_RTX_CLASS (code
) != '<')
7122 if (GET_CODE (reg
) != REG
7123 || ! CR_REGNO_P (REGNO (reg
)))
7126 cc_mode
= GET_MODE (reg
);
7127 cc_regnum
= REGNO (reg
);
7128 base_bit
= 4 * (cc_regnum
- CR0_REGNO
);
7130 validate_condition_mode (code
, cc_mode
);
7135 if (TARGET_SPE
&& TARGET_HARD_FLOAT
&& cc_mode
== CCFPmode
)
7136 return base_bit
+ 1;
7137 return scc_p
? base_bit
+ 3 : base_bit
+ 2;
7139 if (TARGET_SPE
&& TARGET_HARD_FLOAT
&& cc_mode
== CCFPmode
)
7140 return base_bit
+ 1;
7141 return base_bit
+ 2;
7142 case GT
: case GTU
: case UNLE
:
7143 return base_bit
+ 1;
7144 case LT
: case LTU
: case UNGE
:
7146 case ORDERED
: case UNORDERED
:
7147 return base_bit
+ 3;
7150 /* If scc, we will have done a cror to put the bit in the
7151 unordered position. So test that bit. For integer, this is ! LT
7152 unless this is an scc insn. */
7153 return scc_p
? base_bit
+ 3 : base_bit
;
7156 return scc_p
? base_bit
+ 3 : base_bit
+ 1;
7163 /* Return the GOT register. */
7166 rs6000_got_register (value
)
7167 rtx value ATTRIBUTE_UNUSED
;
7169 /* The second flow pass currently (June 1999) can't update
7170 regs_ever_live without disturbing other parts of the compiler, so
7171 update it here to make the prolog/epilogue code happy. */
7172 if (no_new_pseudos
&& ! regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
])
7173 regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
] = 1;
7175 current_function_uses_pic_offset_table
= 1;
7177 return pic_offset_table_rtx
;
7180 /* Function to init struct machine_function.
7181 This will be called, via a pointer variable,
7182 from push_function_context. */
7184 static struct machine_function
*
7185 rs6000_init_machine_status ()
7187 return ggc_alloc_cleared (sizeof (machine_function
));
7190 /* These macros test for integers and extract the low-order bits. */
7192 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
7193 && GET_MODE (X) == VOIDmode)
7195 #define INT_LOWPART(X) \
7196 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
7203 unsigned long val
= INT_LOWPART (op
);
7205 /* If the high bit is zero, the value is the first 1 bit we find
7207 if ((val
& 0x80000000) == 0)
7209 if ((val
& 0xffffffff) == 0)
7213 while (((val
<<= 1) & 0x80000000) == 0)
7218 /* If the high bit is set and the low bit is not, or the mask is all
7219 1's, the value is zero. */
7220 if ((val
& 1) == 0 || (val
& 0xffffffff) == 0xffffffff)
7223 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7226 while (((val
>>= 1) & 1) != 0)
7237 unsigned long val
= INT_LOWPART (op
);
7239 /* If the low bit is zero, the value is the first 1 bit we find from
7243 if ((val
& 0xffffffff) == 0)
7247 while (((val
>>= 1) & 1) == 0)
7253 /* If the low bit is set and the high bit is not, or the mask is all
7254 1's, the value is 31. */
7255 if ((val
& 0x80000000) == 0 || (val
& 0xffffffff) == 0xffffffff)
7258 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7261 while (((val
<<= 1) & 0x80000000) != 0)
7267 /* Print an operand. Recognize special options, documented below. */
7270 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
7271 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
7273 #define SMALL_DATA_RELOC "sda21"
7274 #define SMALL_DATA_REG 0
7278 print_operand (file
, x
, code
)
7285 unsigned HOST_WIDE_INT uval
;
7290 /* Write out an instruction after the call which may be replaced
7291 with glue code by the loader. This depends on the AIX version. */
7292 asm_fprintf (file
, RS6000_CALL_GLUE
);
7295 /* %a is output_address. */
7298 /* If X is a constant integer whose low-order 5 bits are zero,
7299 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
7300 in the AIX assembler where "sri" with a zero shift count
7301 writes a trash instruction. */
7302 if (GET_CODE (x
) == CONST_INT
&& (INTVAL (x
) & 31) == 0)
7309 /* If constant, low-order 16 bits of constant, unsigned.
7310 Otherwise, write normally. */
7312 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 0xffff);
7314 print_operand (file
, x
, 0);
7318 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
7319 for 64-bit mask direction. */
7320 putc (((INT_LOWPART(x
) & 1) == 0 ? 'r' : 'l'), file
);
7323 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
7327 /* There used to be a comment for 'C' reading "This is an
7328 optional cror needed for certain floating-point
7329 comparisons. Otherwise write nothing." */
7331 /* Similar, except that this is for an scc, so we must be able to
7332 encode the test in a single bit that is one. We do the above
7333 for any LE, GE, GEU, or LEU and invert the bit for NE. */
7334 if (GET_CODE (x
) == LE
|| GET_CODE (x
) == GE
7335 || GET_CODE (x
) == LEU
|| GET_CODE (x
) == GEU
)
7337 int base_bit
= 4 * (REGNO (XEXP (x
, 0)) - CR0_REGNO
);
7339 fprintf (file
, "cror %d,%d,%d\n\t", base_bit
+ 3,
7341 base_bit
+ (GET_CODE (x
) == GE
|| GET_CODE (x
) == GEU
));
7344 else if (GET_CODE (x
) == NE
)
7346 int base_bit
= 4 * (REGNO (XEXP (x
, 0)) - CR0_REGNO
);
7348 fprintf (file
, "crnor %d,%d,%d\n\t", base_bit
+ 3,
7349 base_bit
+ 2, base_bit
+ 2);
7351 else if (TARGET_SPE
&& TARGET_HARD_FLOAT
7352 && GET_CODE (x
) == EQ
7353 && GET_MODE (XEXP (x
, 0)) == CCFPmode
)
7355 int base_bit
= 4 * (REGNO (XEXP (x
, 0)) - CR0_REGNO
);
7357 fprintf (file
, "crnor %d,%d,%d\n\t", base_bit
+ 1,
7358 base_bit
+ 1, base_bit
+ 1);
7363 /* X is a CR register. Print the number of the EQ bit of the CR */
7364 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7365 output_operand_lossage ("invalid %%E value");
7367 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
) + 2);
7371 /* X is a CR register. Print the shift count needed to move it
7372 to the high-order four bits. */
7373 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7374 output_operand_lossage ("invalid %%f value");
7376 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
));
7380 /* Similar, but print the count for the rotate in the opposite
7382 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7383 output_operand_lossage ("invalid %%F value");
7385 fprintf (file
, "%d", 32 - 4 * (REGNO (x
) - CR0_REGNO
));
7389 /* X is a constant integer. If it is negative, print "m",
7390 otherwise print "z". This is to make a aze or ame insn. */
7391 if (GET_CODE (x
) != CONST_INT
)
7392 output_operand_lossage ("invalid %%G value");
7393 else if (INTVAL (x
) >= 0)
7400 /* If constant, output low-order five bits. Otherwise, write
7403 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 31);
7405 print_operand (file
, x
, 0);
7409 /* If constant, output low-order six bits. Otherwise, write
7412 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 63);
7414 print_operand (file
, x
, 0);
7418 /* Print `i' if this is a constant, else nothing. */
7424 /* Write the bit number in CCR for jump. */
7427 output_operand_lossage ("invalid %%j code");
7429 fprintf (file
, "%d", i
);
7433 /* Similar, but add one for shift count in rlinm for scc and pass
7434 scc flag to `ccr_bit'. */
7437 output_operand_lossage ("invalid %%J code");
7439 /* If we want bit 31, write a shift count of zero, not 32. */
7440 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
7444 /* X must be a constant. Write the 1's complement of the
7447 output_operand_lossage ("invalid %%k value");
7449 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ~ INT_LOWPART (x
));
7453 /* X must be a symbolic constant on ELF. Write an
7454 expression suitable for an 'addi' that adds in the low 16
7456 if (GET_CODE (x
) != CONST
)
7458 print_operand_address (file
, x
);
7463 if (GET_CODE (XEXP (x
, 0)) != PLUS
7464 || (GET_CODE (XEXP (XEXP (x
, 0), 0)) != SYMBOL_REF
7465 && GET_CODE (XEXP (XEXP (x
, 0), 0)) != LABEL_REF
)
7466 || GET_CODE (XEXP (XEXP (x
, 0), 1)) != CONST_INT
)
7467 output_operand_lossage ("invalid %%K value");
7468 print_operand_address (file
, XEXP (XEXP (x
, 0), 0));
7470 /* For GNU as, there must be a non-alphanumeric character
7471 between 'l' and the number. The '-' is added by
7472 print_operand() already. */
7473 if (INTVAL (XEXP (XEXP (x
, 0), 1)) >= 0)
7475 print_operand (file
, XEXP (XEXP (x
, 0), 1), 0);
7479 /* %l is output_asm_label. */
7482 /* Write second word of DImode or DFmode reference. Works on register
7483 or non-indexed memory only. */
7484 if (GET_CODE (x
) == REG
)
7485 fprintf (file
, "%s", reg_names
[REGNO (x
) + 1]);
7486 else if (GET_CODE (x
) == MEM
)
7488 /* Handle possible auto-increment. Since it is pre-increment and
7489 we have already done it, we can just use an offset of word. */
7490 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
7491 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
7492 output_address (plus_constant (XEXP (XEXP (x
, 0), 0),
7495 output_address (XEXP (adjust_address_nv (x
, SImode
,
7499 if (small_data_operand (x
, GET_MODE (x
)))
7500 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
7501 reg_names
[SMALL_DATA_REG
]);
7506 /* MB value for a mask operand. */
7507 if (! mask_operand (x
, SImode
))
7508 output_operand_lossage ("invalid %%m value");
7510 fprintf (file
, "%d", extract_MB (x
));
7514 /* ME value for a mask operand. */
7515 if (! mask_operand (x
, SImode
))
7516 output_operand_lossage ("invalid %%M value");
7518 fprintf (file
, "%d", extract_ME (x
));
7521 /* %n outputs the negative of its operand. */
7524 /* Write the number of elements in the vector times 4. */
7525 if (GET_CODE (x
) != PARALLEL
)
7526 output_operand_lossage ("invalid %%N value");
7528 fprintf (file
, "%d", XVECLEN (x
, 0) * 4);
7532 /* Similar, but subtract 1 first. */
7533 if (GET_CODE (x
) != PARALLEL
)
7534 output_operand_lossage ("invalid %%O value");
7536 fprintf (file
, "%d", (XVECLEN (x
, 0) - 1) * 4);
7540 /* X is a CONST_INT that is a power of two. Output the logarithm. */
7542 || INT_LOWPART (x
) < 0
7543 || (i
= exact_log2 (INT_LOWPART (x
))) < 0)
7544 output_operand_lossage ("invalid %%p value");
7546 fprintf (file
, "%d", i
);
7550 /* The operand must be an indirect memory reference. The result
7551 is the register number. */
7552 if (GET_CODE (x
) != MEM
|| GET_CODE (XEXP (x
, 0)) != REG
7553 || REGNO (XEXP (x
, 0)) >= 32)
7554 output_operand_lossage ("invalid %%P value");
7556 fprintf (file
, "%d", REGNO (XEXP (x
, 0)));
7560 /* This outputs the logical code corresponding to a boolean
7561 expression. The expression may have one or both operands
7562 negated (if one, only the first one). For condition register
7563 logical operations, it will also treat the negated
7564 CR codes as NOTs, but not handle NOTs of them. */
7566 const char *const *t
= 0;
7568 enum rtx_code code
= GET_CODE (x
);
7569 static const char * const tbl
[3][3] = {
7570 { "and", "andc", "nor" },
7571 { "or", "orc", "nand" },
7572 { "xor", "eqv", "xor" } };
7576 else if (code
== IOR
)
7578 else if (code
== XOR
)
7581 output_operand_lossage ("invalid %%q value");
7583 if (GET_CODE (XEXP (x
, 0)) != NOT
)
7587 if (GET_CODE (XEXP (x
, 1)) == NOT
)
7598 /* X is a CR register. Print the mask for `mtcrf'. */
7599 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7600 output_operand_lossage ("invalid %%R value");
7602 fprintf (file
, "%d", 128 >> (REGNO (x
) - CR0_REGNO
));
7606 /* Low 5 bits of 32 - value */
7608 output_operand_lossage ("invalid %%s value");
7610 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, (32 - INT_LOWPART (x
)) & 31);
7614 /* PowerPC64 mask position. All 0's is excluded.
7615 CONST_INT 32-bit mask is considered sign-extended so any
7616 transition must occur within the CONST_INT, not on the boundary. */
7617 if (! mask64_operand (x
, DImode
))
7618 output_operand_lossage ("invalid %%S value");
7620 uval
= INT_LOWPART (x
);
7622 if (uval
& 1) /* Clear Left */
7624 uval
&= ((unsigned HOST_WIDE_INT
) 1 << 63 << 1) - 1;
7627 else /* Clear Right */
7630 uval
&= ((unsigned HOST_WIDE_INT
) 1 << 63 << 1) - 1;
7637 fprintf (file
, "%d", i
);
7641 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
7642 if (GET_CODE (x
) != REG
|| GET_MODE (x
) != CCmode
)
7645 /* Bit 3 is OV bit. */
7646 i
= 4 * (REGNO (x
) - CR0_REGNO
) + 3;
7648 /* If we want bit 31, write a shift count of zero, not 32. */
7649 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
7653 /* Print the symbolic name of a branch target register. */
7654 if (GET_CODE (x
) != REG
|| (REGNO (x
) != LINK_REGISTER_REGNUM
7655 && REGNO (x
) != COUNT_REGISTER_REGNUM
))
7656 output_operand_lossage ("invalid %%T value");
7657 else if (REGNO (x
) == LINK_REGISTER_REGNUM
)
7658 fputs (TARGET_NEW_MNEMONICS
? "lr" : "r", file
);
7660 fputs ("ctr", file
);
7664 /* High-order 16 bits of constant for use in unsigned operand. */
7666 output_operand_lossage ("invalid %%u value");
7668 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
7669 (INT_LOWPART (x
) >> 16) & 0xffff);
7673 /* High-order 16 bits of constant for use in signed operand. */
7675 output_operand_lossage ("invalid %%v value");
7677 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
7678 (INT_LOWPART (x
) >> 16) & 0xffff);
7682 /* Print `u' if this has an auto-increment or auto-decrement. */
7683 if (GET_CODE (x
) == MEM
7684 && (GET_CODE (XEXP (x
, 0)) == PRE_INC
7685 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
))
7690 /* Print the trap code for this operand. */
7691 switch (GET_CODE (x
))
7694 fputs ("eq", file
); /* 4 */
7697 fputs ("ne", file
); /* 24 */
7700 fputs ("lt", file
); /* 16 */
7703 fputs ("le", file
); /* 20 */
7706 fputs ("gt", file
); /* 8 */
7709 fputs ("ge", file
); /* 12 */
7712 fputs ("llt", file
); /* 2 */
7715 fputs ("lle", file
); /* 6 */
7718 fputs ("lgt", file
); /* 1 */
7721 fputs ("lge", file
); /* 5 */
7729 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
7732 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
,
7733 ((INT_LOWPART (x
) & 0xffff) ^ 0x8000) - 0x8000);
7735 print_operand (file
, x
, 0);
7739 /* MB value for a PowerPC64 rldic operand. */
7740 val
= (GET_CODE (x
) == CONST_INT
7741 ? INTVAL (x
) : CONST_DOUBLE_HIGH (x
));
7746 for (i
= 0; i
< HOST_BITS_PER_WIDE_INT
; i
++)
7747 if ((val
<<= 1) < 0)
7750 #if HOST_BITS_PER_WIDE_INT == 32
7751 if (GET_CODE (x
) == CONST_INT
&& i
>= 0)
7752 i
+= 32; /* zero-extend high-part was all 0's */
7753 else if (GET_CODE (x
) == CONST_DOUBLE
&& i
== 32)
7755 val
= CONST_DOUBLE_LOW (x
);
7762 for ( ; i
< 64; i
++)
7763 if ((val
<<= 1) < 0)
7768 fprintf (file
, "%d", i
+ 1);
7772 if (GET_CODE (x
) == MEM
7773 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x
, 0), 0))
7778 /* Like 'L', for third word of TImode */
7779 if (GET_CODE (x
) == REG
)
7780 fprintf (file
, "%s", reg_names
[REGNO (x
) + 2]);
7781 else if (GET_CODE (x
) == MEM
)
7783 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
7784 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
7785 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 8));
7787 output_address (XEXP (adjust_address_nv (x
, SImode
, 8), 0));
7788 if (small_data_operand (x
, GET_MODE (x
)))
7789 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
7790 reg_names
[SMALL_DATA_REG
]);
7795 /* X is a SYMBOL_REF. Write out the name preceded by a
7796 period and without any trailing data in brackets. Used for function
7797 names. If we are configured for System V (or the embedded ABI) on
7798 the PowerPC, do not emit the period, since those systems do not use
7799 TOCs and the like. */
7800 if (GET_CODE (x
) != SYMBOL_REF
)
7803 if (XSTR (x
, 0)[0] != '.')
7805 switch (DEFAULT_ABI
)
7815 case ABI_AIX_NODESC
:
7821 RS6000_OUTPUT_BASENAME (file
, XSTR (x
, 0));
7823 assemble_name (file
, XSTR (x
, 0));
7828 /* Like 'L', for last word of TImode. */
7829 if (GET_CODE (x
) == REG
)
7830 fprintf (file
, "%s", reg_names
[REGNO (x
) + 3]);
7831 else if (GET_CODE (x
) == MEM
)
7833 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
7834 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
7835 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 12));
7837 output_address (XEXP (adjust_address_nv (x
, SImode
, 12), 0));
7838 if (small_data_operand (x
, GET_MODE (x
)))
7839 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
7840 reg_names
[SMALL_DATA_REG
]);
7844 /* Print AltiVec or SPE memory operand. */
7849 if (GET_CODE (x
) != MEM
)
7857 if (GET_CODE (tmp
) == REG
)
7859 fprintf (file
, "0(%s)", reg_names
[REGNO (tmp
)]);
7862 /* Handle [reg+UIMM]. */
7863 else if (GET_CODE (tmp
) == PLUS
&&
7864 GET_CODE (XEXP (tmp
, 1)) == CONST_INT
)
7868 if (GET_CODE (XEXP (tmp
, 0)) != REG
)
7871 x
= INTVAL (XEXP (tmp
, 1));
7872 fprintf (file
, "%d(%s)", x
, reg_names
[REGNO (XEXP (tmp
, 0))]);
7876 /* Fall through. Must be [reg+reg]. */
7878 if (GET_CODE (tmp
) == REG
)
7879 fprintf (file
, "0,%s", reg_names
[REGNO (tmp
)]);
7880 else if (GET_CODE (tmp
) == PLUS
&& GET_CODE (XEXP (tmp
, 1)) == REG
)
7882 if (REGNO (XEXP (tmp
, 0)) == 0)
7883 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (tmp
, 1)) ],
7884 reg_names
[ REGNO (XEXP (tmp
, 0)) ]);
7886 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (tmp
, 0)) ],
7887 reg_names
[ REGNO (XEXP (tmp
, 1)) ]);
7895 if (GET_CODE (x
) == REG
)
7896 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
7897 else if (GET_CODE (x
) == MEM
)
7899 /* We need to handle PRE_INC and PRE_DEC here, since we need to
7900 know the width from the mode. */
7901 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
)
7902 fprintf (file
, "%d(%s)", GET_MODE_SIZE (GET_MODE (x
)),
7903 reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))]);
7904 else if (GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
7905 fprintf (file
, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x
)),
7906 reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))]);
7908 output_address (XEXP (x
, 0));
7911 output_addr_const (file
, x
);
7915 output_operand_lossage ("invalid %%xn code");
7919 /* Print the address of an operand. */
7922 print_operand_address (file
, x
)
7926 if (GET_CODE (x
) == REG
)
7927 fprintf (file
, "0(%s)", reg_names
[ REGNO (x
) ]);
7928 else if (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == CONST
7929 || GET_CODE (x
) == LABEL_REF
)
7931 output_addr_const (file
, x
);
7932 if (small_data_operand (x
, GET_MODE (x
)))
7933 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
7934 reg_names
[SMALL_DATA_REG
]);
7935 else if (TARGET_TOC
)
7938 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == REG
)
7940 if (REGNO (XEXP (x
, 0)) == 0)
7941 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (x
, 1)) ],
7942 reg_names
[ REGNO (XEXP (x
, 0)) ]);
7944 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (x
, 0)) ],
7945 reg_names
[ REGNO (XEXP (x
, 1)) ]);
7947 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
)
7949 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (XEXP (x
, 1)));
7950 fprintf (file
, "(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
7953 else if (GET_CODE (x
) == LO_SUM
&& GET_CODE (XEXP (x
, 0)) == REG
7954 && CONSTANT_P (XEXP (x
, 1)))
7956 output_addr_const (file
, XEXP (x
, 1));
7957 fprintf (file
, "@l(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
7961 else if (GET_CODE (x
) == LO_SUM
&& GET_CODE (XEXP (x
, 0)) == REG
7962 && CONSTANT_P (XEXP (x
, 1)))
7964 fprintf (file
, "lo16(");
7965 output_addr_const (file
, XEXP (x
, 1));
7966 fprintf (file
, ")(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
7969 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x
))
7971 if (TARGET_AIX
&& (!TARGET_ELF
|| !TARGET_MINIMAL_TOC
))
7973 rtx contains_minus
= XEXP (x
, 1);
7977 /* Find the (minus (sym) (toc)) buried in X, and temporarily
7978 turn it into (sym) for output_addr_const. */
7979 while (GET_CODE (XEXP (contains_minus
, 0)) != MINUS
)
7980 contains_minus
= XEXP (contains_minus
, 0);
7982 minus
= XEXP (contains_minus
, 0);
7983 symref
= XEXP (minus
, 0);
7984 XEXP (contains_minus
, 0) = symref
;
7989 name
= XSTR (symref
, 0);
7990 newname
= alloca (strlen (name
) + sizeof ("@toc"));
7991 strcpy (newname
, name
);
7992 strcat (newname
, "@toc");
7993 XSTR (symref
, 0) = newname
;
7995 output_addr_const (file
, XEXP (x
, 1));
7997 XSTR (symref
, 0) = name
;
7998 XEXP (contains_minus
, 0) = minus
;
8001 output_addr_const (file
, XEXP (x
, 1));
8003 fprintf (file
, "(%s)", reg_names
[REGNO (XEXP (x
, 0))]);
8009 /* Target hook for assembling integer objects. The powerpc version has
8010 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
8011 is defined. It also needs to handle DI-mode objects on 64-bit
8015 rs6000_assemble_integer (x
, size
, aligned_p
)
8020 #ifdef RELOCATABLE_NEEDS_FIXUP
8021 /* Special handling for SI values. */
8022 if (size
== 4 && aligned_p
)
8024 extern int in_toc_section
PARAMS ((void));
8025 static int recurse
= 0;
8027 /* For -mrelocatable, we mark all addresses that need to be fixed up
8028 in the .fixup section. */
8029 if (TARGET_RELOCATABLE
8030 && !in_toc_section ()
8031 && !in_text_section ()
8033 && GET_CODE (x
) != CONST_INT
8034 && GET_CODE (x
) != CONST_DOUBLE
8040 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCP", fixuplabelno
);
8042 ASM_OUTPUT_LABEL (asm_out_file
, buf
);
8043 fprintf (asm_out_file
, "\t.long\t(");
8044 output_addr_const (asm_out_file
, x
);
8045 fprintf (asm_out_file
, ")@fixup\n");
8046 fprintf (asm_out_file
, "\t.section\t\".fixup\",\"aw\"\n");
8047 ASM_OUTPUT_ALIGN (asm_out_file
, 2);
8048 fprintf (asm_out_file
, "\t.long\t");
8049 assemble_name (asm_out_file
, buf
);
8050 fprintf (asm_out_file
, "\n\t.previous\n");
8054 /* Remove initial .'s to turn a -mcall-aixdesc function
8055 address into the address of the descriptor, not the function
8057 else if (GET_CODE (x
) == SYMBOL_REF
8058 && XSTR (x
, 0)[0] == '.'
8059 && DEFAULT_ABI
== ABI_AIX
)
8061 const char *name
= XSTR (x
, 0);
8062 while (*name
== '.')
8065 fprintf (asm_out_file
, "\t.long\t%s\n", name
);
8069 #endif /* RELOCATABLE_NEEDS_FIXUP */
8070 return default_assemble_integer (x
, size
, aligned_p
);
8073 #ifdef HAVE_GAS_HIDDEN
8074 /* Emit an assembler directive to set symbol visibility for DECL to
8078 rs6000_assemble_visibility (decl
, visibility_type
)
8080 const char *visibility_type
;
8082 default_assemble_visibility (decl
, visibility_type
);
8084 /* Functions need to have their entry point symbol visibility set as
8085 well as their descriptor symbol visibility. */
8086 if (DEFAULT_ABI
== ABI_AIX
&& TREE_CODE (decl
) == FUNCTION_DECL
)
8090 name
= ((* targetm
.strip_name_encoding
)
8091 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
))));
8093 fprintf (asm_out_file
, "\t.%s\t.%s\n", visibility_type
, name
);
8099 rs6000_reverse_condition (mode
, code
)
8100 enum machine_mode mode
;
8103 /* Reversal of FP compares takes care -- an ordered compare
8104 becomes an unordered compare and vice versa. */
8105 if (mode
== CCFPmode
)
8106 return reverse_condition_maybe_unordered (code
);
8108 return reverse_condition (code
);
8111 /* Generate a compare for CODE. Return a brand-new rtx that
8112 represents the result of the compare. */
8115 rs6000_generate_compare (code
)
8118 enum machine_mode comp_mode
;
8121 if (rs6000_compare_fp_p
)
8122 comp_mode
= CCFPmode
;
8123 else if (code
== GTU
|| code
== LTU
8124 || code
== GEU
|| code
== LEU
)
8125 comp_mode
= CCUNSmode
;
8129 /* First, the compare. */
8130 compare_result
= gen_reg_rtx (comp_mode
);
8132 /* SPE FP compare instructions on the GPRs. Yuck! */
8133 if ((TARGET_SPE
&& TARGET_HARD_FLOAT
) && rs6000_compare_fp_p
)
8135 rtx cmp
, or1
, or2
, or_result
, compare_result2
;
8143 cmp
= flag_unsafe_math_optimizations
8144 ? gen_tstsfeq_gpr (compare_result
, rs6000_compare_op0
,
8146 : gen_cmpsfeq_gpr (compare_result
, rs6000_compare_op0
,
8147 rs6000_compare_op1
);
8155 cmp
= flag_unsafe_math_optimizations
8156 ? gen_tstsfgt_gpr (compare_result
, rs6000_compare_op0
,
8158 : gen_cmpsfgt_gpr (compare_result
, rs6000_compare_op0
,
8159 rs6000_compare_op1
);
8167 cmp
= flag_unsafe_math_optimizations
8168 ? gen_tstsflt_gpr (compare_result
, rs6000_compare_op0
,
8170 : gen_cmpsflt_gpr (compare_result
, rs6000_compare_op0
,
8171 rs6000_compare_op1
);
8177 /* Synthesize LE and GE from LT/GT || EQ. */
8178 if (code
== LE
|| code
== GE
|| code
== LEU
|| code
== GEU
)
8180 /* Synthesize GE/LE frome GT/LT || EQ. */
8186 case LE
: code
= LT
; break;
8187 case GE
: code
= GT
; break;
8188 case LEU
: code
= LT
; break;
8189 case GEU
: code
= GT
; break;
8193 or1
= gen_reg_rtx (SImode
);
8194 or2
= gen_reg_rtx (SImode
);
8195 or_result
= gen_reg_rtx (CCEQmode
);
8196 compare_result2
= gen_reg_rtx (CCFPmode
);
8199 cmp
= flag_unsafe_math_optimizations
8200 ? gen_tstsfeq_gpr (compare_result2
, rs6000_compare_op0
,
8202 : gen_cmpsfeq_gpr (compare_result2
, rs6000_compare_op0
,
8203 rs6000_compare_op1
);
8206 /* The MC8540 FP compare instructions set the CR bits
8207 differently than other PPC compare instructions. For
8208 that matter, there is no generic test instruction, but a
8209 testgt, testlt, and testeq. For a true condition, bit 2
8210 is set (x1xx) in the CR. Following the traditional CR
8216 ... bit 2 would be a GT CR alias, so later on we
8217 look in the GT bits for the branch instructins.
8218 However, we must be careful to emit correct RTL in
8219 the meantime, so optimizations don't get confused. */
8221 or1
= gen_rtx (NE
, SImode
, compare_result
, const0_rtx
);
8222 or2
= gen_rtx (NE
, SImode
, compare_result2
, const0_rtx
);
8224 /* OR them together. */
8225 cmp
= gen_rtx_SET (VOIDmode
, or_result
,
8226 gen_rtx_COMPARE (CCEQmode
,
8227 gen_rtx_IOR (SImode
, or1
, or2
),
8229 compare_result
= or_result
;
8234 /* We only care about 1 bit (x1xx), so map everything to NE to
8235 maintain rtl sanity. We'll get to the right bit (x1xx) at
8236 code output time. */
8237 if (code
== NE
|| code
== LTGT
)
8238 /* Do the inverse here because we have no cmpne
8239 instruction. We use the cmpeq instruction and expect
8240 to get a 0 instead. */
8249 emit_insn (gen_rtx_SET (VOIDmode
, compare_result
,
8250 gen_rtx_COMPARE (comp_mode
,
8252 rs6000_compare_op1
)));
8254 /* Some kinds of FP comparisons need an OR operation;
8255 except for flag_unsafe_math_optimizations we don't bother. */
8256 if (rs6000_compare_fp_p
8257 && ! flag_unsafe_math_optimizations
8258 && ! (TARGET_HARD_FLOAT
&& TARGET_SPE
)
8259 && (code
== LE
|| code
== GE
8260 || code
== UNEQ
|| code
== LTGT
8261 || code
== UNGT
|| code
== UNLT
))
8263 enum rtx_code or1
, or2
;
8264 rtx or1_rtx
, or2_rtx
, compare2_rtx
;
8265 rtx or_result
= gen_reg_rtx (CCEQmode
);
8269 case LE
: or1
= LT
; or2
= EQ
; break;
8270 case GE
: or1
= GT
; or2
= EQ
; break;
8271 case UNEQ
: or1
= UNORDERED
; or2
= EQ
; break;
8272 case LTGT
: or1
= LT
; or2
= GT
; break;
8273 case UNGT
: or1
= UNORDERED
; or2
= GT
; break;
8274 case UNLT
: or1
= UNORDERED
; or2
= LT
; break;
8277 validate_condition_mode (or1
, comp_mode
);
8278 validate_condition_mode (or2
, comp_mode
);
8279 or1_rtx
= gen_rtx (or1
, SImode
, compare_result
, const0_rtx
);
8280 or2_rtx
= gen_rtx (or2
, SImode
, compare_result
, const0_rtx
);
8281 compare2_rtx
= gen_rtx_COMPARE (CCEQmode
,
8282 gen_rtx_IOR (SImode
, or1_rtx
, or2_rtx
),
8284 emit_insn (gen_rtx_SET (VOIDmode
, or_result
, compare2_rtx
));
8286 compare_result
= or_result
;
8290 validate_condition_mode (code
, GET_MODE (compare_result
));
8292 return gen_rtx (code
, VOIDmode
, compare_result
, const0_rtx
);
8296 /* Emit the RTL for an sCOND pattern. */
8299 rs6000_emit_sCOND (code
, result
)
8304 enum machine_mode op_mode
;
8306 condition_rtx
= rs6000_generate_compare (code
);
8308 op_mode
= GET_MODE (rs6000_compare_op0
);
8309 if (op_mode
== VOIDmode
)
8310 op_mode
= GET_MODE (rs6000_compare_op1
);
8312 if (TARGET_POWERPC64
&& (op_mode
== DImode
|| rs6000_compare_fp_p
))
8314 PUT_MODE (condition_rtx
, DImode
);
8315 convert_move (result
, condition_rtx
, 0);
8319 PUT_MODE (condition_rtx
, SImode
);
8320 emit_insn (gen_rtx_SET (VOIDmode
, result
, condition_rtx
));
8324 /* Emit a branch of kind CODE to location LOC. */
8327 rs6000_emit_cbranch (code
, loc
)
8331 rtx condition_rtx
, loc_ref
;
8333 condition_rtx
= rs6000_generate_compare (code
);
8334 loc_ref
= gen_rtx_LABEL_REF (VOIDmode
, loc
);
8335 emit_jump_insn (gen_rtx_SET (VOIDmode
, pc_rtx
,
8336 gen_rtx_IF_THEN_ELSE (VOIDmode
, condition_rtx
,
8340 /* Return the string to output a conditional branch to LABEL, which is
8341 the operand number of the label, or -1 if the branch is really a
8344 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
8345 condition code register and its mode specifies what kind of
8348 REVERSED is non-zero if we should reverse the sense of the comparison.
8350 INSN is the insn. */
8353 output_cbranch (op
, label
, reversed
, insn
)
8359 static char string
[64];
8360 enum rtx_code code
= GET_CODE (op
);
8361 rtx cc_reg
= XEXP (op
, 0);
8362 enum machine_mode mode
= GET_MODE (cc_reg
);
8363 int cc_regno
= REGNO (cc_reg
) - CR0_REGNO
;
8364 int need_longbranch
= label
!= NULL
&& get_attr_length (insn
) == 8;
8365 int really_reversed
= reversed
^ need_longbranch
;
8371 validate_condition_mode (code
, mode
);
8373 /* Work out which way this really branches. We could use
8374 reverse_condition_maybe_unordered here always but this
8375 makes the resulting assembler clearer. */
8376 if (really_reversed
)
8377 code
= rs6000_reverse_condition (mode
, code
);
8379 if ((TARGET_SPE
&& TARGET_HARD_FLOAT
) && mode
== CCFPmode
)
8381 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
8384 /* Opposite of GT. */
8386 else if (code
== NE
)
8394 /* Not all of these are actually distinct opcodes, but
8395 we distinguish them for clarity of the resulting assembler. */
8397 ccode
= "ne"; break;
8399 ccode
= "eq"; break;
8401 ccode
= "ge"; break;
8402 case GT
: case GTU
: case UNGT
:
8403 ccode
= "gt"; break;
8405 ccode
= "le"; break;
8406 case LT
: case LTU
: case UNLT
:
8407 ccode
= "lt"; break;
8408 case UNORDERED
: ccode
= "un"; break;
8409 case ORDERED
: ccode
= "nu"; break;
8410 case UNGE
: ccode
= "nl"; break;
8411 case UNLE
: ccode
= "ng"; break;
8416 /* Maybe we have a guess as to how likely the branch is.
8417 The old mnemonics don't have a way to specify this information. */
8419 note
= find_reg_note (insn
, REG_BR_PROB
, NULL_RTX
);
8420 if (note
!= NULL_RTX
)
8422 /* PROB is the difference from 50%. */
8423 int prob
= INTVAL (XEXP (note
, 0)) - REG_BR_PROB_BASE
/ 2;
8424 bool always_hint
= rs6000_cpu
!= PROCESSOR_POWER4
;
8426 /* Only hint for highly probable/improbable branches on newer
8427 cpus as static prediction overrides processor dynamic
8428 prediction. For older cpus we may as well always hint, but
8429 assume not taken for branches that are very close to 50% as a
8430 mispredicted taken branch is more expensive than a
8431 mispredicted not-taken branch. */
8433 || abs (prob
) > REG_BR_PROB_BASE
/ 100 * 48)
8435 if (abs (prob
) > REG_BR_PROB_BASE
/ 20
8436 && ((prob
> 0) ^ need_longbranch
))
8444 s
+= sprintf (s
, "{b%sr|b%slr%s} ", ccode
, ccode
, pred
);
8446 s
+= sprintf (s
, "{b%s|b%s%s} ", ccode
, ccode
, pred
);
8448 /* We need to escape any '%' characters in the reg_names string.
8449 Assume they'd only be the first character... */
8450 if (reg_names
[cc_regno
+ CR0_REGNO
][0] == '%')
8452 s
+= sprintf (s
, "%s", reg_names
[cc_regno
+ CR0_REGNO
]);
8456 /* If the branch distance was too far, we may have to use an
8457 unconditional branch to go the distance. */
8458 if (need_longbranch
)
8459 s
+= sprintf (s
, ",$+8\n\tb %s", label
);
8461 s
+= sprintf (s
, ",%s", label
);
8467 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
8468 operands of the last comparison is nonzero/true, FALSE_COND if it
8469 is zero/false. Return 0 if the hardware has no such operation. */
8472 rs6000_emit_cmove (dest
, op
, true_cond
, false_cond
)
8478 enum rtx_code code
= GET_CODE (op
);
8479 rtx op0
= rs6000_compare_op0
;
8480 rtx op1
= rs6000_compare_op1
;
8482 enum machine_mode compare_mode
= GET_MODE (op0
);
8483 enum machine_mode result_mode
= GET_MODE (dest
);
8486 /* These modes should always match. */
8487 if (GET_MODE (op1
) != compare_mode
8488 /* In the isel case however, we can use a compare immediate, so
8489 op1 may be a small constant. */
8490 && (!TARGET_ISEL
|| !short_cint_operand (op1
, VOIDmode
)))
8492 if (GET_MODE (true_cond
) != result_mode
)
8494 if (GET_MODE (false_cond
) != result_mode
)
8497 /* First, work out if the hardware can do this at all, or
8498 if it's too slow... */
8499 if (! rs6000_compare_fp_p
)
8502 return rs6000_emit_int_cmove (dest
, op
, true_cond
, false_cond
);
8506 /* Eliminate half of the comparisons by switching operands, this
8507 makes the remaining code simpler. */
8508 if (code
== UNLT
|| code
== UNGT
|| code
== UNORDERED
|| code
== NE
8509 || code
== LTGT
|| code
== LT
)
8511 code
= reverse_condition_maybe_unordered (code
);
8513 true_cond
= false_cond
;
8517 /* UNEQ and LTGT take four instructions for a comparison with zero,
8518 it'll probably be faster to use a branch here too. */
8522 if (GET_CODE (op1
) == CONST_DOUBLE
)
8523 REAL_VALUE_FROM_CONST_DOUBLE (c1
, op1
);
8525 /* We're going to try to implement comparions by performing
8526 a subtract, then comparing against zero. Unfortunately,
8527 Inf - Inf is NaN which is not zero, and so if we don't
8528 know that the operand is finite and the comparison
8529 would treat EQ different to UNORDERED, we can't do it. */
8530 if (! flag_unsafe_math_optimizations
8531 && code
!= GT
&& code
!= UNGE
8532 && (GET_CODE (op1
) != CONST_DOUBLE
|| target_isinf (c1
))
8533 /* Constructs of the form (a OP b ? a : b) are safe. */
8534 && ((! rtx_equal_p (op0
, false_cond
) && ! rtx_equal_p (op1
, false_cond
))
8535 || (! rtx_equal_p (op0
, true_cond
)
8536 && ! rtx_equal_p (op1
, true_cond
))))
8538 /* At this point we know we can use fsel. */
8540 /* Reduce the comparison to a comparison against zero. */
8541 temp
= gen_reg_rtx (compare_mode
);
8542 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8543 gen_rtx_MINUS (compare_mode
, op0
, op1
)));
8545 op1
= CONST0_RTX (compare_mode
);
8547 /* If we don't care about NaNs we can reduce some of the comparisons
8548 down to faster ones. */
8549 if (flag_unsafe_math_optimizations
)
8555 true_cond
= false_cond
;
8568 /* Now, reduce everything down to a GE. */
8575 temp
= gen_reg_rtx (compare_mode
);
8576 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
8581 temp
= gen_reg_rtx (compare_mode
);
8582 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_ABS (compare_mode
, op0
)));
8587 temp
= gen_reg_rtx (compare_mode
);
8588 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8589 gen_rtx_NEG (compare_mode
,
8590 gen_rtx_ABS (compare_mode
, op0
))));
8595 temp
= gen_reg_rtx (result_mode
);
8596 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8597 gen_rtx_IF_THEN_ELSE (result_mode
,
8598 gen_rtx_GE (VOIDmode
,
8600 true_cond
, false_cond
)));
8602 true_cond
= false_cond
;
8604 temp
= gen_reg_rtx (compare_mode
);
8605 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
8610 temp
= gen_reg_rtx (result_mode
);
8611 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8612 gen_rtx_IF_THEN_ELSE (result_mode
,
8613 gen_rtx_GE (VOIDmode
,
8615 true_cond
, false_cond
)));
8617 false_cond
= true_cond
;
8619 temp
= gen_reg_rtx (compare_mode
);
8620 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
8628 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
8629 gen_rtx_IF_THEN_ELSE (result_mode
,
8630 gen_rtx_GE (VOIDmode
,
8632 true_cond
, false_cond
)));
8636 /* Same as above, but for ints (isel). */
/* Emit an integer conditional move DEST = (OP ? TRUE_COND : FALSE_COND)
   using the PowerPC `isel' instruction.  The comparison operands come
   from the rs6000_compare_op0/op1 globals via rs6000_generate_compare.
   NOTE(review): several original source lines are elided in this chunk
   (declarations, braces, the early-return body); comments below cover
   only the visible statements.  */
8639 rs6000_emit_int_cmove (dest
, op
, true_cond
, false_cond
)
8645 rtx condition_rtx
, cr
;
8647 /* All isel implementations thus far are 32-bits. */
8648 if (GET_MODE (rs6000_compare_op0
) != SImode
)
8651 /* We still have to do the compare, because isel doesn't do a
8652 compare, it just looks at the CRx bits set by a previous compare
/* Generate the CR-setting compare and pick out the condition reg.  */
8654 condition_rtx
= rs6000_generate_compare (GET_CODE (op
));
8655 cr
= XEXP (condition_rtx
, 0);
/* CCmode selects the signed isel pattern; otherwise the unsigned one.  */
8657 if (GET_MODE (cr
) == CCmode
)
8658 emit_insn (gen_isel_signed (dest
, condition_rtx
,
8659 true_cond
, false_cond
, cr
));
8661 emit_insn (gen_isel_unsigned (dest
, condition_rtx
,
8662 true_cond
, false_cond
, cr
));
/* Output the assembly template for an isel instruction.  operands[1]
   is the condition; for codes isel cannot encode directly (GE, GEU,
   LE, LEU, NE) the condition is reversed in place and the two value
   operands %2/%3 are swapped in the returned template.  */
8668 output_isel (operands
)
8673 code
= GET_CODE (operands
[1]);
8674 if (code
== GE
|| code
== GEU
|| code
== LE
|| code
== LEU
|| code
== NE
)
/* Reverse the test so the directly-encodable form can be used.  */
8676 PUT_CODE (operands
[1], reverse_condition (code
));
8677 return "isel %0,%3,%2,%j1";
8680 return "isel %0,%2,%3,%j1";
/* Emit DEST = min/max (OP0, OP1), where CODE is SMIN/SMAX/UMIN/UMAX,
   implemented with emit_conditional_move.  NOTE(review): lines are
   elided in this chunk -- in particular the computation of the
   comparison code `c' and the failure branch; comments below cover
   only the visible statements.  */
8684 rs6000_emit_minmax (dest
, code
, op0
, op1
)
8690 enum machine_mode mode
= GET_MODE (op0
);
8694 if (code
== SMAX
|| code
== SMIN
)
/* MAX selects OP0 when the (elided) comparison `c' holds; MIN takes
   the swapped-operand call below.  */
8699 if (code
== SMAX
|| code
== UMAX
)
8700 target
= emit_conditional_move (dest
, c
, op0
, op1
, mode
,
8703 target
= emit_conditional_move (dest
, c
, op0
, op1
, mode
,
/* emit_conditional_move may fail; its handling is on an elided
   line -- TODO confirm against the full source.  */
8705 if (target
== NULL_RTX
)
/* Copy the result into DEST if the cmove produced another reg.  */
8708 emit_move_insn (dest
, target
);
8711 /* This page contains routines that are used to determine what the
8712 function prologue and epilogue code will do and write them out. */
8714 /* Return the first fixed-point register that is required to be
8715 saved. 32 if none. */
8718 first_reg_to_save ()
8722 /* Find lowest numbered live register. */
/* GPRs 13..31 are the call-saved range; the PIC offset table register
   is additionally treated as needing a save under V.4 -fpic and
   Darwin PIC even though it is call-used.  */
8723 for (first_reg
= 13; first_reg
<= 31; first_reg
++)
8724 if (regs_ever_live
[first_reg
]
8725 && (! call_used_regs
[first_reg
]
8726 || (first_reg
== RS6000_PIC_OFFSET_TABLE_REGNUM
8727 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1)
8728 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
)))))
/* NOTE(review): lines between the loop and this condition are elided;
   the visible test apparently forces the PIC register to count as the
   first saved register when the function uses the PIC offset table --
   confirm against the full source.  */
8733 && current_function_uses_pic_offset_table
8734 && first_reg
> RS6000_PIC_OFFSET_TABLE_REGNUM
)
8735 return RS6000_PIC_OFFSET_TABLE_REGNUM
;
8741 /* Similar, for FP regs. */
/* Return the first FP register (hard regno 32..63) that must be
   saved; FPRs 14..31 (regnos 46..63) are the call-saved range.
   Presumably returns 64 if none -- the return is on an elided line,
   but callers (rs6000_stack_info, debug_stack_info) compare against
   64.  */
8744 first_fp_reg_to_save ()
8748 /* Find lowest numbered live register. */
8749 for (first_reg
= 14 + 32; first_reg
<= 63; first_reg
++)
8750 if (regs_ever_live
[first_reg
])
8756 /* Similar, for AltiVec regs. */
/* Return the first AltiVec register that must be saved, or
   LAST_ALTIVEC_REGNO + 1 if none (or when the AltiVec ABI is off).
   The scan starts at FIRST_ALTIVEC_REGNO + 20, i.e. the call-saved
   v20..v31 range.  */
8759 first_altivec_reg_to_save ()
8763 /* Stack frame remains as is unless we are in AltiVec ABI. */
8764 if (! TARGET_ALTIVEC_ABI
)
8765 return LAST_ALTIVEC_REGNO
+ 1;
8767 /* Find lowest numbered live register. */
8768 for (i
= FIRST_ALTIVEC_REGNO
+ 20; i
<= LAST_ALTIVEC_REGNO
; ++i
)
8769 if (regs_ever_live
[i
])
8775 /* Return a 32-bit mask of the AltiVec registers we need to set in
8776 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
8777 the 32-bit word is 0. */
8780 compute_vrsave_mask ()
8782 unsigned int i
, mask
= 0;
8784 /* First, find out if we use _any_ altivec registers. */
8785 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
8786 if (regs_ever_live
[i
])
8787 mask
|= ALTIVEC_REG_BIT (i
);
/* NOTE(review): the "early exit" for mask == 0 referred to below sits
   on elided lines -- confirm against the full source.  */
8792 /* Next, add all registers that are call-clobbered. We do this
8793 because post-reload register optimizers such as regrename_optimize
8794 may choose to use them. They never change the register class
8795 chosen by reload, so cannot create new uses of altivec registers
8796 if there were none before, so the early exit above is safe. */
8797 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
8798 altivec registers not saved in the mask, which might well make the
8799 adjustments below more effective in eliding the save/restore of
8800 VRSAVE in small functions. */
8801 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
8802 if (call_used_regs
[i
])
8803 mask
|= ALTIVEC_REG_BIT (i
);
8805 /* Next, remove the argument registers from the set. These must
8806 be in the VRSAVE mask set by the caller, so we don't need to add
8807 them in again. More importantly, the mask we compute here is
8808 used to generate CLOBBERs in the set_vrsave insn, and we do not
8809 wish the argument registers to die. */
8810 for (i
= cfun
->args_info
.vregno
; i
>= ALTIVEC_ARG_MIN_REG
; --i
)
8811 mask
&= ~ALTIVEC_REG_BIT (i
);
8813 /* Similarly, remove the return value from the set. */
/* `yes' is filled in by the is_altivec_return_reg callback; its
   declaration and the guarding conditional are on elided lines.  */
8816 diddle_return_value (is_altivec_return_reg
, &yes
);
8818 mask
&= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN
);
/* diddle_return_value callback: set *XYES when the function's return
   value lives in the AltiVec return register (the assignment to *yes
   is on an elided line).  */
8825 is_altivec_return_reg (reg
, xyes
)
8829 bool *yes
= (bool *) xyes
;
8830 if (REGNO (reg
) == ALTIVEC_ARG_RETURN
)
8835 /* Calculate the stack information for the current function. This is
8836 complicated by having two separate calling sequences, the AIX calling
8837 sequence and the V.4 calling sequence.
8839 AIX (and Darwin/Mac OS X) stack frames look like:
8841 SP----> +---------------------------------------+
8842 | back chain to caller | 0 0
8843 +---------------------------------------+
8844 | saved CR | 4 8 (8-11)
8845 +---------------------------------------+
8847 +---------------------------------------+
8848 | reserved for compilers | 12 24
8849 +---------------------------------------+
8850 | reserved for binders | 16 32
8851 +---------------------------------------+
8852 | saved TOC pointer | 20 40
8853 +---------------------------------------+
8854 | Parameter save area (P) | 24 48
8855 +---------------------------------------+
8856 | Alloca space (A) | 24+P etc.
8857 +---------------------------------------+
8858 | Local variable space (L) | 24+P+A
8859 +---------------------------------------+
8860 | Float/int conversion temporary (X) | 24+P+A+L
8861 +---------------------------------------+
8862 | Save area for AltiVec registers (W) | 24+P+A+L+X
8863 +---------------------------------------+
8864 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
8865 +---------------------------------------+
8866 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
8867 +---------------------------------------+
8868 | Save area for GP registers (G) | 24+P+A+L+X+W+Y+Z
8869 +---------------------------------------+
8870 | Save area for FP registers (F) | 24+P+A+L+X+W+Y+Z+G
8871 +---------------------------------------+
8872 old SP->| back chain to caller's caller |
8873 +---------------------------------------+
8875 The required alignment for AIX configurations is two words (i.e., 8
8879 V.4 stack frames look like:
8881 SP----> +---------------------------------------+
8882 | back chain to caller | 0
8883 +---------------------------------------+
8884 | caller's saved LR | 4
8885 +---------------------------------------+
8886 | Parameter save area (P) | 8
8887 +---------------------------------------+
8888 | Alloca space (A) | 8+P
8889 +---------------------------------------+
8890 | Varargs save area (V) | 8+P+A
8891 +---------------------------------------+
8892 | Local variable space (L) | 8+P+A+V
8893 +---------------------------------------+
8894 | Float/int conversion temporary (X) | 8+P+A+V+L
8895 +---------------------------------------+
8896 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
8897 +---------------------------------------+
8898 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
8899 +---------------------------------------+
8900 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
8901 +---------------------------------------+
8902 | SPE: area for 64-bit GP registers |
8903 +---------------------------------------+
8904 | SPE alignment padding |
8905 +---------------------------------------+
8906 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
8907 +---------------------------------------+
8908 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
8909 +---------------------------------------+
8910 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
8911 +---------------------------------------+
8912 old SP->| back chain to caller's caller |
8913 +---------------------------------------+
8915 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
8916 given. (But note below and in sysv4.h that we require only 8 and
8917 may round up the size of our stack frame anyways. The historical
8918 reason is early versions of powerpc-linux which didn't properly
8919 align the stack at program startup. A happy side-effect is that
8920 -mno-eabi libraries can be used with -meabi programs.)
8922 The EABI configuration defaults to the V.4 layout, unless
8923 -mcall-aix is used, in which case the AIX layout is used. However,
8924 the stack alignment requirements may differ. If -mno-eabi is not
8925 given, the required stack alignment is 8 bytes; if -mno-eabi is
8926 given, the required alignment is 16 bytes. (But see V.4 comment
8929 #ifndef ABI_STACK_BOUNDARY
8930 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
8934 rs6000_stack_info ()
8936 static rs6000_stack_t info
, zero_info
;
8937 rs6000_stack_t
*info_ptr
= &info
;
8938 int reg_size
= TARGET_POWERPC64
? 8 : 4;
8939 enum rs6000_abi abi
;
8943 /* Zero all fields portably. */
8946 /* Select which calling sequence. */
8947 info_ptr
->abi
= abi
= DEFAULT_ABI
;
8949 /* Calculate which registers need to be saved & save area size. */
8950 info_ptr
->first_gp_reg_save
= first_reg_to_save ();
8951 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
8952 even if it currently looks like we won't. */
8953 if (((TARGET_TOC
&& TARGET_MINIMAL_TOC
)
8954 || (flag_pic
== 1 && abi
== ABI_V4
)
8955 || (flag_pic
&& abi
== ABI_DARWIN
))
8956 && info_ptr
->first_gp_reg_save
> RS6000_PIC_OFFSET_TABLE_REGNUM
)
8957 info_ptr
->gp_size
= reg_size
* (32 - RS6000_PIC_OFFSET_TABLE_REGNUM
);
8959 info_ptr
->gp_size
= reg_size
* (32 - info_ptr
->first_gp_reg_save
);
8961 /* For the SPE, we have an additional upper 32-bits on each GPR.
8962 Ideally we should save the entire 64-bits only when the upper
8963 half is used in SIMD instructions. Since we only record
8964 registers live (not the size they are used in), this proves
8965 difficult because we'd have to traverse the instruction chain at
8966 the right time, taking reload into account. This is a real pain,
8967 so we opt to save the GPRs in 64-bits always. Anyone overly
8968 concerned with frame size can fix this. ;-).
8970 So... since we save all GPRs (except the SP) in 64-bits, the
8971 traditional GP save area will be empty. */
8973 info_ptr
->gp_size
= 0;
8975 info_ptr
->first_fp_reg_save
= first_fp_reg_to_save ();
8976 info_ptr
->fp_size
= 8 * (64 - info_ptr
->first_fp_reg_save
);
8978 info_ptr
->first_altivec_reg_save
= first_altivec_reg_to_save ();
8979 info_ptr
->altivec_size
= 16 * (LAST_ALTIVEC_REGNO
+ 1
8980 - info_ptr
->first_altivec_reg_save
);
8982 /* Does this function call anything? */
8983 info_ptr
->calls_p
= (! current_function_is_leaf
8984 || cfun
->machine
->ra_needs_full_frame
);
8986 /* Determine if we need to save the link register. */
8987 if (rs6000_ra_ever_killed ()
8988 || (DEFAULT_ABI
== ABI_AIX
&& current_function_profile
)
8989 #ifdef TARGET_RELOCATABLE
8990 || (TARGET_RELOCATABLE
&& (get_pool_size () != 0))
8992 || (info_ptr
->first_fp_reg_save
!= 64
8993 && !FP_SAVE_INLINE (info_ptr
->first_fp_reg_save
))
8994 || info_ptr
->first_altivec_reg_save
<= LAST_ALTIVEC_REGNO
8995 || (abi
== ABI_V4
&& current_function_calls_alloca
)
8996 || (DEFAULT_ABI
== ABI_DARWIN
8998 && current_function_uses_pic_offset_table
)
8999 || info_ptr
->calls_p
)
9001 info_ptr
->lr_save_p
= 1;
9002 regs_ever_live
[LINK_REGISTER_REGNUM
] = 1;
9005 /* Determine if we need to save the condition code registers. */
9006 if (regs_ever_live
[CR2_REGNO
]
9007 || regs_ever_live
[CR3_REGNO
]
9008 || regs_ever_live
[CR4_REGNO
])
9010 info_ptr
->cr_save_p
= 1;
9012 info_ptr
->cr_size
= reg_size
;
9015 /* If the current function calls __builtin_eh_return, then we need
9016 to allocate stack space for registers that will hold data for
9017 the exception handler. */
9018 if (current_function_calls_eh_return
)
9021 for (i
= 0; EH_RETURN_DATA_REGNO (i
) != INVALID_REGNUM
; ++i
)
9024 /* SPE saves EH registers in 64-bits. */
9025 ehrd_size
= i
* (TARGET_SPE_ABI
? UNITS_PER_SPE_WORD
: UNITS_PER_WORD
);
9030 /* Determine various sizes. */
9031 info_ptr
->reg_size
= reg_size
;
9032 info_ptr
->fixed_size
= RS6000_SAVE_AREA
;
9033 info_ptr
->varargs_size
= RS6000_VARARGS_AREA
;
9034 info_ptr
->vars_size
= RS6000_ALIGN (get_frame_size (), 8);
9035 info_ptr
->parm_size
= RS6000_ALIGN (current_function_outgoing_args_size
,
9039 info_ptr
->spe_gp_size
= 8 * (32 - info_ptr
->first_gp_reg_save
);
9041 info_ptr
->spe_gp_size
= 0;
9043 if (TARGET_ALTIVEC_ABI
&& TARGET_ALTIVEC_VRSAVE
)
9045 info_ptr
->vrsave_mask
= compute_vrsave_mask ();
9046 info_ptr
->vrsave_size
= info_ptr
->vrsave_mask
? 4 : 0;
9050 info_ptr
->vrsave_mask
= 0;
9051 info_ptr
->vrsave_size
= 0;
9054 /* Calculate the offsets. */
9062 case ABI_AIX_NODESC
:
9064 info_ptr
->fp_save_offset
= - info_ptr
->fp_size
;
9065 info_ptr
->gp_save_offset
= info_ptr
->fp_save_offset
- info_ptr
->gp_size
;
9067 if (TARGET_ALTIVEC_ABI
)
9069 info_ptr
->vrsave_save_offset
9070 = info_ptr
->gp_save_offset
- info_ptr
->vrsave_size
;
9072 /* Align stack so vector save area is on a quadword boundary. */
9073 if (info_ptr
->altivec_size
!= 0)
9074 info_ptr
->altivec_padding_size
9075 = 16 - (-info_ptr
->vrsave_save_offset
% 16);
9077 info_ptr
->altivec_padding_size
= 0;
9079 info_ptr
->altivec_save_offset
9080 = info_ptr
->vrsave_save_offset
9081 - info_ptr
->altivec_padding_size
9082 - info_ptr
->altivec_size
;
9084 /* Adjust for AltiVec case. */
9085 info_ptr
->ehrd_offset
= info_ptr
->altivec_save_offset
- ehrd_size
;
9088 info_ptr
->ehrd_offset
= info_ptr
->gp_save_offset
- ehrd_size
;
9089 info_ptr
->cr_save_offset
= reg_size
; /* first word when 64-bit. */
9090 info_ptr
->lr_save_offset
= 2*reg_size
;
9094 info_ptr
->fp_save_offset
= - info_ptr
->fp_size
;
9095 info_ptr
->gp_save_offset
= info_ptr
->fp_save_offset
- info_ptr
->gp_size
;
9096 info_ptr
->cr_save_offset
= info_ptr
->gp_save_offset
- info_ptr
->cr_size
;
9100 /* Align stack so SPE GPR save area is aligned on a
9101 double-word boundary. */
9102 if (info_ptr
->spe_gp_size
!= 0)
9103 info_ptr
->spe_padding_size
9104 = 8 - (-info_ptr
->cr_save_offset
% 8);
9106 info_ptr
->spe_padding_size
= 0;
9108 info_ptr
->spe_gp_save_offset
9109 = info_ptr
->cr_save_offset
9110 - info_ptr
->spe_padding_size
9111 - info_ptr
->spe_gp_size
;
9113 /* Adjust for SPE case. */
9114 info_ptr
->toc_save_offset
9115 = info_ptr
->spe_gp_save_offset
- info_ptr
->toc_size
;
9117 else if (TARGET_ALTIVEC_ABI
)
9119 info_ptr
->vrsave_save_offset
9120 = info_ptr
->cr_save_offset
- info_ptr
->vrsave_size
;
9122 /* Align stack so vector save area is on a quadword boundary. */
9123 if (info_ptr
->altivec_size
!= 0)
9124 info_ptr
->altivec_padding_size
9125 = 16 - (-info_ptr
->vrsave_save_offset
% 16);
9127 info_ptr
->altivec_padding_size
= 0;
9129 info_ptr
->altivec_save_offset
9130 = info_ptr
->vrsave_save_offset
9131 - info_ptr
->altivec_padding_size
9132 - info_ptr
->altivec_size
;
9134 /* Adjust for AltiVec case. */
9135 info_ptr
->toc_save_offset
9136 = info_ptr
->altivec_save_offset
- info_ptr
->toc_size
;
9139 info_ptr
->toc_save_offset
= info_ptr
->cr_save_offset
- info_ptr
->toc_size
;
9140 info_ptr
->ehrd_offset
= info_ptr
->toc_save_offset
- ehrd_size
;
9141 info_ptr
->lr_save_offset
= reg_size
;
9145 info_ptr
->save_size
= RS6000_ALIGN (info_ptr
->fp_size
9147 + info_ptr
->altivec_size
9148 + info_ptr
->altivec_padding_size
9149 + info_ptr
->vrsave_size
9150 + info_ptr
->spe_gp_size
9151 + info_ptr
->spe_padding_size
9155 + info_ptr
->vrsave_size
9156 + info_ptr
->toc_size
,
9157 (TARGET_ALTIVEC_ABI
|| ABI_DARWIN
)
9160 total_raw_size
= (info_ptr
->vars_size
9161 + info_ptr
->parm_size
9162 + info_ptr
->save_size
9163 + info_ptr
->varargs_size
9164 + info_ptr
->fixed_size
);
9166 info_ptr
->total_size
=
9167 RS6000_ALIGN (total_raw_size
, ABI_STACK_BOUNDARY
/ BITS_PER_UNIT
);
9169 /* Determine if we need to allocate any stack frame:
9171 For AIX we need to push the stack if a frame pointer is needed
9172 (because the stack might be dynamically adjusted), if we are
9173 debugging, if we make calls, or if the sum of fp_save, gp_save,
9174 and local variables are more than the space needed to save all
9175 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
9176 + 18*8 = 288 (GPR13 reserved).
9178 For V.4 we don't have the stack cushion that AIX uses, but assume
9179 that the debugger can handle stackless frames. */
9181 if (info_ptr
->calls_p
)
9182 info_ptr
->push_p
= 1;
9184 else if (abi
== ABI_V4
)
9185 info_ptr
->push_p
= total_raw_size
> info_ptr
->fixed_size
;
9188 info_ptr
->push_p
= (frame_pointer_needed
9189 || (abi
!= ABI_DARWIN
&& write_symbols
!= NO_DEBUG
)
9190 || ((total_raw_size
- info_ptr
->fixed_size
)
9191 > (TARGET_32BIT
? 220 : 288)));
9193 /* Zero offsets if we're not saving those registers. */
9194 if (info_ptr
->fp_size
== 0)
9195 info_ptr
->fp_save_offset
= 0;
9197 if (info_ptr
->gp_size
== 0)
9198 info_ptr
->gp_save_offset
= 0;
9200 if (! TARGET_ALTIVEC_ABI
|| info_ptr
->altivec_size
== 0)
9201 info_ptr
->altivec_save_offset
= 0;
9203 if (! TARGET_ALTIVEC_ABI
|| info_ptr
->vrsave_mask
== 0)
9204 info_ptr
->vrsave_save_offset
= 0;
9206 if (! TARGET_SPE_ABI
|| info_ptr
->spe_gp_size
== 0)
9207 info_ptr
->spe_gp_save_offset
= 0;
9209 if (! info_ptr
->lr_save_p
)
9210 info_ptr
->lr_save_offset
= 0;
9212 if (! info_ptr
->cr_save_p
)
9213 info_ptr
->cr_save_offset
= 0;
9215 if (! info_ptr
->toc_save_p
)
9216 info_ptr
->toc_save_offset
= 0;
9222 debug_stack_info (info
)
9223 rs6000_stack_t
*info
;
9225 const char *abi_string
;
9228 info
= rs6000_stack_info ();
9230 fprintf (stderr
, "\nStack information for function %s:\n",
9231 ((current_function_decl
&& DECL_NAME (current_function_decl
))
9232 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl
))
9237 default: abi_string
= "Unknown"; break;
9238 case ABI_NONE
: abi_string
= "NONE"; break;
9240 case ABI_AIX_NODESC
: abi_string
= "AIX"; break;
9241 case ABI_DARWIN
: abi_string
= "Darwin"; break;
9242 case ABI_V4
: abi_string
= "V.4"; break;
9245 fprintf (stderr
, "\tABI = %5s\n", abi_string
);
9247 if (TARGET_ALTIVEC_ABI
)
9248 fprintf (stderr
, "\tALTIVEC ABI extensions enabled.\n");
9251 fprintf (stderr
, "\tSPE ABI extensions enabled.\n");
9253 if (info
->first_gp_reg_save
!= 32)
9254 fprintf (stderr
, "\tfirst_gp_reg_save = %5d\n", info
->first_gp_reg_save
);
9256 if (info
->first_fp_reg_save
!= 64)
9257 fprintf (stderr
, "\tfirst_fp_reg_save = %5d\n", info
->first_fp_reg_save
);
9259 if (info
->first_altivec_reg_save
<= LAST_ALTIVEC_REGNO
)
9260 fprintf (stderr
, "\tfirst_altivec_reg_save = %5d\n",
9261 info
->first_altivec_reg_save
);
9263 if (info
->lr_save_p
)
9264 fprintf (stderr
, "\tlr_save_p = %5d\n", info
->lr_save_p
);
9266 if (info
->cr_save_p
)
9267 fprintf (stderr
, "\tcr_save_p = %5d\n", info
->cr_save_p
);
9269 if (info
->toc_save_p
)
9270 fprintf (stderr
, "\ttoc_save_p = %5d\n", info
->toc_save_p
);
9272 if (info
->vrsave_mask
)
9273 fprintf (stderr
, "\tvrsave_mask = 0x%x\n", info
->vrsave_mask
);
9276 fprintf (stderr
, "\tpush_p = %5d\n", info
->push_p
);
9279 fprintf (stderr
, "\tcalls_p = %5d\n", info
->calls_p
);
9281 if (info
->gp_save_offset
)
9282 fprintf (stderr
, "\tgp_save_offset = %5d\n", info
->gp_save_offset
);
9284 if (info
->fp_save_offset
)
9285 fprintf (stderr
, "\tfp_save_offset = %5d\n", info
->fp_save_offset
);
9287 if (info
->altivec_save_offset
)
9288 fprintf (stderr
, "\taltivec_save_offset = %5d\n",
9289 info
->altivec_save_offset
);
9291 if (info
->spe_gp_save_offset
)
9292 fprintf (stderr
, "\tspe_gp_save_offset = %5d\n",
9293 info
->spe_gp_save_offset
);
9295 if (info
->vrsave_save_offset
)
9296 fprintf (stderr
, "\tvrsave_save_offset = %5d\n",
9297 info
->vrsave_save_offset
);
9299 if (info
->lr_save_offset
)
9300 fprintf (stderr
, "\tlr_save_offset = %5d\n", info
->lr_save_offset
);
9302 if (info
->cr_save_offset
)
9303 fprintf (stderr
, "\tcr_save_offset = %5d\n", info
->cr_save_offset
);
9305 if (info
->toc_save_offset
)
9306 fprintf (stderr
, "\ttoc_save_offset = %5d\n", info
->toc_save_offset
);
9308 if (info
->varargs_save_offset
)
9309 fprintf (stderr
, "\tvarargs_save_offset = %5d\n", info
->varargs_save_offset
);
9311 if (info
->total_size
)
9312 fprintf (stderr
, "\ttotal_size = %5d\n", info
->total_size
);
9314 if (info
->varargs_size
)
9315 fprintf (stderr
, "\tvarargs_size = %5d\n", info
->varargs_size
);
9317 if (info
->vars_size
)
9318 fprintf (stderr
, "\tvars_size = %5d\n", info
->vars_size
);
9320 if (info
->parm_size
)
9321 fprintf (stderr
, "\tparm_size = %5d\n", info
->parm_size
);
9323 if (info
->fixed_size
)
9324 fprintf (stderr
, "\tfixed_size = %5d\n", info
->fixed_size
);
9327 fprintf (stderr
, "\tgp_size = %5d\n", info
->gp_size
);
9329 if (info
->spe_gp_size
)
9330 fprintf (stderr
, "\tspe_gp_size = %5d\n", info
->spe_gp_size
);
9333 fprintf (stderr
, "\tfp_size = %5d\n", info
->fp_size
);
9335 if (info
->altivec_size
)
9336 fprintf (stderr
, "\taltivec_size = %5d\n", info
->altivec_size
);
9338 if (info
->vrsave_size
)
9339 fprintf (stderr
, "\tvrsave_size = %5d\n", info
->vrsave_size
);
9341 if (info
->altivec_padding_size
)
9342 fprintf (stderr
, "\taltivec_padding_size= %5d\n",
9343 info
->altivec_padding_size
);
9345 if (info
->spe_padding_size
)
9346 fprintf (stderr
, "\tspe_padding_size = %5d\n",
9347 info
->spe_padding_size
);
9350 fprintf (stderr
, "\tlr_size = %5d\n", info
->lr_size
);
9353 fprintf (stderr
, "\tcr_size = %5d\n", info
->cr_size
);
9356 fprintf (stderr
, "\ttoc_size = %5d\n", info
->toc_size
);
9358 if (info
->save_size
)
9359 fprintf (stderr
, "\tsave_size = %5d\n", info
->save_size
);
9361 if (info
->reg_size
!= 4)
9362 fprintf (stderr
, "\treg_size = %5d\n", info
->reg_size
);
9364 fprintf (stderr
, "\n");
/* Return an RTX for the return address of frame COUNT levels up,
   given FRAME.  NOTE(review): several lines are elided; only the
   slow path (count != 0 or PIC) and the LR fast path are visible.  */
9368 rs6000_return_addr (count
, frame
)
9372 /* Currently we don't optimize very well between prolog and body
9373 code and for PIC code the code can be actually quite bad, so
9374 don't try to be too clever here. */
9375 if (count
!= 0 || flag_pic
!= 0)
/* Force a full stack frame so the return address can be loaded from
   memory at the stack slot below.  */
9377 cfun
->machine
->ra_needs_full_frame
= 1;
9384 plus_constant (copy_to_reg
9385 (gen_rtx_MEM (Pmode
,
9386 memory_address (Pmode
, frame
))),
9387 RETURN_ADDRESS_OFFSET
)));
/* Fast path: the return address is the initial value of LR.  */
9390 return get_hard_reg_initial_val (Pmode
, LINK_REGISTER_REGNUM
);
9393 /* Say whether a function is a candidate for sibcall handling or not.
9394 We do not allow indirect calls to be optimized into sibling calls.
9395 Also, we can't do it if there are any vector parameters; there's
9396 nowhere to put the VRsave code so it works; note that functions with
9397 vector parameters are required to have a prototype, so the argument
9398 type info must be available here. (The tail recursion case can work
9399 with vector parameters, but there's no way to distinguish here.) */
9401 function_ok_for_sibcall (fndecl
)
/* Reject candidates with any vector argument when VRSAVE is in use
   (the rejection body is on an elided line).  */
9407 if (TARGET_ALTIVEC_VRSAVE
)
9409 for (type
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
9410 type
; type
= TREE_CHAIN (type
))
9412 if (TREE_CODE (TREE_VALUE (type
)) == VECTOR_TYPE
)
/* On Darwin, or when the callee binds locally, allow the sibcall
   unless a `longcall' attribute (with no overriding `shortcall')
   forces a long branch sequence.  */
9416 if (DEFAULT_ABI
== ABI_DARWIN
9417 || (*targetm
.binds_local_p
) (fndecl
))
9419 tree attr_list
= TYPE_ATTRIBUTES (TREE_TYPE (fndecl
));
9421 if (!lookup_attribute ("longcall", attr_list
)
9422 || lookup_attribute ("shortcall", attr_list
))
9429 /* function rewritten to handle sibcalls */
/* Return nonzero if the link register is clobbered in a way that
   requires saving it in the prologue.  Thunks, sibling calls and
   REG_MAYBE_DEAD-annotated prologue stores are deliberately not
   counted as kills.  */
9431 rs6000_ra_ever_killed ()
9437 #ifdef ASM_OUTPUT_MI_THUNK
9438 if (current_function_is_thunk
)
9441 /* regs_ever_live has LR marked as used if any sibcalls
9442 are present. Which it is, but this should not force
9443 saving and restoring in the prologue/epilog. Likewise,
9444 reg_set_between_p thinks a sibcall clobbers LR, so
9445 that is inappropriate. */
9446 /* Also, the prologue can generate a store into LR that
9447 doesn't really count, like this:
9449 bcl to set PIC register
9452 When we're called from the epilog, we need to avoid counting
9453 this as a store; thus we ignore any insns with a REG_MAYBE_DEAD note. */
/* Scan the topmost insn sequence; the assignment of `top' between the
   push/pop below is on an elided line.  */
9455 push_topmost_sequence ();
9457 pop_topmost_sequence ();
9458 reg
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
9460 for (insn
= NEXT_INSN (top
); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
/* Auto-inc uses of LR, real (non-sibling) calls, and sets of LR not
   marked REG_MAYBE_DEAD count as kills; the branch bodies returning 1
   are on elided lines.  */
9464 if (FIND_REG_INC_NOTE (insn
, reg
))
9466 else if (GET_CODE (insn
) == CALL_INSN
9467 && !SIBLING_CALL_P (insn
))
9469 else if (set_of (reg
, insn
) != NULL_RTX
9470 && find_reg_note (insn
, REG_MAYBE_DEAD
, NULL_RTX
) == 0)
9477 /* Add a REG_MAYBE_DEAD note to the insn. */
/* Mark INSN so that rs6000_ra_ever_killed (and flow) will ignore its
   store as a "maybe dead" prologue artifact.  The second EXPR_LIST
   argument is on an elided line.  */
9479 rs6000_maybe_dead (insn
)
9482 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
,
9487 /* Emit instructions needed to load the TOC register.
9488 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9489 a constant pool; or for SVR4 -fpic. */
9492 rs6000_emit_load_toc_table (fromprolog
)
9496 dest
= gen_rtx_REG (Pmode
, RS6000_PIC_OFFSET_TABLE_REGNUM
);
9498 if (TARGET_ELF
&& DEFAULT_ABI
!= ABI_AIX
)
9500 if (DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1)
9502 rtx temp
= (fromprolog
9503 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
9504 : gen_reg_rtx (Pmode
));
9505 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp
)));
9506 rs6000_maybe_dead (emit_move_insn (dest
, temp
));
9508 else if (flag_pic
== 2)
9511 rtx tempLR
= (fromprolog
9512 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
9513 : gen_reg_rtx (Pmode
));
9514 rtx temp0
= (fromprolog
9515 ? gen_rtx_REG (Pmode
, 0)
9516 : gen_reg_rtx (Pmode
));
9519 /* possibly create the toc section */
9520 if (! toc_initialized
)
9523 function_section (current_function_decl
);
9530 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCF", rs6000_pic_labelno
);
9531 symF
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9533 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCL", rs6000_pic_labelno
);
9534 symL
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9536 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR
,
9538 rs6000_maybe_dead (emit_move_insn (dest
, tempLR
));
9539 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0
, dest
,
9546 static int reload_toc_labelno
= 0;
9548 tocsym
= gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
);
9550 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCG", reload_toc_labelno
++);
9551 symF
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9553 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR
,
9556 rs6000_maybe_dead (emit_move_insn (dest
, tempLR
));
9557 rs6000_maybe_dead (emit_move_insn (temp0
,
9558 gen_rtx_MEM (Pmode
, dest
)));
9560 rs6000_maybe_dead (emit_insn (gen_addsi3 (dest
, temp0
, dest
)));
9562 else if (flag_pic
== 0 && TARGET_MINIMAL_TOC
)
9564 /* This is for AIX code running in non-PIC ELF. */
9567 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCTOC", 1);
9568 realsym
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9570 rs6000_maybe_dead (emit_insn (gen_elf_high (dest
, realsym
)));
9571 rs6000_maybe_dead (emit_insn (gen_elf_low (dest
, dest
, realsym
)));
9579 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest
)));
9581 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest
)));
/* Return the alias set used for TOC references, creating it lazily on
   first use (the guard testing `set' and the return statement are on
   elided lines).  */
9586 get_TOC_alias_set ()
9588 static int set
= -1;
9590 set
= new_alias_set ();
9594 /* This returns nonzero if the current function uses the TOC. This is
9595 determined by the presence of (unspec ... 7), which is generated by
9596 the various load_toc_* patterns. */
9603 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
9606 rtx pat
= PATTERN (insn
);
9609 if (GET_CODE (pat
) == PARALLEL
)
9610 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
9611 if (GET_CODE (XVECEXP (PATTERN (insn
), 0, i
)) == UNSPEC
9612 && XINT (XVECEXP (PATTERN (insn
), 0, i
), 1) == 7)
/* Build the RTL address (TOC_REGISTER + (const (SYMBOL - toc_label)))
   used to reference SYMBOL through the TOC.  */
9619 create_TOC_reference (symbol
)
9622 return gen_rtx_PLUS (Pmode
,
9623 gen_rtx_REG (Pmode
, TOC_REGISTER
),
9624 gen_rtx_CONST (Pmode
,
9625 gen_rtx_MINUS (Pmode
, symbol
,
9626 gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
))));
9630 /* __throw will restore its own return address to be the same as the
9631 return address of the function that the throw is being made to.
9632 This is unfortunate, because we want to check the original
9633 return address to see if we need to restore the TOC.
9634 So we have to squirrel it away here.
9635 This is used only in compiling __throw and __rethrow.
9637 Most of this code should be removed by CSE. */
9638 static rtx insn_after_throw
;
9640 /* This does the saving... */
9642 rs6000_aix_emit_builtin_unwind_init ()
9645 rtx stack_top
= gen_reg_rtx (Pmode
);
9646 rtx opcode_addr
= gen_reg_rtx (Pmode
);
9648 insn_after_throw
= gen_reg_rtx (SImode
);
9650 mem
= gen_rtx_MEM (Pmode
, hard_frame_pointer_rtx
);
9651 emit_move_insn (stack_top
, mem
);
9653 mem
= gen_rtx_MEM (Pmode
,
9654 gen_rtx_PLUS (Pmode
, stack_top
,
9655 GEN_INT (2 * GET_MODE_SIZE (Pmode
))));
9656 emit_move_insn (opcode_addr
, mem
);
9657 emit_move_insn (insn_after_throw
, gen_rtx_MEM (SImode
, opcode_addr
));
9660 /* Emit insns to _restore_ the TOC register, at runtime (specifically
9661 in _eh.o). Only used on AIX.
9663 The idea is that on AIX, function calls look like this:
9664 bl somefunction-trampoline
9668 somefunction-trampoline:
9670 ... load function address in the count register ...
9672 or like this, if the linker determines that this is not a cross-module call
9673 and so the TOC need not be restored:
9676 or like this, if the compiler could determine that this is not a
9679 now, the tricky bit here is that register 2 is saved and restored
9680 by the _linker_, so we can't readily generate debugging information
9681 for it. So we need to go back up the call chain looking at the
9682 insns at return addresses to see which calls saved the TOC register
9683 and so see where it gets restored from.
9685 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
9686 just before the actual epilogue.
9688 On the bright side, this incurs no space or time overhead unless an
9689 exception is thrown, except for the extra code in libgcc.a.
9691 The parameter STACKSIZE is a register containing (at runtime)
9692 the amount to be popped off the stack in addition to the stack frame
9693 of this routine (which will be __throw or __rethrow, and so is
9694 guaranteed to have a stack frame). */
9697 rs6000_emit_eh_toc_restore (stacksize
)
9701 rtx bottom_of_stack
= gen_reg_rtx (Pmode
);
9702 rtx tocompare
= gen_reg_rtx (SImode
);
9703 rtx opcode
= gen_reg_rtx (SImode
);
9704 rtx opcode_addr
= gen_reg_rtx (Pmode
);
9706 rtx loop_start
= gen_label_rtx ();
9707 rtx no_toc_restore_needed
= gen_label_rtx ();
9708 rtx loop_exit
= gen_label_rtx ();
9710 mem
= gen_rtx_MEM (Pmode
, hard_frame_pointer_rtx
);
9711 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
9712 emit_move_insn (bottom_of_stack
, mem
);
9714 top_of_stack
= expand_binop (Pmode
, add_optab
,
9715 bottom_of_stack
, stacksize
,
9716 NULL_RTX
, 1, OPTAB_WIDEN
);
9718 emit_move_insn (tocompare
, gen_int_mode (TARGET_32BIT
? 0x80410014
9719 : 0xE8410028, SImode
));
9721 if (insn_after_throw
== NULL_RTX
)
9723 emit_move_insn (opcode
, insn_after_throw
);
9725 emit_note (NULL
, NOTE_INSN_LOOP_BEG
);
9726 emit_label (loop_start
);
9728 do_compare_rtx_and_jump (opcode
, tocompare
, NE
, 1,
9729 SImode
, NULL_RTX
, NULL_RTX
,
9730 no_toc_restore_needed
);
9732 mem
= gen_rtx_MEM (Pmode
,
9733 gen_rtx_PLUS (Pmode
, bottom_of_stack
,
9734 GEN_INT (5 * GET_MODE_SIZE (Pmode
))));
9735 emit_move_insn (gen_rtx_REG (Pmode
, 2), mem
);
9737 emit_label (no_toc_restore_needed
);
9738 do_compare_rtx_and_jump (top_of_stack
, bottom_of_stack
, EQ
, 1,
9739 Pmode
, NULL_RTX
, NULL_RTX
,
9742 mem
= gen_rtx_MEM (Pmode
, bottom_of_stack
);
9743 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
9744 emit_move_insn (bottom_of_stack
, mem
);
9746 mem
= gen_rtx_MEM (Pmode
,
9747 gen_rtx_PLUS (Pmode
, bottom_of_stack
,
9748 GEN_INT (2 * GET_MODE_SIZE (Pmode
))));
9749 emit_move_insn (opcode_addr
, mem
);
9750 emit_move_insn (opcode
, gen_rtx_MEM (SImode
, opcode_addr
));
9752 emit_note (NULL
, NOTE_INSN_LOOP_CONT
);
9753 emit_jump (loop_start
);
9754 emit_note (NULL
, NOTE_INSN_LOOP_END
);
9755 emit_label (loop_exit
);
9757 #endif /* TARGET_AIX */
9759 /* This ties together stack memory (MEM with an alias set of
9760 rs6000_sr_alias_set) and the change to the stack pointer. */
/* Emit a stack_tie insn over a BLKmode MEM at the stack pointer so the
   scheduler cannot move stack accesses across stack-pointer updates.  */
9763 rs6000_emit_stack_tie ()
9765 rtx mem
= gen_rtx_MEM (BLKmode
, gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
));
9767 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
9768 emit_insn (gen_stack_tie (mem
));
9771 /* Emit the correct code for allocating stack space, as insns.
9772 If COPY_R12, make sure a copy of the old frame is left in r12.
9773 The generated code may use hard register 0 as a temporary. */
9776 rs6000_emit_allocate_stack (size
, copy_r12
)
9781 rtx stack_reg
= gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
9782 rtx tmp_reg
= gen_rtx_REG (Pmode
, 0);
9783 rtx todec
= GEN_INT (-size
);
9785 if (current_function_limit_stack
)
9787 if (REG_P (stack_limit_rtx
)
9788 && REGNO (stack_limit_rtx
) > 1
9789 && REGNO (stack_limit_rtx
) <= 31)
9791 emit_insn (Pmode
== SImode
9792 ? gen_addsi3 (tmp_reg
,
9795 : gen_adddi3 (tmp_reg
,
9799 emit_insn (gen_cond_trap (LTU
, stack_reg
, tmp_reg
,
9802 else if (GET_CODE (stack_limit_rtx
) == SYMBOL_REF
9804 && DEFAULT_ABI
== ABI_V4
)
9806 rtx toload
= gen_rtx_CONST (VOIDmode
,
9807 gen_rtx_PLUS (Pmode
,
9811 emit_insn (gen_elf_high (tmp_reg
, toload
));
9812 emit_insn (gen_elf_low (tmp_reg
, tmp_reg
, toload
));
9813 emit_insn (gen_cond_trap (LTU
, stack_reg
, tmp_reg
,
9817 warning ("stack limit expression is not supported");
9820 if (copy_r12
|| ! TARGET_UPDATE
)
9821 emit_move_insn (gen_rtx_REG (Pmode
, 12), stack_reg
);
9827 /* Need a note here so that try_split doesn't get confused. */
9828 if (get_last_insn() == NULL_RTX
)
9829 emit_note (0, NOTE_INSN_DELETED
);
9830 insn
= emit_move_insn (tmp_reg
, todec
);
9831 try_split (PATTERN (insn
), insn
, 0);
9835 if (Pmode
== SImode
)
9836 insn
= emit_insn (gen_movsi_update (stack_reg
, stack_reg
,
9839 insn
= emit_insn (gen_movdi_update (stack_reg
, stack_reg
,
9844 if (Pmode
== SImode
)
9845 insn
= emit_insn (gen_addsi3 (stack_reg
, stack_reg
, todec
));
9847 insn
= emit_insn (gen_adddi3 (stack_reg
, stack_reg
, todec
));
9848 emit_move_insn (gen_rtx_MEM (Pmode
, stack_reg
),
9849 gen_rtx_REG (Pmode
, 12));
9852 RTX_FRAME_RELATED_P (insn
) = 1;
9854 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
9855 gen_rtx_SET (VOIDmode
, stack_reg
,
9856 gen_rtx_PLUS (Pmode
, stack_reg
,
9861 /* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
9864 (mem (plus (blah) (regXX)))
9868 (mem (plus (blah) (const VALUE_OF_REGXX))). */
9871 altivec_frame_fixup (insn
, reg
, val
)
9877 real
= copy_rtx (PATTERN (insn
));
9879 real
= replace_rtx (real
, reg
, GEN_INT (val
));
9881 RTX_FRAME_RELATED_P (insn
) = 1;
9882 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
9887 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
9888 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
9889 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
9890 deduce these equivalences by itself so it wasn't necessary to hold
9891 its hand so much. */
9894 rs6000_frame_related (insn
, reg
, val
, reg2
, rreg
)
9903 /* copy_rtx will not make unique copies of registers, so we need to
9904 ensure we don't have unwanted sharing here. */
9906 reg
= gen_raw_REG (GET_MODE (reg
), REGNO (reg
));
9909 reg
= gen_raw_REG (GET_MODE (reg
), REGNO (reg
));
9911 real
= copy_rtx (PATTERN (insn
));
9913 if (reg2
!= NULL_RTX
)
9914 real
= replace_rtx (real
, reg2
, rreg
);
9916 real
= replace_rtx (real
, reg
,
9917 gen_rtx_PLUS (Pmode
, gen_rtx_REG (Pmode
,
9918 STACK_POINTER_REGNUM
),
9921 /* We expect that 'real' is either a SET or a PARALLEL containing
9922 SETs (and possibly other stuff). In a PARALLEL, all the SETs
9923 are important so they all have to be marked RTX_FRAME_RELATED_P. */
9925 if (GET_CODE (real
) == SET
)
9929 temp
= simplify_rtx (SET_SRC (set
));
9931 SET_SRC (set
) = temp
;
9932 temp
= simplify_rtx (SET_DEST (set
));
9934 SET_DEST (set
) = temp
;
9935 if (GET_CODE (SET_DEST (set
)) == MEM
)
9937 temp
= simplify_rtx (XEXP (SET_DEST (set
), 0));
9939 XEXP (SET_DEST (set
), 0) = temp
;
9942 else if (GET_CODE (real
) == PARALLEL
)
9945 for (i
= 0; i
< XVECLEN (real
, 0); i
++)
9946 if (GET_CODE (XVECEXP (real
, 0, i
)) == SET
)
9948 rtx set
= XVECEXP (real
, 0, i
);
9950 temp
= simplify_rtx (SET_SRC (set
));
9952 SET_SRC (set
) = temp
;
9953 temp
= simplify_rtx (SET_DEST (set
));
9955 SET_DEST (set
) = temp
;
9956 if (GET_CODE (SET_DEST (set
)) == MEM
)
9958 temp
= simplify_rtx (XEXP (SET_DEST (set
), 0));
9960 XEXP (SET_DEST (set
), 0) = temp
;
9962 RTX_FRAME_RELATED_P (set
) = 1;
9968 RTX_FRAME_RELATED_P (insn
) = 1;
9969 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
9974 /* Returns an insn that has a vrsave set operation with the
9975 appropriate CLOBBERs. */
9978 generate_set_vrsave (reg
, info
, epiloguep
)
9980 rs6000_stack_t
*info
;
9984 rtx insn
, clobs
[TOTAL_ALTIVEC_REGS
+ 1];
9985 rtx vrsave
= gen_rtx_REG (SImode
, VRSAVE_REGNO
);
9988 = gen_rtx_SET (VOIDmode
,
9990 gen_rtx_UNSPEC_VOLATILE (SImode
,
9991 gen_rtvec (2, reg
, vrsave
),
9996 /* We need to clobber the registers in the mask so the scheduler
9997 does not move sets to VRSAVE before sets of AltiVec registers.
9999 However, if the function receives nonlocal gotos, reload will set
10000 all call saved registers live. We will end up with:
10002 (set (reg 999) (mem))
10003 (parallel [ (set (reg vrsave) (unspec blah))
10004 (clobber (reg 999))])
10006 The clobber will cause the store into reg 999 to be dead, and
10007 flow will attempt to delete an epilogue insn. In this case, we
10008 need an unspec use/set of the register. */
10010 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
10011 if (info
->vrsave_mask
!= 0 && ALTIVEC_REG_BIT (i
) != 0)
10013 if (!epiloguep
|| call_used_regs
[i
])
10014 clobs
[nclobs
++] = gen_rtx_CLOBBER (VOIDmode
,
10015 gen_rtx_REG (V4SImode
, i
));
10018 rtx reg
= gen_rtx_REG (V4SImode
, i
);
10021 = gen_rtx_SET (VOIDmode
,
10023 gen_rtx_UNSPEC (V4SImode
,
10024 gen_rtvec (1, reg
), 27));
10028 insn
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (nclobs
));
10030 for (i
= 0; i
< nclobs
; ++i
)
10031 XVECEXP (insn
, 0, i
) = clobs
[i
];
10036 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
10037 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
10040 emit_frame_save (frame_reg
, frame_ptr
, mode
, regno
, offset
, total_size
)
10043 enum machine_mode mode
;
10044 unsigned int regno
;
10048 rtx reg
, offset_rtx
, insn
, mem
, addr
, int_rtx
;
10049 rtx replacea
, replaceb
;
10051 int_rtx
= GEN_INT (offset
);
10053 /* Some cases that need register indexed addressing. */
10054 if ((TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
10056 && SPE_VECTOR_MODE (mode
)
10057 && !SPE_CONST_OFFSET_OK (offset
)))
10059 /* Whomever calls us must make sure r11 is available in the
10060 flow path of instructions in the prologue. */
10061 offset_rtx
= gen_rtx_REG (Pmode
, 11);
10062 emit_move_insn (offset_rtx
, int_rtx
);
10064 replacea
= offset_rtx
;
10065 replaceb
= int_rtx
;
10069 offset_rtx
= int_rtx
;
10070 replacea
= NULL_RTX
;
10071 replaceb
= NULL_RTX
;
10074 reg
= gen_rtx_REG (mode
, regno
);
10075 addr
= gen_rtx_PLUS (Pmode
, frame_reg
, offset_rtx
);
10076 mem
= gen_rtx_MEM (mode
, addr
);
10077 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10079 insn
= emit_move_insn (mem
, reg
);
10081 rs6000_frame_related (insn
, frame_ptr
, total_size
, replacea
, replaceb
);
10084 /* Emit an offset memory reference suitable for a frame store, while
10085 converting to a valid addressing mode. */
10088 gen_frame_mem_offset (mode
, reg
, offset
)
10089 enum machine_mode mode
;
10093 rtx int_rtx
, offset_rtx
;
10095 int_rtx
= GEN_INT (offset
);
10097 if (TARGET_SPE_ABI
&& SPE_VECTOR_MODE (mode
))
10099 offset_rtx
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
10100 emit_move_insn (offset_rtx
, int_rtx
);
10103 offset_rtx
= int_rtx
;
10105 return gen_rtx_MEM (mode
, gen_rtx_PLUS (Pmode
, reg
, offset_rtx
));
10108 /* Emit function prologue as insns. */
10111 rs6000_emit_prologue ()
10113 rs6000_stack_t
*info
= rs6000_stack_info ();
10114 enum machine_mode reg_mode
= TARGET_POWERPC64
? DImode
: SImode
;
10115 int reg_size
= TARGET_POWERPC64
? 8 : 4;
10116 rtx sp_reg_rtx
= gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
10117 rtx frame_ptr_rtx
= gen_rtx_REG (Pmode
, 12);
10118 rtx frame_reg_rtx
= sp_reg_rtx
;
10119 rtx cr_save_rtx
= NULL
;
10121 int saving_FPRs_inline
;
10122 int using_store_multiple
;
10123 HOST_WIDE_INT sp_offset
= 0;
10125 if (TARGET_SPE_ABI
)
10127 reg_mode
= V2SImode
;
10131 using_store_multiple
= (TARGET_MULTIPLE
&& ! TARGET_POWERPC64
10133 && info
->first_gp_reg_save
< 31);
10134 saving_FPRs_inline
= (info
->first_fp_reg_save
== 64
10135 || FP_SAVE_INLINE (info
->first_fp_reg_save
));
10137 /* For V.4, update stack before we do any saving and set back pointer. */
10138 if (info
->push_p
&& DEFAULT_ABI
== ABI_V4
)
10140 if (info
->total_size
< 32767)
10141 sp_offset
= info
->total_size
;
10143 frame_reg_rtx
= frame_ptr_rtx
;
10144 rs6000_emit_allocate_stack (info
->total_size
,
10145 (frame_reg_rtx
!= sp_reg_rtx
10146 && (info
->cr_save_p
10148 || info
->first_fp_reg_save
< 64
10149 || info
->first_gp_reg_save
< 32
10151 if (frame_reg_rtx
!= sp_reg_rtx
)
10152 rs6000_emit_stack_tie ();
10155 /* Save AltiVec registers if needed. */
10156 if (TARGET_ALTIVEC_ABI
&& info
->altivec_size
!= 0)
10160 /* There should be a non inline version of this, for when we
10161 are saving lots of vector registers. */
10162 for (i
= info
->first_altivec_reg_save
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
10163 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
10165 rtx areg
, savereg
, mem
;
10168 offset
= info
->altivec_save_offset
+ sp_offset
10169 + 16 * (i
- info
->first_altivec_reg_save
);
10171 savereg
= gen_rtx_REG (V4SImode
, i
);
10173 areg
= gen_rtx_REG (Pmode
, 0);
10174 emit_move_insn (areg
, GEN_INT (offset
));
10176 /* AltiVec addressing mode is [reg+reg]. */
10177 mem
= gen_rtx_MEM (V4SImode
,
10178 gen_rtx_PLUS (Pmode
, frame_reg_rtx
, areg
));
10180 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10182 insn
= emit_move_insn (mem
, savereg
);
10184 altivec_frame_fixup (insn
, areg
, offset
);
10188 /* VRSAVE is a bit vector representing which AltiVec registers
10189 are used. The OS uses this to determine which vector
10190 registers to save on a context switch. We need to save
10191 VRSAVE on the stack frame, add whatever AltiVec registers we
10192 used in this function, and do the corresponding magic in the
10195 if (TARGET_ALTIVEC
&& info
->vrsave_mask
!= 0)
10197 rtx reg
, mem
, vrsave
;
10200 /* Get VRSAVE onto a GPR. */
10201 reg
= gen_rtx_REG (SImode
, 12);
10202 vrsave
= gen_rtx_REG (SImode
, VRSAVE_REGNO
);
10204 emit_insn (gen_get_vrsave_internal (reg
));
10206 emit_insn (gen_rtx_SET (VOIDmode
, reg
, vrsave
));
10209 offset
= info
->vrsave_save_offset
+ sp_offset
;
10211 = gen_rtx_MEM (SImode
,
10212 gen_rtx_PLUS (Pmode
, frame_reg_rtx
, GEN_INT (offset
)));
10213 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10214 insn
= emit_move_insn (mem
, reg
);
10216 /* Include the registers in the mask. */
10217 emit_insn (gen_iorsi3 (reg
, reg
, GEN_INT ((int) info
->vrsave_mask
)));
10219 insn
= emit_insn (generate_set_vrsave (reg
, info
, 0));
10222 /* If we use the link register, get it into r0. */
10223 if (info
->lr_save_p
)
10224 emit_move_insn (gen_rtx_REG (Pmode
, 0),
10225 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
10227 /* If we need to save CR, put it into r12. */
10228 if (info
->cr_save_p
&& frame_reg_rtx
!= frame_ptr_rtx
)
10230 cr_save_rtx
= gen_rtx_REG (SImode
, 12);
10231 emit_insn (gen_movesi_from_cr (cr_save_rtx
));
10234 /* Do any required saving of fpr's. If only one or two to save, do
10235 it ourselves. Otherwise, call function. */
10236 if (saving_FPRs_inline
)
10239 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
10240 if ((regs_ever_live
[info
->first_fp_reg_save
+i
]
10241 && ! call_used_regs
[info
->first_fp_reg_save
+i
]))
10242 emit_frame_save (frame_reg_rtx
, frame_ptr_rtx
, DFmode
,
10243 info
->first_fp_reg_save
+ i
,
10244 info
->fp_save_offset
+ sp_offset
+ 8 * i
,
10247 else if (info
->first_fp_reg_save
!= 64)
10251 const char *alloc_rname
;
10253 p
= rtvec_alloc (2 + 64 - info
->first_fp_reg_save
);
10255 RTVEC_ELT (p
, 0) = gen_rtx_CLOBBER (VOIDmode
,
10256 gen_rtx_REG (Pmode
,
10257 LINK_REGISTER_REGNUM
));
10258 sprintf (rname
, "%s%d%s", SAVE_FP_PREFIX
,
10259 info
->first_fp_reg_save
- 32, SAVE_FP_SUFFIX
);
10260 alloc_rname
= ggc_strdup (rname
);
10261 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
,
10262 gen_rtx_SYMBOL_REF (Pmode
,
10264 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
10266 rtx addr
, reg
, mem
;
10267 reg
= gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
);
10268 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10269 GEN_INT (info
->fp_save_offset
10270 + sp_offset
+ 8*i
));
10271 mem
= gen_rtx_MEM (DFmode
, addr
);
10272 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10274 RTVEC_ELT (p
, i
+ 2) = gen_rtx_SET (VOIDmode
, mem
, reg
);
10276 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
10277 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10278 NULL_RTX
, NULL_RTX
);
10281 /* Save GPRs. This is done as a PARALLEL if we are using
10282 the store-multiple instructions. */
10283 if (using_store_multiple
)
10287 p
= rtvec_alloc (32 - info
->first_gp_reg_save
);
10288 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
10290 rtx addr
, reg
, mem
;
10291 reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
10292 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10293 GEN_INT (info
->gp_save_offset
10296 mem
= gen_rtx_MEM (reg_mode
, addr
);
10297 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10299 RTVEC_ELT (p
, i
) = gen_rtx_SET (VOIDmode
, mem
, reg
);
10301 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
10302 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10303 NULL_RTX
, NULL_RTX
);
10308 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
10309 if ((regs_ever_live
[info
->first_gp_reg_save
+i
]
10310 && ! call_used_regs
[info
->first_gp_reg_save
+i
])
10311 || (i
+info
->first_gp_reg_save
== RS6000_PIC_OFFSET_TABLE_REGNUM
10312 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1)
10313 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))))
10315 rtx addr
, reg
, mem
;
10316 reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
10318 if (TARGET_SPE_ABI
)
10320 int offset
= info
->spe_gp_save_offset
+ sp_offset
+ 8 * i
;
10323 if (!SPE_CONST_OFFSET_OK (offset
))
10325 b
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
10326 emit_move_insn (b
, GEN_INT (offset
));
10329 b
= GEN_INT (offset
);
10331 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, b
);
10332 mem
= gen_rtx_MEM (V2SImode
, addr
);
10333 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10334 insn
= emit_move_insn (mem
, reg
);
10336 if (GET_CODE (b
) == CONST_INT
)
10337 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10338 NULL_RTX
, NULL_RTX
);
10340 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10341 b
, GEN_INT (offset
));
10345 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10346 GEN_INT (info
->gp_save_offset
10349 mem
= gen_rtx_MEM (reg_mode
, addr
);
10350 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10352 insn
= emit_move_insn (mem
, reg
);
10353 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10354 NULL_RTX
, NULL_RTX
);
10359 /* ??? There's no need to emit actual instructions here, but it's the
10360 easiest way to get the frame unwind information emitted. */
10361 if (current_function_calls_eh_return
)
10363 unsigned int i
, regno
;
10367 regno
= EH_RETURN_DATA_REGNO (i
);
10368 if (regno
== INVALID_REGNUM
)
10371 emit_frame_save (frame_reg_rtx
, frame_ptr_rtx
, reg_mode
, regno
,
10372 info
->ehrd_offset
+ sp_offset
10373 + reg_size
* (int) i
,
10378 /* Save lr if we used it. */
10379 if (info
->lr_save_p
)
10381 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10382 GEN_INT (info
->lr_save_offset
+ sp_offset
));
10383 rtx reg
= gen_rtx_REG (Pmode
, 0);
10384 rtx mem
= gen_rtx_MEM (Pmode
, addr
);
10385 /* This should not be of rs6000_sr_alias_set, because of
10386 __builtin_return_address. */
10388 insn
= emit_move_insn (mem
, reg
);
10389 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10390 reg
, gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
10393 /* Save CR if we use any that must be preserved. */
10394 if (info
->cr_save_p
)
10396 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10397 GEN_INT (info
->cr_save_offset
+ sp_offset
));
10398 rtx mem
= gen_rtx_MEM (SImode
, addr
);
10400 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10402 /* If r12 was used to hold the original sp, copy cr into r0 now
10404 if (REGNO (frame_reg_rtx
) == 12)
10406 cr_save_rtx
= gen_rtx_REG (SImode
, 0);
10407 emit_insn (gen_movesi_from_cr (cr_save_rtx
));
10409 insn
= emit_move_insn (mem
, cr_save_rtx
);
10411 /* Now, there's no way that dwarf2out_frame_debug_expr is going
10412 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
10413 OK. All we have to do is specify that _one_ condition code
10414 register is saved in this stack slot. The thrower's epilogue
10415 will then restore all the call-saved registers.
10416 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
10417 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10418 cr_save_rtx
, gen_rtx_REG (SImode
, CR2_REGNO
));
10421 /* Update stack and set back pointer unless this is V.4,
10422 for which it was done previously. */
10423 if (info
->push_p
&& DEFAULT_ABI
!= ABI_V4
)
10424 rs6000_emit_allocate_stack (info
->total_size
, FALSE
);
10426 /* Set frame pointer, if needed. */
10427 if (frame_pointer_needed
)
10429 insn
= emit_move_insn (gen_rtx_REG (Pmode
, FRAME_POINTER_REGNUM
),
10431 RTX_FRAME_RELATED_P (insn
) = 1;
10434 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
10435 if ((TARGET_TOC
&& TARGET_MINIMAL_TOC
&& get_pool_size () != 0)
10436 || (DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1
10437 && regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
]))
10439 /* If emit_load_toc_table will use the link register, we need to save
10440 it. We use R11 for this purpose because emit_load_toc_table
10441 can use register 0. This allows us to use a plain 'blr' to return
10442 from the procedure more often. */
10443 int save_LR_around_toc_setup
= (TARGET_ELF
&& flag_pic
!= 0
10444 && ! info
->lr_save_p
10445 && EXIT_BLOCK_PTR
->pred
!= NULL
);
10446 if (save_LR_around_toc_setup
)
10447 emit_move_insn (gen_rtx_REG (Pmode
, 11),
10448 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
10450 rs6000_emit_load_toc_table (TRUE
);
10452 if (save_LR_around_toc_setup
)
10453 emit_move_insn (gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
),
10454 gen_rtx_REG (Pmode
, 11));
10457 if (DEFAULT_ABI
== ABI_DARWIN
10458 && flag_pic
&& current_function_uses_pic_offset_table
)
10460 rtx dest
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
10462 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest
)));
10464 rs6000_maybe_dead (
10465 emit_move_insn (gen_rtx_REG (Pmode
, RS6000_PIC_OFFSET_TABLE_REGNUM
),
10466 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)));
10470 /* Write function prologue. */
10473 rs6000_output_function_prologue (file
, size
)
10475 HOST_WIDE_INT size ATTRIBUTE_UNUSED
;
10477 rs6000_stack_t
*info
= rs6000_stack_info ();
10479 if (TARGET_DEBUG_STACK
)
10480 debug_stack_info (info
);
10482 /* Write .extern for any function we will call to save and restore
10484 if (info
->first_fp_reg_save
< 64
10485 && !FP_SAVE_INLINE (info
->first_fp_reg_save
))
10486 fprintf (file
, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
10487 SAVE_FP_PREFIX
, info
->first_fp_reg_save
- 32, SAVE_FP_SUFFIX
,
10488 RESTORE_FP_PREFIX
, info
->first_fp_reg_save
- 32,
10489 RESTORE_FP_SUFFIX
);
10491 /* Write .extern for AIX common mode routines, if needed. */
10492 if (! TARGET_POWER
&& ! TARGET_POWERPC
&& ! common_mode_defined
)
10494 fputs ("\t.extern __mulh\n", file
);
10495 fputs ("\t.extern __mull\n", file
);
10496 fputs ("\t.extern __divss\n", file
);
10497 fputs ("\t.extern __divus\n", file
);
10498 fputs ("\t.extern __quoss\n", file
);
10499 fputs ("\t.extern __quous\n", file
);
10500 common_mode_defined
= 1;
10503 if (! HAVE_prologue
)
10507 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
10508 the "toplevel" insn chain. */
10509 emit_note (0, NOTE_INSN_DELETED
);
10510 rs6000_emit_prologue ();
10511 emit_note (0, NOTE_INSN_DELETED
);
10513 /* Expand INSN_ADDRESSES so final() doesn't crash. */
10517 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
10519 INSN_ADDRESSES_NEW (insn
, addr
);
10524 if (TARGET_DEBUG_STACK
)
10525 debug_rtx_list (get_insns (), 100);
10526 final (get_insns (), file
, FALSE
, FALSE
);
10530 rs6000_pic_labelno
++;
10533 /* Emit function epilogue as insns.
10535 At present, dwarf2out_frame_debug_expr doesn't understand
10536 register restores, so we don't bother setting RTX_FRAME_RELATED_P
10537 anywhere in the epilogue. Most of the insns below would in any case
10538 need special notes to explain where r11 is in relation to the stack. */
10541 rs6000_emit_epilogue (sibcall
)
10544 rs6000_stack_t
*info
;
10545 int restoring_FPRs_inline
;
10546 int using_load_multiple
;
10547 int using_mfcr_multiple
;
10548 int use_backchain_to_restore_sp
;
10550 rtx sp_reg_rtx
= gen_rtx_REG (Pmode
, 1);
10551 rtx frame_reg_rtx
= sp_reg_rtx
;
10552 enum machine_mode reg_mode
= TARGET_POWERPC64
? DImode
: SImode
;
10553 int reg_size
= TARGET_POWERPC64
? 8 : 4;
10556 if (TARGET_SPE_ABI
)
10558 reg_mode
= V2SImode
;
10562 info
= rs6000_stack_info ();
10563 using_load_multiple
= (TARGET_MULTIPLE
&& ! TARGET_POWERPC64
10565 && info
->first_gp_reg_save
< 31);
10566 restoring_FPRs_inline
= (sibcall
10567 || current_function_calls_eh_return
10568 || info
->first_fp_reg_save
== 64
10569 || FP_SAVE_INLINE (info
->first_fp_reg_save
));
10570 use_backchain_to_restore_sp
= (frame_pointer_needed
10571 || current_function_calls_alloca
10572 || info
->total_size
> 32767);
10573 using_mfcr_multiple
= (rs6000_cpu
== PROCESSOR_PPC601
10574 || rs6000_cpu
== PROCESSOR_PPC603
10575 || rs6000_cpu
== PROCESSOR_PPC750
10578 /* If we have a frame pointer, a call to alloca, or a large stack
10579 frame, restore the old stack pointer using the backchain. Otherwise,
10580 we know what size to update it with. */
10581 if (use_backchain_to_restore_sp
)
10583 /* Under V.4, don't reset the stack pointer until after we're done
10584 loading the saved registers. */
10585 if (DEFAULT_ABI
== ABI_V4
)
10586 frame_reg_rtx
= gen_rtx_REG (Pmode
, 11);
10588 emit_move_insn (frame_reg_rtx
,
10589 gen_rtx_MEM (Pmode
, sp_reg_rtx
));
10592 else if (info
->push_p
)
10594 if (DEFAULT_ABI
== ABI_V4
)
10595 sp_offset
= info
->total_size
;
10598 emit_insn (TARGET_32BIT
10599 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
,
10600 GEN_INT (info
->total_size
))
10601 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
,
10602 GEN_INT (info
->total_size
)));
10606 /* Restore AltiVec registers if needed. */
10607 if (TARGET_ALTIVEC_ABI
&& info
->altivec_size
!= 0)
10611 for (i
= info
->first_altivec_reg_save
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
10612 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
10614 rtx addr
, areg
, mem
;
10616 areg
= gen_rtx_REG (Pmode
, 0);
10618 (areg
, GEN_INT (info
->altivec_save_offset
10620 + 16 * (i
- info
->first_altivec_reg_save
)));
10622 /* AltiVec addressing mode is [reg+reg]. */
10623 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, areg
);
10624 mem
= gen_rtx_MEM (V4SImode
, addr
);
10625 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10627 emit_move_insn (gen_rtx_REG (V4SImode
, i
), mem
);
10631 /* Restore VRSAVE if needed. */
10632 if (TARGET_ALTIVEC_ABI
&& info
->vrsave_mask
!= 0)
10634 rtx addr
, mem
, reg
;
10636 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10637 GEN_INT (info
->vrsave_save_offset
+ sp_offset
));
10638 mem
= gen_rtx_MEM (SImode
, addr
);
10639 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10640 reg
= gen_rtx_REG (SImode
, 12);
10641 emit_move_insn (reg
, mem
);
10643 emit_insn (generate_set_vrsave (reg
, info
, 1));
10646 /* Get the old lr if we saved it. */
10647 if (info
->lr_save_p
)
10649 rtx mem
= gen_frame_mem_offset (Pmode
, frame_reg_rtx
,
10650 info
->lr_save_offset
+ sp_offset
);
10652 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10654 emit_move_insn (gen_rtx_REG (Pmode
, 0), mem
);
10657 /* Get the old cr if we saved it. */
10658 if (info
->cr_save_p
)
10660 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10661 GEN_INT (info
->cr_save_offset
+ sp_offset
));
10662 rtx mem
= gen_rtx_MEM (SImode
, addr
);
10664 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10666 emit_move_insn (gen_rtx_REG (SImode
, 12), mem
);
10669 /* Set LR here to try to overlap restores below. */
10670 if (info
->lr_save_p
)
10671 emit_move_insn (gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
),
10672 gen_rtx_REG (Pmode
, 0));
10674 /* Load exception handler data registers, if needed. */
10675 if (current_function_calls_eh_return
)
10677 unsigned int i
, regno
;
10683 regno
= EH_RETURN_DATA_REGNO (i
);
10684 if (regno
== INVALID_REGNUM
)
10687 mem
= gen_frame_mem_offset (reg_mode
, frame_reg_rtx
,
10688 info
->ehrd_offset
+ sp_offset
10689 + reg_size
* (int) i
);
10690 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10692 emit_move_insn (gen_rtx_REG (reg_mode
, regno
), mem
);
10696 /* Restore GPRs. This is done as a PARALLEL if we are using
10697 the load-multiple instructions. */
10698 if (using_load_multiple
)
10701 p
= rtvec_alloc (32 - info
->first_gp_reg_save
);
10702 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
10704 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10705 GEN_INT (info
->gp_save_offset
10708 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
10710 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10713 gen_rtx_SET (VOIDmode
,
10714 gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
),
10717 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
10720 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
10721 if ((regs_ever_live
[info
->first_gp_reg_save
+i
]
10722 && ! call_used_regs
[info
->first_gp_reg_save
+i
])
10723 || (i
+info
->first_gp_reg_save
== RS6000_PIC_OFFSET_TABLE_REGNUM
10724 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1)
10725 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))))
10727 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10728 GEN_INT (info
->gp_save_offset
10731 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
10733 /* Restore 64-bit quantities for SPE. */
10734 if (TARGET_SPE_ABI
)
10736 int offset
= info
->spe_gp_save_offset
+ sp_offset
+ 8 * i
;
10739 if (!SPE_CONST_OFFSET_OK (offset
))
10741 b
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
10742 emit_move_insn (b
, GEN_INT (offset
));
10745 b
= GEN_INT (offset
);
10747 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, b
);
10748 mem
= gen_rtx_MEM (V2SImode
, addr
);
10751 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10753 emit_move_insn (gen_rtx_REG (reg_mode
,
10754 info
->first_gp_reg_save
+ i
), mem
);
10757 /* Restore fpr's if we need to do it without calling a function. */
10758 if (restoring_FPRs_inline
)
10759 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
10760 if ((regs_ever_live
[info
->first_fp_reg_save
+i
]
10761 && ! call_used_regs
[info
->first_fp_reg_save
+i
]))
10764 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10765 GEN_INT (info
->fp_save_offset
10768 mem
= gen_rtx_MEM (DFmode
, addr
);
10769 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10771 emit_move_insn (gen_rtx_REG (DFmode
,
10772 info
->first_fp_reg_save
+ i
),
10776 /* If we saved cr, restore it here. Just those that were used. */
10777 if (info
->cr_save_p
)
10779 rtx r12_rtx
= gen_rtx_REG (SImode
, 12);
10782 if (using_mfcr_multiple
)
10784 for (i
= 0; i
< 8; i
++)
10785 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
10791 if (using_mfcr_multiple
&& count
> 1)
10796 p
= rtvec_alloc (count
);
10799 for (i
= 0; i
< 8; i
++)
10800 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
10802 rtvec r
= rtvec_alloc (2);
10803 RTVEC_ELT (r
, 0) = r12_rtx
;
10804 RTVEC_ELT (r
, 1) = GEN_INT (1 << (7-i
));
10805 RTVEC_ELT (p
, ndx
) =
10806 gen_rtx_SET (VOIDmode
, gen_rtx_REG (CCmode
, CR0_REGNO
+i
),
10807 gen_rtx_UNSPEC (CCmode
, r
, 20));
10810 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
10815 for (i
= 0; i
< 8; i
++)
10816 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
10818 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode
,
10824 /* If this is V.4, unwind the stack pointer after all of the loads
10825 have been done. We need to emit a block here so that sched
10826 doesn't decide to move the sp change before the register restores
10827 (which may not have any obvious dependency on the stack). This
10828 doesn't hurt performance, because there is no scheduling that can
10829 be done after this point. */
10830 if (DEFAULT_ABI
== ABI_V4
)
10832 if (frame_reg_rtx
!= sp_reg_rtx
)
10833 rs6000_emit_stack_tie ();
10835 if (use_backchain_to_restore_sp
)
10837 emit_move_insn (sp_reg_rtx
, frame_reg_rtx
);
10839 else if (sp_offset
!= 0)
10841 emit_insn (Pmode
== SImode
10842 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
,
10843 GEN_INT (sp_offset
))
10844 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
,
10845 GEN_INT (sp_offset
)));
10849 if (current_function_calls_eh_return
)
10851 rtx sa
= EH_RETURN_STACKADJ_RTX
;
10852 emit_insn (Pmode
== SImode
10853 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
, sa
)
10854 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
, sa
));
10860 if (! restoring_FPRs_inline
)
10861 p
= rtvec_alloc (3 + 64 - info
->first_fp_reg_save
);
10863 p
= rtvec_alloc (2);
10865 RTVEC_ELT (p
, 0) = gen_rtx_RETURN (VOIDmode
);
10866 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
,
10867 gen_rtx_REG (Pmode
,
10868 LINK_REGISTER_REGNUM
));
10870 /* If we have to restore more than two FP registers, branch to the
10871 restore function. It will return to our caller. */
10872 if (! restoring_FPRs_inline
)
10876 const char *alloc_rname
;
10878 sprintf (rname
, "%s%d%s", RESTORE_FP_PREFIX
,
10879 info
->first_fp_reg_save
- 32, RESTORE_FP_SUFFIX
);
10880 alloc_rname
= ggc_strdup (rname
);
10881 RTVEC_ELT (p
, 2) = gen_rtx_USE (VOIDmode
,
10882 gen_rtx_SYMBOL_REF (Pmode
,
10885 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
10888 addr
= gen_rtx_PLUS (Pmode
, sp_reg_rtx
,
10889 GEN_INT (info
->fp_save_offset
+ 8*i
));
10890 mem
= gen_rtx_MEM (DFmode
, addr
);
10891 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10893 RTVEC_ELT (p
, i
+3) =
10894 gen_rtx_SET (VOIDmode
,
10895 gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
),
10900 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
10904 /* Write function epilogue. */
10907 rs6000_output_function_epilogue (file
, size
)
10909 HOST_WIDE_INT size ATTRIBUTE_UNUSED
;
10911 rs6000_stack_t
*info
= rs6000_stack_info ();
10913 if (! HAVE_epilogue
)
10915 rtx insn
= get_last_insn ();
10916 /* If the last insn was a BARRIER, we don't have to write anything except
10917 the trace table. */
10918 if (GET_CODE (insn
) == NOTE
)
10919 insn
= prev_nonnote_insn (insn
);
10920 if (insn
== 0 || GET_CODE (insn
) != BARRIER
)
10922 /* This is slightly ugly, but at least we don't have two
10923 copies of the epilogue-emitting code. */
10926 /* A NOTE_INSN_DELETED is supposed to be at the start
10927 and end of the "toplevel" insn chain. */
10928 emit_note (0, NOTE_INSN_DELETED
);
10929 rs6000_emit_epilogue (FALSE
);
10930 emit_note (0, NOTE_INSN_DELETED
);
10932 /* Expand INSN_ADDRESSES so final() doesn't crash. */
10936 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
10938 INSN_ADDRESSES_NEW (insn
, addr
);
10943 if (TARGET_DEBUG_STACK
)
10944 debug_rtx_list (get_insns (), 100);
10945 final (get_insns (), file
, FALSE
, FALSE
);
10950 /* Output a traceback table here. See /usr/include/sys/debug.h for info
10953 We don't output a traceback table if -finhibit-size-directive was
10954 used. The documentation for -finhibit-size-directive reads
10955 ``don't output a @code{.size} assembler directive, or anything
10956 else that would cause trouble if the function is split in the
10957 middle, and the two halves are placed at locations far apart in
10958 memory.'' The traceback table has this property, since it
10959 includes the offset from the start of the function to the
10960 traceback table itself.
10962 System V.4 Powerpc's (and the embedded ABI derived from it) use a
10963 different traceback table. */
10964 if (DEFAULT_ABI
== ABI_AIX
&& ! flag_inhibit_size_directive
10965 && rs6000_traceback
!= traceback_none
)
10967 const char *fname
= XSTR (XEXP (DECL_RTL (current_function_decl
), 0), 0);
10968 const char *language_string
= lang_hooks
.name
;
10969 int fixed_parms
= 0, float_parms
= 0, parm_info
= 0;
10971 int optional_tbtab
;
10973 if (rs6000_traceback
== traceback_full
)
10974 optional_tbtab
= 1;
10975 else if (rs6000_traceback
== traceback_part
)
10976 optional_tbtab
= 0;
10978 optional_tbtab
= !optimize_size
&& !TARGET_ELF
;
10980 while (*fname
== '.') /* V.4 encodes . in the name */
10983 /* Need label immediately before tbtab, so we can compute its offset
10984 from the function start. */
10987 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LT");
10988 ASM_OUTPUT_LABEL (file
, fname
);
10990 /* The .tbtab pseudo-op can only be used for the first eight
10991 expressions, since it can't handle the possibly variable
10992 length fields that follow. However, if you omit the optional
10993 fields, the assembler outputs zeros for all optional fields
10994 anyways, giving each variable length field is minimum length
10995 (as defined in sys/debug.h). Thus we can not use the .tbtab
10996 pseudo-op at all. */
10998 /* An all-zero word flags the start of the tbtab, for debuggers
10999 that have to find it by searching forward from the entry
11000 point or from the current pc. */
11001 fputs ("\t.long 0\n", file
);
11003 /* Tbtab format type. Use format type 0. */
11004 fputs ("\t.byte 0,", file
);
11006 /* Language type. Unfortunately, there doesn't seem to be any
11007 official way to get this info, so we use language_string. C
11008 is 0. C++ is 9. No number defined for Obj-C, so use the
11009 value for C for now. There is no official value for Java,
11010 although IBM appears to be using 13. There is no official value
11011 for Chill, so we've chosen 44 pseudo-randomly. */
11012 if (! strcmp (language_string
, "GNU C")
11013 || ! strcmp (language_string
, "GNU Objective-C"))
11015 else if (! strcmp (language_string
, "GNU F77"))
11017 else if (! strcmp (language_string
, "GNU Ada"))
11019 else if (! strcmp (language_string
, "GNU Pascal"))
11021 else if (! strcmp (language_string
, "GNU C++"))
11023 else if (! strcmp (language_string
, "GNU Java"))
11025 else if (! strcmp (language_string
, "GNU CHILL"))
11029 fprintf (file
, "%d,", i
);
11031 /* 8 single bit fields: global linkage (not set for C extern linkage,
11032 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
11033 from start of procedure stored in tbtab, internal function, function
11034 has controlled storage, function has no toc, function uses fp,
11035 function logs/aborts fp operations. */
11036 /* Assume that fp operations are used if any fp reg must be saved. */
11037 fprintf (file
, "%d,",
11038 (optional_tbtab
<< 5) | ((info
->first_fp_reg_save
!= 64) << 1));
11040 /* 6 bitfields: function is interrupt handler, name present in
11041 proc table, function calls alloca, on condition directives
11042 (controls stack walks, 3 bits), saves condition reg, saves
11044 /* The `function calls alloca' bit seems to be set whenever reg 31 is
11045 set up as a frame pointer, even when there is no alloca call. */
11046 fprintf (file
, "%d,",
11047 ((optional_tbtab
<< 6)
11048 | ((optional_tbtab
& frame_pointer_needed
) << 5)
11049 | (info
->cr_save_p
<< 1)
11050 | (info
->lr_save_p
)));
11052 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
11054 fprintf (file
, "%d,",
11055 (info
->push_p
<< 7) | (64 - info
->first_fp_reg_save
));
11057 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
11058 fprintf (file
, "%d,", (32 - first_reg_to_save ()));
11060 if (optional_tbtab
)
11062 /* Compute the parameter info from the function decl argument
11065 int next_parm_info_bit
= 31;
11067 for (decl
= DECL_ARGUMENTS (current_function_decl
);
11068 decl
; decl
= TREE_CHAIN (decl
))
11070 rtx parameter
= DECL_INCOMING_RTL (decl
);
11071 enum machine_mode mode
= GET_MODE (parameter
);
11073 if (GET_CODE (parameter
) == REG
)
11075 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
)
11081 if (mode
== SFmode
)
11083 else if (mode
== DFmode
)
11088 /* If only one bit will fit, don't or in this entry. */
11089 if (next_parm_info_bit
> 0)
11090 parm_info
|= (bits
<< (next_parm_info_bit
- 1));
11091 next_parm_info_bit
-= 2;
11095 fixed_parms
+= ((GET_MODE_SIZE (mode
)
11096 + (UNITS_PER_WORD
- 1))
11098 next_parm_info_bit
-= 1;
11104 /* Number of fixed point parameters. */
11105 /* This is actually the number of words of fixed point parameters; thus
11106 an 8 byte struct counts as 2; and thus the maximum value is 8. */
11107 fprintf (file
, "%d,", fixed_parms
);
11109 /* 2 bitfields: number of floating point parameters (7 bits), parameters
11111 /* This is actually the number of fp registers that hold parameters;
11112 and thus the maximum value is 13. */
11113 /* Set parameters on stack bit if parameters are not in their original
11114 registers, regardless of whether they are on the stack? Xlc
11115 seems to set the bit when not optimizing. */
11116 fprintf (file
, "%d\n", ((float_parms
<< 1) | (! optimize
)));
11118 if (! optional_tbtab
)
11121 /* Optional fields follow. Some are variable length. */
11123 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
11124 11 double float. */
11125 /* There is an entry for each parameter in a register, in the order that
11126 they occur in the parameter list. Any intervening arguments on the
11127 stack are ignored. If the list overflows a long (max possible length
11128 34 bits) then completely leave off all elements that don't fit. */
11129 /* Only emit this long if there was at least one parameter. */
11130 if (fixed_parms
|| float_parms
)
11131 fprintf (file
, "\t.long %d\n", parm_info
);
11133 /* Offset from start of code to tb table. */
11134 fputs ("\t.long ", file
);
11135 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LT");
11137 RS6000_OUTPUT_BASENAME (file
, fname
);
11139 assemble_name (file
, fname
);
11141 fputs ("-.", file
);
11143 RS6000_OUTPUT_BASENAME (file
, fname
);
11145 assemble_name (file
, fname
);
11149 /* Interrupt handler mask. */
11150 /* Omit this long, since we never set the interrupt handler bit
11153 /* Number of CTL (controlled storage) anchors. */
11154 /* Omit this long, since the has_ctl bit is never set above. */
11156 /* Displacement into stack of each CTL anchor. */
11157 /* Omit this list of longs, because there are no CTL anchors. */
11159 /* Length of function name. */
11160 fprintf (file
, "\t.short %d\n", (int) strlen (fname
));
11162 /* Function name. */
11163 assemble_string (fname
, strlen (fname
));
11165 /* Register for alloca automatic storage; this is always reg 31.
11166 Only emit this if the alloca bit was set above. */
11167 if (frame_pointer_needed
)
11168 fputs ("\t.byte 31\n", file
);
11170 fputs ("\t.align 2\n", file
);
11174 /* A C compound statement that outputs the assembler code for a thunk
11175 function, used to implement C++ virtual function calls with
11176 multiple inheritance. The thunk acts as a wrapper around a virtual
11177 function, adjusting the implicit object parameter before handing
11178 control off to the real function.
11180 First, emit code to add the integer DELTA to the location that
11181 contains the incoming first argument. Assume that this argument
11182 contains a pointer, and is the one used to pass the `this' pointer
11183 in C++. This is the incoming argument *before* the function
11184 prologue, e.g. `%o0' on a sparc. The addition must preserve the
11185 values of all other incoming arguments.
11187 After the addition, emit code to jump to FUNCTION, which is a
11188 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
11189 not touch the return address. Hence returning from FUNCTION will
11190 return to whoever called the current `thunk'.
11192 The effect must be as if FUNCTION had been called directly with the
11193 adjusted first argument. This macro is responsible for emitting
11194 all of the code for a thunk function; output_function_prologue()
11195 and output_function_epilogue() are not invoked.
11197 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
11198 been extracted from it.) It might possibly be useful on some
11199 targets, but probably not.
11201 If you do not define this macro, the target-independent code in the
11202 C++ frontend will generate a less efficient heavyweight thunk that
11203 calls FUNCTION instead of jumping to it. The generic approach does
11204 not support varargs. */
11207 output_mi_thunk (file
, thunk_fndecl
, delta
, function
)
11209 tree thunk_fndecl ATTRIBUTE_UNUSED
;
11213 const char *this_reg
=
11214 reg_names
[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function
))) ? 4 : 3 ];
11215 const char *prefix
;
11217 const char *r0
= reg_names
[0];
11218 const char *toc
= reg_names
[2];
11219 const char *schain
= reg_names
[11];
11220 const char *r12
= reg_names
[12];
11222 static int labelno
= 0;
11224 /* Small constants that can be done by one add instruction. */
11225 if (delta
>= -32768 && delta
<= 32767)
11227 if (! TARGET_NEW_MNEMONICS
)
11228 fprintf (file
, "\tcal %s,%d(%s)\n", this_reg
, delta
, this_reg
);
11230 fprintf (file
, "\taddi %s,%s,%d\n", this_reg
, this_reg
, delta
);
11233 /* 64-bit constants. If "int" is 32 bits, we'll never hit this abort. */
11234 else if (TARGET_64BIT
&& (delta
< -2147483647 - 1 || delta
> 2147483647))
11237 /* Large constants that can be done by one addis instruction. */
11238 else if ((delta
& 0xffff) == 0)
11239 asm_fprintf (file
, "\t{cau|addis} %s,%s,%d\n", this_reg
, this_reg
,
11242 /* 32-bit constants that can be done by an add and addis instruction. */
11245 /* Break into two pieces, propagating the sign bit from the low
11246 word to the upper word. */
11247 int delta_low
= ((delta
& 0xffff) ^ 0x8000) - 0x8000;
11248 int delta_high
= (delta
- delta_low
) >> 16;
11250 asm_fprintf (file
, "\t{cau|addis} %s,%s,%d\n", this_reg
, this_reg
,
11253 if (! TARGET_NEW_MNEMONICS
)
11254 fprintf (file
, "\tcal %s,%d(%s)\n", this_reg
, delta_low
, this_reg
);
11256 fprintf (file
, "\taddi %s,%s,%d\n", this_reg
, this_reg
, delta_low
);
11259 /* Get the prefix in front of the names. */
11260 switch (DEFAULT_ABI
)
11270 case ABI_AIX_NODESC
:
11275 /* If the function is compiled in this module, jump to it directly.
11276 Otherwise, load up its address and jump to it. */
11278 fname
= XSTR (XEXP (DECL_RTL (function
), 0), 0);
11280 if (current_file_function_operand (XEXP (DECL_RTL (function
), 0), VOIDmode
)
11281 && (! lookup_attribute ("longcall",
11282 TYPE_ATTRIBUTES (TREE_TYPE (function
)))
11283 || lookup_attribute ("shortcall",
11284 TYPE_ATTRIBUTES (TREE_TYPE (function
)))))
11287 fprintf (file
, "\tb %s", prefix
);
11288 assemble_name (file
, fname
);
11289 if (DEFAULT_ABI
== ABI_V4
&& flag_pic
) fputs ("@local", file
);
11295 switch (DEFAULT_ABI
)
11301 /* Set up a TOC entry for the function. */
11302 ASM_GENERATE_INTERNAL_LABEL (buf
, "Lthunk", labelno
);
11304 ASM_OUTPUT_INTERNAL_LABEL (file
, "Lthunk", labelno
);
11307 if (TARGET_MINIMAL_TOC
)
11308 fputs (TARGET_32BIT
? "\t.long " : DOUBLE_INT_ASM_OP
, file
);
11311 fputs ("\t.tc ", file
);
11312 assemble_name (file
, fname
);
11313 fputs ("[TC],", file
);
11315 assemble_name (file
, fname
);
11318 function_section (current_function_decl
);
11321 if (TARGET_MINIMAL_TOC
)
11322 asm_fprintf (file
, (TARGET_32BIT
)
11323 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12
,
11324 TARGET_ELF
? ".LCTOC0@toc" : ".LCTOC..1", toc
);
11325 asm_fprintf (file
, (TARGET_32BIT
) ? "\t{l|lwz} %s," : "\tld %s,", r12
);
11326 assemble_name (file
, buf
);
11327 if (TARGET_ELF
&& TARGET_MINIMAL_TOC
)
11328 fputs ("-(.LCTOC1)", file
);
11329 asm_fprintf (file
, "(%s)\n", TARGET_MINIMAL_TOC
? r12
: toc
);
11331 (TARGET_32BIT
) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
11335 (TARGET_32BIT
) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
11338 asm_fprintf (file
, "\tmtctr %s\n", r0
);
11340 (TARGET_32BIT
) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
11343 asm_fprintf (file
, "\tbctr\n");
11346 case ABI_AIX_NODESC
:
11348 fprintf (file
, "\tb %s", prefix
);
11349 assemble_name (file
, fname
);
11350 if (flag_pic
) fputs ("@plt", file
);
11356 fprintf (file
, "\tb %s", prefix
);
11357 if (flag_pic
&& !machopic_name_defined_p (fname
))
11358 assemble_name (file
, machopic_stub_name (fname
));
11360 assemble_name (file
, fname
);
11369 /* A quick summary of the various types of 'constant-pool tables'
11372 Target Flags Name One table per
11373 AIX (none) AIX TOC object file
11374 AIX -mfull-toc AIX TOC object file
11375 AIX -mminimal-toc AIX minimal TOC translation unit
11376 SVR4/EABI (none) SVR4 SDATA object file
11377 SVR4/EABI -fpic SVR4 pic object file
11378 SVR4/EABI -fPIC SVR4 PIC translation unit
11379 SVR4/EABI -mrelocatable EABI TOC function
11380 SVR4/EABI -maix AIX TOC object file
11381 SVR4/EABI -maix -mminimal-toc
11382 AIX minimal TOC translation unit
11384 Name Reg. Set by entries contains:
11385 made by addrs? fp? sum?
11387 AIX TOC 2 crt0 as Y option option
11388 AIX minimal TOC 30 prolog gcc Y Y option
11389 SVR4 SDATA 13 crt0 gcc N Y N
11390 SVR4 pic 30 prolog ld Y not yet N
11391 SVR4 PIC 30 prolog gcc Y option option
11392 EABI TOC 30 prolog gcc Y option option
11396 /* Hash table stuff for keeping track of TOC entries. */
11398 struct toc_hash_struct
11400 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
11401 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
11403 enum machine_mode key_mode
;
11407 static htab_t toc_hash_table
;
11409 /* Hash functions for the hash table. */
11412 rs6000_hash_constant (k
)
11415 unsigned result
= (GET_CODE (k
) << 3) ^ GET_MODE (k
);
11416 const char *format
= GET_RTX_FORMAT (GET_CODE (k
));
11417 int flen
= strlen (format
);
11420 if (GET_CODE (k
) == LABEL_REF
)
11421 return result
* 1231 + (unsigned) INSN_UID (XEXP (k
, 0));
11423 if (GET_CODE (k
) == CODE_LABEL
)
11428 for (; fidx
< flen
; fidx
++)
11429 switch (format
[fidx
])
11434 const char *str
= XSTR (k
, fidx
);
11435 len
= strlen (str
);
11436 result
= result
* 613 + len
;
11437 for (i
= 0; i
< len
; i
++)
11438 result
= result
* 613 + (unsigned) str
[i
];
11443 result
= result
* 1231 + rs6000_hash_constant (XEXP (k
, fidx
));
11447 result
= result
* 613 + (unsigned) XINT (k
, fidx
);
11450 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT
))
11451 result
= result
* 613 + (unsigned) XWINT (k
, fidx
);
11455 for (i
= 0; i
< sizeof(HOST_WIDE_INT
)/sizeof(unsigned); i
++)
11456 result
= result
* 613 + (unsigned) (XWINT (k
, fidx
)
11467 toc_hash_function (hash_entry
)
11468 const void * hash_entry
;
11470 const struct toc_hash_struct
*thc
=
11471 (const struct toc_hash_struct
*) hash_entry
;
11472 return rs6000_hash_constant (thc
->key
) ^ thc
->key_mode
;
11475 /* Compare H1 and H2 for equivalence. */
11478 toc_hash_eq (h1
, h2
)
11482 rtx r1
= ((const struct toc_hash_struct
*) h1
)->key
;
11483 rtx r2
= ((const struct toc_hash_struct
*) h2
)->key
;
11485 if (((const struct toc_hash_struct
*) h1
)->key_mode
11486 != ((const struct toc_hash_struct
*) h2
)->key_mode
)
11489 return rtx_equal_p (r1
, r2
);
11492 /* Mark the hash table-entry HASH_ENTRY. */
11495 toc_hash_mark_entry (hash_slot
, unused
)
11497 void * unused ATTRIBUTE_UNUSED
;
11499 const struct toc_hash_struct
* hash_entry
=
11500 *(const struct toc_hash_struct
**) hash_slot
;
11501 rtx r
= hash_entry
->key
;
11502 ggc_set_mark (hash_entry
);
11503 /* For CODE_LABELS, we don't want to drag in the whole insn chain... */
11504 if (GET_CODE (r
) == LABEL_REF
)
11507 ggc_set_mark (XEXP (r
, 0));
11514 /* Mark all the elements of the TOC hash-table *HT. */
11517 toc_hash_mark_table (vht
)
11522 htab_traverse (*ht
, toc_hash_mark_entry
, (void *)0);
11525 /* These are the names given by the C++ front-end to vtables, and
11526 vtable-like objects. Ideally, this logic should not be here;
11527 instead, there should be some programmatic way of inquiring as
11528 to whether or not an object is a vtable. */
11530 #define VTABLE_NAME_P(NAME) \
11531 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
11532 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
11533 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
11534 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
11537 rs6000_output_symbol_ref (file
, x
)
11541 /* Currently C++ toc references to vtables can be emitted before it
11542 is decided whether the vtable is public or private. If this is
11543 the case, then the linker will eventually complain that there is
11544 a reference to an unknown section. Thus, for vtables only,
11545 we emit the TOC reference to reference the symbol and not the
11547 const char *name
= XSTR (x
, 0);
11549 if (VTABLE_NAME_P (name
))
11551 RS6000_OUTPUT_BASENAME (file
, name
);
11554 assemble_name (file
, name
);
11557 /* Output a TOC entry. We derive the entry name from what is being
11561 output_toc (file
, x
, labelno
, mode
)
11565 enum machine_mode mode
;
11568 const char *name
= buf
;
11569 const char *real_name
;
11576 /* When the linker won't eliminate them, don't output duplicate
11577 TOC entries (this happens on AIX if there is any kind of TOC,
11578 and on SVR4 under -fPIC or -mrelocatable). */
11581 struct toc_hash_struct
*h
;
11584 h
= ggc_alloc (sizeof (*h
));
11586 h
->key_mode
= mode
;
11587 h
->labelno
= labelno
;
11589 found
= htab_find_slot (toc_hash_table
, h
, 1);
11590 if (*found
== NULL
)
11592 else /* This is indeed a duplicate.
11593 Set this label equal to that label. */
11595 fputs ("\t.set ", file
);
11596 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LC");
11597 fprintf (file
, "%d,", labelno
);
11598 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LC");
11599 fprintf (file
, "%d\n", ((*(const struct toc_hash_struct
**)
11605 /* If we're going to put a double constant in the TOC, make sure it's
11606 aligned properly when strict alignment is on. */
11607 if (GET_CODE (x
) == CONST_DOUBLE
11608 && STRICT_ALIGNMENT
11609 && GET_MODE_BITSIZE (mode
) >= 64
11610 && ! (TARGET_NO_FP_IN_TOC
&& ! TARGET_MINIMAL_TOC
)) {
11611 ASM_OUTPUT_ALIGN (file
, 3);
11614 ASM_OUTPUT_INTERNAL_LABEL (file
, "LC", labelno
);
11616 /* Handle FP constants specially. Note that if we have a minimal
11617 TOC, things we put here aren't actually in the TOC, so we can allow
11619 if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == DFmode
)
11621 REAL_VALUE_TYPE rv
;
11624 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
11625 REAL_VALUE_TO_TARGET_DOUBLE (rv
, k
);
11629 if (TARGET_MINIMAL_TOC
)
11630 fputs (DOUBLE_INT_ASM_OP
, file
);
11632 fprintf (file
, "\t.tc FD_%lx_%lx[TC],",
11633 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
11634 fprintf (file
, "0x%lx%08lx\n",
11635 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
11640 if (TARGET_MINIMAL_TOC
)
11641 fputs ("\t.long ", file
);
11643 fprintf (file
, "\t.tc FD_%lx_%lx[TC],",
11644 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
11645 fprintf (file
, "0x%lx,0x%lx\n",
11646 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
11650 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == SFmode
)
11652 REAL_VALUE_TYPE rv
;
11655 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
11656 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
11660 if (TARGET_MINIMAL_TOC
)
11661 fputs (DOUBLE_INT_ASM_OP
, file
);
11663 fprintf (file
, "\t.tc FS_%lx[TC],", l
& 0xffffffff);
11664 fprintf (file
, "0x%lx00000000\n", l
& 0xffffffff);
11669 if (TARGET_MINIMAL_TOC
)
11670 fputs ("\t.long ", file
);
11672 fprintf (file
, "\t.tc FS_%lx[TC],", l
& 0xffffffff);
11673 fprintf (file
, "0x%lx\n", l
& 0xffffffff);
11677 else if (GET_MODE (x
) == VOIDmode
11678 && (GET_CODE (x
) == CONST_INT
|| GET_CODE (x
) == CONST_DOUBLE
))
11680 unsigned HOST_WIDE_INT low
;
11681 HOST_WIDE_INT high
;
11683 if (GET_CODE (x
) == CONST_DOUBLE
)
11685 low
= CONST_DOUBLE_LOW (x
);
11686 high
= CONST_DOUBLE_HIGH (x
);
11689 #if HOST_BITS_PER_WIDE_INT == 32
11692 high
= (low
& 0x80000000) ? ~0 : 0;
11696 low
= INTVAL (x
) & 0xffffffff;
11697 high
= (HOST_WIDE_INT
) INTVAL (x
) >> 32;
11701 /* TOC entries are always Pmode-sized, but since this
11702 is a bigendian machine then if we're putting smaller
11703 integer constants in the TOC we have to pad them.
11704 (This is still a win over putting the constants in
11705 a separate constant pool, because then we'd have
11706 to have both a TOC entry _and_ the actual constant.)
11708 For a 32-bit target, CONST_INT values are loaded and shifted
11709 entirely within `low' and can be stored in one TOC entry. */
11711 if (TARGET_64BIT
&& POINTER_SIZE
< GET_MODE_BITSIZE (mode
))
11712 abort ();/* It would be easy to make this work, but it doesn't now. */
11714 if (POINTER_SIZE
> GET_MODE_BITSIZE (mode
))
11716 #if HOST_BITS_PER_WIDE_INT == 32
11717 lshift_double (low
, high
, POINTER_SIZE
- GET_MODE_BITSIZE (mode
),
11718 POINTER_SIZE
, &low
, &high
, 0);
11721 low
<<= POINTER_SIZE
- GET_MODE_BITSIZE (mode
);
11722 high
= (HOST_WIDE_INT
) low
>> 32;
11729 if (TARGET_MINIMAL_TOC
)
11730 fputs (DOUBLE_INT_ASM_OP
, file
);
11732 fprintf (file
, "\t.tc ID_%lx_%lx[TC],",
11733 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
11734 fprintf (file
, "0x%lx%08lx\n",
11735 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
11740 if (POINTER_SIZE
< GET_MODE_BITSIZE (mode
))
11742 if (TARGET_MINIMAL_TOC
)
11743 fputs ("\t.long ", file
);
11745 fprintf (file
, "\t.tc ID_%lx_%lx[TC],",
11746 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
11747 fprintf (file
, "0x%lx,0x%lx\n",
11748 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
11752 if (TARGET_MINIMAL_TOC
)
11753 fputs ("\t.long ", file
);
11755 fprintf (file
, "\t.tc IS_%lx[TC],", (long) low
& 0xffffffff);
11756 fprintf (file
, "0x%lx\n", (long) low
& 0xffffffff);
11762 if (GET_CODE (x
) == CONST
)
11764 if (GET_CODE (XEXP (x
, 0)) != PLUS
)
11767 base
= XEXP (XEXP (x
, 0), 0);
11768 offset
= INTVAL (XEXP (XEXP (x
, 0), 1));
11771 if (GET_CODE (base
) == SYMBOL_REF
)
11772 name
= XSTR (base
, 0);
11773 else if (GET_CODE (base
) == LABEL_REF
)
11774 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (XEXP (base
, 0)));
11775 else if (GET_CODE (base
) == CODE_LABEL
)
11776 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (base
));
11780 real_name
= (*targetm
.strip_name_encoding
) (name
);
11781 if (TARGET_MINIMAL_TOC
)
11782 fputs (TARGET_32BIT
? "\t.long " : DOUBLE_INT_ASM_OP
, file
);
11785 fprintf (file
, "\t.tc %s", real_name
);
11788 fprintf (file
, ".N%d", - offset
);
11790 fprintf (file
, ".P%d", offset
);
11792 fputs ("[TC],", file
);
11795 /* Currently C++ toc references to vtables can be emitted before it
11796 is decided whether the vtable is public or private. If this is
11797 the case, then the linker will eventually complain that there is
11798 a TOC reference to an unknown section. Thus, for vtables only,
11799 we emit the TOC reference to reference the symbol and not the
11801 if (VTABLE_NAME_P (name
))
11803 RS6000_OUTPUT_BASENAME (file
, name
);
11805 fprintf (file
, "%d", offset
);
11806 else if (offset
> 0)
11807 fprintf (file
, "+%d", offset
);
11810 output_addr_const (file
, x
);
11814 /* Output an assembler pseudo-op to write an ASCII string of N characters
11815 starting at P to FILE.
11817 On the RS/6000, we have to do this using the .byte operation and
11818 write out special characters outside the quoted string.
11819 Also, the assembler is broken; very long strings are truncated,
11820 so we must artificially break them up early. */
11823 output_ascii (file
, p
, n
)
11829 int i
, count_string
;
11830 const char *for_string
= "\t.byte \"";
11831 const char *for_decimal
= "\t.byte ";
11832 const char *to_close
= NULL
;
11835 for (i
= 0; i
< n
; i
++)
11838 if (c
>= ' ' && c
< 0177)
11841 fputs (for_string
, file
);
11844 /* Write two quotes to get one. */
11852 for_decimal
= "\"\n\t.byte ";
11856 if (count_string
>= 512)
11858 fputs (to_close
, file
);
11860 for_string
= "\t.byte \"";
11861 for_decimal
= "\t.byte ";
11869 fputs (for_decimal
, file
);
11870 fprintf (file
, "%d", c
);
11872 for_string
= "\n\t.byte \"";
11873 for_decimal
= ", ";
11879 /* Now close the string if we have written one. Then end the line. */
11881 fputs (to_close
, file
);
11884 /* Generate a unique section name for FILENAME for a section type
11885 represented by SECTION_DESC. Output goes into BUF.
11887 SECTION_DESC can be any string, as long as it is different for each
11888 possible section type.
11890 We name the section in the same manner as xlc. The name begins with an
11891 underscore followed by the filename (after stripping any leading directory
11892 names) with the last period replaced by the string SECTION_DESC. If
11893 FILENAME does not contain a period, SECTION_DESC is appended to the end of
11897 rs6000_gen_section_name (buf
, filename
, section_desc
)
11899 const char *filename
;
11900 const char *section_desc
;
11902 const char *q
, *after_last_slash
, *last_period
= 0;
11906 after_last_slash
= filename
;
11907 for (q
= filename
; *q
; q
++)
11910 after_last_slash
= q
+ 1;
11911 else if (*q
== '.')
11915 len
= strlen (after_last_slash
) + strlen (section_desc
) + 2;
11916 *buf
= (char *) xmalloc (len
);
11921 for (q
= after_last_slash
; *q
; q
++)
11923 if (q
== last_period
)
11925 strcpy (p
, section_desc
);
11926 p
+= strlen (section_desc
);
11929 else if (ISALNUM (*q
))
11933 if (last_period
== 0)
11934 strcpy (p
, section_desc
);
11939 /* Emit profile function. */
11942 output_profile_hook (labelno
)
11943 int labelno ATTRIBUTE_UNUSED
;
11945 if (DEFAULT_ABI
== ABI_AIX
)
11947 #ifdef NO_PROFILE_COUNTERS
11948 emit_library_call (init_one_libfunc (RS6000_MCOUNT
), 0, VOIDmode
, 0);
11951 const char *label_name
;
11954 ASM_GENERATE_INTERNAL_LABEL (buf
, "LP", labelno
);
11955 label_name
= (*targetm
.strip_name_encoding
) (ggc_strdup (buf
));
11956 fun
= gen_rtx_SYMBOL_REF (Pmode
, label_name
);
11958 emit_library_call (init_one_libfunc (RS6000_MCOUNT
), 0, VOIDmode
, 1,
11962 else if (DEFAULT_ABI
== ABI_DARWIN
)
11964 const char *mcount_name
= RS6000_MCOUNT
;
11965 int caller_addr_regno
= LINK_REGISTER_REGNUM
;
11967 /* Be conservative and always set this, at least for now. */
11968 current_function_uses_pic_offset_table
= 1;
11971 /* For PIC code, set up a stub and collect the caller's address
11972 from r0, which is where the prologue puts it. */
11975 mcount_name
= machopic_stub_name (mcount_name
);
11976 if (current_function_uses_pic_offset_table
)
11977 caller_addr_regno
= 0;
11980 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, mcount_name
),
11982 gen_rtx_REG (Pmode
, caller_addr_regno
), Pmode
);
11986 /* Write function profiler code. */
11989 output_function_profiler (file
, labelno
)
11996 ASM_GENERATE_INTERNAL_LABEL (buf
, "LP", labelno
);
11997 switch (DEFAULT_ABI
)
12004 /* Fall through. */
12006 case ABI_AIX_NODESC
:
12009 warning ("no profiling of 64-bit code for this ABI");
12012 fprintf (file
, "\tmflr %s\n", reg_names
[0]);
12015 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file
);
12016 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12017 reg_names
[0], save_lr
, reg_names
[1]);
12018 asm_fprintf (file
, "\tmflr %s\n", reg_names
[12]);
12019 asm_fprintf (file
, "\t{l|lwz} %s,", reg_names
[0]);
12020 assemble_name (file
, buf
);
12021 asm_fprintf (file
, "@got(%s)\n", reg_names
[12]);
12023 else if (flag_pic
> 1)
12025 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12026 reg_names
[0], save_lr
, reg_names
[1]);
12027 /* Now, we need to get the address of the label. */
12028 fputs ("\tbl 1f\n\t.long ", file
);
12029 assemble_name (file
, buf
);
12030 fputs ("-.\n1:", file
);
12031 asm_fprintf (file
, "\tmflr %s\n", reg_names
[11]);
12032 asm_fprintf (file
, "\t{l|lwz} %s,0(%s)\n",
12033 reg_names
[0], reg_names
[11]);
12034 asm_fprintf (file
, "\t{cax|add} %s,%s,%s\n",
12035 reg_names
[0], reg_names
[0], reg_names
[11]);
12039 asm_fprintf (file
, "\t{liu|lis} %s,", reg_names
[12]);
12040 assemble_name (file
, buf
);
12041 fputs ("@ha\n", file
);
12042 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12043 reg_names
[0], save_lr
, reg_names
[1]);
12044 asm_fprintf (file
, "\t{cal|la} %s,", reg_names
[0]);
12045 assemble_name (file
, buf
);
12046 asm_fprintf (file
, "@l(%s)\n", reg_names
[12]);
12049 if (current_function_needs_context
&& DEFAULT_ABI
== ABI_AIX_NODESC
)
12051 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12052 reg_names
[STATIC_CHAIN_REGNUM
],
12054 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
12055 asm_fprintf (file
, "\t{l|lwz} %s,%d(%s)\n",
12056 reg_names
[STATIC_CHAIN_REGNUM
],
12060 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
12061 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
12066 /* Don't do anything, done in output_profile_hook (). */
12071 /* Adjust the cost of a scheduling dependency. Return the new cost of
12072 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
12075 rs6000_adjust_cost (insn
, link
, dep_insn
, cost
)
12078 rtx dep_insn ATTRIBUTE_UNUSED
;
12081 if (! recog_memoized (insn
))
12084 if (REG_NOTE_KIND (link
) != 0)
12087 if (REG_NOTE_KIND (link
) == 0)
12089 /* Data dependency; DEP_INSN writes a register that INSN reads
12090 some cycles later. */
12091 switch (get_attr_type (insn
))
12094 /* Tell the first scheduling pass about the latency between
12095 a mtctr and bctr (and mtlr and br/blr). The first
12096 scheduling pass will not know about this latency since
12097 the mtctr instruction, which has the latency associated
12098 to it, will be generated by reload. */
12099 return TARGET_POWER
? 5 : 4;
12101 /* Leave some extra cycles between a compare and its
12102 dependent branch, to inhibit expensive mispredicts. */
12103 if ((rs6000_cpu_attr
== CPU_PPC603
12104 || rs6000_cpu_attr
== CPU_PPC604
12105 || rs6000_cpu_attr
== CPU_PPC604E
12106 || rs6000_cpu_attr
== CPU_PPC620
12107 || rs6000_cpu_attr
== CPU_PPC630
12108 || rs6000_cpu_attr
== CPU_PPC750
12109 || rs6000_cpu_attr
== CPU_PPC7400
12110 || rs6000_cpu_attr
== CPU_PPC7450
12111 || rs6000_cpu_attr
== CPU_POWER4
)
12112 && recog_memoized (dep_insn
)
12113 && (INSN_CODE (dep_insn
) >= 0)
12114 && (get_attr_type (dep_insn
) == TYPE_COMPARE
12115 || get_attr_type (dep_insn
) == TYPE_DELAYED_COMPARE
12116 || get_attr_type (dep_insn
) == TYPE_FPCOMPARE
12117 || get_attr_type (dep_insn
) == TYPE_CR_LOGICAL
))
12122 /* Fall out to return default cost. */
12128 /* A C statement (sans semicolon) to update the integer scheduling
12129 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
12130 INSN earlier, increase the priority to execute INSN later. Do not
12131 define this macro if you do not need to adjust the scheduling
12132 priorities of insns. */
12135 rs6000_adjust_priority (insn
, priority
)
12136 rtx insn ATTRIBUTE_UNUSED
;
12139 /* On machines (like the 750) which have asymmetric integer units,
12140 where one integer unit can do multiply and divides and the other
12141 can't, reduce the priority of multiply/divide so it is scheduled
12142 before other integer operations. */
12145 if (! INSN_P (insn
))
12148 if (GET_CODE (PATTERN (insn
)) == USE
)
12151 switch (rs6000_cpu_attr
) {
12153 switch (get_attr_type (insn
))
12160 fprintf (stderr
, "priority was %#x (%d) before adjustment\n",
12161 priority
, priority
);
12162 if (priority
>= 0 && priority
< 0x01000000)
12172 /* Return how many instructions the machine can issue per cycle. */
12175 rs6000_issue_rate ()
12177 switch (rs6000_cpu_attr
) {
12178 case CPU_RIOS1
: /* ? */
12180 case CPU_PPC601
: /* ? */
12200 /* Length in units of the trampoline for entering a nested function. */
12203 rs6000_trampoline_size ()
12207 switch (DEFAULT_ABI
)
12213 ret
= (TARGET_32BIT
) ? 12 : 24;
12218 case ABI_AIX_NODESC
:
12219 ret
= (TARGET_32BIT
) ? 40 : 48;
12226 /* Emit RTL insns to initialize the variable parts of a trampoline.
12227 FNADDR is an RTX for the address of the function's pure code.
12228 CXT is an RTX for the static chain value for the function. */
12231 rs6000_initialize_trampoline (addr
, fnaddr
, cxt
)
12236 enum machine_mode pmode
= Pmode
;
12237 int regsize
= (TARGET_32BIT
) ? 4 : 8;
12238 rtx ctx_reg
= force_reg (pmode
, cxt
);
12240 switch (DEFAULT_ABI
)
12245 /* Macros to shorten the code expansions below. */
12246 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
12247 #define MEM_PLUS(addr,offset) \
12248 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
12250 /* Under AIX, just build the 3 word function descriptor */
12253 rtx fn_reg
= gen_reg_rtx (pmode
);
12254 rtx toc_reg
= gen_reg_rtx (pmode
);
12255 emit_move_insn (fn_reg
, MEM_DEREF (fnaddr
));
12256 emit_move_insn (toc_reg
, MEM_PLUS (fnaddr
, regsize
));
12257 emit_move_insn (MEM_DEREF (addr
), fn_reg
);
12258 emit_move_insn (MEM_PLUS (addr
, regsize
), toc_reg
);
12259 emit_move_insn (MEM_PLUS (addr
, 2*regsize
), ctx_reg
);
12263 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
12266 case ABI_AIX_NODESC
:
12267 emit_library_call (gen_rtx_SYMBOL_REF (SImode
, "__trampoline_setup"),
12268 FALSE
, VOIDmode
, 4,
12270 GEN_INT (rs6000_trampoline_size ()), SImode
,
12280 /* Table of valid machine attributes. */
12282 const struct attribute_spec rs6000_attribute_table
[] =
12284 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
12285 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute
},
12286 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute
},
12287 { NULL
, 0, 0, false, false, false, NULL
}
12290 /* Handle a "longcall" or "shortcall" attribute; arguments as in
12291 struct attribute_spec.handler. */
12294 rs6000_handle_longcall_attribute (node
, name
, args
, flags
, no_add_attrs
)
12297 tree args ATTRIBUTE_UNUSED
;
12298 int flags ATTRIBUTE_UNUSED
;
12299 bool *no_add_attrs
;
12301 if (TREE_CODE (*node
) != FUNCTION_TYPE
12302 && TREE_CODE (*node
) != FIELD_DECL
12303 && TREE_CODE (*node
) != TYPE_DECL
)
12305 warning ("`%s' attribute only applies to functions",
12306 IDENTIFIER_POINTER (name
));
12307 *no_add_attrs
= true;
12313 /* Set longcall attributes on all functions declared when
12314 rs6000_default_long_calls is true. */
12316 rs6000_set_default_type_attributes (type
)
12319 if (rs6000_default_long_calls
12320 && (TREE_CODE (type
) == FUNCTION_TYPE
12321 || TREE_CODE (type
) == METHOD_TYPE
))
12322 TYPE_ATTRIBUTES (type
) = tree_cons (get_identifier ("longcall"),
12324 TYPE_ATTRIBUTES (type
));
12327 /* Return a reference suitable for calling a function with the
12328 longcall attribute. */
12331 rs6000_longcall_ref (call_ref
)
12334 const char *call_name
;
12337 if (GET_CODE (call_ref
) != SYMBOL_REF
)
12340 /* System V adds '.' to the internal name, so skip them. */
12341 call_name
= XSTR (call_ref
, 0);
12342 if (*call_name
== '.')
12344 while (*call_name
== '.')
12347 node
= get_identifier (call_name
);
12348 call_ref
= gen_rtx_SYMBOL_REF (VOIDmode
, IDENTIFIER_POINTER (node
));
12351 return force_reg (Pmode
, call_ref
);
12355 #ifdef USING_ELFOS_H
12357 /* A C statement or statements to switch to the appropriate section
12358 for output of RTX in mode MODE. You can assume that RTX is some
12359 kind of constant in RTL. The argument MODE is redundant except in
12360 the case of a `const_int' rtx. Select the section by calling
12361 `text_section' or one of the alternatives for other sections.
12363 Do not define this macro if you put all constants in the read-only
12367 rs6000_elf_select_rtx_section (mode
, x
, align
)
12368 enum machine_mode mode
;
12370 unsigned HOST_WIDE_INT align
;
12372 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x
, mode
))
12375 default_elf_select_rtx_section (mode
, x
, align
);
12378 /* A C statement or statements to switch to the appropriate
12379 section for output of DECL. DECL is either a `VAR_DECL' node
12380 or a constant of some sort. RELOC indicates whether forming
12381 the initial value of DECL requires link-time relocations. */
12384 rs6000_elf_select_section (decl
, reloc
, align
)
12387 unsigned HOST_WIDE_INT align
;
12389 default_elf_select_section_1 (decl
, reloc
, align
,
12390 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
12393 /* A C statement to build up a unique section name, expressed as a
12394 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
12395 RELOC indicates whether the initial value of EXP requires
12396 link-time relocations. If you do not define this macro, GCC will use
12397 the symbol name prefixed by `.' as the section name. Note - this
12398 macro can now be called for uninitialized data items as well as
12399 initialised data and functions. */
12402 rs6000_elf_unique_section (decl
, reloc
)
12406 default_unique_section_1 (decl
, reloc
,
12407 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
12411 /* If we are referencing a function that is static or is known to be
12412 in this file, make the SYMBOL_REF special. We can use this to indicate
12413 that we can branch to this function without emitting a no-op after the
12414 call. For real AIX calling sequences, we also replace the
12415 function name with the real name (1 or 2 leading .'s), rather than
12416 the function descriptor name. This saves a lot of overriding code
12417 to read the prefixes. */
12420 rs6000_elf_encode_section_info (decl
, first
)
12427 if (TREE_CODE (decl
) == FUNCTION_DECL
)
12429 rtx sym_ref
= XEXP (DECL_RTL (decl
), 0);
12430 if ((TREE_ASM_WRITTEN (decl
) || ! TREE_PUBLIC (decl
))
12431 && ! DECL_WEAK (decl
))
12432 SYMBOL_REF_FLAG (sym_ref
) = 1;
12434 if (DEFAULT_ABI
== ABI_AIX
)
12436 size_t len1
= (DEFAULT_ABI
== ABI_AIX
) ? 1 : 2;
12437 size_t len2
= strlen (XSTR (sym_ref
, 0));
12438 char *str
= alloca (len1
+ len2
+ 1);
12441 memcpy (str
+ len1
, XSTR (sym_ref
, 0), len2
+ 1);
12443 XSTR (sym_ref
, 0) = ggc_alloc_string (str
, len1
+ len2
);
12446 else if (rs6000_sdata
!= SDATA_NONE
12447 && DEFAULT_ABI
== ABI_V4
12448 && TREE_CODE (decl
) == VAR_DECL
)
12450 int size
= int_size_in_bytes (TREE_TYPE (decl
));
12451 tree section_name
= DECL_SECTION_NAME (decl
);
12452 const char *name
= (char *)0;
12457 if (TREE_CODE (section_name
) == STRING_CST
)
12459 name
= TREE_STRING_POINTER (section_name
);
12460 len
= TREE_STRING_LENGTH (section_name
);
12466 if ((size
> 0 && size
<= g_switch_value
)
12468 && ((len
== sizeof (".sdata") - 1
12469 && strcmp (name
, ".sdata") == 0)
12470 || (len
== sizeof (".sdata2") - 1
12471 && strcmp (name
, ".sdata2") == 0)
12472 || (len
== sizeof (".sbss") - 1
12473 && strcmp (name
, ".sbss") == 0)
12474 || (len
== sizeof (".sbss2") - 1
12475 && strcmp (name
, ".sbss2") == 0)
12476 || (len
== sizeof (".PPC.EMB.sdata0") - 1
12477 && strcmp (name
, ".PPC.EMB.sdata0") == 0)
12478 || (len
== sizeof (".PPC.EMB.sbss0") - 1
12479 && strcmp (name
, ".PPC.EMB.sbss0") == 0))))
12481 rtx sym_ref
= XEXP (DECL_RTL (decl
), 0);
12482 size_t len
= strlen (XSTR (sym_ref
, 0));
12483 char *str
= alloca (len
+ 2);
12486 memcpy (str
+ 1, XSTR (sym_ref
, 0), len
+ 1);
12487 XSTR (sym_ref
, 0) = ggc_alloc_string (str
, len
+ 1);
/* Strip the leading '*' (no-prefix marker) and '@' (small-data marker)
   encoding characters from STR and return the bare symbol name.  */

static const char *
rs6000_elf_strip_name_encoding (str)
     const char *str;
{
  while (*str == '*' || *str == '@')
    str++;
  return str;
}
12502 rs6000_elf_in_small_data_p (decl
)
12505 if (rs6000_sdata
== SDATA_NONE
)
12508 if (TREE_CODE (decl
) == VAR_DECL
&& DECL_SECTION_NAME (decl
))
12510 const char *section
= TREE_STRING_POINTER (DECL_SECTION_NAME (decl
));
12511 if (strcmp (section
, ".sdata") == 0
12512 || strcmp (section
, ".sdata2") == 0
12513 || strcmp (section
, ".sbss") == 0)
12518 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (decl
));
12521 && size
<= g_switch_value
12522 && (rs6000_sdata
!= SDATA_DATA
|| TREE_PUBLIC (decl
)))
12529 #endif /* USING_ELFOS_H */
12532 /* Return a REG that occurs in ADDR with coefficient 1.
12533 ADDR can be effectively incremented by incrementing REG.
12535 r0 is special and we must not select it as an address
12536 register by this routine since our caller will try to
12537 increment the returned register via an "la" instruction. */
12540 find_addr_reg (addr
)
12543 while (GET_CODE (addr
) == PLUS
)
12545 if (GET_CODE (XEXP (addr
, 0)) == REG
12546 && REGNO (XEXP (addr
, 0)) != 0)
12547 addr
= XEXP (addr
, 0);
12548 else if (GET_CODE (XEXP (addr
, 1)) == REG
12549 && REGNO (XEXP (addr
, 1)) != 0)
12550 addr
= XEXP (addr
, 1);
12551 else if (CONSTANT_P (XEXP (addr
, 0)))
12552 addr
= XEXP (addr
, 1);
12553 else if (CONSTANT_P (XEXP (addr
, 1)))
12554 addr
= XEXP (addr
, 0);
12558 if (GET_CODE (addr
) == REG
&& REGNO (addr
) != 0)
12564 rs6000_fatal_bad_address (op
)
12567 fatal_insn ("bad address", op
);
12570 /* Called to register all of our global variables with the garbage
12574 rs6000_add_gc_roots ()
12576 toc_hash_table
= htab_create (1021, toc_hash_function
, toc_hash_eq
, NULL
);
12577 ggc_add_root (&toc_hash_table
, 1, sizeof (toc_hash_table
),
12578 toc_hash_mark_table
);
12584 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
12585 reference and a constant. */
12588 symbolic_operand (op
)
12591 switch (GET_CODE (op
))
12598 return (GET_CODE (op
) == SYMBOL_REF
||
12599 (GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
12600 || GET_CODE (XEXP (op
, 0)) == LABEL_REF
)
12601 && GET_CODE (XEXP (op
, 1)) == CONST_INT
);
12608 #ifdef RS6000_LONG_BRANCH
12610 static tree stub_list
= 0;
12612 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
12613 procedure calls to the linked list. */
12616 add_compiler_stub (label_name
, function_name
, line_number
)
12618 tree function_name
;
12621 tree stub
= build_tree_list (function_name
, label_name
);
12622 TREE_TYPE (stub
) = build_int_2 (line_number
, 0);
12623 TREE_CHAIN (stub
) = stub_list
;
12627 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
12628 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
12629 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
12631 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
12632 handling procedure calls from the linked list and initializes the
12636 output_compiler_stub ()
12639 char label_buf
[256];
12643 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
12645 fprintf (asm_out_file
,
12646 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub
)));
12648 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
12649 if (write_symbols
== DBX_DEBUG
|| write_symbols
== XCOFF_DEBUG
)
12650 fprintf (asm_out_file
, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub
));
12651 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
12653 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
))[0] == '*')
12655 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
))+1);
12658 label_buf
[0] = '_';
12659 strcpy (label_buf
+1,
12660 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
)));
12663 strcpy (tmp_buf
, "lis r12,hi16(");
12664 strcat (tmp_buf
, label_buf
);
12665 strcat (tmp_buf
, ")\n\tori r12,r12,lo16(");
12666 strcat (tmp_buf
, label_buf
);
12667 strcat (tmp_buf
, ")\n\tmtctr r12\n\tbctr");
12668 output_asm_insn (tmp_buf
, 0);
12670 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
12671 if (write_symbols
== DBX_DEBUG
|| write_symbols
== XCOFF_DEBUG
)
12672 fprintf(asm_out_file
, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub
));
12673 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
12679 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
12680 already there or not. */
12683 no_previous_def (function_name
)
12684 tree function_name
;
12687 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
12688 if (function_name
== STUB_FUNCTION_NAME (stub
))
12693 /* GET_PREV_LABEL gets the label name from the previous definition of
12697 get_prev_label (function_name
)
12698 tree function_name
;
12701 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
12702 if (function_name
== STUB_FUNCTION_NAME (stub
))
12703 return STUB_LABEL_NAME (stub
);
12707 /* INSN is either a function call or a millicode call. It may have an
12708 unconditional jump in its delay slot.
12710 CALL_DEST is the routine we are calling. */
12713 output_call (insn
, call_dest
, operand_number
)
12716 int operand_number
;
12718 static char buf
[256];
12719 if (GET_CODE (call_dest
) == SYMBOL_REF
&& TARGET_LONG_BRANCH
&& !flag_pic
)
12722 tree funname
= get_identifier (XSTR (call_dest
, 0));
12724 if (no_previous_def (funname
))
12726 int line_number
= 0;
12727 rtx label_rtx
= gen_label_rtx ();
12728 char *label_buf
, temp_buf
[256];
12729 ASM_GENERATE_INTERNAL_LABEL (temp_buf
, "L",
12730 CODE_LABEL_NUMBER (label_rtx
));
12731 label_buf
= temp_buf
[0] == '*' ? temp_buf
+ 1 : temp_buf
;
12732 labelname
= get_identifier (label_buf
);
12733 for (; insn
&& GET_CODE (insn
) != NOTE
; insn
= PREV_INSN (insn
));
12735 line_number
= NOTE_LINE_NUMBER (insn
);
12736 add_compiler_stub (labelname
, funname
, line_number
);
12739 labelname
= get_prev_label (funname
);
12741 sprintf (buf
, "jbsr %%z%d,%.246s",
12742 operand_number
, IDENTIFIER_POINTER (labelname
));
12747 sprintf (buf
, "bl %%z%d", operand_number
);
12752 #endif /* RS6000_LONG_BRANCH */
12754 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
12756 const char *const symbol_ = (SYMBOL); \
12757 char *buffer_ = (BUF); \
12758 if (symbol_[0] == '"') \
12760 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
12762 else if (name_needs_quotes(symbol_)) \
12764 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
12768 sprintf(buffer_, "L%d$%s", (N), symbol_); \
12773 /* Generate PIC and indirect symbol stubs. */
12776 machopic_output_stub (file
, symb
, stub
)
12778 const char *symb
, *stub
;
12780 unsigned int length
;
12781 char *symbol_name
, *lazy_ptr_name
;
12782 char *local_label_0
;
12783 static int label
= 0;
12785 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
12786 symb
= (*targetm
.strip_name_encoding
) (symb
);
12790 length
= strlen (symb
);
12791 symbol_name
= alloca (length
+ 32);
12792 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name
, symb
, length
);
12794 lazy_ptr_name
= alloca (length
+ 32);
12795 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name
, symb
, length
);
12797 local_label_0
= alloca (length
+ 32);
12798 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0
, symb
, length
, 0);
12801 machopic_picsymbol_stub_section ();
12803 machopic_symbol_stub_section ();
12805 fprintf (file
, "%s:\n", stub
);
12806 fprintf (file
, "\t.indirect_symbol %s\n", symbol_name
);
12810 fprintf (file
, "\tmflr r0\n");
12811 fprintf (file
, "\tbcl 20,31,%s\n", local_label_0
);
12812 fprintf (file
, "%s:\n\tmflr r11\n", local_label_0
);
12813 fprintf (file
, "\taddis r11,r11,ha16(%s-%s)\n",
12814 lazy_ptr_name
, local_label_0
);
12815 fprintf (file
, "\tmtlr r0\n");
12816 fprintf (file
, "\tlwz r12,lo16(%s-%s)(r11)\n",
12817 lazy_ptr_name
, local_label_0
);
12818 fprintf (file
, "\tmtctr r12\n");
12819 fprintf (file
, "\taddi r11,r11,lo16(%s-%s)\n",
12820 lazy_ptr_name
, local_label_0
);
12821 fprintf (file
, "\tbctr\n");
12824 fprintf (file
, "non-pure not supported\n");
12826 machopic_lazy_symbol_ptr_section ();
12827 fprintf (file
, "%s:\n", lazy_ptr_name
);
12828 fprintf (file
, "\t.indirect_symbol %s\n", symbol_name
);
12829 fprintf (file
, "\t.long dyld_stub_binding_helper\n");
12832 /* Legitimize PIC addresses. If the address is already
12833 position-independent, we return ORIG. Newly generated
12834 position-independent addresses go into a reg. This is REG if non
12835 zero, otherwise we allocate register(s) as necessary. */
12837 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
12840 rs6000_machopic_legitimize_pic_address (orig
, mode
, reg
)
12842 enum machine_mode mode
;
12847 if (reg
== NULL
&& ! reload_in_progress
&& ! reload_completed
)
12848 reg
= gen_reg_rtx (Pmode
);
12850 if (GET_CODE (orig
) == CONST
)
12852 if (GET_CODE (XEXP (orig
, 0)) == PLUS
12853 && XEXP (XEXP (orig
, 0), 0) == pic_offset_table_rtx
)
12856 if (GET_CODE (XEXP (orig
, 0)) == PLUS
)
12859 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig
, 0), 0),
12862 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig
, 0), 1),
12868 if (GET_CODE (offset
) == CONST_INT
)
12870 if (SMALL_INT (offset
))
12871 return plus_constant (base
, INTVAL (offset
));
12872 else if (! reload_in_progress
&& ! reload_completed
)
12873 offset
= force_reg (Pmode
, offset
);
12876 rtx mem
= force_const_mem (Pmode
, orig
);
12877 return machopic_legitimize_pic_address (mem
, Pmode
, reg
);
12880 return gen_rtx (PLUS
, Pmode
, base
, offset
);
12883 /* Fall back on generic machopic code. */
12884 return machopic_legitimize_pic_address (orig
, mode
, reg
);
12887 /* This is just a placeholder to make linking work without having to
12888 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
12889 ever needed for Darwin (not too likely!) this would have to get a
12890 real definition. */
12897 #endif /* TARGET_MACHO */
12900 static unsigned int
12901 rs6000_elf_section_type_flags (decl
, name
, reloc
)
12907 = default_section_type_flags_1 (decl
, name
, reloc
,
12908 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
12910 if (TARGET_RELOCATABLE
)
12911 flags
|= SECTION_WRITE
;
12916 /* Record an element in the table of global constructors. SYMBOL is
12917 a SYMBOL_REF of the function to be called; PRIORITY is a number
12918 between 0 and MAX_INIT_PRIORITY.
12920 This differs from default_named_section_asm_out_constructor in
12921 that we have special handling for -mrelocatable. */
12924 rs6000_elf_asm_out_constructor (symbol
, priority
)
12928 const char *section
= ".ctors";
12931 if (priority
!= DEFAULT_INIT_PRIORITY
)
12933 sprintf (buf
, ".ctors.%.5u",
12934 /* Invert the numbering so the linker puts us in the proper
12935 order; constructors are run from right to left, and the
12936 linker sorts in increasing order. */
12937 MAX_INIT_PRIORITY
- priority
);
12941 named_section_flags (section
, SECTION_WRITE
);
12942 assemble_align (POINTER_SIZE
);
12944 if (TARGET_RELOCATABLE
)
12946 fputs ("\t.long (", asm_out_file
);
12947 output_addr_const (asm_out_file
, symbol
);
12948 fputs (")@fixup\n", asm_out_file
);
12951 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
12955 rs6000_elf_asm_out_destructor (symbol
, priority
)
12959 const char *section
= ".dtors";
12962 if (priority
!= DEFAULT_INIT_PRIORITY
)
12964 sprintf (buf
, ".dtors.%.5u",
12965 /* Invert the numbering so the linker puts us in the proper
12966 order; constructors are run from right to left, and the
12967 linker sorts in increasing order. */
12968 MAX_INIT_PRIORITY
- priority
);
12972 named_section_flags (section
, SECTION_WRITE
);
12973 assemble_align (POINTER_SIZE
);
12975 if (TARGET_RELOCATABLE
)
12977 fputs ("\t.long (", asm_out_file
);
12978 output_addr_const (asm_out_file
, symbol
);
12979 fputs (")@fixup\n", asm_out_file
);
12982 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
12988 rs6000_xcoff_asm_globalize_label (stream
, name
)
12992 fputs (GLOBAL_ASM_OP
, stream
);
12993 RS6000_OUTPUT_BASENAME (stream
, name
);
12994 putc ('\n', stream
);
12998 rs6000_xcoff_asm_named_section (name
, flags
)
13000 unsigned int flags
;
13003 static const char * const suffix
[3] = { "PR", "RO", "RW" };
13005 if (flags
& SECTION_CODE
)
13007 else if (flags
& SECTION_WRITE
)
13012 fprintf (asm_out_file
, "\t.csect %s%s[%s],%u\n",
13013 (flags
& SECTION_CODE
) ? "." : "",
13014 name
, suffix
[smclass
], flags
& SECTION_ENTSIZE
);
13018 rs6000_xcoff_select_section (decl
, reloc
, align
)
13021 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
;
13023 if (decl_readonly_section_1 (decl
, reloc
, 1))
13025 if (TREE_PUBLIC (decl
))
13026 read_only_data_section ();
13028 read_only_private_data_section ();
13032 if (TREE_PUBLIC (decl
))
13035 private_data_section ();
13040 rs6000_xcoff_unique_section (decl
, reloc
)
13042 int reloc ATTRIBUTE_UNUSED
;
13046 /* Use select_section for private and uninitialized data. */
13047 if (!TREE_PUBLIC (decl
)
13048 || DECL_COMMON (decl
)
13049 || DECL_INITIAL (decl
) == NULL_TREE
13050 || DECL_INITIAL (decl
) == error_mark_node
13051 || (flag_zero_initialized_in_bss
13052 && initializer_zerop (DECL_INITIAL (decl
))))
13055 name
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
));
13056 name
= (*targetm
.strip_name_encoding
) (name
);
13057 DECL_SECTION_NAME (decl
) = build_string (strlen (name
), name
);
13060 /* Select section for constant in constant pool.
13062 On RS/6000, all constants are in the private read-only data area.
13063 However, if this is being placed in the TOC it must be output as a
13067 rs6000_xcoff_select_rtx_section (mode
, x
, align
)
13068 enum machine_mode mode
;
13070 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
;
13072 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x
, mode
))
13075 read_only_private_data_section ();
/* Remove any trailing [DS] or the like from the symbol name.  */

static const char *
rs6000_xcoff_strip_name_encoding (name)
     const char *name;
{
  size_t len;
  if (*name == '*')
    name++;
  len = strlen (name);
  /* A trailing ']' means a 4-character "[XX]" mapping-class suffix.  */
  if (name[len - 1] == ']')
    return ggc_alloc_string (name, len - 4);
  else
    return name;
}
13094 /* Section attributes. AIX is always PIC. */
13096 static unsigned int
13097 rs6000_xcoff_section_type_flags (decl
, name
, reloc
)
13102 unsigned int align
;
13103 unsigned int flags
= default_section_type_flags_1 (decl
, name
, reloc
, 1);
13105 /* Align to at least UNIT size. */
13106 if (flags
& SECTION_CODE
)
13107 align
= MIN_UNITS_PER_WORD
;
13109 /* Increase alignment of large objects if not already stricter. */
13110 align
= MAX ((DECL_ALIGN (decl
) / BITS_PER_UNIT
),
13111 int_size_in_bytes (TREE_TYPE (decl
)) > MIN_UNITS_PER_WORD
13112 ? UNITS_PER_FP_WORD
: MIN_UNITS_PER_WORD
);
13114 return flags
| (exact_log2 (align
) & SECTION_ENTSIZE
);
13117 #endif /* TARGET_XCOFF */
13119 /* Note that this is also used for PPC64 Linux. */
13122 rs6000_xcoff_encode_section_info (decl
, first
)
13124 int first ATTRIBUTE_UNUSED
;
13126 if (TREE_CODE (decl
) == FUNCTION_DECL
13127 && (TREE_ASM_WRITTEN (decl
) || ! TREE_PUBLIC (decl
))
13128 && ! DECL_WEAK (decl
))
13129 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl
), 0)) = 1;
13132 /* Cross-module name binding. For AIX and PPC64 Linux, which always are
13133 PIC, use private copy of flag_pic. */
13136 rs6000_binds_local_p (decl
)
13139 return default_binds_local_p_1 (decl
, flag_pic
|| rs6000_flag_pic
);