]> gcc.gnu.org Git - gcc.git/blob - gcc/config/rs6000/rs6000.c
t-rs6000: Add dependence of cfglayout.h to rs6000.o.
[gcc.git] / gcc / config / rs6000 / rs6000.c
1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
34 #include "flags.h"
35 #include "recog.h"
36 #include "obstack.h"
37 #include "tree.h"
38 #include "expr.h"
39 #include "optabs.h"
40 #include "except.h"
41 #include "function.h"
42 #include "output.h"
43 #include "basic-block.h"
44 #include "integrate.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "hashtab.h"
48 #include "tm_p.h"
49 #include "target.h"
50 #include "target-def.h"
51 #include "langhooks.h"
52 #include "reload.h"
53 #include "cfglayout.h"
54
55 #ifndef TARGET_NO_PROTOTYPE
56 #define TARGET_NO_PROTOTYPE 0
57 #endif
58
   /* True iff constant N fits in a signed 5-bit field (-16 .. 15) and the
      vector operands pass easy_vector_same (defined later in this file).  */
59 #define EASY_VECTOR_15(n, x, y) ((n) >= -16 && (n) <= 15 \
60 && easy_vector_same (x, y))
61
   /* True iff N is an even value in 0x10 .. 0x1e, i.e. twice a value that
      EASY_VECTOR_15 would accept.  NOTE(review): the "add self" synthesis
      strategy is inferred from the macro name — confirm against callers.  */
62 #define EASY_VECTOR_15_ADD_SELF(n, x, y) ((n) >= 0x10 && (n) <= 0x1e \
63 && !((n) & 1) \
64 && easy_vector_same (x, y))
65
   /* Classic unsafe min/max macros: each argument is evaluated twice, so
      never pass expressions with side effects.  */
66 #define min(A,B) ((A) < (B) ? (A) : (B))
67 #define max(A,B) ((A) > (B) ? (A) : (B))
68
69 /* Target cpu type */
70
71 enum processor_type rs6000_cpu;
   /* One entry per source of a CPU selection.  Fields: the option's value
      string (filled in during option processing), the option name, and
      whether that option sets the tuning and/or the architecture flags
      (used by rs6000_override_options below).  */
72 struct rs6000_cpu_select rs6000_select[3] =
73 {
74 /* switch name, tune arch */
75 { (const char *)0, "--with-cpu=", 1, 1 },
76 { (const char *)0, "-mcpu=", 1, 1 },
77 { (const char *)0, "-mtune=", 1, 0 },
78 };
79
80 /* Size of long double */
81 const char *rs6000_long_double_size_string;
82 int rs6000_long_double_type_size;
83
84 /* Whether -mabi=altivec has appeared */
85 int rs6000_altivec_abi;
86
87 /* Whether VRSAVE instructions should be generated. */
88 int rs6000_altivec_vrsave;
89
90 /* String from -mvrsave= option. */
91 const char *rs6000_altivec_vrsave_string;
92
93 /* Nonzero if we want SPE ABI extensions. */
94 int rs6000_spe_abi;
95
96 /* Whether isel instructions should be generated. */
97 int rs6000_isel;
98
99 /* Whether SPE simd instructions should be generated. */
100 int rs6000_spe;
101
102 /* Nonzero if floating point operations are done in the GPRs. */
103 int rs6000_float_gprs = 0;
104
105 /* String from -mfloat-gprs=. */
106 const char *rs6000_float_gprs_string;
107
108 /* String from -misel=. */
109 const char *rs6000_isel_string;
110
111 /* String from -mspe=. */
112 const char *rs6000_spe_string;
113
114 /* Set to nonzero once AIX common-mode calls have been defined. */
115 static GTY(()) int common_mode_defined;
116
117 /* Save information from a "cmpxx" operation until the branch or scc is
118 emitted. */
119 rtx rs6000_compare_op0, rs6000_compare_op1;
120 int rs6000_compare_fp_p;
121
122 /* Label number of label created for -mrelocatable, to call to so we can
123 get the address of the GOT section */
124 int rs6000_pic_labelno;
125
126 #ifdef USING_ELFOS_H
127 /* Which abi to adhere to */
128 const char *rs6000_abi_name;
129
130 /* Semantics of the small data area */
131 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
132
133 /* Which small data model to use */
134 const char *rs6000_sdata_name = (char *)0;
135
136 /* Counter for labels which are to be placed in .fixup. */
137 int fixuplabelno = 0;
138 #endif
139
140 /* Bit size of immediate TLS offsets and string from which it is decoded. */
141 int rs6000_tls_size = 32;
142 const char *rs6000_tls_size_string;
143
144 /* ABI enumeration available for subtarget to use. */
145 enum rs6000_abi rs6000_current_abi;
146
147 /* ABI string from -mabi= option. */
148 const char *rs6000_abi_string;
149
150 /* Debug flags */
151 const char *rs6000_debug_name;
152 int rs6000_debug_stack; /* debug stack applications */
153 int rs6000_debug_arg; /* debug argument handling */
154
155 /* Opaque types. */
156 static GTY(()) tree opaque_V2SI_type_node;
157 static GTY(()) tree opaque_V2SF_type_node;
158 static GTY(()) tree opaque_p_V2SI_type_node;
159
    /* String from -mtraceback=; decoded into rs6000_traceback by
       rs6000_override_options.  */
160 const char *rs6000_traceback_name;
161 static enum {
162 traceback_default = 0,
163 traceback_none,
164 traceback_part,
165 traceback_full
166 } rs6000_traceback;
167
168 /* Flag to say the TOC is initialized */
169 int toc_initialized;
170 char toc_label_name[10];
171
172 /* Alias set for saves and restores from the rs6000 stack. */
173 static int rs6000_sr_alias_set;
174
175 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
176 The only place that looks at this is rs6000_set_default_type_attributes;
177 everywhere else should rely on the presence or absence of a longcall
178 attribute on the function declaration. */
179 int rs6000_default_long_calls;
180 const char *rs6000_longcall_switch;
181
182 /* Control alignment for fields within structures. */
183 /* String from -malign-XXXXX. */
184 const char *rs6000_alignment_string;
185 int rs6000_alignment_flags;
186
    /* One table entry per machine builtin: the target-flag mask that must
       be active for the builtin to exist, the insn pattern used to expand
       it, its source-level name, and its rs6000_builtins enum code.  */
187 struct builtin_description
188 {
189 /* mask is not const because we're going to alter it below. This
190 nonsense will go away when we rewrite the -march infrastructure
191 to give us more target flag bits. */
192 unsigned int mask;
193 const enum insn_code icode;
194 const char *const name;
195 const enum rs6000_builtins code;
196 };
197
    /* Forward declarations for the static helpers defined later in this
       file.  Most use the old-style PARAMS macro (pre-ISO prototype
       compatibility); a few newer entries are plain ISO prototypes.  */
198 static bool rs6000_function_ok_for_sibcall PARAMS ((tree, tree));
199 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
200 static void validate_condition_mode
201 PARAMS ((enum rtx_code, enum machine_mode));
202 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
203 static void rs6000_maybe_dead PARAMS ((rtx));
204 static void rs6000_emit_stack_tie PARAMS ((void));
205 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
206 static rtx spe_synthesize_frame_save PARAMS ((rtx));
207 static bool spe_func_has_64bit_regs_p PARAMS ((void));
208 static void emit_frame_save PARAMS ((rtx, rtx, enum machine_mode,
209 unsigned int, int, int));
210 static rtx gen_frame_mem_offset PARAMS ((enum machine_mode, rtx, int));
211 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
212 static unsigned rs6000_hash_constant PARAMS ((rtx));
213 static unsigned toc_hash_function PARAMS ((const void *));
214 static int toc_hash_eq PARAMS ((const void *, const void *));
215 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
216 static bool constant_pool_expr_p PARAMS ((rtx));
217 static bool toc_relative_expr_p PARAMS ((rtx));
218 static bool legitimate_small_data_p PARAMS ((enum machine_mode, rtx));
219 static bool legitimate_offset_address_p PARAMS ((enum machine_mode, rtx, int));
220 static bool legitimate_indexed_address_p PARAMS ((rtx, int));
221 static bool legitimate_indirect_address_p PARAMS ((rtx, int));
222 static bool legitimate_lo_sum_address_p PARAMS ((enum machine_mode, rtx, int));
223 static struct machine_function * rs6000_init_machine_status PARAMS ((void));
224 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
225 #ifdef HAVE_GAS_HIDDEN
226 static void rs6000_assemble_visibility PARAMS ((tree, int));
227 #endif
228 static int rs6000_ra_ever_killed PARAMS ((void));
229 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
230 extern const struct attribute_spec rs6000_attribute_table[];
231 static void rs6000_set_default_type_attributes PARAMS ((tree));
232 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
233 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
234 static void rs6000_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT,
235 HOST_WIDE_INT, tree));
236 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
237 HOST_WIDE_INT, HOST_WIDE_INT));
    /* Object-format-specific hook implementations.  */
238 #if TARGET_ELF
239 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
240 int));
241 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
242 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
243 static void rs6000_elf_select_section PARAMS ((tree, int,
244 unsigned HOST_WIDE_INT));
245 static void rs6000_elf_unique_section PARAMS ((tree, int));
246 static void rs6000_elf_select_rtx_section PARAMS ((enum machine_mode, rtx,
247 unsigned HOST_WIDE_INT));
248 static void rs6000_elf_encode_section_info PARAMS ((tree, rtx, int))
249 ATTRIBUTE_UNUSED;
250 static bool rs6000_elf_in_small_data_p PARAMS ((tree));
251 #endif
252 #if TARGET_XCOFF
253 static void rs6000_xcoff_asm_globalize_label PARAMS ((FILE *, const char *));
254 static void rs6000_xcoff_asm_named_section PARAMS ((const char *, unsigned int));
255 static void rs6000_xcoff_select_section PARAMS ((tree, int,
256 unsigned HOST_WIDE_INT));
257 static void rs6000_xcoff_unique_section PARAMS ((tree, int));
258 static void rs6000_xcoff_select_rtx_section PARAMS ((enum machine_mode, rtx,
259 unsigned HOST_WIDE_INT));
260 static const char * rs6000_xcoff_strip_name_encoding PARAMS ((const char *));
261 static unsigned int rs6000_xcoff_section_type_flags PARAMS ((tree, const char *, int));
262 static void rs6000_xcoff_file_end PARAMS ((void));
263 #endif
264 #if TARGET_MACHO
265 static bool rs6000_binds_local_p PARAMS ((tree));
266 #endif
    /* Scheduler hook implementations.  */
267 static int rs6000_use_dfa_pipeline_interface PARAMS ((void));
268 static int rs6000_variable_issue PARAMS ((FILE *, int, rtx, int));
269 static bool rs6000_rtx_costs PARAMS ((rtx, int, int, int *));
270 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
271 static int rs6000_adjust_priority PARAMS ((rtx, int));
272 static int rs6000_issue_rate PARAMS ((void));
273 static int rs6000_use_sched_lookahead PARAMS ((void));
274
    /* Builtin setup and expansion (AltiVec and SPE).  */
275 static void rs6000_init_builtins PARAMS ((void));
276 static rtx rs6000_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
277 static rtx rs6000_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
278 static rtx rs6000_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
279 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
280 static void altivec_init_builtins PARAMS ((void));
281 static void rs6000_common_init_builtins PARAMS ((void));
282
283 static void enable_mask_for_builtins PARAMS ((struct builtin_description *,
284 int, enum rs6000_builtins,
285 enum rs6000_builtins));
286 static void spe_init_builtins PARAMS ((void));
287 static rtx spe_expand_builtin PARAMS ((tree, rtx, bool *));
288 static rtx spe_expand_predicate_builtin PARAMS ((enum insn_code, tree, rtx));
289 static rtx spe_expand_evsel_builtin PARAMS ((enum insn_code, tree, rtx));
290 static int rs6000_emit_int_cmove PARAMS ((rtx, rtx, rtx, rtx));
291
292 static rtx altivec_expand_builtin PARAMS ((tree, rtx, bool *));
293 static rtx altivec_expand_ld_builtin PARAMS ((tree, rtx, bool *));
294 static rtx altivec_expand_st_builtin PARAMS ((tree, rtx, bool *));
295 static rtx altivec_expand_dst_builtin PARAMS ((tree, rtx, bool *));
296 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
297 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
298 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
    /* Option parsing helpers called from rs6000_override_options.  */
299 static void rs6000_parse_abi_options PARAMS ((void));
300 static void rs6000_parse_alignment_option PARAMS ((void));
301 static void rs6000_parse_tls_size_option PARAMS ((void));
302 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
303 static int first_altivec_reg_to_save PARAMS ((void));
304 static unsigned int compute_vrsave_mask PARAMS ((void));
305 static void is_altivec_return_reg PARAMS ((rtx, void *));
306 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
307 int easy_vector_constant PARAMS ((rtx, enum machine_mode));
308 static int easy_vector_same PARAMS ((rtx, enum machine_mode));
309 static bool is_ev64_opaque_type PARAMS ((tree));
310 static rtx rs6000_dwarf_register_span PARAMS ((rtx));
    /* Thread-local storage support.  */
311 static rtx rs6000_legitimize_tls_address PARAMS ((rtx, enum tls_model));
312 static rtx rs6000_tls_get_addr PARAMS ((void));
313 static rtx rs6000_got_sym PARAMS ((void));
314 static inline int rs6000_tls_symbol_ref_1 PARAMS ((rtx *, void *));
315 static const char *rs6000_get_some_local_dynamic_name PARAMS ((void));
316 static int rs6000_get_some_local_dynamic_name_1 PARAMS ((rtx *, void *));
317 static rtx rs6000_complex_function_value (enum machine_mode);
318 static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *, enum machine_mode, tree);
319
320 /* Hash table stuff for keeping track of TOC entries. */
321
322 struct toc_hash_struct GTY(())
323 {
324 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
325 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
326 rtx key;
327 enum machine_mode key_mode;
    /* NOTE(review): presumably the number of the internal label emitted
       for this TOC entry — confirm against the TOC output code.  */
328 int labelno;
329 };
330
    /* GC-traced hash table mapping constants to their TOC entries, so each
       constant is emitted into the TOC only once.  */
331 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
332 \f
333 /* Default register names. */
    /* Layout (one entry per hard register number): 32 GPRs, 32 FPRs,
       mq/lr/ctr/ap, 8 condition registers, xer, 32 AltiVec registers,
       vrsave/vscr, then the two SPE registers.  Not const: it may be
       overwritten with alt_reg_names in rs6000_override_options.  */
334 char rs6000_reg_names[][8] =
335 {
336 "0", "1", "2", "3", "4", "5", "6", "7",
337 "8", "9", "10", "11", "12", "13", "14", "15",
338 "16", "17", "18", "19", "20", "21", "22", "23",
339 "24", "25", "26", "27", "28", "29", "30", "31",
340 "0", "1", "2", "3", "4", "5", "6", "7",
341 "8", "9", "10", "11", "12", "13", "14", "15",
342 "16", "17", "18", "19", "20", "21", "22", "23",
343 "24", "25", "26", "27", "28", "29", "30", "31",
344 "mq", "lr", "ctr","ap",
345 "0", "1", "2", "3", "4", "5", "6", "7",
346 "xer",
347 /* AltiVec registers. */
348 "0", "1", "2", "3", "4", "5", "6", "7",
349 "8", "9", "10", "11", "12", "13", "14", "15",
350 "16", "17", "18", "19", "20", "21", "22", "23",
351 "24", "25", "26", "27", "28", "29", "30", "31",
352 "vrsave", "vscr",
353 /* SPE registers. */
354 "spe_acc", "spefscr"
355 };
356
357 #ifdef TARGET_REGNAMES
    /* Alternate, %-prefixed register names; copied over rs6000_reg_names
       in rs6000_override_options when -mregnames is in effect.  Must stay
       index-for-index parallel with rs6000_reg_names above.  */
358 static const char alt_reg_names[][8] =
359 {
360 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
361 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
362 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
363 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
364 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
365 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
366 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
367 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
368 "mq", "lr", "ctr", "ap",
369 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
370 "xer",
371 /* AltiVec registers. */
372 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
373 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
374 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
375 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
376 "vrsave", "vscr",
377 /* SPE registers. */
378 "spe_acc", "spefscr"
379 };
380 #endif
381 \f
    /* Fallback definitions for (sub)targets that do not provide these.  */
382 #ifndef MASK_STRICT_ALIGN
383 #define MASK_STRICT_ALIGN 0
384 #endif
385 #ifndef TARGET_PROFILE_KERNEL
386 #define TARGET_PROFILE_KERNEL 0
387 #endif
388
389 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
390 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
391
392 /* Return 1 for a symbol ref for a thread-local storage symbol. */
393 #define RS6000_SYMBOL_REF_TLS_P(RTX) \
394 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
395 \f
396 /* Initialize the GCC target structure. */
    /* Each #undef/#define pair overrides a default hook in target-def.h
       with this backend's implementation; targetm at the bottom collects
       them all via TARGET_INITIALIZER.  */
397 #undef TARGET_ATTRIBUTE_TABLE
398 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
399 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
400 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
401
402 #undef TARGET_ASM_ALIGNED_DI_OP
403 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
404
405 /* Default unaligned ops are only provided for ELF. Find the ops needed
406 for non-ELF systems. */
407 #ifndef OBJECT_FORMAT_ELF
408 #if TARGET_XCOFF
409 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
410 64-bit targets. */
411 #undef TARGET_ASM_UNALIGNED_HI_OP
412 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
413 #undef TARGET_ASM_UNALIGNED_SI_OP
414 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
415 #undef TARGET_ASM_UNALIGNED_DI_OP
416 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
417 #else
418 /* For Darwin. */
419 #undef TARGET_ASM_UNALIGNED_HI_OP
420 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
421 #undef TARGET_ASM_UNALIGNED_SI_OP
422 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
423 #endif
424 #endif
425
426 /* This hook deals with fixups for relocatable code and DI-mode objects
427 in 64-bit code. */
428 #undef TARGET_ASM_INTEGER
429 #define TARGET_ASM_INTEGER rs6000_assemble_integer
430
431 #ifdef HAVE_GAS_HIDDEN
432 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
433 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
434 #endif
435
436 #undef TARGET_HAVE_TLS
437 #define TARGET_HAVE_TLS HAVE_AS_TLS
438
    /* TLS references cannot be put in the constant pool.  */
439 #undef TARGET_CANNOT_FORCE_CONST_MEM
440 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
441
442 #undef TARGET_ASM_FUNCTION_PROLOGUE
443 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
444 #undef TARGET_ASM_FUNCTION_EPILOGUE
445 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
446
    /* Instruction-scheduler hooks.  */
447 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
448 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
449 #undef TARGET_SCHED_VARIABLE_ISSUE
450 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
451
452 #undef TARGET_SCHED_ISSUE_RATE
453 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
454 #undef TARGET_SCHED_ADJUST_COST
455 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
456 #undef TARGET_SCHED_ADJUST_PRIORITY
457 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
458
459 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
460 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
461
462 #undef TARGET_INIT_BUILTINS
463 #define TARGET_INIT_BUILTINS rs6000_init_builtins
464
465 #undef TARGET_EXPAND_BUILTIN
466 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
467
468 #if TARGET_MACHO
469 #undef TARGET_BINDS_LOCAL_P
470 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
471 #endif
472
473 #undef TARGET_ASM_OUTPUT_MI_THUNK
474 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
475
476 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
477 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
478
479 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
480 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
481
482 #undef TARGET_RTX_COSTS
483 #define TARGET_RTX_COSTS rs6000_rtx_costs
484 #undef TARGET_ADDRESS_COST
485 #define TARGET_ADDRESS_COST hook_int_rtx_0
486
487 #undef TARGET_VECTOR_OPAQUE_P
488 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
489
490 #undef TARGET_DWARF_REGISTER_SPAN
491 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
492
    /* The single definition of this backend's target-hook vector.  */
493 struct gcc_target targetm = TARGET_INITIALIZER;
494 \f
495 /* Override command line options. Mostly we process the processor
496 type and sometimes adjust other TARGET_ options. */
497
    /* DEFAULT_CPU is the configure-time default processor name (from
       --with-cpu); it seeds rs6000_select[0] below, and may be overridden
       by -mcpu= / -mtune= via the other rs6000_select entries.
       Processing order matters throughout this function: table-driven
       flags are applied first, then explicitly-given flags are restored,
       then subtarget macros and E500 fixups run last.  */
498 void
499 rs6000_override_options (default_cpu)
500 const char *default_cpu;
501 {
502 size_t i, j;
503 struct rs6000_cpu_select *ptr;
504
505 /* Simplify the entries below by making a mask for any POWER
506 variant and any PowerPC variant. */
507
508 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
509 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
510 | MASK_PPC_GFXOPT | MASK_POWERPC64)
511 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
512
    /* Per-CPU table: which target flags each -mcpu=/-mtune= name turns on
       and off.  Disables are applied after enables (see the loop below).  */
513 static struct ptt
514 {
515 const char *const name; /* Canonical processor name. */
516 const enum processor_type processor; /* Processor type enum value. */
517 const int target_enable; /* Target flags to enable. */
518 const int target_disable; /* Target flags to disable. */
519 } const processor_target_table[]
520 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
521 POWER_MASKS | POWERPC_MASKS},
522 {"power", PROCESSOR_POWER,
523 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
524 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
525 {"power2", PROCESSOR_POWER,
526 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
527 POWERPC_MASKS | MASK_NEW_MNEMONICS},
528 {"power3", PROCESSOR_PPC630,
529 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
530 POWER_MASKS},
531 {"power4", PROCESSOR_POWER4,
532 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
533 POWER_MASKS},
534 {"powerpc", PROCESSOR_POWERPC,
535 MASK_POWERPC | MASK_NEW_MNEMONICS,
536 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
537 {"powerpc64", PROCESSOR_POWERPC64,
538 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
539 POWER_MASKS | POWERPC_OPT_MASKS},
540 {"rios", PROCESSOR_RIOS1,
541 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
542 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
543 {"rios1", PROCESSOR_RIOS1,
544 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
545 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
546 {"rsc", PROCESSOR_PPC601,
547 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
548 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
549 {"rsc1", PROCESSOR_PPC601,
550 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
551 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
552 {"rios2", PROCESSOR_RIOS2,
553 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
554 POWERPC_MASKS | MASK_NEW_MNEMONICS},
555 {"rs64a", PROCESSOR_RS64A,
556 MASK_POWERPC | MASK_NEW_MNEMONICS,
557 POWER_MASKS | POWERPC_OPT_MASKS},
558 {"401", PROCESSOR_PPC403,
559 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
560 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
561 {"403", PROCESSOR_PPC403,
562 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
563 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
564 {"405", PROCESSOR_PPC405,
565 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
566 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
567 {"405fp", PROCESSOR_PPC405,
568 MASK_POWERPC | MASK_NEW_MNEMONICS,
569 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
570 {"440", PROCESSOR_PPC440,
571 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
572 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
573 {"440fp", PROCESSOR_PPC440,
574 MASK_POWERPC | MASK_NEW_MNEMONICS,
575 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
576 {"505", PROCESSOR_MPCCORE,
577 MASK_POWERPC | MASK_NEW_MNEMONICS,
578 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
579 {"601", PROCESSOR_PPC601,
580 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
581 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
582 {"602", PROCESSOR_PPC603,
583 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
584 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
585 {"603", PROCESSOR_PPC603,
586 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
587 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
588 {"603e", PROCESSOR_PPC603,
589 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
590 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
591 {"ec603e", PROCESSOR_PPC603,
592 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
593 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
594 {"604", PROCESSOR_PPC604,
595 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
596 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
597 {"604e", PROCESSOR_PPC604e,
598 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
599 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
600 {"620", PROCESSOR_PPC620,
601 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
602 POWER_MASKS},
603 {"630", PROCESSOR_PPC630,
604 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
605 POWER_MASKS},
606 {"740", PROCESSOR_PPC750,
607 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
608 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
609 {"750", PROCESSOR_PPC750,
610 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
611 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
612 {"7400", PROCESSOR_PPC7400,
613 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
614 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
615 {"7450", PROCESSOR_PPC7450,
616 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
617 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
618 {"8540", PROCESSOR_PPC8540,
619 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
620 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
621 {"801", PROCESSOR_MPCCORE,
622 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
623 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
624 {"821", PROCESSOR_MPCCORE,
625 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
626 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
627 {"823", PROCESSOR_MPCCORE,
628 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
629 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
630 {"860", PROCESSOR_MPCCORE,
631 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
632 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};
633
634 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
635
636 /* Save current -mmultiple/-mno-multiple status. */
637 int multiple = TARGET_MULTIPLE;
638 /* Save current -mstring/-mno-string status. */
639 int string = TARGET_STRING;
640
641 /* Identify the processor type. */
642 rs6000_select[0].string = default_cpu;
643 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
644
    /* Walk --with-cpu, -mcpu=, -mtune= in that order; later entries can
       override the tuning choice made by earlier ones.  */
645 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
646 {
647 ptr = &rs6000_select[i];
648 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
649 {
650 for (j = 0; j < ptt_size; j++)
651 if (! strcmp (ptr->string, processor_target_table[j].name))
652 {
653 if (ptr->set_tune_p)
654 rs6000_cpu = processor_target_table[j].processor;
655
656 if (ptr->set_arch_p)
657 {
658 target_flags |= processor_target_table[j].target_enable;
659 target_flags &= ~processor_target_table[j].target_disable;
660 }
661 break;
662 }
663
664 if (j == ptt_size)
665 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
666 }
667 }
668
    /* Selecting an E500 CPU turns on isel generation by default
       (it can still be overridden by -misel= below).  */
669 if (TARGET_E500)
670 rs6000_isel = 1;
671
672 /* If we are optimizing big endian systems for space, use the load/store
673 multiple and string instructions. */
674 if (BYTES_BIG_ENDIAN && optimize_size)
675 target_flags |= MASK_MULTIPLE | MASK_STRING;
676
677 /* If -mmultiple or -mno-multiple was explicitly used, don't
678 override with the processor default */
679 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
680 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
681
682 /* If -mstring or -mno-string was explicitly used, don't override
683 with the processor default. */
684 if ((target_flags_explicit & MASK_STRING) != 0)
685 target_flags = (target_flags & ~MASK_STRING) | string;
686
687 /* Don't allow -mmultiple or -mstring on little endian systems
688 unless the cpu is a 750, because the hardware doesn't support the
689 instructions used in little endian mode, and causes an alignment
690 trap. The 750 does not cause an alignment trap (except when the
691 target is unaligned). */
692
693 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
694 {
695 if (TARGET_MULTIPLE)
696 {
697 target_flags &= ~MASK_MULTIPLE;
698 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
699 warning ("-mmultiple is not supported on little endian systems");
700 }
701
702 if (TARGET_STRING)
703 {
704 target_flags &= ~MASK_STRING;
705 if ((target_flags_explicit & MASK_STRING) != 0)
706 warning ("-mstring is not supported on little endian systems");
707 }
708 }
709
710 /* Set debug flags */
711 if (rs6000_debug_name)
712 {
713 if (! strcmp (rs6000_debug_name, "all"))
714 rs6000_debug_stack = rs6000_debug_arg = 1;
715 else if (! strcmp (rs6000_debug_name, "stack"))
716 rs6000_debug_stack = 1;
717 else if (! strcmp (rs6000_debug_name, "arg"))
718 rs6000_debug_arg = 1;
719 else
720 error ("unknown -mdebug-%s switch", rs6000_debug_name);
721 }
722
    /* Decode -mtraceback=; strncmp so that e.g. "partial" matches "part".  */
723 if (rs6000_traceback_name)
724 {
725 if (! strncmp (rs6000_traceback_name, "full", 4))
726 rs6000_traceback = traceback_full;
727 else if (! strncmp (rs6000_traceback_name, "part", 4))
728 rs6000_traceback = traceback_part;
729 else if (! strncmp (rs6000_traceback_name, "no", 2))
730 rs6000_traceback = traceback_none;
731 else
732 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
733 rs6000_traceback_name);
734 }
735
736 /* Set size of long double */
    /* Default 64; -mlong-double-64/-mlong-double-128 are the only valid
       sizes, anything else (or trailing junk after the number) errors.  */
737 rs6000_long_double_type_size = 64;
738 if (rs6000_long_double_size_string)
739 {
740 char *tail;
741 int size = strtol (rs6000_long_double_size_string, &tail, 10);
742 if (*tail != '\0' || (size != 64 && size != 128))
743 error ("Unknown switch -mlong-double-%s",
744 rs6000_long_double_size_string);
745 else
746 rs6000_long_double_type_size = size;
747 }
748
749 /* Handle -mabi= options. */
750 rs6000_parse_abi_options ();
751
752 /* Handle -malign-XXXXX option. */
753 rs6000_parse_alignment_option ();
754
755 /* Handle generic -mFOO=YES/NO options. */
756 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
757 &rs6000_altivec_vrsave);
758 rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
759 &rs6000_isel);
760 rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
761 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string,
762 &rs6000_float_gprs);
763
764 /* Handle -mtls-size option. */
765 rs6000_parse_tls_size_option ();
766
767 #ifdef SUBTARGET_OVERRIDE_OPTIONS
768 SUBTARGET_OVERRIDE_OPTIONS;
769 #endif
770 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
771 SUBSUBTARGET_OVERRIDE_OPTIONS;
772 #endif
773
774 if (TARGET_E500)
775 {
776 /* The e500 does not have string instructions, and we set
777 MASK_STRING above when optimizing for size. */
778 if ((target_flags & MASK_STRING) != 0)
779 target_flags = target_flags & ~MASK_STRING;
780
781 /* No SPE means 64-bit long doubles, even if an E500. */
782 if (rs6000_spe_string != 0
783 && !strcmp (rs6000_spe_string, "no"))
784 rs6000_long_double_type_size = 64;
785 }
    /* rs6000_select[1] is the -mcpu= entry, so this branch means "a
       non-E500 CPU was named explicitly".  */
786 else if (rs6000_select[1].string != NULL)
787 {
788 /* For the powerpc-eabispe configuration, we set all these by
789 default, so let's unset them if we manually set another
790 CPU that is not the E500. */
791 if (rs6000_abi_string == 0)
792 rs6000_spe_abi = 0;
793 if (rs6000_spe_string == 0)
794 rs6000_spe = 0;
795 if (rs6000_float_gprs_string == 0)
796 rs6000_float_gprs = 0;
797 if (rs6000_isel_string == 0)
798 rs6000_isel = 0;
799 if (rs6000_long_double_size_string == 0)
800 rs6000_long_double_type_size = 64;
801 }
802
803 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
804 using TARGET_OPTIONS to handle a toggle switch, but we're out of
805 bits in target_flags so TARGET_SWITCHES cannot be used.
806 Assumption here is that rs6000_longcall_switch points into the
807 text of the complete option, rather than being a copy, so we can
808 scan back for the presence or absence of the no- modifier. */
809 if (rs6000_longcall_switch)
810 {
811 const char *base = rs6000_longcall_switch;
812 while (base[-1] != 'm') base--;
813
    /* The option is a bare toggle; any text after "longcall" is invalid.  */
814 if (*rs6000_longcall_switch != '\0')
815 error ("invalid option `%s'", base);
    /* BASE now starts just after "-m": 'n' means the "no-" form.  */
816 rs6000_default_long_calls = (base[0] != 'n');
817 }
818
819 #ifdef TARGET_REGNAMES
820 /* If the user desires alternate register names, copy in the
821 alternate names now. */
822 if (TARGET_REGNAMES)
823 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
824 #endif
825
826 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
827 If -maix-struct-return or -msvr4-struct-return was explicitly
828 used, don't override with the ABI default. */
829 if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
830 {
831 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
832 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
833 else
834 target_flags |= MASK_AIX_STRUCT_RET;
835 }
836
    /* 128-bit long double on AIX/Darwin uses the IBM double-double format.  */
837 if (TARGET_LONG_DOUBLE_128
838 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
839 real_format_for_mode[TFmode - QFmode] = &ibm_extended_format;
840
841 /* Allocate an alias set for register saves & restores from stack. */
842 rs6000_sr_alias_set = new_alias_set ();
843
844 if (TARGET_TOC)
845 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
846
847 /* We can only guarantee the availability of DI pseudo-ops when
848 assembling for 64-bit targets. */
849 if (!TARGET_64BIT)
850 {
851 targetm.asm_out.aligned_op.di = NULL;
852 targetm.asm_out.unaligned_op.di = NULL;
853 }
854
855 /* Set maximum branch target alignment at two instructions, eight bytes. */
856 align_jumps_max_skip = 8;
857 align_loops_max_skip = 8;
858
859 /* Arrange to save and restore machine status around nested functions. */
860 init_machine_status = rs6000_init_machine_status;
861 }
862
/* Handle generic options of the form -mfoo=yes/no.
   NAME is the option name.
   VALUE is the option value (may be null when the option was not given).
   FLAG points to the int that receives 1 for "yes" or 0 for "no".
   Any other value is diagnosed with error().  */
static void
rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
{
  if (value == 0)
    return;

  if (strcmp (value, "yes") == 0)
    *flag = 1;
  else if (strcmp (value, "no") == 0)
    *flag = 0;
  else
    error ("unknown -m%s= option specified: '%s'", name, value);
}
880
881 /* Handle -mabi= options. */
882 static void
883 rs6000_parse_abi_options ()
884 {
885 if (rs6000_abi_string == 0)
886 return;
887 else if (! strcmp (rs6000_abi_string, "altivec"))
888 rs6000_altivec_abi = 1;
889 else if (! strcmp (rs6000_abi_string, "no-altivec"))
890 rs6000_altivec_abi = 0;
891 else if (! strcmp (rs6000_abi_string, "spe"))
892 {
893 rs6000_spe_abi = 1;
894 if (!TARGET_SPE_ABI)
895 error ("not configured for ABI: '%s'", rs6000_abi_string);
896 }
897
898 else if (! strcmp (rs6000_abi_string, "no-spe"))
899 rs6000_spe_abi = 0;
900 else
901 error ("unknown ABI specified: '%s'", rs6000_abi_string);
902 }
903
904 /* Handle -malign-XXXXXX options. */
905 static void
906 rs6000_parse_alignment_option ()
907 {
908 if (rs6000_alignment_string == 0
909 || ! strcmp (rs6000_alignment_string, "power"))
910 rs6000_alignment_flags = MASK_ALIGN_POWER;
911 else if (! strcmp (rs6000_alignment_string, "natural"))
912 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
913 else
914 error ("unknown -malign-XXXXX option specified: '%s'",
915 rs6000_alignment_string);
916 }
917
918 /* Validate and record the size specified with the -mtls-size option. */
919
920 static void
921 rs6000_parse_tls_size_option ()
922 {
923 if (rs6000_tls_size_string == 0)
924 return;
925 else if (strcmp (rs6000_tls_size_string, "16") == 0)
926 rs6000_tls_size = 16;
927 else if (strcmp (rs6000_tls_size_string, "32") == 0)
928 rs6000_tls_size = 32;
929 else if (strcmp (rs6000_tls_size_string, "64") == 0)
930 rs6000_tls_size = 64;
931 else
932 error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string);
933 }
934
/* Per-optimization-level target adjustments (presumably wired to the
   OPTIMIZATION_OPTIONS target macro — confirm in rs6000.h).  The
   rs6000 port makes no level- or size-specific adjustments, so the
   body is intentionally empty.  */
void
optimization_options (level, size)
     int level ATTRIBUTE_UNUSED;
     int size ATTRIBUTE_UNUSED;
{
}
941 \f
/* Do anything needed at the start of the asm file.  Under
   -fverbose-asm this emits a single assembler comment line listing
   the cpu/tune selections and (for ELF targets) the small-data
   options in effect.  FILE is the output stream; DEFAULT_CPU fills
   in rs6000_select[0] when the user gave no explicit -mcpu=.  */

void
rs6000_file_start (file, default_cpu)
     FILE *file;
     const char *default_cpu;
{
  size_t i;
  char buffer[80];
  const char *start = buffer;
  struct rs6000_cpu_select *ptr;

  if (flag_verbose_asm)
    {
      /* START points at the "\n<comment-char> ..." prefix for the
	 first item printed and at "" for every later item, so all
	 options land on one comment line.  */
      sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
      rs6000_select[0].string = default_cpu;

      for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
	{
	  ptr = &rs6000_select[i];
	  if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	    {
	      fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
	      start = "";
	    }
	}

#ifdef USING_ELFOS_H
      switch (rs6000_sdata)
	{
	case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
	case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
	case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
	case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
	}

      if (rs6000_sdata && g_switch_value)
	{
	  fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
		   g_switch_value);
	  start = "";
	}
#endif

      /* START becomes "" as soon as anything was printed; only then
	 does the comment line need terminating.  */
      if (*start == '\0')
	putc ('\n', file);
    }
}
990 \f
991 /* Return nonzero if this function is known to have a null epilogue. */
992
993 int
994 direct_return ()
995 {
996 if (reload_completed)
997 {
998 rs6000_stack_t *info = rs6000_stack_info ();
999
1000 if (info->first_gp_reg_save == 32
1001 && info->first_fp_reg_save == 64
1002 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1003 && ! info->lr_save_p
1004 && ! info->cr_save_p
1005 && info->vrsave_mask == 0
1006 && ! info->push_p)
1007 return 1;
1008 }
1009
1010 return 0;
1011 }
1012
1013 /* Returns 1 always. */
1014
1015 int
1016 any_operand (op, mode)
1017 rtx op ATTRIBUTE_UNUSED;
1018 enum machine_mode mode ATTRIBUTE_UNUSED;
1019 {
1020 return 1;
1021 }
1022
1023 /* Returns 1 if op is the count register. */
1024 int
1025 count_register_operand (op, mode)
1026 rtx op;
1027 enum machine_mode mode ATTRIBUTE_UNUSED;
1028 {
1029 if (GET_CODE (op) != REG)
1030 return 0;
1031
1032 if (REGNO (op) == COUNT_REGISTER_REGNUM)
1033 return 1;
1034
1035 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
1036 return 1;
1037
1038 return 0;
1039 }
1040
1041 /* Returns 1 if op is an altivec register. */
1042 int
1043 altivec_register_operand (op, mode)
1044 rtx op;
1045 enum machine_mode mode ATTRIBUTE_UNUSED;
1046 {
1047
1048 return (register_operand (op, mode)
1049 && (GET_CODE (op) != REG
1050 || REGNO (op) > FIRST_PSEUDO_REGISTER
1051 || ALTIVEC_REGNO_P (REGNO (op))));
1052 }
1053
1054 int
1055 xer_operand (op, mode)
1056 rtx op;
1057 enum machine_mode mode ATTRIBUTE_UNUSED;
1058 {
1059 if (GET_CODE (op) != REG)
1060 return 0;
1061
1062 if (XER_REGNO_P (REGNO (op)))
1063 return 1;
1064
1065 return 0;
1066 }
1067
1068 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1069 by such constants completes more quickly. */
1070
1071 int
1072 s8bit_cint_operand (op, mode)
1073 rtx op;
1074 enum machine_mode mode ATTRIBUTE_UNUSED;
1075 {
1076 return ( GET_CODE (op) == CONST_INT
1077 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
1078 }
1079
1080 /* Return 1 if OP is a constant that can fit in a D field. */
1081
1082 int
1083 short_cint_operand (op, mode)
1084 rtx op;
1085 enum machine_mode mode ATTRIBUTE_UNUSED;
1086 {
1087 return (GET_CODE (op) == CONST_INT
1088 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1089 }
1090
1091 /* Similar for an unsigned D field. */
1092
1093 int
1094 u_short_cint_operand (op, mode)
1095 rtx op;
1096 enum machine_mode mode ATTRIBUTE_UNUSED;
1097 {
1098 return (GET_CODE (op) == CONST_INT
1099 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1100 }
1101
1102 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1103
1104 int
1105 non_short_cint_operand (op, mode)
1106 rtx op;
1107 enum machine_mode mode ATTRIBUTE_UNUSED;
1108 {
1109 return (GET_CODE (op) == CONST_INT
1110 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1111 }
1112
1113 /* Returns 1 if OP is a CONST_INT that is a positive value
1114 and an exact power of 2. */
1115
1116 int
1117 exact_log2_cint_operand (op, mode)
1118 rtx op;
1119 enum machine_mode mode ATTRIBUTE_UNUSED;
1120 {
1121 return (GET_CODE (op) == CONST_INT
1122 && INTVAL (op) > 0
1123 && exact_log2 (INTVAL (op)) >= 0);
1124 }
1125
/* Returns 1 if OP is a register that is not special (i.e., not MQ,
   ctr, or lr).  */

int
gpc_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Accept any register_operand that is not a hard register in the
     special range: numbers below MQ_REGNO pass, as do numbers at or
     above ARG_POINTER_REGNUM, except the XER.  NOTE(review): this
     assumes the special registers (MQ, LR, CTR, CRs, ...) occupy
     [MQ_REGNO, ARG_POINTER_REGNUM) — confirm against the register
     numbering in rs6000.h.  */
  return (register_operand (op, mode)
	  && (GET_CODE (op) != REG
	      || (REGNO (op) >= ARG_POINTER_REGNUM
		  && !XER_REGNO_P (REGNO (op)))
	      || REGNO (op) < MQ_REGNO));
}
1140
1141 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1142 CR field. */
1143
1144 int
1145 cc_reg_operand (op, mode)
1146 rtx op;
1147 enum machine_mode mode;
1148 {
1149 return (register_operand (op, mode)
1150 && (GET_CODE (op) != REG
1151 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1152 || CR_REGNO_P (REGNO (op))));
1153 }
1154
1155 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1156 CR field that isn't CR0. */
1157
1158 int
1159 cc_reg_not_cr0_operand (op, mode)
1160 rtx op;
1161 enum machine_mode mode;
1162 {
1163 return (register_operand (op, mode)
1164 && (GET_CODE (op) != REG
1165 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1166 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1167 }
1168
1169 /* Returns 1 if OP is either a constant integer valid for a D-field or
1170 a non-special register. If a register, it must be in the proper
1171 mode unless MODE is VOIDmode. */
1172
1173 int
1174 reg_or_short_operand (op, mode)
1175 rtx op;
1176 enum machine_mode mode;
1177 {
1178 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1179 }
1180
1181 /* Similar, except check if the negation of the constant would be
1182 valid for a D-field. */
1183
1184 int
1185 reg_or_neg_short_operand (op, mode)
1186 rtx op;
1187 enum machine_mode mode;
1188 {
1189 if (GET_CODE (op) == CONST_INT)
1190 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1191
1192 return gpc_reg_operand (op, mode);
1193 }
1194
1195 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1196 a non-special register. If a register, it must be in the proper
1197 mode unless MODE is VOIDmode. */
1198
1199 int
1200 reg_or_aligned_short_operand (op, mode)
1201 rtx op;
1202 enum machine_mode mode;
1203 {
1204 if (gpc_reg_operand (op, mode))
1205 return 1;
1206 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1207 return 1;
1208
1209 return 0;
1210 }
1211
1212
1213 /* Return 1 if the operand is either a register or an integer whose
1214 high-order 16 bits are zero. */
1215
1216 int
1217 reg_or_u_short_operand (op, mode)
1218 rtx op;
1219 enum machine_mode mode;
1220 {
1221 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1222 }
1223
1224 /* Return 1 is the operand is either a non-special register or ANY
1225 constant integer. */
1226
1227 int
1228 reg_or_cint_operand (op, mode)
1229 rtx op;
1230 enum machine_mode mode;
1231 {
1232 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1233 }
1234
/* Return 1 if the operand is either a non-special register or ANY
   32-bit signed constant integer.  */

int
reg_or_arith_cint_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT != 32
	      /* On a wider host, require the value to sign-extend from
		 32 bits (the bias by 0x80000000 maps the signed 32-bit
		 range onto [0, 2^32)).  A 32-bit host cannot hold
		 anything wider, so no check is needed there.  */
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
		  < (unsigned HOST_WIDE_INT) 0x100000000ll)
#endif
	      ));
}
1251
/* Return 1 if the operand is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit addition.  */

int
reg_or_add_cint64_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT == 32
	      /* Reject values at or above 0x7fff8000: the low-half
		 carry would overflow the positive high part.  */
	      && INTVAL (op) < 0x7fff8000
#else
	      /* Bias by 0x80008000 so that exactly the values
		 reachable by a (high, low) 16-bit pair — presumably an
		 addis/addi sequence — fall inside [0, 2^32).  */
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
1270
/* Return 1 if the operand is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit subtraction (i.e. the
   negated value passes the reg_or_add_cint64_operand range check).  */

int
reg_or_sub_cint64_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT == 32
	      /* Same bound as the addition case, applied to -OP.  */
	      && (- INTVAL (op)) < 0x7fff8000
#else
	      /* Same biased-range test as the addition case, applied
		 to the negated value.  */
	      && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
1289
/* Return 1 if the operand is either a non-special register or ANY
   32-bit unsigned constant integer.  */

int
reg_or_logical_cint_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	{
	  /* This case only makes sense for modes wider than 32 bits
	     on a 32-bit host; anything else indicates a caller bug.  */
	  if (GET_MODE_BITSIZE (mode) <= 32)
	    abort ();

	  /* A negative CONST_INT sign-extends past bit 31, so it
	     cannot represent a 32-bit unsigned value here.  */
	  if (INTVAL (op) < 0)
	    return 0;
	}

      /* Accept iff no bits above the low 32 survive masking to MODE.  */
      return ((INTVAL (op) & GET_MODE_MASK (mode)
	       & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* Integer CONST_DOUBLEs are only expected for DImode values too
	 wide for a host word.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  || mode != DImode)
	abort ();

      /* 32-bit unsigned iff the high word is entirely zero.  */
      return CONST_DOUBLE_HIGH (op) == 0;
    }
  else
    return gpc_reg_operand (op, mode);
}
1323
1324 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1325
1326 int
1327 got_operand (op, mode)
1328 rtx op;
1329 enum machine_mode mode ATTRIBUTE_UNUSED;
1330 {
1331 return (GET_CODE (op) == SYMBOL_REF
1332 || GET_CODE (op) == CONST
1333 || GET_CODE (op) == LABEL_REF);
1334 }
1335
1336 /* Return 1 if the operand is a simple references that can be loaded via
1337 the GOT (labels involving addition aren't allowed). */
1338
1339 int
1340 got_no_const_operand (op, mode)
1341 rtx op;
1342 enum machine_mode mode ATTRIBUTE_UNUSED;
1343 {
1344 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1345 }
1346
/* Return the number of instructions it takes to form the constant
   VALUE in an integer register.  */

static int
num_insns_constant_wide (value)
     HOST_WIDE_INT value;
{
  /* signed constant loadable with {cal|addi} */
  if (CONST_OK_FOR_LETTER_P (value, 'I'))
    return 1;

  /* constant loadable with {cau|addis} */
  else if (CONST_OK_FOR_LETTER_P (value, 'L'))
    return 1;

#if HOST_BITS_PER_WIDE_INT == 64
  else if (TARGET_POWERPC64)
    {
      /* LOW is the low 32 bits, sign-extended.  HIGH is value >> 31
	 for the moment, so that HIGH == 0 or -1 exactly when VALUE
	 fits in a sign-extended 32 bits.  */
      HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
      HOST_WIDE_INT high = value >> 31;

      /* Anything representable in 32 signed bits takes two insns.  */
      if (high == 0 || high == -1)
	return 2;

      /* Make HIGH the true upper word (value >> 32).  */
      high >>= 1;

      if (low == 0)
	/* Build the high part, then one shift.  */
	return num_insns_constant_wide (high) + 1;
      else
	/* Build the high part, shift, then merge in the low part.  */
	return (num_insns_constant_wide (high)
		+ num_insns_constant_wide (low) + 1);
    }
#endif

  else
    return 2;
}
1384
/* Return the number of instructions needed to load constant OP of
   mode MODE into an integer register.  OP must be a CONST_INT or a
   CONST_DOUBLE (integer or floating); anything else aborts.  */
int
num_insns_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
#if HOST_BITS_PER_WIDE_INT == 64
      /* A 64-bit value that is a valid mask can be made with
	 li/lis plus one rotate-and-mask insn.  */
      if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
	  && mask64_operand (op, mode))
	return 2;
      else
#endif
	return num_insns_constant_wide (INTVAL (op));
    }

  else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
    {
      /* A single-precision float is loaded via its 32-bit image.  */
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);
      return num_insns_constant_wide ((HOST_WIDE_INT) l);
    }

  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      HOST_WIDE_INT low;
      HOST_WIDE_INT high;
      long l[2];
      REAL_VALUE_TYPE rv;
      int endian = (WORDS_BIG_ENDIAN == 0);

      /* Split the value into its two 32-bit words: directly for
	 integer CONST_DOUBLEs, via the target image for floats.  */
      if (mode == VOIDmode || mode == DImode)
	{
	  high = CONST_DOUBLE_HIGH (op);
	  low = CONST_DOUBLE_LOW (op);
	}
      else
	{
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
	  REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
	  high = l[endian];
	  low = l[1 - endian];
	}

      if (TARGET_32BIT)
	/* Two 32-bit halves, built independently.  */
	return (num_insns_constant_wide (low)
		+ num_insns_constant_wide (high));

      else
	{
	  /* 64-bit target: cheaper sequences when the value is a
	     sign-extension of its low word, a mask, or has a zero
	     low word (high part plus one shift).  */
	  if (high == 0 && low >= 0)
	    return num_insns_constant_wide (low);

	  else if (high == -1 && low < 0)
	    return num_insns_constant_wide (low);

	  else if (mask64_operand (op, mode))
	    return 2;

	  else if (low == 0)
	    return num_insns_constant_wide (high) + 1;

	  else
	    return (num_insns_constant_wide (high)
		    + num_insns_constant_wide (low) + 1);
	}
    }

  else
    abort ();
}
1459
/* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
   register with one instruction per word.  We only do this if we can
   safely read CONST_DOUBLE_{LOW,HIGH}.  */

int
easy_fp_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Only CONST_DOUBLEs of a matching float mode (or DImode) apply.  */
  if (GET_CODE (op) != CONST_DOUBLE
      || GET_MODE (op) != mode
      || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
    return 0;

  /* Consider all constants with -msoft-float to be easy.  */
  if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
      && mode != DImode)
    return 1;

  /* If we are using V.4 style PIC, consider all constants to be hard.  */
  if (flag_pic && DEFAULT_ABI == ABI_V4)
    return 0;

#ifdef TARGET_RELOCATABLE
  /* Similarly if we are using -mrelocatable, consider all constants
     to be hard.  */
  if (TARGET_RELOCATABLE)
    return 0;
#endif

  if (mode == TFmode)
    {
      /* Easy iff each of the four 32-bit words loads in one insn.  */
      long k[4];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
    }

  else if (mode == DFmode)
    {
      /* Easy iff both 32-bit words load in one insn each.  */
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
    }

  else if (mode == SFmode)
    {
      /* Easy iff the single 32-bit image loads in one insn.  */
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      return num_insns_constant_wide (l) == 1;
    }

  else if (mode == DImode)
    /* A 64-bit integer image: easy if the low word is zero on a
       64-bit target, or if it needs at most two insns overall.  */
    return ((TARGET_POWERPC64
	     && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
	    || (num_insns_constant (op, DImode) <= 2));

  else if (mode == SImode)
    return 1;
  else
    abort ();
}
1537
1538 /* Return nonzero if all elements of a vector have the same value. */
1539
1540 static int
1541 easy_vector_same (op, mode)
1542 rtx op;
1543 enum machine_mode mode ATTRIBUTE_UNUSED;
1544 {
1545 int units, i, cst;
1546
1547 units = CONST_VECTOR_NUNITS (op);
1548
1549 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1550 for (i = 1; i < units; ++i)
1551 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
1552 break;
1553 if (i == units)
1554 return 1;
1555 return 0;
1556 }
1557
/* Return 1 if the operand is a vector constant that can be put into a
   register without using memory.  */

int
easy_vector_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  int cst, cst2;

  /* Only CONST_VECTORs on a vector-capable target apply at all.  */
  if (GET_CODE (op) != CONST_VECTOR
      || (!TARGET_ALTIVEC
	  && !TARGET_SPE))
    return 0;

  /* The all-zero vector is always easy in a supported vector mode.  */
  if (zero_constant (op, mode)
      && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
	  || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
    return 1;

  if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
    return 0;

  if (TARGET_SPE && mode == V1DImode)
    return 0;

  /* NOTE(review): reading element 1 assumes every remaining mode has
     at least two elements — V1DImode was just excluded; confirm no
     other single-element integer vector mode reaches here.  */
  cst = INTVAL (CONST_VECTOR_ELT (op, 0));
  cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));

  /* Limit SPE vectors to 15 bits signed.  These we can generate with:
	li r0, CONSTANT1
	evmergelo r0, r0, r0
	li r0, CONSTANT2

     I don't know how efficient it would be to allow bigger constants,
     considering we'll have an extra 'ori' for every 'li'.  I doubt 5
     instructions is better than a 64-bit memory load, but I don't
     have the e500 timing specs.  */
  if (TARGET_SPE && mode == V2SImode
      && cst >= -0x7fff && cst <= 0x7fff
      && cst2 >= -0x7fff && cst2 <= 0x7fff)
    return 1;

  /* AltiVec: a splat of a 5-bit signed immediate is one vspltis*.  */
  if (TARGET_ALTIVEC && EASY_VECTOR_15 (cst, op, mode))
    return 1;

  /* AltiVec: or a value reachable as such a splat added to itself.  */
  if (TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode))
    return 1;

  return 0;
}
1609
1610 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
1611
1612 int
1613 easy_vector_constant_add_self (op, mode)
1614 rtx op;
1615 enum machine_mode mode;
1616 {
1617 int cst;
1618
1619 if (!easy_vector_constant (op, mode))
1620 return 0;
1621
1622 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1623
1624 return TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode);
1625 }
1626
/* Emit the assembler template for moving an easy vector constant
   (OPERANDS[1]) into a vector register (OPERANDS[0]).  May rewrite
   OPERANDS[1] (and, for SPE, OPERANDS[2]) to the scalar immediates
   used by the template.  Aborts on constants that are not "easy".  */
const char *
output_vec_const_move (operands)
     rtx *operands;
{
  int cst, cst2;
  enum machine_mode mode;
  rtx dest, vec;

  dest = operands[0];
  vec = operands[1];

  /* The first two element values drive all the cases below.  */
  cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
  cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
  mode = GET_MODE (dest);

  if (TARGET_ALTIVEC)
    {
      if (zero_constant (vec, mode))
	return "vxor %0,%0,%0";
      else if (EASY_VECTOR_15 (cst, vec, mode))
	{
	  /* Splat the 5-bit immediate with the vspltis* matching the
	     element width.  */
	  operands[1] = GEN_INT (cst);
	  switch (mode)
	    {
	    case V4SImode:
	      return "vspltisw %0,%1";
	    case V8HImode:
	      return "vspltish %0,%1";
	    case V16QImode:
	      return "vspltisb %0,%1";
	    default:
	      abort ();
	    }
	}
      else if (EASY_VECTOR_15_ADD_SELF (cst, vec, mode))
	/* Handled by a splitter; tell the caller to split.  */
	return "#";
      else
	abort ();
    }

  if (TARGET_SPE)
    {
      /* Vector constant 0 is handled as a splitter of V2SI, and in the
	 pattern of V1DI, V4HI, and V2SF.

	 FIXME: We should probably return # and add post reload
	 splitters for these, but this way is so easy ;-).
      */
      operands[1] = GEN_INT (cst);
      operands[2] = GEN_INT (cst2);
      if (cst == cst2)
	return "li %0,%1\n\tevmergelo %0,%0,%0";
      else
	return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
    }

  abort ();
}
1685
1686 /* Return 1 if the operand is the constant 0. This works for scalars
1687 as well as vectors. */
1688 int
1689 zero_constant (op, mode)
1690 rtx op;
1691 enum machine_mode mode;
1692 {
1693 return op == CONST0_RTX (mode);
1694 }
1695
1696 /* Return 1 if the operand is 0.0. */
1697 int
1698 zero_fp_constant (op, mode)
1699 rtx op;
1700 enum machine_mode mode;
1701 {
1702 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1703 }
1704
/* Return 1 if the operand is in volatile memory.  Note that during
   the RTL generation phase, memory_operand does not return TRUE for
   volatile memory references.  So this function allows us to
   recognize volatile references where it's safe.  */

int
volatile_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != MEM)
    return 0;

  if (!MEM_VOLATILE_P (op))
    return 0;

  if (mode != GET_MODE (op))
    return 0;

  /* After reload every address must be strictly valid and
     memory_operand accepts volatile references, so defer to it.  */
  if (reload_completed)
    return memory_operand (op, mode);

  /* During reload, apply the strict address check directly.  */
  if (reload_in_progress)
    return strict_memory_address_p (mode, XEXP (op, 0));

  /* Before reload, the relaxed (non-strict) check suffices.  */
  return memory_address_p (mode, XEXP (op, 0));
}
1732
1733 /* Return 1 if the operand is an offsettable memory operand. */
1734
1735 int
1736 offsettable_mem_operand (op, mode)
1737 rtx op;
1738 enum machine_mode mode;
1739 {
1740 return ((GET_CODE (op) == MEM)
1741 && offsettable_address_p (reload_completed || reload_in_progress,
1742 mode, XEXP (op, 0)));
1743 }
1744
1745 /* Return 1 if the operand is either an easy FP constant (see above) or
1746 memory. */
1747
1748 int
1749 mem_or_easy_const_operand (op, mode)
1750 rtx op;
1751 enum machine_mode mode;
1752 {
1753 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1754 }
1755
1756 /* Return 1 if the operand is either a non-special register or an item
1757 that can be used as the operand of a `mode' add insn. */
1758
1759 int
1760 add_operand (op, mode)
1761 rtx op;
1762 enum machine_mode mode;
1763 {
1764 if (GET_CODE (op) == CONST_INT)
1765 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1766 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1767
1768 return gpc_reg_operand (op, mode);
1769 }
1770
1771 /* Return 1 if OP is a constant but not a valid add_operand. */
1772
1773 int
1774 non_add_cint_operand (op, mode)
1775 rtx op;
1776 enum machine_mode mode ATTRIBUTE_UNUSED;
1777 {
1778 return (GET_CODE (op) == CONST_INT
1779 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1780 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1781 }
1782
/* Return 1 if the operand is a non-special register or a constant that
   can be used as the operand of an OR or XOR insn on the RS/6000:
   a value whose set bits lie entirely in the low 16 or entirely in
   the next-higher 16 bits (so one ori/oris-style insn suffices).  */

int
logical_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* OPH is only assigned and examined on the CONST_DOUBLE path; the
     final test uses OPL alone.  */
  HOST_WIDE_INT opl, oph;

  if (gpc_reg_operand (op, mode))
    return 1;

  if (GET_CODE (op) == CONST_INT)
    {
      opl = INTVAL (op) & GET_MODE_MASK (mode);

#if HOST_BITS_PER_WIDE_INT <= 32
      /* On a 32-bit host a negative CONST_INT for a wider mode implies
	 set bits beyond the host word — reject it.  */
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
	return 0;
#endif
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* Integer CONST_DOUBLEs only occur when the mode is wider than
	 a host word.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	abort ();

      opl = CONST_DOUBLE_LOW (op);
      oph = CONST_DOUBLE_HIGH (op);
      /* Any bit in the high word puts the value out of reach.  */
      if (oph != 0)
	return 0;
    }
  else
    return 0;

  /* Accept iff all set bits fit in one 16-bit half.  */
  return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
	  || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
}
1821
1822 /* Return 1 if C is a constant that is not a logical operand (as
1823 above), but could be split into one. */
1824
1825 int
1826 non_logical_cint_operand (op, mode)
1827 rtx op;
1828 enum machine_mode mode;
1829 {
1830 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1831 && ! logical_operand (op, mode)
1832 && reg_or_logical_cint_operand (op, mode));
1833 }
1834
/* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
   RS/6000.  It is if there are no more than two 1->0 or 0->1 transitions.
   Reject all ones and all zeros, since these should have been optimized
   away and confuse the making of MB and ME.  (Throughout, c & -c
   isolates the lowest set bit.)  */

int
mask_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Fail in 64-bit mode if the mask wraps around because the upper
     32-bits of the mask will all be 1s, contrary to GCC's internal view.  */
  if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
    return 0;

  /* We don't change the number of transitions by inverting,
     so make sure we start with the LS bit zero.  */
  if (c & 1)
    c = ~c;

  /* Reject all zeros or all ones.  */
  if (c == 0)
    return 0;

  /* Find the first transition.  */
  lsb = c & -c;

  /* Invert to look for a second transition.  */
  c = ~c;

  /* Erase first transition.  */
  c &= -lsb;

  /* Find the second transition (if any).  */
  lsb = c & -c;

  /* Match if all the bits above are 1's (or c is zero).  */
  return c == -lsb;
}
1881
/* Return 1 for the PowerPC64 rlwinm corner case: a 32-bit mask that
   wraps around (both bit 0 and bit 31 set), which mask_operand above
   rejects on 64-bit targets.  Uses the same lowest-set-bit (c & -c)
   transition-counting as mask_operand, on the complemented value.  */

int
mask_operand_wrap (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Only the wrap-around case belongs here.  */
  if ((c & 0x80000001) != 0x80000001)
    return 0;

  /* Work on the complement (its LS bit is zero); reject all-ones.  */
  c = ~c;
  if (c == 0)
    return 0;

  /* First transition.  */
  lsb = c & -c;
  /* Invert and erase it to look for a second transition.  */
  c = ~c;
  c &= -lsb;
  /* Second transition (if any); match if only 1's remain above it.  */
  lsb = c & -c;
  return c == -lsb;
}
1909
/* Return 1 if the operand is a constant that is a PowerPC64 mask.
   It is if there are no more than one 1->0 or 0->1 transitions.
   Reject all zeros, since zero should have been optimized away and
   confuses the making of MB and ME.  */

int
mask64_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb;

      c = INTVAL (op);

      /* Reject all zeros.  */
      if (c == 0)
	return 0;

      /* We don't change the number of transitions by inverting,
	 so make sure we start with the LS bit zero.  */
      if (c & 1)
	c = ~c;

      /* Find the transition, and check that all bits above are 1's
	 (c & -c isolates the lowest set bit).  */
      lsb = c & -c;

      /* Match if all the bits above are 1's (or c is zero).  */
      return c == -lsb;
    }
  return 0;
}
1943
/* Like mask64_operand, but allow up to three transitions.  This
   predicate is used by insn patterns that generate two rldicl or
   rldicr machine insns.  */

int
mask64_2_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb;

      c = INTVAL (op);

      /* Disallow all zeros.  */
      if (c == 0)
	return 0;

      /* We don't change the number of transitions by inverting,
	 so make sure we start with the LS bit zero.  */
      if (c & 1)
	c = ~c;

      /* Find the first transition (c & -c isolates the lowest set
	 bit).  */
      lsb = c & -c;

      /* Invert to look for a second transition.  */
      c = ~c;

      /* Erase first transition.  */
      c &= -lsb;

      /* Find the second transition.  */
      lsb = c & -c;

      /* Invert to look for a third transition.  */
      c = ~c;

      /* Erase second transition.  */
      c &= -lsb;

      /* Find the third transition (if any).  */
      lsb = c & -c;

      /* Match if all the bits above are 1's (or c is zero).  */
      return c == -lsb;
    }
  return 0;
}
1994
/* Generates shifts and masks for a pair of rldicl or rldicr insns to
   implement ANDing by the mask IN (a CONST_INT accepted by
   mask64_2_operand).  On return OUT[0] and OUT[2] hold the two rotate
   counts and OUT[1] and OUT[3] the corresponding masks.  Only usable
   on hosts with 64-bit HOST_WIDE_INT; aborts otherwise.  */
void
build_mask64_2_operands (in, out)
     rtx in;
     rtx *out;
{
#if HOST_BITS_PER_WIDE_INT >= 64
  unsigned HOST_WIDE_INT c, lsb, m1, m2;
  int shift;

  if (GET_CODE (in) != CONST_INT)
    abort ();

  c = INTVAL (in);
  if (c & 1)
    {
      /* Assume c initially something like 0x00fff000000fffff.  The idea
	 is to rotate the word so that the middle ^^^^^^ group of zeros
	 is at the MS end and can be cleared with an rldicl mask.  We then
	 rotate back and clear off the MS        ^^ group of zeros with a
	 second rldicl.  */
      c = ~c;			/*   c == 0xff000ffffff00000 */
      lsb = c & -c;		/* lsb == 0x0000000000100000 */
      m1 = -lsb;		/*  m1 == 0xfffffffffff00000 */
      c = ~c;			/*   c == 0x00fff000000fffff */
      c &= -lsb;		/*   c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/*   c == 0xff000fffffffffff */
      c &= -lsb;		/*   c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 <<= 64 - shift;	/*  m1 == 0xffffff0000000000 */
      m1 = ~m1;			/*  m1 == 0x000000ffffffffff */
      m2 = ~c;			/*  m2 == 0x00ffffffffffffff */
    }
  else
    {
      /* Assume c initially something like 0xff000f0000000000.  The idea
	 is to rotate the word so that the     ^^^  middle group of zeros
	 is at the LS end and can be cleared with an rldicr mask.  We then
	 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
	 a second rldicr.  */
      lsb = c & -c;		/* lsb == 0x0000010000000000 */
      m2 = -lsb;		/*  m2 == 0xffffff0000000000 */
      c = ~c;			/*   c == 0x00fff0ffffffffff */
      c &= -lsb;		/*   c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/*   c == 0xff000fffffffffff */
      c &= -lsb;		/*   c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 = ~c;			/*  m1 == 0x00ffffffffffffff */
      m1 >>= shift;		/*  m1 == 0x0000000000000fff */
      m1 = ~m1;			/*  m1 == 0xfffffffffffff000 */
    }

  /* Note that when we only have two 0->1 and 1->0 transitions, one of the
     masks will be all 1's.  We are guaranteed more than one transition.  */
  out[0] = GEN_INT (64 - shift);
  out[1] = GEN_INT (m1);
  out[2] = GEN_INT (shift);
  out[3] = GEN_INT (m2);
#else
  (void)in;
  (void)out;
  abort ();
#endif
}
2066
2067 /* Return 1 if the operand is either a non-special register or a constant
2068 that can be used as the operand of a PowerPC64 logical AND insn. */
2069
2070 int
2071 and64_operand (op, mode)
2072 rtx op;
2073 enum machine_mode mode;
2074 {
2075 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2076 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
2077
2078 return (logical_operand (op, mode) || mask64_operand (op, mode));
2079 }
2080
2081 /* Like the above, but also match constants that can be implemented
2082 with two rldicl or rldicr insns. */
2083
2084 int
2085 and64_2_operand (op, mode)
2086 rtx op;
2087 enum machine_mode mode;
2088 {
2089 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2090 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2091
2092 return logical_operand (op, mode) || mask64_2_operand (op, mode);
2093 }
2094
2095 /* Return 1 if the operand is either a non-special register or a
2096 constant that can be used as the operand of an RS/6000 logical AND insn. */
2097
2098 int
2099 and_operand (op, mode)
2100 rtx op;
2101 enum machine_mode mode;
2102 {
2103 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2104 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2105
2106 return (logical_operand (op, mode) || mask_operand (op, mode));
2107 }
2108
2109 /* Return 1 if the operand is a general register or memory operand. */
2110
2111 int
2112 reg_or_mem_operand (op, mode)
2113 rtx op;
2114 enum machine_mode mode;
2115 {
2116 return (gpc_reg_operand (op, mode)
2117 || memory_operand (op, mode)
2118 || volatile_mem_operand (op, mode));
2119 }
2120
2121 /* Return 1 if the operand is a general register or memory operand without
2122 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2123 instruction. */
2124
2125 int
2126 lwa_operand (op, mode)
2127 rtx op;
2128 enum machine_mode mode;
2129 {
2130 rtx inner = op;
2131
2132 if (reload_completed && GET_CODE (inner) == SUBREG)
2133 inner = SUBREG_REG (inner);
2134
2135 return gpc_reg_operand (inner, mode)
2136 || (memory_operand (inner, mode)
2137 && GET_CODE (XEXP (inner, 0)) != PRE_INC
2138 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
2139 && (GET_CODE (XEXP (inner, 0)) != PLUS
2140 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2141 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2142 }
2143
2144 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2145
2146 int
2147 symbol_ref_operand (op, mode)
2148 rtx op;
2149 enum machine_mode mode;
2150 {
2151 if (mode != VOIDmode && GET_MODE (op) != mode)
2152 return 0;
2153
2154 return (GET_CODE (op) == SYMBOL_REF
2155 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op)));
2156 }
2157
2158 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2159 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2160
2161 int
2162 call_operand (op, mode)
2163 rtx op;
2164 enum machine_mode mode;
2165 {
2166 if (mode != VOIDmode && GET_MODE (op) != mode)
2167 return 0;
2168
2169 return (GET_CODE (op) == SYMBOL_REF
2170 || (GET_CODE (op) == REG
2171 && (REGNO (op) == LINK_REGISTER_REGNUM
2172 || REGNO (op) == COUNT_REGISTER_REGNUM
2173 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2174 }
2175
2176 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2177 this file. */
2178
2179 int
2180 current_file_function_operand (op, mode)
2181 rtx op;
2182 enum machine_mode mode ATTRIBUTE_UNUSED;
2183 {
2184 return (GET_CODE (op) == SYMBOL_REF
2185 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))
2186 && (SYMBOL_REF_LOCAL_P (op)
2187 || (op == XEXP (DECL_RTL (current_function_decl), 0))));
2188 }
2189
/* Return 1 if this operand is a valid input for a move insn.  The tests
   are ordered; in particular, for MODE_FLOAT and multi-word modes only
   the cases up to the register_operand test apply.  */

int
input_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Memory is always valid.  */
  if (memory_operand (op, mode))
    return 1;

  /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  /* For floating-point, easy constants are valid.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && CONSTANT_P (op)
      && easy_fp_constant (op, mode))
    return 1;

  /* Allow any integer constant.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && (GET_CODE (op) == CONST_INT
	  || GET_CODE (op) == CONST_DOUBLE))
    return 1;

  /* Allow easy vector constants.  */
  if (GET_CODE (op) == CONST_VECTOR
      && easy_vector_constant (op, mode))
    return 1;

  /* For floating-point or multi-word mode, the only remaining valid type
     is a register.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    return register_operand (op, mode);

  /* The only cases left are integral modes one word or smaller (we
     do not get called for MODE_CC values).  These can be in any
     register.  */
  if (register_operand (op, mode))
    return 1;

  /* A SYMBOL_REF referring to the TOC is valid.  */
  if (legitimate_constant_pool_address_p (op))
    return 1;

  /* A constant pool expression (relative to the TOC) is valid.  */
  if (toc_relative_expr_p (op))
    return 1;

  /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
     to be valid.  */
  if (DEFAULT_ABI == ABI_V4
      && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
      && small_data_operand (op, Pmode))
    return 1;

  return 0;
}
2251
/* Return 1 for an operand in small memory on V.4/eabi: a SYMBOL_REF, or
   a CONST of (SYMBOL_REF plus CONST_INT), whose referenced address lies
   within g_switch_value bytes of the small-data base.  */

int
small_data_operand (op, mode)
     rtx op ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
#if TARGET_ELF
  rtx sym_ref;

  /* No small-data section is in use for these -msdata settings.  */
  if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
    return 0;

  if (DEFAULT_ABI != ABI_V4)
    return 0;

  if (GET_CODE (op) == SYMBOL_REF)
    sym_ref = op;

  /* Anything other than a plain symbol must be (const (plus sym int)).  */
  else if (GET_CODE (op) != CONST
	   || GET_CODE (XEXP (op, 0)) != PLUS
	   || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
	   || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
    return 0;

  else
    {
      rtx sum = XEXP (op, 0);
      HOST_WIDE_INT summand;

      /* We have to be careful here, because it is the referenced address
	 that must be 32k from _SDA_BASE_, not just the symbol.  */
      summand = INTVAL (XEXP (sum, 1));
      if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
	return 0;

      sym_ref = XEXP (sum, 0);
    }

  return SYMBOL_REF_SMALL_P (sym_ref);
#else
  return 0;
#endif
}
2296 \f
/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address.  */

/* Recursively check that OP is built only from SYMBOL_REFs that are
   special constant-pool entries or the TOC base label, combined with
   CONST_INTs by PLUS/MINUS/CONST.  Sets *HAVE_SYM when a pool symbol is
   seen and *HAVE_TOC when the TOC label is seen; returns nonzero iff
   every leaf of OP is of a recognized form.  */

static int
constant_pool_expr_1 (op, have_sym, have_toc)
    rtx op;
    int *have_sym;
    int *have_toc;
{
  switch (GET_CODE(op))
    {
    case SYMBOL_REF:
      /* TLS symbols need special relocations and never qualify.  */
      if (RS6000_SYMBOL_REF_TLS_P (op))
	return 0;
      else if (CONSTANT_POOL_ADDRESS_P (op))
	{
	  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
	    {
	      *have_sym = 1;
	      return 1;
	    }
	  else
	    return 0;
	}
      /* The TOC base label itself is also acceptable.  */
      else if (! strcmp (XSTR (op, 0), toc_label_name))
	{
	  *have_toc = 1;
	  return 1;
	}
      else
	return 0;
    case PLUS:
    case MINUS:
      return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
	      && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
    case CONST:
      return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
    case CONST_INT:
      return 1;
    default:
      return 0;
    }
}
2339
2340 static bool
2341 constant_pool_expr_p (op)
2342 rtx op;
2343 {
2344 int have_sym = 0;
2345 int have_toc = 0;
2346 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2347 }
2348
2349 static bool
2350 toc_relative_expr_p (op)
2351 rtx op;
2352 {
2353 int have_sym = 0;
2354 int have_toc = 0;
2355 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2356 }
2357
2358 /* SPE offset addressing is limited to 5-bits worth of double words. */
2359 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2360
2361 bool
2362 legitimate_constant_pool_address_p (x)
2363 rtx x;
2364 {
2365 return (TARGET_TOC
2366 && GET_CODE (x) == PLUS
2367 && GET_CODE (XEXP (x, 0)) == REG
2368 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2369 && constant_pool_expr_p (XEXP (x, 1)));
2370 }
2371
2372 static bool
2373 legitimate_small_data_p (mode, x)
2374 enum machine_mode mode;
2375 rtx x;
2376 {
2377 return (DEFAULT_ABI == ABI_V4
2378 && !flag_pic && !TARGET_TOC
2379 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2380 && small_data_operand (x, mode));
2381 }
2382
/* Return true if X is a (base-reg + CONST_INT) address valid for a
   MODE-sized access.  EXTRA accounts for the bytes beyond the first
   word that a multi-word access must also reach with offsets of the
   same 16-bit signed form.  */

static bool
legitimate_offset_address_p (mode, x, strict)
    enum machine_mode mode;
    rtx x;
    int strict;
{
  unsigned HOST_WIDE_INT offset, extra;

  if (GET_CODE (x) != PLUS)
    return false;
  if (GET_CODE (XEXP (x, 0)) != REG)
    return false;
  if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
    return false;
  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
    return false;

  offset = INTVAL (XEXP (x, 1));
  extra = 0;
  switch (mode)
    {
    case V16QImode:
    case V8HImode:
    case V4SFmode:
    case V4SImode:
      /* AltiVec vector modes.  Only reg+reg addressing is valid here,
	 which leaves the only valid constant offset of zero, which by
	 canonicalization rules is also invalid.  */
      return false;

    case V4HImode:
    case V2SImode:
    case V1DImode:
    case V2SFmode:
      /* SPE vector modes.  */
      return SPE_CONST_OFFSET_OK (offset);

    case DFmode:
    case DImode:
      /* 32-bit: accessed as two words, so the second word must also be
	 reachable.  64-bit: the offset must be a multiple of 4.  */
      if (TARGET_32BIT)
	extra = 4;
      else if (offset & 3)
	return false;
      break;

    case TFmode:
    case TImode:
      /* Likewise, but up to four words on 32-bit, two doublewords on
	 64-bit.  */
      if (TARGET_32BIT)
	extra = 12;
      else if (offset & 3)
	return false;
      else
	extra = 8;
      break;

    default:
      break;
    }

  /* The first comparison guards against unsigned wraparound of
     offset + extra; the second enforces the signed 16-bit range.  */
  return (offset + extra >= offset) && (offset + extra + 0x8000 < 0x10000);
}
2444
2445 static bool
2446 legitimate_indexed_address_p (x, strict)
2447 rtx x;
2448 int strict;
2449 {
2450 rtx op0, op1;
2451
2452 if (GET_CODE (x) != PLUS)
2453 return false;
2454 op0 = XEXP (x, 0);
2455 op1 = XEXP (x, 1);
2456
2457 if (!REG_P (op0) || !REG_P (op1))
2458 return false;
2459
2460 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2461 && INT_REG_OK_FOR_INDEX_P (op1, strict))
2462 || (INT_REG_OK_FOR_BASE_P (op1, strict)
2463 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
2464 }
2465
2466 static inline bool
2467 legitimate_indirect_address_p (x, strict)
2468 rtx x;
2469 int strict;
2470 {
2471 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2472 }
2473
2474 static bool
2475 legitimate_lo_sum_address_p (mode, x, strict)
2476 enum machine_mode mode;
2477 rtx x;
2478 int strict;
2479 {
2480 if (GET_CODE (x) != LO_SUM)
2481 return false;
2482 if (GET_CODE (XEXP (x, 0)) != REG)
2483 return false;
2484 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2485 return false;
2486 x = XEXP (x, 1);
2487
2488 if (TARGET_ELF)
2489 {
2490 if (DEFAULT_ABI != ABI_AIX && flag_pic)
2491 return false;
2492 if (TARGET_TOC)
2493 return false;
2494 if (GET_MODE_NUNITS (mode) != 1)
2495 return false;
2496 if (GET_MODE_BITSIZE (mode) > 32
2497 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode))
2498 return false;
2499
2500 return CONSTANT_P (x);
2501 }
2502
2503 return false;
2504 }
2505
2506
/* Try machine-dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new, valid address.
   This is used from only one place: `memory_address' in explow.c.

   OLDX is the address as it was before break_out_memory_refs was
   called.  In some cases it is useful to look at this to decide what
   needs to be done.

   MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.

   It is always safe for this function to do nothing.  It exists to
   recognize opportunities to optimize the output.

   On RS/6000, first check for the sum of a register with a constant
   integer that is out of range.  If so, generate code to add the
   constant with the low-order 16 bits masked to the register and force
   this result into another register (this can be done with `cau').
   Then generate an address of REG+(CONST&0xffff), allowing for the
   possibility of bit 16 being a one.

   Then check for the sum of a register and something not constant, try to
   load the other things into a register and return the sum.  */

rtx
rs6000_legitimize_address (x, oldx, mode)
    rtx x;
    rtx oldx ATTRIBUTE_UNUSED;
    enum machine_mode mode;
{
  /* Thread-local symbols get model-specific address sequences.  */
  if (GET_CODE (x) == SYMBOL_REF)
    {
      enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
      if (model != 0)
	return rs6000_legitimize_tls_address (x, model);
    }

  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
    {
      /* Reg plus a constant that does not fit in the 16-bit signed
	 displacement field: add the high part to the register and keep
	 the sign-extended low part as the displacement.  */
      HOST_WIDE_INT high_int, low_int;
      rtx sum;
      low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
      high_int = INTVAL (XEXP (x, 1)) - low_int;
      sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
					 GEN_INT (high_int)), 0);
      return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
    }
  else if (GET_CODE (x) == PLUS
	   && GET_CODE (XEXP (x, 0)) == REG
	   && GET_CODE (XEXP (x, 1)) != CONST_INT
	   && GET_MODE_NUNITS (mode) == 1
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	       || TARGET_POWERPC64
	       || (mode != DFmode && mode != TFmode))
	   && (TARGET_POWERPC64 || mode != DImode)
	   && mode != TImode)
    {
      /* Reg plus a non-constant: force the second operand into a
	 register so the sum is a valid reg+reg (indexed) address.  */
      return gen_rtx_PLUS (Pmode, XEXP (x, 0),
			   force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
    }
  else if (ALTIVEC_VECTOR_MODE (mode))
    {
      rtx reg;

      /* Make sure both operands are registers.  */
      if (GET_CODE (x) == PLUS)
	return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
			     force_reg (Pmode, XEXP (x, 1)));

      reg = force_reg (Pmode, x);
      return reg;
    }
  else if (SPE_VECTOR_MODE (mode))
    {
      /* We accept [reg + reg] and [reg + OFFSET].  */

      if (GET_CODE (x) == PLUS)
	{
	  rtx op1 = XEXP (x, 0);
	  rtx op2 = XEXP (x, 1);

	  op1 = force_reg (Pmode, op1);

	  if (GET_CODE (op2) != REG
	      && (GET_CODE (op2) != CONST_INT
		  || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
	    op2 = force_reg (Pmode, op2);

	  return gen_rtx_PLUS (Pmode, op1, op2);
	}

      return force_reg (Pmode, x);
    }
  else if (TARGET_ELF
	   && TARGET_32BIT
	   && TARGET_NO_TOC
	   && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && GET_MODE_NUNITS (mode) == 1
	   && (GET_MODE_BITSIZE (mode) <= 32
	       || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
    {
      /* 32-bit ELF without a TOC: materialize the constant address as a
	 high/lo_sum pair.  */
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_elf_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
	   && ! flag_pic
#if TARGET_MACHO
	   && ! MACHO_DYNAMIC_NO_PIC_P
#endif
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
	   && mode != DImode
	   && mode != TImode)
    {
      /* Likewise for Darwin without a TOC.  */
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_macho_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  else if (TARGET_TOC
	   && constant_pool_expr_p (x)
	   && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
    {
      return create_TOC_reference (x);
    }
  else
    return NULL_RTX;
}
2642
2643 /* Construct the SYMBOL_REF for the tls_get_addr function. */
2644
2645 static GTY(()) rtx rs6000_tls_symbol;
2646 static rtx
2647 rs6000_tls_get_addr ()
2648 {
2649 if (!rs6000_tls_symbol)
2650 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
2651
2652 return rs6000_tls_symbol;
2653 }
2654
2655 /* Construct the SYMBOL_REF for TLS GOT references. */
2656
2657 static GTY(()) rtx rs6000_got_symbol;
2658 static rtx
2659 rs6000_got_sym ()
2660 {
2661 if (!rs6000_got_symbol)
2662 {
2663 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2664 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
2665 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
2666 }
2667
2668 return rs6000_got_symbol;
2669 }
2670
/* ADDR contains a thread-local SYMBOL_REF.  Generate code to compute
   this (thread-local) address according to MODEL and return a pseudo
   holding the result.  */

static rtx
rs6000_legitimize_tls_address (addr, model)
    rtx addr;
    enum tls_model model;
{
  rtx dest, insn;

  dest = gen_reg_rtx (Pmode);
  if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
    {
      /* Local-exec with a 16-bit offset: one add off the thread pointer
	 (r13 on 64-bit, r2 on 32-bit).  */
      rtx tlsreg;

      if (TARGET_64BIT)
	{
	  tlsreg = gen_rtx_REG (Pmode, 13);
	  insn = gen_tls_tprel_64 (dest, tlsreg, addr);
	}
      else
	{
	  tlsreg = gen_rtx_REG (Pmode, 2);
	  insn = gen_tls_tprel_32 (dest, tlsreg, addr);
	}
      emit_insn (insn);
    }
  else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
    {
      /* Local-exec with a 32-bit offset: high-adjusted add followed by a
	 low-part add.  */
      rtx tlsreg, tmp;

      tmp = gen_reg_rtx (Pmode);
      if (TARGET_64BIT)
	{
	  tlsreg = gen_rtx_REG (Pmode, 13);
	  insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
	}
      else
	{
	  tlsreg = gen_rtx_REG (Pmode, 2);
	  insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
	}
      emit_insn (insn);
      if (TARGET_64BIT)
	insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
      else
	insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
      emit_insn (insn);
    }
  else
    {
      /* The remaining models all need a GOT pointer.  */
      rtx r3, got, tga, tmp1, tmp2, eqv;

      if (TARGET_64BIT)
	got = gen_rtx_REG (Pmode, TOC_REGISTER);
      else
	{
	  if (flag_pic == 1)
	    got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
	  else
	    {
	      rtx gsym = rs6000_got_sym ();
	      got = gen_reg_rtx (Pmode);
	      if (flag_pic == 0)
		rs6000_emit_move (got, gsym, Pmode);
	      else
		{
		  /* -fPIC: compute the GOT address at run time with a
		     local label, wrapped as a libcall block so the
		     sequence can be CSEd as a unit.  */
		  char buf[30];
		  static int tls_got_labelno = 0;
		  rtx tempLR, lab, tmp3, mem;
		  rtx first, last;

		  ASM_GENERATE_INTERNAL_LABEL (buf, "LTLS", tls_got_labelno++);
		  lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
		  tempLR = gen_reg_rtx (Pmode);
		  tmp1 = gen_reg_rtx (Pmode);
		  tmp2 = gen_reg_rtx (Pmode);
		  tmp3 = gen_reg_rtx (Pmode);
		  mem = gen_rtx_MEM (Pmode, tmp1);
		  RTX_UNCHANGING_P (mem) = 1;

		  first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, lab,
							     gsym));
		  emit_move_insn (tmp1, tempLR);
		  emit_move_insn (tmp2, mem);
		  emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
		  last = emit_move_insn (got, tmp3);
		  REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
							REG_NOTES (last));
		  REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
							 REG_NOTES (first));
		  REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
							REG_NOTES (last));
		}
	    }
	}

      if (model == TLS_MODEL_GLOBAL_DYNAMIC)
	{
	  /* Global-dynamic: call __tls_get_addr with the address of the
	     symbol's tls_index entry in r3.  */
	  r3 = gen_rtx_REG (Pmode, 3);
	  if (TARGET_64BIT)
	    insn = gen_tls_gd_64 (r3, got, addr);
	  else
	    insn = gen_tls_gd_32 (r3, got, addr);
	  start_sequence ();
	  emit_insn (insn);
	  tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
	  insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
	  insn = emit_call_insn (insn);
	  CONST_OR_PURE_CALL_P (insn) = 1;
	  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
	  insn = get_insns ();
	  end_sequence ();
	  emit_libcall_block (insn, dest, r3, addr);
	}
      else if (model == TLS_MODEL_LOCAL_DYNAMIC)
	{
	  /* Local-dynamic: one __tls_get_addr call for the module, then
	     add the per-symbol dtprel offset.  */
	  r3 = gen_rtx_REG (Pmode, 3);
	  if (TARGET_64BIT)
	    insn = gen_tls_ld_64 (r3, got);
	  else
	    insn = gen_tls_ld_32 (r3, got);
	  start_sequence ();
	  emit_insn (insn);
	  tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
	  insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
	  insn = emit_call_insn (insn);
	  CONST_OR_PURE_CALL_P (insn) = 1;
	  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
	  insn = get_insns ();
	  end_sequence ();
	  tmp1 = gen_reg_rtx (Pmode);
	  eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
				UNSPEC_TLSLD);
	  emit_libcall_block (insn, tmp1, r3, eqv);
	  if (rs6000_tls_size == 16)
	    {
	      if (TARGET_64BIT)
		insn = gen_tls_dtprel_64 (dest, tmp1, addr);
	      else
		insn = gen_tls_dtprel_32 (dest, tmp1, addr);
	    }
	  else if (rs6000_tls_size == 32)
	    {
	      tmp2 = gen_reg_rtx (Pmode);
	      if (TARGET_64BIT)
		insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
	      else
		insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
	      emit_insn (insn);
	      if (TARGET_64BIT)
		insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
	      else
		insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
	    }
	  else
	    {
	      /* Full-size dtprel offset loaded from the GOT.  */
	      tmp2 = gen_reg_rtx (Pmode);
	      if (TARGET_64BIT)
		insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
	      else
		insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
	      emit_insn (insn);
	      insn = gen_rtx_SET (Pmode, dest,
				  gen_rtx_PLUS (Pmode, tmp2, tmp1));
	    }
	  emit_insn (insn);
	}
      else
	{
	  /* IE, or 64 bit offset LE.  */
	  tmp2 = gen_reg_rtx (Pmode);
	  if (TARGET_64BIT)
	    insn = gen_tls_got_tprel_64 (tmp2, got, addr);
	  else
	    insn = gen_tls_got_tprel_32 (tmp2, got, addr);
	  emit_insn (insn);
	  if (TARGET_64BIT)
	    insn = gen_tls_tls_64 (dest, tmp2, addr);
	  else
	    insn = gen_tls_tls_32 (dest, tmp2, addr);
	  emit_insn (insn);
	}
    }

  return dest;
}
2858
/* Return 1 if X is a SYMBOL_REF for a TLS symbol.  This is used in
   instruction definitions as a predicate; MODE is ignored.  */

int
rs6000_tls_symbol_ref (x, mode)
    rtx x;
    enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return RS6000_SYMBOL_REF_TLS_P (x);
}
2869
2870 /* Return 1 if X contains a thread-local symbol. */
2871
2872 bool
2873 rs6000_tls_referenced_p (x)
2874 rtx x;
2875 {
2876 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
2877 }
2878
/* Return 1 if *X is a thread-local symbol.  This is the same as
   rs6000_tls_symbol_ref except for the type of the unused argument,
   making it suitable as a for_each_rtx callback.  */

static inline int
rs6000_tls_symbol_ref_1 (x, data)
    rtx *x;
    void *data ATTRIBUTE_UNUSED;
{
  return RS6000_SYMBOL_REF_TLS_P (*x);
}
2889
2890 /* The convention appears to be to define this wherever it is used.
2891 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2892 is now used here. */
2893 #ifndef REG_MODE_OK_FOR_BASE_P
2894 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2895 #endif
2896
/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS.  Returns a value to
   replace the input X, or the original X if no replacement is called for.
   The output parameter *WIN is 1 if the calling macro should goto WIN,
   0 if it should not.

   For RS/6000, we wish to handle large displacements off a base
   register by splitting the addend across an addi/addis pair and the mem
   insn.  This cuts number of extra insns needed from 3 to 1.

   On Darwin, we use this to generate code for floating point constants.
   A movsf_low is generated so we wind up with 2 instructions rather than 3.
   The Darwin code is inside #if TARGET_MACHO because only then is
   machopic_function_base_name() defined.  */
rtx
rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
    rtx x;
    enum machine_mode mode;
    int opnum;
    int type;
    int ind_levels ATTRIBUTE_UNUSED;
    int *win;
{
  /* We must recognize output that we have already generated ourselves.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN && flag_pic
      && GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
      && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
    {
      /* Result of previous invocation of this function on Darwin
	 floating point constant.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && !SPE_VECTOR_MODE (mode)
      && !ALTIVEC_VECTOR_MODE (mode))
    {
      /* Split the displacement into a sign-extended low 16 bits kept in
	 the mem and a high part added to the base register.  */
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT high
	= (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
	{
	  *win = 0;
	  return x;
	}

      /* Reload the high part into a base reg; leave the low part
	 in the mem directly.  */

      x = gen_rtx_PLUS (GET_MODE (x),
			gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
				      GEN_INT (high)),
			GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#if TARGET_MACHO
  if (GET_CODE (x) == SYMBOL_REF
      && DEFAULT_ABI == ABI_DARWIN
      && !ALTIVEC_VECTOR_MODE (mode)
      && flag_pic)
    {
      /* Darwin load of floating point constant.  */
      rtx offset = gen_rtx (CONST, Pmode,
		    gen_rtx (MINUS, Pmode, x,
		      gen_rtx (SYMBOL_REF, Pmode,
			machopic_function_base_name ())));
      x = gen_rtx (LO_SUM, GET_MODE (x),
	    gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
	      gen_rtx (HIGH, Pmode, offset)), offset);
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
  if (GET_CODE (x) == SYMBOL_REF
      && DEFAULT_ABI == ABI_DARWIN
      && !ALTIVEC_VECTOR_MODE (mode)
      && MACHO_DYNAMIC_NO_PIC_P)
    {
      /* Darwin load of floating point constant.  */
      x = gen_rtx (LO_SUM, GET_MODE (x),
		   gen_rtx (HIGH, Pmode, x), x);
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  if (TARGET_TOC
      && constant_pool_expr_p (x)
      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
    {
      (x) = create_TOC_reference (x);
      *win = 1;
      return x;
    }
  *win = 0;
  return x;
}
3034
/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
   that is a valid memory address for an instruction.
   The MODE argument is the machine mode for the MEM expression
   that wants to use this address.

   On the RS/6000, there are four valid addresses: a SYMBOL_REF that
   refers to a constant pool entry of an address (or the sum of it
   plus a constant), a short (16-bit signed) constant plus a register,
   the sum of two registers, or a register indirect, possibly with an
   auto-increment.  For DFmode and DImode with a constant plus register,
   we must ensure that both words are addressable or PowerPC64 with offset
   word aligned.

   For modes spanning multiple registers (DFmode in 32-bit GPRs,
   32-bit DImode, TImode), indexed addressing cannot be used because
   adjacent memory cells are accessed by adding word-sized offsets
   during assembly output.  */
int
rs6000_legitimate_address (mode, x, reg_ok_strict)
    enum machine_mode mode;
    rtx x;
    int reg_ok_strict;
{
  /* TLS references require special relocations; never legitimate here.  */
  if (RS6000_SYMBOL_REF_TLS_P (x))
    return 0;
  if (legitimate_indirect_address_p (x, reg_ok_strict))
    return 1;
  /* Pre-increment/decrement of a register, when update forms exist and
     the mode is not a vector mode.  */
  if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
      && !ALTIVEC_VECTOR_MODE (mode)
      && !SPE_VECTOR_MODE (mode)
      && TARGET_UPDATE
      && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
    return 1;
  if (legitimate_small_data_p (mode, x))
    return 1;
  if (legitimate_constant_pool_address_p (x))
    return 1;
  /* If not REG_OK_STRICT (before reload) let pass any stack offset.  */
  if (! reg_ok_strict
      && GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return 1;
  if (legitimate_offset_address_p (mode, x, reg_ok_strict))
    return 1;
  /* Indexed addressing is invalid for multi-register modes (see the
     comment above) and for FP modes without hardware FP registers.  */
  if (mode != TImode
      && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	  || TARGET_POWERPC64
	  || (mode != DFmode && mode != TFmode))
      && (TARGET_POWERPC64 || mode != DImode)
      && legitimate_indexed_address_p (x, reg_ok_strict))
    return 1;
  if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
    return 1;
  return 0;
}
3092
3093 /* Go to LABEL if ADDR (a legitimate address expression)
3094 has an effect that depends on the machine mode it is used for.
3095
3096 On the RS/6000 this is true of all integral offsets (since AltiVec
3097 modes don't allow them) or is a pre-increment or decrement.
3098
3099 ??? Except that due to conceptual problems in offsettable_address_p
3100 we can't really report the problems of integral offsets. So leave
3101 this assuming that the adjustable offset must be valid for the
3102 sub-words of a TFmode operand, which is what we had before. */
3103
3104 bool
3105 rs6000_mode_dependent_address (addr)
3106 rtx addr;
3107 {
3108 switch (GET_CODE (addr))
3109 {
3110 case PLUS:
3111 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3112 {
3113 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3114 return val + 12 + 0x8000 >= 0x10000;
3115 }
3116 break;
3117
3118 case LO_SUM:
3119 return true;
3120
3121 case PRE_INC:
3122 case PRE_DEC:
3123 return TARGET_UPDATE;
3124
3125 default:
3126 break;
3127 }
3128
3129 return false;
3130 }
3131 \f
/* Try to output insns to set TARGET equal to the constant C if it can
   be done in less than N insns.  Do all computations in MODE.
   Returns the place where the output has been placed if it can be
   done and the insns have been emitted.  If it would take more than N
   insns, zero is returned and no insns are emitted.

   Note: N is currently ignored (ATTRIBUTE_UNUSED); this implementation
   always emits the straightforward sequence.  */

rtx
rs6000_emit_set_const (dest, mode, source, n)
     rtx dest, source;
     enum machine_mode mode;
     int n ATTRIBUTE_UNUSED;
{
  rtx result, insn, set;
  HOST_WIDE_INT c0, c1;

  if (mode == QImode || mode == HImode)
    {
      /* Sub-word constants fit in a single load-immediate; create a
	 destination register here if the caller did not supply one.  */
      if (dest == NULL)
        dest = gen_reg_rtx (mode);
      emit_insn (gen_rtx_SET (VOIDmode, dest, source));
      return dest;
    }
  else if (mode == SImode)
    {
      /* Two-insn sequence: load the high 16 bits, then OR in the low
	 16 bits.  Reuse DEST as the temporary when no new pseudos may
	 be created (during/after reload).  */
      result = no_new_pseudos ? dest : gen_reg_rtx (SImode);

      emit_insn (gen_rtx_SET (VOIDmode, result,
			      GEN_INT (INTVAL (source)
				       & (~ (HOST_WIDE_INT) 0xffff))));
      emit_insn (gen_rtx_SET (VOIDmode, dest,
			      gen_rtx_IOR (SImode, result,
					   GEN_INT (INTVAL (source) & 0xffff))));
      result = dest;
    }
  else if (mode == DImode)
    {
      /* Split the 64-bit value into low (c0) and high (c1) halves.
	 For CONST_INT, the high half is the sign extension of the low
	 half.  */
      if (GET_CODE (source) == CONST_INT)
	{
	  c0 = INTVAL (source);
	  c1 = -(c0 < 0);
	}
      else if (GET_CODE (source) == CONST_DOUBLE)
	{
#if HOST_BITS_PER_WIDE_INT >= 64
	  /* The whole value fits in CONST_DOUBLE_LOW; sign-extend.  */
	  c0 = CONST_DOUBLE_LOW (source);
	  c1 = -(c0 < 0);
#else
	  c0 = CONST_DOUBLE_LOW (source);
	  c1 = CONST_DOUBLE_HIGH (source);
#endif
	}
      else
	abort ();

      result = rs6000_emit_set_long_const (dest, c0, c1);
    }
  else
    abort ();

  /* Attach a REG_EQUAL note recording the full constant, so later
     passes know the final value even though the last insn's source may
     be a partial expression (e.g. an IOR).  */
  insn = get_last_insn ();
  set = single_set (insn);
  if (! CONSTANT_P (SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, source);

  return result;
}
3198
/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
   fall back to a straight forward decomposition.  We do this to avoid
   exponential run times encountered when looking for longer sequences
   with rs6000_emit_set_const.

   C1 is the low host word of the constant and C2 the high word (on a
   64-bit host C2 is recomputed from C1 below).  Returns DEST.  */
static rtx
rs6000_emit_set_long_const (dest, c1, c2)
     rtx dest;
     HOST_WIDE_INT c1, c2;
{
  if (!TARGET_POWERPC64)
    {
      /* 32-bit target: a DImode value lives in two registers; just
	 move each 32-bit half into its subword.  */
      rtx operand1, operand2;

      operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
					DImode);
      operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
					DImode);
      emit_move_insn (operand1, GEN_INT (c1));
      emit_move_insn (operand2, GEN_INT (c2));
    }
  else
    {
      /* 64-bit target: build the constant from its four 16-bit chunks,
	 ud1 being the least significant.  */
      HOST_WIDE_INT ud1, ud2, ud3, ud4;

      ud1 = c1 & 0xffff;
      ud2 = (c1 & 0xffff0000) >> 16;
#if HOST_BITS_PER_WIDE_INT >= 64
      c2 = c1 >> 32;
#endif
      ud3 = c2 & 0xffff;
      ud4 = (c2 & 0xffff0000) >> 16;

      /* Case 1: the value is a sign-extended 16-bit constant — one
	 load-immediate suffices.  The XOR/subtract idiom produces the
	 sign-extended form of ud1 without relying on the host's
	 signed-shift behavior.  */
      if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
	  || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
	{
	  if (ud1 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) -  0x8000)));
	  else
	    emit_move_insn (dest, GEN_INT (ud1));
	}

      /* Case 2: sign-extended 32-bit constant — load the upper 16 bits
	 (lis), then OR in the low 16 bits if nonzero.  */
      else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
	       || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
	{
	  if (ud2 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud2 << 16));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      /* Case 3: sign-extended 48-bit constant — build the top 32 bits
	 (as ud3:ud2), shift left 16, then OR in ud1.  */
      else if ((ud4 == 0xffff && (ud3 & 0x8000))
	       || (ud4 == 0 && ! (ud3 & 0x8000)))
	{
	  if (ud3 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud3 << 16));

	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      /* Case 4: full 64-bit constant — build ud4:ud3, shift left 32,
	 then OR in ud2 (pre-shifted) and ud1.  */
      else
	{
	  if (ud4 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud4 << 16));

	  if (ud3 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));

	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
					       GEN_INT (ud2 << 16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
    }
  return dest;
}
3287
/* Emit a move from SOURCE to DEST in mode MODE.  This is the main
   move expander: it legitimizes constants and addresses (forcing hard
   constants to the constant pool or TOC as appropriate) and then emits
   the final SET.  */
void
rs6000_emit_move (dest, source, mode)
     rtx dest;
     rtx source;
     enum machine_mode mode;
{
  rtx operands[2];
  operands[0] = dest;
  operands[1] = source;

  /* Sanity checks.  Check that we get CONST_DOUBLE only when we should.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      /* FIXME.  This should never happen.  */
      /* Since it seems that it does, do the safe thing and convert
	 to a CONST_INT.  */
      operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
    }
  /* A CONST_DOUBLE whose high word is just the sign extension of the
     low word should have been a CONST_INT; that is a canonicality
     violation, so abort.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && ((CONST_DOUBLE_HIGH (operands[1]) == 0
	   && CONST_DOUBLE_LOW (operands[1]) >= 0)
	  || (CONST_DOUBLE_HIGH (operands[1]) == -1
	      && CONST_DOUBLE_LOW (operands[1]) < 0)))
    abort ();

  /* Check if GCC is setting up a block move that will end up using FP
     registers as temporaries.  We must make sure this is acceptable.
     If a slow unaligned DImode access would result but two SImode
     accesses would be fine, split the move into two SImode moves.  */
  if (GET_CODE (operands[0]) == MEM
      && GET_CODE (operands[1]) == MEM
      && mode == DImode
      && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
	  || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
      && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
					    ? 32 : MEM_ALIGN (operands[0])))
	    || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
					       ? 32
					       : MEM_ALIGN (operands[1]))))
      && ! MEM_VOLATILE_P (operands [0])
      && ! MEM_VOLATILE_P (operands [1]))
    {
      emit_move_insn (adjust_address (operands[0], SImode, 0),
		      adjust_address (operands[1], SImode, 0));
      emit_move_insn (adjust_address (operands[0], SImode, 4),
		      adjust_address (operands[1], SImode, 4));
      return;
    }

  if (!no_new_pseudos)
    {
      /* Narrow memory loads: load a full word via ZERO_EXTEND and take
	 the low part, which avoids a partial-word load.  */
      if (GET_CODE (operands[1]) == MEM && optimize > 0
	  && (mode == QImode || mode == HImode || mode == SImode)
	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
	{
	  rtx reg = gen_reg_rtx (word_mode);

	  emit_insn (gen_rtx_SET (word_mode, reg,
				  gen_rtx_ZERO_EXTEND (word_mode,
						       operands[1])));
	  operands[1] = gen_lowpart (mode, reg);
	}
      /* If the destination is not a register, force the source into
	 one so we never emit a mem-to-mem move.  */
      if (GET_CODE (operands[0]) != REG)
	operands[1] = force_reg (mode, operands[1]);
    }

  if (mode == SFmode && ! TARGET_POWERPC
      && TARGET_HARD_FLOAT && TARGET_FPRS
      && GET_CODE (operands[0]) == MEM)
    {
      int regnum;

      if (reload_in_progress || reload_completed)
	regnum = true_regnum (operands[1]);
      else if (GET_CODE (operands[1]) == REG)
	regnum = REGNO (operands[1]);
      else
	regnum = -1;

      /* If operands[1] is a register, on POWER it may have
	 double-precision data in it, so truncate it to single
	 precision.  */
      if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
	{
	  rtx newreg;
	  newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
	  emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
	  operands[1] = newreg;
	}
    }

  /* Recognize the case where operand[1] is a reference to thread-local
     data and load its address to a register.  */
  if (GET_CODE (operands[1]) == SYMBOL_REF)
    {
      enum tls_model model = SYMBOL_REF_TLS_MODEL (operands[1]);
      if (model != 0)
	operands[1] = rs6000_legitimize_tls_address (operands[1], model);
    }

  /* Handle the case where reload calls us with an invalid address.  */
  if (reload_in_progress && mode == Pmode
      && (! general_operand (operands[1], mode)
	  || ! nonimmediate_operand (operands[0], mode)))
    goto emit_set;

  /* Handle the case of CONSTANT_P_RTX.  */
  if (GET_CODE (operands[1]) == CONSTANT_P_RTX)
    goto emit_set;

  /* FIXME:  In the long term, this switch statement should go away
     and be replaced by a sequence of tests based on things like
     mode == Pmode.  */
  switch (mode)
    {
    case HImode:
    case QImode:
      /* Non-CONST_INT constants (e.g. symbolic) must come from the
	 constant pool.  */
      if (CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != CONST_INT)
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case TFmode:
    case DFmode:
    case SFmode:
      /* FP constants that cannot be synthesized cheaply go to the
	 constant pool.  */
      if (CONSTANT_P (operands[1])
	  && ! easy_fp_constant (operands[1], mode))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case V16QImode:
    case V8HImode:
    case V4SFmode:
    case V4SImode:
    case V4HImode:
    case V2SFmode:
    case V2SImode:
    case V1DImode:
      /* Same for vector constants that are not easy immediates.  */
      if (CONSTANT_P (operands[1])
	  && !easy_vector_constant (operands[1], mode))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case SImode:
    case DImode:
      /* Use default pattern for address of ELF small data */
      if (TARGET_ELF
	  && mode == Pmode
	  && DEFAULT_ABI == ABI_V4
	  && (GET_CODE (operands[1]) == SYMBOL_REF
	      || GET_CODE (operands[1]) == CONST)
	  && small_data_operand (operands[1], mode))
	{
	  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	  return;
	}

      if (DEFAULT_ABI == ABI_V4
	  && mode == Pmode && mode == SImode
	  && flag_pic == 1 && got_operand (operands[1], mode))
	{
	  emit_insn (gen_movsi_got (operands[0], operands[1]));
	  return;
	}

      if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
	  && TARGET_NO_TOC
	  && ! flag_pic
	  && mode == Pmode
	  && CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != HIGH
	  && GET_CODE (operands[1]) != CONST_INT)
	{
	  rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));

	  /* If this is a function address on -mcall-aixdesc,
	     convert it to the address of the descriptor.  */
	  if (DEFAULT_ABI == ABI_AIX
	      && GET_CODE (operands[1]) == SYMBOL_REF
	      && XSTR (operands[1], 0)[0] == '.')
	    {
	      const char *name = XSTR (operands[1], 0);
	      rtx new_ref;
	      while (*name == '.')
		name++;
	      new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
	      CONSTANT_POOL_ADDRESS_P (new_ref)
		= CONSTANT_POOL_ADDRESS_P (operands[1]);
	      SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
	      SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
	      SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
	      operands[1] = new_ref;
	    }

	  if (DEFAULT_ABI == ABI_DARWIN)
	    {
#if TARGET_MACHO
	      if (MACHO_DYNAMIC_NO_PIC_P)
		{
		  /* Take care of any required data indirection.  */
		  operands[1] = rs6000_machopic_legitimize_pic_address (
				  operands[1], mode, operands[0]);
		  if (operands[0] != operands[1])
		    emit_insn (gen_rtx_SET (VOIDmode,
				            operands[0], operands[1]));
		  return;
		}
#endif
	      emit_insn (gen_macho_high (target, operands[1]));
	      emit_insn (gen_macho_low (operands[0], target, operands[1]));
	      return;
	    }

	  emit_insn (gen_elf_high (target, operands[1]));
	  emit_insn (gen_elf_low (operands[0], target, operands[1]));
	  return;
	}

      /* If this is a SYMBOL_REF that refers to a constant pool entry,
	 and we have put it in the TOC, we just need to make a TOC-relative
	 reference to it.  */
      if (TARGET_TOC
	  && GET_CODE (operands[1]) == SYMBOL_REF
	  && constant_pool_expr_p (operands[1])
	  && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
					      get_pool_mode (operands[1])))
	{
	  operands[1] = create_TOC_reference (operands[1]);
	}
      else if (mode == Pmode
	       && CONSTANT_P (operands[1])
	       && ((GET_CODE (operands[1]) != CONST_INT
		    && ! easy_fp_constant (operands[1], mode))
		   || (GET_CODE (operands[1]) == CONST_INT
		       && num_insns_constant (operands[1], mode) > 2)
		   || (GET_CODE (operands[0]) == REG
		       && FP_REGNO_P (REGNO (operands[0]))))
	       && GET_CODE (operands[1]) != HIGH
	       && ! legitimate_constant_pool_address_p (operands[1])
	       && ! toc_relative_expr_p (operands[1]))
	{
	  /* Emit a USE operation so that the constant isn't deleted if
	     expensive optimizations are turned on because nobody
	     references it.  This should only be done for operands that
	     contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
	     This should not be done for operands that contain LABEL_REFs.
	     For now, we just handle the obvious case.  */
	  if (GET_CODE (operands[1]) != LABEL_REF)
	    emit_insn (gen_rtx_USE (VOIDmode, operands[1]));

#if TARGET_MACHO
	  /* Darwin uses a special PIC legitimizer.  */
	  if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
	    {
	      operands[1] =
		rs6000_machopic_legitimize_pic_address (operands[1], mode,
							operands[0]);
	      if (operands[0] != operands[1])
		emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	      return;
	    }
#endif

	  /* If we are to limit the number of things we put in the TOC and
	     this is a symbol plus a constant we can add in one insn,
	     just put the symbol in the TOC and add the constant.  Don't do
	     this if reload is in progress.  */
	  if (GET_CODE (operands[1]) == CONST
	      && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
	      && GET_CODE (XEXP (operands[1], 0)) == PLUS
	      && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
	      && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
		  || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
	      && ! side_effects_p (operands[0]))
	    {
	      rtx sym =
		force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
	      rtx other = XEXP (XEXP (operands[1], 0), 1);

	      sym = force_reg (mode, sym);
	      if (mode == SImode)
		emit_insn (gen_addsi3 (operands[0], sym, other));
	      else
		emit_insn (gen_adddi3 (operands[0], sym, other));
	      return;
	    }

	  operands[1] = force_const_mem (mode, operands[1]);

	  /* If the pooled constant landed in the TOC, replace the
	     constant-pool MEM with a TOC-relative MEM marked
	     unchanging.  */
	  if (TARGET_TOC
	      && constant_pool_expr_p (XEXP (operands[1], 0))
	      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
		get_pool_constant (XEXP (operands[1], 0)),
		get_pool_mode (XEXP (operands[1], 0))))
	    {
	      operands[1]
		= gen_rtx_MEM (mode,
			       create_TOC_reference (XEXP (operands[1], 0)));
	      set_mem_alias_set (operands[1], get_TOC_alias_set ());
	      RTX_UNCHANGING_P (operands[1]) = 1;
	    }
	}
      break;

    case TImode:
      /* TImode memory operands must have a plain register address;
	 copy any other address into a register first (not possible
	 during reload).  */
      if (GET_CODE (operands[0]) == MEM
	  && GET_CODE (XEXP (operands[0], 0)) != REG
	  && ! reload_in_progress)
	operands[0]
	  = replace_equiv_address (operands[0],
				   copy_addr_to_reg (XEXP (operands[0], 0)));

      if (GET_CODE (operands[1]) == MEM
	  && GET_CODE (XEXP (operands[1], 0)) != REG
	  && ! reload_in_progress)
	operands[1]
	  = replace_equiv_address (operands[1],
				   copy_addr_to_reg (XEXP (operands[1], 0)));
      /* POWER's TImode move pattern clobbers a scratch SImode reg.  */
      if (TARGET_POWER)
	{
	  emit_insn (gen_rtx_PARALLEL (VOIDmode,
				       gen_rtvec (2,
						  gen_rtx_SET (VOIDmode,
							       operands[0], operands[1]),
						  gen_rtx_CLOBBER (VOIDmode,
								   gen_rtx_SCRATCH (SImode)))));
	  return;
	}
      break;

    default:
      abort ();
    }

  /* Above, we may have called force_const_mem which may have returned
     an invalid address.  If we can, fix this up; otherwise, reload will
     have to deal with it.  */
  if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
    operands[1] = validize_mem (operands[1]);

 emit_set:
  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
}
3633 \f
3634 /* Initialize a variable CUM of type CUMULATIVE_ARGS
3635 for a call to a function whose data type is FNTYPE.
3636 For a library call, FNTYPE is 0.
3637
3638 For incoming args we set the number of arguments in the prototype large
3639 so we never return a PARALLEL. */
3640
3641 void
3642 init_cumulative_args (cum, fntype, libname, incoming)
3643 CUMULATIVE_ARGS *cum;
3644 tree fntype;
3645 rtx libname ATTRIBUTE_UNUSED;
3646 int incoming;
3647 {
3648 static CUMULATIVE_ARGS zero_cumulative;
3649
3650 *cum = zero_cumulative;
3651 cum->words = 0;
3652 cum->fregno = FP_ARG_MIN_REG;
3653 cum->vregno = ALTIVEC_ARG_MIN_REG;
3654 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
3655 cum->call_cookie = CALL_NORMAL;
3656 cum->sysv_gregno = GP_ARG_MIN_REG;
3657 cum->stdarg = fntype
3658 && (TYPE_ARG_TYPES (fntype) != 0
3659 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3660 != void_type_node));
3661
3662 if (incoming)
3663 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
3664
3665 else if (cum->prototype)
3666 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
3667 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
3668 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
3669
3670 else
3671 cum->nargs_prototype = 0;
3672
3673 cum->orig_nargs = cum->nargs_prototype;
3674
3675 /* Check for a longcall attribute. */
3676 if (fntype
3677 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
3678 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
3679 cum->call_cookie = CALL_LONG;
3680
3681 if (TARGET_DEBUG_ARG)
3682 {
3683 fprintf (stderr, "\ninit_cumulative_args:");
3684 if (fntype)
3685 {
3686 tree ret_type = TREE_TYPE (fntype);
3687 fprintf (stderr, " ret code = %s,",
3688 tree_code_name[ (int)TREE_CODE (ret_type) ]);
3689 }
3690
3691 if (cum->call_cookie & CALL_LONG)
3692 fprintf (stderr, " longcall,");
3693
3694 fprintf (stderr, " proto = %d, nargs = %d\n",
3695 cum->prototype, cum->nargs_prototype);
3696 }
3697 }
3698 \f
3699 /* If defined, a C expression which determines whether, and in which
3700 direction, to pad out an argument with extra space. The value
3701 should be of type `enum direction': either `upward' to pad above
3702 the argument, `downward' to pad below, or `none' to inhibit
3703 padding.
3704
3705 For the AIX ABI structs are always stored left shifted in their
3706 argument slot. */
3707
3708 enum direction
3709 function_arg_padding (mode, type)
3710 enum machine_mode mode;
3711 tree type;
3712 {
3713 if (type != 0 && AGGREGATE_TYPE_P (type))
3714 return upward;
3715
3716 /* This is the default definition. */
3717 return (! BYTES_BIG_ENDIAN
3718 ? upward
3719 : ((mode == BLKmode
3720 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
3721 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
3722 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
3723 ? downward : upward));
3724 }
3725
3726 /* If defined, a C expression that gives the alignment boundary, in bits,
3727 of an argument with the specified mode and type. If it is not defined,
3728 PARM_BOUNDARY is used for all arguments.
3729
3730 V.4 wants long longs to be double word aligned. */
3731
3732 int
3733 function_arg_boundary (mode, type)
3734 enum machine_mode mode;
3735 tree type ATTRIBUTE_UNUSED;
3736 {
3737 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
3738 return 64;
3739 else if (SPE_VECTOR_MODE (mode))
3740 return 64;
3741 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3742 return 128;
3743 else
3744 return PARM_BOUNDARY;
3745 }
3746 \f
/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)
   NAMED is only used for the debug dump here.  */

void
function_arg_advance (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;
     tree type;
     int named;
{
  cum->nargs_prototype--;

  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      /* AltiVec args go in vector registers while any remain (and the
	 prototype has not run out); otherwise they take stack words.  */
      if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
	cum->vregno++;
      else
	cum->words += RS6000_ARG_SIZE (mode, type);
    }
  else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
	   && !cum->stdarg
	   && cum->sysv_gregno <= GP_ARG_MAX_REG)
    /* A named SPE vector consumes one GPR slot.  */
    cum->sysv_gregno++;
  else if (DEFAULT_ABI == ABI_V4)
    {
      if (TARGET_HARD_FLOAT && TARGET_FPRS
	  && (mode == SFmode || mode == DFmode))
	{
	  /* V.4 float args use FPRs until they run out, then stack
	     words (doubles are aligned to an even word first).  */
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    cum->fregno++;
	  else
	    {
	      if (mode == DFmode)
	        cum->words += cum->words & 1;
	      cum->words += RS6000_ARG_SIZE (mode, type);
	    }
	}
      else
	{
	  int n_words;
	  int gregno = cum->sysv_gregno;

	  /* Aggregates and IEEE quad get passed by reference.  */
	  if ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode)
	    n_words = 1;
	  else
	    n_words = RS6000_ARG_SIZE (mode, type);

	  /* Long long and SPE vectors are put in odd registers.  */
	  if (n_words == 2 && (gregno & 1) == 0)
	    gregno += 1;

	  /* Long long and SPE vectors are not split between registers
	     and stack.  */
	  if (gregno + n_words - 1 > GP_ARG_MAX_REG)
	    {
	      /* Long long is aligned on the stack.  */
	      if (n_words == 2)
		cum->words += cum->words & 1;
	      cum->words += n_words;
	    }

	  /* Note: continuing to accumulate gregno past when we've started
	     spilling to the stack indicates the fact that we've started
	     spilling to the stack to expand_builtin_saveregs.  */
	  cum->sysv_gregno = gregno + n_words;
	}

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
		   cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
	  fprintf (stderr, "mode = %4s, named = %d\n",
		   GET_MODE_NAME (mode), named);
	}
    }
  else
    {
      /* AIX/Darwin: everything consumes stack words; doubleword-aligned
	 args on 32-bit targets skip an odd word first.  Float args also
	 consume FPRs (two for TFmode).  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
		   && function_arg_boundary (mode, type) == 64) ? 1 : 0;

      cum->words += align + RS6000_ARG_SIZE (mode, type);

      if (GET_MODE_CLASS (mode) == MODE_FLOAT
	  && TARGET_HARD_FLOAT && TARGET_FPRS)
	cum->fregno += (mode == TFmode ? 2 : 1);

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
		   cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
	  fprintf (stderr, "named = %d, align = %d\n", named, align);
	}
    }
}
3848
3849 /* Determine where to put a SIMD argument on the SPE. */
3850 static rtx
3851 rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type)
3852 {
3853 if (cum->stdarg)
3854 {
3855 int gregno = cum->sysv_gregno;
3856 int n_words = RS6000_ARG_SIZE (mode, type);
3857
3858 /* SPE vectors are put in odd registers. */
3859 if (n_words == 2 && (gregno & 1) == 0)
3860 gregno += 1;
3861
3862 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
3863 {
3864 rtx r1, r2;
3865 enum machine_mode m = SImode;
3866
3867 r1 = gen_rtx_REG (m, gregno);
3868 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
3869 r2 = gen_rtx_REG (m, gregno + 1);
3870 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
3871 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
3872 }
3873 else
3874 return NULL;
3875 }
3876 else
3877 {
3878 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
3879 return gen_rtx_REG (mode, cum->sysv_gregno);
3880 else
3881 return NULL;
3882 }
3883 }
3884
/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).

   On RS/6000 the first eight words of non-FP are normally in registers
   and the rest are pushed.  Under AIX, the first 13 FP args are in registers.
   Under V.4, the first 8 FP args are in registers.

   If this is floating-point and no prototype is specified, we use
   both an FP and integer register (or possibly FP reg and stack).  Library
   functions (when TYPE is zero) always have the proper types for args,
   so we can pass the FP value just in one register.  emit_library_function
   doesn't support PARALLEL anyway.  */

struct rtx_def *
function_arg (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;
     tree type;
     int named;
{
  enum rs6000_abi abi = DEFAULT_ABI;

  /* Return a marker to indicate whether CR1 needs to set or clear the
     bit that V.4 uses to say fp args were passed in registers.
     Assume that we don't need the marker for software floating point,
     or compiler generated library calls.  */
  if (mode == VOIDmode)
    {
      if (abi == ABI_V4
	  && cum->nargs_prototype < 0
	  && type && (cum->prototype || TARGET_NO_PROTOTYPE))
	{
	  /* For the SPE, we need to crxor CR6 always.  */
	  if (TARGET_SPE_ABI)
	    return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
	  else if (TARGET_HARD_FLOAT && TARGET_FPRS)
	    return GEN_INT (cum->call_cookie
			    | ((cum->fregno == FP_ARG_MIN_REG)
			       ? CALL_V4_SET_FP_ARGS
			       : CALL_V4_CLEAR_FP_ARGS));
	}

      return GEN_INT (cum->call_cookie);
    }

  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      /* Named AltiVec args use vector registers; unnamed or overflow
	 args go on the stack.  */
      if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
	return gen_rtx_REG (mode, cum->vregno);
      else
	return NULL;
    }
  else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode))
    return rs6000_spe_function_arg (cum, mode, type);
  else if (abi == ABI_V4)
    {
      if (TARGET_HARD_FLOAT && TARGET_FPRS
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    return gen_rtx_REG (mode, cum->fregno);
	  else
	    return NULL;
	}
      else
	{
	  int n_words;
	  int gregno = cum->sysv_gregno;

	  /* Aggregates and IEEE quad get passed by reference.  */
	  if ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode)
	    n_words = 1;
	  else
	    n_words = RS6000_ARG_SIZE (mode, type);

	  /* Long long and SPE vectors are put in odd registers.  */
	  if (n_words == 2 && (gregno & 1) == 0)
	    gregno += 1;

	  /* Long long do not split between registers and stack.  */
	  if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
	    return gen_rtx_REG (mode, gregno);
	  else
	    return NULL;
	}
    }
  else
    {
      /* AIX/Darwin conventions.  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
		   && function_arg_boundary (mode, type) == 64) ? 1 : 0;
      int align_words = cum->words + align;

      /* Variable-sized types are pushed, not passed in registers.  */
      if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	return NULL_RTX;

      if (USE_FP_FOR_ARG_P (*cum, mode, type))
	{
	  if (! type
	      || ((cum->nargs_prototype > 0)
	          /* IBM AIX extended its linkage convention definition always
		     to require FP args after register save area hole on the
		     stack.  */
	          && (DEFAULT_ABI != ABI_AIX
		      || ! TARGET_XL_CALL
		      || (align_words < GP_ARG_NUM_REG))))
	    return gen_rtx_REG (mode, cum->fregno);

	  /* Unprototyped FP arg: pass in both the FP register and the
	     corresponding GP register(s)/stack words, since the callee
	     may expect either.  */
          return gen_rtx_PARALLEL (mode,
	    gen_rtvec (2,
		       gen_rtx_EXPR_LIST (VOIDmode,
				((align_words >= GP_ARG_NUM_REG)
				 ? NULL_RTX
				 : (align_words
				    + RS6000_ARG_SIZE (mode, type)
				    > GP_ARG_NUM_REG
				    /* If this is partially on the stack, then
				       we only include the portion actually
				       in registers here.  */
				    ? gen_rtx_REG (SImode,
					       GP_ARG_MIN_REG + align_words)
				    : gen_rtx_REG (mode,
					       GP_ARG_MIN_REG + align_words))),
				const0_rtx),
		       gen_rtx_EXPR_LIST (VOIDmode,
				gen_rtx_REG (mode, cum->fregno),
				const0_rtx)));
	}
      else if (align_words < GP_ARG_NUM_REG)
	return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
      else
	return NULL_RTX;
    }
}
4029 \f
4030 /* For an arg passed partly in registers and partly in memory,
4031 this is the number of registers used.
4032 For args passed entirely in registers or entirely in memory, zero. */
4033
4034 int
4035 function_arg_partial_nregs (cum, mode, type, named)
4036 CUMULATIVE_ARGS *cum;
4037 enum machine_mode mode;
4038 tree type;
4039 int named ATTRIBUTE_UNUSED;
4040 {
4041 if (DEFAULT_ABI == ABI_V4)
4042 return 0;
4043
4044 if (USE_FP_FOR_ARG_P (*cum, mode, type)
4045 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
4046 {
4047 if (cum->nargs_prototype >= 0)
4048 return 0;
4049 }
4050
4051 if (cum->words < GP_ARG_NUM_REG
4052 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
4053 {
4054 int ret = GP_ARG_NUM_REG - cum->words;
4055 if (ret && TARGET_DEBUG_ARG)
4056 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
4057
4058 return ret;
4059 }
4060
4061 return 0;
4062 }
4063 \f
4064 /* A C expression that indicates when an argument must be passed by
4065 reference. If nonzero for an argument, a copy of that argument is
4066 made in memory and a pointer to the argument is passed instead of
4067 the argument itself. The pointer is passed in whatever way is
4068 appropriate for passing a pointer to that type.
4069
4070 Under V.4, structures and unions are passed by reference.
4071
4072 As an extension to all ABIs, variable sized types are passed by
4073 reference. */
4074
4075 int
4076 function_arg_pass_by_reference (cum, mode, type, named)
4077 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
4078 enum machine_mode mode ATTRIBUTE_UNUSED;
4079 tree type;
4080 int named ATTRIBUTE_UNUSED;
4081 {
4082 if (DEFAULT_ABI == ABI_V4
4083 && ((type && AGGREGATE_TYPE_P (type))
4084 || mode == TFmode))
4085 {
4086 if (TARGET_DEBUG_ARG)
4087 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
4088
4089 return 1;
4090 }
4091 return type && int_size_in_bytes (type) <= 0;
4092 }
4093 \f
4094 /* Perform any needed actions needed for a function that is receiving a
4095 variable number of arguments.
4096
4097 CUM is as above.
4098
4099 MODE and TYPE are the mode and type of the current parameter.
4100
4101 PRETEND_SIZE is a variable that should be set to the amount of stack
4102 that must be pushed by the prolog to pretend that our caller pushed
4103 it.
4104
4105 Normally, this macro will push all remaining incoming registers on the
4106 stack and set PRETEND_SIZE to the length of the registers pushed. */
4107
4108 void
4109 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
4110 CUMULATIVE_ARGS *cum;
4111 enum machine_mode mode;
4112 tree type;
4113 int *pretend_size ATTRIBUTE_UNUSED;
4114 int no_rtl;
4115
4116 {
4117 CUMULATIVE_ARGS next_cum;
4118 int reg_size = TARGET_32BIT ? 4 : 8;
4119 rtx save_area = NULL_RTX, mem;
4120 int first_reg_offset, set;
4121 tree fntype;
4122 int stdarg_p;
4123
4124 fntype = TREE_TYPE (current_function_decl);
4125 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
4126 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4127 != void_type_node));
4128
4129 /* For varargs, we do not want to skip the dummy va_dcl argument.
4130 For stdargs, we do want to skip the last named argument. */
4131 next_cum = *cum;
4132 if (stdarg_p)
4133 function_arg_advance (&next_cum, mode, type, 1);
4134
4135 if (DEFAULT_ABI == ABI_V4)
4136 {
4137 /* Indicate to allocate space on the stack for varargs save area. */
4138 cfun->machine->sysv_varargs_p = 1;
4139 if (! no_rtl)
4140 save_area = plus_constant (virtual_stack_vars_rtx,
4141 - RS6000_VARARGS_SIZE);
4142
4143 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
4144 }
4145 else
4146 {
4147 first_reg_offset = next_cum.words;
4148 save_area = virtual_incoming_args_rtx;
4149 cfun->machine->sysv_varargs_p = 0;
4150
4151 if (MUST_PASS_IN_STACK (mode, type))
4152 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
4153 }
4154
4155 set = get_varargs_alias_set ();
4156 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
4157 {
4158 mem = gen_rtx_MEM (BLKmode,
4159 plus_constant (save_area,
4160 first_reg_offset * reg_size)),
4161 set_mem_alias_set (mem, set);
4162 set_mem_align (mem, BITS_PER_WORD);
4163
4164 move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
4165 GP_ARG_NUM_REG - first_reg_offset);
4166 }
4167
4168 /* Save FP registers if needed. */
4169 if (DEFAULT_ABI == ABI_V4
4170 && TARGET_HARD_FLOAT && TARGET_FPRS
4171 && ! no_rtl
4172 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
4173 {
4174 int fregno = next_cum.fregno;
4175 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4176 rtx lab = gen_label_rtx ();
4177 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
4178
4179 emit_jump_insn (gen_rtx_SET (VOIDmode,
4180 pc_rtx,
4181 gen_rtx_IF_THEN_ELSE (VOIDmode,
4182 gen_rtx_NE (VOIDmode, cr1,
4183 const0_rtx),
4184 gen_rtx_LABEL_REF (VOIDmode, lab),
4185 pc_rtx)));
4186
4187 while (fregno <= FP_ARG_V4_MAX_REG)
4188 {
4189 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
4190 set_mem_alias_set (mem, set);
4191 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4192 fregno++;
4193 off += 8;
4194 }
4195
4196 emit_label (lab);
4197 }
4198 }
4199
4200 /* Create the va_list data type. */
4201
4202 tree
4203 rs6000_build_va_list ()
4204 {
4205 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4206
4207 /* For AIX, prefer 'char *' because that's what the system
4208 header files like. */
4209 if (DEFAULT_ABI != ABI_V4)
4210 return build_pointer_type (char_type_node);
4211
4212 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
4213 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
4214
4215 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
4216 unsigned_char_type_node);
4217 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
4218 unsigned_char_type_node);
4219 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
4220 every user file. */
4221 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
4222 short_unsigned_type_node);
4223 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
4224 ptr_type_node);
4225 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
4226 ptr_type_node);
4227
4228 DECL_FIELD_CONTEXT (f_gpr) = record;
4229 DECL_FIELD_CONTEXT (f_fpr) = record;
4230 DECL_FIELD_CONTEXT (f_res) = record;
4231 DECL_FIELD_CONTEXT (f_ovf) = record;
4232 DECL_FIELD_CONTEXT (f_sav) = record;
4233
4234 TREE_CHAIN (record) = type_decl;
4235 TYPE_NAME (record) = type_decl;
4236 TYPE_FIELDS (record) = f_gpr;
4237 TREE_CHAIN (f_gpr) = f_fpr;
4238 TREE_CHAIN (f_fpr) = f_res;
4239 TREE_CHAIN (f_res) = f_ovf;
4240 TREE_CHAIN (f_ovf) = f_sav;
4241
4242 layout_type (record);
4243
4244 /* The correct type is an array type of one element. */
4245 return build_array_type (record, build_index_type (size_zero_node));
4246 }
4247
4248 /* Implement va_start. */
4249
void
rs6000_va_start (valist, nextarg)
     tree valist;
     rtx nextarg;
{
  HOST_WIDE_INT words, n_gpr, n_fpr;
  tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;

  /* Only SVR4 needs something special.  */
  if (DEFAULT_ABI != ABI_V4)
    {
      std_expand_builtin_va_start (valist, nextarg);
      return;
    }

  /* Walk the field chain of the __va_list_tag record; the order here
     must match the one laid down in rs6000_build_va_list.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_res = TREE_CHAIN (f_fpr);
  f_ovf = TREE_CHAIN (f_res);
  f_sav = TREE_CHAIN (f_ovf);

  /* Build COMPONENT_REFs for each member of the user's va_list object.  */
  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Count number of gp and fp argument registers used.  */
  words = current_function_args_info.words;
  n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
  n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;

  if (TARGET_DEBUG_ARG)
    fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
	     HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
	     words, n_gpr, n_fpr);

  /* Initialize the gpr and fpr counters from the register counts
     accumulated while scanning the named arguments.  */
  t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the overflow area: the incoming-args base, advanced past the
     words consumed by named stack arguments.  */
  t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
  if (words != 0)
    t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
	       build_int_2 (words * UNITS_PER_WORD, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the register save area; it lies RS6000_VARARGS_SIZE bytes
     below the virtual stack-variables base, matching the address used
     in setup_incoming_varargs.  */
  t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
  t = build (PLUS_EXPR, TREE_TYPE (sav), t,
	     build_int_2 (-RS6000_VARARGS_SIZE, -1));
  t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
4313
4314 /* Implement va_arg. */
4315
rtx
rs6000_va_arg (valist, type)
     tree valist, type;
{
  tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
  rtx lab_false, lab_over, addr_rtx, r;

  if (DEFAULT_ABI != ABI_V4)
    {
      /* Variable sized types are passed by reference.  */
      if (int_size_in_bytes (type) <= 0)
	{
	  u = build_pointer_type (type);

	  /* Args grow upward.  */
	  t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
		     build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
	  TREE_SIDE_EFFECTS (t) = 1;

	  t = build1 (NOP_EXPR, build_pointer_type (u), t);
	  TREE_SIDE_EFFECTS (t) = 1;

	  /* Dereference the fetched pointer to reach the actual
	     argument object.  */
	  t = build1 (INDIRECT_REF, u, t);
	  TREE_SIDE_EFFECTS (t) = 1;

	  return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
	}
      else
	return std_expand_builtin_va_arg (valist, type);
    }

  /* V.4 path: pick apart the __va_list_tag record, in the field order
     fixed by rs6000_build_va_list.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_res = TREE_CHAIN (f_fpr);
  f_ovf = TREE_CHAIN (f_res);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* SIZE in bytes, RSIZE in words (rounded up).  */
  size = int_size_in_bytes (type);
  rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  /* Classify the argument: which counter field to use (REG), how many
     register slots it consumes (N_REG), and where its save-area slots
     start (SAV_OFS) and how wide they are (SAV_SCALE).  */
  if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
    {
      /* Aggregates and long doubles are passed by reference.  */
      indirect_p = 1;
      reg = gpr;
      n_reg = 1;
      sav_ofs = 0;
      sav_scale = 4;
      /* Only the pointer itself is fetched from the argument area.  */
      size = UNITS_PER_WORD;
      rsize = 1;
    }
  else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
    {
      /* FP args go in FP registers, if present.  */
      indirect_p = 0;
      reg = fpr;
      n_reg = 1;
      /* FP slots start after the eight 4-byte GP slots.  */
      sav_ofs = 8*4;
      sav_scale = 8;
    }
  else
    {
      /* Otherwise into GP registers.  */
      indirect_p = 0;
      reg = gpr;
      n_reg = rsize;
      sav_ofs = 0;
      sav_scale = 4;
    }

  /* Pull the value out of the saved registers ... */

  lab_false = gen_label_rtx ();
  lab_over = gen_label_rtx ();
  addr_rtx = gen_reg_rtx (Pmode);

  /* AltiVec vectors never go in registers.  */
  if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
    {
      TREE_THIS_VOLATILE (reg) = 1;
      /* Branch to lab_false (the overflow-area path) when fewer than
	 N_REG register slots remain.  */
      emit_cmp_and_jump_insns
	(expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
	 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
	 lab_false);

      /* Long long is aligned in the registers.  */
      if (n_reg > 1)
	{
	  /* Round the register counter up to a multiple of N_REG.  */
	  u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
		     build_int_2 (n_reg - 1, 0));
	  u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
	  u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
	  TREE_SIDE_EFFECTS (u) = 1;
	  expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
	}

      if (sav_ofs)
	t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
      else
	t = sav;

      /* Advance the register counter by N_REG; the old value, scaled
	 by SAV_SCALE, indexes into the save area.  */
      u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
		 build_int_2 (n_reg, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      u = build1 (CONVERT_EXPR, integer_type_node, u);
      TREE_SIDE_EFFECTS (u) = 1;

      u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      t = build (PLUS_EXPR, ptr_type_node, t, u);
      TREE_SIDE_EFFECTS (t) = 1;

      r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
      if (r != addr_rtx)
	emit_move_insn (addr_rtx, r);

      emit_jump_insn (gen_jump (lab_over));
      emit_barrier ();
    }

  emit_label (lab_false);

  /* ... otherwise out of the overflow area.  */

  /* Make sure we don't find reg 7 for the next int arg.

     All AltiVec vectors go in the overflow area.  So in the AltiVec
     case we need to get the vectors from the overflow area, but
     remember where the GPRs and FPRs are.  */
  if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
		    || !TARGET_ALTIVEC))
    {
      /* Pin the register counter at 8 (all registers consumed).  */
      t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  /* Care for on-stack alignment if needed.  */
  if (rsize <= 1)
    t = ovf;
  else
    {
      int align;

      /* AltiVec vectors are 16 byte aligned.  */
      if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
	align = 15;
      else
	align = 7;

      /* Round the overflow pointer up: (ovf + align) & ~align.  */
      t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
      t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
    }
  t = save_expr (t);

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);

  /* Bump the overflow pointer past the fetched argument.  */
  t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_over);

  /* For by-reference arguments, what we computed so far is the address
     of the pointer; load through it to get the argument's address.  */
  if (indirect_p)
    {
      r = gen_rtx_MEM (Pmode, addr_rtx);
      set_mem_alias_set (r, get_varargs_alias_set ());
      emit_move_insn (addr_rtx, r);
    }

  return addr_rtx;
}
4501
4502 /* Builtins. */
4503
/* Register the md builtin NAME with function type TYPE and builtin
   code CODE, but only when MASK has bits enabled in target_flags
   (i.e. the corresponding target feature was selected).  */
#define def_builtin(MASK, NAME, TYPE, CODE)			\
  do {								\
    if ((MASK) & target_flags)					\
      builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
			NULL, NULL_TREE);			\
  } while (0)
4510
4511 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
4512
/* Each entry pairs a target_flags mask (checked by def_builtin), the
   insn code implementing the operation, the builtin's user-visible
   name, and its builtin enum code.  */
static const struct builtin_description bdesc_3arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
};
4539
4540 /* DST operations: void foo (void *, const int, const char). */
4541
/* Entry layout matches bdesc_3arg: target_flags mask, insn code,
   builtin name, builtin enum code.  */
static const struct builtin_description bdesc_dst[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
};
4549
4550 /* Simple binary operations: VECc = foo (VECa, VECb). */
4551
4552 static struct builtin_description bdesc_2arg[] =
4553 {
4554 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
4555 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
4556 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
4557 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
4558 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
4559 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
4560 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
4561 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
4562 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
4563 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
4564 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
4565 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
4566 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
4567 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
4568 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
4569 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
4570 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
4571 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
4572 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
4573 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
4574 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
4575 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
4576 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
4577 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
4578 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
4579 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
4580 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
4581 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
4582 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
4583 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
4584 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
4585 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
4586 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
4587 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
4588 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
4589 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
4590 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
4591 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
4592 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
4593 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
4594 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
4595 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
4596 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
4597 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
4598 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
4599 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
4600 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
4601 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
4602 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
4603 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
4604 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
4605 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
4606 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
4607 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
4608 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
4609 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
4610 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
4611 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
4612 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
4613 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
4614 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
4615 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
4616 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
4617 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
4618 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
4619 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
4620 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
4621 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
4622 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
4623 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
4624 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
4625 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
4626 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
4627 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
4628 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
4629 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
4630 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
4631 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
4632 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
4633 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
4634 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
4635 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
4636 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
4637 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
4638 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
4639 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
4640 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
4641 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
4642 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
4643 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
4644 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
4645 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
4646 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
4647 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
4648 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
4649 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
4650 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
4651 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
4652 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
4653 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
4654 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
4655 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
4656 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
4657 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
4658 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
4659 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
4660 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
4661 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
4662 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
4663 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
4664 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
4665 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
4666 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
4667
4668 /* Place holder, leave as first spe builtin. */
4669 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
4670 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
4671 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
4672 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
4673 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
4674 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
4675 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
4676 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
4677 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
4678 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
4679 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
4680 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
4681 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
4682 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
4683 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
4684 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
4685 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
4686 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
4687 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
4688 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
4689 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
4690 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
4691 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
4692 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
4693 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
4694 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
4695 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
4696 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
4697 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
4698 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
4699 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
4700 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
4701 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
4702 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
4703 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
4704 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
4705 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
4706 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
4707 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
4708 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
4709 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
4710 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
4711 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
4712 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
4713 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
4714 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
4715 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
4716 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
4717 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
4718 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
4719 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
4720 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
4721 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
4722 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
4723 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
4724 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
4725 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
4726 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
4727 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
4728 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
4729 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
4730 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
4731 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
4732 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
4733 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
4734 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
4735 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
4736 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
4737 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
4738 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
4739 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
4740 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
4741 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
4742 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
4743 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
4744 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
4745 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
4746 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
4747 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
4748 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
4749 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
4750 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
4751 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
4752 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
4753 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
4754 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
4755 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
4756 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
4757 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
4758 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
4759 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
4760 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
4761 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
4762 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
4763 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
4764 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
4765 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
4766 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
4767 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
4768 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
4769 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
4770 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
4771 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
4772 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
4773 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
4774 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
4775 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
4776 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
4777 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
4778
4779 /* SPE binary operations expecting a 5-bit unsigned literal. */
4780 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
4781
4782 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
4783 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
4784 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
4785 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
4786 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
4787 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
4788 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
4789 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
4790 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
4791 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
4792 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
4793 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
4794 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
4795 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
4796 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
4797 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
4798 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
4799 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
4800 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
4801 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
4802 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
4803 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
4804 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
4805 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
4806 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
4807 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
4808
4809 /* Place-holder. Leave as last binary SPE builtin. */
4810 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
4811 };
4812
4813 /* AltiVec predicates. */
4814
/* Describes one AltiVec predicate builtin: which target flags enable it,
   the comparison insn used to implement it, the assembler mnemonic that
   is threaded through as an operand, the user-visible builtin name, and
   its enumeration code.  */
struct builtin_description_predicates
{
  /* MASK_* target flags that must be enabled for this builtin.  */
  const unsigned int mask;
  /* Insn pattern that performs the underlying vector comparison.  */
  const enum insn_code icode;
  /* Assembler opcode string (the "dot form" that sets CR6); passed to
     the insn as a SYMBOL_REF operand.  */
  const char *opcode;
  /* User-visible name of the builtin function.  */
  const char *const name;
  /* Enumeration code identifying the builtin.  */
  const enum rs6000_builtins code;
};
4823
/* Table of the AltiVec comparison-predicate builtins (vec_all_* /
   vec_any_* support).  Each uses the record-form comparison so the
   result can be read back out of CR6.  */
static const struct builtin_description_predicates bdesc_altivec_preds[] =
{
  /* V4SF (float) comparisons.  */
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
  /* V4SI (word) comparisons.  */
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
  /* V8HI (halfword) comparisons.  */
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
  /* V16QI (byte) comparisons.  */
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
};
4840
4841 /* SPE predicates. */
/* Table of the SPE comparison-predicate builtins.  The first and last
   entries are place-holders that code elsewhere uses to delimit the
   range; keep them in position.  */
static struct builtin_description bdesc_spe_predicates[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
};
4858
4859 /* SPE evsel predicates. */
/* Table of the SPE evsel (select-on-compare) builtins.  Each pairs an
   SPE comparison insn with the corresponding __builtin_spe_evsel_*
   name.  The first and last entries are place-holders delimiting the
   range; keep them in position.  */
static struct builtin_description bdesc_spe_evsel[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
};
4876
4877 /* ABS* operations. */
4878
/* Table of the AltiVec absolute-value builtins; these are expanded by
   altivec_expand_abs_builtin, whose patterns take two extra scratch
   operands.  The abss_* variants saturate.  */
static const struct builtin_description bdesc_abs[] =
{
  { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
  { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
};
4889
4890 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
4891 foo (VECa). */
4892
/* Table of the one-argument builtins, expanded by
   rs6000_expand_unop_builtin.  AltiVec entries come first, then the
   SPE entries, whose ordering constraints are noted below.  */
static struct builtin_description bdesc_1arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
  { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
  /* The vspltis* entries take a 5-bit signed literal, diagnosed in
     rs6000_expand_unop_builtin.  */
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },

  /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
     end with SPE_BUILTIN_EVSUBFUSIAAW.  */
  { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
  { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
  { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
  { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
  { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
  { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
  { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
  { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
  { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
  { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
  { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
  { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
  { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
  { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
  { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
  { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
  { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
  { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
  { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
  { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
  { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
  { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
  { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
  { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
  { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
  { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
  { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
  { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
  /* The evsplat* entries take a 5-bit signed literal, diagnosed in
     rs6000_expand_unop_builtin.  */
  { 0, CODE_FOR_spe_evsplatfi, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI },
  { 0, CODE_FOR_spe_evsplati, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI },

  /* Place-holder.  Leave as last unary SPE builtin.  */
  { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
};
4949
4950 static rtx
4951 rs6000_expand_unop_builtin (icode, arglist, target)
4952 enum insn_code icode;
4953 tree arglist;
4954 rtx target;
4955 {
4956 rtx pat;
4957 tree arg0 = TREE_VALUE (arglist);
4958 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4959 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4960 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4961
4962 if (icode == CODE_FOR_nothing)
4963 /* Builtin not supported on this processor. */
4964 return 0;
4965
4966 /* If we got invalid arguments bail out before generating bad rtl. */
4967 if (arg0 == error_mark_node)
4968 return const0_rtx;
4969
4970 if (icode == CODE_FOR_altivec_vspltisb
4971 || icode == CODE_FOR_altivec_vspltish
4972 || icode == CODE_FOR_altivec_vspltisw
4973 || icode == CODE_FOR_spe_evsplatfi
4974 || icode == CODE_FOR_spe_evsplati)
4975 {
4976 /* Only allow 5-bit *signed* literals. */
4977 if (GET_CODE (op0) != CONST_INT
4978 || INTVAL (op0) > 0x1f
4979 || INTVAL (op0) < -0x1f)
4980 {
4981 error ("argument 1 must be a 5-bit signed literal");
4982 return const0_rtx;
4983 }
4984 }
4985
4986 if (target == 0
4987 || GET_MODE (target) != tmode
4988 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4989 target = gen_reg_rtx (tmode);
4990
4991 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4992 op0 = copy_to_mode_reg (mode0, op0);
4993
4994 pat = GEN_FCN (icode) (target, op0);
4995 if (! pat)
4996 return 0;
4997 emit_insn (pat);
4998
4999 return target;
5000 }
5001
5002 static rtx
5003 altivec_expand_abs_builtin (icode, arglist, target)
5004 enum insn_code icode;
5005 tree arglist;
5006 rtx target;
5007 {
5008 rtx pat, scratch1, scratch2;
5009 tree arg0 = TREE_VALUE (arglist);
5010 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5011 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5012 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5013
5014 /* If we have invalid arguments, bail out before generating bad rtl. */
5015 if (arg0 == error_mark_node)
5016 return const0_rtx;
5017
5018 if (target == 0
5019 || GET_MODE (target) != tmode
5020 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5021 target = gen_reg_rtx (tmode);
5022
5023 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5024 op0 = copy_to_mode_reg (mode0, op0);
5025
5026 scratch1 = gen_reg_rtx (mode0);
5027 scratch2 = gen_reg_rtx (mode0);
5028
5029 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
5030 if (! pat)
5031 return 0;
5032 emit_insn (pat);
5033
5034 return target;
5035 }
5036
5037 static rtx
5038 rs6000_expand_binop_builtin (icode, arglist, target)
5039 enum insn_code icode;
5040 tree arglist;
5041 rtx target;
5042 {
5043 rtx pat;
5044 tree arg0 = TREE_VALUE (arglist);
5045 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5046 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5047 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5048 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5049 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5050 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5051
5052 if (icode == CODE_FOR_nothing)
5053 /* Builtin not supported on this processor. */
5054 return 0;
5055
5056 /* If we got invalid arguments bail out before generating bad rtl. */
5057 if (arg0 == error_mark_node || arg1 == error_mark_node)
5058 return const0_rtx;
5059
5060 if (icode == CODE_FOR_altivec_vcfux
5061 || icode == CODE_FOR_altivec_vcfsx
5062 || icode == CODE_FOR_altivec_vctsxs
5063 || icode == CODE_FOR_altivec_vctuxs
5064 || icode == CODE_FOR_altivec_vspltb
5065 || icode == CODE_FOR_altivec_vsplth
5066 || icode == CODE_FOR_altivec_vspltw
5067 || icode == CODE_FOR_spe_evaddiw
5068 || icode == CODE_FOR_spe_evldd
5069 || icode == CODE_FOR_spe_evldh
5070 || icode == CODE_FOR_spe_evldw
5071 || icode == CODE_FOR_spe_evlhhesplat
5072 || icode == CODE_FOR_spe_evlhhossplat
5073 || icode == CODE_FOR_spe_evlhhousplat
5074 || icode == CODE_FOR_spe_evlwhe
5075 || icode == CODE_FOR_spe_evlwhos
5076 || icode == CODE_FOR_spe_evlwhou
5077 || icode == CODE_FOR_spe_evlwhsplat
5078 || icode == CODE_FOR_spe_evlwwsplat
5079 || icode == CODE_FOR_spe_evrlwi
5080 || icode == CODE_FOR_spe_evslwi
5081 || icode == CODE_FOR_spe_evsrwis
5082 || icode == CODE_FOR_spe_evsubifw
5083 || icode == CODE_FOR_spe_evsrwiu)
5084 {
5085 /* Only allow 5-bit unsigned literals. */
5086 if (TREE_CODE (arg1) != INTEGER_CST
5087 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5088 {
5089 error ("argument 2 must be a 5-bit unsigned literal");
5090 return const0_rtx;
5091 }
5092 }
5093
5094 if (target == 0
5095 || GET_MODE (target) != tmode
5096 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5097 target = gen_reg_rtx (tmode);
5098
5099 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5100 op0 = copy_to_mode_reg (mode0, op0);
5101 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5102 op1 = copy_to_mode_reg (mode1, op1);
5103
5104 pat = GEN_FCN (icode) (target, op0, op1);
5105 if (! pat)
5106 return 0;
5107 emit_insn (pat);
5108
5109 return target;
5110 }
5111
/* Expand an AltiVec predicate builtin (vec_all_* / vec_any_*).
   ICODE is the comparison insn, OPCODE the assembler mnemonic passed
   through as a SYMBOL_REF operand, ARGLIST holds the CR6 form selector
   followed by the two vectors to compare, and TARGET is a suggested
   SImode destination.  Emits the comparison into a scratch register and
   then one of four CR6-test insns that extracts the desired bit.
   Returns the result rtx, 0 if the pattern fails to match, or const0_rtx
   after diagnosing bad arguments.  */
static rtx
altivec_expand_predicate_builtin (icode, opcode, arglist, target)
     enum insn_code icode;
     const char *opcode;
     tree arglist;
     rtx target;
{
  rtx pat, scratch;
  tree cr6_form = TREE_VALUE (arglist);
  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  /* The predicate result is always a plain int.  */
  enum machine_mode tmode = SImode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int cr6_form_int;

  /* The CR6-form selector must be a compile-time constant so we know
     which CR6 test to emit below.  */
  if (TREE_CODE (cr6_form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_altivec_predicate must be a constant");
      return const0_rtx;
    }
  else
    cr6_form_int = TREE_INT_CST_LOW (cr6_form);

  /* Both vector operands of the comparison must share a mode.  */
  if (mode0 != mode1)
    abort ();

  /* If we have invalid arguments, bail out before generating bad rtl.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  /* The comparison's vector result is discarded; only CR6 matters.  */
  scratch = gen_reg_rtx (mode0);

  pat = GEN_FCN (icode) (scratch, op0, op1,
			 gen_rtx (SYMBOL_REF, Pmode, opcode));
  if (! pat)
    return 0;
  emit_insn (pat);

  /* The vec_any* and vec_all* predicates use the same opcodes for two
     different operations, but the bits in CR6 will be different
     depending on what information we want.  So we have to play tricks
     with CR6 to get the right bits out.

     If you think this is disgusting, look at the specs for the
     AltiVec predicates.  */

  switch (cr6_form_int)
    {
    case 0:
      emit_insn (gen_cr6_test_for_zero (target));
      break;
    case 1:
      emit_insn (gen_cr6_test_for_zero_reverse (target));
      break;
    case 2:
      emit_insn (gen_cr6_test_for_lt (target));
      break;
    case 3:
      emit_insn (gen_cr6_test_for_lt_reverse (target));
      break;
    default:
      error ("argument 1 of __builtin_altivec_predicate is out of range");
      break;
    }

  return target;
}
5192
5193 static rtx
5194 altivec_expand_stv_builtin (icode, arglist)
5195 enum insn_code icode;
5196 tree arglist;
5197 {
5198 tree arg0 = TREE_VALUE (arglist);
5199 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5200 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5201 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5202 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5203 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5204 rtx pat;
5205 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
5206 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
5207 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
5208
5209 /* Invalid arguments. Bail before doing anything stoopid! */
5210 if (arg0 == error_mark_node
5211 || arg1 == error_mark_node
5212 || arg2 == error_mark_node)
5213 return const0_rtx;
5214
5215 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
5216 op0 = copy_to_mode_reg (mode2, op0);
5217 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
5218 op1 = copy_to_mode_reg (mode0, op1);
5219 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5220 op2 = copy_to_mode_reg (mode1, op2);
5221
5222 pat = GEN_FCN (icode) (op1, op2, op0);
5223 if (pat)
5224 emit_insn (pat);
5225 return NULL_RTX;
5226 }
5227
5228 static rtx
5229 rs6000_expand_ternop_builtin (icode, arglist, target)
5230 enum insn_code icode;
5231 tree arglist;
5232 rtx target;
5233 {
5234 rtx pat;
5235 tree arg0 = TREE_VALUE (arglist);
5236 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5237 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5238 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5239 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5240 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5241 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5242 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5243 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5244 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
5245
5246 if (icode == CODE_FOR_nothing)
5247 /* Builtin not supported on this processor. */
5248 return 0;
5249
5250 /* If we got invalid arguments bail out before generating bad rtl. */
5251 if (arg0 == error_mark_node
5252 || arg1 == error_mark_node
5253 || arg2 == error_mark_node)
5254 return const0_rtx;
5255
5256 if (icode == CODE_FOR_altivec_vsldoi_4sf
5257 || icode == CODE_FOR_altivec_vsldoi_4si
5258 || icode == CODE_FOR_altivec_vsldoi_8hi
5259 || icode == CODE_FOR_altivec_vsldoi_16qi)
5260 {
5261 /* Only allow 4-bit unsigned literals. */
5262 if (TREE_CODE (arg2) != INTEGER_CST
5263 || TREE_INT_CST_LOW (arg2) & ~0xf)
5264 {
5265 error ("argument 3 must be a 4-bit unsigned literal");
5266 return const0_rtx;
5267 }
5268 }
5269
5270 if (target == 0
5271 || GET_MODE (target) != tmode
5272 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5273 target = gen_reg_rtx (tmode);
5274
5275 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5276 op0 = copy_to_mode_reg (mode0, op0);
5277 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5278 op1 = copy_to_mode_reg (mode1, op1);
5279 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
5280 op2 = copy_to_mode_reg (mode2, op2);
5281
5282 pat = GEN_FCN (icode) (target, op0, op1, op2);
5283 if (! pat)
5284 return 0;
5285 emit_insn (pat);
5286
5287 return target;
5288 }
5289
5290 /* Expand the lvx builtins. */
5291 static rtx
5292 altivec_expand_ld_builtin (exp, target, expandedp)
5293 tree exp;
5294 rtx target;
5295 bool *expandedp;
5296 {
5297 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5298 tree arglist = TREE_OPERAND (exp, 1);
5299 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5300 tree arg0;
5301 enum machine_mode tmode, mode0;
5302 rtx pat, op0;
5303 enum insn_code icode;
5304
5305 switch (fcode)
5306 {
5307 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
5308 icode = CODE_FOR_altivec_lvx_16qi;
5309 break;
5310 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
5311 icode = CODE_FOR_altivec_lvx_8hi;
5312 break;
5313 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
5314 icode = CODE_FOR_altivec_lvx_4si;
5315 break;
5316 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
5317 icode = CODE_FOR_altivec_lvx_4sf;
5318 break;
5319 default:
5320 *expandedp = false;
5321 return NULL_RTX;
5322 }
5323
5324 *expandedp = true;
5325
5326 arg0 = TREE_VALUE (arglist);
5327 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5328 tmode = insn_data[icode].operand[0].mode;
5329 mode0 = insn_data[icode].operand[1].mode;
5330
5331 if (target == 0
5332 || GET_MODE (target) != tmode
5333 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5334 target = gen_reg_rtx (tmode);
5335
5336 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5337 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5338
5339 pat = GEN_FCN (icode) (target, op0);
5340 if (! pat)
5341 return 0;
5342 emit_insn (pat);
5343 return target;
5344 }
5345
5346 /* Expand the stvx builtins. */
5347 static rtx
5348 altivec_expand_st_builtin (exp, target, expandedp)
5349 tree exp;
5350 rtx target ATTRIBUTE_UNUSED;
5351 bool *expandedp;
5352 {
5353 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5354 tree arglist = TREE_OPERAND (exp, 1);
5355 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5356 tree arg0, arg1;
5357 enum machine_mode mode0, mode1;
5358 rtx pat, op0, op1;
5359 enum insn_code icode;
5360
5361 switch (fcode)
5362 {
5363 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
5364 icode = CODE_FOR_altivec_stvx_16qi;
5365 break;
5366 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
5367 icode = CODE_FOR_altivec_stvx_8hi;
5368 break;
5369 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
5370 icode = CODE_FOR_altivec_stvx_4si;
5371 break;
5372 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
5373 icode = CODE_FOR_altivec_stvx_4sf;
5374 break;
5375 default:
5376 *expandedp = false;
5377 return NULL_RTX;
5378 }
5379
5380 arg0 = TREE_VALUE (arglist);
5381 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5382 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5383 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5384 mode0 = insn_data[icode].operand[0].mode;
5385 mode1 = insn_data[icode].operand[1].mode;
5386
5387 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5388 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5389 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5390 op1 = copy_to_mode_reg (mode1, op1);
5391
5392 pat = GEN_FCN (icode) (op0, op1);
5393 if (pat)
5394 emit_insn (pat);
5395
5396 *expandedp = true;
5397 return NULL_RTX;
5398 }
5399
5400 /* Expand the dst builtins. */
5401 static rtx
5402 altivec_expand_dst_builtin (exp, target, expandedp)
5403 tree exp;
5404 rtx target ATTRIBUTE_UNUSED;
5405 bool *expandedp;
5406 {
5407 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5408 tree arglist = TREE_OPERAND (exp, 1);
5409 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5410 tree arg0, arg1, arg2;
5411 enum machine_mode mode0, mode1, mode2;
5412 rtx pat, op0, op1, op2;
5413 struct builtin_description *d;
5414 size_t i;
5415
5416 *expandedp = false;
5417
5418 /* Handle DST variants. */
5419 d = (struct builtin_description *) bdesc_dst;
5420 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
5421 if (d->code == fcode)
5422 {
5423 arg0 = TREE_VALUE (arglist);
5424 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5425 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5426 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5427 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5428 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5429 mode0 = insn_data[d->icode].operand[0].mode;
5430 mode1 = insn_data[d->icode].operand[1].mode;
5431 mode2 = insn_data[d->icode].operand[2].mode;
5432
5433 /* Invalid arguments, bail out before generating bad rtl. */
5434 if (arg0 == error_mark_node
5435 || arg1 == error_mark_node
5436 || arg2 == error_mark_node)
5437 return const0_rtx;
5438
5439 if (TREE_CODE (arg2) != INTEGER_CST
5440 || TREE_INT_CST_LOW (arg2) & ~0x3)
5441 {
5442 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
5443 return const0_rtx;
5444 }
5445
5446 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
5447 op0 = copy_to_mode_reg (mode0, op0);
5448 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
5449 op1 = copy_to_mode_reg (mode1, op1);
5450
5451 pat = GEN_FCN (d->icode) (op0, op1, op2);
5452 if (pat != 0)
5453 emit_insn (pat);
5454
5455 *expandedp = true;
5456 return NULL_RTX;
5457 }
5458
5459 return NULL_RTX;
5460 }
5461
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.

   Tries the specialized load/store/dst expanders first, then the
   irregular AltiVec builtins (stores, VSCR access, data-stream
   control), then the abs and predicate tables, and finally the lv*
   loads, which are registered specially and expanded as binops.  */
static rtx
altivec_expand_builtin (exp, target, expandedp)
     tree exp;
     rtx target;
     bool *expandedp;
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  enum insn_code icode;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0;
  rtx op0, pat;
  enum machine_mode tmode, mode0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);

  /* Specialized expanders; each sets *EXPANDEDP if it recognized
     the builtin.  */
  target = altivec_expand_ld_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_st_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_dst_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  /* Assume success; reset at the bottom if nothing matched.  */
  *expandedp = true;

  switch (fcode)
    {
    case ALTIVEC_BUILTIN_STVX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
    case ALTIVEC_BUILTIN_STVEBX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
    case ALTIVEC_BUILTIN_STVEHX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
    case ALTIVEC_BUILTIN_STVEWX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
    case ALTIVEC_BUILTIN_STVXL:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);

    case ALTIVEC_BUILTIN_MFVSCR:
      /* Move-from-VSCR: no arguments, returns a vector.  */
      icode = CODE_FOR_altivec_mfvscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    case ALTIVEC_BUILTIN_MTVSCR:
      /* Move-to-VSCR: one vector argument, no result.  */
      icode = CODE_FOR_altivec_mtvscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSSALL:
      /* Stop all data streams.  */
      emit_insn (gen_altivec_dssall ());
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSS:
      /* Stop one data stream; the argument must be a 2-bit literal
	 selecting the stream.  */
      icode = CODE_FOR_altivec_dss;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (arg0) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg0) & ~0x3)
	{
	  error ("argument to dss must be a 2-bit unsigned literal");
	  return const0_rtx;
	}

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      emit_insn (gen_altivec_dss (op0));
      return NULL_RTX;
    }
  /* Codes not handled above fall through to the table lookups.  */

  /* Expand abs* operations.  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
    if (d->code == fcode)
      return altivec_expand_abs_builtin (d->icode, arglist, target);

  /* Expand the AltiVec predicates.  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
    if (dp->code == fcode)
      return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);

  /* LV* are funky.  We initialized them differently.  */
  switch (fcode)
    {
    case ALTIVEC_BUILTIN_LVSL:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVSR:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEBX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEHX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEWX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVXL:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx,
					  arglist, target);
    default:
      break;
    }

  /* Not an AltiVec builtin after all.  */
  *expandedp = false;
  return NULL_RTX;
}
5613
/* Binops that need to be initialized manually, but can be expanded
   automagically by rs6000_expand_binop_builtin.  These are the SPE
   vector loads (indexed and offset forms); spe_expand_builtin walks
   this table after its special cases.  */
static struct builtin_description bdesc_2arg_spe[] =
{
  { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
  { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
  { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
  { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
  { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
  { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
  { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
  { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
  { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
  { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
  { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
  { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
  { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
  { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
  { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
  { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
  { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
  { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
  { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
  { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
  { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
  { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
};
5641
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.

   This expands the SPE builtins that are not simple unary and binary
   operations.  */
static rtx
spe_expand_builtin (exp, target, expandedp)
     tree exp;
     rtx target;
     bool *expandedp;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg1, arg0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  enum insn_code icode;
  enum machine_mode tmode, mode0;
  rtx pat, op0;
  struct builtin_description *d;
  size_t i;

  /* Assume success; reset at the bottom if nothing matched.  */
  *expandedp = true;

  /* Syntax check for a 5-bit unsigned immediate.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDD:
    case SPE_BUILTIN_EVSTDH:
    case SPE_BUILTIN_EVSTDW:
    case SPE_BUILTIN_EVSTWHE:
    case SPE_BUILTIN_EVSTWHO:
    case SPE_BUILTIN_EVSTWWE:
    case SPE_BUILTIN_EVSTWWO:
      /* The immediate is the third element of the argument list.
	 NOTE(review): the diagnostic says "argument 2" — presumably
	 counting from the user's perspective of the store form;
	 confirm against the SPE builtin documentation.  */
      arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      if (TREE_CODE (arg1) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg1) & ~0x1f)
	{
	  error ("argument 2 must be a 5-bit unsigned literal");
	  return const0_rtx;
	}
      break;
    default:
      break;
    }

  /* Offset/indexed loads expand as ordinary binops.  */
  d = (struct builtin_description *) bdesc_2arg_spe;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
    if (d->code == fcode)
      return rs6000_expand_binop_builtin (d->icode, arglist, target);

  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_predicate_builtin (d->icode, arglist, target);

  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_evsel_builtin (d->icode, arglist, target);

  /* Stores and the SPEFSCR accessors.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDDX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
    case SPE_BUILTIN_EVSTDHX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
    case SPE_BUILTIN_EVSTDWX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
    case SPE_BUILTIN_EVSTWHEX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
    case SPE_BUILTIN_EVSTWHOX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
    case SPE_BUILTIN_EVSTWWEX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
    case SPE_BUILTIN_EVSTWWOX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
    case SPE_BUILTIN_EVSTDD:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
    case SPE_BUILTIN_EVSTDH:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
    case SPE_BUILTIN_EVSTDW:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
    case SPE_BUILTIN_EVSTWHE:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
    case SPE_BUILTIN_EVSTWHO:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
    case SPE_BUILTIN_EVSTWWE:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
    case SPE_BUILTIN_EVSTWWO:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
    case SPE_BUILTIN_MFSPEFSCR:
      /* Read the SPEFSCR: no arguments, integer result.  */
      icode = CODE_FOR_spe_mfspefscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;
    case SPE_BUILTIN_MTSPEFSCR:
      /* Write the SPEFSCR: one integer argument, no result.  */
      icode = CODE_FOR_spe_mtspefscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      if (arg0 == error_mark_node)
	return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;
    default:
      break;
    }

  /* Not an SPE builtin after all.  */
  *expandedp = false;
  return NULL_RTX;
}
5769
5770 static rtx
5771 spe_expand_predicate_builtin (icode, arglist, target)
5772 enum insn_code icode;
5773 tree arglist;
5774 rtx target;
5775 {
5776 rtx pat, scratch, tmp;
5777 tree form = TREE_VALUE (arglist);
5778 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5779 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5780 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5781 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5782 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5783 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5784 int form_int;
5785 enum rtx_code code;
5786
5787 if (TREE_CODE (form) != INTEGER_CST)
5788 {
5789 error ("argument 1 of __builtin_spe_predicate must be a constant");
5790 return const0_rtx;
5791 }
5792 else
5793 form_int = TREE_INT_CST_LOW (form);
5794
5795 if (mode0 != mode1)
5796 abort ();
5797
5798 if (arg0 == error_mark_node || arg1 == error_mark_node)
5799 return const0_rtx;
5800
5801 if (target == 0
5802 || GET_MODE (target) != SImode
5803 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
5804 target = gen_reg_rtx (SImode);
5805
5806 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5807 op0 = copy_to_mode_reg (mode0, op0);
5808 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5809 op1 = copy_to_mode_reg (mode1, op1);
5810
5811 scratch = gen_reg_rtx (CCmode);
5812
5813 pat = GEN_FCN (icode) (scratch, op0, op1);
5814 if (! pat)
5815 return const0_rtx;
5816 emit_insn (pat);
5817
5818 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
5819 _lower_. We use one compare, but look in different bits of the
5820 CR for each variant.
5821
5822 There are 2 elements in each SPE simd type (upper/lower). The CR
5823 bits are set as follows:
5824
5825 BIT0 | BIT 1 | BIT 2 | BIT 3
5826 U | L | (U | L) | (U & L)
5827
5828 So, for an "all" relationship, BIT 3 would be set.
5829 For an "any" relationship, BIT 2 would be set. Etc.
5830
5831 Following traditional nomenclature, these bits map to:
5832
5833 BIT0 | BIT 1 | BIT 2 | BIT 3
5834 LT | GT | EQ | OV
5835
5836 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
5837 */
5838
5839 switch (form_int)
5840 {
5841 /* All variant. OV bit. */
5842 case 0:
5843 /* We need to get to the OV bit, which is the ORDERED bit. We
5844 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
5845 that's ugly and will trigger a validate_condition_mode abort.
5846 So let's just use another pattern. */
5847 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
5848 return target;
5849 /* Any variant. EQ bit. */
5850 case 1:
5851 code = EQ;
5852 break;
5853 /* Upper variant. LT bit. */
5854 case 2:
5855 code = LT;
5856 break;
5857 /* Lower variant. GT bit. */
5858 case 3:
5859 code = GT;
5860 break;
5861 default:
5862 error ("argument 1 of __builtin_spe_predicate is out of range");
5863 return const0_rtx;
5864 }
5865
5866 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
5867 emit_move_insn (target, tmp);
5868
5869 return target;
5870 }
5871
5872 /* The evsel builtins look like this:
5873
5874 e = __builtin_spe_evsel_OP (a, b, c, d);
5875
5876 and work like this:
5877
5878 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
5879 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
5880 */
5881
5882 static rtx
5883 spe_expand_evsel_builtin (icode, arglist, target)
5884 enum insn_code icode;
5885 tree arglist;
5886 rtx target;
5887 {
5888 rtx pat, scratch;
5889 tree arg0 = TREE_VALUE (arglist);
5890 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5891 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5892 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
5893 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5894 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5895 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5896 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
5897 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5898 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5899
5900 if (mode0 != mode1)
5901 abort ();
5902
5903 if (arg0 == error_mark_node || arg1 == error_mark_node
5904 || arg2 == error_mark_node || arg3 == error_mark_node)
5905 return const0_rtx;
5906
5907 if (target == 0
5908 || GET_MODE (target) != mode0
5909 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
5910 target = gen_reg_rtx (mode0);
5911
5912 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5913 op0 = copy_to_mode_reg (mode0, op0);
5914 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5915 op1 = copy_to_mode_reg (mode0, op1);
5916 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5917 op2 = copy_to_mode_reg (mode0, op2);
5918 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
5919 op3 = copy_to_mode_reg (mode0, op3);
5920
5921 /* Generate the compare. */
5922 scratch = gen_reg_rtx (CCmode);
5923 pat = GEN_FCN (icode) (scratch, op0, op1);
5924 if (! pat)
5925 return const0_rtx;
5926 emit_insn (pat);
5927
5928 if (mode0 == V2SImode)
5929 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
5930 else
5931 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
5932
5933 return target;
5934 }
5935
5936 /* Expand an expression EXP that calls a built-in function,
5937 with result going to TARGET if that's convenient
5938 (and in mode MODE if that's convenient).
5939 SUBTARGET may be used as the target for computing one of EXP's operands.
5940 IGNORE is nonzero if the value is to be ignored. */
5941
5942 static rtx
5943 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
5944 tree exp;
5945 rtx target;
5946 rtx subtarget ATTRIBUTE_UNUSED;
5947 enum machine_mode mode ATTRIBUTE_UNUSED;
5948 int ignore ATTRIBUTE_UNUSED;
5949 {
5950 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5951 tree arglist = TREE_OPERAND (exp, 1);
5952 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5953 struct builtin_description *d;
5954 size_t i;
5955 rtx ret;
5956 bool success;
5957
5958 if (TARGET_ALTIVEC)
5959 {
5960 ret = altivec_expand_builtin (exp, target, &success);
5961
5962 if (success)
5963 return ret;
5964 }
5965 if (TARGET_SPE)
5966 {
5967 ret = spe_expand_builtin (exp, target, &success);
5968
5969 if (success)
5970 return ret;
5971 }
5972
5973 if (TARGET_ALTIVEC || TARGET_SPE)
5974 {
5975 /* Handle simple unary operations. */
5976 d = (struct builtin_description *) bdesc_1arg;
5977 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5978 if (d->code == fcode)
5979 return rs6000_expand_unop_builtin (d->icode, arglist, target);
5980
5981 /* Handle simple binary operations. */
5982 d = (struct builtin_description *) bdesc_2arg;
5983 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5984 if (d->code == fcode)
5985 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5986
5987 /* Handle simple ternary operations. */
5988 d = (struct builtin_description *) bdesc_3arg;
5989 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
5990 if (d->code == fcode)
5991 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
5992 }
5993
5994 abort ();
5995 return NULL_RTX;
5996 }
5997
/* Register the rs6000 target builtins.  Creates the opaque vector
   types used by the SPE builtins, then registers the SPE-only,
   AltiVec-only, and shared builtin sets for whichever units the
   current target flags enable.  */
static void
rs6000_init_builtins ()
{
  /* Opaque copies of the V2SI/V2SF types, plus a pointer type to the
     opaque V2SI, must exist before either init routine runs.  */
  opaque_V2SI_type_node = copy_node (V2SI_type_node);
  opaque_V2SF_type_node = copy_node (V2SF_type_node);
  opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);

  if (TARGET_SPE)
    spe_init_builtins ();
  if (TARGET_ALTIVEC)
    altivec_init_builtins ();
  /* Builtins common to both units.  */
  if (TARGET_ALTIVEC || TARGET_SPE)
    rs6000_common_init_builtins ();
}
6012
6013 /* Search through a set of builtins and enable the mask bits.
6014 DESC is an array of builtins.
6015 SIZE is the total number of builtins.
6016 START is the builtin enum at which to start.
6017 END is the builtin enum at which to end. */
6018 static void
6019 enable_mask_for_builtins (desc, size, start, end)
6020 struct builtin_description *desc;
6021 int size;
6022 enum rs6000_builtins start, end;
6023 {
6024 int i;
6025
6026 for (i = 0; i < size; ++i)
6027 if (desc[i].code == start)
6028 break;
6029
6030 if (i == size)
6031 return;
6032
6033 for (; i < size; ++i)
6034 {
6035 /* Flip all the bits on. */
6036 desc[i].mask = target_flags;
6037 if (desc[i].code == end)
6038 break;
6039 }
6040 }
6041
/* Register the SPE-specific builtins: build the function types the
   irregular SPE builtins need, enable the mask bits for the
   table-driven ones, then register the loads, stores, SPEFSCR
   accessors, predicates and evsel builtins.  */
static void
spe_init_builtins ()
{
  tree endlink = void_list_node;
  tree puint_type_node = build_pointer_type (unsigned_type_node);
  tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
  struct builtin_description *d;
  size_t i;

  /* Function types, named ret_ftype_args.  The "opaque" vector types
     are used so the SPE builtins accept any V2SI/V2SF-shaped value.  */
  tree v2si_ftype_4_v2si
    = build_function_type
    (opaque_V2SI_type_node,
     tree_cons (NULL_TREE, opaque_V2SI_type_node,
		tree_cons (NULL_TREE, opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      tree_cons (NULL_TREE, opaque_V2SI_type_node,
						 endlink)))));

  tree v2sf_ftype_4_v2sf
    = build_function_type
    (opaque_V2SF_type_node,
     tree_cons (NULL_TREE, opaque_V2SF_type_node,
		tree_cons (NULL_TREE, opaque_V2SF_type_node,
			   tree_cons (NULL_TREE, opaque_V2SF_type_node,
				      tree_cons (NULL_TREE, opaque_V2SF_type_node,
						 endlink)))));

  tree int_ftype_int_v2si_v2si
    = build_function_type
    (integer_type_node,
     tree_cons (NULL_TREE, integer_type_node,
		tree_cons (NULL_TREE, opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      endlink))));

  tree int_ftype_int_v2sf_v2sf
    = build_function_type
    (integer_type_node,
     tree_cons (NULL_TREE, integer_type_node,
		tree_cons (NULL_TREE, opaque_V2SF_type_node,
			   tree_cons (NULL_TREE, opaque_V2SF_type_node,
				      endlink))));

  tree void_ftype_v2si_puint_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      tree_cons (NULL_TREE, puint_type_node,
						 tree_cons (NULL_TREE,
							    integer_type_node,
							    endlink))));

  tree void_ftype_v2si_puint_char
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      tree_cons (NULL_TREE, puint_type_node,
						 tree_cons (NULL_TREE,
							    char_type_node,
							    endlink))));

  tree void_ftype_v2si_pv2si_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
						 tree_cons (NULL_TREE,
							    integer_type_node,
							    endlink))));

  tree void_ftype_v2si_pv2si_char
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
						 tree_cons (NULL_TREE,
							    char_type_node,
							    endlink))));

  tree void_ftype_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, integer_type_node, endlink));

  tree int_ftype_void
    = build_function_type (integer_type_node, endlink);

  tree v2si_ftype_pv2si_int
    = build_function_type (opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));

  tree v2si_ftype_puint_int
    = build_function_type (opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, puint_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));

  tree v2si_ftype_pushort_int
    = build_function_type (opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, pushort_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));

  /* The initialization of the simple binary and unary builtins is
     done in rs6000_common_init_builtins, but we have to enable the
     mask bits here manually because we have run out of `target_flags'
     bits.  We really need to redesign this mask business.  */

  enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
			    ARRAY_SIZE (bdesc_2arg),
			    SPE_BUILTIN_EVADDW,
			    SPE_BUILTIN_EVXOR);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
			    ARRAY_SIZE (bdesc_1arg),
			    SPE_BUILTIN_EVABS,
			    SPE_BUILTIN_EVSUBFUSIAAW);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
			    ARRAY_SIZE (bdesc_spe_predicates),
			    SPE_BUILTIN_EVCMPEQ,
			    SPE_BUILTIN_EVFSTSTLT);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
			    ARRAY_SIZE (bdesc_spe_evsel),
			    SPE_BUILTIN_EVSEL_CMPGTS,
			    SPE_BUILTIN_EVSEL_FSTSTEQ);

  /* Initialize irregular SPE builtins.  */

  /* SPEFSCR accessors and stores.  */
  def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
  def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
  def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
  def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
  def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
  def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
  def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
  def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
  def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
  def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
  def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
  def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
  def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
  def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
  def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
  def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);

  /* Loads.  */
  def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
  def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
  def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
  def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
  def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
  def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
  def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
  def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
  def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
  def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
  def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
  def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
  def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
  def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
  def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);

  /* Predicates.  The function type depends on the vector mode of the
     compare insn's first input operand.  */
  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
    {
      tree type;

      switch (insn_data[d->icode].operand[1].mode)
	{
	case V2SImode:
	  type = int_ftype_int_v2si_v2si;
	  break;
	case V2SFmode:
	  type = int_ftype_int_v2sf_v2sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (d->mask, d->name, type, d->code);
    }

  /* Evsel predicates.  */
  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
    {
      tree type;

      switch (insn_data[d->icode].operand[1].mode)
	{
	case V2SImode:
	  type = v2si_ftype_4_v2si;
	  break;
	case V2SFmode:
	  type = v2sf_ftype_4_v2sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (d->mask, d->name, type, d->code);
    }
}
6249
6250 static void
6251 altivec_init_builtins ()
6252 {
6253 struct builtin_description *d;
6254 struct builtin_description_predicates *dp;
6255 size_t i;
6256 tree pfloat_type_node = build_pointer_type (float_type_node);
6257 tree pint_type_node = build_pointer_type (integer_type_node);
6258 tree pshort_type_node = build_pointer_type (short_integer_type_node);
6259 tree pchar_type_node = build_pointer_type (char_type_node);
6260
6261 tree pvoid_type_node = build_pointer_type (void_type_node);
6262
6263 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
6264 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
6265 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
6266 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
6267
6268 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
6269
6270 tree int_ftype_int_v4si_v4si
6271 = build_function_type_list (integer_type_node,
6272 integer_type_node, V4SI_type_node,
6273 V4SI_type_node, NULL_TREE);
6274 tree v4sf_ftype_pcfloat
6275 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
6276 tree void_ftype_pfloat_v4sf
6277 = build_function_type_list (void_type_node,
6278 pfloat_type_node, V4SF_type_node, NULL_TREE);
6279 tree v4si_ftype_pcint
6280 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
6281 tree void_ftype_pint_v4si
6282 = build_function_type_list (void_type_node,
6283 pint_type_node, V4SI_type_node, NULL_TREE);
6284 tree v8hi_ftype_pcshort
6285 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
6286 tree void_ftype_pshort_v8hi
6287 = build_function_type_list (void_type_node,
6288 pshort_type_node, V8HI_type_node, NULL_TREE);
6289 tree v16qi_ftype_pcchar
6290 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
6291 tree void_ftype_pchar_v16qi
6292 = build_function_type_list (void_type_node,
6293 pchar_type_node, V16QI_type_node, NULL_TREE);
6294 tree void_ftype_v4si
6295 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
6296 tree v8hi_ftype_void
6297 = build_function_type (V8HI_type_node, void_list_node);
6298 tree void_ftype_void
6299 = build_function_type (void_type_node, void_list_node);
6300 tree void_ftype_qi
6301 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
6302
6303 tree v16qi_ftype_int_pcvoid
6304 = build_function_type_list (V16QI_type_node,
6305 integer_type_node, pcvoid_type_node, NULL_TREE);
6306 tree v8hi_ftype_int_pcvoid
6307 = build_function_type_list (V8HI_type_node,
6308 integer_type_node, pcvoid_type_node, NULL_TREE);
6309 tree v4si_ftype_int_pcvoid
6310 = build_function_type_list (V4SI_type_node,
6311 integer_type_node, pcvoid_type_node, NULL_TREE);
6312
6313 tree void_ftype_v4si_int_pvoid
6314 = build_function_type_list (void_type_node,
6315 V4SI_type_node, integer_type_node,
6316 pvoid_type_node, NULL_TREE);
6317 tree void_ftype_v16qi_int_pvoid
6318 = build_function_type_list (void_type_node,
6319 V16QI_type_node, integer_type_node,
6320 pvoid_type_node, NULL_TREE);
6321 tree void_ftype_v8hi_int_pvoid
6322 = build_function_type_list (void_type_node,
6323 V8HI_type_node, integer_type_node,
6324 pvoid_type_node, NULL_TREE);
6325 tree int_ftype_int_v8hi_v8hi
6326 = build_function_type_list (integer_type_node,
6327 integer_type_node, V8HI_type_node,
6328 V8HI_type_node, NULL_TREE);
6329 tree int_ftype_int_v16qi_v16qi
6330 = build_function_type_list (integer_type_node,
6331 integer_type_node, V16QI_type_node,
6332 V16QI_type_node, NULL_TREE);
6333 tree int_ftype_int_v4sf_v4sf
6334 = build_function_type_list (integer_type_node,
6335 integer_type_node, V4SF_type_node,
6336 V4SF_type_node, NULL_TREE);
6337 tree v4si_ftype_v4si
6338 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
6339 tree v8hi_ftype_v8hi
6340 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
6341 tree v16qi_ftype_v16qi
6342 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
6343 tree v4sf_ftype_v4sf
6344 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
6345 tree void_ftype_pcvoid_int_char
6346 = build_function_type_list (void_type_node,
6347 pcvoid_type_node, integer_type_node,
6348 char_type_node, NULL_TREE);
6349
6350 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
6351 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
6352 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
6353 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
6354 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
6355 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
6356 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
6357 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
6358 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
6359 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
6360 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
6361 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
6362 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
6363 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
6364 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
6365 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
6366 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
6367 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
6368 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
6369 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
6370 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSL);
6371 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSR);
6372 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEBX);
6373 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEHX);
6374 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEWX);
6375 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVXL);
6376 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVX);
6377 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
6378 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
6379 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
6380 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
6381 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
6382
6383 /* Add the DST variants. */
6384 d = (struct builtin_description *) bdesc_dst;
6385 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
6386 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_char, d->code);
6387
6388 /* Initialize the predicates. */
6389 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
6390 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
6391 {
6392 enum machine_mode mode1;
6393 tree type;
6394
6395 mode1 = insn_data[dp->icode].operand[1].mode;
6396
6397 switch (mode1)
6398 {
6399 case V4SImode:
6400 type = int_ftype_int_v4si_v4si;
6401 break;
6402 case V8HImode:
6403 type = int_ftype_int_v8hi_v8hi;
6404 break;
6405 case V16QImode:
6406 type = int_ftype_int_v16qi_v16qi;
6407 break;
6408 case V4SFmode:
6409 type = int_ftype_int_v4sf_v4sf;
6410 break;
6411 default:
6412 abort ();
6413 }
6414
6415 def_builtin (dp->mask, dp->name, type, dp->code);
6416 }
6417
6418 /* Initialize the abs* operators. */
6419 d = (struct builtin_description *) bdesc_abs;
6420 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
6421 {
6422 enum machine_mode mode0;
6423 tree type;
6424
6425 mode0 = insn_data[d->icode].operand[0].mode;
6426
6427 switch (mode0)
6428 {
6429 case V4SImode:
6430 type = v4si_ftype_v4si;
6431 break;
6432 case V8HImode:
6433 type = v8hi_ftype_v8hi;
6434 break;
6435 case V16QImode:
6436 type = v16qi_ftype_v16qi;
6437 break;
6438 case V4SFmode:
6439 type = v4sf_ftype_v4sf;
6440 break;
6441 default:
6442 abort ();
6443 }
6444
6445 def_builtin (d->mask, d->name, type, d->code);
6446 }
6447 }
6448
6449 static void
6450 rs6000_common_init_builtins ()
6451 {
6452 struct builtin_description *d;
6453 size_t i;
6454
6455 tree v4sf_ftype_v4sf_v4sf_v16qi
6456 = build_function_type_list (V4SF_type_node,
6457 V4SF_type_node, V4SF_type_node,
6458 V16QI_type_node, NULL_TREE);
6459 tree v4si_ftype_v4si_v4si_v16qi
6460 = build_function_type_list (V4SI_type_node,
6461 V4SI_type_node, V4SI_type_node,
6462 V16QI_type_node, NULL_TREE);
6463 tree v8hi_ftype_v8hi_v8hi_v16qi
6464 = build_function_type_list (V8HI_type_node,
6465 V8HI_type_node, V8HI_type_node,
6466 V16QI_type_node, NULL_TREE);
6467 tree v16qi_ftype_v16qi_v16qi_v16qi
6468 = build_function_type_list (V16QI_type_node,
6469 V16QI_type_node, V16QI_type_node,
6470 V16QI_type_node, NULL_TREE);
6471 tree v4si_ftype_char
6472 = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
6473 tree v8hi_ftype_char
6474 = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
6475 tree v16qi_ftype_char
6476 = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
6477 tree v8hi_ftype_v16qi
6478 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
6479 tree v4sf_ftype_v4sf
6480 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
6481
6482 tree v2si_ftype_v2si_v2si
6483 = build_function_type_list (opaque_V2SI_type_node,
6484 opaque_V2SI_type_node,
6485 opaque_V2SI_type_node, NULL_TREE);
6486
6487 tree v2sf_ftype_v2sf_v2sf
6488 = build_function_type_list (opaque_V2SF_type_node,
6489 opaque_V2SF_type_node,
6490 opaque_V2SF_type_node, NULL_TREE);
6491
6492 tree v2si_ftype_int_int
6493 = build_function_type_list (opaque_V2SI_type_node,
6494 integer_type_node, integer_type_node,
6495 NULL_TREE);
6496
6497 tree v2si_ftype_v2si
6498 = build_function_type_list (opaque_V2SI_type_node,
6499 opaque_V2SI_type_node, NULL_TREE);
6500
6501 tree v2sf_ftype_v2sf
6502 = build_function_type_list (opaque_V2SF_type_node,
6503 opaque_V2SF_type_node, NULL_TREE);
6504
6505 tree v2sf_ftype_v2si
6506 = build_function_type_list (opaque_V2SF_type_node,
6507 opaque_V2SI_type_node, NULL_TREE);
6508
6509 tree v2si_ftype_v2sf
6510 = build_function_type_list (opaque_V2SI_type_node,
6511 opaque_V2SF_type_node, NULL_TREE);
6512
6513 tree v2si_ftype_v2si_char
6514 = build_function_type_list (opaque_V2SI_type_node,
6515 opaque_V2SI_type_node,
6516 char_type_node, NULL_TREE);
6517
6518 tree v2si_ftype_int_char
6519 = build_function_type_list (opaque_V2SI_type_node,
6520 integer_type_node, char_type_node, NULL_TREE);
6521
6522 tree v2si_ftype_char
6523 = build_function_type_list (opaque_V2SI_type_node,
6524 char_type_node, NULL_TREE);
6525
6526 tree int_ftype_int_int
6527 = build_function_type_list (integer_type_node,
6528 integer_type_node, integer_type_node,
6529 NULL_TREE);
6530
6531 tree v4si_ftype_v4si_v4si
6532 = build_function_type_list (V4SI_type_node,
6533 V4SI_type_node, V4SI_type_node, NULL_TREE);
6534 tree v4sf_ftype_v4si_char
6535 = build_function_type_list (V4SF_type_node,
6536 V4SI_type_node, char_type_node, NULL_TREE);
6537 tree v4si_ftype_v4sf_char
6538 = build_function_type_list (V4SI_type_node,
6539 V4SF_type_node, char_type_node, NULL_TREE);
6540 tree v4si_ftype_v4si_char
6541 = build_function_type_list (V4SI_type_node,
6542 V4SI_type_node, char_type_node, NULL_TREE);
6543 tree v8hi_ftype_v8hi_char
6544 = build_function_type_list (V8HI_type_node,
6545 V8HI_type_node, char_type_node, NULL_TREE);
6546 tree v16qi_ftype_v16qi_char
6547 = build_function_type_list (V16QI_type_node,
6548 V16QI_type_node, char_type_node, NULL_TREE);
6549 tree v16qi_ftype_v16qi_v16qi_char
6550 = build_function_type_list (V16QI_type_node,
6551 V16QI_type_node, V16QI_type_node,
6552 char_type_node, NULL_TREE);
6553 tree v8hi_ftype_v8hi_v8hi_char
6554 = build_function_type_list (V8HI_type_node,
6555 V8HI_type_node, V8HI_type_node,
6556 char_type_node, NULL_TREE);
6557 tree v4si_ftype_v4si_v4si_char
6558 = build_function_type_list (V4SI_type_node,
6559 V4SI_type_node, V4SI_type_node,
6560 char_type_node, NULL_TREE);
6561 tree v4sf_ftype_v4sf_v4sf_char
6562 = build_function_type_list (V4SF_type_node,
6563 V4SF_type_node, V4SF_type_node,
6564 char_type_node, NULL_TREE);
6565 tree v4sf_ftype_v4sf_v4sf
6566 = build_function_type_list (V4SF_type_node,
6567 V4SF_type_node, V4SF_type_node, NULL_TREE);
6568 tree v4sf_ftype_v4sf_v4sf_v4si
6569 = build_function_type_list (V4SF_type_node,
6570 V4SF_type_node, V4SF_type_node,
6571 V4SI_type_node, NULL_TREE);
6572 tree v4sf_ftype_v4sf_v4sf_v4sf
6573 = build_function_type_list (V4SF_type_node,
6574 V4SF_type_node, V4SF_type_node,
6575 V4SF_type_node, NULL_TREE);
6576 tree v4si_ftype_v4si_v4si_v4si
6577 = build_function_type_list (V4SI_type_node,
6578 V4SI_type_node, V4SI_type_node,
6579 V4SI_type_node, NULL_TREE);
6580 tree v8hi_ftype_v8hi_v8hi
6581 = build_function_type_list (V8HI_type_node,
6582 V8HI_type_node, V8HI_type_node, NULL_TREE);
6583 tree v8hi_ftype_v8hi_v8hi_v8hi
6584 = build_function_type_list (V8HI_type_node,
6585 V8HI_type_node, V8HI_type_node,
6586 V8HI_type_node, NULL_TREE);
6587 tree v4si_ftype_v8hi_v8hi_v4si
6588 = build_function_type_list (V4SI_type_node,
6589 V8HI_type_node, V8HI_type_node,
6590 V4SI_type_node, NULL_TREE);
6591 tree v4si_ftype_v16qi_v16qi_v4si
6592 = build_function_type_list (V4SI_type_node,
6593 V16QI_type_node, V16QI_type_node,
6594 V4SI_type_node, NULL_TREE);
6595 tree v16qi_ftype_v16qi_v16qi
6596 = build_function_type_list (V16QI_type_node,
6597 V16QI_type_node, V16QI_type_node, NULL_TREE);
6598 tree v4si_ftype_v4sf_v4sf
6599 = build_function_type_list (V4SI_type_node,
6600 V4SF_type_node, V4SF_type_node, NULL_TREE);
6601 tree v8hi_ftype_v16qi_v16qi
6602 = build_function_type_list (V8HI_type_node,
6603 V16QI_type_node, V16QI_type_node, NULL_TREE);
6604 tree v4si_ftype_v8hi_v8hi
6605 = build_function_type_list (V4SI_type_node,
6606 V8HI_type_node, V8HI_type_node, NULL_TREE);
6607 tree v8hi_ftype_v4si_v4si
6608 = build_function_type_list (V8HI_type_node,
6609 V4SI_type_node, V4SI_type_node, NULL_TREE);
6610 tree v16qi_ftype_v8hi_v8hi
6611 = build_function_type_list (V16QI_type_node,
6612 V8HI_type_node, V8HI_type_node, NULL_TREE);
6613 tree v4si_ftype_v16qi_v4si
6614 = build_function_type_list (V4SI_type_node,
6615 V16QI_type_node, V4SI_type_node, NULL_TREE);
6616 tree v4si_ftype_v16qi_v16qi
6617 = build_function_type_list (V4SI_type_node,
6618 V16QI_type_node, V16QI_type_node, NULL_TREE);
6619 tree v4si_ftype_v8hi_v4si
6620 = build_function_type_list (V4SI_type_node,
6621 V8HI_type_node, V4SI_type_node, NULL_TREE);
6622 tree v4si_ftype_v8hi
6623 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
6624 tree int_ftype_v4si_v4si
6625 = build_function_type_list (integer_type_node,
6626 V4SI_type_node, V4SI_type_node, NULL_TREE);
6627 tree int_ftype_v4sf_v4sf
6628 = build_function_type_list (integer_type_node,
6629 V4SF_type_node, V4SF_type_node, NULL_TREE);
6630 tree int_ftype_v16qi_v16qi
6631 = build_function_type_list (integer_type_node,
6632 V16QI_type_node, V16QI_type_node, NULL_TREE);
6633 tree int_ftype_v8hi_v8hi
6634 = build_function_type_list (integer_type_node,
6635 V8HI_type_node, V8HI_type_node, NULL_TREE);
6636
6637 /* Add the simple ternary operators. */
6638 d = (struct builtin_description *) bdesc_3arg;
6639 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
6640 {
6641
6642 enum machine_mode mode0, mode1, mode2, mode3;
6643 tree type;
6644
6645 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6646 continue;
6647
6648 mode0 = insn_data[d->icode].operand[0].mode;
6649 mode1 = insn_data[d->icode].operand[1].mode;
6650 mode2 = insn_data[d->icode].operand[2].mode;
6651 mode3 = insn_data[d->icode].operand[3].mode;
6652
6653 /* When all four are of the same mode. */
6654 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
6655 {
6656 switch (mode0)
6657 {
6658 case V4SImode:
6659 type = v4si_ftype_v4si_v4si_v4si;
6660 break;
6661 case V4SFmode:
6662 type = v4sf_ftype_v4sf_v4sf_v4sf;
6663 break;
6664 case V8HImode:
6665 type = v8hi_ftype_v8hi_v8hi_v8hi;
6666 break;
6667 case V16QImode:
6668 type = v16qi_ftype_v16qi_v16qi_v16qi;
6669 break;
6670 default:
6671 abort();
6672 }
6673 }
6674 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
6675 {
6676 switch (mode0)
6677 {
6678 case V4SImode:
6679 type = v4si_ftype_v4si_v4si_v16qi;
6680 break;
6681 case V4SFmode:
6682 type = v4sf_ftype_v4sf_v4sf_v16qi;
6683 break;
6684 case V8HImode:
6685 type = v8hi_ftype_v8hi_v8hi_v16qi;
6686 break;
6687 case V16QImode:
6688 type = v16qi_ftype_v16qi_v16qi_v16qi;
6689 break;
6690 default:
6691 abort();
6692 }
6693 }
6694 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
6695 && mode3 == V4SImode)
6696 type = v4si_ftype_v16qi_v16qi_v4si;
6697 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
6698 && mode3 == V4SImode)
6699 type = v4si_ftype_v8hi_v8hi_v4si;
6700 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
6701 && mode3 == V4SImode)
6702 type = v4sf_ftype_v4sf_v4sf_v4si;
6703
6704 /* vchar, vchar, vchar, 4 bit literal. */
6705 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
6706 && mode3 == QImode)
6707 type = v16qi_ftype_v16qi_v16qi_char;
6708
6709 /* vshort, vshort, vshort, 4 bit literal. */
6710 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
6711 && mode3 == QImode)
6712 type = v8hi_ftype_v8hi_v8hi_char;
6713
6714 /* vint, vint, vint, 4 bit literal. */
6715 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
6716 && mode3 == QImode)
6717 type = v4si_ftype_v4si_v4si_char;
6718
6719 /* vfloat, vfloat, vfloat, 4 bit literal. */
6720 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
6721 && mode3 == QImode)
6722 type = v4sf_ftype_v4sf_v4sf_char;
6723
6724 else
6725 abort ();
6726
6727 def_builtin (d->mask, d->name, type, d->code);
6728 }
6729
6730 /* Add the simple binary operators. */
6731 d = (struct builtin_description *) bdesc_2arg;
6732 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6733 {
6734 enum machine_mode mode0, mode1, mode2;
6735 tree type;
6736
6737 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6738 continue;
6739
6740 mode0 = insn_data[d->icode].operand[0].mode;
6741 mode1 = insn_data[d->icode].operand[1].mode;
6742 mode2 = insn_data[d->icode].operand[2].mode;
6743
6744 /* When all three operands are of the same mode. */
6745 if (mode0 == mode1 && mode1 == mode2)
6746 {
6747 switch (mode0)
6748 {
6749 case V4SFmode:
6750 type = v4sf_ftype_v4sf_v4sf;
6751 break;
6752 case V4SImode:
6753 type = v4si_ftype_v4si_v4si;
6754 break;
6755 case V16QImode:
6756 type = v16qi_ftype_v16qi_v16qi;
6757 break;
6758 case V8HImode:
6759 type = v8hi_ftype_v8hi_v8hi;
6760 break;
6761 case V2SImode:
6762 type = v2si_ftype_v2si_v2si;
6763 break;
6764 case V2SFmode:
6765 type = v2sf_ftype_v2sf_v2sf;
6766 break;
6767 case SImode:
6768 type = int_ftype_int_int;
6769 break;
6770 default:
6771 abort ();
6772 }
6773 }
6774
6775 /* A few other combos we really don't want to do manually. */
6776
6777 /* vint, vfloat, vfloat. */
6778 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
6779 type = v4si_ftype_v4sf_v4sf;
6780
6781 /* vshort, vchar, vchar. */
6782 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
6783 type = v8hi_ftype_v16qi_v16qi;
6784
6785 /* vint, vshort, vshort. */
6786 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
6787 type = v4si_ftype_v8hi_v8hi;
6788
6789 /* vshort, vint, vint. */
6790 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
6791 type = v8hi_ftype_v4si_v4si;
6792
6793 /* vchar, vshort, vshort. */
6794 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
6795 type = v16qi_ftype_v8hi_v8hi;
6796
6797 /* vint, vchar, vint. */
6798 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
6799 type = v4si_ftype_v16qi_v4si;
6800
6801 /* vint, vchar, vchar. */
6802 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
6803 type = v4si_ftype_v16qi_v16qi;
6804
6805 /* vint, vshort, vint. */
6806 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
6807 type = v4si_ftype_v8hi_v4si;
6808
6809 /* vint, vint, 5 bit literal. */
6810 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
6811 type = v4si_ftype_v4si_char;
6812
6813 /* vshort, vshort, 5 bit literal. */
6814 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
6815 type = v8hi_ftype_v8hi_char;
6816
6817 /* vchar, vchar, 5 bit literal. */
6818 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
6819 type = v16qi_ftype_v16qi_char;
6820
6821 /* vfloat, vint, 5 bit literal. */
6822 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
6823 type = v4sf_ftype_v4si_char;
6824
6825 /* vint, vfloat, 5 bit literal. */
6826 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
6827 type = v4si_ftype_v4sf_char;
6828
6829 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
6830 type = v2si_ftype_int_int;
6831
6832 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
6833 type = v2si_ftype_v2si_char;
6834
6835 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
6836 type = v2si_ftype_int_char;
6837
6838 /* int, x, x. */
6839 else if (mode0 == SImode)
6840 {
6841 switch (mode1)
6842 {
6843 case V4SImode:
6844 type = int_ftype_v4si_v4si;
6845 break;
6846 case V4SFmode:
6847 type = int_ftype_v4sf_v4sf;
6848 break;
6849 case V16QImode:
6850 type = int_ftype_v16qi_v16qi;
6851 break;
6852 case V8HImode:
6853 type = int_ftype_v8hi_v8hi;
6854 break;
6855 default:
6856 abort ();
6857 }
6858 }
6859
6860 else
6861 abort ();
6862
6863 def_builtin (d->mask, d->name, type, d->code);
6864 }
6865
6866 /* Add the simple unary operators. */
6867 d = (struct builtin_description *) bdesc_1arg;
6868 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6869 {
6870 enum machine_mode mode0, mode1;
6871 tree type;
6872
6873 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6874 continue;
6875
6876 mode0 = insn_data[d->icode].operand[0].mode;
6877 mode1 = insn_data[d->icode].operand[1].mode;
6878
6879 if (mode0 == V4SImode && mode1 == QImode)
6880 type = v4si_ftype_char;
6881 else if (mode0 == V8HImode && mode1 == QImode)
6882 type = v8hi_ftype_char;
6883 else if (mode0 == V16QImode && mode1 == QImode)
6884 type = v16qi_ftype_char;
6885 else if (mode0 == V4SFmode && mode1 == V4SFmode)
6886 type = v4sf_ftype_v4sf;
6887 else if (mode0 == V8HImode && mode1 == V16QImode)
6888 type = v8hi_ftype_v16qi;
6889 else if (mode0 == V4SImode && mode1 == V8HImode)
6890 type = v4si_ftype_v8hi;
6891 else if (mode0 == V2SImode && mode1 == V2SImode)
6892 type = v2si_ftype_v2si;
6893 else if (mode0 == V2SFmode && mode1 == V2SFmode)
6894 type = v2sf_ftype_v2sf;
6895 else if (mode0 == V2SFmode && mode1 == V2SImode)
6896 type = v2sf_ftype_v2si;
6897 else if (mode0 == V2SImode && mode1 == V2SFmode)
6898 type = v2si_ftype_v2sf;
6899 else if (mode0 == V2SImode && mode1 == QImode)
6900 type = v2si_ftype_char;
6901 else
6902 abort ();
6903
6904 def_builtin (d->mask, d->name, type, d->code);
6905 }
6906 }
6907
6908 \f
6909 /* Expand a block move operation, and return 1 if successful. Return 0
6910 if we should let the compiler generate normal code.
6911
6912 operands[0] is the destination
6913 operands[1] is the source
6914 operands[2] is the length
6915 operands[3] is the alignment */
6916
6917 #define MAX_MOVE_REG 4
6918
int
expand_block_move (operands)
     rtx operands[];
{
  rtx orig_dest = operands[0];
  rtx orig_src = operands[1];
  rtx bytes_rtx = operands[2];
  rtx align_rtx = operands[3];
  int constp = (GET_CODE (bytes_rtx) == CONST_INT);
  int align;
  int bytes;			/* Bytes still to move.  */
  int offset;			/* Byte offset of the current chunk.  */
  int move_bytes;		/* Size of the chunk moved this iteration.  */
  rtx stores[MAX_MOVE_REG];	/* Queued store insns, flushed in batches.  */
  int num_reg = 0;

  /* If this is not a fixed size move, just call memcpy.  */
  if (! constp)
    return 0;

  /* If this is not a fixed size alignment, abort.  */
  if (GET_CODE (align_rtx) != CONST_INT)
    abort ();
  align = INTVAL (align_rtx);

  /* Anything to move?  */
  bytes = INTVAL (bytes_rtx);
  if (bytes <= 0)
    return 1;

  /* store_one_arg depends on expand_block_move to handle at least the size of
     reg_parm_stack_space.  */
  if (bytes > (TARGET_POWERPC64 ? 64 : 32))
    return 0;

  /* Move the data in the largest chunks the target allows, preferring
     string (multi-register) moves, then doubleword/word/halfword/byte
     moves, until all BYTES are covered.  */
  for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
    {
      /* Exactly one union member is set below: MOVSTRSI for a string
	 move (MODE stays BLKmode), MOV for a single register move.  */
      union {
	rtx (*movstrsi) PARAMS ((rtx, rtx, rtx, rtx));
	rtx (*mov) PARAMS ((rtx, rtx));
      } gen_func;
      enum machine_mode mode = BLKmode;
      rtx src, dest;

      /* String moves clobber consecutive hard registers starting at r5;
	 they are only usable when none of those registers is fixed.  */
      if (TARGET_STRING
	  && bytes > 24		/* move up to 32 bytes at a time */
	  && ! fixed_regs[5]
	  && ! fixed_regs[6]
	  && ! fixed_regs[7]
	  && ! fixed_regs[8]
	  && ! fixed_regs[9]
	  && ! fixed_regs[10]
	  && ! fixed_regs[11]
	  && ! fixed_regs[12])
	{
	  move_bytes = (bytes > 32) ? 32 : bytes;
	  gen_func.movstrsi = gen_movstrsi_8reg;
	}
      else if (TARGET_STRING
	       && bytes > 16	/* move up to 24 bytes at a time */
	       && ! fixed_regs[5]
	       && ! fixed_regs[6]
	       && ! fixed_regs[7]
	       && ! fixed_regs[8]
	       && ! fixed_regs[9]
	       && ! fixed_regs[10])
	{
	  move_bytes = (bytes > 24) ? 24 : bytes;
	  gen_func.movstrsi = gen_movstrsi_6reg;
	}
      else if (TARGET_STRING
	       && bytes > 8	/* move up to 16 bytes at a time */
	       && ! fixed_regs[5]
	       && ! fixed_regs[6]
	       && ! fixed_regs[7]
	       && ! fixed_regs[8])
	{
	  move_bytes = (bytes > 16) ? 16 : bytes;
	  gen_func.movstrsi = gen_movstrsi_4reg;
	}
      else if (bytes >= 8 && TARGET_POWERPC64
	       /* 64-bit loads and stores require word-aligned
		  displacements.  */
	       && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
	{
	  move_bytes = 8;
	  mode = DImode;
	  gen_func.mov = gen_movdi;
	}
      else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
	{			/* move up to 8 bytes at a time */
	  move_bytes = (bytes > 8) ? 8 : bytes;
	  gen_func.movstrsi = gen_movstrsi_2reg;
	}
      else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
	{			/* move 4 bytes */
	  move_bytes = 4;
	  mode = SImode;
	  gen_func.mov = gen_movsi;
	}
      else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
	{			/* move 2 bytes */
	  move_bytes = 2;
	  mode = HImode;
	  gen_func.mov = gen_movhi;
	}
      else if (TARGET_STRING && bytes > 1)
	{			/* move up to 4 bytes at a time */
	  move_bytes = (bytes > 4) ? 4 : bytes;
	  gen_func.movstrsi = gen_movstrsi_1reg;
	}
      else /* move 1 byte at a time */
	{
	  move_bytes = 1;
	  mode = QImode;
	  gen_func.mov = gen_movqi;
	}

      src = adjust_address (orig_src, mode, offset);
      dest = adjust_address (orig_dest, mode, offset);

      /* For a register-width move, emit the load now but queue the
	 store, so that a batch of loads is emitted before the matching
	 stores.  */
      if (mode != BLKmode)
	{
	  rtx tmp_reg = gen_reg_rtx (mode);

	  emit_insn ((*gen_func.mov) (tmp_reg, src));
	  stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
	}

      /* Flush the queued stores when the queue is full, before a string
	 move, or when this is the final chunk.  */
      if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
	{
	  int i;
	  for (i = 0; i < num_reg; i++)
	    emit_insn (stores[i]);
	  num_reg = 0;
	}

      if (mode == BLKmode)
	{
	  /* Move the address into scratch registers.  The movstrsi
	     patterns require zero offset.  */
	  if (!REG_P (XEXP (src, 0)))
	    {
	      rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
	      src = replace_equiv_address (src, src_reg);
	    }
	  set_mem_size (src, GEN_INT (move_bytes));

	  if (!REG_P (XEXP (dest, 0)))
	    {
	      rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
	      dest = replace_equiv_address (dest, dest_reg);
	    }
	  set_mem_size (dest, GEN_INT (move_bytes));

	  emit_insn ((*gen_func.movstrsi) (dest, src,
					   GEN_INT (move_bytes & 31),
					   align_rtx));
	}
    }

  return 1;
}
7082
7083 \f
7084 /* Return 1 if OP is a load multiple operation. It is known to be a
7085 PARALLEL and the first section will be tested. */
7086
7087 int
7088 load_multiple_operation (op, mode)
7089 rtx op;
7090 enum machine_mode mode ATTRIBUTE_UNUSED;
7091 {
7092 int count = XVECLEN (op, 0);
7093 unsigned int dest_regno;
7094 rtx src_addr;
7095 int i;
7096
7097 /* Perform a quick check so we don't blow up below. */
7098 if (count <= 1
7099 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7100 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7101 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7102 return 0;
7103
7104 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7105 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7106
7107 for (i = 1; i < count; i++)
7108 {
7109 rtx elt = XVECEXP (op, 0, i);
7110
7111 if (GET_CODE (elt) != SET
7112 || GET_CODE (SET_DEST (elt)) != REG
7113 || GET_MODE (SET_DEST (elt)) != SImode
7114 || REGNO (SET_DEST (elt)) != dest_regno + i
7115 || GET_CODE (SET_SRC (elt)) != MEM
7116 || GET_MODE (SET_SRC (elt)) != SImode
7117 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
7118 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
7119 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
7120 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
7121 return 0;
7122 }
7123
7124 return 1;
7125 }
7126
7127 /* Similar, but tests for store multiple. Here, the second vector element
7128 is a CLOBBER. It will be tested later. */
7129
7130 int
7131 store_multiple_operation (op, mode)
7132 rtx op;
7133 enum machine_mode mode ATTRIBUTE_UNUSED;
7134 {
7135 int count = XVECLEN (op, 0) - 1;
7136 unsigned int src_regno;
7137 rtx dest_addr;
7138 int i;
7139
7140 /* Perform a quick check so we don't blow up below. */
7141 if (count <= 1
7142 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7143 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7144 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7145 return 0;
7146
7147 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7148 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7149
7150 for (i = 1; i < count; i++)
7151 {
7152 rtx elt = XVECEXP (op, 0, i + 1);
7153
7154 if (GET_CODE (elt) != SET
7155 || GET_CODE (SET_SRC (elt)) != REG
7156 || GET_MODE (SET_SRC (elt)) != SImode
7157 || REGNO (SET_SRC (elt)) != src_regno + i
7158 || GET_CODE (SET_DEST (elt)) != MEM
7159 || GET_MODE (SET_DEST (elt)) != SImode
7160 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
7161 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
7162 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
7163 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
7164 return 0;
7165 }
7166
7167 return 1;
7168 }
7169
7170 /* Return a string to perform a load_multiple operation.
7171 operands[0] is the vector.
7172 operands[1] is the source address.
7173 operands[2] is the first destination register. */
7174
7175 const char *
7176 rs6000_output_load_multiple (operands)
7177 rtx operands[3];
7178 {
7179 /* We have to handle the case where the pseudo used to contain the address
7180 is assigned to one of the output registers. */
7181 int i, j;
7182 int words = XVECLEN (operands[0], 0);
7183 rtx xop[10];
7184
7185 if (XVECLEN (operands[0], 0) == 1)
7186 return "{l|lwz} %2,0(%1)";
7187
7188 for (i = 0; i < words; i++)
7189 if (refers_to_regno_p (REGNO (operands[2]) + i,
7190 REGNO (operands[2]) + i + 1, operands[1], 0))
7191 {
7192 if (i == words-1)
7193 {
7194 xop[0] = GEN_INT (4 * (words-1));
7195 xop[1] = operands[1];
7196 xop[2] = operands[2];
7197 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
7198 return "";
7199 }
7200 else if (i == 0)
7201 {
7202 xop[0] = GEN_INT (4 * (words-1));
7203 xop[1] = operands[1];
7204 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
7205 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
7206 return "";
7207 }
7208 else
7209 {
7210 for (j = 0; j < words; j++)
7211 if (j != i)
7212 {
7213 xop[0] = GEN_INT (j * 4);
7214 xop[1] = operands[1];
7215 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
7216 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
7217 }
7218 xop[0] = GEN_INT (i * 4);
7219 xop[1] = operands[1];
7220 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
7221 return "";
7222 }
7223 }
7224
7225 return "{lsi|lswi} %2,%1,%N0";
7226 }
7227
7228 /* Return 1 for a parallel vrsave operation. */
7229
7230 int
7231 vrsave_operation (op, mode)
7232 rtx op;
7233 enum machine_mode mode ATTRIBUTE_UNUSED;
7234 {
7235 int count = XVECLEN (op, 0);
7236 unsigned int dest_regno, src_regno;
7237 int i;
7238
7239 if (count <= 1
7240 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7241 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7242 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
7243 return 0;
7244
7245 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7246 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7247
7248 if (dest_regno != VRSAVE_REGNO
7249 && src_regno != VRSAVE_REGNO)
7250 return 0;
7251
7252 for (i = 1; i < count; i++)
7253 {
7254 rtx elt = XVECEXP (op, 0, i);
7255
7256 if (GET_CODE (elt) != CLOBBER
7257 && GET_CODE (elt) != SET)
7258 return 0;
7259 }
7260
7261 return 1;
7262 }
7263
/* Return 1 for a PARALLEL suitable for mtcrf.  */
7265
7266 int
7267 mtcrf_operation (op, mode)
7268 rtx op;
7269 enum machine_mode mode ATTRIBUTE_UNUSED;
7270 {
7271 int count = XVECLEN (op, 0);
7272 int i;
7273 rtx src_reg;
7274
7275 /* Perform a quick check so we don't blow up below. */
7276 if (count < 1
7277 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7278 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
7279 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
7280 return 0;
7281 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
7282
7283 if (GET_CODE (src_reg) != REG
7284 || GET_MODE (src_reg) != SImode
7285 || ! INT_REGNO_P (REGNO (src_reg)))
7286 return 0;
7287
7288 for (i = 0; i < count; i++)
7289 {
7290 rtx exp = XVECEXP (op, 0, i);
7291 rtx unspec;
7292 int maskval;
7293
7294 if (GET_CODE (exp) != SET
7295 || GET_CODE (SET_DEST (exp)) != REG
7296 || GET_MODE (SET_DEST (exp)) != CCmode
7297 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
7298 return 0;
7299 unspec = SET_SRC (exp);
7300 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
7301
7302 if (GET_CODE (unspec) != UNSPEC
7303 || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
7304 || XVECLEN (unspec, 0) != 2
7305 || XVECEXP (unspec, 0, 0) != src_reg
7306 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
7307 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
7308 return 0;
7309 }
7310 return 1;
7311 }
7312
/* Return 1 for a PARALLEL suitable for lmw.  */
7314
int
lmw_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int dest_regno;
  rtx src_addr;
  unsigned int base_regno;
  HOST_WIDE_INT offset;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);

  /* lmw loads registers dest_regno through 31, so there must be
     exactly one element per register in that range.  */
  if (dest_regno > 31
      || count != 32 - (int) dest_regno)
    return 0;

  /* Decompose the first address into base register and constant
     offset; reject register 0 as a base.  */
  if (legitimate_indirect_address_p (src_addr, 0))
    {
      offset = 0;
      base_regno = REGNO (src_addr);
      if (base_regno == 0)
	return 0;
    }
  else if (legitimate_offset_address_p (SImode, src_addr, 0))
    {
      offset = INTVAL (XEXP (src_addr, 1));
      base_regno = REGNO (XEXP (src_addr, 0));
    }
  else
    return 0;

  /* Each element must load the next consecutive register from the
     next consecutive word off the same base register.  */
  for (i = 0; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);
      rtx newaddr;
      rtx addr_reg;
      HOST_WIDE_INT newoffset;

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || REGNO (SET_DEST (elt)) != dest_regno + i
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != SImode)
	return 0;
      newaddr = XEXP (SET_SRC (elt), 0);
      if (legitimate_indirect_address_p (newaddr, 0))
	{
	  newoffset = 0;
	  addr_reg = newaddr;
	}
      else if (legitimate_offset_address_p (SImode, newaddr, 0))
	{
	  addr_reg = XEXP (newaddr, 0);
	  newoffset = INTVAL (XEXP (newaddr, 1));
	}
      else
	return 0;
      if (REGNO (addr_reg) != base_regno
	  || newoffset != offset + 4 * i)
	return 0;
    }

  return 1;
}
7390
/* Return 1 for a PARALLEL suitable for stmw.  */
7392
int
stmw_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int src_regno;
  rtx dest_addr;
  unsigned int base_regno;
  HOST_WIDE_INT offset;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);

  /* stmw stores registers src_regno through 31, so there must be
     exactly one element per register in that range.  */
  if (src_regno > 31
      || count != 32 - (int) src_regno)
    return 0;

  /* Decompose the first address into base register and constant
     offset; reject register 0 as a base.  */
  if (legitimate_indirect_address_p (dest_addr, 0))
    {
      offset = 0;
      base_regno = REGNO (dest_addr);
      if (base_regno == 0)
	return 0;
    }
  else if (legitimate_offset_address_p (SImode, dest_addr, 0))
    {
      offset = INTVAL (XEXP (dest_addr, 1));
      base_regno = REGNO (XEXP (dest_addr, 0));
    }
  else
    return 0;

  /* Each element must store the next consecutive register to the
     next consecutive word off the same base register.  */
  for (i = 0; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);
      rtx newaddr;
      rtx addr_reg;
      HOST_WIDE_INT newoffset;

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_SRC (elt)) != REG
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || REGNO (SET_SRC (elt)) != src_regno + i
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_MODE (SET_DEST (elt)) != SImode)
	return 0;
      newaddr = XEXP (SET_DEST (elt), 0);
      if (legitimate_indirect_address_p (newaddr, 0))
	{
	  newoffset = 0;
	  addr_reg = newaddr;
	}
      else if (legitimate_offset_address_p (SImode, newaddr, 0))
	{
	  addr_reg = XEXP (newaddr, 0);
	  newoffset = INTVAL (XEXP (newaddr, 1));
	}
      else
	return 0;
      if (REGNO (addr_reg) != base_regno
	  || newoffset != offset + 4 * i)
	return 0;
    }

  return 1;
}
7468 \f
7469 /* A validation routine: say whether CODE, a condition code, and MODE
7470 match. The other alternatives either don't make sense or should
7471 never be generated. */
7472
static void
validate_condition_mode (code, mode)
     enum rtx_code code;
     enum machine_mode mode;
{
  /* CODE must be a comparison and MODE a condition-code mode.  */
  if (GET_RTX_CLASS (code) != '<'
      || GET_MODE_CLASS (mode) != MODE_CC)
    abort ();

  /* These don't make sense: signed comparisons on an unsigned CC.  */
  if ((code == GT || code == LT || code == GE || code == LE)
      && mode == CCUNSmode)
    abort ();

  /* Nor unsigned comparisons on anything but an unsigned CC.  */
  if ((code == GTU || code == LTU || code == GEU || code == LEU)
      && mode != CCUNSmode)
    abort ();

  /* The ordered/unordered family only makes sense for floating
     point comparisons.  */
  if (mode != CCFPmode
      && (code == ORDERED || code == UNORDERED
	  || code == UNEQ || code == LTGT
	  || code == UNGT || code == UNLT
	  || code == UNGE || code == UNLE))
    abort ();

  /* These should never be generated except for
     flag_finite_math_only.  */
  if (mode == CCFPmode
      && ! flag_finite_math_only
      && (code == LE || code == GE
	  || code == UNEQ || code == LTGT
	  || code == UNGT || code == UNLT))
    abort ();

  /* These are invalid; the information is not there.  */
  if (mode == CCEQmode
      && code != EQ && code != NE)
    abort ();
}
7512
7513 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
7514 We only check the opcode against the mode of the CC value here. */
7515
7516 int
7517 branch_comparison_operator (op, mode)
7518 rtx op;
7519 enum machine_mode mode ATTRIBUTE_UNUSED;
7520 {
7521 enum rtx_code code = GET_CODE (op);
7522 enum machine_mode cc_mode;
7523
7524 if (GET_RTX_CLASS (code) != '<')
7525 return 0;
7526
7527 cc_mode = GET_MODE (XEXP (op, 0));
7528 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
7529 return 0;
7530
7531 validate_condition_mode (code, cc_mode);
7532
7533 return 1;
7534 }
7535
7536 /* Return 1 if OP is a comparison operation that is valid for a branch
7537 insn and which is true if the corresponding bit in the CC register
7538 is set. */
7539
7540 int
7541 branch_positive_comparison_operator (op, mode)
7542 rtx op;
7543 enum machine_mode mode;
7544 {
7545 enum rtx_code code;
7546
7547 if (! branch_comparison_operator (op, mode))
7548 return 0;
7549
7550 code = GET_CODE (op);
7551 return (code == EQ || code == LT || code == GT
7552 || (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
7553 || code == LTU || code == GTU
7554 || code == UNORDERED);
7555 }
7556
7557 /* Return 1 if OP is a comparison operation that is valid for an scc
7558 insn: it must be a positive comparison. */
7559
int
scc_comparison_operator (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* An scc comparison is exactly a positive branch comparison.  */
  return branch_positive_comparison_operator (op, mode);
}
7567
7568 int
7569 trap_comparison_operator (op, mode)
7570 rtx op;
7571 enum machine_mode mode;
7572 {
7573 if (mode != VOIDmode && mode != GET_MODE (op))
7574 return 0;
7575 return GET_RTX_CLASS (GET_CODE (op)) == '<';
7576 }
7577
7578 int
7579 boolean_operator (op, mode)
7580 rtx op;
7581 enum machine_mode mode ATTRIBUTE_UNUSED;
7582 {
7583 enum rtx_code code = GET_CODE (op);
7584 return (code == AND || code == IOR || code == XOR);
7585 }
7586
7587 int
7588 boolean_or_operator (op, mode)
7589 rtx op;
7590 enum machine_mode mode ATTRIBUTE_UNUSED;
7591 {
7592 enum rtx_code code = GET_CODE (op);
7593 return (code == IOR || code == XOR);
7594 }
7595
7596 int
7597 min_max_operator (op, mode)
7598 rtx op;
7599 enum machine_mode mode ATTRIBUTE_UNUSED;
7600 {
7601 enum rtx_code code = GET_CODE (op);
7602 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
7603 }
7604 \f
7605 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
7606 mask required to convert the result of a rotate insn into a shift
7607 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
7608
7609 int
7610 includes_lshift_p (shiftop, andop)
7611 rtx shiftop;
7612 rtx andop;
7613 {
7614 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7615
7616 shift_mask <<= INTVAL (shiftop);
7617
7618 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7619 }
7620
7621 /* Similar, but for right shift. */
7622
7623 int
7624 includes_rshift_p (shiftop, andop)
7625 rtx shiftop;
7626 rtx andop;
7627 {
7628 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7629
7630 shift_mask >>= INTVAL (shiftop);
7631
7632 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7633 }
7634
7635 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
7636 to perform a left shift. It must have exactly SHIFTOP least
7637 significant 0's, then one or more 1's, then zero or more 0's. */
7638
int
includes_rldic_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      /* All-zeros and all-ones masks never qualify.  */
      c = INTVAL (andop);
      if (c == 0 || c == ~0)
	return 0;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must coincide with the LSB of the shift mask.  */
      if (-lsb != shift_mask)
	return 0;

      /* Invert to look for the next transition (if any).  */
      c = ~c;

      /* Remove the low group of ones (originally low group of zeros).  */
      c &= -lsb;

      /* Again find the lsb, and check we have all 1's above.  */
      lsb = c & -c;
      return c == -lsb;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* A 64-bit constant split into HIGH and LOW words when
	 HOST_WIDE_INT is narrower than 64 bits.  */
      HOST_WIDE_INT low, high, lsb;
      HOST_WIDE_INT shift_mask_low, shift_mask_high;

      low = CONST_DOUBLE_LOW (andop);
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = CONST_DOUBLE_HIGH (andop);

      /* Reject the all-zeros and all-ones masks.  */
      if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
	  || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
	return 0;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  /* The mask lies entirely in the high word; apply the same
	     checks as the CONST_INT case, rebased by 32 bits.  */
	  shift_mask_high = ~0;
	  if (INTVAL (shiftop) > 32)
	    shift_mask_high <<= INTVAL (shiftop) - 32;

	  lsb = high & -high;

	  if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
	    return 0;

	  high = ~high;
	  high &= -lsb;

	  lsb = high & -high;
	  return high == -lsb;
	}

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if (-lsb != shift_mask_low)
	return 0;

      /* Invert both words and strip the low run of ones.  */
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = ~high;
      low = ~low;
      low &= -lsb;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  /* The run of ones continues into the high word.  */
	  lsb = high & -high;
	  return high == -lsb;
	}

      lsb = low & -low;
      return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
    }
  else
    return 0;
}
7729
/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
   to perform a left shift.  It must have SHIFTOP or more least
   significant 0's, with the remainder of the word 1's.  */
7733
int
includes_rldicr_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);
      c = INTVAL (andop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must be covered by the shift mask.
	 This test also rejects c == 0.  */
      if ((lsb & shift_mask) == 0)
	return 0;

      /* Check we have all 1's above the transition, and reject all 1's.  */
      return c == -lsb && lsb != 1;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      HOST_WIDE_INT low, lsb, shift_mask_low;

      low = CONST_DOUBLE_LOW (andop);

      if (HOST_BITS_PER_WIDE_INT < 64)
	{
	  /* With a 32-bit HOST_WIDE_INT the constant is split; deal
	     with a mask confined to the high word first.  */
	  HOST_WIDE_INT high, shift_mask_high;

	  high = CONST_DOUBLE_HIGH (andop);

	  if (low == 0)
	    {
	      shift_mask_high = ~0;
	      if (INTVAL (shiftop) > 32)
		shift_mask_high <<= INTVAL (shiftop) - 32;

	      lsb = high & -high;

	      if ((lsb & shift_mask_high) == 0)
		return 0;

	      return high == -lsb;
	    }
	  /* Otherwise the high word must be entirely ones.  */
	  if (high != ~0)
	    return 0;
	}

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if ((lsb & shift_mask_low) == 0)
	return 0;

      return low == -lsb && lsb != 1;
    }
  else
    return 0;
}
7801
7802 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
7803 for lfq and stfq insns.
7804
7805 Note reg1 and reg2 *must* be hard registers. To be sure we will
7806 abort if we are passed pseudo registers. */
7807
7808 int
7809 registers_ok_for_quad_peep (reg1, reg2)
7810 rtx reg1, reg2;
7811 {
7812 /* We might have been passed a SUBREG. */
7813 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
7814 return 0;
7815
7816 return (REGNO (reg1) == REGNO (reg2) - 1);
7817 }
7818
7819 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
7820 addr1 and addr2 must be in consecutive memory locations
7821 (addr2 == addr1 + 8). */
7822
7823 int
7824 addrs_ok_for_quad_peep (addr1, addr2)
7825 rtx addr1;
7826 rtx addr2;
7827 {
7828 unsigned int reg1;
7829 int offset1;
7830
7831 /* Extract an offset (if used) from the first addr. */
7832 if (GET_CODE (addr1) == PLUS)
7833 {
7834 /* If not a REG, return zero. */
7835 if (GET_CODE (XEXP (addr1, 0)) != REG)
7836 return 0;
7837 else
7838 {
7839 reg1 = REGNO (XEXP (addr1, 0));
7840 /* The offset must be constant! */
7841 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
7842 return 0;
7843 offset1 = INTVAL (XEXP (addr1, 1));
7844 }
7845 }
7846 else if (GET_CODE (addr1) != REG)
7847 return 0;
7848 else
7849 {
7850 reg1 = REGNO (addr1);
7851 /* This was a simple (mem (reg)) expression. Offset is 0. */
7852 offset1 = 0;
7853 }
7854
7855 /* Make sure the second address is a (mem (plus (reg) (const_int)))
7856 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
7857 register as addr1. */
7858 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
7859 return 1;
7860 if (GET_CODE (addr2) != PLUS)
7861 return 0;
7862
7863 if (GET_CODE (XEXP (addr2, 0)) != REG
7864 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
7865 return 0;
7866
7867 if (reg1 != REGNO (XEXP (addr2, 0)))
7868 return 0;
7869
7870 /* The offset for the second addr must be 8 more than the first addr. */
7871 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
7872 return 0;
7873
7874 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7875 instructions. */
7876 return 1;
7877 }
7878 \f
7879 /* Return the register class of a scratch register needed to copy IN into
7880 or out of a register in CLASS in MODE. If it can be done directly,
7881 NO_REGS is returned. */
7882
enum reg_class
secondary_reload_class (class, mode, in)
     enum reg_class class;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx in;
{
  int regno;

  if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
#if TARGET_MACHO
		     && MACHOPIC_INDIRECT
#endif
		     ))
    {
      /* We cannot copy a symbolic operand directly into anything
	 other than BASE_REGS for TARGET_ELF.  So indicate that a
	 register from BASE_REGS is needed as an intermediate
	 register.

	 On Darwin, pic addresses require a load from memory, which
	 needs a base register.  */
      if (class != BASE_REGS
	  && (GET_CODE (in) == SYMBOL_REF
	      || GET_CODE (in) == HIGH
	      || GET_CODE (in) == LABEL_REF
	      || GET_CODE (in) == CONST))
	return BASE_REGS;
    }

  /* Reduce IN to a hard register number, or -1 when it is not (or
     does not resolve to) a hard register.  */
  if (GET_CODE (in) == REG)
    {
      regno = REGNO (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  regno = true_regnum (in);
	  if (regno >= FIRST_PSEUDO_REGISTER)
	    regno = -1;
	}
    }
  else if (GET_CODE (in) == SUBREG)
    {
      regno = true_regnum (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = -1;
    }
  else
    regno = -1;

  /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
     into anything.  */
  if (class == GENERAL_REGS || class == BASE_REGS
      || (regno >= 0 && INT_REGNO_P (regno)))
    return NO_REGS;

  /* Constants, memory, and FP registers can go into FP registers.  */
  if ((regno == -1 || FP_REGNO_P (regno))
      && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
    return NO_REGS;

  /* Memory, and AltiVec registers can go into AltiVec registers.  */
  if ((regno == -1 || ALTIVEC_REGNO_P (regno))
      && class == ALTIVEC_REGS)
    return NO_REGS;

  /* We can copy among the CR registers.  */
  if ((class == CR_REGS || class == CR0_REGS)
      && regno >= 0 && CR_REGNO_P (regno))
    return NO_REGS;

  /* Otherwise, we need GENERAL_REGS.  */
  return GENERAL_REGS;
}
7955 \f
7956 /* Given a comparison operation, return the bit number in CCR to test. We
7957 know this is a valid comparison.
7958
7959 SCC_P is 1 if this is for an scc. That means that %D will have been
7960 used instead of %C, so the bits will be in different places.
7961
7962 Return -1 if OP isn't a valid comparison for some reason. */
7963
int
ccr_bit (op, scc_p)
     rtx op;
     int scc_p;
{
  enum rtx_code code = GET_CODE (op);
  enum machine_mode cc_mode;
  int cc_regnum;
  int base_bit;
  rtx reg;

  if (GET_RTX_CLASS (code) != '<')
    return -1;

  reg = XEXP (op, 0);

  /* The comparison must be against a CR register.  */
  if (GET_CODE (reg) != REG
      || ! CR_REGNO_P (REGNO (reg)))
    abort ();

  cc_mode = GET_MODE (reg);
  cc_regnum = REGNO (reg);
  /* Each CR field occupies four consecutive bits of CCR.  */
  base_bit = 4 * (cc_regnum - CR0_REGNO);

  validate_condition_mode (code, cc_mode);

  /* When generating a sCOND operation, only positive conditions are
     allowed.  */
  if (scc_p && code != EQ && code != GT && code != LT && code != UNORDERED
      && code != GTU && code != LTU)
    abort ();

  switch (code)
    {
    case NE:
      /* E500 soft-float FP comparisons use the bit at base_bit + 1.  */
      if (TARGET_E500 && !TARGET_FPRS
	  && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
	return base_bit + 1;
      return scc_p ? base_bit + 3 : base_bit + 2;
    case EQ:
      if (TARGET_E500 && !TARGET_FPRS
	  && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
	return base_bit + 1;
      return base_bit + 2;
    case GT: case GTU: case UNLE:
      return base_bit + 1;
    case LT: case LTU: case UNGE:
      return base_bit;
    case ORDERED: case UNORDERED:
      return base_bit + 3;

    case GE: case GEU:
      /* If scc, we will have done a cror to put the bit in the
	 unordered position.  So test that bit.  For integer, this is ! LT
	 unless this is an scc insn.  */
      return scc_p ? base_bit + 3 : base_bit;

    case LE: case LEU:
      return scc_p ? base_bit + 3 : base_bit + 1;

    default:
      abort ();
    }
}
8028 \f
8029 /* Return the GOT register. */
8030
struct rtx_def *
rs6000_got_register (value)
     rtx value ATTRIBUTE_UNUSED;
{
  /* The second flow pass currently (June 1999) can't update
     regs_ever_live without disturbing other parts of the compiler, so
     update it here to make the prolog/epilogue code happy.  */
  if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
    regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;

  /* Flag that this function uses the PIC offset table.  */
  current_function_uses_pic_offset_table = 1;

  return pic_offset_table_rtx;
}
8045 \f
8046 /* Function to init struct machine_function.
8047 This will be called, via a pointer variable,
8048 from push_function_context. */
8049
static struct machine_function *
rs6000_init_machine_status ()
{
  /* Return a freshly allocated, zero-initialized machine_function
     from GC-managed memory.  */
  return ggc_alloc_cleared (sizeof (machine_function));
}
8055 \f
/* These macros test for integers and extract the low-order bits.  */

/* Nonzero if X is an integer constant: a CONST_INT, or a CONST_DOUBLE
   with VOIDmode (an integer, not a floating-point constant).  */
#define INT_P(X)  \
((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE)	\
&& GET_MODE (X) == VOIDmode)

/* The low-order HOST_WIDE_INT's worth of bits of integer constant X.  */
#define INT_LOWPART(X) \
  (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
8063
8064 int
8065 extract_MB (op)
8066 rtx op;
8067 {
8068 int i;
8069 unsigned long val = INT_LOWPART (op);
8070
8071 /* If the high bit is zero, the value is the first 1 bit we find
8072 from the left. */
8073 if ((val & 0x80000000) == 0)
8074 {
8075 if ((val & 0xffffffff) == 0)
8076 abort ();
8077
8078 i = 1;
8079 while (((val <<= 1) & 0x80000000) == 0)
8080 ++i;
8081 return i;
8082 }
8083
8084 /* If the high bit is set and the low bit is not, or the mask is all
8085 1's, the value is zero. */
8086 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
8087 return 0;
8088
8089 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8090 from the right. */
8091 i = 31;
8092 while (((val >>= 1) & 1) != 0)
8093 --i;
8094
8095 return i;
8096 }
8097
8098 int
8099 extract_ME (op)
8100 rtx op;
8101 {
8102 int i;
8103 unsigned long val = INT_LOWPART (op);
8104
8105 /* If the low bit is zero, the value is the first 1 bit we find from
8106 the right. */
8107 if ((val & 1) == 0)
8108 {
8109 if ((val & 0xffffffff) == 0)
8110 abort ();
8111
8112 i = 30;
8113 while (((val >>= 1) & 1) == 0)
8114 --i;
8115
8116 return i;
8117 }
8118
8119 /* If the low bit is set and the high bit is not, or the mask is all
8120 1's, the value is 31. */
8121 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
8122 return 31;
8123
8124 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8125 from the left. */
8126 i = 0;
8127 while (((val <<= 1) & 0x80000000) != 0)
8128 ++i;
8129
8130 return i;
8131 }
8132
8133 /* Locate some local-dynamic symbol still in use by this function
8134 so that we can print its name in some tls_ld pattern. */
8135
static const char *
rs6000_get_some_local_dynamic_name ()
{
  rtx insn;

  /* Use the cached name if one was found earlier.  */
  if (cfun->machine->some_ld_name)
    return cfun->machine->some_ld_name;

  /* Scan every insn pattern for a local-dynamic TLS symbol; the
     helper caches the first name it finds.  */
  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& for_each_rtx (&PATTERN (insn),
			 rs6000_get_some_local_dynamic_name_1, 0))
      return cfun->machine->some_ld_name;

  /* No local-dynamic symbol found; callers only ask when one exists.  */
  abort ();
}
8152
8153 /* Helper function for rs6000_get_some_local_dynamic_name. */
8154
8155 static int
8156 rs6000_get_some_local_dynamic_name_1 (px, data)
8157 rtx *px;
8158 void *data ATTRIBUTE_UNUSED;
8159 {
8160 rtx x = *px;
8161
8162 if (GET_CODE (x) == SYMBOL_REF)
8163 {
8164 const char *str = XSTR (x, 0);
8165 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
8166 {
8167 cfun->machine->some_ld_name = str;
8168 return 1;
8169 }
8170 }
8171
8172 return 0;
8173 }
8174
8175 /* Print an operand. Recognize special options, documented below. */
8176
8177 #if TARGET_ELF
8178 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8179 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
8180 #else
8181 #define SMALL_DATA_RELOC "sda21"
8182 #define SMALL_DATA_REG 0
8183 #endif
8184
8185 void
8186 print_operand (file, x, code)
8187 FILE *file;
8188 rtx x;
8189 int code;
8190 {
8191 int i;
8192 HOST_WIDE_INT val;
8193 unsigned HOST_WIDE_INT uval;
8194
8195 switch (code)
8196 {
8197 case '.':
8198 /* Write out an instruction after the call which may be replaced
8199 with glue code by the loader. This depends on the AIX version. */
8200 asm_fprintf (file, RS6000_CALL_GLUE);
8201 return;
8202
8203 /* %a is output_address. */
8204
8205 case 'A':
8206 /* If X is a constant integer whose low-order 5 bits are zero,
8207 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
8208 in the AIX assembler where "sri" with a zero shift count
8209 writes a trash instruction. */
8210 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
8211 putc ('l', file);
8212 else
8213 putc ('r', file);
8214 return;
8215
8216 case 'b':
8217 /* If constant, low-order 16 bits of constant, unsigned.
8218 Otherwise, write normally. */
8219 if (INT_P (x))
8220 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
8221 else
8222 print_operand (file, x, 0);
8223 return;
8224
8225 case 'B':
8226 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
8227 for 64-bit mask direction. */
8228 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
8229 return;
8230
8231 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
8232 output_operand. */
8233
8234 case 'E':
8235 /* X is a CR register. Print the number of the EQ bit of the CR */
8236 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8237 output_operand_lossage ("invalid %%E value");
8238 else
8239 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
8240 return;
8241
8242 case 'f':
8243 /* X is a CR register. Print the shift count needed to move it
8244 to the high-order four bits. */
8245 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8246 output_operand_lossage ("invalid %%f value");
8247 else
8248 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
8249 return;
8250
8251 case 'F':
8252 /* Similar, but print the count for the rotate in the opposite
8253 direction. */
8254 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8255 output_operand_lossage ("invalid %%F value");
8256 else
8257 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
8258 return;
8259
8260 case 'G':
8261 /* X is a constant integer. If it is negative, print "m",
8262 otherwise print "z". This is to make an aze or ame insn. */
8263 if (GET_CODE (x) != CONST_INT)
8264 output_operand_lossage ("invalid %%G value");
8265 else if (INTVAL (x) >= 0)
8266 putc ('z', file);
8267 else
8268 putc ('m', file);
8269 return;
8270
8271 case 'h':
8272 /* If constant, output low-order five bits. Otherwise, write
8273 normally. */
8274 if (INT_P (x))
8275 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
8276 else
8277 print_operand (file, x, 0);
8278 return;
8279
8280 case 'H':
8281 /* If constant, output low-order six bits. Otherwise, write
8282 normally. */
8283 if (INT_P (x))
8284 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
8285 else
8286 print_operand (file, x, 0);
8287 return;
8288
8289 case 'I':
8290 /* Print `i' if this is a constant, else nothing. */
8291 if (INT_P (x))
8292 putc ('i', file);
8293 return;
8294
8295 case 'j':
8296 /* Write the bit number in CCR for jump. */
8297 i = ccr_bit (x, 0);
8298 if (i == -1)
8299 output_operand_lossage ("invalid %%j code");
8300 else
8301 fprintf (file, "%d", i);
8302 return;
8303
8304 case 'J':
8305 /* Similar, but add one for shift count in rlinm for scc and pass
8306 scc flag to `ccr_bit'. */
8307 i = ccr_bit (x, 1);
8308 if (i == -1)
8309 output_operand_lossage ("invalid %%J code");
8310 else
8311 /* If we want bit 31, write a shift count of zero, not 32. */
8312 fprintf (file, "%d", i == 31 ? 0 : i + 1);
8313 return;
8314
8315 case 'k':
8316 /* X must be a constant. Write the 1's complement of the
8317 constant. */
8318 if (! INT_P (x))
8319 output_operand_lossage ("invalid %%k value");
8320 else
8321 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
8322 return;
8323
8324 case 'K':
8325 /* X must be a symbolic constant on ELF. Write an
8326 expression suitable for an 'addi' that adds in the low 16
8327 bits of the MEM. */
8328 if (GET_CODE (x) != CONST)
8329 {
8330 print_operand_address (file, x);
8331 fputs ("@l", file);
8332 }
8333 else
8334 {
8335 if (GET_CODE (XEXP (x, 0)) != PLUS
8336 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
8337 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
8338 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
8339 output_operand_lossage ("invalid %%K value");
8340 print_operand_address (file, XEXP (XEXP (x, 0), 0));
8341 fputs ("@l", file);
8342 /* For GNU as, there must be a non-alphanumeric character
8343 between 'l' and the number. The '-' is added by
8344 print_operand() already. */
8345 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
8346 fputs ("+", file);
8347 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
8348 }
8349 return;
8350
8351 /* %l is output_asm_label. */
8352
8353 case 'L':
8354 /* Write second word of DImode or DFmode reference. Works on register
8355 or non-indexed memory only. */
8356 if (GET_CODE (x) == REG)
8357 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
8358 else if (GET_CODE (x) == MEM)
8359 {
8360 /* Handle possible auto-increment. Since it is pre-increment and
8361 we have already done it, we can just use an offset of word. */
8362 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8363 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8364 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
8365 UNITS_PER_WORD));
8366 else
8367 output_address (XEXP (adjust_address_nv (x, SImode,
8368 UNITS_PER_WORD),
8369 0));
8370
8371 if (small_data_operand (x, GET_MODE (x)))
8372 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8373 reg_names[SMALL_DATA_REG]);
8374 }
8375 return;
8376
8377 case 'm':
8378 /* MB value for a mask operand. */
8379 if (! mask_operand (x, SImode))
8380 output_operand_lossage ("invalid %%m value");
8381
8382 fprintf (file, "%d", extract_MB (x));
8383 return;
8384
8385 case 'M':
8386 /* ME value for a mask operand. */
8387 if (! mask_operand (x, SImode))
8388 output_operand_lossage ("invalid %%M value");
8389
8390 fprintf (file, "%d", extract_ME (x));
8391 return;
8392
8393 /* %n outputs the negative of its operand. */
8394
8395 case 'N':
8396 /* Write the number of elements in the vector times 4. */
8397 if (GET_CODE (x) != PARALLEL)
8398 output_operand_lossage ("invalid %%N value");
8399 else
8400 fprintf (file, "%d", XVECLEN (x, 0) * 4);
8401 return;
8402
8403 case 'O':
8404 /* Similar, but subtract 1 first. */
8405 if (GET_CODE (x) != PARALLEL)
8406 output_operand_lossage ("invalid %%O value");
8407 else
8408 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
8409 return;
8410
8411 case 'p':
8412 /* X is a CONST_INT that is a power of two. Output the logarithm. */
8413 if (! INT_P (x)
8414 || INT_LOWPART (x) < 0
8415 || (i = exact_log2 (INT_LOWPART (x))) < 0)
8416 output_operand_lossage ("invalid %%p value");
8417 else
8418 fprintf (file, "%d", i);
8419 return;
8420
8421 case 'P':
8422 /* The operand must be an indirect memory reference. The result
8423 is the register number. */
8424 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
8425 || REGNO (XEXP (x, 0)) >= 32)
8426 output_operand_lossage ("invalid %%P value");
8427 else
8428 fprintf (file, "%d", REGNO (XEXP (x, 0)));
8429 return;
8430
8431 case 'q':
8432 /* This outputs the logical code corresponding to a boolean
8433 expression. The expression may have one or both operands
8434 negated (if one, only the first one). For condition register
8435 logical operations, it will also treat the negated
8436 CR codes as NOTs, but not handle NOTs of them. */
8437 {
8438 const char *const *t = 0;
8439 const char *s;
8440 enum rtx_code code = GET_CODE (x);
8441 static const char * const tbl[3][3] = {
8442 { "and", "andc", "nor" },
8443 { "or", "orc", "nand" },
8444 { "xor", "eqv", "xor" } };
8445
8446 if (code == AND)
8447 t = tbl[0];
8448 else if (code == IOR)
8449 t = tbl[1];
8450 else if (code == XOR)
8451 t = tbl[2];
8452 else
8453 output_operand_lossage ("invalid %%q value");
8454
8455 if (GET_CODE (XEXP (x, 0)) != NOT)
8456 s = t[0];
8457 else
8458 {
8459 if (GET_CODE (XEXP (x, 1)) == NOT)
8460 s = t[2];
8461 else
8462 s = t[1];
8463 }
8464
8465 fputs (s, file);
8466 }
8467 return;
8468
8469 case 'R':
8470 /* X is a CR register. Print the mask for `mtcrf'. */
8471 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8472 output_operand_lossage ("invalid %%R value");
8473 else
8474 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
8475 return;
8476
8477 case 's':
8478 /* Low 5 bits of 32 - value */
8479 if (! INT_P (x))
8480 output_operand_lossage ("invalid %%s value");
8481 else
8482 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
8483 return;
8484
8485 case 'S':
8486 /* PowerPC64 mask position. All 0's is excluded.
8487 CONST_INT 32-bit mask is considered sign-extended so any
8488 transition must occur within the CONST_INT, not on the boundary. */
8489 if (! mask64_operand (x, DImode))
8490 output_operand_lossage ("invalid %%S value");
8491
8492 uval = INT_LOWPART (x);
8493
8494 if (uval & 1) /* Clear Left */
8495 {
8496 #if HOST_BITS_PER_WIDE_INT > 64
8497 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
8498 #endif
8499 i = 64;
8500 }
8501 else /* Clear Right */
8502 {
8503 uval = ~uval;
8504 #if HOST_BITS_PER_WIDE_INT > 64
8505 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
8506 #endif
8507 i = 63;
8508 }
8509 while (uval != 0)
8510 --i, uval >>= 1;
8511 if (i < 0)
8512 abort ();
8513 fprintf (file, "%d", i);
8514 return;
8515
8516 case 't':
8517 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
8518 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
8519 abort ();
8520
8521 /* Bit 3 is OV bit. */
8522 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
8523
8524 /* If we want bit 31, write a shift count of zero, not 32. */
8525 fprintf (file, "%d", i == 31 ? 0 : i + 1);
8526 return;
8527
8528 case 'T':
8529 /* Print the symbolic name of a branch target register. */
8530 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
8531 && REGNO (x) != COUNT_REGISTER_REGNUM))
8532 output_operand_lossage ("invalid %%T value");
8533 else if (REGNO (x) == LINK_REGISTER_REGNUM)
8534 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
8535 else
8536 fputs ("ctr", file);
8537 return;
8538
8539 case 'u':
8540 /* High-order 16 bits of constant for use in unsigned operand. */
8541 if (! INT_P (x))
8542 output_operand_lossage ("invalid %%u value");
8543 else
8544 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
8545 (INT_LOWPART (x) >> 16) & 0xffff);
8546 return;
8547
8548 case 'v':
8549 /* High-order 16 bits of constant for use in signed operand. */
8550 if (! INT_P (x))
8551 output_operand_lossage ("invalid %%v value");
8552 else
8553 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
8554 (INT_LOWPART (x) >> 16) & 0xffff);
8555 return;
8556
8557 case 'U':
8558 /* Print `u' if this has an auto-increment or auto-decrement. */
8559 if (GET_CODE (x) == MEM
8560 && (GET_CODE (XEXP (x, 0)) == PRE_INC
8561 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
8562 putc ('u', file);
8563 return;
8564
8565 case 'V':
8566 /* Print the trap code for this operand. */
8567 switch (GET_CODE (x))
8568 {
8569 case EQ:
8570 fputs ("eq", file); /* 4 */
8571 break;
8572 case NE:
8573 fputs ("ne", file); /* 24 */
8574 break;
8575 case LT:
8576 fputs ("lt", file); /* 16 */
8577 break;
8578 case LE:
8579 fputs ("le", file); /* 20 */
8580 break;
8581 case GT:
8582 fputs ("gt", file); /* 8 */
8583 break;
8584 case GE:
8585 fputs ("ge", file); /* 12 */
8586 break;
8587 case LTU:
8588 fputs ("llt", file); /* 2 */
8589 break;
8590 case LEU:
8591 fputs ("lle", file); /* 6 */
8592 break;
8593 case GTU:
8594 fputs ("lgt", file); /* 1 */
8595 break;
8596 case GEU:
8597 fputs ("lge", file); /* 5 */
8598 break;
8599 default:
8600 abort ();
8601 }
8602 break;
8603
8604 case 'w':
8605 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
8606 normally. */
8607 if (INT_P (x))
8608 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
8609 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
8610 else
8611 print_operand (file, x, 0);
8612 return;
8613
8614 case 'W':
8615 /* MB value for a PowerPC64 rldic operand. */
8616 val = (GET_CODE (x) == CONST_INT
8617 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
8618
8619 if (val < 0)
8620 i = -1;
8621 else
8622 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
8623 if ((val <<= 1) < 0)
8624 break;
8625
8626 #if HOST_BITS_PER_WIDE_INT == 32
8627 if (GET_CODE (x) == CONST_INT && i >= 0)
8628 i += 32; /* zero-extend high-part was all 0's */
8629 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
8630 {
8631 val = CONST_DOUBLE_LOW (x);
8632
8633 if (val == 0)
8634 abort ();
8635 else if (val < 0)
8636 --i;
8637 else
8638 for ( ; i < 64; i++)
8639 if ((val <<= 1) < 0)
8640 break;
8641 }
8642 #endif
8643
8644 fprintf (file, "%d", i + 1);
8645 return;
8646
8647 case 'X':
8648 if (GET_CODE (x) == MEM
8649 && legitimate_indexed_address_p (XEXP (x, 0), 0))
8650 putc ('x', file);
8651 return;
8652
8653 case 'Y':
8654 /* Like 'L', for third word of TImode */
8655 if (GET_CODE (x) == REG)
8656 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
8657 else if (GET_CODE (x) == MEM)
8658 {
8659 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8660 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8661 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
8662 else
8663 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
8664 if (small_data_operand (x, GET_MODE (x)))
8665 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8666 reg_names[SMALL_DATA_REG]);
8667 }
8668 return;
8669
8670 case 'z':
8671 /* X is a SYMBOL_REF. Write out the name preceded by a
8672 period and without any trailing data in brackets. Used for function
8673 names. If we are configured for System V (or the embedded ABI) on
8674 the PowerPC, do not emit the period, since those systems do not use
8675 TOCs and the like. */
8676 if (GET_CODE (x) != SYMBOL_REF)
8677 abort ();
8678
8679 if (XSTR (x, 0)[0] != '.')
8680 {
8681 switch (DEFAULT_ABI)
8682 {
8683 default:
8684 abort ();
8685
8686 case ABI_AIX:
8687 putc ('.', file);
8688 break;
8689
8690 case ABI_V4:
8691 case ABI_DARWIN:
8692 break;
8693 }
8694 }
8695 if (TARGET_AIX)
8696 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
8697 else
8698 assemble_name (file, XSTR (x, 0));
8699 return;
8700
8701 case 'Z':
8702 /* Like 'L', for last word of TImode. */
8703 if (GET_CODE (x) == REG)
8704 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
8705 else if (GET_CODE (x) == MEM)
8706 {
8707 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8708 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8709 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
8710 else
8711 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
8712 if (small_data_operand (x, GET_MODE (x)))
8713 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8714 reg_names[SMALL_DATA_REG]);
8715 }
8716 return;
8717
8718 /* Print AltiVec or SPE memory operand. */
8719 case 'y':
8720 {
8721 rtx tmp;
8722
8723 if (GET_CODE (x) != MEM)
8724 abort ();
8725
8726 tmp = XEXP (x, 0);
8727
8728 if (TARGET_E500)
8729 {
8730 /* Handle [reg]. */
8731 if (GET_CODE (tmp) == REG)
8732 {
8733 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
8734 break;
8735 }
8736 /* Handle [reg+UIMM]. */
8737 else if (GET_CODE (tmp) == PLUS &&
8738 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
8739 {
8740 int x;
8741
8742 if (GET_CODE (XEXP (tmp, 0)) != REG)
8743 abort ();
8744
8745 x = INTVAL (XEXP (tmp, 1));
8746 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
8747 break;
8748 }
8749
8750 /* Fall through. Must be [reg+reg]. */
8751 }
8752 if (GET_CODE (tmp) == REG)
8753 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
8754 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
8755 {
8756 if (REGNO (XEXP (tmp, 0)) == 0)
8757 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
8758 reg_names[ REGNO (XEXP (tmp, 0)) ]);
8759 else
8760 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
8761 reg_names[ REGNO (XEXP (tmp, 1)) ]);
8762 }
8763 else
8764 abort ();
8765 break;
8766 }
8767
8768 case 0:
8769 if (GET_CODE (x) == REG)
8770 fprintf (file, "%s", reg_names[REGNO (x)]);
8771 else if (GET_CODE (x) == MEM)
8772 {
8773 /* We need to handle PRE_INC and PRE_DEC here, since we need to
8774 know the width from the mode. */
8775 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
8776 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
8777 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8778 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
8779 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
8780 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8781 else
8782 output_address (XEXP (x, 0));
8783 }
8784 else
8785 output_addr_const (file, x);
8786 return;
8787
8788 case '&':
8789 assemble_name (file, rs6000_get_some_local_dynamic_name ());
8790 return;
8791
8792 default:
8793 output_operand_lossage ("invalid %%xn code");
8794 }
8795 }
8796 \f
/* Print the address of an operand to FILE.

   X is a memory address in one of the forms the backend accepts:
   a plain register, a constant address (SYMBOL_REF/CONST/LABEL_REF),
   reg+reg, reg+constant, a LO_SUM (ELF @l / Mach-O lo16 relocation),
   or a TOC constant-pool reference.  Anything else aborts.  */

void
print_operand_address (file, x)
     FILE *file;
     rtx x;
{
  if (GET_CODE (x) == REG)
    /* Bare register: zero displacement.  */
    fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
  else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
	   || GET_CODE (x) == LABEL_REF)
    {
      /* Constant address.  Small-data references are relocated
	 against the small-data base register.  */
      output_addr_const (file, x);
      if (small_data_operand (x, GET_MODE (x)))
	fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		 reg_names[SMALL_DATA_REG]);
      else if (TARGET_TOC)
	/* On TOC targets a bare constant address should never reach
	   here; it must go through the TOC.  */
	abort ();
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
    {
      /* Indexed form reg+reg.  If the first register is r0 the
	 operands must be swapped, since r0 in the base position
	 reads as the constant zero.  */
      if (REGNO (XEXP (x, 0)) == 0)
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
		 reg_names[ REGNO (XEXP (x, 0)) ]);
      else
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
		 reg_names[ REGNO (XEXP (x, 1)) ]);
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
    /* Displacement form: offset(reg).  */
    fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
	     INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
#if TARGET_ELF
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
	   && CONSTANT_P (XEXP (x, 1)))
    {
      /* ELF low-part relocation: sym@l(reg).  */
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
#if TARGET_MACHO
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
	   && CONSTANT_P (XEXP (x, 1)))
    {
      /* Mach-O spelling of the same low-part relocation.  */
      fprintf (file, "lo16(");
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
  else if (legitimate_constant_pool_address_p (x))
    {
      if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
	{
	  rtx contains_minus = XEXP (x, 1);
	  rtx minus, symref;
	  const char *name;

	  /* Find the (minus (sym) (toc)) buried in X, and temporarily
	     turn it into (sym) for output_addr_const.  */
	  while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
	    contains_minus = XEXP (contains_minus, 0);

	  minus = XEXP (contains_minus, 0);
	  symref = XEXP (minus, 0);
	  XEXP (contains_minus, 0) = symref;
	  if (TARGET_ELF)
	    {
	      /* For ELF, splice an "@toc" suffix onto the symbol name
		 for the duration of the output_addr_const call.  NAME
		 is only set/restored under TARGET_ELF, so it is never
		 read uninitialized.  */
	      char *newname;

	      name = XSTR (symref, 0);
	      newname = alloca (strlen (name) + sizeof ("@toc"));
	      strcpy (newname, name);
	      strcat (newname, "@toc");
	      XSTR (symref, 0) = newname;
	    }
	  output_addr_const (file, XEXP (x, 1));
	  if (TARGET_ELF)
	    XSTR (symref, 0) = name;
	  /* Undo the in-place rtx surgery.  */
	  XEXP (contains_minus, 0) = minus;
	}
      else
	output_addr_const (file, XEXP (x, 1));

      fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
    }
  else
    abort ();
}
8884 \f
/* Target hook for assembling integer objects.  The PowerPC version has
   to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
   is defined.  It also needs to handle DI-mode objects on 64-bit
   targets.

   X is the value, SIZE its width in bytes, ALIGNED_P nonzero when it
   is known to be naturally aligned.  Returns true if the value was
   emitted here, otherwise defers to default_assemble_integer.  */

static bool
rs6000_assemble_integer (x, size, aligned_p)
     rtx x;
     unsigned int size;
     int aligned_p;
{
#ifdef RELOCATABLE_NEEDS_FIXUP
  /* Special handling for SI values.  */
  if (size == 4 && aligned_p)
    {
      extern int in_toc_section PARAMS ((void));
      /* Guard flag: output_addr_const below may re-enter this hook;
	 RECURSE makes the nested call fall through to the default
	 path instead of emitting another fixup.  */
      static int recurse = 0;

      /* For -mrelocatable, we mark all addresses that need to be fixed up
	 in the .fixup section.  */
      if (TARGET_RELOCATABLE
	  && !in_toc_section ()
	  && !in_text_section ()
	  && !recurse
	  && GET_CODE (x) != CONST_INT
	  && GET_CODE (x) != CONST_DOUBLE
	  && CONSTANT_P (x))
	{
	  char buf[256];

	  recurse = 1;
	  /* Emit a local label, the value itself tagged @fixup, and a
	     .fixup section entry pointing back at the label.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
	  fixuplabelno++;
	  ASM_OUTPUT_LABEL (asm_out_file, buf);
	  fprintf (asm_out_file, "\t.long\t(");
	  output_addr_const (asm_out_file, x);
	  fprintf (asm_out_file, ")@fixup\n");
	  fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
	  ASM_OUTPUT_ALIGN (asm_out_file, 2);
	  fprintf (asm_out_file, "\t.long\t");
	  assemble_name (asm_out_file, buf);
	  fprintf (asm_out_file, "\n\t.previous\n");
	  recurse = 0;
	  return true;
	}
      /* Remove initial .'s to turn a -mcall-aixdesc function
	 address into the address of the descriptor, not the function
	 itself.  */
      else if (GET_CODE (x) == SYMBOL_REF
	       && XSTR (x, 0)[0] == '.'
	       && DEFAULT_ABI == ABI_AIX)
	{
	  const char *name = XSTR (x, 0);
	  while (*name == '.')
	    name++;

	  fprintf (asm_out_file, "\t.long\t%s\n", name);
	  return true;
	}
    }
#endif /* RELOCATABLE_NEEDS_FIXUP */
  return default_assemble_integer (x, size, aligned_p);
}
8948
8949 #ifdef HAVE_GAS_HIDDEN
8950 /* Emit an assembler directive to set symbol visibility for DECL to
8951 VISIBILITY_TYPE. */
8952
8953 static void
8954 rs6000_assemble_visibility (decl, vis)
8955 tree decl;
8956 int vis;
8957 {
8958 /* Functions need to have their entry point symbol visibility set as
8959 well as their descriptor symbol visibility. */
8960 if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
8961 {
8962 static const char * const visibility_types[] = {
8963 NULL, "internal", "hidden", "protected"
8964 };
8965
8966 const char *name, *type;
8967
8968 name = ((* targetm.strip_name_encoding)
8969 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
8970 type = visibility_types[vis];
8971
8972 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
8973 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
8974 }
8975 else
8976 default_assemble_visibility (decl, vis);
8977 }
8978 #endif
8979 \f
8980 enum rtx_code
8981 rs6000_reverse_condition (mode, code)
8982 enum machine_mode mode;
8983 enum rtx_code code;
8984 {
8985 /* Reversal of FP compares takes care -- an ordered compare
8986 becomes an unordered compare and vice versa. */
8987 if (mode == CCFPmode
8988 && (!flag_finite_math_only
8989 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
8990 || code == UNEQ || code == LTGT))
8991 return reverse_condition_maybe_unordered (code);
8992 else
8993 return reverse_condition (code);
8994 }
8995
8996 /* Generate a compare for CODE. Return a brand-new rtx that
8997 represents the result of the compare. */
8998
8999 static rtx
9000 rs6000_generate_compare (code)
9001 enum rtx_code code;
9002 {
9003 enum machine_mode comp_mode;
9004 rtx compare_result;
9005
9006 if (rs6000_compare_fp_p)
9007 comp_mode = CCFPmode;
9008 else if (code == GTU || code == LTU
9009 || code == GEU || code == LEU)
9010 comp_mode = CCUNSmode;
9011 else
9012 comp_mode = CCmode;
9013
9014 /* First, the compare. */
9015 compare_result = gen_reg_rtx (comp_mode);
9016
9017 /* SPE FP compare instructions on the GPRs. Yuck! */
9018 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
9019 && rs6000_compare_fp_p)
9020 {
9021 rtx cmp, or1, or2, or_result, compare_result2;
9022
9023 switch (code)
9024 {
9025 case EQ:
9026 case UNEQ:
9027 case NE:
9028 case LTGT:
9029 cmp = flag_finite_math_only
9030 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
9031 rs6000_compare_op1)
9032 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
9033 rs6000_compare_op1);
9034 break;
9035 case GT:
9036 case GTU:
9037 case UNGT:
9038 case UNGE:
9039 case GE:
9040 case GEU:
9041 cmp = flag_finite_math_only
9042 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
9043 rs6000_compare_op1)
9044 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
9045 rs6000_compare_op1);
9046 break;
9047 case LT:
9048 case LTU:
9049 case UNLT:
9050 case UNLE:
9051 case LE:
9052 case LEU:
9053 cmp = flag_finite_math_only
9054 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
9055 rs6000_compare_op1)
9056 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
9057 rs6000_compare_op1);
9058 break;
9059 default:
9060 abort ();
9061 }
9062
9063 /* Synthesize LE and GE from LT/GT || EQ. */
9064 if (code == LE || code == GE || code == LEU || code == GEU)
9065 {
9066 /* Synthesize GE/LE frome GT/LT || EQ. */
9067
9068 emit_insn (cmp);
9069
9070 switch (code)
9071 {
9072 case LE: code = LT; break;
9073 case GE: code = GT; break;
9074 case LEU: code = LT; break;
9075 case GEU: code = GT; break;
9076 default: abort ();
9077 }
9078
9079 or1 = gen_reg_rtx (SImode);
9080 or2 = gen_reg_rtx (SImode);
9081 or_result = gen_reg_rtx (CCEQmode);
9082 compare_result2 = gen_reg_rtx (CCFPmode);
9083
9084 /* Do the EQ. */
9085 cmp = flag_finite_math_only
9086 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
9087 rs6000_compare_op1)
9088 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
9089 rs6000_compare_op1);
9090 emit_insn (cmp);
9091
9092 /* The MC8540 FP compare instructions set the CR bits
9093 differently than other PPC compare instructions. For
9094 that matter, there is no generic test instruction, but a
9095 testgt, testlt, and testeq. For a true condition, bit 2
9096 is set (x1xx) in the CR. Following the traditional CR
9097 values:
9098
9099 LT GT EQ OV
9100 bit3 bit2 bit1 bit0
9101
9102 ... bit 2 would be a GT CR alias, so later on we
9103 look in the GT bits for the branch instructions.
9104 However, we must be careful to emit correct RTL in
9105 the meantime, so optimizations don't get confused. */
9106
9107 or1 = gen_rtx (NE, SImode, compare_result, const0_rtx);
9108 or2 = gen_rtx (NE, SImode, compare_result2, const0_rtx);
9109
9110 /* OR them together. */
9111 cmp = gen_rtx_SET (VOIDmode, or_result,
9112 gen_rtx_COMPARE (CCEQmode,
9113 gen_rtx_IOR (SImode, or1, or2),
9114 const_true_rtx));
9115 compare_result = or_result;
9116 code = EQ;
9117 }
9118 else
9119 {
9120 /* We only care about 1 bit (x1xx), so map everything to NE to
9121 maintain rtl sanity. We'll get to the right bit (x1xx) at
9122 code output time. */
9123 if (code == NE || code == LTGT)
9124 /* Do the inverse here because we have no cmpne
9125 instruction. We use the cmpeq instruction and expect
9126 to get a 0 instead. */
9127 code = EQ;
9128 else
9129 code = NE;
9130 }
9131
9132 emit_insn (cmp);
9133 }
9134 else
9135 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
9136 gen_rtx_COMPARE (comp_mode,
9137 rs6000_compare_op0,
9138 rs6000_compare_op1)));
9139
9140 /* Some kinds of FP comparisons need an OR operation;
9141 under flag_finite_math_only we don't bother. */
9142 if (rs6000_compare_fp_p
9143 && ! flag_finite_math_only
9144 && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
9145 && (code == LE || code == GE
9146 || code == UNEQ || code == LTGT
9147 || code == UNGT || code == UNLT))
9148 {
9149 enum rtx_code or1, or2;
9150 rtx or1_rtx, or2_rtx, compare2_rtx;
9151 rtx or_result = gen_reg_rtx (CCEQmode);
9152
9153 switch (code)
9154 {
9155 case LE: or1 = LT; or2 = EQ; break;
9156 case GE: or1 = GT; or2 = EQ; break;
9157 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
9158 case LTGT: or1 = LT; or2 = GT; break;
9159 case UNGT: or1 = UNORDERED; or2 = GT; break;
9160 case UNLT: or1 = UNORDERED; or2 = LT; break;
9161 default: abort ();
9162 }
9163 validate_condition_mode (or1, comp_mode);
9164 validate_condition_mode (or2, comp_mode);
9165 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
9166 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
9167 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
9168 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
9169 const_true_rtx);
9170 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
9171
9172 compare_result = or_result;
9173 code = EQ;
9174 }
9175
9176 validate_condition_mode (code, GET_MODE (compare_result));
9177
9178 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
9179 }
9180
9181
/* Emit the RTL for an sCOND pattern: store 0/1 into RESULT according
   to comparison CODE of the global rs6000_compare_op0/op1.  */

void
rs6000_emit_sCOND (code, result)
     enum rtx_code code;
     rtx result;
{
  rtx condition_rtx;
  enum machine_mode op_mode;
  enum rtx_code cond_code;

  /* Emit the compare; CONDITION_RTX is a CODE test of a CC register
     against zero.  */
  condition_rtx = rs6000_generate_compare (code);
  cond_code = GET_CODE (condition_rtx);

  /* For these codes, re-express the condition as "the reversed
     condition is false": compute the reversed comparison into a
     CCEQmode register and test it EQ against zero.  Presumably these
     are the codes whose CR bit cannot be extracted directly -- TODO
     confirm against the sCOND expanders.  */
  if (cond_code == NE
      || cond_code == GE || cond_code == LE
      || cond_code == GEU || cond_code == LEU
      || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
    {
      rtx not_result = gen_reg_rtx (CCEQmode);
      rtx not_op, rev_cond_rtx;
      enum machine_mode cc_mode;

      cc_mode = GET_MODE (XEXP (condition_rtx, 0));

      rev_cond_rtx = gen_rtx (rs6000_reverse_condition (cc_mode, cond_code),
			      SImode, XEXP (condition_rtx, 0), const0_rtx);
      not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
      condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
    }

  /* Mode of the compared operands; op0 may be VOIDmode if it was a
     constant, so fall back to op1's mode.  */
  op_mode = GET_MODE (rs6000_compare_op0);
  if (op_mode == VOIDmode)
    op_mode = GET_MODE (rs6000_compare_op1);

  if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
    {
      /* 64-bit case: materialize the 0/1 value in DImode and convert
	 it into RESULT's mode.  */
      PUT_MODE (condition_rtx, DImode);
      convert_move (result, condition_rtx, 0);
    }
  else
    {
      PUT_MODE (condition_rtx, SImode);
      emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
    }
}
9229
9230 /* Emit a branch of kind CODE to location LOC. */
9231
9232 void
9233 rs6000_emit_cbranch (code, loc)
9234 enum rtx_code code;
9235 rtx loc;
9236 {
9237 rtx condition_rtx, loc_ref;
9238
9239 condition_rtx = rs6000_generate_compare (code);
9240 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
9241 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
9242 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
9243 loc_ref, pc_rtx)));
9244 }
9245
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label, or -1 if the branch is really a
   conditional return.

   OP is the conditional expression.  XEXP (OP, 0) is assumed to be a
   condition code register and its mode specifies what kind of
   comparison we made.

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.

   The returned string lives in a static buffer and is overwritten by
   the next call.  */

char *
output_cbranch (op, label, reversed, insn)
     rtx op;
     const char * label;
     int reversed;
     rtx insn;
{
  static char string[64];
  enum rtx_code code = GET_CODE (op);
  rtx cc_reg = XEXP (op, 0);
  enum machine_mode mode = GET_MODE (cc_reg);
  int cc_regno = REGNO (cc_reg) - CR0_REGNO;
  /* An 8-byte branch insn means the target is out of conditional
     branch range, so a short reversed branch over an unconditional
     one is emitted instead.  */
  int need_longbranch = label != NULL && get_attr_length (insn) == 8;
  int really_reversed = reversed ^ need_longbranch;
  char *s = string;
  const char *ccode;
  const char *pred;
  rtx note;

  validate_condition_mode (code, mode);

  /* Work out which way this really branches.  We could use
     reverse_condition_maybe_unordered here always but this
     makes the resulting assembler clearer.  */
  if (really_reversed)
    {
      /* Reversal of FP compares takes care -- an ordered compare
	 becomes an unordered compare and vice versa.  */
      if (mode == CCFPmode)
	code = reverse_condition_maybe_unordered (code);
      else
	code = reverse_condition (code);
    }

  if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
    {
      /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
	 to the GT bit.  */
      if (code == EQ)
	/* Opposite of GT.  */
	code = UNLE;
      else if (code == NE)
	code = GT;
      else
	abort ();
    }

  /* Map CODE to the branch condition mnemonic.  */
  switch (code)
    {
      /* Not all of these are actually distinct opcodes, but
	 we distinguish them for clarity of the resulting assembler.  */
    case NE: case LTGT:
      ccode = "ne"; break;
    case EQ: case UNEQ:
      ccode = "eq"; break;
    case GE: case GEU:
      ccode = "ge"; break;
    case GT: case GTU: case UNGT:
      ccode = "gt"; break;
    case LE: case LEU:
      ccode = "le"; break;
    case LT: case LTU: case UNLT:
      ccode = "lt"; break;
    case UNORDERED: ccode = "un"; break;
    case ORDERED: ccode = "nu"; break;
    case UNGE: ccode = "nl"; break;
    case UNLE: ccode = "ng"; break;
    default:
      abort ();
    }

  /* Maybe we have a guess as to how likely the branch is.
     The old mnemonics don't have a way to specify this information.  */
  pred = "";
  note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
  if (note != NULL_RTX)
    {
      /* PROB is the difference from 50%.  */
      int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
      bool always_hint = rs6000_cpu != PROCESSOR_POWER4;

      /* Only hint for highly probable/improbable branches on newer
	 cpus as static prediction overrides processor dynamic
	 prediction.  For older cpus we may as well always hint, but
	 assume not taken for branches that are very close to 50% as a
	 mispredicted taken branch is more expensive than a
	 mispredicted not-taken branch.  */
      if (always_hint
	  || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
	{
	  if (abs (prob) > REG_BR_PROB_BASE / 20
	      && ((prob > 0) ^ need_longbranch))
	    pred = "+";
	  else
	    pred = "-";
	}
    }

  /* Mnemonic: conditional return when LABEL is NULL, conditional
     branch otherwise ({old|new} mnemonic pairs).  */
  if (label == NULL)
    s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
  else
    s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);

  /* We need to escape any '%' characters in the reg_names string.
     Assume they'd only be the first character...  */
  if (reg_names[cc_regno + CR0_REGNO][0] == '%')
    *s++ = '%';
  s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);

  if (label != NULL)
    {
      /* If the branch distance was too far, we may have to use an
	 unconditional branch to go the distance.  */
      if (need_longbranch)
	s += sprintf (s, ",$+8\n\tb %s", label);
      else
	s += sprintf (s, ",%s", label);
    }

  return string;
}
9379
/* Emit a conditional move: move TRUE_COND to DEST if OP of the
   operands of the last comparison is nonzero/true, FALSE_COND if it
   is zero/false.  Return 0 if the hardware has no such operation.

   The strategy for FP is to reduce every supported comparison to a
   "GE against zero" test and emit an fsel-style if_then_else; integer
   conditional moves are delegated to rs6000_emit_int_cmove (isel).  */

int
rs6000_emit_cmove (dest, op, true_cond, false_cond)
     rtx dest;
     rtx op;
     rtx true_cond;
     rtx false_cond;
{
  enum rtx_code code = GET_CODE (op);
  rtx op0 = rs6000_compare_op0;
  rtx op1 = rs6000_compare_op1;
  REAL_VALUE_TYPE c1;
  enum machine_mode compare_mode = GET_MODE (op0);
  enum machine_mode result_mode = GET_MODE (dest);
  rtx temp;

  /* These modes should always match.  */
  if (GET_MODE (op1) != compare_mode
      /* In the isel case however, we can use a compare immediate, so
	 op1 may be a small constant.  */
      && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
    return 0;
  if (GET_MODE (true_cond) != result_mode)
    return 0;
  if (GET_MODE (false_cond) != result_mode)
    return 0;

  /* First, work out if the hardware can do this at all, or
     if it's too slow...  */
  if (! rs6000_compare_fp_p)
    {
      if (TARGET_ISEL)
	return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
      return 0;
    }

  /* Eliminate half of the comparisons by switching operands, this
     makes the remaining code simpler.  */
  if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
      || code == LTGT || code == LT || code == UNLE)
    {
      code = reverse_condition_maybe_unordered (code);
      temp = true_cond;
      true_cond = false_cond;
      false_cond = temp;
    }

  /* UNEQ and LTGT take four instructions for a comparison with zero,
     it'll probably be faster to use a branch here too.  */
  if (code == UNEQ && HONOR_NANS (compare_mode))
    return 0;

  /* C1 is only read below under conditions that first check for a
     CONST_DOUBLE op1, so it is never used uninitialized.  */
  if (GET_CODE (op1) == CONST_DOUBLE)
    REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);

  /* We're going to try to implement comparisons by performing
     a subtract, then comparing against zero.  Unfortunately,
     Inf - Inf is NaN which is not zero, and so if we don't
     know that the operand is finite and the comparison
     would treat EQ different to UNORDERED, we can't do it.  */
  if (HONOR_INFINITIES (compare_mode)
      && code != GT && code != UNGE
      && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
      /* Constructs of the form (a OP b ? a : b) are safe.  */
      && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
	  || (! rtx_equal_p (op0, true_cond)
	      && ! rtx_equal_p (op1, true_cond))))
    return 0;
  /* At this point we know we can use fsel.  */

  /* Reduce the comparison to a comparison against zero.  */
  temp = gen_reg_rtx (compare_mode);
  emit_insn (gen_rtx_SET (VOIDmode, temp,
			  gen_rtx_MINUS (compare_mode, op0, op1)));
  op0 = temp;
  op1 = CONST0_RTX (compare_mode);

  /* If we don't care about NaNs we can reduce some of the comparisons
     down to faster ones.  */
  if (! HONOR_NANS (compare_mode))
    switch (code)
      {
      case GT:
	code = LE;
	temp = true_cond;
	true_cond = false_cond;
	false_cond = temp;
	break;
      case UNGE:
	code = GE;
	break;
      case UNEQ:
	code = EQ;
	break;
      default:
	break;
      }

  /* Now, reduce everything down to a GE.  */
  switch (code)
    {
    case GE:
      break;

    case LE:
      /* a LE 0 <-> -a GE 0.  */
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    case ORDERED:
      /* a ORDERED 0 <-> abs(a) GE 0 (false only for NaN).  */
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
      op0 = temp;
      break;

    case EQ:
      /* a EQ 0 <-> -abs(a) GE 0 (true only when a is zero).  */
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_NEG (compare_mode,
					   gen_rtx_ABS (compare_mode, op0))));
      op0 = temp;
      break;

    case UNGE:
      /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
      temp = gen_reg_rtx (result_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_IF_THEN_ELSE (result_mode,
						    gen_rtx_GE (VOIDmode,
								op0, op1),
						    true_cond, false_cond)));
      false_cond = true_cond;
      true_cond = temp;

      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    case GT:
      /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
      temp = gen_reg_rtx (result_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_IF_THEN_ELSE (result_mode,
						    gen_rtx_GE (VOIDmode,
								op0, op1),
						    true_cond, false_cond)));
      true_cond = false_cond;
      false_cond = temp;

      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    default:
      abort ();
    }

  /* Emit the final fsel-style conditional move on the reduced
     GE-against-zero test.  */
  emit_insn (gen_rtx_SET (VOIDmode, dest,
			  gen_rtx_IF_THEN_ELSE (result_mode,
						gen_rtx_GE (VOIDmode,
							    op0, op1),
						true_cond, false_cond)));
  return 1;
}
9550
9551 /* Same as above, but for ints (isel). */
9552
9553 static int
9554 rs6000_emit_int_cmove (dest, op, true_cond, false_cond)
9555 rtx dest;
9556 rtx op;
9557 rtx true_cond;
9558 rtx false_cond;
9559 {
9560 rtx condition_rtx, cr;
9561
9562 /* All isel implementations thus far are 32-bits. */
9563 if (GET_MODE (rs6000_compare_op0) != SImode)
9564 return 0;
9565
9566 /* We still have to do the compare, because isel doesn't do a
9567 compare, it just looks at the CRx bits set by a previous compare
9568 instruction. */
9569 condition_rtx = rs6000_generate_compare (GET_CODE (op));
9570 cr = XEXP (condition_rtx, 0);
9571
9572 if (GET_MODE (cr) == CCmode)
9573 emit_insn (gen_isel_signed (dest, condition_rtx,
9574 true_cond, false_cond, cr));
9575 else
9576 emit_insn (gen_isel_unsigned (dest, condition_rtx,
9577 true_cond, false_cond, cr));
9578
9579 return 1;
9580 }
9581
9582 const char *
9583 output_isel (operands)
9584 rtx *operands;
9585 {
9586 enum rtx_code code;
9587
9588 code = GET_CODE (operands[1]);
9589 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
9590 {
9591 PUT_CODE (operands[1], reverse_condition (code));
9592 return "isel %0,%3,%2,%j1";
9593 }
9594 else
9595 return "isel %0,%2,%3,%j1";
9596 }
9597
9598 void
9599 rs6000_emit_minmax (dest, code, op0, op1)
9600 rtx dest;
9601 enum rtx_code code;
9602 rtx op0;
9603 rtx op1;
9604 {
9605 enum machine_mode mode = GET_MODE (op0);
9606 enum rtx_code c;
9607 rtx target;
9608
9609 if (code == SMAX || code == SMIN)
9610 c = GE;
9611 else
9612 c = GEU;
9613
9614 if (code == SMAX || code == UMAX)
9615 target = emit_conditional_move (dest, c, op0, op1, mode,
9616 op0, op1, mode, 0);
9617 else
9618 target = emit_conditional_move (dest, c, op0, op1, mode,
9619 op1, op0, mode, 0);
9620 if (target == NULL_RTX)
9621 abort ();
9622 if (target != dest)
9623 emit_move_insn (dest, target);
9624 }
9625 \f
9626 /* This page contains routines that are used to determine what the
9627 function prologue and epilogue code will do and write them out. */
9628
9629 /* Return the first fixed-point register that is required to be
9630 saved. 32 if none. */
9631
9632 int
9633 first_reg_to_save ()
9634 {
9635 int first_reg;
9636
9637 /* Find lowest numbered live register. */
9638 for (first_reg = 13; first_reg <= 31; first_reg++)
9639 if (regs_ever_live[first_reg]
9640 && (! call_used_regs[first_reg]
9641 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
9642 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
9643 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
9644 break;
9645
9646 #if TARGET_MACHO
9647 if (flag_pic
9648 && current_function_uses_pic_offset_table
9649 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
9650 return RS6000_PIC_OFFSET_TABLE_REGNUM;
9651 #endif
9652
9653 return first_reg;
9654 }
9655
9656 /* Similar, for FP regs. */
9657
9658 int
9659 first_fp_reg_to_save ()
9660 {
9661 int first_reg;
9662
9663 /* Find lowest numbered live register. */
9664 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
9665 if (regs_ever_live[first_reg])
9666 break;
9667
9668 return first_reg;
9669 }
9670
9671 /* Similar, for AltiVec regs. */
9672
9673 static int
9674 first_altivec_reg_to_save ()
9675 {
9676 int i;
9677
9678 /* Stack frame remains as is unless we are in AltiVec ABI. */
9679 if (! TARGET_ALTIVEC_ABI)
9680 return LAST_ALTIVEC_REGNO + 1;
9681
9682 /* Find lowest numbered live register. */
9683 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
9684 if (regs_ever_live[i])
9685 break;
9686
9687 return i;
9688 }
9689
/* Return a 32-bit mask of the AltiVec registers we need to set in
   VRSAVE.  Bit n of the return value is 1 if Vn is live.  The MSB in
   the 32-bit word is 0.  */

static unsigned int
compute_vrsave_mask ()
{
  unsigned int i, mask = 0;

  /* First, find out if we use _any_ altivec registers.  */
  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
    if (regs_ever_live[i])
      mask |= ALTIVEC_REG_BIT (i);

  if (mask == 0)
    return mask;

  /* Next, remove the argument registers from the set.  These must
     be in the VRSAVE mask set by the caller, so we don't need to add
     them in again.  More importantly, the mask we compute here is
     used to generate CLOBBERs in the set_vrsave insn, and we do not
     wish the argument registers to die.  */
  /* NOTE(review): counts down from args_info.vregno (which appears to
     be the next vector-argument register to assign), so the register
     AT vregno is cleared as well as those below it -- confirm this
     off-by-one is intended; harmless if that register's bit is not
     set in MASK.  I is unsigned, so the loop relies on
     ALTIVEC_ARG_MIN_REG being > 0 to terminate.  */
  for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
    mask &= ~ALTIVEC_REG_BIT (i);

  /* Similarly, remove the return value from the set.  */
  {
    bool yes = false;
    diddle_return_value (is_altivec_return_reg, &yes);
    if (yes)
      mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
  }

  return mask;
}
9725
9726 static void
9727 is_altivec_return_reg (reg, xyes)
9728 rtx reg;
9729 void *xyes;
9730 {
9731 bool *yes = (bool *) xyes;
9732 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
9733 *yes = true;
9734 }
9735
9736 \f
9737 /* Calculate the stack information for the current function. This is
9738 complicated by having two separate calling sequences, the AIX calling
9739 sequence and the V.4 calling sequence.
9740
9741 AIX (and Darwin/Mac OS X) stack frames look like:
9742 32-bit 64-bit
9743 SP----> +---------------------------------------+
9744 | back chain to caller | 0 0
9745 +---------------------------------------+
9746 | saved CR | 4 8 (8-11)
9747 +---------------------------------------+
9748 | saved LR | 8 16
9749 +---------------------------------------+
9750 | reserved for compilers | 12 24
9751 +---------------------------------------+
9752 | reserved for binders | 16 32
9753 +---------------------------------------+
9754 | saved TOC pointer | 20 40
9755 +---------------------------------------+
9756 | Parameter save area (P) | 24 48
9757 +---------------------------------------+
9758 | Alloca space (A) | 24+P etc.
9759 +---------------------------------------+
9760 | Local variable space (L) | 24+P+A
9761 +---------------------------------------+
9762 | Float/int conversion temporary (X) | 24+P+A+L
9763 +---------------------------------------+
9764 | Save area for AltiVec registers (W) | 24+P+A+L+X
9765 +---------------------------------------+
9766 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
9767 +---------------------------------------+
9768 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
9769 +---------------------------------------+
	| Save area for GP registers (G)	| 24+P+A+L+X+W+Y+Z
	+---------------------------------------+
	| Save area for FP registers (F)	| 24+P+A+L+X+W+Y+Z+G
9773 +---------------------------------------+
9774 old SP->| back chain to caller's caller |
9775 +---------------------------------------+
9776
9777 The required alignment for AIX configurations is two words (i.e., 8
9778 or 16 bytes).
9779
9780
9781 V.4 stack frames look like:
9782
9783 SP----> +---------------------------------------+
9784 | back chain to caller | 0
9785 +---------------------------------------+
9786 | caller's saved LR | 4
9787 +---------------------------------------+
9788 | Parameter save area (P) | 8
9789 +---------------------------------------+
9790 | Alloca space (A) | 8+P
9791 +---------------------------------------+
9792 | Varargs save area (V) | 8+P+A
9793 +---------------------------------------+
9794 | Local variable space (L) | 8+P+A+V
9795 +---------------------------------------+
9796 | Float/int conversion temporary (X) | 8+P+A+V+L
9797 +---------------------------------------+
9798 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
9799 +---------------------------------------+
9800 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
9801 +---------------------------------------+
9802 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
9803 +---------------------------------------+
9804 | SPE: area for 64-bit GP registers |
9805 +---------------------------------------+
9806 | SPE alignment padding |
9807 +---------------------------------------+
9808 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
9809 +---------------------------------------+
9810 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
9811 +---------------------------------------+
9812 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
9813 +---------------------------------------+
9814 old SP->| back chain to caller's caller |
9815 +---------------------------------------+
9816
9817 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
9818 given. (But note below and in sysv4.h that we require only 8 and
9819 may round up the size of our stack frame anyways. The historical
9820 reason is early versions of powerpc-linux which didn't properly
9821 align the stack at program startup. A happy side-effect is that
9822 -mno-eabi libraries can be used with -meabi programs.)
9823
9824 The EABI configuration defaults to the V.4 layout. However,
9825 the stack alignment requirements may differ. If -mno-eabi is not
9826 given, the required stack alignment is 8 bytes; if -mno-eabi is
9827 given, the required alignment is 16 bytes. (But see V.4 comment
9828 above.) */
9829
9830 #ifndef ABI_STACK_BOUNDARY
9831 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
9832 #endif
9833
9834 rs6000_stack_t *
9835 rs6000_stack_info ()
9836 {
9837 static rs6000_stack_t info, zero_info;
9838 rs6000_stack_t *info_ptr = &info;
9839 int reg_size = TARGET_POWERPC64 ? 8 : 4;
9840 int ehrd_size;
9841 int total_raw_size;
9842
9843 /* Zero all fields portably. */
9844 info = zero_info;
9845
9846 if (TARGET_SPE)
9847 {
9848 /* Cache value so we don't rescan instruction chain over and over. */
9849 if (cfun->machine->insn_chain_scanned_p == 0)
9850 {
9851 cfun->machine->insn_chain_scanned_p = 1;
9852 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
9853 }
9854 }
9855
9856 /* Select which calling sequence. */
9857 info_ptr->abi = DEFAULT_ABI;
9858
9859 /* Calculate which registers need to be saved & save area size. */
9860 info_ptr->first_gp_reg_save = first_reg_to_save ();
9861 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
9862 even if it currently looks like we won't. */
9863 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
9864 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
9865 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
9866 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
9867 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
9868 else
9869 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
9870
9871 /* For the SPE, we have an additional upper 32-bits on each GPR.
9872 Ideally we should save the entire 64-bits only when the upper
9873 half is used in SIMD instructions. Since we only record
9874 registers live (not the size they are used in), this proves
9875 difficult because we'd have to traverse the instruction chain at
9876 the right time, taking reload into account. This is a real pain,
9877 so we opt to save the GPRs in 64-bits always if but one register
9878 gets used in 64-bits. Otherwise, all the registers in the frame
9879 get saved in 32-bits.
9880
9881 So... since when we save all GPRs (except the SP) in 64-bits, the
9882 traditional GP save area will be empty. */
9883 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9884 info_ptr->gp_size = 0;
9885
9886 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
9887 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
9888
9889 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
9890 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
9891 - info_ptr->first_altivec_reg_save);
9892
9893 /* Does this function call anything? */
9894 info_ptr->calls_p = (! current_function_is_leaf
9895 || cfun->machine->ra_needs_full_frame);
9896
9897 /* Determine if we need to save the link register. */
9898 if (rs6000_ra_ever_killed ()
9899 || (DEFAULT_ABI == ABI_AIX
9900 && current_function_profile
9901 && !TARGET_PROFILE_KERNEL)
9902 #ifdef TARGET_RELOCATABLE
9903 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
9904 #endif
9905 || (info_ptr->first_fp_reg_save != 64
9906 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
9907 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
9908 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
9909 || (DEFAULT_ABI == ABI_DARWIN
9910 && flag_pic
9911 && current_function_uses_pic_offset_table)
9912 || info_ptr->calls_p)
9913 {
9914 info_ptr->lr_save_p = 1;
9915 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
9916 }
9917
9918 /* Determine if we need to save the condition code registers. */
9919 if (regs_ever_live[CR2_REGNO]
9920 || regs_ever_live[CR3_REGNO]
9921 || regs_ever_live[CR4_REGNO])
9922 {
9923 info_ptr->cr_save_p = 1;
9924 if (DEFAULT_ABI == ABI_V4)
9925 info_ptr->cr_size = reg_size;
9926 }
9927
9928 /* If the current function calls __builtin_eh_return, then we need
9929 to allocate stack space for registers that will hold data for
9930 the exception handler. */
9931 if (current_function_calls_eh_return)
9932 {
9933 unsigned int i;
9934 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
9935 continue;
9936
9937 /* SPE saves EH registers in 64-bits. */
9938 ehrd_size = i * (TARGET_SPE_ABI
9939 && info_ptr->spe_64bit_regs_used != 0
9940 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
9941 }
9942 else
9943 ehrd_size = 0;
9944
9945 /* Determine various sizes. */
9946 info_ptr->reg_size = reg_size;
9947 info_ptr->fixed_size = RS6000_SAVE_AREA;
9948 info_ptr->varargs_size = RS6000_VARARGS_AREA;
9949 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
9950 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
9951 8);
9952
9953 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9954 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
9955 else
9956 info_ptr->spe_gp_size = 0;
9957
9958 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
9959 {
9960 info_ptr->vrsave_mask = compute_vrsave_mask ();
9961 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
9962 }
9963 else
9964 {
9965 info_ptr->vrsave_mask = 0;
9966 info_ptr->vrsave_size = 0;
9967 }
9968
9969 /* Calculate the offsets. */
9970 switch (DEFAULT_ABI)
9971 {
9972 case ABI_NONE:
9973 default:
9974 abort ();
9975
9976 case ABI_AIX:
9977 case ABI_DARWIN:
9978 info_ptr->fp_save_offset = - info_ptr->fp_size;
9979 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9980
9981 if (TARGET_ALTIVEC_ABI)
9982 {
9983 info_ptr->vrsave_save_offset
9984 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
9985
9986 /* Align stack so vector save area is on a quadword boundary. */
9987 if (info_ptr->altivec_size != 0)
9988 info_ptr->altivec_padding_size
9989 = 16 - (-info_ptr->vrsave_save_offset % 16);
9990 else
9991 info_ptr->altivec_padding_size = 0;
9992
9993 info_ptr->altivec_save_offset
9994 = info_ptr->vrsave_save_offset
9995 - info_ptr->altivec_padding_size
9996 - info_ptr->altivec_size;
9997
9998 /* Adjust for AltiVec case. */
9999 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
10000 }
10001 else
10002 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
10003 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
10004 info_ptr->lr_save_offset = 2*reg_size;
10005 break;
10006
10007 case ABI_V4:
10008 info_ptr->fp_save_offset = - info_ptr->fp_size;
10009 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
10010 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
10011
10012 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10013 {
10014 /* Align stack so SPE GPR save area is aligned on a
10015 double-word boundary. */
10016 if (info_ptr->spe_gp_size != 0)
10017 info_ptr->spe_padding_size
10018 = 8 - (-info_ptr->cr_save_offset % 8);
10019 else
10020 info_ptr->spe_padding_size = 0;
10021
10022 info_ptr->spe_gp_save_offset
10023 = info_ptr->cr_save_offset
10024 - info_ptr->spe_padding_size
10025 - info_ptr->spe_gp_size;
10026
10027 /* Adjust for SPE case. */
10028 info_ptr->toc_save_offset
10029 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
10030 }
10031 else if (TARGET_ALTIVEC_ABI)
10032 {
10033 info_ptr->vrsave_save_offset
10034 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
10035
10036 /* Align stack so vector save area is on a quadword boundary. */
10037 if (info_ptr->altivec_size != 0)
10038 info_ptr->altivec_padding_size
10039 = 16 - (-info_ptr->vrsave_save_offset % 16);
10040 else
10041 info_ptr->altivec_padding_size = 0;
10042
10043 info_ptr->altivec_save_offset
10044 = info_ptr->vrsave_save_offset
10045 - info_ptr->altivec_padding_size
10046 - info_ptr->altivec_size;
10047
10048 /* Adjust for AltiVec case. */
10049 info_ptr->toc_save_offset
10050 = info_ptr->altivec_save_offset - info_ptr->toc_size;
10051 }
10052 else
10053 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
10054 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
10055 info_ptr->lr_save_offset = reg_size;
10056 break;
10057 }
10058
10059 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
10060 + info_ptr->gp_size
10061 + info_ptr->altivec_size
10062 + info_ptr->altivec_padding_size
10063 + info_ptr->spe_gp_size
10064 + info_ptr->spe_padding_size
10065 + ehrd_size
10066 + info_ptr->cr_size
10067 + info_ptr->lr_size
10068 + info_ptr->vrsave_size
10069 + info_ptr->toc_size,
10070 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
10071 ? 16 : 8);
10072
10073 total_raw_size = (info_ptr->vars_size
10074 + info_ptr->parm_size
10075 + info_ptr->save_size
10076 + info_ptr->varargs_size
10077 + info_ptr->fixed_size);
10078
10079 info_ptr->total_size =
10080 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
10081
10082 /* Determine if we need to allocate any stack frame:
10083
10084 For AIX we need to push the stack if a frame pointer is needed
10085 (because the stack might be dynamically adjusted), if we are
10086 debugging, if we make calls, or if the sum of fp_save, gp_save,
10087 and local variables are more than the space needed to save all
10088 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
10089 + 18*8 = 288 (GPR13 reserved).
10090
10091 For V.4 we don't have the stack cushion that AIX uses, but assume
10092 that the debugger can handle stackless frames. */
10093
10094 if (info_ptr->calls_p)
10095 info_ptr->push_p = 1;
10096
10097 else if (DEFAULT_ABI == ABI_V4)
10098 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
10099
10100 else if (frame_pointer_needed)
10101 info_ptr->push_p = 1;
10102
10103 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
10104 info_ptr->push_p = 1;
10105
10106 else
10107 info_ptr->push_p
10108 = total_raw_size - info_ptr->fixed_size > (TARGET_32BIT ? 220 : 288);
10109
10110 /* Zero offsets if we're not saving those registers. */
10111 if (info_ptr->fp_size == 0)
10112 info_ptr->fp_save_offset = 0;
10113
10114 if (info_ptr->gp_size == 0)
10115 info_ptr->gp_save_offset = 0;
10116
10117 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
10118 info_ptr->altivec_save_offset = 0;
10119
10120 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
10121 info_ptr->vrsave_save_offset = 0;
10122
10123 if (! TARGET_SPE_ABI
10124 || info_ptr->spe_64bit_regs_used == 0
10125 || info_ptr->spe_gp_size == 0)
10126 info_ptr->spe_gp_save_offset = 0;
10127
10128 if (! info_ptr->lr_save_p)
10129 info_ptr->lr_save_offset = 0;
10130
10131 if (! info_ptr->cr_save_p)
10132 info_ptr->cr_save_offset = 0;
10133
10134 if (! info_ptr->toc_save_p)
10135 info_ptr->toc_save_offset = 0;
10136
10137 return info_ptr;
10138 }
10139
10140 /* Return true if the current function uses any GPRs in 64-bit SIMD
10141 mode. */
10142
10143 static bool
10144 spe_func_has_64bit_regs_p ()
10145 {
10146 rtx insns, insn;
10147
10148 /* Functions that save and restore all the call-saved registers will
10149 need to save/restore the registers in 64-bits. */
10150 if (current_function_calls_eh_return
10151 || current_function_calls_setjmp
10152 || current_function_has_nonlocal_goto)
10153 return true;
10154
10155 insns = get_insns ();
10156
10157 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
10158 {
10159 if (INSN_P (insn))
10160 {
10161 rtx i;
10162
10163 i = PATTERN (insn);
10164 if (GET_CODE (i) == SET
10165 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
10166 return true;
10167 }
10168 }
10169
10170 return false;
10171 }
10172
/* Dump the stack layout described by INFO to stderr, for debugging.
   If INFO is null, compute the current function's layout first via
   rs6000_stack_info.  Fields with default/zero values are omitted
   from the dump.  */

void
debug_stack_info (info)
     rs6000_stack_t *info;
{
  const char *abi_string;

  if (! info)
    info = rs6000_stack_info ();

  fprintf (stderr, "\nStack information for function %s:\n",
	   ((current_function_decl && DECL_NAME (current_function_decl))
	    ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
	    : "<unknown>"));

  switch (info->abi)
    {
    default:		 abi_string = "Unknown";	break;
    case ABI_NONE:	 abi_string = "NONE";		break;
    case ABI_AIX:	 abi_string = "AIX";		break;
    case ABI_DARWIN:	 abi_string = "Darwin";		break;
    case ABI_V4:	 abi_string = "V.4";		break;
    }

  fprintf (stderr, "\tABI                 = %5s\n", abi_string);

  if (TARGET_ALTIVEC_ABI)
    fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");

  if (TARGET_SPE_ABI)
    fprintf (stderr, "\tSPE ABI extensions enabled.\n");

  /* First-register-to-save fields: only printed when some register of
     the class actually needs saving.  */
  if (info->first_gp_reg_save != 32)
    fprintf (stderr, "\tfirst_gp_reg_save   = %5d\n", info->first_gp_reg_save);

  if (info->first_fp_reg_save != 64)
    fprintf (stderr, "\tfirst_fp_reg_save   = %5d\n", info->first_fp_reg_save);

  if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
    fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
	     info->first_altivec_reg_save);

  if (info->lr_save_p)
    fprintf (stderr, "\tlr_save_p           = %5d\n", info->lr_save_p);

  if (info->cr_save_p)
    fprintf (stderr, "\tcr_save_p           = %5d\n", info->cr_save_p);

  if (info->toc_save_p)
    fprintf (stderr, "\ttoc_save_p          = %5d\n", info->toc_save_p);

  if (info->vrsave_mask)
    fprintf (stderr, "\tvrsave_mask         = 0x%x\n", info->vrsave_mask);

  if (info->push_p)
    fprintf (stderr, "\tpush_p              = %5d\n", info->push_p);

  if (info->calls_p)
    fprintf (stderr, "\tcalls_p             = %5d\n", info->calls_p);

  /* Offsets of the individual save areas (relative to the incoming
     stack pointer; see the frame diagrams above).  */
  if (info->gp_save_offset)
    fprintf (stderr, "\tgp_save_offset      = %5d\n", info->gp_save_offset);

  if (info->fp_save_offset)
    fprintf (stderr, "\tfp_save_offset      = %5d\n", info->fp_save_offset);

  if (info->altivec_save_offset)
    fprintf (stderr, "\taltivec_save_offset = %5d\n",
	     info->altivec_save_offset);

  if (info->spe_gp_save_offset)
    fprintf (stderr, "\tspe_gp_save_offset  = %5d\n",
	     info->spe_gp_save_offset);

  if (info->vrsave_save_offset)
    fprintf (stderr, "\tvrsave_save_offset  = %5d\n",
	     info->vrsave_save_offset);

  if (info->lr_save_offset)
    fprintf (stderr, "\tlr_save_offset      = %5d\n", info->lr_save_offset);

  if (info->cr_save_offset)
    fprintf (stderr, "\tcr_save_offset      = %5d\n", info->cr_save_offset);

  if (info->toc_save_offset)
    fprintf (stderr, "\ttoc_save_offset     = %5d\n", info->toc_save_offset);

  if (info->varargs_save_offset)
    fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);

  /* Sizes of the frame components.  */
  if (info->total_size)
    fprintf (stderr, "\ttotal_size          = %5d\n", info->total_size);

  if (info->varargs_size)
    fprintf (stderr, "\tvarargs_size        = %5d\n", info->varargs_size);

  if (info->vars_size)
    fprintf (stderr, "\tvars_size           = %5d\n", info->vars_size);

  if (info->parm_size)
    fprintf (stderr, "\tparm_size           = %5d\n", info->parm_size);

  if (info->fixed_size)
    fprintf (stderr, "\tfixed_size          = %5d\n", info->fixed_size);

  if (info->gp_size)
    fprintf (stderr, "\tgp_size             = %5d\n", info->gp_size);

  if (info->spe_gp_size)
    fprintf (stderr, "\tspe_gp_size         = %5d\n", info->spe_gp_size);

  if (info->fp_size)
    fprintf (stderr, "\tfp_size             = %5d\n", info->fp_size);

  if (info->altivec_size)
    fprintf (stderr, "\taltivec_size        = %5d\n", info->altivec_size);

  if (info->vrsave_size)
    fprintf (stderr, "\tvrsave_size         = %5d\n", info->vrsave_size);

  if (info->altivec_padding_size)
    fprintf (stderr, "\taltivec_padding_size= %5d\n",
	     info->altivec_padding_size);

  if (info->spe_padding_size)
    fprintf (stderr, "\tspe_padding_size    = %5d\n",
	     info->spe_padding_size);

  if (info->lr_size)
    fprintf (stderr, "\tlr_size             = %5d\n", info->lr_size);

  if (info->cr_size)
    fprintf (stderr, "\tcr_size             = %5d\n", info->cr_size);

  if (info->toc_size)
    fprintf (stderr, "\ttoc_size            = %5d\n", info->toc_size);

  if (info->save_size)
    fprintf (stderr, "\tsave_size           = %5d\n", info->save_size);

  if (info->reg_size != 4)
    fprintf (stderr, "\treg_size            = %5d\n", info->reg_size);

  fprintf (stderr, "\n");
}
10317
10318 rtx
10319 rs6000_return_addr (count, frame)
10320 int count;
10321 rtx frame;
10322 {
10323 /* Currently we don't optimize very well between prolog and body
10324 code and for PIC code the code can be actually quite bad, so
10325 don't try to be too clever here. */
10326 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
10327 {
10328 cfun->machine->ra_needs_full_frame = 1;
10329
10330 return
10331 gen_rtx_MEM
10332 (Pmode,
10333 memory_address
10334 (Pmode,
10335 plus_constant (copy_to_reg
10336 (gen_rtx_MEM (Pmode,
10337 memory_address (Pmode, frame))),
10338 RETURN_ADDRESS_OFFSET)));
10339 }
10340
10341 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
10342 }
10343
10344 /* Say whether a function is a candidate for sibcall handling or not.
10345 We do not allow indirect calls to be optimized into sibling calls.
10346 Also, we can't do it if there are any vector parameters; there's
10347 nowhere to put the VRsave code so it works; note that functions with
10348 vector parameters are required to have a prototype, so the argument
10349 type info must be available here. (The tail recursion case can work
10350 with vector parameters, but there's no way to distinguish here.) */
10351 static bool
10352 rs6000_function_ok_for_sibcall (decl, exp)
10353 tree decl;
10354 tree exp ATTRIBUTE_UNUSED;
10355 {
10356 tree type;
10357 if (decl)
10358 {
10359 if (TARGET_ALTIVEC_VRSAVE)
10360 {
10361 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
10362 type; type = TREE_CHAIN (type))
10363 {
10364 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
10365 return false;
10366 }
10367 }
10368 if (DEFAULT_ABI == ABI_DARWIN
10369 || (*targetm.binds_local_p) (decl))
10370 {
10371 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
10372
10373 if (!lookup_attribute ("longcall", attr_list)
10374 || lookup_attribute ("shortcall", attr_list))
10375 return true;
10376 }
10377 }
10378 return false;
10379 }
10380
/* Return nonzero if the link register may be clobbered by this
   function's body -- i.e. by anything other than the prologue/epilogue
   PIC-register sequence or a sibling call.  Used when deciding whether
   LR must be saved in the prologue.  */

static int
rs6000_ra_ever_killed ()
{
  rtx top;
  rtx reg;
  rtx insn;

  /* Irritatingly, there are two kinds of thunks -- those created with
     TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
     through the regular part of the compiler.  This is a very hacky
     way to tell them apart.  */
  if (current_function_is_thunk && !no_new_pseudos)
    return 0;

  /* regs_ever_live has LR marked as used if any sibcalls are present,
     but this should not force saving and restoring in the
     pro/epilogue.  Likewise, reg_set_between_p thinks a sibcall
     clobbers LR, so that is inappropriate.  */

  /* Also, the prologue can generate a store into LR that
     doesn't really count, like this:

        move LR->R0
        bcl to set PIC register
        move LR->R31
        move R0->LR

     When we're called from the epilogue, we need to avoid counting
     this as a store.  */

  push_topmost_sequence ();
  top = get_insns ();
  pop_topmost_sequence ();
  reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);

  for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	{
	  /* Auto-inc/dec of LR counts as a clobber.  */
	  if (FIND_REG_INC_NOTE (insn, reg))
	    return 1;
	  /* Any non-sibling call clobbers LR.  */
	  else if (GET_CODE (insn) == CALL_INSN
		   && !SIBLING_CALL_P (insn))
	    return 1;
	  /* A direct store to LR counts only outside the
	     prologue/epilogue (see the bcl sequence above).  */
	  else if (set_of (reg, insn) != NULL_RTX
		   && !prologue_epilogue_contains (insn))
	    return 1;
	}
    }
  return 0;
}
10432 \f
10433 /* Add a REG_MAYBE_DEAD note to the insn. */
10434 static void
10435 rs6000_maybe_dead (insn)
10436 rtx insn;
10437 {
10438 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
10439 const0_rtx,
10440 REG_NOTES (insn));
10441 }
10442
/* Emit instructions needed to load the TOC register.
   This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
   a constant pool; or for SVR4 -fpic.  FROMPROLOG is nonzero when
   called while emitting the prologue, in which case LR/r0 are used as
   scratch registers (no new pseudos allowed) and every emitted insn is
   marked REG_MAYBE_DEAD so flow may delete it if the TOC turns out to
   be unused.  */

void
rs6000_emit_load_toc_table (fromprolog)
     int fromprolog;
{
  rtx dest, insn;
  dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);

  /* SVR4 -fpic: a single pc-relative load of the GOT address.  */
  if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
    {
      rtx temp = (fromprolog
		  ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		  : gen_reg_rtx (Pmode));
      insn = emit_insn (gen_load_toc_v4_pic_si (temp));
      if (fromprolog)
	rs6000_maybe_dead (insn);
      insn = emit_move_insn (dest, temp);
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  /* SVR4 -fPIC: compute the TOC address from a bcl-set label plus a
     label-difference constant.  */
  else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
    {
      char buf[30];
      rtx tempLR = (fromprolog
		    ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		    : gen_reg_rtx (Pmode));
      rtx temp0 = (fromprolog
		   ? gen_rtx_REG (Pmode, 0)
		   : gen_reg_rtx (Pmode));
      rtx symF;

      /* possibly create the toc section */
      if (! toc_initialized)
	{
	  toc_section ();
	  function_section (current_function_decl);
	}

      if (fromprolog)
	{
	  rtx symL;

	  /* Label LCF: the point whose address the bcl captures;
	     label LCL: anchors the offset to the TOC.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
	  symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
							       symF)));
	  rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
							       symL,
							       symF)));
	}
      else
	{
	  /* Outside the prologue (e.g. reload needs the TOC), use a
	     fresh LCG label and load the TOC offset from memory.  */
	  rtx tocsym;
	  static int reload_toc_labelno = 0;

	  tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
	  emit_move_insn (dest, tempLR);
	  emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
	}
      insn = emit_insn (gen_addsi3 (dest, temp0, dest));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
    {
      /* This is for AIX code running in non-PIC ELF32.  */
      char buf[30];
      rtx realsym;
      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
      realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

      /* Load the 32-bit TOC address as high/low halves.  */
      insn = emit_insn (gen_elf_high (dest, realsym));
      if (fromprolog)
	rs6000_maybe_dead (insn);
      insn = emit_insn (gen_elf_low (dest, dest, realsym));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else if (DEFAULT_ABI == ABI_AIX)
    {
      /* AIX: reload the TOC pointer from the stack slot where the
	 call sequence saved it.  */
      if (TARGET_32BIT)
	insn = emit_insn (gen_load_toc_aix_si (dest));
      else
	insn = emit_insn (gen_load_toc_aix_di (dest));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else
    abort ();
}
10546
/* Return the alias set used for TOC references, creating it lazily
   on the first call.  */

int
get_TOC_alias_set ()
{
  static int toc_alias_set = -1;

  if (toc_alias_set == -1)
    toc_alias_set = new_alias_set ();

  return toc_alias_set;
}
10555
/* This returns nonzero if the current function uses the TOC.  This is
   determined by the presence of (unspec ... UNSPEC_TOC) or
   use (unspec ... UNSPEC_TOC), which are generated by the various
   load_toc_* patterns.  */
10560
10561 int
10562 uses_TOC ()
10563 {
10564 rtx insn;
10565
10566 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10567 if (INSN_P (insn))
10568 {
10569 rtx pat = PATTERN (insn);
10570 int i;
10571
10572 if (GET_CODE (pat) == PARALLEL)
10573 for (i = 0; i < XVECLEN (pat, 0); i++)
10574 {
10575 rtx sub = XVECEXP (pat, 0, i);
10576 if (GET_CODE (sub) == USE)
10577 {
10578 sub = XEXP (sub, 0);
10579 if (GET_CODE (sub) == UNSPEC
10580 && XINT (sub, 1) == UNSPEC_TOC)
10581 return 1;
10582 }
10583 }
10584 }
10585 return 0;
10586 }
10587
10588 rtx
10589 create_TOC_reference (symbol)
10590 rtx symbol;
10591 {
10592 return gen_rtx_PLUS (Pmode,
10593 gen_rtx_REG (Pmode, TOC_REGISTER),
10594 gen_rtx_CONST (Pmode,
10595 gen_rtx_MINUS (Pmode, symbol,
10596 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
10597 }
10598
10599 /* __throw will restore its own return address to be the same as the
10600 return address of the function that the throw is being made to.
10601 This is unfortunate, because we want to check the original
10602 return address to see if we need to restore the TOC.
10603 So we have to squirrel it away here.
10604 This is used only in compiling __throw and __rethrow.
10605
10606 Most of this code should be removed by CSE. */
10607 static rtx insn_after_throw;
10608
10609 /* This does the saving... */
10610 void
10611 rs6000_aix_emit_builtin_unwind_init ()
10612 {
10613 rtx mem;
10614 rtx stack_top = gen_reg_rtx (Pmode);
10615 rtx opcode_addr = gen_reg_rtx (Pmode);
10616
10617 insn_after_throw = gen_reg_rtx (SImode);
10618
10619 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
10620 emit_move_insn (stack_top, mem);
10621
10622 mem = gen_rtx_MEM (Pmode,
10623 gen_rtx_PLUS (Pmode, stack_top,
10624 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
10625 emit_move_insn (opcode_addr, mem);
10626 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
10627 }
10628
10629 /* Emit insns to _restore_ the TOC register, at runtime (specifically
10630 in _eh.o). Only used on AIX.
10631
10632 The idea is that on AIX, function calls look like this:
10633 bl somefunction-trampoline
10634 lwz r2,20(sp)
10635
10636 and later,
10637 somefunction-trampoline:
10638 stw r2,20(sp)
10639 ... load function address in the count register ...
10640 bctr
10641 or like this, if the linker determines that this is not a cross-module call
10642 and so the TOC need not be restored:
10643 bl somefunction
10644 nop
10645 or like this, if the compiler could determine that this is not a
10646 cross-module call:
   bl somefunction

   Now, the tricky bit here is that register 2 is saved and restored
10649 by the _linker_, so we can't readily generate debugging information
10650 for it. So we need to go back up the call chain looking at the
10651 insns at return addresses to see which calls saved the TOC register
10652 and so see where it gets restored from.
10653
10654 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
10655 just before the actual epilogue.
10656
10657 On the bright side, this incurs no space or time overhead unless an
10658 exception is thrown, except for the extra code in libgcc.a.
10659
10660 The parameter STACKSIZE is a register containing (at runtime)
10661 the amount to be popped off the stack in addition to the stack frame
10662 of this routine (which will be __throw or __rethrow, and so is
10663 guaranteed to have a stack frame). */
10664
void
rs6000_emit_eh_toc_restore (stacksize)
     rtx stacksize;
{
  rtx top_of_stack;
  rtx bottom_of_stack = gen_reg_rtx (Pmode);
  rtx tocompare = gen_reg_rtx (SImode);
  rtx opcode = gen_reg_rtx (SImode);
  rtx opcode_addr = gen_reg_rtx (Pmode);
  rtx mem;
  rtx loop_start = gen_label_rtx ();
  rtx no_toc_restore_needed = gen_label_rtx ();
  rtx loop_exit = gen_label_rtx ();

  /* Start at the caller's frame, reached through the back chain.  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  /* The walk stops once STACKSIZE extra bytes have been popped.  */
  top_of_stack = expand_binop (Pmode, add_optab,
			       bottom_of_stack, stacksize,
			       NULL_RTX, 1, OPTAB_WIDEN);

  /* The opcode of the TOC-restore instruction emitted after a
     cross-module call: "lwz r2,20(r1)" on 32-bit, "ld r2,40(r1)"
     on 64-bit.  */
  emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
					   : 0xE8410028, SImode));

  /* rs6000_aix_emit_builtin_unwind_init must have run earlier.  */
  if (insn_after_throw == NULL_RTX)
    abort ();
  emit_move_insn (opcode, insn_after_throw);

  emit_note (NULL, NOTE_INSN_LOOP_BEG);
  emit_label (loop_start);

  /* If the insn at this frame's return address is not the TOC
     restore, the call did not clobber r2 — skip the reload.  */
  do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
			   SImode, NULL_RTX, NULL_RTX,
			   no_toc_restore_needed);

  /* Reload r2 from this frame's TOC save slot, five words up.  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (5 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (gen_rtx_REG (Pmode, 2), mem);

  emit_label (no_toc_restore_needed);

  /* Done once we have walked up to TOP_OF_STACK.  */
  do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
			   Pmode, NULL_RTX, NULL_RTX,
			   loop_exit);

  /* Step to the next frame via the back chain ...  */
  mem = gen_rtx_MEM (Pmode, bottom_of_stack);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  /* ... and fetch the instruction at its return address (two words
     up), for the next iteration's comparison.  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));

  emit_note (NULL, NOTE_INSN_LOOP_CONT);
  emit_jump (loop_start);
  emit_note (NULL, NOTE_INSN_LOOP_END);
  emit_label (loop_exit);
}
10726 \f
10727 /* This ties together stack memory (MEM with an alias set of
10728 rs6000_sr_alias_set) and the change to the stack pointer. */
10729
10730 static void
10731 rs6000_emit_stack_tie ()
10732 {
10733 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
10734
10735 set_mem_alias_set (mem, rs6000_sr_alias_set);
10736 emit_insn (gen_stack_tie (mem));
10737 }
10738
/* Emit the correct code for allocating stack space, as insns.
   If COPY_R12, make sure a copy of the old frame is left in r12.
   The generated code may use hard register 0 as a temporary.  */

static void
rs6000_emit_allocate_stack (size, copy_r12)
     HOST_WIDE_INT size;
     int copy_r12;
{
  rtx insn;
  rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx tmp_reg = gen_rtx_REG (Pmode, 0);
  rtx todec = GEN_INT (-size);

  /* -fstack-limit support: trap before decrementing if the new stack
     pointer would fall below the limit.  */
  if (current_function_limit_stack)
    {
      if (REG_P (stack_limit_rtx)
	  && REGNO (stack_limit_rtx) > 1
	  && REGNO (stack_limit_rtx) <= 31)
	{
	  /* Limit lives in a register: compute limit+size into r0 and
	     trap if the current sp is below that.  */
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size))
		     : gen_adddi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size)));

	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
	       && TARGET_32BIT
	       && DEFAULT_ABI == ABI_V4)
	{
	  /* Limit is a symbol: materialize symbol+size via the
	     elf_high/elf_low (lis/addi) pair, then trap as above.  */
	  rtx toload = gen_rtx_CONST (VOIDmode,
				      gen_rtx_PLUS (Pmode,
						    stack_limit_rtx,
						    GEN_INT (size)));

	  emit_insn (gen_elf_high (tmp_reg, toload));
	  emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else
	warning ("stack limit expression is not supported");
    }

  /* Without store-with-update the old sp is stored from r12 below,
     so r12 must hold a copy of it in either case.  */
  if (copy_r12 || ! TARGET_UPDATE)
    emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);

  if (TARGET_UPDATE)
    {
      if (size > 32767)
	{
	  /* Need a note here so that try_split doesn't get confused.  */
	  if (get_last_insn() == NULL_RTX)
	    emit_note (0, NOTE_INSN_DELETED);
	  /* The decrement won't fit a 16-bit displacement: load it
	     into r0 and split that load right away.  */
	  insn = emit_move_insn (tmp_reg, todec);
	  try_split (PATTERN (insn), insn, 0);
	  todec = tmp_reg;
	}

      /* One store-with-update: decrement sp and store the old sp
	 (the back chain) in a single insn.  */
      insn = emit_insn (TARGET_32BIT
			? gen_movsi_update (stack_reg, stack_reg,
					    todec, stack_reg)
			: gen_movdi_update (stack_reg, stack_reg,
					    todec, stack_reg));
    }
  else
    {
      /* No update form: decrement sp, then store the old sp (copied
	 into r12 above) at the new top of stack.  */
      insn = emit_insn (TARGET_32BIT
			? gen_addsi3 (stack_reg, stack_reg, todec)
			: gen_adddi3 (stack_reg, stack_reg, todec));
      emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
		      gen_rtx_REG (Pmode, 12));
    }

  /* Describe the net effect, sp = sp - SIZE, for the dwarf2 frame
     machinery, whichever insn sequence was used.  */
  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) =
    gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
		       gen_rtx_SET (VOIDmode, stack_reg,
				    gen_rtx_PLUS (Pmode, stack_reg,
						  GEN_INT (-size))),
		       REG_NOTES (insn));
}
10826
10827 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
10828 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
10829 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
10830 deduce these equivalences by itself so it wasn't necessary to hold
10831 its hand so much. */
10832
10833 static void
10834 rs6000_frame_related (insn, reg, val, reg2, rreg)
10835 rtx insn;
10836 rtx reg;
10837 HOST_WIDE_INT val;
10838 rtx reg2;
10839 rtx rreg;
10840 {
10841 rtx real, temp;
10842
10843 /* copy_rtx will not make unique copies of registers, so we need to
10844 ensure we don't have unwanted sharing here. */
10845 if (reg == reg2)
10846 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10847
10848 if (reg == rreg)
10849 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10850
10851 real = copy_rtx (PATTERN (insn));
10852
10853 if (reg2 != NULL_RTX)
10854 real = replace_rtx (real, reg2, rreg);
10855
10856 real = replace_rtx (real, reg,
10857 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
10858 STACK_POINTER_REGNUM),
10859 GEN_INT (val)));
10860
10861 /* We expect that 'real' is either a SET or a PARALLEL containing
10862 SETs (and possibly other stuff). In a PARALLEL, all the SETs
10863 are important so they all have to be marked RTX_FRAME_RELATED_P. */
10864
10865 if (GET_CODE (real) == SET)
10866 {
10867 rtx set = real;
10868
10869 temp = simplify_rtx (SET_SRC (set));
10870 if (temp)
10871 SET_SRC (set) = temp;
10872 temp = simplify_rtx (SET_DEST (set));
10873 if (temp)
10874 SET_DEST (set) = temp;
10875 if (GET_CODE (SET_DEST (set)) == MEM)
10876 {
10877 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10878 if (temp)
10879 XEXP (SET_DEST (set), 0) = temp;
10880 }
10881 }
10882 else if (GET_CODE (real) == PARALLEL)
10883 {
10884 int i;
10885 for (i = 0; i < XVECLEN (real, 0); i++)
10886 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
10887 {
10888 rtx set = XVECEXP (real, 0, i);
10889
10890 temp = simplify_rtx (SET_SRC (set));
10891 if (temp)
10892 SET_SRC (set) = temp;
10893 temp = simplify_rtx (SET_DEST (set));
10894 if (temp)
10895 SET_DEST (set) = temp;
10896 if (GET_CODE (SET_DEST (set)) == MEM)
10897 {
10898 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10899 if (temp)
10900 XEXP (SET_DEST (set), 0) = temp;
10901 }
10902 RTX_FRAME_RELATED_P (set) = 1;
10903 }
10904 }
10905 else
10906 abort ();
10907
10908 if (TARGET_SPE)
10909 real = spe_synthesize_frame_save (real);
10910
10911 RTX_FRAME_RELATED_P (insn) = 1;
10912 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10913 real,
10914 REG_NOTES (insn));
10915 }
10916
/* Given an SPE frame note, return a PARALLEL of SETs with the
   original note, plus a synthetic register save.  */

static rtx
spe_synthesize_frame_save (real)
     rtx real;
{
  rtx synth, offset, reg, real2;

  /* Only 64-bit (V2SImode) register saves need the companion note;
     anything else passes through unchanged.  */
  if (GET_CODE (real) != SET
      || GET_MODE (SET_SRC (real)) != V2SImode)
    return real;

  /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
     frame related note.  The parallel contains a set of the register
     being saved, and another set to a synthetic register (n+1200).
     This is so we can differentiate between 64-bit and 32-bit saves.
     Words cannot describe this nastiness.  */

  /* The note must be a plain register store to a reg+offset address.  */
  if (GET_CODE (SET_DEST (real)) != MEM
      || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
      || GET_CODE (SET_SRC (real)) != REG)
    abort ();

  /* Transform:
       (set (mem (plus (reg x) (const y)))
	    (reg z))
     into:
       (set (mem (plus (reg x) (const y+4)))
	    (reg z+1200))
  */

  /* REAL2: the architectural register's save, narrowed to SImode.  */
  real2 = copy_rtx (real);
  PUT_MODE (SET_DEST (real2), SImode);
  reg = SET_SRC (real2);
  real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
  synth = copy_rtx (real2);

  /* On big-endian targets REAL2 describes the half at offset+4.  */
  if (BYTES_BIG_ENDIAN)
    {
      offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
      real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
    }

  /* SYNTH: the same save attributed to synthetic register z+1200,
     placed in the other half of the 8-byte slot.  */
  reg = SET_SRC (synth);

  synth = replace_rtx (synth, reg,
		       gen_rtx_REG (SImode, REGNO (reg) + 1200));

  offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
  synth = replace_rtx (synth, offset,
		       GEN_INT (INTVAL (offset)
				+ (BYTES_BIG_ENDIAN ? 0 : 4)));

  /* Both halves are frame-related; order them so each endianness
     lists the lower-offset save first.  */
  RTX_FRAME_RELATED_P (synth) = 1;
  RTX_FRAME_RELATED_P (real2) = 1;
  if (BYTES_BIG_ENDIAN)
    real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
  else
    real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));

  return real;
}
10980
10981 /* Returns an insn that has a vrsave set operation with the
10982 appropriate CLOBBERs. */
10983
10984 static rtx
10985 generate_set_vrsave (reg, info, epiloguep)
10986 rtx reg;
10987 rs6000_stack_t *info;
10988 int epiloguep;
10989 {
10990 int nclobs, i;
10991 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
10992 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
10993
10994 clobs[0]
10995 = gen_rtx_SET (VOIDmode,
10996 vrsave,
10997 gen_rtx_UNSPEC_VOLATILE (SImode,
10998 gen_rtvec (2, reg, vrsave),
10999 30));
11000
11001 nclobs = 1;
11002
11003 /* We need to clobber the registers in the mask so the scheduler
11004 does not move sets to VRSAVE before sets of AltiVec registers.
11005
11006 However, if the function receives nonlocal gotos, reload will set
11007 all call saved registers live. We will end up with:
11008
11009 (set (reg 999) (mem))
11010 (parallel [ (set (reg vrsave) (unspec blah))
11011 (clobber (reg 999))])
11012
11013 The clobber will cause the store into reg 999 to be dead, and
11014 flow will attempt to delete an epilogue insn. In this case, we
11015 need an unspec use/set of the register. */
11016
11017 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
11018 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
11019 {
11020 if (!epiloguep || call_used_regs [i])
11021 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
11022 gen_rtx_REG (V4SImode, i));
11023 else
11024 {
11025 rtx reg = gen_rtx_REG (V4SImode, i);
11026
11027 clobs[nclobs++]
11028 = gen_rtx_SET (VOIDmode,
11029 reg,
11030 gen_rtx_UNSPEC (V4SImode,
11031 gen_rtvec (1, reg), 27));
11032 }
11033 }
11034
11035 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
11036
11037 for (i = 0; i < nclobs; ++i)
11038 XVECEXP (insn, 0, i) = clobs[i];
11039
11040 return insn;
11041 }
11042
11043 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
11044 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
11045
11046 static void
11047 emit_frame_save (frame_reg, frame_ptr, mode, regno, offset, total_size)
11048 rtx frame_reg;
11049 rtx frame_ptr;
11050 enum machine_mode mode;
11051 unsigned int regno;
11052 int offset;
11053 int total_size;
11054 {
11055 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
11056 rtx replacea, replaceb;
11057
11058 int_rtx = GEN_INT (offset);
11059
11060 /* Some cases that need register indexed addressing. */
11061 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
11062 || (TARGET_SPE_ABI
11063 && SPE_VECTOR_MODE (mode)
11064 && !SPE_CONST_OFFSET_OK (offset)))
11065 {
11066 /* Whomever calls us must make sure r11 is available in the
11067 flow path of instructions in the prologue. */
11068 offset_rtx = gen_rtx_REG (Pmode, 11);
11069 emit_move_insn (offset_rtx, int_rtx);
11070
11071 replacea = offset_rtx;
11072 replaceb = int_rtx;
11073 }
11074 else
11075 {
11076 offset_rtx = int_rtx;
11077 replacea = NULL_RTX;
11078 replaceb = NULL_RTX;
11079 }
11080
11081 reg = gen_rtx_REG (mode, regno);
11082 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
11083 mem = gen_rtx_MEM (mode, addr);
11084 set_mem_alias_set (mem, rs6000_sr_alias_set);
11085
11086 insn = emit_move_insn (mem, reg);
11087
11088 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
11089 }
11090
11091 /* Emit an offset memory reference suitable for a frame store, while
11092 converting to a valid addressing mode. */
11093
11094 static rtx
11095 gen_frame_mem_offset (mode, reg, offset)
11096 enum machine_mode mode;
11097 rtx reg;
11098 int offset;
11099 {
11100 rtx int_rtx, offset_rtx;
11101
11102 int_rtx = GEN_INT (offset);
11103
11104 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
11105 {
11106 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11107 emit_move_insn (offset_rtx, int_rtx);
11108 }
11109 else
11110 offset_rtx = int_rtx;
11111
11112 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
11113 }
11114
/* Emit function prologue as insns: allocate the frame, save the
   registers the current function must preserve (AltiVec, VRSAVE,
   FPRs, GPRs, LR, CR), set up the frame pointer if needed, and load
   the TOC/PIC register when required.  Frame layout comes from
   rs6000_stack_info ().  */

void
rs6000_emit_prologue ()
{
  rs6000_stack_t *info = rs6000_stack_info ();
  enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
  int reg_size = TARGET_POWERPC64 ? 8 : 4;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
  rtx frame_reg_rtx = sp_reg_rtx;
  rtx cr_save_rtx = NULL;
  rtx insn;
  int saving_FPRs_inline;
  int using_store_multiple;
  HOST_WIDE_INT sp_offset = 0;

  /* With the SPE ABI, 64-bit GPR contents are saved as V2SImode.  */
  if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
    {
      reg_mode = V2SImode;
      reg_size = 8;
    }

  using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			  && (!TARGET_SPE_ABI
			      || info->spe_64bit_regs_used == 0)
			  && info->first_gp_reg_save < 31);
  saving_FPRs_inline = (info->first_fp_reg_save == 64
			|| FP_SAVE_INLINE (info->first_fp_reg_save));

  /* For V.4, update stack before we do any saving and set back pointer.  */
  if (info->push_p && DEFAULT_ABI == ABI_V4)
    {
      /* Small frames address the save area off the new sp with
	 SP_OFFSET; large frames keep the old sp in r12 and address
	 the saves off that instead.  */
      if (info->total_size < 32767)
	sp_offset = info->total_size;
      else
	frame_reg_rtx = frame_ptr_rtx;
      rs6000_emit_allocate_stack (info->total_size,
				  (frame_reg_rtx != sp_reg_rtx
				   && (info->cr_save_p
				       || info->lr_save_p
				       || info->first_fp_reg_save < 64
				       || info->first_gp_reg_save < 32
				       )));
      if (frame_reg_rtx != sp_reg_rtx)
	rs6000_emit_stack_tie ();
    }

  /* Save AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      /* There should be a non inline version of this, for when we
	 are saving lots of vector registers.  */
      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx areg, savereg, mem;
	    int offset;

	    offset = info->altivec_save_offset + sp_offset
	      + 16 * (i - info->first_altivec_reg_save);

	    savereg = gen_rtx_REG (V4SImode, i);

	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn (areg, GEN_INT (offset));

	    /* AltiVec addressing mode is [reg+reg].  */
	    mem = gen_rtx_MEM (V4SImode,
			       gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));

	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    insn = emit_move_insn (mem, savereg);

	    /* In the frame note, show the constant offset rather
	       than the r0 temporary that holds it.  */
	    rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				  areg, GEN_INT (offset));
	  }
    }

  /* VRSAVE is a bit vector representing which AltiVec registers
     are used.  The OS uses this to determine which vector
     registers to save on a context switch.  We need to save
     VRSAVE on the stack frame, add whatever AltiVec registers we
     used in this function, and do the corresponding magic in the
     epilogue.  */

  if (TARGET_ALTIVEC && info->vrsave_mask != 0)
    {
      rtx reg, mem, vrsave;
      int offset;

      /* Get VRSAVE onto a GPR.  */
      reg = gen_rtx_REG (SImode, 12);
      vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
      if (TARGET_MACHO)
	emit_insn (gen_get_vrsave_internal (reg));
      else
	emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));

      /* Save VRSAVE.  */
      offset = info->vrsave_save_offset + sp_offset;
      mem
	= gen_rtx_MEM (SImode,
		       gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      insn = emit_move_insn (mem, reg);

      /* Include the registers in the mask.  */
      emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));

      insn = emit_insn (generate_set_vrsave (reg, info, 0));
    }

  /* If we use the link register, get it into r0.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, 0),
		    gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));

  /* If we need to save CR, put it into r12.  */
  if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
    {
      cr_save_rtx = gen_rtx_REG (SImode, 12);
      emit_insn (gen_movesi_from_cr (cr_save_rtx));
    }

  /* Do any required saving of fpr's.  If only one or two to save, do
     it ourselves.  Otherwise, call function.  */
  if (saving_FPRs_inline)
    {
      int i;
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	if ((regs_ever_live[info->first_fp_reg_save+i]
	     && ! call_used_regs[info->first_fp_reg_save+i]))
	  emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
			   info->first_fp_reg_save + i,
			   info->fp_save_offset + sp_offset + 8 * i,
			   info->total_size);
    }
  else if (info->first_fp_reg_save != 64)
    {
      int i;
      char rname[30];
      const char *alloc_rname;
      rtvec p;
      p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);

      /* Out-of-line save: call the _savef* routine (which clobbers
	 LR) and list every FPR store in the same PARALLEL.  */
      RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
					  gen_rtx_REG (Pmode,
						       LINK_REGISTER_REGNUM));
      sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
	       info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
      alloc_rname = ggc_strdup (rname);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_SYMBOL_REF (Pmode,
							  alloc_rname));
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	{
	  rtx addr, reg, mem;
	  reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset + 8*i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
	}
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    NULL_RTX, NULL_RTX);
    }

  /* Save GPRs.  This is done as a PARALLEL if we are using
     the store-multiple instructions.  */
  if (using_store_multiple)
    {
      rtvec p;
      int i;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr, reg, mem;
	  reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->gp_save_offset
					+ sp_offset
					+ reg_size * i));
	  mem = gen_rtx_MEM (reg_mode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
	}
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    NULL_RTX, NULL_RTX);
    }
  else
    {
      int i;
      /* Individual stores: a GPR is saved if it is live and
	 call-saved, or if it is the PIC offset table register and
	 this function is compiled PIC.  */
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	if ((regs_ever_live[info->first_gp_reg_save+i]
	     && ! call_used_regs[info->first_gp_reg_save+i])
	    || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
		&& ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	  {
	    rtx addr, reg, mem;
	    reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);

	    if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
	      {
		int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
		rtx b;

		/* SPE stores need the offset in a register unless it
		   fits the instruction's displacement field.  */
		if (!SPE_CONST_OFFSET_OK (offset))
		  {
		    b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
		    emit_move_insn (b, GEN_INT (offset));
		  }
		else
		  b = GEN_INT (offset);

		addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
		mem = gen_rtx_MEM (V2SImode, addr);
		set_mem_alias_set (mem, rs6000_sr_alias_set);
		insn = emit_move_insn (mem, reg);

		/* When a scratch register held the offset, have the
		   frame note show the constant instead.  */
		if (GET_CODE (b) == CONST_INT)
		  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
					NULL_RTX, NULL_RTX);
		else
		  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
					b, GEN_INT (offset));
	      }
	    else
	      {
		addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				     GEN_INT (info->gp_save_offset
					      + sp_offset
					      + reg_size * i));
		mem = gen_rtx_MEM (reg_mode, addr);
		set_mem_alias_set (mem, rs6000_sr_alias_set);

		insn = emit_move_insn (mem, reg);
		rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				      NULL_RTX, NULL_RTX);
	      }
	  }
    }

  /* ??? There's no need to emit actual instructions here, but it's the
     easiest way to get the frame unwind information emitted.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      for (i = 0; ; ++i)
	{
	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
			   info->ehrd_offset + sp_offset
			   + reg_size * (int) i,
			   info->total_size);
	}
    }

  /* Save lr if we used it.  It was copied into r0 above.  */
  if (info->lr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->lr_save_offset + sp_offset));
      rtx reg = gen_rtx_REG (Pmode, 0);
      rtx mem = gen_rtx_MEM (Pmode, addr);
      /* This should not be of rs6000_sr_alias_set, because of
	 __builtin_return_address.  */

      insn = emit_move_insn (mem, reg);
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
    }

  /* Save CR if we use any that must be preserved.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      /* If r12 was used to hold the original sp, copy cr into r0 now
	 that it's free.  */
      if (REGNO (frame_reg_rtx) == 12)
	{
	  cr_save_rtx = gen_rtx_REG (SImode, 0);
	  emit_insn (gen_movesi_from_cr (cr_save_rtx));
	}
      insn = emit_move_insn (mem, cr_save_rtx);

      /* Now, there's no way that dwarf2out_frame_debug_expr is going
	 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
	 But that's OK.  All we have to do is specify that _one_ condition
	 code register is saved in this stack slot.  The thrower's epilogue
	 will then restore all the call-saved registers.
	 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux.  */
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
    }

  /* Update stack and set back pointer unless this is V.4,
     for which it was done previously.  */
  if (info->push_p && DEFAULT_ABI != ABI_V4)
    rs6000_emit_allocate_stack (info->total_size, FALSE);

  /* Set frame pointer, if needed.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
			     sp_reg_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up.  */
  if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
      || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
	  && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
    {
      /* If emit_load_toc_table will use the link register, we need to save
	 it.  We use R12 for this purpose because emit_load_toc_table
	 can use register 0.  This allows us to use a plain 'blr' to return
	 from the procedure more often.  */
      int save_LR_around_toc_setup = (TARGET_ELF
				      && DEFAULT_ABI != ABI_AIX
				      && flag_pic
				      && ! info->lr_save_p
				      && EXIT_BLOCK_PTR->pred != NULL);
      if (save_LR_around_toc_setup)
	{
	  rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
	  rs6000_maybe_dead (emit_move_insn (frame_ptr_rtx, lr));
	  rs6000_emit_load_toc_table (TRUE);
	  rs6000_maybe_dead (emit_move_insn (lr, frame_ptr_rtx));
	}
      else
	rs6000_emit_load_toc_table (TRUE);
    }

#if TARGET_MACHO
  /* On Darwin, load the PIC base (the current function's picbase
     label) into the PIC offset table register via LR.  */
  if (DEFAULT_ABI == ABI_DARWIN
      && flag_pic && current_function_uses_pic_offset_table)
    {
      rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
      const char *picbase = machopic_function_base_name ();
      rtx src = gen_rtx_SYMBOL_REF (Pmode, picbase);

      rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));

      rs6000_maybe_dead (
	emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
			gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
    }
#endif
}
11484
/* Write function prologue to FILE: emit the .extern directives the
   prologue depends on and, when there is no `prologue' insn pattern,
   generate and print the prologue RTL directly.  SIZE is unused.  */

static void
rs6000_output_function_prologue (file, size)
     FILE *file;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  rs6000_stack_t *info = rs6000_stack_info ();

  if (TARGET_DEBUG_STACK)
    debug_stack_info (info);

  /* Write .extern for any function we will call to save and restore
     fp values.  */
  if (info->first_fp_reg_save < 64
      && !FP_SAVE_INLINE (info->first_fp_reg_save))
    fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
	     SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
	     RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
	     RESTORE_FP_SUFFIX);

  /* Write .extern for AIX common mode routines, if needed.  Done at
     most once per translation unit (COMMON_MODE_DEFINED latches).  */
  if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
    {
      fputs ("\t.extern __mulh\n", file);
      fputs ("\t.extern __mull\n", file);
      fputs ("\t.extern __divss\n", file);
      fputs ("\t.extern __divus\n", file);
      fputs ("\t.extern __quoss\n", file);
      fputs ("\t.extern __quous\n", file);
      common_mode_defined = 1;
    }

  /* No `prologue' insn pattern: build the prologue RTL in a sequence
     and print it here through final ().  */
  if (! HAVE_prologue)
    {
      start_sequence ();

      /* A NOTE_INSN_DELETED is supposed to be at the start and end of
	 the "toplevel" insn chain.  */
      emit_note (0, NOTE_INSN_DELETED);
      rs6000_emit_prologue ();
      emit_note (0, NOTE_INSN_DELETED);

      /* Expand INSN_ADDRESSES so final() doesn't crash.  */
      {
	rtx insn;
	unsigned addr = 0;
	for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	  {
	    INSN_ADDRESSES_NEW (insn, addr);
	    addr += 4;
	  }
      }

      if (TARGET_DEBUG_STACK)
	debug_rtx_list (get_insns (), 100);
      final (get_insns (), file, FALSE, FALSE);
      end_sequence ();
    }

  /* Advance the counter used to name the per-function PIC labels.  */
  rs6000_pic_labelno++;
}
11547
/* Emit function epilogue as insns.

   SIBCALL is nonzero when the epilogue precedes a sibling call, in
   which case no return insn is emitted here.

   At present, dwarf2out_frame_debug_expr doesn't understand
   register restores, so we don't bother setting RTX_FRAME_RELATED_P
   anywhere in the epilogue.  Most of the insns below would in any case
   need special notes to explain where r11 is in relation to the stack.  */

void
rs6000_emit_epilogue (sibcall)
     int sibcall;
{
  rs6000_stack_t *info;
  int restoring_FPRs_inline;
  int using_load_multiple;
  int using_mfcr_multiple;
  int use_backchain_to_restore_sp;
  int sp_offset = 0;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);	/* r1 is the stack pointer.  */
  rtx frame_reg_rtx = sp_reg_rtx;
  enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
  int reg_size = TARGET_POWERPC64 ? 8 : 4;
  int i;

  info = rs6000_stack_info ();

  /* SPE saves GPRs as 64-bit V2SI values even in 32-bit mode.  */
  if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
    {
      reg_mode = V2SImode;
      reg_size = 8;
    }

  using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			 && (!TARGET_SPE_ABI
			     || info->spe_64bit_regs_used == 0)
			 && info->first_gp_reg_save < 31);
  restoring_FPRs_inline = (sibcall
			   || current_function_calls_eh_return
			   || info->first_fp_reg_save == 64
			   || FP_SAVE_INLINE (info->first_fp_reg_save));
  use_backchain_to_restore_sp = (frame_pointer_needed
				 || current_function_calls_alloca
				 || info->total_size > 32767);
  using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
			 || rs6000_cpu == PROCESSOR_PPC603
			 || rs6000_cpu == PROCESSOR_PPC750
			 || optimize_size);

  /* If we have a frame pointer, a call to alloca, or a large stack
     frame, restore the old stack pointer using the backchain.  Otherwise,
     we know what size to update it with.  */
  if (use_backchain_to_restore_sp)
    {
      /* Under V.4, don't reset the stack pointer until after we're done
	 loading the saved registers.  */
      if (DEFAULT_ABI == ABI_V4)
	frame_reg_rtx = gen_rtx_REG (Pmode, 11);

      emit_move_insn (frame_reg_rtx,
		      gen_rtx_MEM (Pmode, sp_reg_rtx));

    }
  else if (info->push_p)
    {
      if (DEFAULT_ABI == ABI_V4)
	/* V.4: fold the frame size into the restore offsets instead of
	   bumping sp here; sp is adjusted after all loads below.  */
	sp_offset = info->total_size;
      else
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size)));
	}
    }

  /* Restore AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx addr, areg, mem;

	    /* r0 is used as a scratch to hold the save-area offset.  */
	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn
	      (areg, GEN_INT (info->altivec_save_offset
			      + sp_offset
			      + 16 * (i - info->first_altivec_reg_save)));

	    /* AltiVec addressing mode is [reg+reg].  */
	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
	    mem = gen_rtx_MEM (V4SImode, addr);
	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
	  }
    }

  /* Restore VRSAVE if needed (loaded through r12 and moved with a
     special pattern, since VRSAVE is not directly addressable).  */
  if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
    {
      rtx addr, mem, reg;

      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			   GEN_INT (info->vrsave_save_offset + sp_offset));
      mem = gen_rtx_MEM (SImode, addr);
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      reg = gen_rtx_REG (SImode, 12);
      emit_move_insn (reg, mem);

      emit_insn (generate_set_vrsave (reg, info, 1));
    }

  /* Get the old lr if we saved it.  Loaded into r0 first; moved to LR
     a bit later so the load and the mtlr can overlap other restores.  */
  if (info->lr_save_p)
    {
      rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
				      info->lr_save_offset + sp_offset);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
    }

  /* Get the old cr if we saved it.  Loaded into r12; the actual mtcrf
     is done after the GPR/FPR restores below.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      emit_move_insn (gen_rtx_REG (SImode, 12), mem);
    }

  /* Set LR here to try to overlap restores below.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
		    gen_rtx_REG (Pmode, 0));

  /* Load exception handler data registers, if needed.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      for (i = 0; ; ++i)
	{
	  rtx mem;

	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
				      info->ehrd_offset + sp_offset
				      + reg_size * (int) i);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
	}
    }

  /* Restore GPRs.  This is done as a PARALLEL if we are using
     the load-multiple instructions.  */
  if (using_load_multiple)
    {
      rtvec p;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) =
	    gen_rtx_SET (VOIDmode,
			 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
			 mem);
	}
      emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
  else
    /* Otherwise restore individually.  The PIC register is restored
       even when not live per regs_ever_live, for PIC ABIs.  */
    for (i = 0; i < 32 - info->first_gp_reg_save; i++)
      if ((regs_ever_live[info->first_gp_reg_save+i]
	   && ! call_used_regs[info->first_gp_reg_save+i])
	  || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
	      && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
		  || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  /* Restore 64-bit quantities for SPE.  */
	  if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
	    {
	      int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
	      rtx b;

	      if (!SPE_CONST_OFFSET_OK (offset))
		{
		  b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
		  emit_move_insn (b, GEN_INT (offset));
		}
	      else
		b = GEN_INT (offset);

	      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
	      mem = gen_rtx_MEM (V2SImode, addr);
	    }

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode,
				       info->first_gp_reg_save + i), mem);
	}

  /* Restore fpr's if we need to do it without calling a function.  */
  if (restoring_FPRs_inline)
    for (i = 0; i < 64 - info->first_fp_reg_save; i++)
      if ((regs_ever_live[info->first_fp_reg_save+i]
	   && ! call_used_regs[info->first_fp_reg_save+i]))
	{
	  rtx addr, mem;
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset
					+ 8 * i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (DFmode,
				       info->first_fp_reg_save + i),
			  mem);
	}

  /* If we saved cr, restore it here.  Just those that were used.  */
  if (info->cr_save_p)
    {
      rtx r12_rtx = gen_rtx_REG (SImode, 12);
      int count = 0;

      if (using_mfcr_multiple)
	{
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      count++;
	  if (count == 0)
	    abort ();
	}

      if (using_mfcr_multiple && count > 1)
	{
	  /* One mtcrf restoring several CR fields at once.  */
	  rtvec p;
	  int ndx;

	  p = rtvec_alloc (count);

	  ndx = 0;
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      {
		rtvec r = rtvec_alloc (2);
		RTVEC_ELT (r, 0) = r12_rtx;
		RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
		RTVEC_ELT (p, ndx) =
		  gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
			       gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
		ndx++;
	      }
	  emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
	  if (ndx != count)
	    abort ();
	}
      else
	/* Otherwise one mtcrf per live CR field.  */
	for (i = 0; i < 8; i++)
	  if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	    {
	      emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
							   CR0_REGNO+i),
					      r12_rtx));
	    }
    }

  /* If this is V.4, unwind the stack pointer after all of the loads
     have been done.  We need to emit a block here so that sched
     doesn't decide to move the sp change before the register restores
     (which may not have any obvious dependency on the stack).  This
     doesn't hurt performance, because there is no scheduling that can
     be done after this point.  */
  if (DEFAULT_ABI == ABI_V4)
    {
      if (frame_reg_rtx != sp_reg_rtx)
	rs6000_emit_stack_tie ();

      if (use_backchain_to_restore_sp)
	{
	  emit_move_insn (sp_reg_rtx, frame_reg_rtx);
	}
      else if (sp_offset != 0)
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset)));
	}
    }

  /* Apply the EH return stack adjustment, if any.  */
  if (current_function_calls_eh_return)
    {
      rtx sa = EH_RETURN_STACKADJ_RTX;
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
		 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
    }

  /* Emit the return itself (unless a sibcall follows).  */
  if (!sibcall)
    {
      rtvec p;
      if (! restoring_FPRs_inline)
	p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
      else
	p = rtvec_alloc (2);

      RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_REG (Pmode,
						   LINK_REGISTER_REGNUM));

      /* If we have to restore more than two FP registers, branch to the
	 restore function.  It will return to our caller.  */
      if (! restoring_FPRs_inline)
	{
	  int i;
	  char rname[30];
	  const char *alloc_rname;

	  sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
		   info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
	  alloc_rname = ggc_strdup (rname);
	  RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
					  gen_rtx_SYMBOL_REF (Pmode,
							      alloc_rname));

	  /* Record the FPR restores the out-of-line routine performs,
	     so the RTL reflects the full effect of the jump.  */
	  for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	    {
	      rtx addr, mem;
	      addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
				   GEN_INT (info->fp_save_offset + 8*i));
	      mem = gen_rtx_MEM (DFmode, addr);
	      set_mem_alias_set (mem, rs6000_sr_alias_set);

	      RTVEC_ELT (p, i+3) =
		gen_rtx_SET (VOIDmode,
			     gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
			     mem);
	    }
	}

      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
}
11920
/* Write function epilogue.

   Output the assembly epilogue for the current function to FILE.
   SIZE is unused; frame data comes from rs6000_stack_info ().  When
   epilogue insn patterns are unavailable, the epilogue RTL is
   generated and printed here.  On AIX-style ABIs, this also emits the
   traceback table that follows the function body.  */

static void
rs6000_output_function_epilogue (file, size)
     FILE *file;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  rs6000_stack_t *info = rs6000_stack_info ();

  if (! HAVE_epilogue)
    {
      rtx insn = get_last_insn ();
      /* If the last insn was a BARRIER, we don't have to write anything except
	 the trace table.  */
      if (GET_CODE (insn) == NOTE)
	insn = prev_nonnote_insn (insn);
      if (insn == 0 ||  GET_CODE (insn) != BARRIER)
	{
	  /* This is slightly ugly, but at least we don't have two
	     copies of the epilogue-emitting code.  */
	  start_sequence ();

	  /* A NOTE_INSN_DELETED is supposed to be at the start
	     and end of the "toplevel" insn chain.  */
	  emit_note (0, NOTE_INSN_DELETED);
	  rs6000_emit_epilogue (FALSE);
	  emit_note (0, NOTE_INSN_DELETED);

	  /* Expand INSN_ADDRESSES so final() doesn't crash.  The
	     4-byte step matches the fixed PowerPC instruction size.  */
	  {
	    rtx insn;
	    unsigned addr = 0;
	    for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	      {
		INSN_ADDRESSES_NEW (insn, addr);
		addr += 4;
	      }
	  }

	  if (TARGET_DEBUG_STACK)
	    debug_rtx_list (get_insns (), 100);
	  final (get_insns (), file, FALSE, FALSE);
	  end_sequence ();
	}
    }

  /* Output a traceback table here.  See /usr/include/sys/debug.h for info
     on its format.

     We don't output a traceback table if -finhibit-size-directive was
     used.  The documentation for -finhibit-size-directive reads
     ``don't output a @code{.size} assembler directive, or anything
     else that would cause trouble if the function is split in the
     middle, and the two halves are placed at locations far apart in
     memory.''  The traceback table has this property, since it
     includes the offset from the start of the function to the
     traceback table itself.

     System V.4 Powerpc's (and the embedded ABI derived from it) use a
     different traceback table.  */
  if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
      && rs6000_traceback != traceback_none)
    {
      const char *fname = NULL;
      const char *language_string = lang_hooks.name;
      int fixed_parms = 0, float_parms = 0, parm_info = 0;
      int i;
      int optional_tbtab;	/* Nonzero to emit the optional fields too.  */

      if (rs6000_traceback == traceback_full)
	optional_tbtab = 1;
      else if (rs6000_traceback == traceback_part)
	optional_tbtab = 0;
      else
	optional_tbtab = !optimize_size && !TARGET_ELF;

      if (optional_tbtab)
	{
	  fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
	  while (*fname == '.')	/* V.4 encodes . in the name */
	    fname++;

	  /* Need label immediately before tbtab, so we can compute
	     its offset from the function start.  */
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
	  ASM_OUTPUT_LABEL (file, fname);
	}

      /* The .tbtab pseudo-op can only be used for the first eight
	 expressions, since it can't handle the possibly variable
	 length fields that follow.  However, if you omit the optional
	 fields, the assembler outputs zeros for all optional fields
	 anyways, giving each variable length field its minimum length
	 (as defined in sys/debug.h).  Thus we can not use the .tbtab
	 pseudo-op at all.  */

      /* An all-zero word flags the start of the tbtab, for debuggers
	 that have to find it by searching forward from the entry
	 point or from the current pc.  */
      fputs ("\t.long 0\n", file);

      /* Tbtab format type.  Use format type 0.  */
      fputs ("\t.byte 0,", file);

      /* Language type.  Unfortunately, there doesn't seem to be any
	 official way to get this info, so we use language_string.  C
	 is 0.  C++ is 9.  No number defined for Obj-C, so use the
	 value for C for now.  There is no official value for Java,
	 although IBM appears to be using 13.  There is no official value
	 for Chill, so we've chosen 44 pseudo-randomly.  */
      if (! strcmp (language_string, "GNU C")
	  || ! strcmp (language_string, "GNU Objective-C"))
	i = 0;
      else if (! strcmp (language_string, "GNU F77"))
	i = 1;
      else if (! strcmp (language_string, "GNU Ada"))
	i = 3;
      else if (! strcmp (language_string, "GNU Pascal"))
	i = 2;
      else if (! strcmp (language_string, "GNU C++"))
	i = 9;
      else if (! strcmp (language_string, "GNU Java"))
	i = 13;
      else if (! strcmp (language_string, "GNU CHILL"))
	i = 44;
      else
	abort ();
      fprintf (file, "%d,", i);

      /* 8 single bit fields: global linkage (not set for C extern linkage,
	 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
	 from start of procedure stored in tbtab, internal function, function
	 has controlled storage, function has no toc, function uses fp,
	 function logs/aborts fp operations.  */
      /* Assume that fp operations are used if any fp reg must be saved.  */
      fprintf (file, "%d,",
	       (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));

      /* 6 bitfields: function is interrupt handler, name present in
	 proc table, function calls alloca, on condition directives
	 (controls stack walks, 3 bits), saves condition reg, saves
	 link reg.  */
      /* The `function calls alloca' bit seems to be set whenever reg 31 is
	 set up as a frame pointer, even when there is no alloca call.  */
      fprintf (file, "%d,",
	       ((optional_tbtab << 6)
		| ((optional_tbtab & frame_pointer_needed) << 5)
		| (info->cr_save_p << 1)
		| (info->lr_save_p)));

      /* 3 bitfields: saves backchain, fixup code, number of fpr saved
	 (6 bits).  */
      fprintf (file, "%d,",
	       (info->push_p << 7) | (64 - info->first_fp_reg_save));

      /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits).  */
      fprintf (file, "%d,", (32 - first_reg_to_save ()));

      if (optional_tbtab)
	{
	  /* Compute the parameter info from the function decl argument
	     list.  */
	  tree decl;
	  int next_parm_info_bit = 31;

	  for (decl = DECL_ARGUMENTS (current_function_decl);
	       decl; decl = TREE_CHAIN (decl))
	    {
	      rtx parameter = DECL_INCOMING_RTL (decl);
	      enum machine_mode mode = GET_MODE (parameter);

	      if (GET_CODE (parameter) == REG)
		{
		  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
		    {
		      int bits;

		      float_parms++;

		      if (mode == SFmode)
			bits = 0x2;
		      else if (mode == DFmode || mode == TFmode)
			bits = 0x3;
		      else
			abort ();

		      /* If only one bit will fit, don't or in this entry.  */
		      if (next_parm_info_bit > 0)
			parm_info |= (bits << (next_parm_info_bit - 1));
		      next_parm_info_bit -= 2;
		    }
		  else
		    {
		      fixed_parms += ((GET_MODE_SIZE (mode)
				       + (UNITS_PER_WORD - 1))
				      / UNITS_PER_WORD);
		      next_parm_info_bit -= 1;
		    }
		}
	    }
	}

      /* Number of fixed point parameters.  */
      /* This is actually the number of words of fixed point parameters; thus
	 an 8 byte struct counts as 2; and thus the maximum value is 8.  */
      fprintf (file, "%d,", fixed_parms);

      /* 2 bitfields: number of floating point parameters (7 bits), parameters
	 all on stack.  */
      /* This is actually the number of fp registers that hold parameters;
	 and thus the maximum value is 13.  */
      /* Set parameters on stack bit if parameters are not in their original
	 registers, regardless of whether they are on the stack?  Xlc
	 seems to set the bit when not optimizing.  */
      fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));

      if (! optional_tbtab)
	return;

      /* Optional fields follow.  Some are variable length.  */

      /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
	 11 double float.  */
      /* There is an entry for each parameter in a register, in the order that
	 they occur in the parameter list.  Any intervening arguments on the
	 stack are ignored.  If the list overflows a long (max possible length
	 34 bits) then completely leave off all elements that don't fit.  */
      /* Only emit this long if there was at least one parameter.  */
      if (fixed_parms || float_parms)
	fprintf (file, "\t.long %d\n", parm_info);

      /* Offset from start of code to tb table.  */
      fputs ("\t.long ", file);
      ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      fputs ("-.", file);
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      putc ('\n', file);

      /* Interrupt handler mask.  */
      /* Omit this long, since we never set the interrupt handler bit
	 above.  */

      /* Number of CTL (controlled storage) anchors.  */
      /* Omit this long, since the has_ctl bit is never set above.  */

      /* Displacement into stack of each CTL anchor.  */
      /* Omit this list of longs, because there are no CTL anchors.  */

      /* Length of function name.  */
      if (*fname == '*')
	++fname;
      fprintf (file, "\t.short %d\n", (int) strlen (fname));

      /* Function name.  */
      assemble_string (fname, strlen (fname));

      /* Register for alloca automatic storage; this is always reg 31.
	 Only emit this if the alloca bit was set above.  */
      if (frame_pointer_needed)
	fputs ("\t.byte 31\n", file);

      fputs ("\t.align 2\n", file);
    }
}
12194 \f
/* A C compound statement that outputs the assembler code for a thunk
   function, used to implement C++ virtual function calls with
   multiple inheritance.  The thunk acts as a wrapper around a virtual
   function, adjusting the implicit object parameter before handing
   control off to the real function.

   First, emit code to add the integer DELTA to the location that
   contains the incoming first argument.  Assume that this argument
   contains a pointer, and is the one used to pass the `this' pointer
   in C++.  This is the incoming argument *before* the function
   prologue, e.g. `%o0' on a sparc.  The addition must preserve the
   values of all other incoming arguments.

   After the addition, emit code to jump to FUNCTION, which is a
   `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does
   not touch the return address.  Hence returning from FUNCTION will
   return to whoever called the current `thunk'.

   The effect must be as if FUNCTION had been called directly with the
   adjusted first argument.  This macro is responsible for emitting
   all of the code for a thunk function; output_function_prologue()
   and output_function_epilogue() are not invoked.

   The THUNK_FNDECL is redundant.  (DELTA and FUNCTION have already
   been extracted from it.)  It might possibly be useful on some
   targets, but probably not.

   If you do not define this macro, the target-independent code in the
   C++ frontend will generate a less efficient heavyweight thunk that
   calls FUNCTION instead of jumping to it.  The generic approach does
   not support varargs.  */

static void
rs6000_output_mi_thunk (file, thunk_fndecl, delta, vcall_offset, function)
     FILE *file;
     tree thunk_fndecl ATTRIBUTE_UNUSED;
     HOST_WIDE_INT delta;
     HOST_WIDE_INT vcall_offset;
     tree function;
{
  /* NOTE: `this' is a valid identifier here because this file is
     compiled as C; it would clash with the C++ keyword.  */
  rtx this, insn, funexp;

  /* Pretend reload has finished: the thunk is built from hard
     registers only, with no new pseudos allowed.  */
  reload_completed = 1;
  no_new_pseudos = 1;

  /* Mark the end of the (empty) prologue.  */
  emit_note (NULL, NOTE_INSN_PROLOGUE_END);

  /* Find the "this" pointer.  If the function returns a structure,
     the structure return pointer is in r3, so "this" is in r4.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
    this = gen_rtx_REG (Pmode, 4);
  else
    this = gen_rtx_REG (Pmode, 3);

  /* Apply the constant offset, if required.  */
  if (delta)
    {
      rtx delta_rtx = GEN_INT (delta);
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (this, this, delta_rtx)
		 : gen_adddi3 (this, this, delta_rtx));
    }

  /* Apply the offset from the vtable, if required: load the vtable
     pointer, load the offset at VCALL_OFFSET within it, and add it
     to "this".  r12 is used as a scratch register.  */
  if (vcall_offset)
    {
      rtx vcall_offset_rtx = GEN_INT (vcall_offset);
      rtx tmp = gen_rtx_REG (Pmode, 12);

      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
		 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (this, this, tmp)
		 : gen_adddi3 (this, this, tmp));
    }

  /* Generate a tail call to the target function.  */
  if (!TREE_USED (function))
    {
      assemble_external (function);
      TREE_USED (function) = 1;
    }
  funexp = XEXP (DECL_RTL (function), 0);
  /* Clear the local flag so the call is not shortened into a
     local branch form.  */
  SYMBOL_REF_FLAGS (funexp) &= ~SYMBOL_FLAG_LOCAL;
  funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);

#if TARGET_MACHO
  if (MACHOPIC_INDIRECT)
    funexp = machopic_indirect_call_target (funexp);
#endif

  /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
     generate sibcall RTL explicitly to avoid constraint abort.  */
  insn = emit_call_insn (
	   gen_rtx_PARALLEL (VOIDmode,
	     gen_rtvec (4,
			gen_rtx_CALL (VOIDmode,
				      funexp, const0_rtx),
			gen_rtx_USE (VOIDmode, const0_rtx),
			gen_rtx_USE (VOIDmode,
				     gen_rtx_REG (SImode,
						  LINK_REGISTER_REGNUM)),
			gen_rtx_RETURN (VOIDmode))));
  SIBLING_CALL_P (insn) = 1;
  emit_barrier ();

  /* Run just enough of rest_of_compilation to get the insns emitted.
     There's not really enough bulk here to make other passes such as
     instruction scheduling worth while.  Note that use_thunk calls
     assemble_start_function and assemble_end_function.  */
  insn = get_insns ();
  insn_locators_initialize ();
  shorten_branches (insn);
  final_start_function (insn, file, 1);
  final (insn, file, 1, 0);
  final_end_function ();

  /* Undo the reload pretence for subsequent functions.  */
  reload_completed = 0;
  no_new_pseudos = 0;
}
12319 \f
12320 /* A quick summary of the various types of 'constant-pool tables'
12321 under PowerPC:
12322
12323 Target Flags Name One table per
12324 AIX (none) AIX TOC object file
12325 AIX -mfull-toc AIX TOC object file
12326 AIX -mminimal-toc AIX minimal TOC translation unit
12327 SVR4/EABI (none) SVR4 SDATA object file
12328 SVR4/EABI -fpic SVR4 pic object file
12329 SVR4/EABI -fPIC SVR4 PIC translation unit
12330 SVR4/EABI -mrelocatable EABI TOC function
12331 SVR4/EABI -maix AIX TOC object file
12332 SVR4/EABI -maix -mminimal-toc
12333 AIX minimal TOC translation unit
12334
12335 Name Reg. Set by entries contains:
12336 made by addrs? fp? sum?
12337
12338 AIX TOC 2 crt0 as Y option option
12339 AIX minimal TOC 30 prolog gcc Y Y option
12340 SVR4 SDATA 13 crt0 gcc N Y N
12341 SVR4 pic 30 prolog ld Y not yet N
12342 SVR4 PIC 30 prolog gcc Y option option
12343 EABI TOC 30 prolog gcc Y option option
12344
12345 */
12346
/* Hash functions for the hash table.  */

/* Compute a hash value for constant K (an RTX), for use by the TOC
   hash table.  The hash combines the rtx code and mode, then walks
   K's operands according to its RTL format string, folding strings,
   sub-expressions, integers and wide integers into the result.
   Recurses into sub-rtxes ('e'/'u' operands).  */

static unsigned
rs6000_hash_constant (k)
     rtx k;
{
  enum rtx_code code = GET_CODE (k);
  enum machine_mode mode = GET_MODE (k);
  unsigned result = (code << 3) ^ mode;
  const char *format;
  int flen, fidx;

  format = GET_RTX_FORMAT (code);
  flen = strlen (format);
  fidx = 0;

  /* Special-case a few codes before the generic format walk.  */
  switch (code)
    {
    case LABEL_REF:
      /* Hash the referenced label by its insn UID.  */
      return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));

    case CONST_DOUBLE:
      if (mode != VOIDmode)
	/* Floating-point CONST_DOUBLE: hash the real value directly.  */
	return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
      /* Integer CONST_DOUBLE: only the first two operands carry the
	 value, so limit the format walk to those.  */
      flen = 2;
      break;

    case CODE_LABEL:
      /* Skip the chain/insn bookkeeping operands.  */
      fidx = 3;
      break;

    default:
      break;
    }

  for (; fidx < flen; fidx++)
    switch (format[fidx])
      {
      case 's':		/* String operand: fold in length and bytes.  */
	{
	  unsigned i, len;
	  const char *str = XSTR (k, fidx);
	  len = strlen (str);
	  result = result * 613 + len;
	  for (i = 0; i < len; i++)
	    result = result * 613 + (unsigned) str[i];
	  break;
	}
      case 'u':
      case 'e':		/* Sub-expression: recurse.  */
	result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
	break;
      case 'i':
      case 'n':		/* Plain integer operand.  */
	result = result * 613 + (unsigned) XINT (k, fidx);
	break;
      case 'w':		/* HOST_WIDE_INT operand: fold in one `unsigned'
			   chunk at a time when it is wider than unsigned.  */
	if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
	  result = result * 613 + (unsigned) XWINT (k, fidx);
	else
	  {
	    size_t i;
	    for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
	      result = result * 613 + (unsigned) (XWINT (k, fidx)
						  >> CHAR_BIT * i);
	  }
	break;
      case '0':		/* Unused operand slot.  */
	break;
      default:
	abort ();
      }

  return result;
}
12422
12423 static unsigned
12424 toc_hash_function (hash_entry)
12425 const void * hash_entry;
12426 {
12427 const struct toc_hash_struct *thc =
12428 (const struct toc_hash_struct *) hash_entry;
12429 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
12430 }
12431
12432 /* Compare H1 and H2 for equivalence. */
12433
12434 static int
12435 toc_hash_eq (h1, h2)
12436 const void * h1;
12437 const void * h2;
12438 {
12439 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
12440 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
12441
12442 if (((const struct toc_hash_struct *) h1)->key_mode
12443 != ((const struct toc_hash_struct *) h2)->key_mode)
12444 return 0;
12445
12446 return rtx_equal_p (r1, r2);
12447 }
12448
/* These are the names given by the C++ front-end to vtables, and
   vtable-like objects.  Ideally, this logic should not be here;
   instead, there should be some programmatic way of inquiring as
   to whether or not an object is a vtable.  */

/* Nonzero if NAME is the assembler name of a vtable or vtable-like
   object: old-ABI "_vt." prefix, or new-ABI _ZTV (vtable), _ZTT (VTT),
   _ZTC (construction vtable) prefixes.  The body previously referenced
   a variable literally spelled `name' instead of the macro argument,
   so the macro only worked when invoked on a variable of that exact
   name; use the parameter so any argument expression works.  NAME is
   evaluated more than once.  */
#define VTABLE_NAME_P(NAME)				\
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0	\
  || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0	\
  || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0	\
  || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
12459
12460 void
12461 rs6000_output_symbol_ref (file, x)
12462 FILE *file;
12463 rtx x;
12464 {
12465 /* Currently C++ toc references to vtables can be emitted before it
12466 is decided whether the vtable is public or private. If this is
12467 the case, then the linker will eventually complain that there is
12468 a reference to an unknown section. Thus, for vtables only,
12469 we emit the TOC reference to reference the symbol and not the
12470 section. */
12471 const char *name = XSTR (x, 0);
12472
12473 if (VTABLE_NAME_P (name))
12474 {
12475 RS6000_OUTPUT_BASENAME (file, name);
12476 }
12477 else
12478 assemble_name (file, name);
12479 }
12480
/* Output a TOC entry for constant or address X of machine mode MODE,
   under internal label LCxx where xx is LABELNO.  We derive the entry
   name from what is being written.  */

void
output_toc (file, x, labelno, mode)
     FILE *file;
     rtx x;
     int labelno;
     enum machine_mode mode;
{
  char buf[256];
  const char *name = buf;
  const char *real_name;
  rtx base = x;
  int offset = 0;

  /* This routine must never be reached on a -mno-toc target.  */
  if (TARGET_NO_TOC)
    abort ();

  /* When the linker won't eliminate them, don't output duplicate
     TOC entries (this happens on AIX if there is any kind of TOC,
     and on SVR4 under -fPIC or -mrelocatable).  Don't do this for
     CODE_LABELs.  */
  if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
    {
      struct toc_hash_struct *h;
      void * * found;

      /* Create toc_hash_table.  This can't be done at OVERRIDE_OPTIONS
         time because GGC is not initialised at that point.  */
      if (toc_hash_table == NULL)
        toc_hash_table = htab_create_ggc (1021, toc_hash_function,
                                          toc_hash_eq, NULL);

      h = ggc_alloc (sizeof (*h));
      h->key = x;
      h->key_mode = mode;
      h->labelno = labelno;

      found = htab_find_slot (toc_hash_table, h, 1);
      if (*found == NULL)
        *found = h;
      else  /* This is indeed a duplicate.
               Set this label equal to that label.  */
        {
          fputs ("\t.set ", file);
          ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
          fprintf (file, "%d,", labelno);
          ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
          fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
                                   found)->labelno));
          return;
        }
    }

  /* If we're going to put a double constant in the TOC, make sure it's
     aligned properly when strict alignment is on.  */
  if (GET_CODE (x) == CONST_DOUBLE
      && STRICT_ALIGNMENT
      && GET_MODE_BITSIZE (mode) >= 64
      && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
    ASM_OUTPUT_ALIGN (file, 3);
  }

  (*targetm.asm_out.internal_label) (file, "LC", labelno);

  /* Handle FP constants specially.  Note that if we have a minimal
     TOC, things we put here aren't actually in the TOC, so we can allow
     FP constants.  */
  if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
    {
      /* 128-bit long double: four 32-bit words.  */
      REAL_VALUE_TYPE rv;
      long k[4];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (TARGET_64BIT)
        {
          if (TARGET_MINIMAL_TOC)
            fputs (DOUBLE_INT_ASM_OP, file);
          else
            fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
                     k[0] & 0xffffffff, k[1] & 0xffffffff,
                     k[2] & 0xffffffff, k[3] & 0xffffffff);
          fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
                   k[0] & 0xffffffff, k[1] & 0xffffffff,
                   k[2] & 0xffffffff, k[3] & 0xffffffff);
          return;
        }
      else
        {
          if (TARGET_MINIMAL_TOC)
            fputs ("\t.long ", file);
          else
            fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
                     k[0] & 0xffffffff, k[1] & 0xffffffff,
                     k[2] & 0xffffffff, k[3] & 0xffffffff);
          fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
                   k[0] & 0xffffffff, k[1] & 0xffffffff,
                   k[2] & 0xffffffff, k[3] & 0xffffffff);
          return;
        }
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      /* 64-bit double: two 32-bit words.  */
      REAL_VALUE_TYPE rv;
      long k[2];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      if (TARGET_64BIT)
        {
          if (TARGET_MINIMAL_TOC)
            fputs (DOUBLE_INT_ASM_OP, file);
          else
            fprintf (file, "\t.tc FD_%lx_%lx[TC],",
                     k[0] & 0xffffffff, k[1] & 0xffffffff);
          fprintf (file, "0x%lx%08lx\n",
                   k[0] & 0xffffffff, k[1] & 0xffffffff);
          return;
        }
      else
        {
          if (TARGET_MINIMAL_TOC)
            fputs ("\t.long ", file);
          else
            fprintf (file, "\t.tc FD_%lx_%lx[TC],",
                     k[0] & 0xffffffff, k[1] & 0xffffffff);
          fprintf (file, "0x%lx,0x%lx\n",
                   k[0] & 0xffffffff, k[1] & 0xffffffff);
          return;
        }
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      /* 32-bit float: one word (zero-extended on 64-bit).  */
      REAL_VALUE_TYPE rv;
      long l;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      if (TARGET_64BIT)
        {
          if (TARGET_MINIMAL_TOC)
            fputs (DOUBLE_INT_ASM_OP, file);
          else
            fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
          fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
          return;
        }
      else
        {
          if (TARGET_MINIMAL_TOC)
            fputs ("\t.long ", file);
          else
            fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
          fprintf (file, "0x%lx\n", l & 0xffffffff);
          return;
        }
    }
  else if (GET_MODE (x) == VOIDmode
           && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      /* Integer constant: split into high and low 32-bit halves.  */
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;

      if (GET_CODE (x) == CONST_DOUBLE)
        {
          low = CONST_DOUBLE_LOW (x);
          high = CONST_DOUBLE_HIGH (x);
        }
      else
#if HOST_BITS_PER_WIDE_INT == 32
        {
          low = INTVAL (x);
          /* Sign-extend into the high half.  */
          high = (low & 0x80000000) ? ~0 : 0;
        }
#else
        {
          low = INTVAL (x) & 0xffffffff;
          high = (HOST_WIDE_INT) INTVAL (x) >> 32;
        }
#endif

      /* TOC entries are always Pmode-sized, but since this
         is a bigendian machine then if we're putting smaller
         integer constants in the TOC we have to pad them.
         (This is still a win over putting the constants in
         a separate constant pool, because then we'd have
         to have both a TOC entry _and_ the actual constant.)

         For a 32-bit target, CONST_INT values are loaded and shifted
         entirely within `low' and can be stored in one TOC entry.  */

      if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
        abort ();/* It would be easy to make this work, but it doesn't now.  */

      if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
        {
          /* Left-justify the value within a Pmode-sized word.  */
#if HOST_BITS_PER_WIDE_INT == 32
          lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
                         POINTER_SIZE, &low, &high, 0);
#else
          low |= high << 32;
          low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
          high = (HOST_WIDE_INT) low >> 32;
          low &= 0xffffffff;
#endif
        }

      if (TARGET_64BIT)
        {
          if (TARGET_MINIMAL_TOC)
            fputs (DOUBLE_INT_ASM_OP, file);
          else
            fprintf (file, "\t.tc ID_%lx_%lx[TC],",
                     (long) high & 0xffffffff, (long) low & 0xffffffff);
          fprintf (file, "0x%lx%08lx\n",
                   (long) high & 0xffffffff, (long) low & 0xffffffff);
          return;
        }
      else
        {
          if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
            {
              /* Double-word constant on a 32-bit target: two words.  */
              if (TARGET_MINIMAL_TOC)
                fputs ("\t.long ", file);
              else
                fprintf (file, "\t.tc ID_%lx_%lx[TC],",
                         (long) high & 0xffffffff, (long) low & 0xffffffff);
              fprintf (file, "0x%lx,0x%lx\n",
                       (long) high & 0xffffffff, (long) low & 0xffffffff);
            }
          else
            {
              if (TARGET_MINIMAL_TOC)
                fputs ("\t.long ", file);
              else
                fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
              fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
            }
          return;
        }
    }

  /* Otherwise X is a symbol or label, possibly wrapped in a CONST PLUS
     with an integer offset.  */
  if (GET_CODE (x) == CONST)
    {
      if (GET_CODE (XEXP (x, 0)) != PLUS)
        abort ();

      base = XEXP (XEXP (x, 0), 0);
      offset = INTVAL (XEXP (XEXP (x, 0), 1));
    }

  if (GET_CODE (base) == SYMBOL_REF)
    name = XSTR (base, 0);
  else if (GET_CODE (base) == LABEL_REF)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
  else if (GET_CODE (base) == CODE_LABEL)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
  else
    abort ();

  real_name = (*targetm.strip_name_encoding) (name);
  if (TARGET_MINIMAL_TOC)
    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
  else
    {
      fprintf (file, "\t.tc %s", real_name);

      /* A nonzero offset is encoded into the entry name, ".Nn" for
         negative offsets and ".Pn" for positive ones, so entries for
         the same symbol at different offsets stay distinct.  */
      if (offset < 0)
        fprintf (file, ".N%d", - offset);
      else if (offset)
        fprintf (file, ".P%d", offset);

      fputs ("[TC],", file);
    }

  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a TOC reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  if (VTABLE_NAME_P (name))
    {
      RS6000_OUTPUT_BASENAME (file, name);
      if (offset < 0)
        fprintf (file, "%d", offset);
      else if (offset > 0)
        fprintf (file, "+%d", offset);
    }
  else
    output_addr_const (file, x);
  putc ('\n', file);
}
12779 \f
/* Output an assembler pseudo-op to write an ASCII string of N characters
   starting at P to FILE.

   On the RS/6000, we have to do this using the .byte operation and
   write out special characters outside the quoted string.
   Also, the assembler is broken; very long strings are truncated,
   so we must artificially break them up early.  */

void
output_ascii (file, p, n)
     FILE *file;
     const char *p;
     int n;
{
  /* Pending prefixes: OPEN_STRING starts (or resumes) a quoted run,
     OPEN_DECIMAL introduces a decimal operand, CLOSE_STRING is what
     must be written to terminate an open quoted run.  */
  const char *open_string = "\t.byte \"";
  const char *open_decimal = "\t.byte ";
  const char *close_string = NULL;
  int in_string_len = 0;
  int idx;

  for (idx = 0; idx < n; idx++)
    {
      char ch = p[idx];

      if (ch < ' ' || ch >= 0177)
        {
          /* Non-printing character: emit it as a decimal operand.  */
          if (open_decimal)
            fputs (open_decimal, file);
          fprintf (file, "%d", ch);

          open_string = "\n\t.byte \"";
          open_decimal = ", ";
          close_string = "\n";
          in_string_len = 0;
          continue;
        }

      /* Printing character: emit it inside a quoted string.  */
      if (open_string)
        fputs (open_string, file);
      putc (ch, file);

      /* Write two quotes to get one.  */
      if (ch == '"')
        {
          putc (ch, file);
          in_string_len++;
        }

      open_string = NULL;
      open_decimal = "\"\n\t.byte ";
      close_string = "\"\n";
      in_string_len++;

      /* The assembler truncates very long strings, so close the
         current run well before that can happen.  */
      if (in_string_len >= 512)
        {
          fputs (close_string, file);

          open_string = "\t.byte \"";
          open_decimal = "\t.byte ";
          close_string = NULL;
          in_string_len = 0;
        }
    }

  /* Now close the string if we have written one.  Then end the line.  */
  if (close_string)
    fputs (close_string, file);
}
12849 \f
12850 /* Generate a unique section name for FILENAME for a section type
12851 represented by SECTION_DESC. Output goes into BUF.
12852
12853 SECTION_DESC can be any string, as long as it is different for each
12854 possible section type.
12855
12856 We name the section in the same manner as xlc. The name begins with an
12857 underscore followed by the filename (after stripping any leading directory
12858 names) with the last period replaced by the string SECTION_DESC. If
12859 FILENAME does not contain a period, SECTION_DESC is appended to the end of
12860 the name. */
12861
void
rs6000_gen_section_name (buf, filename, section_desc)
     char **buf;
     const char *filename;
     const char *section_desc;
{
  const char *q, *after_last_slash, *last_period = 0;
  char *p;
  int len;

  /* Find the start of the base name, and the last period within it.
     LAST_PERIOD is reset at each slash so that a period inside a
     directory component (e.g. "foo.d/bar") is not mistaken for the
     extension separator of the base name; otherwise SECTION_DESC
     would be dropped and the generated names would not be unique
     per section type.  */
  after_last_slash = filename;
  for (q = filename; *q; q++)
    {
      if (*q == '/')
        {
          after_last_slash = q + 1;
          last_period = 0;
        }
      else if (*q == '.')
        last_period = q;
    }

  /* '_' + base name (period replaced by SECTION_DESC, or SECTION_DESC
     appended) + terminating NUL.  */
  len = strlen (after_last_slash) + strlen (section_desc) + 2;
  *buf = (char *) xmalloc (len);

  p = *buf;
  *p++ = '_';

  /* Copy the alphanumeric characters of the base name; at the last
     period, substitute SECTION_DESC and stop.  */
  for (q = after_last_slash; *q; q++)
    {
      if (q == last_period)
        {
          strcpy (p, section_desc);
          p += strlen (section_desc);
          break;
        }

      else if (ISALNUM (*q))
        *p++ = *q;
    }

  /* No period in the base name: append SECTION_DESC at the end.  */
  if (last_period == 0)
    strcpy (p, section_desc);
  else
    *p = '\0';
}
12905 \f
/* Emit profile function: generate the rtl call to the profiling
   routine (mcount) for ABIs where that call is emitted as rtl rather
   than as text by output_function_profiler.  LABELNO numbers the
   per-function count label on AIX.  */

void
output_profile_hook (labelno)
     int labelno ATTRIBUTE_UNUSED;
{
  /* With -mprofile-kernel the profiling code is emitted by
     output_function_profiler instead.  */
  if (TARGET_PROFILE_KERNEL)
    return;

  if (DEFAULT_ABI == ABI_AIX)
    {
#ifndef NO_PROFILE_COUNTERS
# define NO_PROFILE_COUNTERS 0
#endif
      if (NO_PROFILE_COUNTERS)
        emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
      else
        {
          char buf[30];
          const char *label_name;
          rtx fun;

          /* Pass the address of the LPxx count label to mcount.  */
          ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
          label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
          fun = gen_rtx_SYMBOL_REF (Pmode, label_name);

          emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
                             fun, Pmode);
        }
    }
  else if (DEFAULT_ABI == ABI_DARWIN)
    {
      const char *mcount_name = RS6000_MCOUNT;
      int caller_addr_regno = LINK_REGISTER_REGNUM;

      /* Be conservative and always set this, at least for now.  */
      current_function_uses_pic_offset_table = 1;

#if TARGET_MACHO
      /* For PIC code, set up a stub and collect the caller's address
         from r0, which is where the prologue puts it.  */
      if (MACHOPIC_INDIRECT)
        {
          mcount_name = machopic_stub_name (mcount_name);
          if (current_function_uses_pic_offset_table)
            caller_addr_regno = 0;
        }
#endif
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
                         0, VOIDmode, 1,
                         gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
    }
}
12959
/* Write function profiler code (the textual call to mcount) for the
   function whose count label is numbered LABELNO, to FILE.  Used for
   ABI_V4, and for ABI_AIX/ABI_DARWIN only under -mprofile-kernel
   (otherwise output_profile_hook emits the call as rtl).  */

void
output_function_profiler (file, labelno)
  FILE *file;
  int labelno;
{
  char buf[100];
  /* Stack offset at which the caller's LR is saved.  */
  int save_lr = 8;

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_V4:
      save_lr = 4;
      if (!TARGET_32BIT)
        {
          warning ("no profiling of 64-bit code for this ABI");
          return;
        }
      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      fprintf (file, "\tmflr %s\n", reg_names[0]);
      if (flag_pic == 1)
        {
          /* -fpic: load the count label's address from the GOT.  */
          fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
          asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
                       reg_names[0], save_lr, reg_names[1]);
          asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
          asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
          assemble_name (file, buf);
          asm_fprintf (file, "@got(%s)\n", reg_names[12]);
        }
      else if (flag_pic > 1)
        {
          asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
                       reg_names[0], save_lr, reg_names[1]);
          /* Now, we need to get the address of the label.  */
          fputs ("\tbl 1f\n\t.long ", file);
          assemble_name (file, buf);
          fputs ("-.\n1:", file);
          asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
          asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
                       reg_names[0], reg_names[11]);
          asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
                       reg_names[0], reg_names[0], reg_names[11]);
        }
      else
        {
          /* Non-PIC: materialize the label address directly.  */
          asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
          assemble_name (file, buf);
          fputs ("@ha\n", file);
          asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
                       reg_names[0], save_lr, reg_names[1]);
          asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
          assemble_name (file, buf);
          asm_fprintf (file, "@l(%s)\n", reg_names[12]);
        }

      /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH.  */
      fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
      break;

    case ABI_AIX:
    case ABI_DARWIN:
      if (!TARGET_PROFILE_KERNEL)
        {
          /* Don't do anything, done in output_profile_hook ().  */
        }
      else
        {
          /* -mprofile-kernel is 64-bit only.  */
          if (TARGET_32BIT)
            abort ();

          asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
          asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);

          if (current_function_needs_context)
            {
              /* Preserve the static chain register across the call.  */
              asm_fprintf (file, "\tstd %s,24(%s)\n",
                           reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
              fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
              asm_fprintf (file, "\tld %s,24(%s)\n",
                           reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
            }
          else
            fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
        }
      break;
    }
}
13052
13053 \f
/* Target hook: nonzero means the scheduler should use the DFA-based
   pipeline description for this target.  */

static int
rs6000_use_dfa_pipeline_interface ()
{
  return 1;
}
13059
13060 /* Power4 load update and store update instructions are cracked into a
13061 load or store and an integer insn which are executed in the same cycle.
13062 Branches have their own dispatch slot which does not count against the
13063 GCC issue rate, but it changes the program flow so there are no other
13064 instructions to issue in this cycle. */
13065
13066 static int
13067 rs6000_variable_issue (stream, verbose, insn, more)
13068 FILE *stream ATTRIBUTE_UNUSED;
13069 int verbose ATTRIBUTE_UNUSED;
13070 rtx insn;
13071 int more;
13072 {
13073 if (GET_CODE (PATTERN (insn)) == USE
13074 || GET_CODE (PATTERN (insn)) == CLOBBER)
13075 return more;
13076
13077 if (rs6000_cpu == PROCESSOR_POWER4)
13078 {
13079 enum attr_type type = get_attr_type (insn);
13080 if (type == TYPE_LOAD_EXT_U || type == TYPE_LOAD_EXT_UX
13081 || type == TYPE_LOAD_UX || type == TYPE_STORE_UX)
13082 return 0;
13083 else if (type == TYPE_LOAD_U || type == TYPE_STORE_U
13084 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
13085 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
13086 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
13087 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
13088 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
13089 || type == TYPE_IDIV || type == TYPE_LDIV
13090 || type == TYPE_INSERT_WORD)
13091 return more > 2 ? more - 2 : 0;
13092 }
13093
13094 return more - 1;
13095 }
13096
13097 /* Adjust the cost of a scheduling dependency. Return the new cost of
13098 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
13099
13100 static int
13101 rs6000_adjust_cost (insn, link, dep_insn, cost)
13102 rtx insn;
13103 rtx link;
13104 rtx dep_insn ATTRIBUTE_UNUSED;
13105 int cost;
13106 {
13107 if (! recog_memoized (insn))
13108 return 0;
13109
13110 if (REG_NOTE_KIND (link) != 0)
13111 return 0;
13112
13113 if (REG_NOTE_KIND (link) == 0)
13114 {
13115 /* Data dependency; DEP_INSN writes a register that INSN reads
13116 some cycles later. */
13117 switch (get_attr_type (insn))
13118 {
13119 case TYPE_JMPREG:
13120 /* Tell the first scheduling pass about the latency between
13121 a mtctr and bctr (and mtlr and br/blr). The first
13122 scheduling pass will not know about this latency since
13123 the mtctr instruction, which has the latency associated
13124 to it, will be generated by reload. */
13125 return TARGET_POWER ? 5 : 4;
13126 case TYPE_BRANCH:
13127 /* Leave some extra cycles between a compare and its
13128 dependent branch, to inhibit expensive mispredicts. */
13129 if ((rs6000_cpu_attr == CPU_PPC603
13130 || rs6000_cpu_attr == CPU_PPC604
13131 || rs6000_cpu_attr == CPU_PPC604E
13132 || rs6000_cpu_attr == CPU_PPC620
13133 || rs6000_cpu_attr == CPU_PPC630
13134 || rs6000_cpu_attr == CPU_PPC750
13135 || rs6000_cpu_attr == CPU_PPC7400
13136 || rs6000_cpu_attr == CPU_PPC7450
13137 || rs6000_cpu_attr == CPU_POWER4)
13138 && recog_memoized (dep_insn)
13139 && (INSN_CODE (dep_insn) >= 0)
13140 && (get_attr_type (dep_insn) == TYPE_CMP
13141 || get_attr_type (dep_insn) == TYPE_COMPARE
13142 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
13143 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
13144 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
13145 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
13146 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
13147 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
13148 return cost + 2;
13149 default:
13150 break;
13151 }
13152 /* Fall out to return default cost. */
13153 }
13154
13155 return cost;
13156 }
13157
/* A C statement (sans semicolon) to update the integer scheduling
   priority INSN_PRIORITY (INSN).  Reduce the priority to execute the
   INSN earlier, increase the priority to execute INSN later.  Do not
   define this macro if you do not need to adjust the scheduling
   priorities of insns.  */

static int
rs6000_adjust_priority (insn, priority)
     rtx insn ATTRIBUTE_UNUSED;
     int priority;
{
  /* On machines (like the 750) which have asymmetric integer units,
     where one integer unit can do multiply and divides and the other
     can't, reduce the priority of multiply/divide so it is scheduled
     before other integer operations.  */

  /* The adjustment below is experimental and currently disabled (note
     the debug fprintf); the hook returns PRIORITY unchanged.  */
#if 0
  if (! INSN_P (insn))
    return priority;

  if (GET_CODE (PATTERN (insn)) == USE)
    return priority;

  switch (rs6000_cpu_attr) {
  case CPU_PPC750:
    switch (get_attr_type (insn))
      {
      default:
        break;

      case TYPE_IMUL:
      case TYPE_IDIV:
        fprintf (stderr, "priority was %#x (%d) before adjustment\n",
                 priority, priority);
        if (priority >= 0 && priority < 0x01000000)
          priority >>= 3;
        break;
      }
  }
#endif

  return priority;
}
13201
13202 /* Return how many instructions the machine can issue per cycle. */
13203
13204 static int
13205 rs6000_issue_rate ()
13206 {
13207 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
13208 if (!reload_completed)
13209 return 1;
13210
13211 switch (rs6000_cpu_attr) {
13212 case CPU_RIOS1: /* ? */
13213 case CPU_RS64A:
13214 case CPU_PPC601: /* ? */
13215 case CPU_PPC7450:
13216 return 3;
13217 case CPU_PPC440:
13218 case CPU_PPC603:
13219 case CPU_PPC750:
13220 case CPU_PPC7400:
13221 case CPU_PPC8540:
13222 return 2;
13223 case CPU_RIOS2:
13224 case CPU_PPC604:
13225 case CPU_PPC604E:
13226 case CPU_PPC620:
13227 case CPU_PPC630:
13228 case CPU_POWER4:
13229 return 4;
13230 default:
13231 return 1;
13232 }
13233 }
13234
13235 /* Return how many instructions to look ahead for better insn
13236 scheduling. */
13237
13238 static int
13239 rs6000_use_sched_lookahead ()
13240 {
13241 if (rs6000_cpu_attr == CPU_PPC8540)
13242 return 4;
13243 return 0;
13244 }
13245
13246 \f
13247 /* Length in units of the trampoline for entering a nested function. */
13248
13249 int
13250 rs6000_trampoline_size ()
13251 {
13252 int ret = 0;
13253
13254 switch (DEFAULT_ABI)
13255 {
13256 default:
13257 abort ();
13258
13259 case ABI_AIX:
13260 ret = (TARGET_32BIT) ? 12 : 24;
13261 break;
13262
13263 case ABI_DARWIN:
13264 case ABI_V4:
13265 ret = (TARGET_32BIT) ? 40 : 48;
13266 break;
13267 }
13268
13269 return ret;
13270 }
13271
/* Emit RTL insns to initialize the variable parts of a trampoline.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.
   ADDR is the address of the trampoline storage itself.  */

void
rs6000_initialize_trampoline (addr, fnaddr, cxt)
     rtx addr;
     rtx fnaddr;
     rtx cxt;
{
  enum machine_mode pmode = Pmode;
  /* Size of one pointer-sized slot in the descriptor.  */
  int regsize = (TARGET_32BIT) ? 4 : 8;
  rtx ctx_reg = force_reg (pmode, cxt);

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

/* Macros to shorten the code expansions below.  */
#define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
#define MEM_PLUS(addr,offset) \
  gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))

    /* Under AIX, just build the 3 word function descriptor:
       code address, TOC pointer (copied from FNADDR's descriptor),
       and static chain.  */
    case ABI_AIX:
      {
        rtx fn_reg = gen_reg_rtx (pmode);
        rtx toc_reg = gen_reg_rtx (pmode);
        emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
        emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
        emit_move_insn (MEM_DEREF (addr), fn_reg);
        emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
        emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
      }
      break;

    /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
    case ABI_DARWIN:
    case ABI_V4:
      emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
                         FALSE, VOIDmode, 4,
                         addr, pmode,
                         GEN_INT (rs6000_trampoline_size ()), SImode,
                         fnaddr, pmode,
                         ctx_reg, pmode);
      break;
    }

  return;
}
13323
13324 \f
/* Table of valid machine attributes.  "longcall" and "shortcall" both
   apply to function types and share a single handler, which only
   validates where the attribute appears.  */

const struct attribute_spec rs6000_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
  { NULL, 0, 0, false, false, false, NULL }
};
13334
13335 /* Handle a "longcall" or "shortcall" attribute; arguments as in
13336 struct attribute_spec.handler. */
13337
13338 static tree
13339 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
13340 tree *node;
13341 tree name;
13342 tree args ATTRIBUTE_UNUSED;
13343 int flags ATTRIBUTE_UNUSED;
13344 bool *no_add_attrs;
13345 {
13346 if (TREE_CODE (*node) != FUNCTION_TYPE
13347 && TREE_CODE (*node) != FIELD_DECL
13348 && TREE_CODE (*node) != TYPE_DECL)
13349 {
13350 warning ("`%s' attribute only applies to functions",
13351 IDENTIFIER_POINTER (name));
13352 *no_add_attrs = true;
13353 }
13354
13355 return NULL_TREE;
13356 }
13357
13358 /* Set longcall attributes on all functions declared when
13359 rs6000_default_long_calls is true. */
13360 static void
13361 rs6000_set_default_type_attributes (type)
13362 tree type;
13363 {
13364 if (rs6000_default_long_calls
13365 && (TREE_CODE (type) == FUNCTION_TYPE
13366 || TREE_CODE (type) == METHOD_TYPE))
13367 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
13368 NULL_TREE,
13369 TYPE_ATTRIBUTES (type));
13370 }
13371
13372 /* Return a reference suitable for calling a function with the
13373 longcall attribute. */
13374
13375 struct rtx_def *
13376 rs6000_longcall_ref (call_ref)
13377 rtx call_ref;
13378 {
13379 const char *call_name;
13380 tree node;
13381
13382 if (GET_CODE (call_ref) != SYMBOL_REF)
13383 return call_ref;
13384
13385 /* System V adds '.' to the internal name, so skip them. */
13386 call_name = XSTR (call_ref, 0);
13387 if (*call_name == '.')
13388 {
13389 while (*call_name == '.')
13390 call_name++;
13391
13392 node = get_identifier (call_name);
13393 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
13394 }
13395
13396 return force_reg (Pmode, call_ref);
13397 }
13398 \f
13399 #ifdef USING_ELFOS_H
13400
13401 /* A C statement or statements to switch to the appropriate section
13402 for output of RTX in mode MODE. You can assume that RTX is some
13403 kind of constant in RTL. The argument MODE is redundant except in
13404 the case of a `const_int' rtx. Select the section by calling
13405 `text_section' or one of the alternatives for other sections.
13406
13407 Do not define this macro if you put all constants in the read-only
13408 data section. */
13409
13410 static void
13411 rs6000_elf_select_rtx_section (mode, x, align)
13412 enum machine_mode mode;
13413 rtx x;
13414 unsigned HOST_WIDE_INT align;
13415 {
13416 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
13417 toc_section ();
13418 else
13419 default_elf_select_rtx_section (mode, x, align);
13420 }
13421
13422 /* A C statement or statements to switch to the appropriate
13423 section for output of DECL. DECL is either a `VAR_DECL' node
13424 or a constant of some sort. RELOC indicates whether forming
13425 the initial value of DECL requires link-time relocations. */
13426
13427 static void
13428 rs6000_elf_select_section (decl, reloc, align)
13429 tree decl;
13430 int reloc;
13431 unsigned HOST_WIDE_INT align;
13432 {
13433 /* Pretend that we're always building for a shared library when
13434 ABI_AIX, because otherwise we end up with dynamic relocations
13435 in read-only sections. This happens for function pointers,
13436 references to vtables in typeinfo, and probably other cases. */
13437 default_elf_select_section_1 (decl, reloc, align,
13438 flag_pic || DEFAULT_ABI == ABI_AIX);
13439 }
13440
13441 /* A C statement to build up a unique section name, expressed as a
13442 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
13443 RELOC indicates whether the initial value of EXP requires
13444 link-time relocations. If you do not define this macro, GCC will use
13445 the symbol name prefixed by `.' as the section name. Note - this
13446 macro can now be called for uninitialized data items as well as
13447 initialized data and functions. */
13448
13449 static void
13450 rs6000_elf_unique_section (decl, reloc)
13451 tree decl;
13452 int reloc;
13453 {
13454 /* As above, pretend that we're always building for a shared library
13455 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
13456 default_unique_section_1 (decl, reloc,
13457 flag_pic || DEFAULT_ABI == ABI_AIX);
13458 }
13459 \f
13460 /* For a SYMBOL_REF, set generic flags and then perform some
13461 target-specific processing.
13462
13463 When the AIX ABI is requested on a non-AIX system, replace the
13464 function name with the real name (with a leading .) rather than the
13465 function descriptor name. This saves a lot of overriding code to
13466 read the prefixes. */
13467
13468 static void
13469 rs6000_elf_encode_section_info (decl, rtl, first)
13470 tree decl;
13471 rtx rtl;
13472 int first;
13473 {
13474 default_encode_section_info (decl, rtl, first);
13475
13476 if (first
13477 && TREE_CODE (decl) == FUNCTION_DECL
13478 && !TARGET_AIX
13479 && DEFAULT_ABI == ABI_AIX)
13480 {
13481 rtx sym_ref = XEXP (rtl, 0);
13482 size_t len = strlen (XSTR (sym_ref, 0));
13483 char *str = alloca (len + 2);
13484 str[0] = '.';
13485 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
13486 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
13487 }
13488 }
13489
13490 static bool
13491 rs6000_elf_in_small_data_p (decl)
13492 tree decl;
13493 {
13494 if (rs6000_sdata == SDATA_NONE)
13495 return false;
13496
13497 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
13498 {
13499 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
13500 if (strcmp (section, ".sdata") == 0
13501 || strcmp (section, ".sdata2") == 0
13502 || strcmp (section, ".sbss") == 0
13503 || strcmp (section, ".sbss2") == 0
13504 || strcmp (section, ".PPC.EMB.sdata0") == 0
13505 || strcmp (section, ".PPC.EMB.sbss0") == 0)
13506 return true;
13507 }
13508 else
13509 {
13510 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
13511
13512 if (size > 0
13513 && (unsigned HOST_WIDE_INT) size <= g_switch_value
13514 /* If it's not public, and we're not going to reference it there,
13515 there's no need to put it in the small data section. */
13516 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
13517 return true;
13518 }
13519
13520 return false;
13521 }
13522
13523 #endif /* USING_ELFOS_H */
13524
13525 \f
13526 /* Return a REG that occurs in ADDR with coefficient 1.
13527 ADDR can be effectively incremented by incrementing REG.
13528
13529 r0 is special and we must not select it as an address
13530 register by this routine since our caller will try to
13531 increment the returned register via an "la" instruction. */
13532
struct rtx_def *
find_addr_reg (addr)
     rtx addr;
{
  /* Descend through nested PLUS terms, at each level following the
     operand that can still hold the register: a REG operand other
     than r0 is taken directly, otherwise the non-constant operand is
     followed.  Anything else is malformed for our purposes.  */
  while (GET_CODE (addr) == PLUS)
    {
      if (GET_CODE (XEXP (addr, 0)) == REG
          && REGNO (XEXP (addr, 0)) != 0)
        addr = XEXP (addr, 0);
      else if (GET_CODE (XEXP (addr, 1)) == REG
               && REGNO (XEXP (addr, 1)) != 0)
        addr = XEXP (addr, 1);
      else if (CONSTANT_P (XEXP (addr, 0)))
        addr = XEXP (addr, 1);
      else if (CONSTANT_P (XEXP (addr, 1)))
        addr = XEXP (addr, 0);
      else
        abort ();
    }
  /* Succeed only on a register other than r0 (see comment above).  */
  if (GET_CODE (addr) == REG && REGNO (addr) != 0)
    return addr;
  abort ();
}
13556
/* Report a fatal error (via fatal_insn) for insn OP, which contains
   an address that could not be handled.  Does not return.  */
void
rs6000_fatal_bad_address (op)
     rtx op;
{
  fatal_insn ("bad address", op);
}
13563
13564 #if TARGET_MACHO
13565
13566 #if 0
13567 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
13568 reference and a constant. */
13569
int
symbolic_operand (op)
     rtx op;
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;
    case CONST:
      op = XEXP (op, 0);
      /* NOTE(review): this is inside #if 0.  Before re-enabling,
         verify two points: (1) XEXP (op, 0) / XEXP (op, 1) are
         accessed without first checking GET_CODE (op) == PLUS, and
         (2) the expression relies on && binding tighter than ||,
         i.e. it parses as
         SYMBOL_REF || ((SYMBOL_REF || LABEL_REF) && CONST_INT).  */
      return (GET_CODE (op) == SYMBOL_REF ||
              (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
               || GET_CODE (XEXP (op, 0)) == LABEL_REF)
              && GET_CODE (XEXP (op, 1)) == CONST_INT);
    default:
      return 0;
    }
}
13589 #endif
13590
13591 #ifdef RS6000_LONG_BRANCH
13592
/* Pending compiler-generated call stubs, linked through TREE_CHAIN;
   filled by add_compiler_stub and drained by output_compiler_stub.  */
static tree stub_list = 0;
13594
13595 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
13596 procedure calls to the linked list. */
13597
13598 void
13599 add_compiler_stub (label_name, function_name, line_number)
13600 tree label_name;
13601 tree function_name;
13602 int line_number;
13603 {
13604 tree stub = build_tree_list (function_name, label_name);
13605 TREE_TYPE (stub) = build_int_2 (line_number, 0);
13606 TREE_CHAIN (stub) = stub_list;
13607 stub_list = stub;
13608 }
13609
/* Accessors for the stub_list entries built by add_compiler_stub.  */
#define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
#define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
#define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
13613
/* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
   handling procedure calls from the linked list and initializes the
   linked list.  Stubs are emitted only for non-PIC code; each one
   loads the target address into r12 and branches through CTR.  */

void
output_compiler_stub ()
{
  char tmp_buf[256];
  char label_buf[256];
  tree stub;

  if (!flag_pic)
    for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
      {
	fprintf (asm_out_file,
		 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));

#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
	if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	  fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */

	/* A leading '*' means the name is already assembler-ready;
	   otherwise prepend the '_' user-symbol prefix.  */
	if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
	  strcpy (label_buf,
		  IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
	else
	  {
	    label_buf[0] = '_';
	    strcpy (label_buf+1,
		    IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
	  }

	/* NOTE(review): this strcpy/strcat chain assumes the mangled
	   name fits in the fixed 256-byte buffers; an unusually long
	   symbol name would overflow them.  */
	strcpy (tmp_buf, "lis r12,hi16(");
	strcat (tmp_buf, label_buf);
	strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
	strcat (tmp_buf, label_buf);
	strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
	output_asm_insn (tmp_buf, 0);

#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
	if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	  fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
      }

  stub_list = 0;
}
13661
13662 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
13663 already there or not. */
13664
13665 int
13666 no_previous_def (function_name)
13667 tree function_name;
13668 {
13669 tree stub;
13670 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13671 if (function_name == STUB_FUNCTION_NAME (stub))
13672 return 0;
13673 return 1;
13674 }
13675
13676 /* GET_PREV_LABEL gets the label name from the previous definition of
13677 the function. */
13678
13679 tree
13680 get_prev_label (function_name)
13681 tree function_name;
13682 {
13683 tree stub;
13684 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13685 if (function_name == STUB_FUNCTION_NAME (stub))
13686 return STUB_LABEL_NAME (stub);
13687 return 0;
13688 }
13689
/* INSN is either a function call or a millicode call.  It may have an
   unconditional jump in its delay slot.

   CALL_DEST is the routine we are calling.  OPERAND_NUMBER is the
   operand of the call insn that holds the destination.  Returns a
   pointer to a static buffer holding the assembler template for the
   call, valid only until the next invocation.  */

char *
output_call (insn, call_dest, operand_number)
     rtx insn;
     rtx call_dest;
     int operand_number;
{
  static char buf[256];
  /* Long-branch stubs are only generated for direct, non-PIC calls.  */
  if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
    {
      tree labelname;
      tree funname = get_identifier (XSTR (call_dest, 0));

      if (no_previous_def (funname))
	{
	  int line_number = 0;
	  rtx label_rtx = gen_label_rtx ();
	  char *label_buf, temp_buf[256];
	  ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
				       CODE_LABEL_NUMBER (label_rtx));
	  label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
	  labelname = get_identifier (label_buf);
	  /* Walk backwards to the nearest NOTE to pick up a source
	     line number for the stub's debug info.  */
	  for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
	  if (insn)
	    line_number = NOTE_LINE_NUMBER (insn);
	  add_compiler_stub (labelname, funname, line_number);
	}
      else
	labelname = get_prev_label (funname);

      /* %.246s keeps the label from overflowing the 256-byte BUF.  */
      sprintf (buf, "jbsr %%z%d,%.246s",
	       operand_number, IDENTIFIER_POINTER (labelname));
      return buf;
    }
  else
    {
      sprintf (buf, "bl %%z%d", operand_number);
      return buf;
    }
}
13734
13735 #endif /* RS6000_LONG_BRANCH */
13736
/* Write into BUF the name of local label N for symbol SYMBOL,
   producing "L<N>$<symbol>".  The label is quoted when SYMBOL is
   already quoted (leading '"') or contains characters that need
   quoting.  LENGTH is SYMBOL's length; callers size BUF as
   LENGTH + 32, which must also cover the label number and quotes.  */
#define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N)		\
  do {								\
    const char *const symbol_ = (SYMBOL);			\
    char *buffer_ = (BUF);					\
    if (symbol_[0] == '"')					\
      {								\
        sprintf(buffer_, "\"L%d$%s", (N), symbol_+1);		\
      }								\
    else if (name_needs_quotes(symbol_))			\
      {								\
        sprintf(buffer_, "\"L%d$%s\"", (N), symbol_);		\
      }								\
    else							\
      {								\
        sprintf(buffer_, "L%d$%s", (N), symbol_);		\
      }								\
  } while (0)
13754
13755
/* Generate PIC and indirect symbol stubs.  FILE is the assembly
   output stream, SYMB the name of the symbol being called and STUB
   the name of the stub to define.  Emits the stub body followed by
   its lazy symbol pointer, which initially resolves through
   dyld_stub_binding_helper.  */

void
machopic_output_stub (file, symb, stub)
     FILE *file;
     const char *symb, *stub;
{
  unsigned int length;
  char *symbol_name, *lazy_ptr_name;
  char *local_label_0;
  /* NOTE(review): LABEL is incremented on each call but not otherwise
     used in this function.  */
  static int label = 0;

  /* Lose our funky encoding stuff so it doesn't contaminate the stub.  */
  symb = (*targetm.strip_name_encoding) (symb);

  label += 1;

  length = strlen (symb);
  symbol_name = alloca (length + 32);
  GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);

  lazy_ptr_name = alloca (length + 32);
  GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);

  local_label_0 = alloca (length + 32);
  GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);

  if (flag_pic == 2)
    machopic_picsymbol_stub1_section ();
  else
    machopic_symbol_stub1_section ();
  fprintf (file, "\t.align 2\n");

  fprintf (file, "%s:\n", stub);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

  if (flag_pic == 2)
    {
      /* PIC stub: establish a base address in r11 via bcl/mflr, then
	 load the lazy pointer relative to it and branch through CTR.  */
      fprintf (file, "\tmflr r0\n");
      fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
      fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
      fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtlr r0\n");
      fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\tbctr\n");
    }
  else
    {
      /* Non-PIC stub: address the lazy pointer absolutely.  */
      fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
      fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\tbctr\n");
    }

  machopic_lazy_symbol_ptr_section ();
  fprintf (file, "%s:\n", lazy_ptr_name);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
  fprintf (file, "\t.long dyld_stub_binding_helper\n");
}
13818
/* Legitimize PIC addresses.  If the address is already
   position-independent, we return ORIG.  Newly generated
   position-independent addresses go into a reg.  This is REG if non
   zero, otherwise we allocate register(s) as necessary.  */

/* True iff X is a CONST_INT that fits in a signed 16-bit field.  */
#define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)

rtx
rs6000_machopic_legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  rtx base, offset;

  if (reg == NULL && ! reload_in_progress && ! reload_completed)
    reg = gen_reg_rtx (Pmode);

  if (GET_CODE (orig) == CONST)
    {
      /* Already legitimized: (const (plus pic_offset_table ...)).  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  /* Legitimize both operands of the PLUS recursively.  */
	  base =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
						    Pmode, reg);
	  offset =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
						    Pmode, reg);
	}
      else
	abort ();

      if (GET_CODE (offset) == CONST_INT)
	{
	  if (SMALL_INT (offset))
	    return plus_constant (base, INTVAL (offset));
	  else if (! reload_in_progress && ! reload_completed)
	    offset = force_reg (Pmode, offset);
	  else
	    {
	      /* During/after reload no new pseudos may be created, so
		 spill the whole constant to the literal pool.  */
 	      rtx mem = force_const_mem (Pmode, orig);
	      return machopic_legitimize_pic_address (mem, Pmode, reg);
	    }
	}
      return gen_rtx (PLUS, Pmode, base, offset);
    }

  /* Fall back on generic machopic code.  */
  return machopic_legitimize_pic_address (orig, mode, reg);
}
13873
13874 /* This is just a placeholder to make linking work without having to
13875 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
13876 ever needed for Darwin (not too likely!) this would have to get a
13877 real definition. */
13878
void
toc_section ()
{
  /* Deliberately empty -- see the comment above: present only so the
     Darwin build links.  */
}
13883
13884 #endif /* TARGET_MACHO */
13885
13886 #if TARGET_ELF
13887 static unsigned int
13888 rs6000_elf_section_type_flags (decl, name, reloc)
13889 tree decl;
13890 const char *name;
13891 int reloc;
13892 {
13893 unsigned int flags
13894 = default_section_type_flags_1 (decl, name, reloc,
13895 flag_pic || DEFAULT_ABI == ABI_AIX);
13896
13897 if (TARGET_RELOCATABLE)
13898 flags |= SECTION_WRITE;
13899
13900 return flags;
13901 }
13902
13903 /* Record an element in the table of global constructors. SYMBOL is
13904 a SYMBOL_REF of the function to be called; PRIORITY is a number
13905 between 0 and MAX_INIT_PRIORITY.
13906
13907 This differs from default_named_section_asm_out_constructor in
13908 that we have special handling for -mrelocatable. */
13909
13910 static void
13911 rs6000_elf_asm_out_constructor (symbol, priority)
13912 rtx symbol;
13913 int priority;
13914 {
13915 const char *section = ".ctors";
13916 char buf[16];
13917
13918 if (priority != DEFAULT_INIT_PRIORITY)
13919 {
13920 sprintf (buf, ".ctors.%.5u",
13921 /* Invert the numbering so the linker puts us in the proper
13922 order; constructors are run from right to left, and the
13923 linker sorts in increasing order. */
13924 MAX_INIT_PRIORITY - priority);
13925 section = buf;
13926 }
13927
13928 named_section_flags (section, SECTION_WRITE);
13929 assemble_align (POINTER_SIZE);
13930
13931 if (TARGET_RELOCATABLE)
13932 {
13933 fputs ("\t.long (", asm_out_file);
13934 output_addr_const (asm_out_file, symbol);
13935 fputs (")@fixup\n", asm_out_file);
13936 }
13937 else
13938 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13939 }
13940
13941 static void
13942 rs6000_elf_asm_out_destructor (symbol, priority)
13943 rtx symbol;
13944 int priority;
13945 {
13946 const char *section = ".dtors";
13947 char buf[16];
13948
13949 if (priority != DEFAULT_INIT_PRIORITY)
13950 {
13951 sprintf (buf, ".dtors.%.5u",
13952 /* Invert the numbering so the linker puts us in the proper
13953 order; constructors are run from right to left, and the
13954 linker sorts in increasing order. */
13955 MAX_INIT_PRIORITY - priority);
13956 section = buf;
13957 }
13958
13959 named_section_flags (section, SECTION_WRITE);
13960 assemble_align (POINTER_SIZE);
13961
13962 if (TARGET_RELOCATABLE)
13963 {
13964 fputs ("\t.long (", asm_out_file);
13965 output_addr_const (asm_out_file, symbol);
13966 fputs (")@fixup\n", asm_out_file);
13967 }
13968 else
13969 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13970 }
13971
/* Emit the assembly that declares function NAME (whose FUNCTION_DECL
   is DECL) on ELF targets: the ".opd" function descriptor for 64-bit,
   an optional TOC fixup record for -mrelocatable, then the .type
   directive and the function label itself.  */
void
rs6000_elf_declare_function_name (file, name, decl)
     FILE *file;
     const char *name;
     tree decl;
{
  if (TARGET_64BIT)
    {
      /* 64-bit ELF: NAME labels the descriptor in .opd; the code
	 entry point is the dot-prefixed ".NAME".  */
      fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
      ASM_OUTPUT_LABEL (file, name);
      fputs (DOUBLE_INT_ASM_OP, file);
      putc ('.', file);
      assemble_name (file, name);
      fputs (",.TOC.@tocbase,0\n\t.previous\n\t.size\t", file);
      assemble_name (file, name);
      fputs (",24\n\t.type\t.", file);
      assemble_name (file, name);
      fputs (",@function\n", file);
      if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
	{
	  fputs ("\t.globl\t.", file);
	  assemble_name (file, name);
	  putc ('\n', file);
	}
      ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
      putc ('.', file);
      ASM_OUTPUT_LABEL (file, name);
      return;
    }

  /* -mrelocatable with a constant pool (or profiling) that uses the
     TOC: record the LCTOC-LCF delta for the startup fixup code.  */
  if (TARGET_RELOCATABLE
      && (get_pool_size () != 0 || current_function_profile)
      && uses_TOC())
    {
      char buf[256];

      (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);

      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
      fprintf (file, "\t.long ");
      assemble_name (file, buf);
      putc ('-', file);
      ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
      assemble_name (file, buf);
      putc ('\n', file);
    }

  ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
  ASM_DECLARE_RESULT (file, DECL_RESULT (decl));

  if (DEFAULT_ABI == ABI_AIX)
    {
      const char *desc_name, *orig_name;

      orig_name = (*targetm.strip_name_encoding) (name);
      desc_name = orig_name;
      while (*desc_name == '.')
	desc_name++;

      if (TREE_PUBLIC (decl))
	fprintf (file, "\t.globl %s\n", desc_name);

      fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
      fprintf (file, "%s:\n", desc_name);
      fprintf (file, "\t.long %s\n", orig_name);
      fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
      /* NOTE(review): this inner test is always true inside the outer
	 DEFAULT_ABI == ABI_AIX block -- likely a leftover from an
	 older, wider outer condition.  */
      if (DEFAULT_ABI == ABI_AIX)
	fputs ("\t.long 0\n", file);
      fprintf (file, "\t.previous\n");
    }
  ASM_OUTPUT_LABEL (file, name);
}
14044 #endif
14045
14046 #if TARGET_XCOFF
/* Emit the directive that makes NAME globally visible, stripping any
   XCOFF mapping suffix via RS6000_OUTPUT_BASENAME.  */
static void
rs6000_xcoff_asm_globalize_label (stream, name)
     FILE *stream;
     const char *name;
{
  fputs (GLOBAL_ASM_OP, stream);
  RS6000_OUTPUT_BASENAME (stream, name);
  putc ('\n', stream);
}
14056
14057 static void
14058 rs6000_xcoff_asm_named_section (name, flags)
14059 const char *name;
14060 unsigned int flags;
14061 {
14062 int smclass;
14063 static const char * const suffix[3] = { "PR", "RO", "RW" };
14064
14065 if (flags & SECTION_CODE)
14066 smclass = 0;
14067 else if (flags & SECTION_WRITE)
14068 smclass = 2;
14069 else
14070 smclass = 1;
14071
14072 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
14073 (flags & SECTION_CODE) ? "." : "",
14074 name, suffix[smclass], flags & SECTION_ENTSIZE);
14075 }
14076
14077 static void
14078 rs6000_xcoff_select_section (decl, reloc, align)
14079 tree decl;
14080 int reloc;
14081 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
14082 {
14083 if (decl_readonly_section_1 (decl, reloc, 1))
14084 {
14085 if (TREE_PUBLIC (decl))
14086 read_only_data_section ();
14087 else
14088 read_only_private_data_section ();
14089 }
14090 else
14091 {
14092 if (TREE_PUBLIC (decl))
14093 data_section ();
14094 else
14095 private_data_section ();
14096 }
14097 }
14098
14099 static void
14100 rs6000_xcoff_unique_section (decl, reloc)
14101 tree decl;
14102 int reloc ATTRIBUTE_UNUSED;
14103 {
14104 const char *name;
14105
14106 /* Use select_section for private and uninitialized data. */
14107 if (!TREE_PUBLIC (decl)
14108 || DECL_COMMON (decl)
14109 || DECL_INITIAL (decl) == NULL_TREE
14110 || DECL_INITIAL (decl) == error_mark_node
14111 || (flag_zero_initialized_in_bss
14112 && initializer_zerop (DECL_INITIAL (decl))))
14113 return;
14114
14115 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
14116 name = (*targetm.strip_name_encoding) (name);
14117 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
14118 }
14119
14120 /* Select section for constant in constant pool.
14121
14122 On RS/6000, all constants are in the private read-only data area.
14123 However, if this is being placed in the TOC it must be output as a
14124 toc entry. */
14125
14126 static void
14127 rs6000_xcoff_select_rtx_section (mode, x, align)
14128 enum machine_mode mode;
14129 rtx x;
14130 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
14131 {
14132 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
14133 toc_section ();
14134 else
14135 read_only_private_data_section ();
14136 }
14137
/* Remove any trailing [DS] or the like from the symbol name, along
   with a leading '*'.  Returns NAME itself when nothing needs
   stripping, otherwise a fresh GC-allocated copy without the
   four-character "[XX]" suffix.  */

static const char *
rs6000_xcoff_strip_name_encoding (name)
     const char *name;
{
  size_t len;
  if (*name == '*')
    name++;
  len = strlen (name);
  /* Require LEN >= 4: a shorter name cannot carry a "[XX]" suffix,
     name[len - 1] would read out of bounds for an empty name, and
     len - 4 would wrap around (size_t is unsigned).  */
  if (len >= 4 && name[len - 1] == ']')
    return ggc_alloc_string (name, len - 4);
  else
    return name;
}
14153
/* Section attributes.  AIX is always PIC (hence the trailing 1 passed
   to default_section_type_flags_1).  The section's log2 alignment is
   folded into the SECTION_ENTSIZE bits of the returned flags, where
   rs6000_xcoff_asm_named_section later retrieves it.  */

static unsigned int
rs6000_xcoff_section_type_flags (decl, name, reloc)
     tree decl;
     const char *name;
     int reloc;
{
  unsigned int align;
  unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);

  /* Align to at least UNIT size.  */
  if (flags & SECTION_CODE)
    align = MIN_UNITS_PER_WORD;
  else
    /* Increase alignment of large objects if not already stricter.
       NOTE(review): DECL is dereferenced here without a null check --
       presumably every non-code caller supplies a decl; confirm.  */
    align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
		 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
		 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);

  return flags | (exact_log2 (align) & SECTION_ENTSIZE);
}
14176
14177 /* Output at end of assembler file.
14178 On the RS/6000, referencing data should automatically pull in text. */
14179
14180 static void
14181 rs6000_xcoff_file_end ()
14182 {
14183 text_section ();
14184 fputs ("_section_.text:\n", asm_out_file);
14185 data_section ();
14186 fputs (TARGET_32BIT
14187 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
14188 asm_out_file);
14189 }
14190 #endif /* TARGET_XCOFF */
14191
14192 #if TARGET_MACHO
14193 /* Cross-module name binding. Darwin does not support overriding
14194 functions at dynamic-link time. */
14195
static bool
rs6000_binds_local_p (decl)
     tree decl;
{
  /* Delegate to the generic test with shlib == 0: per the comment
     above, Darwin never lets another module override a definition at
     dynamic-link time, so local definitions always bind locally.  */
  return default_binds_local_p_1 (decl, 0);
}
14202 #endif
14203
14204 /* Compute a (partial) cost for rtx X. Return true if the complete
14205 cost has been computed, and false if subexpressions should be
14206 scanned. In either case, *TOTAL contains the cost result. */
14207
/* Implementation of TARGET_RTX_COSTS for rs6000.  X is the expression
   being costed, CODE its rtx code.  Writes the cost (in
   COSTS_N_INSNS units) to *TOTAL and returns true when the cost is
   final; returns false to let the caller recurse into
   subexpressions.  Multiply and divide latencies are per-CPU.  */
static bool
rs6000_rtx_costs (x, code, outer_code, total)
     rtx x;
     int code, outer_code ATTRIBUTE_UNUSED;
     int *total;
{
  switch (code)
    {
      /* On the RS/6000, if it is valid in the insn, it is free.
	 So this always returns 0.  */
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case HIGH:
      *total = 0;
      return true;

    case PLUS:
      /* An add of a constant that does not fit in a signed 16-bit
	 immediate needs an extra addis, hence two insns.  */
      *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
		 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
					       + 0x8000) >= 0x10000)
		 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
		? COSTS_N_INSNS (2)
		: COSTS_N_INSNS (1));
      return true;

    case AND:
    case IOR:
    case XOR:
      /* A logical op whose constant has nonzero bits in both halves
	 needs two immediate instructions.  */
      *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
		 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
		 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
		? COSTS_N_INSNS (2)
		: COSTS_N_INSNS (1));
      return true;

    case MULT:
      if (optimize_size)
	{
	  *total = COSTS_N_INSNS (2);
	  return true;
	}
      /* Per-CPU multiply latencies; several cores multiply faster by
	 a small (8-bit-range) immediate.  */
      switch (rs6000_cpu)
	{
	case PROCESSOR_RIOS1:
	case PROCESSOR_PPC405:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? COSTS_N_INSNS (5)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
	  return true;

	case PROCESSOR_PPC440:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? COSTS_N_INSNS (3)
		    : COSTS_N_INSNS (2));
	  return true;

	case PROCESSOR_RS64A:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
	  return true;

	case PROCESSOR_RIOS2:
	case PROCESSOR_MPCCORE:
	case PROCESSOR_PPC604e:
	  *total = COSTS_N_INSNS (2);
	  return true;

	case PROCESSOR_PPC601:
	  *total = COSTS_N_INSNS (5);
	  return true;

	case PROCESSOR_PPC603:
	case PROCESSOR_PPC7400:
	case PROCESSOR_PPC750:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? COSTS_N_INSNS (5)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
	  return true;

	case PROCESSOR_PPC7450:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? COSTS_N_INSNS (4)
		    : COSTS_N_INSNS (3));
	  return true;

	case PROCESSOR_PPC403:
	case PROCESSOR_PPC604:
	case PROCESSOR_PPC8540:
	  *total = COSTS_N_INSNS (4);
	  return true;

	case PROCESSOR_PPC620:
	case PROCESSOR_PPC630:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
	  return true;

	case PROCESSOR_POWER4:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
		    : COSTS_N_INSNS (2));
	  return true;

	default:
	  abort ();
	}

    case DIV:
    case MOD:
      /* Signed division by a power of two is a short shift/fixup
	 sequence.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	{
	  *total = COSTS_N_INSNS (2);
	  return true;
	}
      /* FALLTHRU */

    case UDIV:
    case UMOD:
      /* Per-CPU divide latencies.  */
      switch (rs6000_cpu)
	{
	case PROCESSOR_RIOS1:
	  *total = COSTS_N_INSNS (19);
	  return true;

	case PROCESSOR_RIOS2:
	  *total = COSTS_N_INSNS (13);
	  return true;

	case PROCESSOR_RS64A:
	  *total = (GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (65)
		    : COSTS_N_INSNS (67));
	  return true;

	case PROCESSOR_MPCCORE:
	  *total = COSTS_N_INSNS (6);
	  return true;

	case PROCESSOR_PPC403:
	  *total = COSTS_N_INSNS (33);
	  return true;

	case PROCESSOR_PPC405:
	  *total = COSTS_N_INSNS (35);
	  return true;

	case PROCESSOR_PPC440:
	  *total = COSTS_N_INSNS (34);
	  return true;

	case PROCESSOR_PPC601:
	  *total = COSTS_N_INSNS (36);
	  return true;

	case PROCESSOR_PPC603:
	  *total = COSTS_N_INSNS (37);
	  return true;

	case PROCESSOR_PPC604:
	case PROCESSOR_PPC604e:
	  *total = COSTS_N_INSNS (20);
	  return true;

	case PROCESSOR_PPC620:
	case PROCESSOR_PPC630:
	  *total = (GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (21)
		    : COSTS_N_INSNS (37));
	  return true;

	case PROCESSOR_PPC750:
	case PROCESSOR_PPC8540:
	case PROCESSOR_PPC7400:
	  *total = COSTS_N_INSNS (19);
	  return true;

	case PROCESSOR_PPC7450:
	  *total = COSTS_N_INSNS (23);
	  return true;

	case PROCESSOR_POWER4:
	  *total = (GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (18)
		    : COSTS_N_INSNS (34));
	  return true;

	default:
	  abort ();
	}

    case FFS:
      *total = COSTS_N_INSNS (4);
      return true;

    case MEM:
      /* MEM should be slightly more expensive than (plus (reg) (const)) */
      *total = 5;
      return true;

    default:
      return false;
    }
}
14428
14429 /* A C expression returning the cost of moving data from a register of class
14430 CLASS1 to one of CLASS2. */
14431
14432 int
14433 rs6000_register_move_cost (mode, from, to)
14434 enum machine_mode mode;
14435 enum reg_class from, to;
14436 {
14437 /* Moves from/to GENERAL_REGS. */
14438 if (reg_classes_intersect_p (to, GENERAL_REGS)
14439 || reg_classes_intersect_p (from, GENERAL_REGS))
14440 {
14441 if (! reg_classes_intersect_p (to, GENERAL_REGS))
14442 from = to;
14443
14444 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
14445 return (rs6000_memory_move_cost (mode, from, 0)
14446 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
14447
14448 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift...*/
14449 else if (from == CR_REGS)
14450 return 4;
14451
14452 else
14453 /* A move will cost one instruction per GPR moved. */
14454 return 2 * HARD_REGNO_NREGS (0, mode);
14455 }
14456
14457 /* Moving between two similar registers is just one instruction. */
14458 else if (reg_classes_intersect_p (to, from))
14459 return mode == TFmode ? 4 : 2;
14460
14461 /* Everything else has to go through GENERAL_REGS. */
14462 else
14463 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
14464 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
14465 }
14466
14467 /* A C expressions returning the cost of moving data of MODE from a register to
14468 or from memory. */
14469
14470 int
14471 rs6000_memory_move_cost (mode, class, in)
14472 enum machine_mode mode;
14473 enum reg_class class;
14474 int in ATTRIBUTE_UNUSED;
14475 {
14476 if (reg_classes_intersect_p (class, GENERAL_REGS))
14477 return 4 * HARD_REGNO_NREGS (0, mode);
14478 else if (reg_classes_intersect_p (class, FLOAT_REGS))
14479 return 4 * HARD_REGNO_NREGS (32, mode);
14480 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
14481 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
14482 else
14483 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
14484 }
14485
/* Return an RTX representing where to find the function value of a
   function returning MODE, when MODE is a complex mode whose value is
   returned split across two consecutive registers.  */
static rtx
rs6000_complex_function_value (enum machine_mode mode)
{
  unsigned int regno;
  rtx r1, r2;
  enum machine_mode inner = GET_MODE_INNER (mode);

  if (FLOAT_MODE_P (mode))
    regno = FP_ARG_RETURN;
  else
    {
      regno = GP_ARG_RETURN;

      /* 32-bit is OK since it'll go in r3/r4.  */
      if (TARGET_32BIT
	  && GET_MODE_BITSIZE (inner) >= 32)
	return gen_rtx_REG (mode, regno);
    }

  /* Describe the two parts: the first at byte offset 0, the second at
     the inner mode's size.  */
  r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
			  const0_rtx);
  r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
			  GEN_INT (GET_MODE_UNIT_SIZE (inner)));
  return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
}
14513
14514 /* Define how to find the value returned by a function.
14515 VALTYPE is the data type of the value (as a tree).
14516 If the precise function being called is known, FUNC is its FUNCTION_DECL;
14517 otherwise, FUNC is 0.
14518
14519 On the SPE, both FPs and vectors are returned in r3.
14520
14521 On RS/6000 an integer value is in r3 and a floating-point value is in
14522 fp1, unless -msoft-float. */
14523
14524 rtx
14525 rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
14526 {
14527 enum machine_mode mode;
14528 unsigned int regno;
14529
14530 if ((INTEGRAL_TYPE_P (valtype)
14531 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
14532 || POINTER_TYPE_P (valtype))
14533 mode = word_mode;
14534 else
14535 mode = TYPE_MODE (valtype);
14536
14537 if (TREE_CODE (valtype) == REAL_TYPE && TARGET_HARD_FLOAT && TARGET_FPRS)
14538 regno = FP_ARG_RETURN;
14539 else if (TREE_CODE (valtype) == COMPLEX_TYPE
14540 && TARGET_HARD_FLOAT
14541 && SPLIT_COMPLEX_ARGS)
14542 return rs6000_complex_function_value (mode);
14543 else if (TREE_CODE (valtype) == VECTOR_TYPE && TARGET_ALTIVEC)
14544 regno = ALTIVEC_ARG_RETURN;
14545 else
14546 regno = GP_ARG_RETURN;
14547
14548 return gen_rtx_REG (mode, regno);
14549 }
14550
/* Define how to find the value returned by a library function
   assuming the value has mode MODE.  Parallels rs6000_function_value,
   but selects purely on the mode since no type is available.  */
rtx
rs6000_libcall_value (enum machine_mode mode)
{
  unsigned int regno;

  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && TARGET_HARD_FLOAT && TARGET_FPRS)
    regno = FP_ARG_RETURN;
  else if (ALTIVEC_VECTOR_MODE (mode))
    regno = ALTIVEC_ARG_RETURN;
  else if (COMPLEX_MODE_P (mode) && SPLIT_COMPLEX_ARGS)
    return rs6000_complex_function_value (mode);
  else
    regno = GP_ARG_RETURN;

  return gen_rtx_REG (mode, regno);
}
14570
14571 /* Return true if TYPE is of type __ev64_opaque__. */
14572
14573 static bool
14574 is_ev64_opaque_type (type)
14575 tree type;
14576 {
14577 return (TARGET_SPE
14578 && (type == opaque_V2SI_type_node
14579 || type == opaque_V2SF_type_node
14580 || type == opaque_p_V2SI_type_node
14581 || (TREE_CODE (type) == VECTOR_TYPE
14582 && TYPE_NAME (type)
14583 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
14584 && DECL_NAME (TYPE_NAME (type))
14585 && strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))),
14586 "__ev64_opaque__") == 0)));
14587 }
14588
/* For an SPE vector register REG, return a PARALLEL describing the
   two 32-bit pieces it spans for DWARF purposes; return NULL_RTX for
   anything else so the default single-register mapping is used.  */
static rtx
rs6000_dwarf_register_span (reg)
     rtx reg;
{
  unsigned regno;

  if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
    return NULL_RTX;

  regno = REGNO (reg);

  /* The duality of the SPE register size wreaks all kinds of havoc.
     This is a way of distinguishing r0 in 32-bits from r0 in
     64-bits.  (REGNO + 1200 presumably encodes the upper half of the
     64-bit GPR -- confirm against the SPE DWARF numbering.)  */
  return
    gen_rtx_PARALLEL (VOIDmode,
		      BYTES_BIG_ENDIAN
		      ? gen_rtvec (2,
				   gen_rtx_REG (SImode, regno + 1200),
				   gen_rtx_REG (SImode, regno))
		      : gen_rtvec (2,
				   gen_rtx_REG (SImode, regno),
				   gen_rtx_REG (SImode, regno + 1200)));
}
14613
14614 #include "gt-rs6000.h"