gcc.gnu.org Git - gcc.git/blob - gcc/config/rs6000/rs6000.c
rs6000.c (rs6000_stack_info): Only require a frame when debugging on XCOFF targets.
[gcc.git] / gcc / config / rs6000 / rs6000.c
1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
5
6 This file is part of GNU CC.
7
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
12
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
34 #include "flags.h"
35 #include "recog.h"
36 #include "obstack.h"
37 #include "tree.h"
38 #include "expr.h"
39 #include "optabs.h"
40 #include "except.h"
41 #include "function.h"
42 #include "output.h"
43 #include "basic-block.h"
44 #include "integrate.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "hashtab.h"
48 #include "tm_p.h"
49 #include "target.h"
50 #include "target-def.h"
51 #include "langhooks.h"
52 #include "reload.h"
53
54 #ifndef TARGET_NO_PROTOTYPE
55 #define TARGET_NO_PROTOTYPE 0
56 #endif
57
58 #define min(A,B) ((A) < (B) ? (A) : (B))
59 #define max(A,B) ((A) > (B) ? (A) : (B))
60
/* Target cpu type */

enum processor_type rs6000_cpu;

/* Cpu-selection switches, in priority order.  `string' is filled in
   from the command line (and from `default_cpu' in
   rs6000_override_options); the tune/arch columns say whether the
   switch selects the scheduling model, the architecture flags, or
   both.  Consumed by rs6000_override_options and rs6000_file_start.  */
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch name,	tune	arch */
  { (const char *)0,	"--with-cpu=",	1,	1 },
  { (const char *)0,	"-mcpu=",	1,	1 },
  { (const char *)0,	"-mtune=",	1,	0 },
};

/* Size of long double: raw -mlong-double-N text, and the validated
   size (64 or 128) derived from it in rs6000_override_options.  */
const char *rs6000_long_double_size_string;
int rs6000_long_double_type_size;

/* Whether -mabi=altivec has appeared */
int rs6000_altivec_abi;

/* Whether VRSAVE instructions should be generated.  */
int rs6000_altivec_vrsave;

/* String from -mvrsave= option.  */
const char *rs6000_altivec_vrsave_string;

/* Nonzero if we want SPE ABI extensions.  */
int rs6000_spe_abi;

/* Whether isel instructions should be generated.  */
int rs6000_isel;

/* Nonzero if we have FPRs.  */
int rs6000_fprs = 1;

/* String from -misel=.  */
const char *rs6000_isel_string;

/* Set to nonzero once AIX common-mode calls have been defined.  */
static GTY(()) int common_mode_defined;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx rs6000_compare_op0, rs6000_compare_op1;
int rs6000_compare_fp_p;

/* Label number of label created for -mrelocatable, to call to so we can
   get the address of the GOT section */
int rs6000_pic_labelno;

#ifdef USING_ELFOS_H
/* Which abi to adhere to */
const char *rs6000_abi_name = RS6000_ABI_NAME;

/* Semantics of the small data area */
enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;

/* Which small data model to use */
const char *rs6000_sdata_name = (char *)0;

/* Counter for labels which are to be placed in .fixup.  */
int fixuplabelno = 0;
#endif

/* ABI enumeration available for subtarget to use.  */
enum rs6000_abi rs6000_current_abi;

/* ABI string from -mabi= option.  */
const char *rs6000_abi_string;

/* Debug flags: raw -mdebug- text plus the two booleans parsed from
   it in rs6000_override_options.  */
const char *rs6000_debug_name;
int rs6000_debug_stack;		/* debug stack applications */
int rs6000_debug_arg;		/* debug argument handling */

/* Raw -mtraceback= text, parsed into rs6000_traceback below.  */
const char *rs6000_traceback_name;
static enum {
  traceback_default = 0,
  traceback_none,
  traceback_part,
  traceback_full
} rs6000_traceback;

/* Flag to say the TOC is initialized */
int toc_initialized;
/* Buffer for the TOC label, filled in via ASM_GENERATE_INTERNAL_LABEL
   in rs6000_override_options when TARGET_TOC.  */
char toc_label_name[10];

/* Alias set for saves and restores from the rs6000 stack.  */
static int rs6000_sr_alias_set;

/* Call distance, overridden by -mlongcall and #pragma longcall(1).
   The only place that looks at this is rs6000_set_default_type_attributes;
   everywhere else should rely on the presence or absence of a longcall
   attribute on the function declaration.  */
int rs6000_default_long_calls;
const char *rs6000_longcall_switch;
155
/* Description of one built-in function: the target_flags mask that
   must be enabled for it to exist, the insn that implements it, its
   user-visible name, and its enum identity.  */
struct builtin_description
{
  /* mask is not const because we're going to alter it below.  This
     nonsense will go away when we rewrite the -march infrastructure
     to give us more target flag bits.  */
  unsigned int mask;
  const enum insn_code icode;
  const char *const name;
  const enum rs6000_builtins code;
};
166
167 static bool rs6000_function_ok_for_sibcall PARAMS ((tree, tree));
168 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
169 static void validate_condition_mode
170 PARAMS ((enum rtx_code, enum machine_mode));
171 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
172 static void rs6000_maybe_dead PARAMS ((rtx));
173 static void rs6000_emit_stack_tie PARAMS ((void));
174 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
175 static rtx spe_synthesize_frame_save PARAMS ((rtx));
176 static bool spe_func_has_64bit_regs_p PARAMS ((void));
177 static void emit_frame_save PARAMS ((rtx, rtx, enum machine_mode,
178 unsigned int, int, int));
179 static rtx gen_frame_mem_offset PARAMS ((enum machine_mode, rtx, int));
180 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
181 static unsigned rs6000_hash_constant PARAMS ((rtx));
182 static unsigned toc_hash_function PARAMS ((const void *));
183 static int toc_hash_eq PARAMS ((const void *, const void *));
184 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
185 static struct machine_function * rs6000_init_machine_status PARAMS ((void));
186 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
187 #ifdef HAVE_GAS_HIDDEN
188 static void rs6000_assemble_visibility PARAMS ((tree, int));
189 #endif
190 static int rs6000_ra_ever_killed PARAMS ((void));
191 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
192 const struct attribute_spec rs6000_attribute_table[];
193 static void rs6000_set_default_type_attributes PARAMS ((tree));
194 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
195 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
196 static void rs6000_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT,
197 HOST_WIDE_INT, tree));
198 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
199 HOST_WIDE_INT, HOST_WIDE_INT));
200 #if TARGET_ELF
201 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
202 int));
203 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
204 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
205 static void rs6000_elf_select_section PARAMS ((tree, int,
206 unsigned HOST_WIDE_INT));
207 static void rs6000_elf_unique_section PARAMS ((tree, int));
208 static void rs6000_elf_select_rtx_section PARAMS ((enum machine_mode, rtx,
209 unsigned HOST_WIDE_INT));
210 static void rs6000_elf_encode_section_info PARAMS ((tree, int))
211 ATTRIBUTE_UNUSED;
212 static const char *rs6000_elf_strip_name_encoding PARAMS ((const char *));
213 static bool rs6000_elf_in_small_data_p PARAMS ((tree));
214 #endif
215 #if TARGET_XCOFF
216 static void rs6000_xcoff_asm_globalize_label PARAMS ((FILE *, const char *));
217 static void rs6000_xcoff_asm_named_section PARAMS ((const char *, unsigned int));
218 static void rs6000_xcoff_select_section PARAMS ((tree, int,
219 unsigned HOST_WIDE_INT));
220 static void rs6000_xcoff_unique_section PARAMS ((tree, int));
221 static void rs6000_xcoff_select_rtx_section PARAMS ((enum machine_mode, rtx,
222 unsigned HOST_WIDE_INT));
223 static const char * rs6000_xcoff_strip_name_encoding PARAMS ((const char *));
224 static unsigned int rs6000_xcoff_section_type_flags PARAMS ((tree, const char *, int));
225 static void rs6000_xcoff_encode_section_info PARAMS ((tree, int))
226 ATTRIBUTE_UNUSED;
227 #endif
228 #if TARGET_MACHO
229 static bool rs6000_binds_local_p PARAMS ((tree));
230 #endif
231 static int rs6000_use_dfa_pipeline_interface PARAMS ((void));
232 static int rs6000_variable_issue PARAMS ((FILE *, int, rtx, int));
233 static bool rs6000_rtx_costs PARAMS ((rtx, int, int, int *));
234 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
235 static int rs6000_adjust_priority PARAMS ((rtx, int));
236 static int rs6000_issue_rate PARAMS ((void));
237
238 static void rs6000_init_builtins PARAMS ((void));
239 static rtx rs6000_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
240 static rtx rs6000_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
241 static rtx rs6000_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
242 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
243 static void altivec_init_builtins PARAMS ((void));
244 static void rs6000_common_init_builtins PARAMS ((void));
245
246 static void enable_mask_for_builtins PARAMS ((struct builtin_description *,
247 int, enum rs6000_builtins,
248 enum rs6000_builtins));
249 static void spe_init_builtins PARAMS ((void));
250 static rtx spe_expand_builtin PARAMS ((tree, rtx, bool *));
251 static rtx spe_expand_predicate_builtin PARAMS ((enum insn_code, tree, rtx));
252 static rtx spe_expand_evsel_builtin PARAMS ((enum insn_code, tree, rtx));
253 static int rs6000_emit_int_cmove PARAMS ((rtx, rtx, rtx, rtx));
254
255 static rtx altivec_expand_builtin PARAMS ((tree, rtx, bool *));
256 static rtx altivec_expand_ld_builtin PARAMS ((tree, rtx, bool *));
257 static rtx altivec_expand_st_builtin PARAMS ((tree, rtx, bool *));
258 static rtx altivec_expand_dst_builtin PARAMS ((tree, rtx, bool *));
259 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
260 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
261 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
262 static void rs6000_parse_abi_options PARAMS ((void));
263 static void rs6000_parse_vrsave_option PARAMS ((void));
264 static void rs6000_parse_isel_option PARAMS ((void));
265 static int first_altivec_reg_to_save PARAMS ((void));
266 static unsigned int compute_vrsave_mask PARAMS ((void));
267 static void is_altivec_return_reg PARAMS ((rtx, void *));
268 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
269 static int easy_vector_constant PARAMS ((rtx));
270 static bool is_ev64_opaque_type PARAMS ((tree));
271 static rtx rs6000_dwarf_register_span PARAMS ((rtx));
272
/* Hash table stuff for keeping track of TOC entries.  */

struct toc_hash_struct GTY(())
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  /* Mode of KEY; part of the entry's identity.  */
  enum machine_mode key_mode;
  /* Internal label number assigned to this TOC entry.  */
  int labelno;
};

/* The one table of TOC entries, kept alive across garbage collections
   by the GTY param_is marker.  */
static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
285 \f
/* Default register names.  Order parallels alt_reg_names: 32 GPRs,
   32 FPRs, the special registers, 8 CRs, XER, then the AltiVec and
   SPE registers.  */
char rs6000_reg_names[][8] =
{
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "mq", "lr", "ctr","ap",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
    "xer",
  /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "vrsave", "vscr",
  /* SPE registers.  */
     "spe_acc", "spefscr"
};
309
#ifdef TARGET_REGNAMES
/* Alternate, %-prefixed register names; copied over rs6000_reg_names
   by rs6000_override_options when -mregnames is in effect.  */
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
  /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6", "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr"
};
#endif
334 \f
335 #ifndef MASK_STRICT_ALIGN
336 #define MASK_STRICT_ALIGN 0
337 #endif
338 #ifndef TARGET_PROFILE_KERNEL
339 #define TARGET_PROFILE_KERNEL 0
340 #endif
341
342 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
343 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
344 \f
345 /* Initialize the GCC target structure. */
346 #undef TARGET_ATTRIBUTE_TABLE
347 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
348 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
349 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
350
351 #undef TARGET_ASM_ALIGNED_DI_OP
352 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
353
354 /* Default unaligned ops are only provided for ELF. Find the ops needed
355 for non-ELF systems. */
356 #ifndef OBJECT_FORMAT_ELF
357 #if TARGET_XCOFF
358 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
359 64-bit targets. */
360 #undef TARGET_ASM_UNALIGNED_HI_OP
361 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
362 #undef TARGET_ASM_UNALIGNED_SI_OP
363 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
364 #undef TARGET_ASM_UNALIGNED_DI_OP
365 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
366 #else
367 /* For Darwin. */
368 #undef TARGET_ASM_UNALIGNED_HI_OP
369 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
370 #undef TARGET_ASM_UNALIGNED_SI_OP
371 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
372 #endif
373 #endif
374
375 /* This hook deals with fixups for relocatable code and DI-mode objects
376 in 64-bit code. */
377 #undef TARGET_ASM_INTEGER
378 #define TARGET_ASM_INTEGER rs6000_assemble_integer
379
380 #ifdef HAVE_GAS_HIDDEN
381 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
382 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
383 #endif
384
385 #undef TARGET_ASM_FUNCTION_PROLOGUE
386 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
387 #undef TARGET_ASM_FUNCTION_EPILOGUE
388 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
389
390 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
391 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
392 #undef TARGET_SCHED_VARIABLE_ISSUE
393 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
394
395 #undef TARGET_SCHED_ISSUE_RATE
396 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
397 #undef TARGET_SCHED_ADJUST_COST
398 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
399 #undef TARGET_SCHED_ADJUST_PRIORITY
400 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
401
402 #undef TARGET_INIT_BUILTINS
403 #define TARGET_INIT_BUILTINS rs6000_init_builtins
404
405 #undef TARGET_EXPAND_BUILTIN
406 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
407
408 #if TARGET_MACHO
409 #undef TARGET_BINDS_LOCAL_P
410 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
411 #endif
412
413 #undef TARGET_ASM_OUTPUT_MI_THUNK
414 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
415
416 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
417 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
418
419 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
420 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
421
422 #undef TARGET_RTX_COSTS
423 #define TARGET_RTX_COSTS rs6000_rtx_costs
424 #undef TARGET_ADDRESS_COST
425 #define TARGET_ADDRESS_COST hook_int_rtx_0
426
427 #undef TARGET_VECTOR_OPAQUE_P
428 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
429
430 #undef TARGET_DWARF_REGISTER_SPAN
431 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
432
/* The hook table through which the compiler core reaches this back
   end; built from the TARGET_* macro definitions above.  */
struct gcc_target targetm = TARGET_INITIALIZER;
434 \f
435 /* Override command line options. Mostly we process the processor
436 type and sometimes adjust other TARGET_ options. */
437
void
rs6000_override_options (default_cpu)
     const char *default_cpu;
{
  size_t i, j;
  struct rs6000_cpu_select *ptr;

  /* Simplify the entries below by making a mask for any POWER
     variant and any PowerPC variant.  */

#define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
#define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
		       | MASK_PPC_GFXOPT | MASK_POWERPC64)
#define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)

  /* Map from each canonical cpu name to its scheduling model and the
     target flags to turn on/off when that cpu is chosen as the
     architecture.  */
  static struct ptt
    {
      const char *const name;		/* Canonical processor name.  */
      const enum processor_type processor; /* Processor type enum value.  */
      const int target_enable;	/* Target flags to enable.  */
      const int target_disable;	/* Target flags to disable.  */
    } const processor_target_table[]
      = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_MASKS},
	 {"power", PROCESSOR_POWER,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"power2", PROCESSOR_POWER,
	    MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
	    POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"power3", PROCESSOR_PPC630,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS},
	 {"power4", PROCESSOR_POWER4,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS},
	 {"powerpc", PROCESSOR_POWERPC,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"powerpc64", PROCESSOR_POWERPC64,
	    MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS},
	 {"rios", PROCESSOR_RIOS1,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rios1", PROCESSOR_RIOS1,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rsc", PROCESSOR_PPC601,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rsc1", PROCESSOR_PPC601,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rios2", PROCESSOR_RIOS2,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
	    POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rs64a", PROCESSOR_RS64A,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS},
	 {"401", PROCESSOR_PPC403,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"403", PROCESSOR_PPC403,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"405", PROCESSOR_PPC405,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"405f", PROCESSOR_PPC405,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"505", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"601", PROCESSOR_PPC601,
	    MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"602", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"603", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"603e", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"ec603e", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"604", PROCESSOR_PPC604,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"604e", PROCESSOR_PPC604e,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"620", PROCESSOR_PPC620,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS},
	 {"630", PROCESSOR_PPC630,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS},
	 {"740", PROCESSOR_PPC750,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"750", PROCESSOR_PPC750,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"7400", PROCESSOR_PPC7400,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"7450", PROCESSOR_PPC7450,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"8540", PROCESSOR_PPC8540,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"801", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"821", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"823", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"860", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};

  const size_t ptt_size = ARRAY_SIZE (processor_target_table);

  /* Save current -mmultiple/-mno-multiple status.  */
  int multiple = TARGET_MULTIPLE;
  /* Save current -mstring/-mno-string status.  */
  int string = TARGET_STRING;

  /* Identify the processor type.  Start from the configured default,
     then let --with-cpu=, -mcpu= and -mtune= (in that order) refine
     the choice.  */
  rs6000_select[0].string = default_cpu;
  rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;

  for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
    {
      ptr = &rs6000_select[i];
      if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	{
	  for (j = 0; j < ptt_size; j++)
	    if (! strcmp (ptr->string, processor_target_table[j].name))
	      {
		/* The tune/arch columns say which aspects this switch
		   is allowed to set.  */
		if (ptr->set_tune_p)
		  rs6000_cpu = processor_target_table[j].processor;

		if (ptr->set_arch_p)
		  {
		    target_flags |= processor_target_table[j].target_enable;
		    target_flags &= ~processor_target_table[j].target_disable;
		  }
		break;
	      }

	  /* Fell off the end of the table: unknown cpu name.  */
	  if (j == ptt_size)
	    error ("bad value (%s) for %s switch", ptr->string, ptr->name);
	}
    }

  /* The 8540 (e500) gets isel by default; -misel= below may still
     override this.  */
  if (rs6000_cpu == PROCESSOR_PPC8540)
    rs6000_isel = 1;

  /* If we are optimizing big endian systems for space, use the load/store
     multiple and string instructions.  */
  if (BYTES_BIG_ENDIAN && optimize_size)
    target_flags |= MASK_MULTIPLE | MASK_STRING;

  /* If -mmultiple or -mno-multiple was explicitly used, don't
     override with the processor default */
  if ((target_flags_explicit & MASK_MULTIPLE) != 0)
    target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;

  /* If -mstring or -mno-string was explicitly used, don't override
     with the processor default.  */
  if ((target_flags_explicit & MASK_STRING) != 0)
    target_flags = (target_flags & ~MASK_STRING) | string;

  /* Don't allow -mmultiple or -mstring on little endian systems
     unless the cpu is a 750, because the hardware doesn't support the
     instructions used in little endian mode, and causes an alignment
     trap.  The 750 does not cause an alignment trap (except when the
     target is unaligned).  */

  if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
    {
      if (TARGET_MULTIPLE)
	{
	  target_flags &= ~MASK_MULTIPLE;
	  /* Only warn if the user asked for it explicitly.  */
	  if ((target_flags_explicit & MASK_MULTIPLE) != 0)
	    warning ("-mmultiple is not supported on little endian systems");
	}

      if (TARGET_STRING)
	{
	  target_flags &= ~MASK_STRING;
	  if ((target_flags_explicit & MASK_STRING) != 0)
	    warning ("-mstring is not supported on little endian systems");
	}
    }

  /* Set debug flags from the -mdebug- text.  */
  if (rs6000_debug_name)
    {
      if (! strcmp (rs6000_debug_name, "all"))
	rs6000_debug_stack = rs6000_debug_arg = 1;
      else if (! strcmp (rs6000_debug_name, "stack"))
	rs6000_debug_stack = 1;
      else if (! strcmp (rs6000_debug_name, "arg"))
	rs6000_debug_arg = 1;
      else
	error ("unknown -mdebug-%s switch", rs6000_debug_name);
    }

  /* Parse -mtraceback=; only a prefix is matched, so e.g. "partial"
     selects traceback_part and "none"/"no" select traceback_none.  */
  if (rs6000_traceback_name)
    {
      if (! strncmp (rs6000_traceback_name, "full", 4))
	rs6000_traceback = traceback_full;
      else if (! strncmp (rs6000_traceback_name, "part", 4))
	rs6000_traceback = traceback_part;
      else if (! strncmp (rs6000_traceback_name, "no", 2))
	rs6000_traceback = traceback_none;
      else
	error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
	       rs6000_traceback_name);
    }

  /* Set size of long double: default 64 bits, -mlong-double-64 or
     -mlong-double-128 may override.  */
  rs6000_long_double_type_size = 64;
  if (rs6000_long_double_size_string)
    {
      char *tail;
      int size = strtol (rs6000_long_double_size_string, &tail, 10);
      /* Reject trailing junk and any size other than 64 or 128.  */
      if (*tail != '\0' || (size != 64 && size != 128))
	error ("Unknown switch -mlong-double-%s",
	       rs6000_long_double_size_string);
      else
	rs6000_long_double_type_size = size;
    }

  /* Handle -mabi= options.  */
  rs6000_parse_abi_options ();

  /* Handle -mvrsave= option.  */
  rs6000_parse_vrsave_option ();

  /* Handle -misel= option.  */
  rs6000_parse_isel_option ();

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
  SUBSUBTARGET_OVERRIDE_OPTIONS;
#endif

  /* The e500 does not have string instructions, and we set
     MASK_STRING above when optimizing for size.  */
  if (rs6000_cpu == PROCESSOR_PPC8540 && (target_flags & MASK_STRING) != 0)
    target_flags = target_flags & ~MASK_STRING;

  /* Handle -m(no-)longcall option.  This is a bit of a cheap hack,
     using TARGET_OPTIONS to handle a toggle switch, but we're out of
     bits in target_flags so TARGET_SWITCHES cannot be used.
     Assumption here is that rs6000_longcall_switch points into the
     text of the complete option, rather than being a copy, so we can
     scan back for the presence or absence of the no- modifier.  */
  if (rs6000_longcall_switch)
    {
      const char *base = rs6000_longcall_switch;
      /* Back up to just past the 'm' of "-m...", so BASE names the
	 full option text ("longcall" or "no-longcall").  */
      while (base[-1] != 'm') base--;

      /* rs6000_longcall_switch is the text *after* the option name;
	 it must be empty since this is a pure toggle.  */
      if (*rs6000_longcall_switch != '\0')
	error ("invalid option `%s'", base);
      /* "longcall" => on; "no-longcall" starts with 'n' => off.  */
      rs6000_default_long_calls = (base[0] != 'n');
    }

#ifdef TARGET_REGNAMES
  /* If the user desires alternate register names, copy in the
     alternate names now.  */
  if (TARGET_REGNAMES)
    memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
#endif

  /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
     If -maix-struct-return or -msvr4-struct-return was explicitly
     used, don't override with the ABI default.  */
  if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
    {
      if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
	target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
      else
	target_flags |= MASK_AIX_STRUCT_RET;
    }

  /* 128-bit long double on AIX/Darwin is IBM extended (double-double)
     format, not IEEE quad.  */
  if (TARGET_LONG_DOUBLE_128
      && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
    real_format_for_mode[TFmode - QFmode] = &ibm_extended_format;

  /* Allocate an alias set for register saves & restores from stack.  */
  rs6000_sr_alias_set = new_alias_set ();

  if (TARGET_TOC)
    ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);

  /* We can only guarantee the availability of DI pseudo-ops when
     assembling for 64-bit targets.  */
  if (!TARGET_64BIT)
    {
      targetm.asm_out.aligned_op.di = NULL;
      targetm.asm_out.unaligned_op.di = NULL;
    }

  /* Set maximum branch target alignment at two instructions, eight bytes.  */
  align_jumps_max_skip = 8;
  align_loops_max_skip = 8;

  /* Arrange to save and restore machine status around nested functions.  */
  init_machine_status = rs6000_init_machine_status;
}
763
764 /* Handle -misel= option. */
765 static void
766 rs6000_parse_isel_option ()
767 {
768 if (rs6000_isel_string == 0)
769 return;
770 else if (! strcmp (rs6000_isel_string, "yes"))
771 rs6000_isel = 1;
772 else if (! strcmp (rs6000_isel_string, "no"))
773 rs6000_isel = 0;
774 else
775 error ("unknown -misel= option specified: '%s'",
776 rs6000_isel_string);
777 }
778
779 /* Handle -mvrsave= options. */
780 static void
781 rs6000_parse_vrsave_option ()
782 {
783 /* Generate VRSAVE instructions by default. */
784 if (rs6000_altivec_vrsave_string == 0
785 || ! strcmp (rs6000_altivec_vrsave_string, "yes"))
786 rs6000_altivec_vrsave = 1;
787 else if (! strcmp (rs6000_altivec_vrsave_string, "no"))
788 rs6000_altivec_vrsave = 0;
789 else
790 error ("unknown -mvrsave= option specified: '%s'",
791 rs6000_altivec_vrsave_string);
792 }
793
794 /* Handle -mabi= options. */
795 static void
796 rs6000_parse_abi_options ()
797 {
798 if (rs6000_abi_string == 0)
799 return;
800 else if (! strcmp (rs6000_abi_string, "altivec"))
801 rs6000_altivec_abi = 1;
802 else if (! strcmp (rs6000_abi_string, "no-altivec"))
803 rs6000_altivec_abi = 0;
804 else if (! strcmp (rs6000_abi_string, "spe"))
805 {
806 rs6000_spe_abi = 1;
807 if (!TARGET_SPE_ABI)
808 error ("not configured for ABI: '%s'", rs6000_abi_string);
809 }
810
811 else if (! strcmp (rs6000_abi_string, "no-spe"))
812 rs6000_spe_abi = 0;
813 else
814 error ("unknown ABI specified: '%s'", rs6000_abi_string);
815 }
816
817 void
818 optimization_options (level, size)
819 int level ATTRIBUTE_UNUSED;
820 int size ATTRIBUTE_UNUSED;
821 {
822 }
823 \f
/* Do anything needed at the start of the asm file.  With
   -fverbose-asm, emit a comment listing the cpu-selection and
   small-data options in effect.  */

void
rs6000_file_start (file, default_cpu)
     FILE *file;
     const char *default_cpu;
{
  size_t i;
  char buffer[80];
  const char *start = buffer;
  struct rs6000_cpu_select *ptr;

  if (flag_verbose_asm)
    {
      /* START initially points at the banner text below; after the
	 first item is printed it is reset to "", so the banner is
	 emitted at most once, and only if some option is printed.  */
      sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
      rs6000_select[0].string = default_cpu;

      /* Report every cpu-selection switch that was supplied.  */
      for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
	{
	  ptr = &rs6000_select[i];
	  if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	    {
	      fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
	      start = "";
	    }
	}

#ifdef USING_ELFOS_H
      /* On ELF targets also report the small-data model and the -G
	 size threshold.  */
      switch (rs6000_sdata)
	{
	case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
	case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
	case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
	case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
	}

      if (rs6000_sdata && g_switch_value)
	{
	  fprintf (file, "%s -G %d", start, g_switch_value);
	  start = "";
	}
#endif

      /* START is "" exactly when something was printed; terminate
	 the comment line in that case.  */
      if (*start == '\0')
	putc ('\n', file);
    }
}
871 \f
/* Return nonzero if this function is known to have a null epilogue,
   i.e. "blr" alone suffices to return.  Only meaningful after reload,
   when the stack layout is final.  */

int
direct_return ()
{
  if (reload_completed)
    {
      rs6000_stack_t *info = rs6000_stack_info ();

      /* No GPRs, FPRs, or AltiVec registers saved, LR and CR need no
	 restoring, no VRSAVE bits live, and no frame was pushed.  */
      if (info->first_gp_reg_save == 32
	  && info->first_fp_reg_save == 64
	  && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
	  && ! info->lr_save_p
	  && ! info->cr_save_p
	  && info->vrsave_mask == 0
	  && ! info->push_p)
	return 1;
    }

  return 0;
}
893
/* Predicate that accepts any operand whatsoever.  Returns 1 always.  */

int
any_operand (op, mode)
     rtx op ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return 1;
}
903
904 /* Returns 1 if op is the count register. */
905 int
906 count_register_operand (op, mode)
907 rtx op;
908 enum machine_mode mode ATTRIBUTE_UNUSED;
909 {
910 if (GET_CODE (op) != REG)
911 return 0;
912
913 if (REGNO (op) == COUNT_REGISTER_REGNUM)
914 return 1;
915
916 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
917 return 1;
918
919 return 0;
920 }
921
922 /* Returns 1 if op is an altivec register. */
923 int
924 altivec_register_operand (op, mode)
925 rtx op;
926 enum machine_mode mode ATTRIBUTE_UNUSED;
927 {
928
929 return (register_operand (op, mode)
930 && (GET_CODE (op) != REG
931 || REGNO (op) > FIRST_PSEUDO_REGISTER
932 || ALTIVEC_REGNO_P (REGNO (op))));
933 }
934
935 int
936 xer_operand (op, mode)
937 rtx op;
938 enum machine_mode mode ATTRIBUTE_UNUSED;
939 {
940 if (GET_CODE (op) != REG)
941 return 0;
942
943 if (XER_REGNO_P (REGNO (op)))
944 return 1;
945
946 return 0;
947 }
948
949 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
950 by such constants completes more quickly. */
951
952 int
953 s8bit_cint_operand (op, mode)
954 rtx op;
955 enum machine_mode mode ATTRIBUTE_UNUSED;
956 {
957 return ( GET_CODE (op) == CONST_INT
958 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
959 }
960
961 /* Return 1 if OP is a constant that can fit in a D field. */
962
963 int
964 short_cint_operand (op, mode)
965 rtx op;
966 enum machine_mode mode ATTRIBUTE_UNUSED;
967 {
968 return (GET_CODE (op) == CONST_INT
969 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
970 }
971
972 /* Similar for an unsigned D field. */
973
974 int
975 u_short_cint_operand (op, mode)
976 rtx op;
977 enum machine_mode mode ATTRIBUTE_UNUSED;
978 {
979 return (GET_CODE (op) == CONST_INT
980 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
981 }
982
/* Return 1 if OP is a CONST_INT that cannot fit in a signed D field.  */

int
non_short_cint_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  /* Adding 0x8000 maps the signed 16-bit range [-0x8000, 0x7fff] onto
     [0, 0xffff], so one unsigned compare detects everything outside it.  */
  return (GET_CODE (op) == CONST_INT
	  && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
}
993
994 /* Returns 1 if OP is a CONST_INT that is a positive value
995 and an exact power of 2. */
996
997 int
998 exact_log2_cint_operand (op, mode)
999 rtx op;
1000 enum machine_mode mode ATTRIBUTE_UNUSED;
1001 {
1002 return (GET_CODE (op) == CONST_INT
1003 && INTVAL (op) > 0
1004 && exact_log2 (INTVAL (op)) >= 0);
1005 }
1006
/* Returns 1 if OP is a register that is not special (i.e., not MQ,
   ctr, or lr).  */

int
gpc_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Accept hard registers below MQ_REGNO, or anything numbered from
     ARG_POINTER_REGNUM upward (pseudos included) that is not an XER
     register.  The middle range presumably holds the special registers
     being excluded — confirm against this port's register layout.  */
  return (register_operand (op, mode)
	  && (GET_CODE (op) != REG
	      || (REGNO (op) >= ARG_POINTER_REGNUM
		  && !XER_REGNO_P (REGNO (op)))
	      || REGNO (op) < MQ_REGNO));
}
1021
1022 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1023 CR field. */
1024
1025 int
1026 cc_reg_operand (op, mode)
1027 rtx op;
1028 enum machine_mode mode;
1029 {
1030 return (register_operand (op, mode)
1031 && (GET_CODE (op) != REG
1032 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1033 || CR_REGNO_P (REGNO (op))));
1034 }
1035
1036 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1037 CR field that isn't CR0. */
1038
1039 int
1040 cc_reg_not_cr0_operand (op, mode)
1041 rtx op;
1042 enum machine_mode mode;
1043 {
1044 return (register_operand (op, mode)
1045 && (GET_CODE (op) != REG
1046 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1047 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1048 }
1049
1050 /* Returns 1 if OP is either a constant integer valid for a D-field or
1051 a non-special register. If a register, it must be in the proper
1052 mode unless MODE is VOIDmode. */
1053
1054 int
1055 reg_or_short_operand (op, mode)
1056 rtx op;
1057 enum machine_mode mode;
1058 {
1059 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1060 }
1061
1062 /* Similar, except check if the negation of the constant would be
1063 valid for a D-field. */
1064
1065 int
1066 reg_or_neg_short_operand (op, mode)
1067 rtx op;
1068 enum machine_mode mode;
1069 {
1070 if (GET_CODE (op) == CONST_INT)
1071 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1072
1073 return gpc_reg_operand (op, mode);
1074 }
1075
1076 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1077 a non-special register. If a register, it must be in the proper
1078 mode unless MODE is VOIDmode. */
1079
1080 int
1081 reg_or_aligned_short_operand (op, mode)
1082 rtx op;
1083 enum machine_mode mode;
1084 {
1085 if (gpc_reg_operand (op, mode))
1086 return 1;
1087 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1088 return 1;
1089
1090 return 0;
1091 }
1092
1093
1094 /* Return 1 if the operand is either a register or an integer whose
1095 high-order 16 bits are zero. */
1096
1097 int
1098 reg_or_u_short_operand (op, mode)
1099 rtx op;
1100 enum machine_mode mode;
1101 {
1102 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1103 }
1104
1105 /* Return 1 is the operand is either a non-special register or ANY
1106 constant integer. */
1107
1108 int
1109 reg_or_cint_operand (op, mode)
1110 rtx op;
1111 enum machine_mode mode;
1112 {
1113 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1114 }
1115
/* Return 1 if the operand is either a non-special register or ANY
   32-bit signed constant integer.  */

int
reg_or_arith_cint_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* On a 32-bit host every CONST_INT already fits in 32 bits, so the
     range check is only compiled in for wider hosts.  The +0x80000000
     bias maps the valid signed 32-bit range onto [0, 2^32).  */
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT != 32
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
		  < (unsigned HOST_WIDE_INT) 0x100000000ll)
#endif
	      ));
}
1132
/* Return 1 if the operand is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit addition.  */

int
reg_or_add_cint64_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* The bias 0x80008000 accounts for sign-carry between the two
     16-bit halves — presumably so the constant can be added as a
     high/low instruction pair; confirm against the adddi64 patterns.  */
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT == 32
	      && INTVAL (op) < 0x7fff8000
#else
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
1151
/* Return 1 if the operand is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit subtraction (i.e. whose
   negation is valid for 64-bit addition — same bound as
   reg_or_add_cint64_operand, applied to -INTVAL).  */

int
reg_or_sub_cint64_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT == 32
	      && (- INTVAL (op)) < 0x7fff8000
#else
	      && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
1170
/* Return 1 if the operand is either a non-special register or ANY
   32-bit unsigned constant integer.  */

int
reg_or_logical_cint_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	{
	  /* Sanity check: this branch only makes sense for modes
	     wider than 32 bits.  */
	  if (GET_MODE_BITSIZE (mode) <= 32)
	    abort ();

	  /* A negative CONST_INT sign-extends to nonzero high bits.  */
	  if (INTVAL (op) < 0)
	    return 0;
	}

      /* Accept only values with no bits set above the low 32.  */
      return ((INTVAL (op) & GET_MODE_MASK (mode)
	       & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* An integral CONST_DOUBLE only arises when the value does not
	 fit in a host word, which here must mean DImode.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  || mode != DImode)
	abort ();

      return CONST_DOUBLE_HIGH (op) == 0;
    }
  else
    return gpc_reg_operand (op, mode);
}
1204
1205 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1206
1207 int
1208 got_operand (op, mode)
1209 rtx op;
1210 enum machine_mode mode ATTRIBUTE_UNUSED;
1211 {
1212 return (GET_CODE (op) == SYMBOL_REF
1213 || GET_CODE (op) == CONST
1214 || GET_CODE (op) == LABEL_REF);
1215 }
1216
1217 /* Return 1 if the operand is a simple references that can be loaded via
1218 the GOT (labels involving addition aren't allowed). */
1219
1220 int
1221 got_no_const_operand (op, mode)
1222 rtx op;
1223 enum machine_mode mode ATTRIBUTE_UNUSED;
1224 {
1225 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1226 }
1227
/* Return the number of instructions it takes to form the constant
   VALUE in an integer register.  */

static int
num_insns_constant_wide (value)
     HOST_WIDE_INT value;
{
  /* signed constant loadable with {cal|addi} */
  if (CONST_OK_FOR_LETTER_P (value, 'I'))
    return 1;

  /* constant loadable with {cau|addis} */
  else if (CONST_OK_FOR_LETTER_P (value, 'L'))
    return 1;

#if HOST_BITS_PER_WIDE_INT == 64
  else if (TARGET_POWERPC64)
    {
      /* Split VALUE into a sign-extended low 32 bits and the remaining
	 high part; the "+ 1" terms account for the insn that combines
	 the halves.  */
      HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
      HOST_WIDE_INT high = value >> 31;

      /* Values whose top half is all zeros or all ones need only a
	 two-insn load of the (sign-extended) low part.  */
      if (high == 0 || high == -1)
	return 2;

      high >>= 1;

      if (low == 0)
	return num_insns_constant_wide (high) + 1;
      else
	return (num_insns_constant_wide (high)
		+ num_insns_constant_wide (low) + 1);
    }
#endif

  else
    return 2;
}
1265
/* Return the number of instructions needed to form the constant OP
   (a CONST_INT or integral/floating CONST_DOUBLE) of mode MODE in an
   integer register.  */

int
num_insns_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
#if HOST_BITS_PER_WIDE_INT == 64
      /* A genuinely 64-bit value that is a recognizable mask can be
	 formed in two instructions.  */
      if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
	  && mask64_operand (op, mode))
	return 2;
      else
#endif
	return num_insns_constant_wide (INTVAL (op));
    }

  else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
    {
      /* Single-precision constants are loaded via their 32-bit image.  */
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);
      return num_insns_constant_wide ((HOST_WIDE_INT) l);
    }

  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      HOST_WIDE_INT low;
      HOST_WIDE_INT high;
      long l[2];
      REAL_VALUE_TYPE rv;
      int endian = (WORDS_BIG_ENDIAN == 0);

      /* VOIDmode/DImode CONST_DOUBLEs carry the integer value in their
	 low/high fields; anything else is a floating image to decode.  */
      if (mode == VOIDmode || mode == DImode)
	{
	  high = CONST_DOUBLE_HIGH (op);
	  low = CONST_DOUBLE_LOW (op);
	}
      else
	{
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
	  REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
	  high = l[endian];
	  low = l[1 - endian];
	}

      /* In 32-bit mode the two words go into a register pair.  */
      if (TARGET_32BIT)
	return (num_insns_constant_wide (low)
		+ num_insns_constant_wide (high));

      else
	{
	  /* Sign-extended 32-bit values need only the low-part load.  */
	  if (high == 0 && low >= 0)
	    return num_insns_constant_wide (low);

	  else if (high == -1 && low < 0)
	    return num_insns_constant_wide (low);

	  else if (mask64_operand (op, mode))
	    return 2;

	  else if (low == 0)
	    return num_insns_constant_wide (high) + 1;

	  else
	    return (num_insns_constant_wide (high)
		    + num_insns_constant_wide (low) + 1);
	}
    }

  else
    abort ();
}
1340
/* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
   register with one instruction per word.  We only do this if we can
   safely read CONST_DOUBLE_{LOW,HIGH}.  */

int
easy_fp_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != CONST_DOUBLE
      || GET_MODE (op) != mode
      || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
    return 0;

  /* Consider all constants with -msoft-float to be easy.  */
  if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
      && mode != DImode)
    return 1;

  /* If we are using V.4 style PIC, consider all constants to be hard.  */
  if (flag_pic && DEFAULT_ABI == ABI_V4)
    return 0;

#ifdef TARGET_RELOCATABLE
  /* Similarly if we are using -mrelocatable, consider all constants
     to be hard.  */
  if (TARGET_RELOCATABLE)
    return 0;
#endif

  if (mode == TFmode)
    {
      /* Easy if each of the four 32-bit words loads in one insn.  */
      long k[4];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
    }

  else if (mode == DFmode)
    {
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
    }

  else if (mode == SFmode)
    {
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      return num_insns_constant_wide (l) == 1;
    }

  else if (mode == DImode)
    /* NOTE(review): the CONST_DOUBLE re-check is redundant — it was
       already established at the top of the function.  */
    return ((TARGET_POWERPC64
	     && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
	    || (num_insns_constant (op, DImode) <= 2));

  else if (mode == SImode)
    return 1;
  else
    abort ();
}
1418
/* Return 1 if the operand is a CONST_VECTOR that can be put into a
   register with one instruction.  Currently only the all-zeros vector
   qualifies.  (The old comment said CONST_INT, which was wrong.)  */

static int
easy_vector_constant (op)
     rtx op;
{
  rtx elt;
  int units, i;

  if (GET_CODE (op) != CONST_VECTOR)
    return 0;

  units = CONST_VECTOR_NUNITS (op);

  /* We can generate 0 easily.  Look for that.  */
  for (i = 0; i < units; ++i)
    {
      elt = CONST_VECTOR_ELT (op, i);

      /* We could probably simplify this by just checking for equality
	 with CONST0_RTX for the current mode, but let's be safe
	 instead.  */

      switch (GET_CODE (elt))
	{
	case CONST_INT:
	  if (INTVAL (elt) != 0)
	    return 0;
	  break;
	case CONST_DOUBLE:
	  if (CONST_DOUBLE_LOW (elt) != 0 || CONST_DOUBLE_HIGH (elt) != 0)
	    return 0;
	  break;
	default:
	  return 0;
	}
    }

  /* We could probably generate a few other constants trivially, but
     gcc doesn't generate them yet.  FIXME later.  */
  return 1;
}
1462
/* Return 1 if the operand is the constant 0.  This works for scalars
   as well as vectors.  Relies on CONST0_RTX rtxes being shared, so
   pointer equality suffices.  */
int
zero_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return op == CONST0_RTX (mode);
}
1472
1473 /* Return 1 if the operand is 0.0. */
1474 int
1475 zero_fp_constant (op, mode)
1476 rtx op;
1477 enum machine_mode mode;
1478 {
1479 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1480 }
1481
/* Return 1 if the operand is in volatile memory.  Note that during
   the RTL generation phase, memory_operand does not return TRUE for
   volatile memory references.  So this function allows us to
   recognize volatile references where its safe.  */

int
volatile_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != MEM)
    return 0;

  if (!MEM_VOLATILE_P (op))
    return 0;

  if (mode != GET_MODE (op))
    return 0;

  /* After reload the normal predicate is safe to use.  */
  if (reload_completed)
    return memory_operand (op, mode);

  /* During reload only strictly valid (hard-register) addresses are
     acceptable.  */
  if (reload_in_progress)
    return strict_memory_address_p (mode, XEXP (op, 0));

  /* Before reload, accept any address that is valid at all.  */
  return memory_address_p (mode, XEXP (op, 0));
}
1509
1510 /* Return 1 if the operand is an offsettable memory operand. */
1511
1512 int
1513 offsettable_mem_operand (op, mode)
1514 rtx op;
1515 enum machine_mode mode;
1516 {
1517 return ((GET_CODE (op) == MEM)
1518 && offsettable_address_p (reload_completed || reload_in_progress,
1519 mode, XEXP (op, 0)));
1520 }
1521
1522 /* Return 1 if the operand is either an easy FP constant (see above) or
1523 memory. */
1524
1525 int
1526 mem_or_easy_const_operand (op, mode)
1527 rtx op;
1528 enum machine_mode mode;
1529 {
1530 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1531 }
1532
1533 /* Return 1 if the operand is either a non-special register or an item
1534 that can be used as the operand of a `mode' add insn. */
1535
1536 int
1537 add_operand (op, mode)
1538 rtx op;
1539 enum machine_mode mode;
1540 {
1541 if (GET_CODE (op) == CONST_INT)
1542 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1543 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1544
1545 return gpc_reg_operand (op, mode);
1546 }
1547
1548 /* Return 1 if OP is a constant but not a valid add_operand. */
1549
1550 int
1551 non_add_cint_operand (op, mode)
1552 rtx op;
1553 enum machine_mode mode ATTRIBUTE_UNUSED;
1554 {
1555 return (GET_CODE (op) == CONST_INT
1556 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1557 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1558 }
1559
/* Return 1 if the operand is a non-special register or a constant that
   can be used as the operand of an OR or XOR insn on the RS/6000:
   all set bits must fit entirely in either the low halfword or the
   next halfword (see the final test).  */

int
logical_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  HOST_WIDE_INT opl, oph;

  if (gpc_reg_operand (op, mode))
    return 1;

  if (GET_CODE (op) == CONST_INT)
    {
      opl = INTVAL (op) & GET_MODE_MASK (mode);

#if HOST_BITS_PER_WIDE_INT <= 32
      /* A mode wider than the host word with the sign bit set implies
	 nonzero high-part bits we cannot inspect; reject it.  */
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
	return 0;
#endif
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	abort ();

      opl = CONST_DOUBLE_LOW (op);
      oph = CONST_DOUBLE_HIGH (op);
      if (oph != 0)
	return 0;
    }
  else
    return 0;

  /* Accept values whose set bits lie wholly within bits 0-15 or
     wholly within bits 16-31.  */
  return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
	  || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
}
1598
1599 /* Return 1 if C is a constant that is not a logical operand (as
1600 above), but could be split into one. */
1601
1602 int
1603 non_logical_cint_operand (op, mode)
1604 rtx op;
1605 enum machine_mode mode;
1606 {
1607 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1608 && ! logical_operand (op, mode)
1609 && reg_or_logical_cint_operand (op, mode));
1610 }
1611
/* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
   RS/6000.  It is if there are no more than two 1->0 or 0->1 transitions.
   Reject all ones and all zeros, since these should have been optimized
   away and confuse the making of MB and ME.  */

int
mask_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Fail in 64-bit mode if the mask wraps around because the upper
     32-bits of the mask will all be 1s, contrary to GCC's internal view.  */
  if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
    return 0;

  /* We don't change the number of transitions by inverting,
     so make sure we start with the LS bit zero.  */
  if (c & 1)
    c = ~c;

  /* Reject all zeros or all ones.  */
  if (c == 0)
    return 0;

  /* Find the first transition.  */
  lsb = c & -c;

  /* Invert to look for a second transition.  */
  c = ~c;

  /* Erase first transition.  */
  c &= -lsb;

  /* Find the second transition (if any).  */
  lsb = c & -c;

  /* Match if all the bits above are 1's (or c is zero).  */
  return c == -lsb;
}
1658
/* Return 1 for the PowerPC64 rlwinm corner case: a mask whose ones
   wrap around through bit 0, i.e. both bit 31 and bit 0 of the low
   word are set (the case mask_operand rejects above).  */

int
mask_operand_wrap (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Only wrapping masks (bit 31 and bit 0 both set) belong here.  */
  if ((c & 0x80000001) != 0x80000001)
    return 0;

  /* Invert so the single allowed run of zeros becomes a run of ones;
     reject all-ones (inverts to zero).  */
  c = ~c;
  if (c == 0)
    return 0;

  /* Locate the first transition, erase it, and require that no second
     run remains: match if all bits above the (possibly absent) second
     transition are ones.  */
  lsb = c & -c;
  c = ~c;
  c &= -lsb;
  lsb = c & -c;
  return c == -lsb;
}
1686
/* Return 1 if the operand is a constant that is a PowerPC64 mask.
   It is if there are no more than one 1->0 or 0->1 transitions.
   Reject all zeros, since zero should have been optimized away and
   confuses the making of MB and ME.  */

int
mask64_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb;

      c = INTVAL (op);

      /* Reject all zeros.  */
      if (c == 0)
	return 0;

      /* We don't change the number of transitions by inverting,
	 so make sure we start with the LS bit zero.  */
      if (c & 1)
	c = ~c;

      /* Find the transition, and check that all bits above are 1's.  */
      lsb = c & -c;

      /* Match if all the bits above are 1's (or c is zero).  */
      return c == -lsb;
    }
  return 0;
}
1720
/* Like mask64_operand, but allow up to three transitions.  This
   predicate is used by insn patterns that generate two rldicl or
   rldicr machine insns.  */

int
mask64_2_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb;

      c = INTVAL (op);

      /* Disallow all zeros.  */
      if (c == 0)
	return 0;

      /* We don't change the number of transitions by inverting,
	 so make sure we start with the LS bit zero.  */
      if (c & 1)
	c = ~c;

      /* Find the first transition.  */
      lsb = c & -c;

      /* Invert to look for a second transition.  */
      c = ~c;

      /* Erase first transition.  */
      c &= -lsb;

      /* Find the second transition.  */
      lsb = c & -c;

      /* Invert to look for a third transition.  */
      c = ~c;

      /* Erase second transition.  */
      c &= -lsb;

      /* Find the third transition (if any).  */
      lsb = c & -c;

      /* Match if all the bits above are 1's (or c is zero).  */
      return c == -lsb;
    }
  return 0;
}
1771
/* Generates shifts and masks for a pair of rldicl or rldicr insns to
   implement ANDing by the mask IN.  OUT receives four rtxes:
   OUT[0]/OUT[2] are the two rotate counts and OUT[1]/OUT[3] the two
   masks.  Only usable when the host supports 64-bit arithmetic;
   aborts otherwise, and if IN is not a CONST_INT.  */
void
build_mask64_2_operands (in, out)
     rtx in;
     rtx *out;
{
#if HOST_BITS_PER_WIDE_INT >= 64
  unsigned HOST_WIDE_INT c, lsb, m1, m2;
  int shift;

  if (GET_CODE (in) != CONST_INT)
    abort ();

  c = INTVAL (in);
  if (c & 1)
    {
      /* Assume c initially something like 0x00fff000000fffff.  The idea
	 is to rotate the word so that the middle ^^^^^^ group of zeros
	 is at the MS end and can be cleared with an rldicl mask.  We then
	 rotate back and clear off the MS        ^^ group of zeros with a
	 second rldicl.  */
      c = ~c;			/*   c == 0xff000ffffff00000 */
      lsb = c & -c;		/* lsb == 0x0000000000100000 */
      m1 = -lsb;		/*  m1 == 0xfffffffffff00000 */
      c = ~c;			/*   c == 0x00fff000000fffff */
      c &= -lsb;		/*   c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/*   c == 0xff000fffffffffff */
      c &= -lsb;		/*   c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 <<= 64 - shift;	/*  m1 == 0xffffff0000000000 */
      m1 = ~m1;			/*  m1 == 0x000000ffffffffff */
      m2 = ~c;			/*  m2 == 0x00ffffffffffffff */
    }
  else
    {
      /* Assume c initially something like 0xff000f0000000000.  The idea
	 is to rotate the word so that the     ^^^  middle group of zeros
	 is at the LS end and can be cleared with an rldicr mask.  We then
	 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
	 a second rldicr.  */
      lsb = c & -c;		/* lsb == 0x0000010000000000 */
      m2 = -lsb;		/*  m2 == 0xffffff0000000000 */
      c = ~c;			/*   c == 0x00fff0ffffffffff */
      c &= -lsb;		/*   c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/*   c == 0xff000fffffffffff */
      c &= -lsb;		/*   c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 = ~c;			/*  m1 == 0x00ffffffffffffff */
      m1 >>= shift;		/*  m1 == 0x0000000000000fff */
      m1 = ~m1;			/*  m1 == 0xfffffffffffff000 */
    }

  /* Note that when we only have two 0->1 and 1->0 transitions, one of the
     masks will be all 1's.  We are guaranteed more than one transition.  */
  out[0] = GEN_INT (64 - shift);
  out[1] = GEN_INT (m1);
  out[2] = GEN_INT (shift);
  out[3] = GEN_INT (m2);
#else
  (void)in;
  (void)out;
  abort ();
#endif
}
1843
1844 /* Return 1 if the operand is either a non-special register or a constant
1845 that can be used as the operand of a PowerPC64 logical AND insn. */
1846
1847 int
1848 and64_operand (op, mode)
1849 rtx op;
1850 enum machine_mode mode;
1851 {
1852 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1853 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1854
1855 return (logical_operand (op, mode) || mask64_operand (op, mode));
1856 }
1857
1858 /* Like the above, but also match constants that can be implemented
1859 with two rldicl or rldicr insns. */
1860
1861 int
1862 and64_2_operand (op, mode)
1863 rtx op;
1864 enum machine_mode mode;
1865 {
1866 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1867 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
1868
1869 return logical_operand (op, mode) || mask64_2_operand (op, mode);
1870 }
1871
1872 /* Return 1 if the operand is either a non-special register or a
1873 constant that can be used as the operand of an RS/6000 logical AND insn. */
1874
1875 int
1876 and_operand (op, mode)
1877 rtx op;
1878 enum machine_mode mode;
1879 {
1880 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1881 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1882
1883 return (logical_operand (op, mode) || mask_operand (op, mode));
1884 }
1885
1886 /* Return 1 if the operand is a general register or memory operand. */
1887
1888 int
1889 reg_or_mem_operand (op, mode)
1890 rtx op;
1891 enum machine_mode mode;
1892 {
1893 return (gpc_reg_operand (op, mode)
1894 || memory_operand (op, mode)
1895 || volatile_mem_operand (op, mode));
1896 }
1897
/* Return 1 if the operand is a general register or memory operand without
   pre_inc or pre_dec which produces invalid form of PowerPC lwa
   instruction.  */

int
lwa_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  rtx inner = op;

  /* After reload, look through a SUBREG to the underlying register.  */
  if (reload_completed && GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  /* lwa has no update forms and requires a displacement that is a
     multiple of 4, so reject PRE_INC/PRE_DEC addresses and reg+offset
     addresses with a misaligned offset.  */
  return gpc_reg_operand (inner, mode)
    || (memory_operand (inner, mode)
	&& GET_CODE (XEXP (inner, 0)) != PRE_INC
	&& GET_CODE (XEXP (inner, 0)) != PRE_DEC
	&& (GET_CODE (XEXP (inner, 0)) != PLUS
	    || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
	    || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
}
1920
1921 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
1922
1923 int
1924 symbol_ref_operand (op, mode)
1925 rtx op;
1926 enum machine_mode mode;
1927 {
1928 if (mode != VOIDmode && GET_MODE (op) != mode)
1929 return 0;
1930
1931 return (GET_CODE (op) == SYMBOL_REF);
1932 }
1933
1934 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1935 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
1936
1937 int
1938 call_operand (op, mode)
1939 rtx op;
1940 enum machine_mode mode;
1941 {
1942 if (mode != VOIDmode && GET_MODE (op) != mode)
1943 return 0;
1944
1945 return (GET_CODE (op) == SYMBOL_REF
1946 || (GET_CODE (op) == REG
1947 && (REGNO (op) == LINK_REGISTER_REGNUM
1948 || REGNO (op) == COUNT_REGISTER_REGNUM
1949 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
1950 }
1951
1952 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1953 this file and the function is not weakly defined. */
1954
1955 int
1956 current_file_function_operand (op, mode)
1957 rtx op;
1958 enum machine_mode mode ATTRIBUTE_UNUSED;
1959 {
1960 return (GET_CODE (op) == SYMBOL_REF
1961 && (SYMBOL_REF_FLAG (op)
1962 || (op == XEXP (DECL_RTL (current_function_decl), 0)
1963 && ! DECL_WEAK (current_function_decl))));
1964 }
1965
/* Return 1 if this operand is a valid input for a move insn.  Note the
   ordering of the tests matters: the float/multi-word branch returns
   register_operand's result directly rather than falling through.  */

int
input_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Memory is always valid.  */
  if (memory_operand (op, mode))
    return 1;

  /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  /* For floating-point, easy constants are valid.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && CONSTANT_P (op)
      && easy_fp_constant (op, mode))
    return 1;

  /* Allow any integer constant.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && (GET_CODE (op) == CONST_INT
	  || GET_CODE (op) == CONST_DOUBLE))
    return 1;

  /* For floating-point or multi-word mode, the only remaining valid type
     is a register.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    return register_operand (op, mode);

  /* The only cases left are integral modes one word or smaller (we
     do not get called for MODE_CC values).  These can be in any
     register.  */
  if (register_operand (op, mode))
    return 1;

  /* A SYMBOL_REF referring to the TOC is valid.  */
  if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
    return 1;

  /* A constant pool expression (relative to the TOC) is valid */
  if (TOC_RELATIVE_EXPR_P (op))
    return 1;

  /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
     to be valid.  */
  if (DEFAULT_ABI == ABI_V4
      && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
      && small_data_operand (op, Pmode))
    return 1;

  return 0;
}
2022
/* Return 1 for an operand in small memory on V.4/eabi: a SYMBOL_REF
   (or SYMBOL_REF plus constant) referring into the small-data area.
   Always 0 on non-ELF targets.  */

int
small_data_operand (op, mode)
     rtx op ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
#if TARGET_ELF
  rtx sym_ref;

  if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
    return 0;

  if (DEFAULT_ABI != ABI_V4)
    return 0;

  if (GET_CODE (op) == SYMBOL_REF)
    sym_ref = op;

  else if (GET_CODE (op) != CONST
	   || GET_CODE (XEXP (op, 0)) != PLUS
	   || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
	   || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
    return 0;

  else
    {
      rtx sum = XEXP (op, 0);
      HOST_WIDE_INT summand;

      /* We have to be careful here, because it is the referenced address
	 that must be 32k from _SDA_BASE_, not just the symbol.  */
      summand = INTVAL (XEXP (sum, 1));
      if (summand < 0 || summand > g_switch_value)
	return 0;

      sym_ref = XEXP (sum, 0);
    }

  /* Small-data symbols carry a leading '@' — presumably applied by this
     port's section-info encoding; verify against ENCODE_SECTION_INFO.  */
  if (*XSTR (sym_ref, 0) != '@')
    return 0;

  return 1;

#else
  return 0;
#endif
}
2071 \f
2072 static int
2073 constant_pool_expr_1 (op, have_sym, have_toc)
2074 rtx op;
2075 int *have_sym;
2076 int *have_toc;
2077 {
2078 switch (GET_CODE(op))
2079 {
2080 case SYMBOL_REF:
2081 if (CONSTANT_POOL_ADDRESS_P (op))
2082 {
2083 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2084 {
2085 *have_sym = 1;
2086 return 1;
2087 }
2088 else
2089 return 0;
2090 }
2091 else if (! strcmp (XSTR (op, 0), toc_label_name))
2092 {
2093 *have_toc = 1;
2094 return 1;
2095 }
2096 else
2097 return 0;
2098 case PLUS:
2099 case MINUS:
2100 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2101 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2102 case CONST:
2103 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2104 case CONST_INT:
2105 return 1;
2106 default:
2107 return 0;
2108 }
2109 }
2110
2111 int
2112 constant_pool_expr_p (op)
2113 rtx op;
2114 {
2115 int have_sym = 0;
2116 int have_toc = 0;
2117 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2118 }
2119
2120 int
2121 toc_relative_expr_p (op)
2122 rtx op;
2123 {
2124 int have_sym = 0;
2125 int have_toc = 0;
2126 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2127 }
2128
2129 /* Try machine-dependent ways of modifying an illegitimate address
2130 to be legitimate. If we find one, return the new, valid address.
2131 This is used from only one place: `memory_address' in explow.c.
2132
2133 OLDX is the address as it was before break_out_memory_refs was
2134 called. In some cases it is useful to look at this to decide what
2135 needs to be done.
2136
2137 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2138
2139 It is always safe for this function to do nothing. It exists to
2140 recognize opportunities to optimize the output.
2141
2142 On RS/6000, first check for the sum of a register with a constant
2143 integer that is out of range. If so, generate code to add the
2144 constant with the low-order 16 bits masked to the register and force
2145 this result into another register (this can be done with `cau').
2146 Then generate an address of REG+(CONST&0xffff), allowing for the
2147 possibility of bit 16 being a one.
2148
2149 Then check for the sum of a register and something not constant, try to
2150 load the other things into a register and return the sum. */
rtx
rs6000_legitimize_address (x, oldx, mode)
     rtx x;
     rtx oldx ATTRIBUTE_UNUSED;
     enum machine_mode mode;
{
  /* reg + out-of-range constant: split the constant into a high part
     (materialized into a register) and a signed 16-bit low part that
     can stay in the address.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
    {
      HOST_WIDE_INT high_int, low_int;
      rtx sum;
      /* Sign-extend the low 16 bits; the high part absorbs the rest,
	 including the carry out of bit 15.  */
      low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
      high_int = INTVAL (XEXP (x, 1)) - low_int;
      sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
					 GEN_INT (high_int)), 0);
      return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
    }
  /* reg + non-constant: force the second operand into a register so
     a reg+reg (indexed) address results.  Modes that cannot use
     indexed addressing (multi-register modes; see
     rs6000_legitimate_address) are excluded.  */
  else if (GET_CODE (x) == PLUS
	   && GET_CODE (XEXP (x, 0)) == REG
	   && GET_CODE (XEXP (x, 1)) != CONST_INT
	   && GET_MODE_NUNITS (mode) == 1
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	       || TARGET_POWERPC64
	       || (mode != DFmode && mode != TFmode))
	   && (TARGET_POWERPC64 || mode != DImode)
	   && mode != TImode)
    {
      return gen_rtx_PLUS (Pmode, XEXP (x, 0),
			   force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
    }
  /* AltiVec modes: only [reg] and [reg+reg] forms are supported.  */
  else if (ALTIVEC_VECTOR_MODE (mode))
    {
      rtx reg;

      /* Make sure both operands are registers.  */
      if (GET_CODE (x) == PLUS)
	return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
			     force_reg (Pmode, XEXP (x, 1)));

      reg = force_reg (Pmode, x);
      return reg;
    }
  else if (SPE_VECTOR_MODE (mode))
    {
      /* We accept [reg + reg] and [reg + OFFSET].  */

      if (GET_CODE (x) == PLUS)
	{
	  rtx op1 = XEXP (x, 0);
	  rtx op2 = XEXP (x, 1);

	  op1 = force_reg (Pmode, op1);

	  /* Offsets that don't fit the SPE encoding must go through a
	     register.  */
	  if (GET_CODE (op2) != REG
	      && (GET_CODE (op2) != CONST_INT
		  || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
	    op2 = force_reg (Pmode, op2);

	  return gen_rtx_PLUS (Pmode, op1, op2);
	}

      return force_reg (Pmode, x);
    }
  /* 32-bit ELF, no TOC, non-PIC: materialize a constant address with a
     HIGH/LO_SUM pair.  */
  else if (TARGET_ELF
	   && TARGET_32BIT
	   && TARGET_NO_TOC
	   && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && GET_MODE_NUNITS (mode) == 1
	   && (GET_MODE_BITSIZE (mode) <= 32
	       || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_elf_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  /* Likewise for Darwin without a TOC, via the macho_high pattern.  */
  else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
	   && ! flag_pic
#if TARGET_MACHO
	   && ! MACHO_DYNAMIC_NO_PIC_P
#endif
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
	   && mode != DImode
	   && mode != TImode)
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_macho_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  /* Constants already placed in the TOC get a TOC-relative reference.  */
  else if (TARGET_TOC
	   && CONSTANT_POOL_EXPR_P (x)
	   && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
    {
      return create_TOC_reference (x);
    }
  else
    /* No transformation applies; let generic code handle it.  */
    return NULL_RTX;
}
2256
2257 /* The convention appears to be to define this wherever it is used.
2258 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2259 is now used here. */
2260 #ifndef REG_MODE_OK_FOR_BASE_P
2261 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2262 #endif
2263
/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS.  Returns a value to
   replace the input X, or the original X if no replacement is called for.
   The output parameter *WIN is 1 if the calling macro should goto WIN,
   0 if it should not.

   For RS/6000, we wish to handle large displacements off a base
   register by splitting the addend across an addi/addis and the mem insn.
   This cuts number of extra insns needed from 3 to 1.

   On Darwin, we use this to generate code for floating point constants.
   A movsf_low is generated so we wind up with 2 instructions rather than 3.
   The Darwin code is inside #if TARGET_MACHO because only then is
   machopic_function_base_name() defined.  */
rtx
rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
     rtx x;
     enum machine_mode mode;
     int opnum;
     int type;
     int ind_levels ATTRIBUTE_UNUSED;
     int *win;
{
  /* We must recognize output that we have already generated ourselves.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      /* Reload the inner reg+high sum into a base register; the low
	 part stays in the address.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN && flag_pic
      && GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
      && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
    {
      /* Result of previous invocation of this function on Darwin
	 floating point constant.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* reg + constant displacement: split the displacement into a
     sign-extended 16-bit low part and a high part.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && !SPE_VECTOR_MODE (mode)
      && !ALTIVEC_VECTOR_MODE (mode))
    {
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT high
	= (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
	{
	  *win = 0;
	  return x;
	}

      /* Reload the high part into a base reg; leave the low part
	 in the mem directly.  */

      x = gen_rtx_PLUS (GET_MODE (x),
			gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
				      GEN_INT (high)),
			GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#if TARGET_MACHO
  if (GET_CODE (x) == SYMBOL_REF
      && DEFAULT_ABI == ABI_DARWIN
      && !ALTIVEC_VECTOR_MODE (mode)
      && flag_pic)
    {
      /* Darwin load of floating point constant.  The address is formed
	 relative to the per-function PIC base.  */
      rtx offset = gen_rtx (CONST, Pmode,
			    gen_rtx (MINUS, Pmode, x,
				     gen_rtx (SYMBOL_REF, Pmode,
					      machopic_function_base_name ())));
      x = gen_rtx (LO_SUM, GET_MODE (x),
		   gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
			    gen_rtx (HIGH, Pmode, offset)), offset);
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
  if (GET_CODE (x) == SYMBOL_REF
      && DEFAULT_ABI == ABI_DARWIN
      && !ALTIVEC_VECTOR_MODE (mode)
      && MACHO_DYNAMIC_NO_PIC_P)
    {
      /* Darwin load of floating point constant.  */
      x = gen_rtx (LO_SUM, GET_MODE (x),
		   gen_rtx (HIGH, Pmode, x), x);
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* TOC-resident constants are addressed relative to the TOC register;
     no reload needed.  */
  if (TARGET_TOC
      && CONSTANT_POOL_EXPR_P (x)
      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
    {
      (x) = create_TOC_reference (x);
      *win = 1;
      return x;
    }
  *win = 0;
  return x;
}
2401
2402 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2403 that is a valid memory address for an instruction.
2404 The MODE argument is the machine mode for the MEM expression
2405 that wants to use this address.
2406
2407 On the RS/6000, there are four valid address: a SYMBOL_REF that
2408 refers to a constant pool entry of an address (or the sum of it
2409 plus a constant), a short (16-bit signed) constant plus a register,
2410 the sum of two registers, or a register indirect, possibly with an
2411 auto-increment. For DFmode and DImode with a constant plus register,
2412 we must ensure that both words are addressable or PowerPC64 with offset
2413 word aligned.
2414
2415 For modes spanning multiple registers (DFmode in 32-bit GPRs,
2416 32-bit DImode, TImode), indexed addressing cannot be used because
2417 adjacent memory cells are accessed by adding word-sized offsets
2418 during assembly output. */
2419 int
2420 rs6000_legitimate_address (mode, x, reg_ok_strict)
2421 enum machine_mode mode;
2422 rtx x;
2423 int reg_ok_strict;
2424 {
2425 if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
2426 return 1;
2427 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2428 && !ALTIVEC_VECTOR_MODE (mode)
2429 && !SPE_VECTOR_MODE (mode)
2430 && TARGET_UPDATE
2431 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
2432 return 1;
2433 if (LEGITIMATE_SMALL_DATA_P (mode, x))
2434 return 1;
2435 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
2436 return 1;
2437 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2438 if (! reg_ok_strict
2439 && GET_CODE (x) == PLUS
2440 && GET_CODE (XEXP (x, 0)) == REG
2441 && XEXP (x, 0) == virtual_stack_vars_rtx
2442 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2443 return 1;
2444 if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
2445 return 1;
2446 if (mode != TImode
2447 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2448 || TARGET_POWERPC64
2449 || (mode != DFmode && mode != TFmode))
2450 && (TARGET_POWERPC64 || mode != DImode)
2451 && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
2452 return 1;
2453 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
2454 return 1;
2455 return 0;
2456 }
2457 \f
/* Try to output insns to set TARGET equal to the constant C if it can
   be done in less than N insns.  Do all computations in MODE.
   Returns the place where the output has been placed if it can be
   done and the insns have been emitted.  If it would take more than N
   insns, zero is returned and no insns are emitted.  */

rtx
rs6000_emit_set_const (dest, mode, source, n)
     rtx dest, source;
     enum machine_mode mode;
     int n ATTRIBUTE_UNUSED;
{
  rtx result, insn, set;
  HOST_WIDE_INT c0, c1;

  /* QImode/HImode constants fit a single insn; emit the move directly.  */
  if (mode == QImode || mode == HImode)
    {
      if (dest == NULL)
	dest = gen_reg_rtx (mode);
      emit_insn (gen_rtx_SET (VOIDmode, dest, source));
      return dest;
    }
  /* SImode: set the high 16 bits, then IOR in the low 16 bits.
     NOTE(review): SOURCE is assumed to be a CONST_INT here (INTVAL).  */
  else if (mode == SImode)
    {
      result = no_new_pseudos ? dest : gen_reg_rtx (SImode);

      emit_insn (gen_rtx_SET (VOIDmode, result,
			      GEN_INT (INTVAL (source)
				       & (~ (HOST_WIDE_INT) 0xffff))));
      emit_insn (gen_rtx_SET (VOIDmode, dest,
			      gen_rtx_IOR (SImode, result,
					   GEN_INT (INTVAL (source) & 0xffff))));
      result = dest;
    }
  else if (mode == DImode)
    {
      /* Split the value into low word C0 and high word C1, then defer
	 to the general DImode constant synthesizer.  */
      if (GET_CODE (source) == CONST_INT)
	{
	  c0 = INTVAL (source);
	  c1 = -(c0 < 0);	/* Sign-extend into the high word.  */
	}
      else if (GET_CODE (source) == CONST_DOUBLE)
	{
#if HOST_BITS_PER_WIDE_INT >= 64
	  c0 = CONST_DOUBLE_LOW (source);
	  c1 = -(c0 < 0);
#else
	  c0 = CONST_DOUBLE_LOW (source);
	  c1 = CONST_DOUBLE_HIGH (source);
#endif
	}
      else
	abort ();

      result = rs6000_emit_set_long_const (dest, c0, c1);
    }
  else
    abort ();

  /* Record the constant in a REG_EQUAL note on the last insn, unless
     its source is already a constant.  */
  insn = get_last_insn ();
  set = single_set (insn);
  if (! CONSTANT_P (SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, source);

  return result;
}
2524
/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
   fall back to a straight forward decomposition.  We do this to avoid
   exponential run times encountered when looking for longer sequences
   with rs6000_emit_set_const.

   C1 is the low word of the constant, C2 the high word (ignored on
   hosts with a 64-bit HOST_WIDE_INT, where C1 holds the full value).  */
static rtx
rs6000_emit_set_long_const (dest, c1, c2)
     rtx dest;
     HOST_WIDE_INT c1, c2;
{
  if (!TARGET_POWERPC64)
    {
      /* 32-bit target: DEST spans two hard registers; move each word
	 separately, low word C1 and high word C2.  */
      rtx operand1, operand2;

      operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
					DImode);
      operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
					DImode);
      emit_move_insn (operand1, GEN_INT (c1));
      emit_move_insn (operand2, GEN_INT (c2));
    }
  else
    {
      HOST_WIDE_INT ud1, ud2, ud3, ud4;

      /* Break the constant into four 16-bit chunks, ud1 = lowest.  */
      ud1 = c1 & 0xffff;
      ud2 = (c1 & 0xffff0000) >> 16;
#if HOST_BITS_PER_WIDE_INT >= 64
      c2 = c1 >> 32;
#endif
      ud3 = c2 & 0xffff;
      ud4 = (c2 & 0xffff0000) >> 16;

      /* Sign-extended 16-bit value: one insn.  The ^/- dance produces
	 the sign-extended form of the chunk.  */
      if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
	  || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
	{
	  if (ud1 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
	  else
	    emit_move_insn (dest, GEN_INT (ud1));
	}
      /* Sign-extended 32-bit value: load the upper chunk shifted, then
	 IOR in the low chunk if nonzero.  */
      else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
	       || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
	{
	  if (ud2 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud2 << 16));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      /* ud4 is just the sign extension of ud3: build (ud3,ud2) in the
	 low half, shift left 16, then IOR in ud1.  */
      else if ((ud4 == 0xffff && (ud3 & 0x8000))
	       || (ud4 == 0 && ! (ud3 & 0x8000)))
	{
	  if (ud3 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud3 << 16));

	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      /* General case: build the high 32 bits, shift left 32, then IOR
	 in the two low chunks.  */
      else
	{
	  if (ud4 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud4 << 16));

	  if (ud3 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));

	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
					       GEN_INT (ud2 << 16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
    }
  return dest;
}
2613
/* Emit a move from SOURCE to DEST in mode MODE.  Handles all the
   target-specific work (forcing constants to memory, TOC references,
   POWER vs. PowerPC differences) before emitting the final SET.  */
void
rs6000_emit_move (dest, source, mode)
     rtx dest;
     rtx source;
     enum machine_mode mode;
{
  rtx operands[2];
  operands[0] = dest;
  operands[1] = source;

  /* Sanity checks.  Check that we get CONST_DOUBLE only when we should.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      /* FIXME.  This should never happen.  */
      /* Since it seems that it does, do the safe thing and convert
	 to a CONST_INT.  */
      operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
    }
  /* A non-float CONST_DOUBLE whose value fits a CONST_INT should have
     been a CONST_INT in the first place; treat it as a bug.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && ((CONST_DOUBLE_HIGH (operands[1]) == 0
	   && CONST_DOUBLE_LOW (operands[1]) >= 0)
	  || (CONST_DOUBLE_HIGH (operands[1]) == -1
	      && CONST_DOUBLE_LOW (operands[1]) < 0)))
    abort ();

  /* Check if GCC is setting up a block move that will end up using FP
     registers as temporaries.  We must make sure this is acceptable.  */
  if (GET_CODE (operands[0]) == MEM
      && GET_CODE (operands[1]) == MEM
      && mode == DImode
      && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
	  || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
      && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
					    ? 32 : MEM_ALIGN (operands[0])))
	    || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
					       ? 32
					       : MEM_ALIGN (operands[1]))))
      && ! MEM_VOLATILE_P (operands [0])
      && ! MEM_VOLATILE_P (operands [1]))
    {
      /* Do the mem-to-mem DImode copy as two SImode moves so no FP
	 temporary is required.  */
      emit_move_insn (adjust_address (operands[0], SImode, 0),
		      adjust_address (operands[1], SImode, 0));
      emit_move_insn (adjust_address (operands[0], SImode, 4),
		      adjust_address (operands[1], SImode, 4));
      return;
    }

  if (!no_new_pseudos)
    {
      /* When optimizing, load sub-word memory values zero-extended
	 into a full word and use the low part.  */
      if (GET_CODE (operands[1]) == MEM && optimize > 0
	  && (mode == QImode || mode == HImode || mode == SImode)
	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
	{
	  rtx reg = gen_reg_rtx (word_mode);

	  emit_insn (gen_rtx_SET (word_mode, reg,
				  gen_rtx_ZERO_EXTEND (word_mode,
						       operands[1])));
	  operands[1] = gen_lowpart (mode, reg);
	}
      /* Stores need their source in a register.  */
      if (GET_CODE (operands[0]) != REG)
	operands[1] = force_reg (mode, operands[1]);
    }

  /* Storing SFmode on pre-PowerPC (POWER) hardware with FP regs.  */
  if (mode == SFmode && ! TARGET_POWERPC
      && TARGET_HARD_FLOAT && TARGET_FPRS
      && GET_CODE (operands[0]) == MEM)
    {
      int regnum;

      if (reload_in_progress || reload_completed)
	regnum = true_regnum (operands[1]);
      else if (GET_CODE (operands[1]) == REG)
	regnum = REGNO (operands[1]);
      else
	regnum = -1;

      /* If operands[1] is a register, on POWER it may have
	 double-precision data in it, so truncate it to single
	 precision.  */
      if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
	{
	  rtx newreg;
	  newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
	  emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
	  operands[1] = newreg;
	}
    }

  /* Handle the case where reload calls us with an invalid address;
     and the case of CONSTANT_P_RTX.  */
  if (!ALTIVEC_VECTOR_MODE (mode)
      && (! general_operand (operands[1], mode)
	  || ! nonimmediate_operand (operands[0], mode)
	  || GET_CODE (operands[1]) == CONSTANT_P_RTX))
    {
      emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
      return;
    }

  /* FIXME:  In the long term, this switch statement should go away
     and be replaced by a sequence of tests based on things like
     mode == Pmode.  */
  switch (mode)
    {
    case HImode:
    case QImode:
      /* Non-CONST_INT constants must come from memory.  */
      if (CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != CONST_INT)
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case TFmode:
    case DFmode:
    case SFmode:
      /* FP constants that cannot be synthesized cheaply go to memory.  */
      if (CONSTANT_P (operands[1])
	  && ! easy_fp_constant (operands[1], mode))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case V16QImode:
    case V8HImode:
    case V4SFmode:
    case V4SImode:
    case V4HImode:
    case V2SFmode:
    case V2SImode:
    case V1DImode:
      /* Likewise for vector constants.  */
      if (CONSTANT_P (operands[1])
	  && !easy_vector_constant (operands[1]))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case SImode:
    case DImode:
      /* Use default pattern for address of ELF small data */
      if (TARGET_ELF
	  && mode == Pmode
	  && DEFAULT_ABI == ABI_V4
	  && (GET_CODE (operands[1]) == SYMBOL_REF
	      || GET_CODE (operands[1]) == CONST)
	  && small_data_operand (operands[1], mode))
	{
	  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	  return;
	}

      /* 32-bit SVR4 with -fpic: load through the GOT.  */
      if (DEFAULT_ABI == ABI_V4
	  && mode == Pmode && mode == SImode
	  && flag_pic == 1 && got_operand (operands[1], mode))
	{
	  emit_insn (gen_movsi_got (operands[0], operands[1]));
	  return;
	}

      /* No TOC and non-PIC: build the address with a high/low pair.  */
      if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
	  && TARGET_NO_TOC
	  && ! flag_pic
	  && mode == Pmode
	  && CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != HIGH
	  && GET_CODE (operands[1]) != CONST_INT)
	{
	  rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));

	  /* If this is a function address on -mcall-aixdesc,
	     convert it to the address of the descriptor.  */
	  if (DEFAULT_ABI == ABI_AIX
	      && GET_CODE (operands[1]) == SYMBOL_REF
	      && XSTR (operands[1], 0)[0] == '.')
	    {
	      const char *name = XSTR (operands[1], 0);
	      rtx new_ref;
	      while (*name == '.')
		name++;
	      new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
	      CONSTANT_POOL_ADDRESS_P (new_ref)
		= CONSTANT_POOL_ADDRESS_P (operands[1]);
	      SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
	      SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
	      operands[1] = new_ref;
	    }

	  if (DEFAULT_ABI == ABI_DARWIN)
	    {
#if TARGET_MACHO
	      if (MACHO_DYNAMIC_NO_PIC_P)
		{
		  /* Take care of any required data indirection.  */
		  operands[1] = rs6000_machopic_legitimize_pic_address (
				  operands[1], mode, operands[0]);
		  if (operands[0] != operands[1])
		    emit_insn (gen_rtx_SET (VOIDmode,
					    operands[0], operands[1]));
		  return;
		}
#endif
	      emit_insn (gen_macho_high (target, operands[1]));
	      emit_insn (gen_macho_low (operands[0], target, operands[1]));
	      return;
	    }

	  emit_insn (gen_elf_high (target, operands[1]));
	  emit_insn (gen_elf_low (operands[0], target, operands[1]));
	  return;
	}

      /* If this is a SYMBOL_REF that refers to a constant pool entry,
	 and we have put it in the TOC, we just need to make a TOC-relative
	 reference to it.  */
      if (TARGET_TOC
	  && GET_CODE (operands[1]) == SYMBOL_REF
	  && CONSTANT_POOL_EXPR_P (operands[1])
	  && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
					      get_pool_mode (operands[1])))
	{
	  operands[1] = create_TOC_reference (operands[1]);
	}
      else if (mode == Pmode
	       && CONSTANT_P (operands[1])
	       && ((GET_CODE (operands[1]) != CONST_INT
		    && ! easy_fp_constant (operands[1], mode))
		   || (GET_CODE (operands[1]) == CONST_INT
		       && num_insns_constant (operands[1], mode) > 2)
		   || (GET_CODE (operands[0]) == REG
		       && FP_REGNO_P (REGNO (operands[0]))))
	       && GET_CODE (operands[1]) != HIGH
	       && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
	       && ! TOC_RELATIVE_EXPR_P (operands[1]))
	{
	  /* Emit a USE operation so that the constant isn't deleted if
	     expensive optimizations are turned on because nobody
	     references it.  This should only be done for operands that
	     contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
	     This should not be done for operands that contain LABEL_REFs.
	     For now, we just handle the obvious case.  */
	  if (GET_CODE (operands[1]) != LABEL_REF)
	    emit_insn (gen_rtx_USE (VOIDmode, operands[1]));

#if TARGET_MACHO
	  /* Darwin uses a special PIC legitimizer.  */
	  if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
	    {
	      operands[1] =
		rs6000_machopic_legitimize_pic_address (operands[1], mode,
							operands[0]);
	      if (operands[0] != operands[1])
		emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	      return;
	    }
#endif

	  /* If we are to limit the number of things we put in the TOC and
	     this is a symbol plus a constant we can add in one insn,
	     just put the symbol in the TOC and add the constant.  Don't do
	     this if reload is in progress.  */
	  if (GET_CODE (operands[1]) == CONST
	      && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
	      && GET_CODE (XEXP (operands[1], 0)) == PLUS
	      && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
	      && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
		  || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
	      && ! side_effects_p (operands[0]))
	    {
	      rtx sym =
		force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
	      rtx other = XEXP (XEXP (operands[1], 0), 1);

	      sym = force_reg (mode, sym);
	      if (mode == SImode)
		emit_insn (gen_addsi3 (operands[0], sym, other));
	      else
		emit_insn (gen_adddi3 (operands[0], sym, other));
	      return;
	    }

	  operands[1] = force_const_mem (mode, operands[1]);

	  /* If the constant landed in the TOC, rewrite the memory
	     reference to be TOC-relative and mark it unchanging.  */
	  if (TARGET_TOC
	      && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
	      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
		   get_pool_constant (XEXP (operands[1], 0)),
		   get_pool_mode (XEXP (operands[1], 0))))
	    {
	      operands[1]
		= gen_rtx_MEM (mode,
			       create_TOC_reference (XEXP (operands[1], 0)));
	      set_mem_alias_set (operands[1], get_TOC_alias_set ());
	      RTX_UNCHANGING_P (operands[1]) = 1;
	    }
	}
      break;

    case TImode:
      /* TImode memory operands need a plain register address.  */
      if (GET_CODE (operands[0]) == MEM
	  && GET_CODE (XEXP (operands[0], 0)) != REG
	  && ! reload_in_progress)
	operands[0]
	  = replace_equiv_address (operands[0],
				   copy_addr_to_reg (XEXP (operands[0], 0)));

      if (GET_CODE (operands[1]) == MEM
	  && GET_CODE (XEXP (operands[1], 0)) != REG
	  && ! reload_in_progress)
	operands[1]
	  = replace_equiv_address (operands[1],
				   copy_addr_to_reg (XEXP (operands[1], 0)));
      /* On POWER, emit the move with an explicit SImode scratch
	 clobber as a PARALLEL.  */
      if (TARGET_POWER)
	{
	  emit_insn (gen_rtx_PARALLEL (VOIDmode,
				       gen_rtvec (2,
						  gen_rtx_SET (VOIDmode,
							       operands[0], operands[1]),
						  gen_rtx_CLOBBER (VOIDmode,
								   gen_rtx_SCRATCH (SImode)))));
	  return;
	}
      break;

    default:
      abort ();
    }

  /* Above, we may have called force_const_mem which may have returned
     an invalid address.  If we can, fix this up; otherwise, reload will
     have to deal with it.  */
  if (GET_CODE (operands[1]) == MEM
      && ! memory_address_p (mode, XEXP (operands[1], 0))
      && ! reload_in_progress)
    operands[1] = adjust_address (operands[1], mode, 0);

  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
  return;
}
2952 \f
2953 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2954 for a call to a function whose data type is FNTYPE.
2955 For a library call, FNTYPE is 0.
2956
2957 For incoming args we set the number of arguments in the prototype large
2958 so we never return a PARALLEL. */
2959
2960 void
2961 init_cumulative_args (cum, fntype, libname, incoming)
2962 CUMULATIVE_ARGS *cum;
2963 tree fntype;
2964 rtx libname ATTRIBUTE_UNUSED;
2965 int incoming;
2966 {
2967 static CUMULATIVE_ARGS zero_cumulative;
2968
2969 *cum = zero_cumulative;
2970 cum->words = 0;
2971 cum->fregno = FP_ARG_MIN_REG;
2972 cum->vregno = ALTIVEC_ARG_MIN_REG;
2973 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
2974 cum->call_cookie = CALL_NORMAL;
2975 cum->sysv_gregno = GP_ARG_MIN_REG;
2976
2977 if (incoming)
2978 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
2979
2980 else if (cum->prototype)
2981 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
2982 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
2983 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
2984
2985 else
2986 cum->nargs_prototype = 0;
2987
2988 cum->orig_nargs = cum->nargs_prototype;
2989
2990 /* Check for a longcall attribute. */
2991 if (fntype
2992 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
2993 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
2994 cum->call_cookie = CALL_LONG;
2995
2996 if (TARGET_DEBUG_ARG)
2997 {
2998 fprintf (stderr, "\ninit_cumulative_args:");
2999 if (fntype)
3000 {
3001 tree ret_type = TREE_TYPE (fntype);
3002 fprintf (stderr, " ret code = %s,",
3003 tree_code_name[ (int)TREE_CODE (ret_type) ]);
3004 }
3005
3006 if (cum->call_cookie & CALL_LONG)
3007 fprintf (stderr, " longcall,");
3008
3009 fprintf (stderr, " proto = %d, nargs = %d\n",
3010 cum->prototype, cum->nargs_prototype);
3011 }
3012 }
3013 \f
3014 /* If defined, a C expression which determines whether, and in which
3015 direction, to pad out an argument with extra space. The value
3016 should be of type `enum direction': either `upward' to pad above
3017 the argument, `downward' to pad below, or `none' to inhibit
3018 padding.
3019
3020 For the AIX ABI structs are always stored left shifted in their
3021 argument slot. */
3022
3023 enum direction
3024 function_arg_padding (mode, type)
3025 enum machine_mode mode;
3026 tree type;
3027 {
3028 if (type != 0 && AGGREGATE_TYPE_P (type))
3029 return upward;
3030
3031 /* This is the default definition. */
3032 return (! BYTES_BIG_ENDIAN
3033 ? upward
3034 : ((mode == BLKmode
3035 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
3036 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
3037 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
3038 ? downward : upward));
3039 }
3040
3041 /* If defined, a C expression that gives the alignment boundary, in bits,
3042 of an argument with the specified mode and type. If it is not defined,
3043 PARM_BOUNDARY is used for all arguments.
3044
3045 V.4 wants long longs to be double word aligned. */
3046
3047 int
3048 function_arg_boundary (mode, type)
3049 enum machine_mode mode;
3050 tree type ATTRIBUTE_UNUSED;
3051 {
3052 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
3053 return 64;
3054 else if (SPE_VECTOR_MODE (mode))
3055 return 64;
3056 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3057 return 128;
3058 else
3059 return PARM_BOUNDARY;
3060 }
3061 \f
3062 /* Update the data in CUM to advance over an argument
3063 of mode MODE and data type TYPE.
3064 (TYPE is null for libcalls where that information may not be available.) */
3065
void
function_arg_advance (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;
     tree type;
     int named;
{
  /* One fewer prototyped argument remains; this goes negative once we
     advance past the end of the prototype (e.g. stdarg anonymous args).  */
  cum->nargs_prototype--;

  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      /* AltiVec vectors use a vector register while prototyped
	 arguments (and registers) remain; otherwise they take stack
	 slots counted in words.  */
      if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
	cum->vregno++;
      else
	cum->words += RS6000_ARG_SIZE (mode, type);
    }
  else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
	   && named && cum->sysv_gregno <= GP_ARG_MAX_REG)
    /* Named SPE vectors occupy a single GP register.  */
    cum->sysv_gregno++;
  else if (DEFAULT_ABI == ABI_V4)
    {
      if (TARGET_HARD_FLOAT && TARGET_FPRS
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    cum->fregno++;
	  else
	    {
	      /* Out of FP registers: the value goes on the stack,
		 with DFmode doubleword-aligned there.  */
	      if (mode == DFmode)
		cum->words += cum->words & 1;
	      cum->words += RS6000_ARG_SIZE (mode, type);
	    }
	}
      else
	{
	  int n_words;
	  int gregno = cum->sysv_gregno;

	  /* Aggregates and IEEE quad get passed by reference.  */
	  if ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode)
	    n_words = 1;
	  else
	    n_words = RS6000_ARG_SIZE (mode, type);

	  /* Long long and SPE vectors are put in odd registers.  */
	  if (n_words == 2 && (gregno & 1) == 0)
	    gregno += 1;

	  /* Long long and SPE vectors are not split between registers
	     and stack.  */
	  if (gregno + n_words - 1 > GP_ARG_MAX_REG)
	    {
	      /* Long long is aligned on the stack.  */
	      if (n_words == 2)
		cum->words += cum->words & 1;
	      cum->words += n_words;
	    }

	  /* Note: gregno keeps advancing even after arguments have
	     begun spilling to the stack; a value past GP_ARG_MAX_REG
	     signals to expand_builtin_saveregs that spilling started.  */
	  cum->sysv_gregno = gregno + n_words;
	}

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
		   cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
	  fprintf (stderr, "mode = %4s, named = %d\n",
		   GET_MODE_NAME (mode), named);
	}
    }
  else
    {
      /* AIX/Darwin style: everything is counted in words; 32-bit
	 doubleword-aligned args may need one word of padding first.  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
		   && function_arg_boundary (mode, type) == 64) ? 1 : 0;

      cum->words += align + RS6000_ARG_SIZE (mode, type);

      /* FP arguments also consume FP registers (TFmode takes two).  */
      if (GET_MODE_CLASS (mode) == MODE_FLOAT
	  && TARGET_HARD_FLOAT && TARGET_FPRS)
	cum->fregno += (mode == TFmode ? 2 : 1);

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
		   cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
	  fprintf (stderr, "named = %d, align = %d\n", named, align);
	}
    }
}
3162 \f
3163 /* Determine where to put an argument to a function.
3164 Value is zero to push the argument on the stack,
3165 or a hard register in which to store the argument.
3166
3167 MODE is the argument's machine mode.
3168 TYPE is the data type of the argument (as a tree).
3169 This is null for libcalls where that information may
3170 not be available.
3171 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3172 the preceding args and about the function being called.
3173 NAMED is nonzero if this argument is a named parameter
3174 (otherwise it is an extra parameter matching an ellipsis).
3175
3176 On RS/6000 the first eight words of non-FP are normally in registers
3177 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3178 Under V.4, the first 8 FP args are in registers.
3179
3180 If this is floating-point and no prototype is specified, we use
3181 both an FP and integer register (or possibly FP reg and stack). Library
3182 functions (when TYPE is zero) always have the proper types for args,
3183 so we can pass the FP value just in one register. emit_library_function
3184 doesn't support PARALLEL anyway. */
3185
struct rtx_def *
function_arg (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;
     tree type;
     int named;
{
  enum rs6000_abi abi = DEFAULT_ABI;

  /* Return a marker to indicate whether CR1 needs to set or clear the
     bit that V.4 uses to say fp args were passed in registers.
     Assume that we don't need the marker for software floating point,
     or compiler generated library calls.  */
  if (mode == VOIDmode)
    {
      if (abi == ABI_V4
	  && cum->nargs_prototype < 0
	  && type && (cum->prototype || TARGET_NO_PROTOTYPE))
	{
	  /* For the SPE, we need to crxor CR6 always.  */
	  if (TARGET_SPE_ABI)
	    return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
	  else if (TARGET_HARD_FLOAT && TARGET_FPRS)
	    return GEN_INT (cum->call_cookie
			    | ((cum->fregno == FP_ARG_MIN_REG)
			       ? CALL_V4_SET_FP_ARGS
			       : CALL_V4_CLEAR_FP_ARGS));
	}

      return GEN_INT (cum->call_cookie);
    }

  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      /* Named AltiVec vectors go in vector registers; anything else
	 (unnamed, or registers exhausted) goes to memory.  */
      if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
	return gen_rtx_REG (mode, cum->vregno);
      else
	return NULL;
    }
  else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode) && named)
    {
      if (cum->sysv_gregno <= GP_ARG_MAX_REG)
	return gen_rtx_REG (mode, cum->sysv_gregno);
      else
	return NULL;
    }
  else if (abi == ABI_V4)
    {
      if (TARGET_HARD_FLOAT && TARGET_FPRS
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    return gen_rtx_REG (mode, cum->fregno);
	  else
	    return NULL;
	}
      else
	{
	  int n_words;
	  int gregno = cum->sysv_gregno;

	  /* Aggregates and IEEE quad get passed by reference.  */
	  if ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode)
	    n_words = 1;
	  else
	    n_words = RS6000_ARG_SIZE (mode, type);

	  /* Long long and SPE vectors are put in odd registers.
	     NOTE: this mirrors function_arg_advance; keep in sync.  */
	  if (n_words == 2 && (gregno & 1) == 0)
	    gregno += 1;

	  /* Long long and SPE vectors are not split between registers
	     and stack.  */
	  if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
	    {
	      /* SPE vectors in ... get split into 2 registers.  */
	      if (TARGET_SPE && TARGET_SPE_ABI
		  && SPE_VECTOR_MODE (mode) && !named)
		{
		  rtx r1, r2;
		  enum machine_mode m = SImode;

		  /* Describe the two SImode halves and their byte
		     offsets (0 and 4) within the vector.  */
		  r1 = gen_rtx_REG (m, gregno);
		  r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
		  r2 = gen_rtx_REG (m, gregno + 1);
		  r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
		  return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
		}
	      return gen_rtx_REG (mode, gregno);
	    }
	  else
	    return NULL;
	}
    }
  else
    {
      /* AIX/Darwin style.  Account for possible 32-bit doubleword
	 alignment padding before this argument.  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
		   && function_arg_boundary (mode, type) == 64) ? 1 : 0;
      int align_words = cum->words + align;

      /* Variable sized types are passed in memory.  */
      if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	return NULL_RTX;

      if (USE_FP_FOR_ARG_P (*cum, mode, type))
	{
	  if (! type
	      || ((cum->nargs_prototype > 0)
		  /* IBM AIX extended its linkage convention definition always
		     to require FP args after register save area hole on the
		     stack.  */
		  && (DEFAULT_ABI != ABI_AIX
		      || ! TARGET_XL_CALL
		      || (align_words < GP_ARG_NUM_REG))))
	    return gen_rtx_REG (mode, cum->fregno);

	  /* Unprototyped FP arg: pass in both the FP register and the
	     GP register/stack position, so the callee can find it
	     either way.  */
	  return gen_rtx_PARALLEL (mode,
	    gen_rtvec (2,
		       gen_rtx_EXPR_LIST (VOIDmode,
				((align_words >= GP_ARG_NUM_REG)
				 ? NULL_RTX
				 : (align_words
				    + RS6000_ARG_SIZE (mode, type)
				    > GP_ARG_NUM_REG
				    /* If this is partially on the stack, then
				       we only include the portion actually
				       in registers here.  */
				    ? gen_rtx_REG (SImode,
					       GP_ARG_MIN_REG + align_words)
				    : gen_rtx_REG (mode,
					       GP_ARG_MIN_REG + align_words))),
				const0_rtx),
		       gen_rtx_EXPR_LIST (VOIDmode,
					  gen_rtx_REG (mode, cum->fregno),
					  const0_rtx)));
	}
      else if (align_words < GP_ARG_NUM_REG)
	return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
      else
	return NULL_RTX;
    }
}
3328 \f
3329 /* For an arg passed partly in registers and partly in memory,
3330 this is the number of registers used.
3331 For args passed entirely in registers or entirely in memory, zero. */
3332
3333 int
3334 function_arg_partial_nregs (cum, mode, type, named)
3335 CUMULATIVE_ARGS *cum;
3336 enum machine_mode mode;
3337 tree type;
3338 int named ATTRIBUTE_UNUSED;
3339 {
3340 if (DEFAULT_ABI == ABI_V4)
3341 return 0;
3342
3343 if (USE_FP_FOR_ARG_P (*cum, mode, type)
3344 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
3345 {
3346 if (cum->nargs_prototype >= 0)
3347 return 0;
3348 }
3349
3350 if (cum->words < GP_ARG_NUM_REG
3351 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
3352 {
3353 int ret = GP_ARG_NUM_REG - cum->words;
3354 if (ret && TARGET_DEBUG_ARG)
3355 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
3356
3357 return ret;
3358 }
3359
3360 return 0;
3361 }
3362 \f
3363 /* A C expression that indicates when an argument must be passed by
3364 reference. If nonzero for an argument, a copy of that argument is
3365 made in memory and a pointer to the argument is passed instead of
3366 the argument itself. The pointer is passed in whatever way is
3367 appropriate for passing a pointer to that type.
3368
3369 Under V.4, structures and unions are passed by reference.
3370
3371 As an extension to all ABIs, variable sized types are passed by
3372 reference. */
3373
3374 int
3375 function_arg_pass_by_reference (cum, mode, type, named)
3376 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
3377 enum machine_mode mode ATTRIBUTE_UNUSED;
3378 tree type;
3379 int named ATTRIBUTE_UNUSED;
3380 {
3381 if (DEFAULT_ABI == ABI_V4
3382 && ((type && AGGREGATE_TYPE_P (type))
3383 || mode == TFmode))
3384 {
3385 if (TARGET_DEBUG_ARG)
3386 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
3387
3388 return 1;
3389 }
3390 return type && int_size_in_bytes (type) <= 0;
3391 }
3392 \f
3393 /* Perform any needed actions needed for a function that is receiving a
3394 variable number of arguments.
3395
3396 CUM is as above.
3397
3398 MODE and TYPE are the mode and type of the current parameter.
3399
3400 PRETEND_SIZE is a variable that should be set to the amount of stack
3401 that must be pushed by the prolog to pretend that our caller pushed
3402 it.
3403
3404 Normally, this macro will push all remaining incoming registers on the
3405 stack and set PRETEND_SIZE to the length of the registers pushed. */
3406
3407 void
3408 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
3409 CUMULATIVE_ARGS *cum;
3410 enum machine_mode mode;
3411 tree type;
3412 int *pretend_size ATTRIBUTE_UNUSED;
3413 int no_rtl;
3414
3415 {
3416 CUMULATIVE_ARGS next_cum;
3417 int reg_size = TARGET_32BIT ? 4 : 8;
3418 rtx save_area = NULL_RTX, mem;
3419 int first_reg_offset, set;
3420 tree fntype;
3421 int stdarg_p;
3422
3423 fntype = TREE_TYPE (current_function_decl);
3424 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
3425 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3426 != void_type_node));
3427
3428 /* For varargs, we do not want to skip the dummy va_dcl argument.
3429 For stdargs, we do want to skip the last named argument. */
3430 next_cum = *cum;
3431 if (stdarg_p)
3432 function_arg_advance (&next_cum, mode, type, 1);
3433
3434 if (DEFAULT_ABI == ABI_V4)
3435 {
3436 /* Indicate to allocate space on the stack for varargs save area. */
3437 cfun->machine->sysv_varargs_p = 1;
3438 if (! no_rtl)
3439 save_area = plus_constant (virtual_stack_vars_rtx,
3440 - RS6000_VARARGS_SIZE);
3441
3442 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
3443 }
3444 else
3445 {
3446 first_reg_offset = next_cum.words;
3447 save_area = virtual_incoming_args_rtx;
3448 cfun->machine->sysv_varargs_p = 0;
3449
3450 if (MUST_PASS_IN_STACK (mode, type))
3451 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
3452 }
3453
3454 set = get_varargs_alias_set ();
3455 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
3456 {
3457 mem = gen_rtx_MEM (BLKmode,
3458 plus_constant (save_area,
3459 first_reg_offset * reg_size)),
3460 set_mem_alias_set (mem, set);
3461 set_mem_align (mem, BITS_PER_WORD);
3462
3463 move_block_from_reg
3464 (GP_ARG_MIN_REG + first_reg_offset, mem,
3465 GP_ARG_NUM_REG - first_reg_offset,
3466 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
3467 }
3468
3469 /* Save FP registers if needed. */
3470 if (DEFAULT_ABI == ABI_V4
3471 && TARGET_HARD_FLOAT && TARGET_FPRS
3472 && ! no_rtl
3473 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
3474 {
3475 int fregno = next_cum.fregno;
3476 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
3477 rtx lab = gen_label_rtx ();
3478 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
3479
3480 emit_jump_insn (gen_rtx_SET (VOIDmode,
3481 pc_rtx,
3482 gen_rtx_IF_THEN_ELSE (VOIDmode,
3483 gen_rtx_NE (VOIDmode, cr1,
3484 const0_rtx),
3485 gen_rtx_LABEL_REF (VOIDmode, lab),
3486 pc_rtx)));
3487
3488 while (fregno <= FP_ARG_V4_MAX_REG)
3489 {
3490 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
3491 set_mem_alias_set (mem, set);
3492 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
3493 fregno++;
3494 off += 8;
3495 }
3496
3497 emit_label (lab);
3498 }
3499 }
3500
3501 /* Create the va_list data type. */
3502
tree
rs6000_build_va_list ()
{
  tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;

  /* For AIX, prefer 'char *' because that's what the system
     header files like.  */
  if (DEFAULT_ABI != ABI_V4)
    return build_pointer_type (char_type_node);

  /* V.4 uses a structure: next GP register index, next FP register
     index, overflow (stack) area pointer, and register save area
     pointer.  */
  record = (*lang_hooks.types.make_type) (RECORD_TYPE);
  type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);

  f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
		      unsigned_char_type_node);
  f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
		      unsigned_char_type_node);
  f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
		      ptr_type_node);
  f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
		      ptr_type_node);

  DECL_FIELD_CONTEXT (f_gpr) = record;
  DECL_FIELD_CONTEXT (f_fpr) = record;
  DECL_FIELD_CONTEXT (f_ovf) = record;
  DECL_FIELD_CONTEXT (f_sav) = record;

  /* Chain the fields onto the record in declaration order; field
     order here must match the accessors in rs6000_va_start/va_arg.  */
  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_gpr;
  TREE_CHAIN (f_gpr) = f_fpr;
  TREE_CHAIN (f_fpr) = f_ovf;
  TREE_CHAIN (f_ovf) = f_sav;

  layout_type (record);

  /* The correct type is an array type of one element.  */
  return build_array_type (record, build_index_type (size_zero_node));
}
3542
3543 /* Implement va_start. */
3544
void
rs6000_va_start (valist, nextarg)
     tree valist;
     rtx nextarg;
{
  HOST_WIDE_INT words, n_gpr, n_fpr;
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;

  /* Only SVR4 needs something special.  */
  if (DEFAULT_ABI != ABI_V4)
    {
      std_expand_builtin_va_start (valist, nextarg);
      return;
    }

  /* Field order here must match rs6000_build_va_list.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  /* VALIST is an array of one record; build references to each field
     of that record.  */
  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Count number of gp and fp argument registers used.  */
  words = current_function_args_info.words;
  n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
  n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;

  if (TARGET_DEBUG_ARG)
    {
      fputs ("va_start: words = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
      fputs (", n_gpr = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
      fputs (", n_fpr = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
      putc ('\n', stderr);
    }

  /* Initialize the register counters from the registers consumed by
     the named arguments.  */
  t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the overflow area.  */
  t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
  if (words != 0)
    t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
	       build_int_2 (words * UNITS_PER_WORD, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the register save area.  */
  t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
  t = build (PLUS_EXPR, TREE_TYPE (sav), t,
	     build_int_2 (-RS6000_VARARGS_SIZE, -1));
  t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
3613
3614 /* Implement va_arg. */
3615
rtx
rs6000_va_arg (valist, type)
     tree valist, type;
{
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
  rtx lab_false, lab_over, addr_rtx, r;

  if (DEFAULT_ABI != ABI_V4)
    {
      /* Variable sized types are passed by reference.  */
      if (int_size_in_bytes (type) <= 0)
	{
	  u = build_pointer_type (type);

	  /* Args grow upward.  */
	  t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
		     build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
	  TREE_SIDE_EFFECTS (t) = 1;

	  t = build1 (NOP_EXPR, build_pointer_type (u), t);
	  TREE_SIDE_EFFECTS (t) = 1;

	  /* Dereference twice: the va_list slot holds a pointer to
	     the actual object.  */
	  t = build1 (INDIRECT_REF, u, t);
	  TREE_SIDE_EFFECTS (t) = 1;

	  return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
	}
      else
	return std_expand_builtin_va_arg (valist, type);
    }

  /* Field order here must match rs6000_build_va_list.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  size = int_size_in_bytes (type);
  rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  /* Classify the argument: which register counter to use (REG),
     how many register slots it takes (N_REG), and where its save
     area starts (SAV_OFS) and how wide each slot is (SAV_SCALE).  */
  if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
    {
      /* Aggregates and long doubles are passed by reference.  */
      indirect_p = 1;
      reg = gpr;
      n_reg = 1;
      sav_ofs = 0;
      sav_scale = 4;
      size = UNITS_PER_WORD;
      rsize = 1;
    }
  else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
    {
      /* FP args go in FP registers, if present.  */
      indirect_p = 0;
      reg = fpr;
      n_reg = 1;
      sav_ofs = 8*4;
      sav_scale = 8;
    }
  else
    {
      /* Otherwise into GP registers.  */
      indirect_p = 0;
      reg = gpr;
      n_reg = rsize;
      sav_ofs = 0;
      sav_scale = 4;
    }

  /* Pull the value out of the saved registers ...  */

  lab_false = gen_label_rtx ();
  lab_over = gen_label_rtx ();
  addr_rtx = gen_reg_rtx (Pmode);

  /* AltiVec vectors never go in registers.  */
  if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
    {
      TREE_THIS_VOLATILE (reg) = 1;
      /* If the register counter is already past the last register
	 that could hold this argument, fall through to the overflow
	 (stack) path at lab_false.  */
      emit_cmp_and_jump_insns
	(expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
	 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
	 lab_false);

      /* Long long is aligned in the registers.  */
      if (n_reg > 1)
	{
	  /* Round the counter up to an even register number.  */
	  u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
		     build_int_2 (n_reg - 1, 0));
	  u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
	  u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
	  TREE_SIDE_EFFECTS (u) = 1;
	  expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
	}

      if (sav_ofs)
	t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
      else
	t = sav;

      /* Post-increment the register counter by the number of slots
	 consumed, using its old value for the address computation.  */
      u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
		 build_int_2 (n_reg, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      u = build1 (CONVERT_EXPR, integer_type_node, u);
      TREE_SIDE_EFFECTS (u) = 1;

      u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      /* addr = save_area_base + counter * slot_size.  */
      t = build (PLUS_EXPR, ptr_type_node, t, u);
      TREE_SIDE_EFFECTS (t) = 1;

      r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
      if (r != addr_rtx)
	emit_move_insn (addr_rtx, r);

      emit_jump_insn (gen_jump (lab_over));
      emit_barrier ();
    }

  emit_label (lab_false);

  /* ... otherwise out of the overflow area.  */

  /* Make sure we don't find reg 7 for the next int arg.

     All AltiVec vectors go in the overflow area.  So in the AltiVec
     case we need to get the vectors from the overflow area, but
     remember where the GPRs and FPRs are.  */
  if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
		    || !TARGET_ALTIVEC))
    {
      t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  /* Care for on-stack alignment if needed.  */
  if (rsize <= 1)
    t = ovf;
  else
    {
      int align;

      /* AltiVec vectors are 16 byte aligned.  */
      if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
	align = 15;
      else
	align = 7;

      /* Round OVF up: t = (ovf + align) & ~align.  */
      t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
      t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
    }
  t = save_expr (t);

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);

  /* Advance the overflow pointer past this argument.  */
  t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_over);

  /* For by-reference arguments, load the actual object's address
     from the slot we just located.  */
  if (indirect_p)
    {
      r = gen_rtx_MEM (Pmode, addr_rtx);
      set_mem_alias_set (r, get_varargs_alias_set ());
      emit_move_insn (addr_rtx, r);
    }

  return addr_rtx;
}
3800
3801 /* Builtins. */
3802
/* Register builtin NAME with signature TYPE and insn code CODE, but
   only when its target-flag MASK is enabled (e.g. MASK_ALTIVEC).  */
#define def_builtin(MASK, NAME, TYPE, CODE)			\
do {								\
  if ((MASK) & target_flags)					\
    builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
		      NULL, NULL_TREE);				\
} while (0)
3809
3810 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3811
/* Each entry maps a target-flag mask and insn code to the builtin's
   source-level name and its ALTIVEC_BUILTIN_* enumerator.  */
static const struct builtin_description bdesc_3arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
};
3838
3839 /* DST operations: void foo (void *, const int, const char). */
3840
/* Data-stream touch builtins; same entry layout as bdesc_3arg.  */
static const struct builtin_description bdesc_dst[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
};
3848
3849 /* Simple binary operations: VECc = foo (VECa, VECb). */
3850
3851 static struct builtin_description bdesc_2arg[] =
3852 {
3853 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3854 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3855 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3856 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3857 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3858 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3859 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3860 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3861 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3862 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3863 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3864 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3865 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3866 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3867 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3868 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3869 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3870 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3871 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3872 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3873 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3874 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3875 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3876 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3877 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3878 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3879 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3880 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3881 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3882 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3883 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3884 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3885 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3886 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3887 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3888 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3889 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3890 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3891 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3892 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3893 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3894 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3895 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3896 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3897 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3898 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3899 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3900 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3901 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3902 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3903 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3904 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3905 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3906 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3907 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3908 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3909 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3910 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3911 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3912 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3913 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3914 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3915 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3916 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3917 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3918 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3919 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3920 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3921 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3922 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3923 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3924 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3925 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3926 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3927 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3928 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3929 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3930 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3931 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3932 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3933 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3934 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3935 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3936 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3937 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3938 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3939 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3940 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3941 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3942 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3943 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3944 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3945 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3946 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3947 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3948 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3949 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3950 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3951 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3952 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3953 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3954 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3955 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3956 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3957 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3958 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3959 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3960 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3961 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3962 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3963 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3964 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3965 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3966
3967 /* Place holder, leave as first spe builtin. */
3968 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
3969 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
3970 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
3971 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
3972 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
3973 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
3974 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
3975 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
3976 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
3977 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
3978 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
3979 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
3980 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
3981 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
3982 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
3983 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
3984 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
3985 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
3986 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
3987 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
3988 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
3989 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
3990 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
3991 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
3992 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
3993 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
3994 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
3995 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
3996 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
3997 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
3998 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
3999 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
4000 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
4001 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
4002 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
4003 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
4004 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
4005 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
4006 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
4007 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
4008 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
4009 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
4010 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
4011 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
4012 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
4013 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
4014 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
4015 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
4016 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
4017 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
4018 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
4019 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
4020 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
4021 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
4022 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
4023 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
4024 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
4025 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
4026 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
4027 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
4028 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
4029 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
4030 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
4031 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
4032 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
4033 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
4034 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
4035 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
4036 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
4037 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
4038 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
4039 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
4040 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
4041 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
4042 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
4043 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
4044 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
4045 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
4046 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
4047 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
4048 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
4049 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
4050 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
4051 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
4052 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
4053 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
4054 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
4055 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
4056 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
4057 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
4058 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
4059 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
4060 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
4061 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
4062 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
4063 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
4064 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
4065 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
4066 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
4067 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
4068 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
4069 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
4070 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
4071 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
4072 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
4073 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
4074 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
4075 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
4076 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
4077
4078 /* SPE binary operations expecting a 5-bit unsigned literal. */
4079 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
4080
4081 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
4082 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
4083 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
4084 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
4085 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
4086 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
4087 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
4088 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
4089 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
4090 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
4091 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
4092 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
4093 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
4094 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
4095 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
4096 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
4097 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
4098 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
4099 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
4100 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
4101 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
4102 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
4103 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
4104 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
4105 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
4106 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
4107
4108 /* Place-holder. Leave as last binary SPE builtin. */
4109 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
4110 };
4111
4112 /* AltiVec predicates. */
4113
/* Describes one AltiVec predicate builtin.  Unlike plain builtins,
   the predicate expander needs both an insn code and the literal
   assembler opcode string to substitute into the pattern.  */
4114 struct builtin_description_predicates
4115 {
4116 const unsigned int mask;	/* target_flags bits required (e.g. MASK_ALTIVEC).  */
4117 const enum insn_code icode;	/* insn pattern used to expand the predicate.  */
4118 const char *opcode;	/* assembler mnemonic, e.g. "*vcmpeqfp.".  */
4119 const char *const name;	/* user-visible __builtin_altivec_* name.  */
4120 const enum rs6000_builtins code;	/* builtin enumerator.  */
4121 };
4122
/* Table of AltiVec predicate builtins.  Each entry: target mask,
   predicate expander insn code (selected by vector mode), assembler
   opcode string, builtin name, builtin enumerator.  */
4123 static const struct builtin_description_predicates bdesc_altivec_preds[] =
4124 {
4125 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
4126 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
4127 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
4128 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
4129 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
4130 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
4131 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
4132 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
4133 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
4134 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
4135 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
4136 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
4137 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
4138 };
4139
4140 /* SPE predicates. */
/* Table of SPE comparison/predicate builtins.  NOTE(review): the
   "Place-holder" comments suggest the init/expand code uses the first
   and last entries as range anchors -- keep the ordering.  */
4141 static struct builtin_description bdesc_spe_predicates[] =
4142 {
4143 /* Place-holder. Leave as first. */
4144 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
4145 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
4146 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
4147 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
4148 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
4149 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
4150 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
4151 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
4152 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
4153 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
4154 /* Place-holder. Leave as last. */
4155 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
4156 };
4157
4158 /* SPE evsel predicates. */
/* Table of SPE evsel builtins: each pairs an evsel variant with the
   comparison insn that feeds it.  NOTE(review): as with
   bdesc_spe_predicates, the placeholder entries appear to anchor a
   first..last range -- keep the ordering.  */
4159 static struct builtin_description bdesc_spe_evsel[] =
4160 {
4161 /* Place-holder. Leave as first. */
4162 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
4163 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
4164 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
4165 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
4166 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
4167 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
4168 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
4169 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
4170 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
4171 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
4172 /* Place-holder. Leave as last. */
4173 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
4174 };
4175
4176 /* ABS* operations. */
4177
/* AltiVec absolute-value builtins: plain abs (abs_*) and saturating
   abs (abss_*), one entry per vector mode.  These are expanded by
   altivec_expand_abs_builtin, whose patterns take extra scratch
   operands.  */
4178 static const struct builtin_description bdesc_abs[] =
4179 {
4180 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
4181 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
4182 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
4183 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
4184 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
4185 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
4186 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
4187 };
4188
4189 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
4190 foo (VECa). */
4191
/* Table of one-operand builtins, AltiVec first, then SPE.  Per the
   comment below, the SPE portion must stay contiguous from EVABS
   through EVSUBFUSIAAW.  */
4192 static struct builtin_description bdesc_1arg[] =
4193 {
4194 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
4195 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
4196 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
4197 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
4198 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
4199 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
4200 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
4201 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
4202 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
4203 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
4204 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
4205 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
4206 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
4207 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
4208 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
4209 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
4210 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
4211
4212 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
4213 end with SPE_BUILTIN_EVSUBFUSIAAW. */
4214 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
4215 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
4216 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
4217 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
4218 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
4219 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
4220 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
4221 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
4222 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
4223 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
4224 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
4225 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
4226 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
4227 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
4228 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
4229 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
4230 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
4231 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
4232 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
4233 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
4234 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
4235 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
4236 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
4237 { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
4238 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
4239 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
4240 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
4241 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
4242 { 0, CODE_FOR_spe_evsplatfi, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI },
4243 { 0, CODE_FOR_spe_evsplati, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI },
4244
4245 /* Place-holder. Leave as last unary SPE builtin. */
4246 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
4247 };
4248
4249 static rtx
4250 rs6000_expand_unop_builtin (icode, arglist, target)
4251 enum insn_code icode;
4252 tree arglist;
4253 rtx target;
4254 {
4255 rtx pat;
4256 tree arg0 = TREE_VALUE (arglist);
4257 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4258 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4259 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4260
4261 if (icode == CODE_FOR_nothing)
4262 /* Builtin not supported on this processor. */
4263 return 0;
4264
4265 /* If we got invalid arguments bail out before generating bad rtl. */
4266 if (arg0 == error_mark_node)
4267 return const0_rtx;
4268
4269 if (icode == CODE_FOR_altivec_vspltisb
4270 || icode == CODE_FOR_altivec_vspltish
4271 || icode == CODE_FOR_altivec_vspltisw
4272 || icode == CODE_FOR_spe_evsplatfi
4273 || icode == CODE_FOR_spe_evsplati)
4274 {
4275 /* Only allow 5-bit *signed* literals. */
4276 if (GET_CODE (op0) != CONST_INT
4277 || INTVAL (op0) > 0x1f
4278 || INTVAL (op0) < -0x1f)
4279 {
4280 error ("argument 1 must be a 5-bit signed literal");
4281 return const0_rtx;
4282 }
4283 }
4284
4285 if (target == 0
4286 || GET_MODE (target) != tmode
4287 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4288 target = gen_reg_rtx (tmode);
4289
4290 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4291 op0 = copy_to_mode_reg (mode0, op0);
4292
4293 pat = GEN_FCN (icode) (target, op0);
4294 if (! pat)
4295 return 0;
4296 emit_insn (pat);
4297
4298 return target;
4299 }
4300
4301 static rtx
4302 altivec_expand_abs_builtin (icode, arglist, target)
4303 enum insn_code icode;
4304 tree arglist;
4305 rtx target;
4306 {
4307 rtx pat, scratch1, scratch2;
4308 tree arg0 = TREE_VALUE (arglist);
4309 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4310 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4311 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4312
4313 /* If we have invalid arguments, bail out before generating bad rtl. */
4314 if (arg0 == error_mark_node)
4315 return const0_rtx;
4316
4317 if (target == 0
4318 || GET_MODE (target) != tmode
4319 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4320 target = gen_reg_rtx (tmode);
4321
4322 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4323 op0 = copy_to_mode_reg (mode0, op0);
4324
4325 scratch1 = gen_reg_rtx (mode0);
4326 scratch2 = gen_reg_rtx (mode0);
4327
4328 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
4329 if (! pat)
4330 return 0;
4331 emit_insn (pat);
4332
4333 return target;
4334 }
4335
4336 static rtx
4337 rs6000_expand_binop_builtin (icode, arglist, target)
4338 enum insn_code icode;
4339 tree arglist;
4340 rtx target;
4341 {
4342 rtx pat;
4343 tree arg0 = TREE_VALUE (arglist);
4344 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4345 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4346 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4347 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4348 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4349 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4350
4351 if (icode == CODE_FOR_nothing)
4352 /* Builtin not supported on this processor. */
4353 return 0;
4354
4355 /* If we got invalid arguments bail out before generating bad rtl. */
4356 if (arg0 == error_mark_node || arg1 == error_mark_node)
4357 return const0_rtx;
4358
4359 if (icode == CODE_FOR_altivec_vcfux
4360 || icode == CODE_FOR_altivec_vcfsx
4361 || icode == CODE_FOR_altivec_vctsxs
4362 || icode == CODE_FOR_altivec_vctuxs
4363 || icode == CODE_FOR_altivec_vspltb
4364 || icode == CODE_FOR_altivec_vsplth
4365 || icode == CODE_FOR_altivec_vspltw
4366 || icode == CODE_FOR_spe_evaddiw
4367 || icode == CODE_FOR_spe_evldd
4368 || icode == CODE_FOR_spe_evldh
4369 || icode == CODE_FOR_spe_evldw
4370 || icode == CODE_FOR_spe_evlhhesplat
4371 || icode == CODE_FOR_spe_evlhhossplat
4372 || icode == CODE_FOR_spe_evlhhousplat
4373 || icode == CODE_FOR_spe_evlwhe
4374 || icode == CODE_FOR_spe_evlwhos
4375 || icode == CODE_FOR_spe_evlwhou
4376 || icode == CODE_FOR_spe_evlwhsplat
4377 || icode == CODE_FOR_spe_evlwwsplat
4378 || icode == CODE_FOR_spe_evrlwi
4379 || icode == CODE_FOR_spe_evslwi
4380 || icode == CODE_FOR_spe_evsrwis
4381 || icode == CODE_FOR_spe_evsrwiu)
4382 {
4383 /* Only allow 5-bit unsigned literals. */
4384 if (TREE_CODE (arg1) != INTEGER_CST
4385 || TREE_INT_CST_LOW (arg1) & ~0x1f)
4386 {
4387 error ("argument 2 must be a 5-bit unsigned literal");
4388 return const0_rtx;
4389 }
4390 }
4391
4392 if (target == 0
4393 || GET_MODE (target) != tmode
4394 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4395 target = gen_reg_rtx (tmode);
4396
4397 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4398 op0 = copy_to_mode_reg (mode0, op0);
4399 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4400 op1 = copy_to_mode_reg (mode1, op1);
4401
4402 pat = GEN_FCN (icode) (target, op0, op1);
4403 if (! pat)
4404 return 0;
4405 emit_insn (pat);
4406
4407 return target;
4408 }
4409
/* Expand an AltiVec predicate builtin (the vec_any_* / vec_all_*
   family).  ICODE is the vector comparison pattern to emit and OPCODE
   is the assembler mnemonic, passed through to the pattern as a
   SYMBOL_REF operand.  The first entry of ARGLIST selects which CR6
   bit encodes the requested condition; the remaining two are the
   vectors to compare.  Returns an SImode rtx holding the boolean
   result, const0_rtx after a diagnosed error, or 0 if the pattern
   could not be generated.  */
static rtx
altivec_expand_predicate_builtin (icode, opcode, arglist, target)
     enum insn_code icode;
     const char *opcode;
     tree arglist;
     rtx target;
{
  rtx pat, scratch;
  tree cr6_form = TREE_VALUE (arglist);
  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  enum machine_mode tmode = SImode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int cr6_form_int;

  /* The CR6 selector must be known at compile time; it chooses which
     test insn we emit below.  */
  if (TREE_CODE (cr6_form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_altivec_predicate must be a constant");
      return const0_rtx;
    }
  else
    cr6_form_int = TREE_INT_CST_LOW (cr6_form);

  /* Both comparison operands must share a vector mode.  */
  if (mode0 != mode1)
    abort ();

  /* If we have invalid arguments, bail out before generating bad rtl.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  /* The vector comparison result itself is discarded; only the CR6
     side effect of the dot-form instruction is used.  */
  scratch = gen_reg_rtx (mode0);

  pat = GEN_FCN (icode) (scratch, op0, op1,
			 gen_rtx (SYMBOL_REF, Pmode, opcode));
  if (! pat)
    return 0;
  emit_insn (pat);

  /* The vec_any* and vec_all* predicates use the same opcodes for two
     different operations, but the bits in CR6 will be different
     depending on what information we want.  So we have to play tricks
     with CR6 to get the right bits out.

     If you think this is disgusting, look at the specs for the
     AltiVec predicates.  */

  switch (cr6_form_int)
    {
    case 0:
      emit_insn (gen_cr6_test_for_zero (target));
      break;
    case 1:
      emit_insn (gen_cr6_test_for_zero_reverse (target));
      break;
    case 2:
      emit_insn (gen_cr6_test_for_lt (target));
      break;
    case 3:
      emit_insn (gen_cr6_test_for_lt_reverse (target));
      break;
    default:
      error ("argument 1 of __builtin_altivec_predicate is out of range");
      break;
    }

  return target;
}
4490
/* Expand a vector store builtin (stvx and friends; also reused for
   the SPE evst* patterns).  ICODE's insn operands are not in ARGLIST
   order: the three arguments (arg0, arg1, arg2) are fed to GEN_FCN as
   (op1, op2, op0).  NOTE(review): each predicate below is matched
   with the operand slot the rtx will actually occupy in the pattern,
   which is why the operand indices look shuffled — do not "fix" them.
   A store produces no value, so NULL_RTX is always returned (or
   const0_rtx for erroneous arguments).  */
static rtx
altivec_expand_stv_builtin (icode, arglist)
     enum insn_code icode;
     tree arglist;
{
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
  rtx pat;
  enum machine_mode mode0 = insn_data[icode].operand[0].mode;
  enum machine_mode mode1 = insn_data[icode].operand[1].mode;
  enum machine_mode mode2 = insn_data[icode].operand[2].mode;

  /* Invalid arguments.  Bail before doing anything stoopid! */
  if (arg0 == error_mark_node
      || arg1 == error_mark_node
      || arg2 == error_mark_node)
    return const0_rtx;

  /* Force each rtx into a form accepted by the insn operand it will
     fill (see the GEN_FCN call for the permutation).  */
  if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
    op0 = copy_to_mode_reg (mode2, op0);
  if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
    op1 = copy_to_mode_reg (mode0, op1);
  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
    op2 = copy_to_mode_reg (mode1, op2);

  pat = GEN_FCN (icode) (op1, op2, op0);
  if (pat)
    emit_insn (pat);
  return NULL_RTX;
}
4525
4526 static rtx
4527 rs6000_expand_ternop_builtin (icode, arglist, target)
4528 enum insn_code icode;
4529 tree arglist;
4530 rtx target;
4531 {
4532 rtx pat;
4533 tree arg0 = TREE_VALUE (arglist);
4534 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4535 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4536 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4537 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4538 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4539 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4540 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4541 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4542 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
4543
4544 if (icode == CODE_FOR_nothing)
4545 /* Builtin not supported on this processor. */
4546 return 0;
4547
4548 /* If we got invalid arguments bail out before generating bad rtl. */
4549 if (arg0 == error_mark_node
4550 || arg1 == error_mark_node
4551 || arg2 == error_mark_node)
4552 return const0_rtx;
4553
4554 if (icode == CODE_FOR_altivec_vsldoi_4sf
4555 || icode == CODE_FOR_altivec_vsldoi_4si
4556 || icode == CODE_FOR_altivec_vsldoi_8hi
4557 || icode == CODE_FOR_altivec_vsldoi_16qi)
4558 {
4559 /* Only allow 4-bit unsigned literals. */
4560 if (TREE_CODE (arg2) != INTEGER_CST
4561 || TREE_INT_CST_LOW (arg2) & ~0xf)
4562 {
4563 error ("argument 3 must be a 4-bit unsigned literal");
4564 return const0_rtx;
4565 }
4566 }
4567
4568 if (target == 0
4569 || GET_MODE (target) != tmode
4570 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4571 target = gen_reg_rtx (tmode);
4572
4573 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4574 op0 = copy_to_mode_reg (mode0, op0);
4575 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4576 op1 = copy_to_mode_reg (mode1, op1);
4577 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
4578 op2 = copy_to_mode_reg (mode2, op2);
4579
4580 pat = GEN_FCN (icode) (target, op0, op1, op2);
4581 if (! pat)
4582 return 0;
4583 emit_insn (pat);
4584
4585 return target;
4586 }
4587
4588 /* Expand the lvx builtins. */
4589 static rtx
4590 altivec_expand_ld_builtin (exp, target, expandedp)
4591 tree exp;
4592 rtx target;
4593 bool *expandedp;
4594 {
4595 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4596 tree arglist = TREE_OPERAND (exp, 1);
4597 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4598 tree arg0;
4599 enum machine_mode tmode, mode0;
4600 rtx pat, op0;
4601 enum insn_code icode;
4602
4603 switch (fcode)
4604 {
4605 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
4606 icode = CODE_FOR_altivec_lvx_16qi;
4607 break;
4608 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
4609 icode = CODE_FOR_altivec_lvx_8hi;
4610 break;
4611 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
4612 icode = CODE_FOR_altivec_lvx_4si;
4613 break;
4614 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
4615 icode = CODE_FOR_altivec_lvx_4sf;
4616 break;
4617 default:
4618 *expandedp = false;
4619 return NULL_RTX;
4620 }
4621
4622 *expandedp = true;
4623
4624 arg0 = TREE_VALUE (arglist);
4625 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4626 tmode = insn_data[icode].operand[0].mode;
4627 mode0 = insn_data[icode].operand[1].mode;
4628
4629 if (target == 0
4630 || GET_MODE (target) != tmode
4631 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4632 target = gen_reg_rtx (tmode);
4633
4634 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4635 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4636
4637 pat = GEN_FCN (icode) (target, op0);
4638 if (! pat)
4639 return 0;
4640 emit_insn (pat);
4641 return target;
4642 }
4643
4644 /* Expand the stvx builtins. */
4645 static rtx
4646 altivec_expand_st_builtin (exp, target, expandedp)
4647 tree exp;
4648 rtx target ATTRIBUTE_UNUSED;
4649 bool *expandedp;
4650 {
4651 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4652 tree arglist = TREE_OPERAND (exp, 1);
4653 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4654 tree arg0, arg1;
4655 enum machine_mode mode0, mode1;
4656 rtx pat, op0, op1;
4657 enum insn_code icode;
4658
4659 switch (fcode)
4660 {
4661 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
4662 icode = CODE_FOR_altivec_stvx_16qi;
4663 break;
4664 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
4665 icode = CODE_FOR_altivec_stvx_8hi;
4666 break;
4667 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
4668 icode = CODE_FOR_altivec_stvx_4si;
4669 break;
4670 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
4671 icode = CODE_FOR_altivec_stvx_4sf;
4672 break;
4673 default:
4674 *expandedp = false;
4675 return NULL_RTX;
4676 }
4677
4678 arg0 = TREE_VALUE (arglist);
4679 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4680 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4681 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4682 mode0 = insn_data[icode].operand[0].mode;
4683 mode1 = insn_data[icode].operand[1].mode;
4684
4685 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4686 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4687 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
4688 op1 = copy_to_mode_reg (mode1, op1);
4689
4690 pat = GEN_FCN (icode) (op0, op1);
4691 if (pat)
4692 emit_insn (pat);
4693
4694 *expandedp = true;
4695 return NULL_RTX;
4696 }
4697
/* Expand the dst builtins (data-stream touch; see bdesc_dst for the
   exact variants handled).  Each takes an address, a control word and
   a stream selector that must be a 2-bit compile-time literal.  Sets
   *EXPANDEDP to true when FCODE names one of the dst builtins; the
   return value matters only for error reporting (const0_rtx).  */
static rtx
altivec_expand_dst_builtin (exp, target, expandedp)
     tree exp;
     rtx target ATTRIBUTE_UNUSED;
     bool *expandedp;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg0, arg1, arg2;
  enum machine_mode mode0, mode1, mode2;
  rtx pat, op0, op1, op2;
  struct builtin_description *d;
  size_t i;

  *expandedp = false;

  /* Handle DST variants. */
  d = (struct builtin_description *) bdesc_dst;
  for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
    if (d->code == fcode)
      {
	arg0 = TREE_VALUE (arglist);
	arg1 = TREE_VALUE (TREE_CHAIN (arglist));
	arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
	op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
	op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
	op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
	mode0 = insn_data[d->icode].operand[0].mode;
	mode1 = insn_data[d->icode].operand[1].mode;
	mode2 = insn_data[d->icode].operand[2].mode;

	/* Invalid arguments, bail out before generating bad rtl. */
	if (arg0 == error_mark_node
	    || arg1 == error_mark_node
	    || arg2 == error_mark_node)
	  return const0_rtx;

	/* The stream selector is encoded directly in the instruction,
	   so it must be a small constant.  */
	if (TREE_CODE (arg2) != INTEGER_CST
	    || TREE_INT_CST_LOW (arg2) & ~0x3)
	  {
	    error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
	    return const0_rtx;
	  }

	if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
	  op0 = copy_to_mode_reg (mode0, op0);
	if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
	  op1 = copy_to_mode_reg (mode1, op1);

	/* op2 was just validated as a literal; it needs no reload.  */
	pat = GEN_FCN (d->icode) (op0, op1, op2);
	if (pat != 0)
	  emit_insn (pat);

	*expandedp = true;
	return NULL_RTX;
      }

  return NULL_RTX;
}
4759
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.  Dispatches in
   order to the load, store and dst sub-expanders, then handles the
   remaining AltiVec builtins (stores, VSCR access, data-stream stop,
   abs, predicates, and the lv* loads) inline.  */
static rtx
altivec_expand_builtin (exp, target, expandedp)
     tree exp;
     rtx target;
     bool *expandedp;
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  enum insn_code icode;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0;
  rtx op0, pat;
  enum machine_mode tmode, mode0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);

  /* Try each specialized sub-expander in turn; each sets *expandedp
     when it recognized the builtin.  */
  target = altivec_expand_ld_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_st_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_dst_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  /* Assume success from here on; the fall-through at the bottom
     resets *expandedp if nothing matched.  */
  *expandedp = true;

  switch (fcode)
    {
    case ALTIVEC_BUILTIN_STVX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
    case ALTIVEC_BUILTIN_STVEBX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
    case ALTIVEC_BUILTIN_STVEHX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
    case ALTIVEC_BUILTIN_STVEWX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
    case ALTIVEC_BUILTIN_STVXL:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);

    /* Move from the vector status and control register.  */
    case ALTIVEC_BUILTIN_MFVSCR:
      icode = CODE_FOR_altivec_mfvscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    /* Move to the vector status and control register; no result.  */
    case ALTIVEC_BUILTIN_MTVSCR:
      icode = CODE_FOR_altivec_mtvscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl. */
      if (arg0 == error_mark_node)
	return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;

    /* Stop all data streams.  */
    case ALTIVEC_BUILTIN_DSSALL:
      emit_insn (gen_altivec_dssall ());
      return NULL_RTX;

    /* Stop one data stream; the stream id must be a 2-bit literal.  */
    case ALTIVEC_BUILTIN_DSS:
      icode = CODE_FOR_altivec_dss;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl. */
      if (arg0 == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (arg0) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg0) & ~0x3)
	{
	  error ("argument to dss must be a 2-bit unsigned literal");
	  return const0_rtx;
	}

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      emit_insn (gen_altivec_dss (op0));
      return NULL_RTX;
    }

  /* Expand abs* operations. */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
    if (d->code == fcode)
      return altivec_expand_abs_builtin (d->icode, arglist, target);

  /* Expand the AltiVec predicates. */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
    if (dp->code == fcode)
      return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);

  /* LV* are funky.  We initialized them differently. */
  switch (fcode)
    {
    case ALTIVEC_BUILTIN_LVSL:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVSR:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEBX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEHX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEWX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVXL:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx,
					  arglist, target);
    default:
      break;
      /* Fall through. */
    }

  /* Nothing matched; tell the caller to keep looking.  */
  *expandedp = false;
  return NULL_RTX;
}
4911
/* Binops that need to be initialized manually, but can be expanded
   automagically by rs6000_expand_binop_builtin.
   Entries are { mask, icode, name, code }; the mask is 0 here —
   presumably filled in later during builtin initialization (see
   enable_mask_for_builtins) — TODO confirm.  */
static struct builtin_description bdesc_2arg_spe[] =
{
  { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
  { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
  { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
  { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
  { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
  { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
  { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
  { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
  { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
  { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
  { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
  { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
  { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
  { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
  { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
  { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
  { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
  { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
  { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
  { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
  { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
  { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
};
4939
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.

   This expands the SPE builtins that are not simple unary and binary
   operations: the evst* stores, the SPEFSCR moves, the predicates and
   the evsel conditionals.  */
static rtx
spe_expand_builtin (exp, target, expandedp)
     tree exp;
     rtx target;
     bool *expandedp;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg1, arg0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  enum insn_code icode;
  enum machine_mode tmode, mode0;
  rtx pat, op0;
  struct builtin_description *d;
  size_t i;

  *expandedp = true;

  /* Syntax check for a 5-bit unsigned immediate.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDD:
    case SPE_BUILTIN_EVSTDH:
    case SPE_BUILTIN_EVSTDW:
    case SPE_BUILTIN_EVSTWHE:
    case SPE_BUILTIN_EVSTWHO:
    case SPE_BUILTIN_EVSTWWE:
    case SPE_BUILTIN_EVSTWWO:
      /* The third argument is the offset immediate for these stores.  */
      arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      if (TREE_CODE (arg1) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg1) & ~0x1f)
	{
	  error ("argument 2 must be a 5-bit unsigned literal");
	  return const0_rtx;
	}
      break;
    default:
      break;
    }

  /* Table-driven expansion: binops first, then predicates, then
     evsel.  Each helper returns directly when it matches.  */
  d = (struct builtin_description *) bdesc_2arg_spe;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
    if (d->code == fcode)
      return rs6000_expand_binop_builtin (d->icode, arglist, target);

  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_predicate_builtin (d->icode, arglist, target);

  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_evsel_builtin (d->icode, arglist, target);

  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDDX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
    case SPE_BUILTIN_EVSTDHX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
    case SPE_BUILTIN_EVSTDWX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
    case SPE_BUILTIN_EVSTWHEX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
    case SPE_BUILTIN_EVSTWHOX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
    case SPE_BUILTIN_EVSTWWEX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
    case SPE_BUILTIN_EVSTWWOX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
    case SPE_BUILTIN_EVSTDD:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
    case SPE_BUILTIN_EVSTDH:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
    case SPE_BUILTIN_EVSTDW:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
    case SPE_BUILTIN_EVSTWHE:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
    case SPE_BUILTIN_EVSTWHO:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
    case SPE_BUILTIN_EVSTWWE:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
    case SPE_BUILTIN_EVSTWWO:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
    /* Move from the SPE floating-point status and control register.  */
    case SPE_BUILTIN_MFSPEFSCR:
      icode = CODE_FOR_spe_mfspefscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;
    /* Move to the SPE floating-point status and control register.  */
    case SPE_BUILTIN_MTSPEFSCR:
      icode = CODE_FOR_spe_mtspefscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      if (arg0 == error_mark_node)
	return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;
    default:
      break;
    }

  /* Not an SPE builtin we handle here.  */
  *expandedp = false;
  return NULL_RTX;
}
5067
/* Expand an SPE predicate builtin.  ICODE is the comparison pattern;
   the first entry of ARGLIST is a constant selecting the all/any/
   upper/lower variant, and the remaining two are the operands to
   compare.  Returns an SImode rtx holding the boolean result,
   const0_rtx after a diagnosed error.  */
static rtx
spe_expand_predicate_builtin (icode, arglist, target)
     enum insn_code icode;
     tree arglist;
     rtx target;
{
  rtx pat, scratch, tmp;
  tree form = TREE_VALUE (arglist);
  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int form_int;
  enum rtx_code code;

  /* The variant selector must be known at compile time.  */
  if (TREE_CODE (form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_spe_predicate must be a constant");
      return const0_rtx;
    }
  else
    form_int = TREE_INT_CST_LOW (form);

  /* Both comparison operands must share a mode.  */
  if (mode0 != mode1)
    abort ();

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != SImode
      || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
    target = gen_reg_rtx (SImode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  /* The comparison writes a CC register; we read its bits below.  */
  scratch = gen_reg_rtx (CCmode);

  pat = GEN_FCN (icode) (scratch, op0, op1);
  if (! pat)
    return const0_rtx;
  emit_insn (pat);

  /* There are 4 variants for each predicate: _any_, _all_, _upper_,
     _lower_.  We use one compare, but look in different bits of the
     CR for each variant.

     There are 2 elements in each SPE simd type (upper/lower).  The CR
     bits are set as follows:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     U     |   L    | (U | L) | (U & L)

     So, for an "all" relationship, BIT 3 would be set.
     For an "any" relationship, BIT 2 would be set.  Etc.

     Following traditional nomenclature, these bits map to:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     LT    | GT     | EQ      | OV

     Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
  */

  switch (form_int)
    {
      /* All variant.  OV bit.  */
    case 0:
      /* We need to get to the OV bit, which is the ORDERED bit.  We
	 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
	 that's ugly and will trigger a validate_condition_mode abort.
	 So let's just use another pattern.  */
      emit_insn (gen_move_from_CR_ov_bit (target, scratch));
      return target;
      /* Any variant.  EQ bit.  */
    case 1:
      code = EQ;
      break;
      /* Upper variant.  LT bit.  */
    case 2:
      code = LT;
      break;
      /* Lower variant.  GT bit.  */
    case 3:
      code = GT;
      break;
    default:
      error ("argument 1 of __builtin_spe_predicate is out of range");
      return const0_rtx;
    }

  /* Materialize the selected CC bit as a 0/1 SImode value.  */
  tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
  emit_move_insn (target, tmp);

  return target;
}
5169
/* The evsel builtins look like this:

     e = __builtin_spe_evsel_OP (a, b, c, d);

   and work like this:

     e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
     e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];

   ICODE is the comparison pattern for *OP*; the evsel itself is
   emitted afterwards, keyed off the CC result of the compare.  */

static rtx
spe_expand_evsel_builtin (icode, arglist, target)
     enum insn_code icode;
     tree arglist;
     rtx target;
{
  rtx pat, scratch;
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
  rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  if (mode0 != mode1)
    abort ();

  if (arg0 == error_mark_node || arg1 == error_mark_node
      || arg2 == error_mark_node || arg3 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != mode0
      || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
    target = gen_reg_rtx (mode0);

  /* All four operands share mode0 (checked above), so operand[1]'s
     predicate is deliberately reused for each of them.  */
  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode0, op1);
  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
    op2 = copy_to_mode_reg (mode0, op2);
  if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
    op3 = copy_to_mode_reg (mode0, op3);

  /* Generate the compare.  */
  scratch = gen_reg_rtx (CCmode);
  pat = GEN_FCN (icode) (scratch, op0, op1);
  if (! pat)
    return const0_rtx;
  emit_insn (pat);

  /* Select the evsel pattern matching the element type.  */
  if (mode0 == V2SImode)
    emit_insn (gen_spe_evsel (target, op2, op3, scratch));
  else
    emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));

  return target;
}
5233
5234 /* Expand an expression EXP that calls a built-in function,
5235 with result going to TARGET if that's convenient
5236 (and in mode MODE if that's convenient).
5237 SUBTARGET may be used as the target for computing one of EXP's operands.
5238 IGNORE is nonzero if the value is to be ignored. */
5239
5240 static rtx
5241 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
5242 tree exp;
5243 rtx target;
5244 rtx subtarget ATTRIBUTE_UNUSED;
5245 enum machine_mode mode ATTRIBUTE_UNUSED;
5246 int ignore ATTRIBUTE_UNUSED;
5247 {
5248 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5249 tree arglist = TREE_OPERAND (exp, 1);
5250 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5251 struct builtin_description *d;
5252 size_t i;
5253 rtx ret;
5254 bool success;
5255
5256 if (TARGET_ALTIVEC)
5257 {
5258 ret = altivec_expand_builtin (exp, target, &success);
5259
5260 if (success)
5261 return ret;
5262 }
5263 if (TARGET_SPE)
5264 {
5265 ret = spe_expand_builtin (exp, target, &success);
5266
5267 if (success)
5268 return ret;
5269 }
5270
5271 if (TARGET_ALTIVEC || TARGET_SPE)
5272 {
5273 /* Handle simple unary operations. */
5274 d = (struct builtin_description *) bdesc_1arg;
5275 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5276 if (d->code == fcode)
5277 return rs6000_expand_unop_builtin (d->icode, arglist, target);
5278
5279 /* Handle simple binary operations. */
5280 d = (struct builtin_description *) bdesc_2arg;
5281 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5282 if (d->code == fcode)
5283 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5284
5285 /* Handle simple ternary operations. */
5286 d = (struct builtin_description *) bdesc_3arg;
5287 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
5288 if (d->code == fcode)
5289 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
5290 }
5291
5292 abort ();
5293 return NULL_RTX;
5294 }
5295
5296 static void
5297 rs6000_init_builtins ()
5298 {
5299 if (TARGET_SPE)
5300 spe_init_builtins ();
5301 if (TARGET_ALTIVEC)
5302 altivec_init_builtins ();
5303 if (TARGET_ALTIVEC || TARGET_SPE)
5304 rs6000_common_init_builtins ();
5305 }
5306
5307 /* Search through a set of builtins and enable the mask bits.
5308 DESC is an array of builtins.
5309 SIZE is the total number of builtins.
5310 START is the builtin enum at which to start.
5311 END is the builtin enum at which to end. */
5312 static void
5313 enable_mask_for_builtins (desc, size, start, end)
5314 struct builtin_description *desc;
5315 int size;
5316 enum rs6000_builtins start, end;
5317 {
5318 int i;
5319
5320 for (i = 0; i < size; ++i)
5321 if (desc[i].code == start)
5322 break;
5323
5324 if (i == size)
5325 return;
5326
5327 for (; i < size; ++i)
5328 {
5329 /* Flip all the bits on. */
5330 desc[i].mask = target_flags;
5331 if (desc[i].code == end)
5332 break;
5333 }
5334 }
5335
/* Register the SPE-specific builtin functions: their function-type
   nodes, the mask enabling for the shared simple builtins, the
   irregular load/store and SPEFSCR builtins, and the predicate and
   evsel builtins whose types depend on each pattern's operand mode.  */
static void
spe_init_builtins ()
{
  tree endlink = void_list_node;
  tree puint_type_node = build_pointer_type (unsigned_type_node);
  tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
  tree pv2si_type_node = build_pointer_type (V2SI_type_node);
  struct builtin_description *d;
  size_t i;

  /* Function types taking four vector arguments (used by evsel).  */
  tree v2si_ftype_4_v2si
    = build_function_type
    (V2SI_type_node,
     tree_cons (NULL_TREE, V2SI_type_node,
		tree_cons (NULL_TREE, V2SI_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, V2SI_type_node,
						 endlink)))));

  tree v2sf_ftype_4_v2sf
    = build_function_type
    (V2SF_type_node,
     tree_cons (NULL_TREE, V2SF_type_node,
		tree_cons (NULL_TREE, V2SF_type_node,
			   tree_cons (NULL_TREE, V2SF_type_node,
				      tree_cons (NULL_TREE, V2SF_type_node,
						 endlink)))));

  /* Predicate types: int result, int condition code plus two vectors.  */
  tree int_ftype_int_v2si_v2si
    = build_function_type
    (integer_type_node,
     tree_cons (NULL_TREE, integer_type_node,
		tree_cons (NULL_TREE, V2SI_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      endlink))));

  tree int_ftype_int_v2sf_v2sf
    = build_function_type
    (integer_type_node,
     tree_cons (NULL_TREE, integer_type_node,
		tree_cons (NULL_TREE, V2SF_type_node,
			   tree_cons (NULL_TREE, V2SF_type_node,
				      endlink))));

  /* Store types: value, base pointer, offset (int or 5-bit literal).  */
  tree void_ftype_v2si_puint_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, puint_type_node,
						 tree_cons (NULL_TREE,
							    integer_type_node,
							    endlink))));

  tree void_ftype_v2si_puint_char
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, puint_type_node,
						 tree_cons (NULL_TREE,
							    char_type_node,
							    endlink))));

  tree void_ftype_v2si_pv2si_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, pv2si_type_node,
						 tree_cons (NULL_TREE,
							    integer_type_node,
							    endlink))));

  tree void_ftype_v2si_pv2si_char
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, pv2si_type_node,
						 tree_cons (NULL_TREE,
							    char_type_node,
							    endlink))));

  /* Types for the SPEFSCR move builtins.  */
  tree void_ftype_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, integer_type_node, endlink));

  tree int_ftype_void
    = build_function_type (integer_type_node,
			   tree_cons (NULL_TREE, void_type_node, endlink));

  /* Load types: base pointer plus offset.  */
  tree v2si_ftype_pv2si_int
    = build_function_type (V2SI_type_node,
			   tree_cons (NULL_TREE, pv2si_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));

  tree v2si_ftype_puint_int
    = build_function_type (V2SI_type_node,
			   tree_cons (NULL_TREE, puint_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));

  tree v2si_ftype_pushort_int
    = build_function_type (V2SI_type_node,
			   tree_cons (NULL_TREE, pushort_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));

  /* The initialization of the simple binary and unary builtins is
     done in rs6000_common_init_builtins, but we have to enable the
     mask bits here manually because we have run out of `target_flags'
     bits.  We really need to redesign this mask business.  */

  enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
			    ARRAY_SIZE (bdesc_2arg),
			    SPE_BUILTIN_EVADDW,
			    SPE_BUILTIN_EVXOR);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
			    ARRAY_SIZE (bdesc_1arg),
			    SPE_BUILTIN_EVABS,
			    SPE_BUILTIN_EVSUBFUSIAAW);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
			    ARRAY_SIZE (bdesc_spe_predicates),
			    SPE_BUILTIN_EVCMPEQ,
			    SPE_BUILTIN_EVFSTSTLT);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
			    ARRAY_SIZE (bdesc_spe_evsel),
			    SPE_BUILTIN_EVSEL_CMPGTS,
			    SPE_BUILTIN_EVSEL_FSTSTEQ);

  /* Initialize irregular SPE builtins.  */

  def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
  def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
  def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
  def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
  def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
  def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
  def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
  def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
  def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
  def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
  def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
  def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
  def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
  def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
  def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
  def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);

  /* Loads.  */
  def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
  def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
  def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
  def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
  def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
  def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
  def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
  def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
  def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
  def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
  def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
  def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
  def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
  def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
  def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);

  /* Predicates.  Choose the function type from the mode of the
     pattern's second operand (the first vector input).  */
  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
    {
      tree type;

      switch (insn_data[d->icode].operand[1].mode)
	{
	case V2SImode:
	  type = int_ftype_int_v2si_v2si;
	  break;
	case V2SFmode:
	  type = int_ftype_int_v2sf_v2sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (d->mask, d->name, type, d->code);
    }

  /* Evsel predicates.  Likewise keyed off the second operand's mode.  */
  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
    {
      tree type;

      switch (insn_data[d->icode].operand[1].mode)
	{
	case V2SImode:
	  type = v2si_ftype_4_v2si;
	  break;
	case V2SFmode:
	  type = v2sf_ftype_4_v2sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (d->mask, d->name, type, d->code);
    }
}
5545
/* Register the AltiVec-specific builtin functions: the internal
   load/store builtins, data-stream and VSCR builtins, the lvx/stvx
   family, the dst variants, and the predicate and abs builtins whose
   types are chosen from each pattern's operand mode.  */
static void
altivec_init_builtins ()
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  tree pfloat_type_node = build_pointer_type (float_type_node);
  tree pint_type_node = build_pointer_type (integer_type_node);
  tree pshort_type_node = build_pointer_type (short_integer_type_node);
  tree pchar_type_node = build_pointer_type (char_type_node);

  tree pvoid_type_node = build_pointer_type (void_type_node);

  /* Const-qualified pointer types for the load builtins, which do not
     modify what they point to.  */
  tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
  tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
  tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
  tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));

  tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));

  tree int_ftype_int_v4si_v4si
    = build_function_type_list (integer_type_node,
				integer_type_node, V4SI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_pcfloat
    = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
  tree void_ftype_pfloat_v4sf
    = build_function_type_list (void_type_node,
				pfloat_type_node, V4SF_type_node, NULL_TREE);
  tree v4si_ftype_pcint
    = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
  tree void_ftype_pint_v4si
    = build_function_type_list (void_type_node,
				pint_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_pcshort
    = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
  tree void_ftype_pshort_v8hi
    = build_function_type_list (void_type_node,
				pshort_type_node, V8HI_type_node, NULL_TREE);
  tree v16qi_ftype_pcchar
    = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
  tree void_ftype_pchar_v16qi
    = build_function_type_list (void_type_node,
				pchar_type_node, V16QI_type_node, NULL_TREE);
  tree void_ftype_v4si
    = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_void
    = build_function_type (V8HI_type_node, void_list_node);
  tree void_ftype_void
    = build_function_type (void_type_node, void_list_node);
  tree void_ftype_qi
    = build_function_type_list (void_type_node, char_type_node, NULL_TREE);

  tree v16qi_ftype_int_pcvoid
    = build_function_type_list (V16QI_type_node,
				integer_type_node, pcvoid_type_node, NULL_TREE);
  tree v8hi_ftype_int_pcvoid
    = build_function_type_list (V8HI_type_node,
				integer_type_node, pcvoid_type_node, NULL_TREE);
  tree v4si_ftype_int_pcvoid
    = build_function_type_list (V4SI_type_node,
				integer_type_node, pcvoid_type_node, NULL_TREE);

  tree void_ftype_v4si_int_pvoid
    = build_function_type_list (void_type_node,
				V4SI_type_node, integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree void_ftype_v16qi_int_pvoid
    = build_function_type_list (void_type_node,
				V16QI_type_node, integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree void_ftype_v8hi_int_pvoid
    = build_function_type_list (void_type_node,
				V8HI_type_node, integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree int_ftype_int_v8hi_v8hi
    = build_function_type_list (integer_type_node,
				integer_type_node, V8HI_type_node,
				V8HI_type_node, NULL_TREE);
  tree int_ftype_int_v16qi_v16qi
    = build_function_type_list (integer_type_node,
				integer_type_node, V16QI_type_node,
				V16QI_type_node, NULL_TREE);
  tree int_ftype_int_v4sf_v4sf
    = build_function_type_list (integer_type_node,
				integer_type_node, V4SF_type_node,
				V4SF_type_node, NULL_TREE);
  tree v4si_ftype_v4si
    = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi
    = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi
    = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf
    = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree void_ftype_pcvoid_int_char
    = build_function_type_list (void_type_node,
				pcvoid_type_node, integer_type_node,
				char_type_node, NULL_TREE);

  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
	       ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
	       ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
	       ALTIVEC_BUILTIN_LD_INTERNAL_4si);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
	       ALTIVEC_BUILTIN_ST_INTERNAL_4si);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
	       ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
	       ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
	       ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
	       ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEHX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVXL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);

  /* Add the DST variants.  */
  d = (struct builtin_description *) bdesc_dst;
  for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
    def_builtin (d->mask, d->name, void_ftype_pcvoid_int_char, d->code);

  /* Initialize the predicates.  The function type is selected from
     the mode of the pattern's second operand (the first vector).  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
    {
      enum machine_mode mode1;
      tree type;

      mode1 = insn_data[dp->icode].operand[1].mode;

      switch (mode1)
	{
	case V4SImode:
	  type = int_ftype_int_v4si_v4si;
	  break;
	case V8HImode:
	  type = int_ftype_int_v8hi_v8hi;
	  break;
	case V16QImode:
	  type = int_ftype_int_v16qi_v16qi;
	  break;
	case V4SFmode:
	  type = int_ftype_int_v4sf_v4sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (dp->mask, dp->name, type, dp->code);
    }

  /* Initialize the abs* operators.  The function type is selected
     from the mode of the pattern's output operand.  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
    {
      enum machine_mode mode0;
      tree type;

      mode0 = insn_data[d->icode].operand[0].mode;

      switch (mode0)
	{
	case V4SImode:
	  type = v4si_ftype_v4si;
	  break;
	case V8HImode:
	  type = v8hi_ftype_v8hi;
	  break;
	case V16QImode:
	  type = v16qi_ftype_v16qi;
	  break;
	case V4SFmode:
	  type = v4sf_ftype_v4sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (d->mask, d->name, type, d->code);
    }
}
5744
/* Register the simple unary, binary and ternary builtins shared by
   the AltiVec and SPE extensions.  For each entry in bdesc_3arg,
   bdesc_2arg and bdesc_1arg, the function type is deduced from the
   machine modes of the pattern's operands; an unrecognized mode
   combination aborts, since it means the tables and patterns are out
   of sync.  */
static void
rs6000_common_init_builtins ()
{
  struct builtin_description *d;
  size_t i;

  tree v4sf_ftype_v4sf_v4sf_v16qi
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node,
				V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v4si_v4si_v16qi
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, V4SI_type_node,
				V16QI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi_v16qi
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, V8HI_type_node,
				V16QI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_v16qi_v16qi
    = build_function_type_list (V16QI_type_node,
				V16QI_type_node, V16QI_type_node,
				V16QI_type_node, NULL_TREE);
  tree v4si_ftype_char
    = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
  tree v8hi_ftype_char
    = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
  tree v16qi_ftype_char
    = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
  tree v8hi_ftype_v16qi
    = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf
    = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);

  /* SPE vector types (64-bit, two elements).  */
  tree v2si_ftype_v2si_v2si
    = build_function_type_list (V2SI_type_node,
				V2SI_type_node, V2SI_type_node, NULL_TREE);

  tree v2sf_ftype_v2sf_v2sf
    = build_function_type_list (V2SF_type_node,
				V2SF_type_node, V2SF_type_node, NULL_TREE);

  tree v2si_ftype_int_int
    = build_function_type_list (V2SI_type_node,
				integer_type_node, integer_type_node,
				NULL_TREE);

  tree v2si_ftype_v2si
    = build_function_type_list (V2SI_type_node, V2SI_type_node, NULL_TREE);

  tree v2sf_ftype_v2sf
    = build_function_type_list (V2SF_type_node,
				V2SF_type_node, NULL_TREE);

  tree v2sf_ftype_v2si
    = build_function_type_list (V2SF_type_node,
				V2SI_type_node, NULL_TREE);

  tree v2si_ftype_v2sf
    = build_function_type_list (V2SI_type_node,
				V2SF_type_node, NULL_TREE);

  tree v2si_ftype_v2si_char
    = build_function_type_list (V2SI_type_node,
				V2SI_type_node, char_type_node, NULL_TREE);

  tree v2si_ftype_int_char
    = build_function_type_list (V2SI_type_node,
				integer_type_node, char_type_node, NULL_TREE);

  tree v2si_ftype_char
    = build_function_type_list (V2SI_type_node, char_type_node, NULL_TREE);

  tree int_ftype_int_int
    = build_function_type_list (integer_type_node,
				integer_type_node, integer_type_node,
				NULL_TREE);

  tree v4si_ftype_v4si_v4si
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_v4si_char
    = build_function_type_list (V4SF_type_node,
				V4SI_type_node, char_type_node, NULL_TREE);
  tree v4si_ftype_v4sf_char
    = build_function_type_list (V4SI_type_node,
				V4SF_type_node, char_type_node, NULL_TREE);
  tree v4si_ftype_v4si_char
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, char_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_char
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, char_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_char
    = build_function_type_list (V16QI_type_node,
				V16QI_type_node, char_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_v16qi_char
    = build_function_type_list (V16QI_type_node,
				V16QI_type_node, V16QI_type_node,
				char_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi_char
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, V8HI_type_node,
				char_type_node, NULL_TREE);
  tree v4si_ftype_v4si_v4si_char
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, V4SI_type_node,
				char_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf_char
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node,
				char_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf_v4si
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node,
				V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf_v4sf
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node,
				V4SF_type_node, NULL_TREE);
  tree v4si_ftype_v4si_v4si_v4si
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, V4SI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi_v8hi
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, V8HI_type_node,
				V8HI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi_v8hi_v4si
    = build_function_type_list (V4SI_type_node,
				V8HI_type_node, V8HI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v4si_ftype_v16qi_v16qi_v4si
    = build_function_type_list (V4SI_type_node,
				V16QI_type_node, V16QI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_v16qi
    = build_function_type_list (V16QI_type_node,
				V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v4sf_v4sf
    = build_function_type_list (V4SI_type_node,
				V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree v8hi_ftype_v16qi_v16qi
    = build_function_type_list (V8HI_type_node,
				V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi_v8hi
    = build_function_type_list (V4SI_type_node,
				V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v8hi_ftype_v4si_v4si
    = build_function_type_list (V8HI_type_node,
				V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v16qi_ftype_v8hi_v8hi
    = build_function_type_list (V16QI_type_node,
				V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v4si_ftype_v16qi_v4si
    = build_function_type_list (V4SI_type_node,
				V16QI_type_node, V4SI_type_node, NULL_TREE);
  tree v4si_ftype_v16qi_v16qi
    = build_function_type_list (V4SI_type_node,
				V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi_v4si
    = build_function_type_list (V4SI_type_node,
				V8HI_type_node, V4SI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi
    = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
  tree int_ftype_v4si_v4si
    = build_function_type_list (integer_type_node,
				V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree int_ftype_v4sf_v4sf
    = build_function_type_list (integer_type_node,
				V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree int_ftype_v16qi_v16qi
    = build_function_type_list (integer_type_node,
				V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree int_ftype_v8hi_v8hi
    = build_function_type_list (integer_type_node,
				V8HI_type_node, V8HI_type_node, NULL_TREE);

  /* Add the simple ternary operators.  */
  d = (struct builtin_description *) bdesc_3arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
    {

      enum machine_mode mode0, mode1, mode2, mode3;
      tree type;

      /* Skip table entries with no expander.  */
      if (d->name == 0 || d->icode == CODE_FOR_nothing)
	continue;

      mode0 = insn_data[d->icode].operand[0].mode;
      mode1 = insn_data[d->icode].operand[1].mode;
      mode2 = insn_data[d->icode].operand[2].mode;
      mode3 = insn_data[d->icode].operand[3].mode;

      /* When all four are of the same mode.  */
      if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
	{
	  switch (mode0)
	    {
	    case V4SImode:
	      type = v4si_ftype_v4si_v4si_v4si;
	      break;
	    case V4SFmode:
	      type = v4sf_ftype_v4sf_v4sf_v4sf;
	      break;
	    case V8HImode:
	      type = v8hi_ftype_v8hi_v8hi_v8hi;
	      break;
	    case V16QImode:
	      type = v16qi_ftype_v16qi_v16qi_v16qi;
	      break;
	    default:
	      abort();
	    }
	}
      /* Permute-style: two like operands plus a V16QI selector.  */
      else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
	{
	  switch (mode0)
	    {
	    case V4SImode:
	      type = v4si_ftype_v4si_v4si_v16qi;
	      break;
	    case V4SFmode:
	      type = v4sf_ftype_v4sf_v4sf_v16qi;
	      break;
	    case V8HImode:
	      type = v8hi_ftype_v8hi_v8hi_v16qi;
	      break;
	    case V16QImode:
	      type = v16qi_ftype_v16qi_v16qi_v16qi;
	      break;
	    default:
	      abort();
	    }
	}
      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
	       && mode3 == V4SImode)
	type = v4si_ftype_v16qi_v16qi_v4si;
      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
	       && mode3 == V4SImode)
	type = v4si_ftype_v8hi_v8hi_v4si;
      else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
	       && mode3 == V4SImode)
	type = v4sf_ftype_v4sf_v4sf_v4si;

      /* vchar, vchar, vchar, 4 bit literal.  */
      else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
	       && mode3 == QImode)
	type = v16qi_ftype_v16qi_v16qi_char;

      /* vshort, vshort, vshort, 4 bit literal.  */
      else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
	       && mode3 == QImode)
	type = v8hi_ftype_v8hi_v8hi_char;

      /* vint, vint, vint, 4 bit literal.  */
      else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
	       && mode3 == QImode)
	type = v4si_ftype_v4si_v4si_char;

      /* vfloat, vfloat, vfloat, 4 bit literal.  */
      else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
	       && mode3 == QImode)
	type = v4sf_ftype_v4sf_v4sf_char;

      else
	abort ();

      def_builtin (d->mask, d->name, type, d->code);
    }

  /* Add the simple binary operators.  */
  d = (struct builtin_description *) bdesc_2arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    {
      enum machine_mode mode0, mode1, mode2;
      tree type;

      /* Skip table entries with no expander.  */
      if (d->name == 0 || d->icode == CODE_FOR_nothing)
	continue;

      mode0 = insn_data[d->icode].operand[0].mode;
      mode1 = insn_data[d->icode].operand[1].mode;
      mode2 = insn_data[d->icode].operand[2].mode;

      /* When all three operands are of the same mode.  */
      if (mode0 == mode1 && mode1 == mode2)
	{
	  switch (mode0)
	    {
	    case V4SFmode:
	      type = v4sf_ftype_v4sf_v4sf;
	      break;
	    case V4SImode:
	      type = v4si_ftype_v4si_v4si;
	      break;
	    case V16QImode:
	      type = v16qi_ftype_v16qi_v16qi;
	      break;
	    case V8HImode:
	      type = v8hi_ftype_v8hi_v8hi;
	      break;
	    case V2SImode:
	      type = v2si_ftype_v2si_v2si;
	      break;
	    case V2SFmode:
	      type = v2sf_ftype_v2sf_v2sf;
	      break;
	    case SImode:
	      type = int_ftype_int_int;
	      break;
	    default:
	      abort ();
	    }
	}

      /* A few other combos we really don't want to do manually.  */

      /* vint, vfloat, vfloat.  */
      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
	type = v4si_ftype_v4sf_v4sf;

      /* vshort, vchar, vchar.  */
      else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
	type = v8hi_ftype_v16qi_v16qi;

      /* vint, vshort, vshort.  */
      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
	type = v4si_ftype_v8hi_v8hi;

      /* vshort, vint, vint.  */
      else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
	type = v8hi_ftype_v4si_v4si;

      /* vchar, vshort, vshort.  */
      else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
	type = v16qi_ftype_v8hi_v8hi;

      /* vint, vchar, vint.  */
      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
	type = v4si_ftype_v16qi_v4si;

      /* vint, vchar, vchar.  */
      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
	type = v4si_ftype_v16qi_v16qi;

      /* vint, vshort, vint.  */
      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
	type = v4si_ftype_v8hi_v4si;

      /* vint, vint, 5 bit literal.  */
      else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
	type = v4si_ftype_v4si_char;

      /* vshort, vshort, 5 bit literal.  */
      else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
	type = v8hi_ftype_v8hi_char;

      /* vchar, vchar, 5 bit literal.  */
      else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
	type = v16qi_ftype_v16qi_char;

      /* vfloat, vint, 5 bit literal.  */
      else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
	type = v4sf_ftype_v4si_char;

      /* vint, vfloat, 5 bit literal.  */
      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
	type = v4si_ftype_v4sf_char;

      else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
	type = v2si_ftype_int_int;

      else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
	type = v2si_ftype_v2si_char;

      else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
	type = v2si_ftype_int_char;

      /* int, x, x.  */
      else if (mode0 == SImode)
	{
	  switch (mode1)
	    {
	    case V4SImode:
	      type = int_ftype_v4si_v4si;
	      break;
	    case V4SFmode:
	      type = int_ftype_v4sf_v4sf;
	      break;
	    case V16QImode:
	      type = int_ftype_v16qi_v16qi;
	      break;
	    case V8HImode:
	      type = int_ftype_v8hi_v8hi;
	      break;
	    default:
	      abort ();
	    }
	}

      else
	abort ();

      def_builtin (d->mask, d->name, type, d->code);
    }

  /* Add the simple unary operators.  */
  d = (struct builtin_description *) bdesc_1arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
    {
      enum machine_mode mode0, mode1;
      tree type;

      /* Skip table entries with no expander.  */
      if (d->name == 0 || d->icode == CODE_FOR_nothing)
	continue;

      mode0 = insn_data[d->icode].operand[0].mode;
      mode1 = insn_data[d->icode].operand[1].mode;

      if (mode0 == V4SImode && mode1 == QImode)
	type = v4si_ftype_char;
      else if (mode0 == V8HImode && mode1 == QImode)
	type = v8hi_ftype_char;
      else if (mode0 == V16QImode && mode1 == QImode)
	type = v16qi_ftype_char;
      else if (mode0 == V4SFmode && mode1 == V4SFmode)
	type = v4sf_ftype_v4sf;
      else if (mode0 == V8HImode && mode1 == V16QImode)
	type = v8hi_ftype_v16qi;
      else if (mode0 == V4SImode && mode1 == V8HImode)
	type = v4si_ftype_v8hi;
      else if (mode0 == V2SImode && mode1 == V2SImode)
	type = v2si_ftype_v2si;
      else if (mode0 == V2SFmode && mode1 == V2SFmode)
	type = v2sf_ftype_v2sf;
      else if (mode0 == V2SFmode && mode1 == V2SImode)
	type = v2sf_ftype_v2si;
      else if (mode0 == V2SImode && mode1 == V2SFmode)
	type = v2si_ftype_v2sf;
      else if (mode0 == V2SImode && mode1 == QImode)
	type = v2si_ftype_char;
      else
	abort ();

      def_builtin (d->mask, d->name, type, d->code);
    }
}
6198
6199 \f
6200 /* Expand a block move operation, and return 1 if successful. Return 0
6201 if we should let the compiler generate normal code.
6202
6203 operands[0] is the destination
6204 operands[1] is the source
6205 operands[2] is the length
6206 operands[3] is the alignment */
6207
6208 #define MAX_MOVE_REG 4
6209
int
expand_block_move (operands)
     rtx operands[];
{
  rtx orig_dest = operands[0];
  rtx orig_src = operands[1];
  rtx bytes_rtx = operands[2];
  rtx align_rtx = operands[3];
  int constp = (GET_CODE (bytes_rtx) == CONST_INT);
  int align;
  int bytes;
  int offset;
  int move_bytes;

  /* If this is not a fixed size move, just call memcpy */
  if (! constp)
    return 0;

  /* If this is not a fixed size alignment, abort */
  if (GET_CODE (align_rtx) != CONST_INT)
    abort ();
  align = INTVAL (align_rtx);

  /* Anything to move? */
  bytes = INTVAL (bytes_rtx);
  if (bytes <= 0)
    return 1;

  /* store_one_arg depends on expand_block_move to handle at least the size of
     reg_parm_stack_space.  */
  if (bytes > (TARGET_POWERPC64 ? 64 : 32))
    return 0;

  if (TARGET_STRING)	/* string instructions are available */
    {
      /* Emit one string or scalar move per iteration, largest chunks
	 first, advancing OFFSET through the block.  */
      for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
	{
	  /* GEN_FUNC holds either a 4-operand movstrsi generator (for
	     BLKmode string moves) or a 2-operand scalar move generator;
	     MODE == BLKmode selects which member is valid.  */
	  union {
	    rtx (*movstrsi) PARAMS ((rtx, rtx, rtx, rtx));
	    rtx (*mov) PARAMS ((rtx, rtx));
	  } gen_func;
	  enum machine_mode mode = BLKmode;
	  rtx src, dest;

	  /* The multi-register string moves clobber r5..r12; each wider
	     variant is usable only when all the registers it needs are
	     available.  */
	  if (bytes > 24		/* move up to 32 bytes at a time */
	      && ! fixed_regs[5]
	      && ! fixed_regs[6]
	      && ! fixed_regs[7]
	      && ! fixed_regs[8]
	      && ! fixed_regs[9]
	      && ! fixed_regs[10]
	      && ! fixed_regs[11]
	      && ! fixed_regs[12])
	    {
	      move_bytes = (bytes > 32) ? 32 : bytes;
	      gen_func.movstrsi = gen_movstrsi_8reg;
	    }
	  else if (bytes > 16	/* move up to 24 bytes at a time */
		   && ! fixed_regs[5]
		   && ! fixed_regs[6]
		   && ! fixed_regs[7]
		   && ! fixed_regs[8]
		   && ! fixed_regs[9]
		   && ! fixed_regs[10])
	    {
	      move_bytes = (bytes > 24) ? 24 : bytes;
	      gen_func.movstrsi = gen_movstrsi_6reg;
	    }
	  else if (bytes > 8	/* move up to 16 bytes at a time */
		   && ! fixed_regs[5]
		   && ! fixed_regs[6]
		   && ! fixed_regs[7]
		   && ! fixed_regs[8])
	    {
	      move_bytes = (bytes > 16) ? 16 : bytes;
	      gen_func.movstrsi = gen_movstrsi_4reg;
	    }
	  else if (bytes >= 8 && TARGET_POWERPC64
		   /* 64-bit loads and stores require word-aligned
		      displacements.  */
		   && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
	    {
	      move_bytes = 8;
	      mode = DImode;
	      gen_func.mov = gen_movdi;
	    }
	  else if (bytes > 4 && !TARGET_POWERPC64)
	    {			/* move up to 8 bytes at a time */
	      move_bytes = (bytes > 8) ? 8 : bytes;
	      gen_func.movstrsi = gen_movstrsi_2reg;
	    }
	  else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
	    {			/* move 4 bytes */
	      move_bytes = 4;
	      mode = SImode;
	      gen_func.mov = gen_movsi;
	    }
	  else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
	    {			/* move 2 bytes */
	      move_bytes = 2;
	      mode = HImode;
	      gen_func.mov = gen_movhi;
	    }
	  else if (bytes == 1)	/* move 1 byte */
	    {
	      move_bytes = 1;
	      mode = QImode;
	      gen_func.mov = gen_movqi;
	    }
	  else
	    {			/* move up to 4 bytes at a time */
	      move_bytes = (bytes > 4) ? 4 : bytes;
	      gen_func.movstrsi = gen_movstrsi_1reg;
	    }

	  src = adjust_address (orig_src, mode, offset);
	  dest = adjust_address (orig_dest, mode, offset);

	  if (mode == BLKmode)
	    {
	      /* Move the address into scratch registers.  The movstrsi
		 patterns require zero offset.  */
	      if (!REG_P (XEXP (src, 0)))
		{
		  rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
		  src = replace_equiv_address (src, src_reg);
		}
	      set_mem_size (src, GEN_INT (move_bytes));

	      if (!REG_P (XEXP (dest, 0)))
		{
		  rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
		  dest = replace_equiv_address (dest, dest_reg);
		}
	      set_mem_size (dest, GEN_INT (move_bytes));

	      /* The byte-count operand of lswi/stswi is 5 bits; 32 is
		 encoded as 0, hence the & 31.  */
	      emit_insn ((*gen_func.movstrsi) (dest, src,
					       GEN_INT (move_bytes & 31),
					       align_rtx));
	    }
	  else
	    {
	      /* Scalar chunk: load into a fresh pseudo, then store.  */
	      rtx tmp_reg = gen_reg_rtx (mode);

	      emit_insn ((*gen_func.mov) (tmp_reg, src));
	      emit_insn ((*gen_func.mov) (dest, tmp_reg));
	    }
	}
    }

  else /* string instructions not available */
    {
      /* Batch the stores so up to MAX_MOVE_REG loads are issued before
	 their matching stores, giving the scheduler some freedom.  */
      rtx stores[MAX_MOVE_REG];
      int num_reg = 0;
      int i;

      for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
	{
	  rtx (*gen_mov_func) PARAMS ((rtx, rtx));
	  enum machine_mode mode;
	  rtx src, dest, tmp_reg;

	  /* Generate the appropriate load and store, saving the stores
	     for later.  */
	  if (bytes >= 8 && TARGET_POWERPC64
	      /* 64-bit loads and stores require word-aligned
		 displacements.  */
	      && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
	    {
	      move_bytes = 8;
	      mode = DImode;
	      gen_mov_func = gen_movdi;
	    }
	  else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
	    {
	      move_bytes = 4;
	      mode = SImode;
	      gen_mov_func = gen_movsi;
	    }
	  else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
	    {
	      move_bytes = 2;
	      mode = HImode;
	      gen_mov_func = gen_movhi;
	    }
	  else
	    {
	      move_bytes = 1;
	      mode = QImode;
	      gen_mov_func = gen_movqi;
	    }

	  src = adjust_address (orig_src, mode, offset);
	  dest = adjust_address (orig_dest, mode, offset);
	  tmp_reg = gen_reg_rtx (mode);

	  emit_insn ((*gen_mov_func) (tmp_reg, src));
	  stores[num_reg++] = (*gen_mov_func) (dest, tmp_reg);

	  /* Flush the pending stores once the batch is full.  */
	  if (num_reg >= MAX_MOVE_REG)
	    {
	      for (i = 0; i < num_reg; i++)
		emit_insn (stores[i]);
	      num_reg = 0;
	    }
	}

      /* Flush any stores left in the final, partial batch.  */
      for (i = 0; i < num_reg; i++)
	emit_insn (stores[i]);
    }

  return 1;
}
6423
6424 \f
6425 /* Return 1 if OP is a load multiple operation. It is known to be a
6426 PARALLEL and the first section will be tested. */
6427
int
load_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int dest_regno;
  rtx src_addr;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
    return 0;

  /* The first element fixes the base register and base address the
     rest of the vector must follow.  */
  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);

  /* Each remaining element must load SImode register DEST_REGNO + i
     from the word at SRC_ADDR + 4*i — the lmw/lswi memory layout.  */
  for (i = 1; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || REGNO (SET_DEST (elt)) != dest_regno + i
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
	  || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
	  || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
	return 0;
    }

  return 1;
}
6467
6468 /* Similar, but tests for store multiple. Here, the second vector element
6469 is a CLOBBER. It will be tested later. */
6470
int
store_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  /* Element 1 of the PARALLEL is a CLOBBER (checked by the caller),
     so there is one fewer store than vector elements.  */
  int count = XVECLEN (op, 0) - 1;
  unsigned int src_regno;
  rtx dest_addr;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);

  /* Skip over the CLOBBER at index 1; each following element must
     store SImode register SRC_REGNO + i at DEST_ADDR + 4*i.  */
  for (i = 1; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i + 1);

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_SRC (elt)) != REG
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || REGNO (SET_SRC (elt)) != src_regno + i
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
	  || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
	  || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
	return 0;
    }

  return 1;
}
6510
6511 /* Return a string to perform a load_multiple operation.
6512 operands[0] is the vector.
6513 operands[1] is the source address.
6514 operands[2] is the first destination register. */
6515
const char *
rs6000_output_load_multiple (operands)
     rtx operands[3];
{
  /* We have to handle the case where the pseudo used to contain the address
     is assigned to one of the output registers.  */
  int i, j;
  int words = XVECLEN (operands[0], 0);
  rtx xop[10];

  /* A single word needs no string instruction at all.  */
  if (XVECLEN (operands[0], 0) == 1)
    return "{l|lwz} %2,0(%1)";

  for (i = 0; i < words; i++)
    if (refers_to_regno_p (REGNO (operands[2]) + i,
			   REGNO (operands[2]) + i + 1, operands[1], 0))
      {
	/* The address register is destination word I.  */
	if (i == words-1)
	  {
	    /* Overlap in the last word: lswi the first WORDS-1 words,
	       then load the final word through the still-live address
	       register, clobbering it last.  */
	    xop[0] = GEN_INT (4 * (words-1));
	    xop[1] = operands[1];
	    xop[2] = operands[2];
	    output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
	    return "";
	  }
	else if (i == 0)
	  {
	    /* Overlap in the first word: step the address past word 0,
	       lswi the remaining words into the registers above it,
	       then load word 0 last over the address register.  */
	    xop[0] = GEN_INT (4 * (words-1));
	    xop[1] = operands[1];
	    xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
	    output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
	    return "";
	  }
	else
	  {
	    /* Overlap in the middle: give up on the string insn and
	       emit one load per word, doing the conflicting word
	       last so the address stays valid throughout.  */
	    for (j = 0; j < words; j++)
	      if (j != i)
		{
		  xop[0] = GEN_INT (j * 4);
		  xop[1] = operands[1];
		  xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
		  output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
		}
	    xop[0] = GEN_INT (i * 4);
	    xop[1] = operands[1];
	    output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
	    return "";
	  }
      }

  /* No overlap: one lswi covers the whole transfer.  */
  return "{lsi|lswi} %2,%1,%N0";
}
6568
6569 /* Return 1 for a parallel vrsave operation. */
6570
int
vrsave_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int dest_regno, src_regno;
  int i;

  /* The first element must set a register from an unspec_volatile.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  /* NOTE(review): SET_SRC here is an UNSPEC_VOLATILE, not a REG, so
     REGNO reads an XINT slot of the unspec — presumably this matches
     the layout emitted by the vrsave patterns; verify against
     rs6000.md.  */
  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));

  /* One side of the SET must involve VRSAVE (save vs. restore
     direction).  */
  if (dest_regno != VRSAVE_REGNO
      && src_regno != VRSAVE_REGNO)
    return 0;

  /* The remaining elements need only be SETs or CLOBBERs; their
     contents are validated elsewhere.  */
  for (i = 1; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != CLOBBER
	  && GET_CODE (elt) != SET)
	return 0;
    }

  return 1;
}
6604
/* Return 1 for a PARALLEL suitable for mtcrf.  */
6606
int
mtcrf_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  int i;
  rtx src_reg;

  /* Perform a quick check so we don't blow up below.  */
  if (count < 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
      || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
    return 0;
  src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);

  /* All elements must draw from one SImode integer register.  */
  if (GET_CODE (src_reg) != REG
      || GET_MODE (src_reg) != SImode
      || ! INT_REGNO_P (REGNO (src_reg)))
    return 0;

  for (i = 0; i < count; i++)
    {
      rtx exp = XVECEXP (op, 0, i);
      rtx unspec;
      int maskval;

      /* Each element must set one CCmode CR field...  */
      if (GET_CODE (exp) != SET
	  || GET_CODE (SET_DEST (exp)) != REG
	  || GET_MODE (SET_DEST (exp)) != CCmode
	  || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
	return 0;
      unspec = SET_SRC (exp);
      /* ...using the single-bit mtcrf field mask for that CR field.  */
      maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));

      /* NOTE(review): 20 is the bare unspec number used by the mtcrf
	 patterns in rs6000.md — keep the two in sync.  */
      if (GET_CODE (unspec) != UNSPEC
	  || XINT (unspec, 1) != 20
	  || XVECLEN (unspec, 0) != 2
	  || XVECEXP (unspec, 0, 0) != src_reg
	  || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
	  || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
	return 0;
    }
  return 1;
}
6653
/* Return 1 for a PARALLEL suitable for lmw.  */
6655
int
lmw_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int dest_regno;
  rtx src_addr;
  unsigned int base_regno;
  HOST_WIDE_INT offset;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);

  /* lmw always loads through r31, so the vector must cover exactly
     registers DEST_REGNO .. 31.  */
  if (dest_regno > 31
      || count != 32 - (int) dest_regno)
    return 0;

  /* Decompose the first address as base register + constant offset.
     r0 as a base reads as literal zero, so it is rejected.  */
  if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
    {
      offset = 0;
      base_regno = REGNO (src_addr);
      if (base_regno == 0)
	return 0;
    }
  else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
    {
      offset = INTVAL (XEXP (src_addr, 1));
      base_regno = REGNO (XEXP (src_addr, 0));
    }
  else
    return 0;

  /* Element I must load SImode register DEST_REGNO + I from
     BASE_REGNO + OFFSET + 4*I.  */
  for (i = 0; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);
      rtx newaddr;
      rtx addr_reg;
      HOST_WIDE_INT newoffset;

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || REGNO (SET_DEST (elt)) != dest_regno + i
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != SImode)
	return 0;
      newaddr = XEXP (SET_SRC (elt), 0);
      if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
	{
	  newoffset = 0;
	  addr_reg = newaddr;
	}
      else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
	{
	  addr_reg = XEXP (newaddr, 0);
	  newoffset = INTVAL (XEXP (newaddr, 1));
	}
      else
	return 0;
      if (REGNO (addr_reg) != base_regno
	  || newoffset != offset + 4 * i)
	return 0;
    }

  return 1;
}
6731
/* Return 1 for a PARALLEL suitable for stmw.  */
6733
int
stmw_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int src_regno;
  rtx dest_addr;
  unsigned int base_regno;
  HOST_WIDE_INT offset;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);

  /* stmw always stores through r31, so the vector must cover exactly
     registers SRC_REGNO .. 31.  */
  if (src_regno > 31
      || count != 32 - (int) src_regno)
    return 0;

  /* Decompose the first address as base register + constant offset.
     r0 as a base reads as literal zero, so it is rejected.  */
  if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
    {
      offset = 0;
      base_regno = REGNO (dest_addr);
      if (base_regno == 0)
	return 0;
    }
  else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
    {
      offset = INTVAL (XEXP (dest_addr, 1));
      base_regno = REGNO (XEXP (dest_addr, 0));
    }
  else
    return 0;

  /* Element I must store SImode register SRC_REGNO + I at
     BASE_REGNO + OFFSET + 4*I.  */
  for (i = 0; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);
      rtx newaddr;
      rtx addr_reg;
      HOST_WIDE_INT newoffset;

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_SRC (elt)) != REG
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || REGNO (SET_SRC (elt)) != src_regno + i
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_MODE (SET_DEST (elt)) != SImode)
	return 0;
      newaddr = XEXP (SET_DEST (elt), 0);
      if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
	{
	  newoffset = 0;
	  addr_reg = newaddr;
	}
      else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
	{
	  addr_reg = XEXP (newaddr, 0);
	  newoffset = INTVAL (XEXP (newaddr, 1));
	}
      else
	return 0;
      if (REGNO (addr_reg) != base_regno
	  || newoffset != offset + 4 * i)
	return 0;
    }

  return 1;
}
6809 \f
6810 /* A validation routine: say whether CODE, a condition code, and MODE
6811 match. The other alternatives either don't make sense or should
6812 never be generated. */
6813
static void
validate_condition_mode (code, mode)
     enum rtx_code code;
     enum machine_mode mode;
{
  /* CODE must be a comparison and MODE a condition-code mode.  */
  if (GET_RTX_CLASS (code) != '<'
      || GET_MODE_CLASS (mode) != MODE_CC)
    abort ();

  /* These don't make sense.  */
  if ((code == GT || code == LT || code == GE || code == LE)
      && mode == CCUNSmode)
    abort ();

  if ((code == GTU || code == LTU || code == GEU || code == LEU)
      && mode != CCUNSmode)
    abort ();

  /* Unordered-aware codes are only meaningful for floating point.  */
  if (mode != CCFPmode
      && (code == ORDERED || code == UNORDERED
	  || code == UNEQ || code == LTGT
	  || code == UNGT || code == UNLT
	  || code == UNGE || code == UNLE))
    abort ();

  /* These should never be generated except for
     flag_unsafe_math_optimizations and flag_finite_math_only.  */
  if (mode == CCFPmode
      && ! flag_unsafe_math_optimizations
      && ! flag_finite_math_only
      && (code == LE || code == GE
	  || code == UNEQ || code == LTGT
	  || code == UNGT || code == UNLT))
    abort ();

  /* These are invalid; the information is not there.  */
  if (mode == CCEQmode
      && code != EQ && code != NE)
    abort ();
}
6854
6855 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
6856 We only check the opcode against the mode of the CC value here. */
6857
6858 int
6859 branch_comparison_operator (op, mode)
6860 rtx op;
6861 enum machine_mode mode ATTRIBUTE_UNUSED;
6862 {
6863 enum rtx_code code = GET_CODE (op);
6864 enum machine_mode cc_mode;
6865
6866 if (GET_RTX_CLASS (code) != '<')
6867 return 0;
6868
6869 cc_mode = GET_MODE (XEXP (op, 0));
6870 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
6871 return 0;
6872
6873 validate_condition_mode (code, cc_mode);
6874
6875 return 1;
6876 }
6877
6878 /* Return 1 if OP is a comparison operation that is valid for a branch
6879 insn and which is true if the corresponding bit in the CC register
6880 is set. */
6881
6882 int
6883 branch_positive_comparison_operator (op, mode)
6884 rtx op;
6885 enum machine_mode mode;
6886 {
6887 enum rtx_code code;
6888
6889 if (! branch_comparison_operator (op, mode))
6890 return 0;
6891
6892 code = GET_CODE (op);
6893 return (code == EQ || code == LT || code == GT
6894 || (TARGET_SPE && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
6895 || code == LTU || code == GTU
6896 || code == UNORDERED);
6897 }
6898
6899 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
6900 We check the opcode against the mode of the CC value and disallow EQ or
6901 NE comparisons for integers. */
6902
int
scc_comparison_operator (op, mode)
     rtx op;
     enum machine_mode mode;
{
  enum rtx_code code = GET_CODE (op);
  enum machine_mode cc_mode;

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_RTX_CLASS (code) != '<')
    return 0;

  /* The compared operand must be a condition-code register.  */
  cc_mode = GET_MODE (XEXP (op, 0));
  if (GET_MODE_CLASS (cc_mode) != MODE_CC)
    return 0;

  validate_condition_mode (code, cc_mode);

  /* Integer NE is disallowed (see the function comment); it is only
     permitted for floating-point CC modes.  */
  if (code == NE && cc_mode != CCFPmode)
    return 0;

  return 1;
}
6928
6929 int
6930 trap_comparison_operator (op, mode)
6931 rtx op;
6932 enum machine_mode mode;
6933 {
6934 if (mode != VOIDmode && mode != GET_MODE (op))
6935 return 0;
6936 return GET_RTX_CLASS (GET_CODE (op)) == '<';
6937 }
6938
6939 int
6940 boolean_operator (op, mode)
6941 rtx op;
6942 enum machine_mode mode ATTRIBUTE_UNUSED;
6943 {
6944 enum rtx_code code = GET_CODE (op);
6945 return (code == AND || code == IOR || code == XOR);
6946 }
6947
6948 int
6949 boolean_or_operator (op, mode)
6950 rtx op;
6951 enum machine_mode mode ATTRIBUTE_UNUSED;
6952 {
6953 enum rtx_code code = GET_CODE (op);
6954 return (code == IOR || code == XOR);
6955 }
6956
6957 int
6958 min_max_operator (op, mode)
6959 rtx op;
6960 enum machine_mode mode ATTRIBUTE_UNUSED;
6961 {
6962 enum rtx_code code = GET_CODE (op);
6963 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
6964 }
6965 \f
6966 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
6967 mask required to convert the result of a rotate insn into a shift
6968 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
6969
6970 int
6971 includes_lshift_p (shiftop, andop)
6972 rtx shiftop;
6973 rtx andop;
6974 {
6975 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
6976
6977 shift_mask <<= INTVAL (shiftop);
6978
6979 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
6980 }
6981
6982 /* Similar, but for right shift. */
6983
6984 int
6985 includes_rshift_p (shiftop, andop)
6986 rtx shiftop;
6987 rtx andop;
6988 {
6989 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
6990
6991 shift_mask >>= INTVAL (shiftop);
6992
6993 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
6994 }
6995
6996 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
6997 to perform a left shift. It must have exactly SHIFTOP least
6998 significant 0's, then one or more 1's, then zero or more 0's. */
6999
int
includes_rldic_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      c = INTVAL (andop);
      /* Reject the degenerate all-zeros and all-ones masks.  */
      if (c == 0 || c == ~0)
	return 0;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must coincide with the LSB of the shift mask.  */
      if (-lsb != shift_mask)
	return 0;

      /* Invert to look for the next transition (if any).  */
      c = ~c;

      /* Remove the low group of ones (originally low group of zeros).  */
      c &= -lsb;

      /* Again find the lsb, and check we have all 1's above.  */
      lsb = c & -c;
      return c == -lsb;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* On a 32-bit host, a 64-bit mask arrives as a CONST_DOUBLE with
	 separate HIGH and LOW halves; run the same test piecewise.  */
      HOST_WIDE_INT low, high, lsb;
      HOST_WIDE_INT shift_mask_low, shift_mask_high;

      low = CONST_DOUBLE_LOW (andop);
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = CONST_DOUBLE_HIGH (andop);

      /* Reject all-zeros and all-ones across the full width.  */
      if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
	  || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
	return 0;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  /* The entire mask lives in the high word; the shift must be
	     at least 32 so its mask also starts in the high word.  */
	  shift_mask_high = ~0;
	  if (INTVAL (shiftop) > 32)
	    shift_mask_high <<= INTVAL (shiftop) - 32;

	  lsb = high & -high;

	  if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
	    return 0;

	  high = ~high;
	  high &= -lsb;

	  lsb = high & -high;
	  return high == -lsb;
	}

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if (-lsb != shift_mask_low)
	return 0;

      if (HOST_BITS_PER_WIDE_INT < 64)
	high = ~high;
      low = ~low;
      low &= -lsb;

      /* If the inverted low word is exhausted, the run of ones must
	 terminate cleanly in the high word.  */
      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  lsb = high & -high;
	  return high == -lsb;
	}

      lsb = low & -low;
      return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
    }
  else
    return 0;
}
7090
7091 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
7092 to perform a left shift. It must have SHIFTOP or more least
   significant 0's, with the remainder of the word 1's.  */
7094
int
includes_rldicr_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);
      c = INTVAL (andop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must be covered by the shift mask.
	 This test also rejects c == 0.  */
      if ((lsb & shift_mask) == 0)
	return 0;

      /* Check we have all 1's above the transition, and reject all 1's.  */
      return c == -lsb && lsb != 1;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* On a 32-bit host, a 64-bit mask arrives as a CONST_DOUBLE with
	 separate HIGH and LOW halves.  */
      HOST_WIDE_INT low, lsb, shift_mask_low;

      low = CONST_DOUBLE_LOW (andop);

      if (HOST_BITS_PER_WIDE_INT < 64)
	{
	  HOST_WIDE_INT high, shift_mask_high;

	  high = CONST_DOUBLE_HIGH (andop);

	  if (low == 0)
	    {
	      /* Mask lies entirely in the high word; apply the same
		 test there, with the shift amount reduced by 32.  */
	      shift_mask_high = ~0;
	      if (INTVAL (shiftop) > 32)
		shift_mask_high <<= INTVAL (shiftop) - 32;

	      lsb = high & -high;

	      if ((lsb & shift_mask_high) == 0)
		return 0;

	      return high == -lsb;
	    }
	  /* The mask straddles or occupies the low word, so the whole
	     high word must already be 1's.  */
	  if (high != ~0)
	    return 0;
	}

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if ((lsb & shift_mask_low) == 0)
	return 0;

      return low == -lsb && lsb != 1;
    }
  else
    return 0;
}
7162
7163 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
7164 for lfq and stfq insns.
7165
7166 Note reg1 and reg2 *must* be hard registers. To be sure we will
7167 abort if we are passed pseudo registers. */
7168
7169 int
7170 registers_ok_for_quad_peep (reg1, reg2)
7171 rtx reg1, reg2;
7172 {
7173 /* We might have been passed a SUBREG. */
7174 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
7175 return 0;
7176
7177 return (REGNO (reg1) == REGNO (reg2) - 1);
7178 }
7179
7180 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
7181 addr1 and addr2 must be in consecutive memory locations
7182 (addr2 == addr1 + 8). */
7183
int
addrs_ok_for_quad_peep (addr1, addr2)
     rtx addr1;
     rtx addr2;
{
  unsigned int reg1;
  int offset1;

  /* Extract an offset (if used) from the first addr.  */
  if (GET_CODE (addr1) == PLUS)
    {
      /* If not a REG, return zero.  */
      if (GET_CODE (XEXP (addr1, 0)) != REG)
	return 0;
      else
	{
	  reg1 = REGNO (XEXP (addr1, 0));
	  /* The offset must be constant!  */
	  if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
	    return 0;
	  offset1 = INTVAL (XEXP (addr1, 1));
	}
    }
  else if (GET_CODE (addr1) != REG)
    return 0;
  else
    {
      reg1 = REGNO (addr1);
      /* This was a simple (mem (reg)) expression.  Offset is 0.  */
      offset1 = 0;
    }

  /* Make sure the second address is a (mem (plus (reg) (const_int))).  */
  if (GET_CODE (addr2) != PLUS)
    return 0;

  if (GET_CODE (XEXP (addr2, 0)) != REG
      || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
    return 0;

  /* Both addresses must use the same base register.  */
  if (reg1 != REGNO (XEXP (addr2, 0)))
    return 0;

  /* The offset for the second addr must be 8 more than the first addr.  */
  if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
    return 0;

  /* All the tests passed.  addr1 and addr2 are valid for lfq or stfq
     instructions.  */
  return 1;
}
7235 \f
7236 /* Return the register class of a scratch register needed to copy IN into
7237 or out of a register in CLASS in MODE. If it can be done directly,
7238 NO_REGS is returned. */
7239
enum reg_class
secondary_reload_class (class, mode, in)
     enum reg_class class;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx in;
{
  int regno;

  if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
#if TARGET_MACHO
		     && MACHOPIC_INDIRECT
#endif
		     ))
    {
      /* We cannot copy a symbolic operand directly into anything
	 other than BASE_REGS for TARGET_ELF.  So indicate that a
	 register from BASE_REGS is needed as an intermediate
	 register.

	 On Darwin, pic addresses require a load from memory, which
	 needs a base register.  */
      if (class != BASE_REGS
	  && (GET_CODE (in) == SYMBOL_REF
	      || GET_CODE (in) == HIGH
	      || GET_CODE (in) == LABEL_REF
	      || GET_CODE (in) == CONST))
	return BASE_REGS;
    }

  /* Reduce IN to a hard register number, or -1 if it is not (or does
     not resolve to) a hard register.  */
  if (GET_CODE (in) == REG)
    {
      regno = REGNO (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  regno = true_regnum (in);
	  if (regno >= FIRST_PSEUDO_REGISTER)
	    regno = -1;
	}
    }
  else if (GET_CODE (in) == SUBREG)
    {
      regno = true_regnum (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = -1;
    }
  else
    regno = -1;

  /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
     into anything.  */
  if (class == GENERAL_REGS || class == BASE_REGS
      || (regno >= 0 && INT_REGNO_P (regno)))
    return NO_REGS;

  /* Constants, memory, and FP registers can go into FP registers.  */
  if ((regno == -1 || FP_REGNO_P (regno))
      && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
    return NO_REGS;

  /* Memory, and AltiVec registers can go into AltiVec registers.  */
  if ((regno == -1 || ALTIVEC_REGNO_P (regno))
      && class == ALTIVEC_REGS)
    return NO_REGS;

  /* We can copy among the CR registers.  */
  if ((class == CR_REGS || class == CR0_REGS)
      && regno >= 0 && CR_REGNO_P (regno))
    return NO_REGS;

  /* Otherwise, we need GENERAL_REGS.  */
  return GENERAL_REGS;
}
7312 \f
7313 /* Given a comparison operation, return the bit number in CCR to test. We
7314 know this is a valid comparison.
7315
7316 SCC_P is 1 if this is for an scc. That means that %D will have been
7317 used instead of %C, so the bits will be in different places.
7318
7319 Return -1 if OP isn't a valid comparison for some reason. */
7320
int
ccr_bit (op, scc_p)
     rtx op;
     int scc_p;
{
  enum rtx_code code = GET_CODE (op);
  enum machine_mode cc_mode;
  int cc_regnum;
  int base_bit;
  rtx reg;

  if (GET_RTX_CLASS (code) != '<')
    return -1;

  reg = XEXP (op, 0);

  if (GET_CODE (reg) != REG
      || ! CR_REGNO_P (REGNO (reg)))
    abort ();

  cc_mode = GET_MODE (reg);
  cc_regnum = REGNO (reg);
  /* Each CR field occupies 4 bits of the CCR.  */
  base_bit = 4 * (cc_regnum - CR0_REGNO);

  validate_condition_mode (code, cc_mode);

  switch (code)
    {
    case NE:
      /* SPE floating-point comparisons set only the GT bit of the
	 field.  */
      if (TARGET_SPE && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
	return base_bit + 1;
      return scc_p ? base_bit + 3 : base_bit + 2;
    case EQ:
      if (TARGET_SPE && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
	return base_bit + 1;
      return base_bit + 2;
    case GT:  case GTU:  case UNLE:
      return base_bit + 1;
    case LT:  case LTU:  case UNGE:
      return base_bit;
    case ORDERED:  case UNORDERED:
      return base_bit + 3;

    case GE:  case GEU:
      /* If scc, we will have done a cror to put the bit in the
	 unordered position.  So test that bit.  For integer, this is ! LT
	 unless this is an scc insn.  */
      return scc_p ? base_bit + 3 : base_bit;

    case LE: case LEU:
      return scc_p ? base_bit + 3 : base_bit + 1;

    default:
      abort ();
    }
}
7377 \f
7378 /* Return the GOT register. */
7379
7380 struct rtx_def *
7381 rs6000_got_register (value)
7382 rtx value ATTRIBUTE_UNUSED;
7383 {
7384 /* The second flow pass currently (June 1999) can't update
7385 regs_ever_live without disturbing other parts of the compiler, so
7386 update it here to make the prolog/epilogue code happy. */
7387 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
7388 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
7389
7390 current_function_uses_pic_offset_table = 1;
7391
7392 return pic_offset_table_rtx;
7393 }
7394 \f
/* Function to init struct machine_function.
   This will be called, via a pointer variable,
   from push_function_context.  */

static struct machine_function *
rs6000_init_machine_status ()
{
  /* Allocate the per-function machine state in GC-managed storage;
     ggc_alloc_cleared zero-fills it, so every field starts as 0/NULL.  */
  return ggc_alloc_cleared (sizeof (machine_function));
}
7404 \f
/* These macros test for integers and extract the low-order bits.  */

/* Nonzero if X is an integral constant: a CONST_INT, or a CONST_DOUBLE
   carrying an integer (integral CONST_DOUBLEs have VOIDmode).  */
#define INT_P(X)  \
((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE)	\
&& GET_MODE (X) == VOIDmode)

/* The low-order word of integral constant X (the low half only for a
   CONST_DOUBLE).  */
#define INT_LOWPART(X) \
  (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
7412
/* Extract the MB ("mask begin") field for an rlwinm-style mask
   constant OP: the index, numbered 0..31 from the most-significant
   bit, of the first 1 bit of the mask run.  OP is assumed to be a
   valid (possibly wrap-around) mask constant -- presumably as
   accepted by mask_operand; an all-zero mask aborts.  */

int
extract_MB (op)
     rtx op;
{
  int i;
  unsigned long val = INT_LOWPART (op);

  /* If the high bit is zero, the value is the first 1 bit we find
     from the left.  */
  if ((val & 0x80000000) == 0)
    {
      /* An all-zero mask is not a valid mask constant.  */
      if ((val & 0xffffffff) == 0)
	abort ();

      i = 1;
      while (((val <<= 1) & 0x80000000) == 0)
	++i;
      return i;
    }

  /* If the high bit is set and the low bit is not, or the mask is all
     1's, the value is zero.  */
  if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
    return 0;

  /* Otherwise we have a wrap-around mask.  Look for the first 0 bit
     from the right; MB is then the start of the low-order run of
     1 bits (index 32 - length of that run).  */
  i = 31;
  while (((val >>= 1) & 1) != 0)
    --i;

  return i;
}
7446
/* Extract the ME ("mask end") field for an rlwinm-style mask constant
   OP: the index, numbered 0..31 from the most-significant bit, of the
   last 1 bit of the mask run.  OP is assumed to be a valid (possibly
   wrap-around) mask constant -- presumably as accepted by
   mask_operand; an all-zero mask aborts.  */

int
extract_ME (op)
     rtx op;
{
  int i;
  unsigned long val = INT_LOWPART (op);

  /* If the low bit is zero, the value is the first 1 bit we find from
     the right.  */
  if ((val & 1) == 0)
    {
      /* An all-zero mask is not a valid mask constant.  */
      if ((val & 0xffffffff) == 0)
	abort ();

      i = 30;
      while (((val >>= 1) & 1) == 0)
	--i;

      return i;
    }

  /* If the low bit is set and the high bit is not, or the mask is all
     1's, the value is 31.  */
  if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
    return 31;

  /* Otherwise we have a wrap-around mask.  Look for the first 0 bit
     from the left; ME is then the end of the high-order run of
     1 bits (index length-of-run minus 1).  */
  i = 0;
  while (((val <<= 1) & 0x80000000) != 0)
    ++i;

  return i;
}
7481
7482 /* Print an operand. Recognize special options, documented below. */
7483
/* Relocation suffix and register number used when printing small-data
   references ("@RELOC(reg)") in print_operand / print_operand_address
   below.  On ELF the choice depends on the -msdata model (EABI uses
   "sda21" with register 0, otherwise "sdarel" with r13); elsewhere the
   EABI values are used unconditionally.  */
#if TARGET_ELF
#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
#else
#define SMALL_DATA_RELOC "sda21"
#define SMALL_DATA_REG 0
#endif
7491
/* Output operand X to FILE.  CODE is the single %-modifier letter
   (0 when the operand was written with no modifier); each case below
   documents the modifier it implements.  Invalid operands are
   reported through output_operand_lossage or abort.  */

void
print_operand (file, x, code)
     FILE *file;
     rtx x;
     int code;
{
  int i;
  HOST_WIDE_INT val;
  unsigned HOST_WIDE_INT uval;

  switch (code)
    {
    case '.':
      /* Write out an instruction after the call which may be replaced
	 with glue code by the loader.  This depends on the AIX version.  */
      asm_fprintf (file, RS6000_CALL_GLUE);
      return;

      /* %a is output_address.  */

    case 'A':
      /* If X is a constant integer whose low-order 5 bits are zero,
	 write 'l'.  Otherwise, write 'r'.  This is a kludge to fix a bug
	 in the AIX assembler where "sri" with a zero shift count
	 writes a trash instruction.  */
      if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
	putc ('l', file);
      else
	putc ('r', file);
      return;

    case 'b':
      /* If constant, low-order 16 bits of constant, unsigned.
	 Otherwise, write normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
      else
	print_operand (file, x, 0);
      return;

    case 'B':
      /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
	 for 64-bit mask direction.  */
      putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
      return;

      /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
	 output_operand.  */

    case 'D':
      /* There used to be a comment for 'C' reading "This is an
	 optional cror needed for certain floating-point
	 comparisons.  Otherwise write nothing."  */

      /* Similar, except that this is for an scc, so we must be able to
	 encode the test in a single bit that is one.  We do the above
	 for any LE, GE, GEU, or LEU and invert the bit for NE.  */
      if (GET_CODE (x) == LE || GET_CODE (x) == GE
	  || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
	{
	  int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);

	  fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
		   base_bit + 2,
		   base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
	}

      else if (GET_CODE (x) == NE)
	{
	  int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);

	  fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
		   base_bit + 2, base_bit + 2);
	}
      else if (TARGET_SPE && TARGET_HARD_FLOAT
	       && GET_CODE (x) == EQ
	       && GET_MODE (XEXP (x, 0)) == CCFPmode)
	{
	  /* SPE FP compares leave their result in the GT bit
	     (base_bit + 1); invert it in place for EQ.  */
	  int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);

	  fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 1,
		   base_bit + 1, base_bit + 1);
	}
      return;

    case 'E':
      /* X is a CR register.  Print the number of the EQ bit of the CR.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%E value");
      else
	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
      return;

    case 'f':
      /* X is a CR register.  Print the shift count needed to move it
	 to the high-order four bits.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%f value");
      else
	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
      return;

    case 'F':
      /* Similar, but print the count for the rotate in the opposite
	 direction.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%F value");
      else
	fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
      return;

    case 'G':
      /* X is a constant integer.  If it is negative, print "m",
	 otherwise print "z".  This is to make an aze or ame insn.  */
      if (GET_CODE (x) != CONST_INT)
	output_operand_lossage ("invalid %%G value");
      else if (INTVAL (x) >= 0)
	putc ('z', file);
      else
	putc ('m', file);
      return;

    case 'h':
      /* If constant, output low-order five bits.  Otherwise, write
	 normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
      else
	print_operand (file, x, 0);
      return;

    case 'H':
      /* If constant, output low-order six bits.  Otherwise, write
	 normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
      else
	print_operand (file, x, 0);
      return;

    case 'I':
      /* Print `i' if this is a constant, else nothing.  */
      if (INT_P (x))
	putc ('i', file);
      return;

    case 'j':
      /* Write the bit number in CCR for jump.  */
      i = ccr_bit (x, 0);
      if (i == -1)
	output_operand_lossage ("invalid %%j code");
      else
	fprintf (file, "%d", i);
      return;

    case 'J':
      /* Similar, but add one for shift count in rlinm for scc and pass
	 scc flag to `ccr_bit'.  */
      i = ccr_bit (x, 1);
      if (i == -1)
	output_operand_lossage ("invalid %%J code");
      else
	/* If we want bit 31, write a shift count of zero, not 32.  */
	fprintf (file, "%d", i == 31 ? 0 : i + 1);
      return;

    case 'k':
      /* X must be a constant.  Write the 1's complement of the
	 constant.  */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%k value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
      return;

    case 'K':
      /* X must be a symbolic constant on ELF.  Write an
	 expression suitable for an 'addi' that adds in the low 16
	 bits of the MEM.  */
      if (GET_CODE (x) != CONST)
	{
	  print_operand_address (file, x);
	  fputs ("@l", file);
	}
      else
	{
	  /* NOTE(review): after output_operand_lossage we still fall
	     through and print the malformed operand -- confirm this
	     is intended rather than an early return.  */
	  if (GET_CODE (XEXP (x, 0)) != PLUS
	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
		  && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
	      || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
	    output_operand_lossage ("invalid %%K value");
	  print_operand_address (file, XEXP (XEXP (x, 0), 0));
	  fputs ("@l", file);
	  /* For GNU as, there must be a non-alphanumeric character
	     between 'l' and the number.  The '-' is added by
	     print_operand() already.  */
	  if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
	    fputs ("+", file);
	  print_operand (file, XEXP (XEXP (x, 0), 1), 0);
	}
      return;

      /* %l is output_asm_label.  */

    case 'L':
      /* Write second word of DImode or DFmode reference.  Works on register
	 or non-indexed memory only.  */
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x) + 1]);
      else if (GET_CODE (x) == MEM)
	{
	  /* Handle possible auto-increment.  Since it is pre-increment and
	     we have already done it, we can just use an offset of word.  */
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0),
					   UNITS_PER_WORD));
	  else
	    output_address (XEXP (adjust_address_nv (x, SImode,
						     UNITS_PER_WORD),
				  0));

	  if (small_data_operand (x, GET_MODE (x)))
	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		     reg_names[SMALL_DATA_REG]);
	}
      return;

    case 'm':
      /* MB value for a mask operand.  */
      /* NOTE(review): execution continues into extract_MB even after
	 lossage; extract_MB may then abort -- confirm intended.  */
      if (! mask_operand (x, SImode))
	output_operand_lossage ("invalid %%m value");

      fprintf (file, "%d", extract_MB (x));
      return;

    case 'M':
      /* ME value for a mask operand.  */
      /* NOTE(review): same continue-after-lossage pattern as %m.  */
      if (! mask_operand (x, SImode))
	output_operand_lossage ("invalid %%M value");

      fprintf (file, "%d", extract_ME (x));
      return;

      /* %n outputs the negative of its operand.  */

    case 'N':
      /* Write the number of elements in the vector times 4.  */
      if (GET_CODE (x) != PARALLEL)
	output_operand_lossage ("invalid %%N value");
      else
	fprintf (file, "%d", XVECLEN (x, 0) * 4);
      return;

    case 'O':
      /* Similar, but subtract 1 first.  */
      if (GET_CODE (x) != PARALLEL)
	output_operand_lossage ("invalid %%O value");
      else
	fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
      return;

    case 'p':
      /* X is a CONST_INT that is a power of two.  Output the logarithm.  */
      if (! INT_P (x)
	  || INT_LOWPART (x) < 0
	  || (i = exact_log2 (INT_LOWPART (x))) < 0)
	output_operand_lossage ("invalid %%p value");
      else
	fprintf (file, "%d", i);
      return;

    case 'P':
      /* The operand must be an indirect memory reference.  The result
	 is the register number.  */
      if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
	  || REGNO (XEXP (x, 0)) >= 32)
	output_operand_lossage ("invalid %%P value");
      else
	fprintf (file, "%d", REGNO (XEXP (x, 0)));
      return;

    case 'q':
      /* This outputs the logical code corresponding to a boolean
	 expression.  The expression may have one or both operands
	 negated (if one, only the first one).  For condition register
	 logical operations, it will also treat the negated
	 CR codes as NOTs, but not handle NOTs of them.  */
      {
	const char *const *t = 0;
	const char *s;
	enum rtx_code code = GET_CODE (x);
	/* Rows: AND, IOR, XOR.  Columns: plain, first operand
	   negated, both operands negated.  */
	static const char * const tbl[3][3] = {
	  { "and", "andc", "nor" },
	  { "or", "orc", "nand" },
	  { "xor", "eqv", "xor" } };

	if (code == AND)
	  t = tbl[0];
	else if (code == IOR)
	  t = tbl[1];
	else if (code == XOR)
	  t = tbl[2];
	else
	  output_operand_lossage ("invalid %%q value");

	if (GET_CODE (XEXP (x, 0)) != NOT)
	  s = t[0];
	else
	  {
	    if (GET_CODE (XEXP (x, 1)) == NOT)
	      s = t[2];
	    else
	      s = t[1];
	  }

	fputs (s, file);
      }
      return;

    case 'R':
      /* X is a CR register.  Print the mask for `mtcrf'.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%R value");
      else
	fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
      return;

    case 's':
      /* Low 5 bits of 32 - value.  */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%s value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
      return;

    case 'S':
      /* PowerPC64 mask position.  All 0's is excluded.
	 CONST_INT 32-bit mask is considered sign-extended so any
	 transition must occur within the CONST_INT, not on the boundary.  */
      if (! mask64_operand (x, DImode))
	output_operand_lossage ("invalid %%S value");

      uval = INT_LOWPART (x);

      if (uval & 1)	/* Clear Left */
	{
#if HOST_BITS_PER_WIDE_INT > 64
	  uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
#endif
	  i = 64;
	}
      else		/* Clear Right */
	{
	  uval = ~uval;
#if HOST_BITS_PER_WIDE_INT > 64
	  uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
#endif
	  i = 63;
	}
      /* Count down from the starting index to the highest set bit.  */
      while (uval != 0)
	--i, uval >>= 1;
      if (i < 0)
	abort ();
      fprintf (file, "%d", i);
      return;

    case 't':
      /* Like 'J' but get to the OVERFLOW/UNORDERED bit.  */
      if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
	abort ();

      /* Bit 3 is OV bit.  */
      i = 4 * (REGNO (x) - CR0_REGNO) + 3;

      /* If we want bit 31, write a shift count of zero, not 32.  */
      fprintf (file, "%d", i == 31 ? 0 : i + 1);
      return;

    case 'T':
      /* Print the symbolic name of a branch target register.  */
      if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
				  && REGNO (x) != COUNT_REGISTER_REGNUM))
	output_operand_lossage ("invalid %%T value");
      else if (REGNO (x) == LINK_REGISTER_REGNUM)
	fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
      else
	fputs ("ctr", file);
      return;

    case 'u':
      /* High-order 16 bits of constant for use in unsigned operand.  */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%u value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
		 (INT_LOWPART (x) >> 16) & 0xffff);
      return;

    case 'v':
      /* High-order 16 bits of constant for use in signed operand.  */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%v value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
		 (INT_LOWPART (x) >> 16) & 0xffff);
      return;

    case 'U':
      /* Print `u' if this has an auto-increment or auto-decrement.  */
      if (GET_CODE (x) == MEM
	  && (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC))
	putc ('u', file);
      return;

    case 'V':
      /* Print the trap code for this operand.  The comments give the
	 TO field value -- presumably; confirm against the ISA.  */
      switch (GET_CODE (x))
	{
	case EQ:
	  fputs ("eq", file);   /* 4 */
	  break;
	case NE:
	  fputs ("ne", file);   /* 24 */
	  break;
	case LT:
	  fputs ("lt", file);   /* 16 */
	  break;
	case LE:
	  fputs ("le", file);   /* 20 */
	  break;
	case GT:
	  fputs ("gt", file);   /* 8 */
	  break;
	case GE:
	  fputs ("ge", file);   /* 12 */
	  break;
	case LTU:
	  fputs ("llt", file);  /* 2 */
	  break;
	case LEU:
	  fputs ("lle", file);  /* 6 */
	  break;
	case GTU:
	  fputs ("lgt", file);  /* 1 */
	  break;
	case GEU:
	  fputs ("lge", file);  /* 5 */
	  break;
	default:
	  abort ();
	}
      break;

    case 'w':
      /* If constant, low-order 16 bits of constant, signed.  Otherwise, write
	 normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC,
		 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
      else
	print_operand (file, x, 0);
      return;

    case 'W':
      /* MB value for a PowerPC64 rldic operand.  */
      val = (GET_CODE (x) == CONST_INT
	     ? INTVAL (x) : CONST_DOUBLE_HIGH (x));

      /* Locate the first set bit from the left by shifting until the
	 sign bit goes negative; -1 means it was already set.  */
      if (val < 0)
	i = -1;
      else
	for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
	  if ((val <<= 1) < 0)
	    break;

#if HOST_BITS_PER_WIDE_INT == 32
      if (GET_CODE (x) == CONST_INT && i >= 0)
	i += 32;  /* zero-extend high-part was all 0's */
      else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
	{
	  /* The whole high word was zero; continue the scan in the
	     low word of the CONST_DOUBLE.  */
	  val = CONST_DOUBLE_LOW (x);

	  if (val == 0)
	    abort ();
	  else if (val < 0)
	    --i;
	  else
	    for ( ; i < 64; i++)
	      if ((val <<= 1) < 0)
		break;
	}
#endif

      fprintf (file, "%d", i + 1);
      return;

    case 'X':
      /* Print 'x' for an indexed (reg+reg) memory operand.  */
      if (GET_CODE (x) == MEM
	  && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
	putc ('x', file);
      return;

    case 'Y':
      /* Like 'L', for third word of TImode.  */
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x) + 2]);
      else if (GET_CODE (x) == MEM)
	{
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
	  else
	    output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
	  if (small_data_operand (x, GET_MODE (x)))
	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		     reg_names[SMALL_DATA_REG]);
	}
      return;

    case 'z':
      /* X is a SYMBOL_REF.  Write out the name preceded by a
	 period and without any trailing data in brackets.  Used for function
	 names.  If we are configured for System V (or the embedded ABI) on
	 the PowerPC, do not emit the period, since those systems do not use
	 TOCs and the like.  */
      if (GET_CODE (x) != SYMBOL_REF)
	abort ();

      if (XSTR (x, 0)[0] != '.')
	{
	  switch (DEFAULT_ABI)
	    {
	    default:
	      abort ();

	    case ABI_AIX:
	      putc ('.', file);
	      break;

	    case ABI_V4:
	    case ABI_AIX_NODESC:
	    case ABI_DARWIN:
	      break;
	    }
	}
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
#else
      assemble_name (file, XSTR (x, 0));
#endif
      return;

    case 'Z':
      /* Like 'L', for last word of TImode.  */
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x) + 3]);
      else if (GET_CODE (x) == MEM)
	{
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
	  else
	    output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
	  if (small_data_operand (x, GET_MODE (x)))
	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		     reg_names[SMALL_DATA_REG]);
	}
      return;

      /* Print AltiVec or SPE memory operand.  */
    case 'y':
      {
	rtx tmp;

	if (GET_CODE (x) != MEM)
	  abort ();

	tmp = XEXP (x, 0);

	if (TARGET_SPE)
	  {
	    /* Handle [reg].  */
	    if (GET_CODE (tmp) == REG)
	      {
		fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
		break;
	      }
	    /* Handle [reg+UIMM].  */
	    else if (GET_CODE (tmp) == PLUS &&
		     GET_CODE (XEXP (tmp, 1)) == CONST_INT)
	      {
		/* NOTE(review): this local shadows the operand X of
		   print_operand.  */
		int x;

		if (GET_CODE (XEXP (tmp, 0)) != REG)
		  abort ();

		x = INTVAL (XEXP (tmp, 1));
		fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
		break;
	      }

	    /* Fall through.  Must be [reg+reg].  */
	  }
	if (GET_CODE (tmp) == REG)
	  fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
	else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
	  {
	    if (REGNO (XEXP (tmp, 0)) == 0)
	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
		       reg_names[ REGNO (XEXP (tmp, 0)) ]);
	    else
	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
		       reg_names[ REGNO (XEXP (tmp, 1)) ]);
	  }
	else
	  abort ();
	break;
      }

    case 0:
      /* No modifier: print the operand in its natural form.  */
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x)]);
      else if (GET_CODE (x) == MEM)
	{
	  /* We need to handle PRE_INC and PRE_DEC here, since we need to
	     know the width from the mode.  */
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC)
	    fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
	  else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
	  else
	    output_address (XEXP (x, 0));
	}
      else
	output_addr_const (file, x);
      return;

    default:
      output_operand_lossage ("invalid %%xn code");
    }
}
8137 \f
/* Print the address of an operand.  X is an address RTX in one of the
   forms this backend generates: plain register, symbolic constant,
   reg+reg, reg+const, an ELF or Mach-O LO_SUM, or a constant-pool
   (TOC) reference.  Anything else aborts.  */

void
print_operand_address (file, x)
     FILE *file;
     rtx x;
{
  if (GET_CODE (x) == REG)
    fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
  else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
	   || GET_CODE (x) == LABEL_REF)
    {
      output_addr_const (file, x);
      if (small_data_operand (x, GET_MODE (x)))
	fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		 reg_names[SMALL_DATA_REG]);
      else if (TARGET_TOC)
	/* Bare symbolic addresses should not survive to here on TOC
	   targets.  */
	abort ();
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
    {
      /* reg+reg: when the first register is r0 print it second --
	 presumably because 0 in the base slot means "no base
	 register" in the assembler syntax; confirm against the ISA.  */
      if (REGNO (XEXP (x, 0)) == 0)
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
		 reg_names[ REGNO (XEXP (x, 0)) ]);
      else
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
		 reg_names[ REGNO (XEXP (x, 1)) ]);
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      /* reg+const: "offset(reg)".  */
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
      fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#if TARGET_ELF
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
	   && CONSTANT_P (XEXP (x, 1)))
    {
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
#if TARGET_MACHO
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
	   && CONSTANT_P (XEXP (x, 1)))
    {
      fprintf (file, "lo16(");
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
  else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
    {
      if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
	{
	  rtx contains_minus = XEXP (x, 1);
	  rtx minus, symref;
	  const char *name;

	  /* Find the (minus (sym) (toc)) buried in X, and temporarily
	     turn it into (sym) for output_addr_const.  */
	  while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
	    contains_minus = XEXP (contains_minus, 0);

	  minus = XEXP (contains_minus, 0);
	  symref = XEXP (minus, 0);
	  XEXP (contains_minus, 0) = symref;
	  if (TARGET_ELF)
	    {
	      char *newname;

	      /* On ELF additionally tag the symbol name with "@toc".
		 The original name is put back below, so the alloca'd
		 buffer never escapes this function.  */
	      name = XSTR (symref, 0);
	      newname = alloca (strlen (name) + sizeof ("@toc"));
	      strcpy (newname, name);
	      strcat (newname, "@toc");
	      XSTR (symref, 0) = newname;
	    }
	  output_addr_const (file, XEXP (x, 1));
	  if (TARGET_ELF)
	    XSTR (symref, 0) = name;
	  /* Restore the original (minus ...) expression.  */
	  XEXP (contains_minus, 0) = minus;
	}
      else
	output_addr_const (file, XEXP (x, 1));

      fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
    }
  else
    abort ();
}
8227 \f
/* Target hook for assembling integer objects.  The PowerPC version has
   to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
   is defined.  It also needs to handle DI-mode objects on 64-bit
   targets.  Returns true when the value has been emitted here,
   otherwise defers to default_assemble_integer.  */

static bool
rs6000_assemble_integer (x, size, aligned_p)
     rtx x;
     unsigned int size;
     int aligned_p;
{
#ifdef RELOCATABLE_NEEDS_FIXUP
  /* Special handling for SI values.  */
  if (size == 4 && aligned_p)
    {
      extern int in_toc_section PARAMS ((void));
      /* RECURSE prevents the fixup emission below from taking this
	 branch again if it re-enters this hook -- presumably via the
	 assemble_name/output_addr_const calls; confirm.  */
      static int recurse = 0;

      /* For -mrelocatable, we mark all addresses that need to be fixed up
	 in the .fixup section.  */
      if (TARGET_RELOCATABLE
	  && !in_toc_section ()
	  && !in_text_section ()
	  && !recurse
	  && GET_CODE (x) != CONST_INT
	  && GET_CODE (x) != CONST_DOUBLE
	  && CONSTANT_P (x))
	{
	  char buf[256];

	  recurse = 1;
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
	  fixuplabelno++;
	  ASM_OUTPUT_LABEL (asm_out_file, buf);
	  fprintf (asm_out_file, "\t.long\t(");
	  output_addr_const (asm_out_file, x);
	  fprintf (asm_out_file, ")@fixup\n");
	  fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
	  ASM_OUTPUT_ALIGN (asm_out_file, 2);
	  fprintf (asm_out_file, "\t.long\t");
	  assemble_name (asm_out_file, buf);
	  fprintf (asm_out_file, "\n\t.previous\n");
	  recurse = 0;
	  return true;
	}
      /* Remove initial .'s to turn a -mcall-aixdesc function
	 address into the address of the descriptor, not the function
	 itself.  */
      else if (GET_CODE (x) == SYMBOL_REF
	       && XSTR (x, 0)[0] == '.'
	       && DEFAULT_ABI == ABI_AIX)
	{
	  const char *name = XSTR (x, 0);
	  while (*name == '.')
	    name++;

	  fprintf (asm_out_file, "\t.long\t%s\n", name);
	  return true;
	}
    }
#endif /* RELOCATABLE_NEEDS_FIXUP */
  return default_assemble_integer (x, size, aligned_p);
}
8291
8292 #ifdef HAVE_GAS_HIDDEN
8293 /* Emit an assembler directive to set symbol visibility for DECL to
8294 VISIBILITY_TYPE. */
8295
8296 static void
8297 rs6000_assemble_visibility (decl, vis)
8298 tree decl;
8299 int vis;
8300 {
8301 /* Functions need to have their entry point symbol visibility set as
8302 well as their descriptor symbol visibility. */
8303 if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
8304 {
8305 static const char * const visibility_types[] = {
8306 NULL, "internal", "hidden", "protected"
8307 };
8308
8309 const char *name, *type;
8310
8311 name = ((* targetm.strip_name_encoding)
8312 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
8313 type = visibility_types[vis];
8314
8315 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
8316 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
8317 }
8318 else
8319 default_assemble_visibility (decl, vis);
8320 }
8321 #endif
8322 \f
8323 enum rtx_code
8324 rs6000_reverse_condition (mode, code)
8325 enum machine_mode mode;
8326 enum rtx_code code;
8327 {
8328 /* Reversal of FP compares takes care -- an ordered compare
8329 becomes an unordered compare and vice versa. */
8330 if (mode == CCFPmode && !flag_unsafe_math_optimizations)
8331 return reverse_condition_maybe_unordered (code);
8332 else
8333 return reverse_condition (code);
8334 }
8335
/* Generate a compare for CODE.  Return a brand-new rtx that
   represents the result of the compare.  The operands compared are
   taken from the globals rs6000_compare_op0/op1, with
   rs6000_compare_fp_p selecting a floating-point compare.  */

static rtx
rs6000_generate_compare (code)
     enum rtx_code code;
{
  enum machine_mode comp_mode;
  rtx compare_result;

  /* Pick the CC mode: FP compares and unsigned integer compares each
     need their own mode.  */
  if (rs6000_compare_fp_p)
    comp_mode = CCFPmode;
  else if (code == GTU || code == LTU
	   || code == GEU || code == LEU)
    comp_mode = CCUNSmode;
  else
    comp_mode = CCmode;

  /* First, the compare.  */
  compare_result = gen_reg_rtx (comp_mode);

  /* SPE FP compare instructions on the GPRs.  Yuck!  */
  if ((TARGET_SPE && TARGET_HARD_FLOAT) && rs6000_compare_fp_p)
    {
      rtx cmp, or1, or2, or_result, compare_result2;

      /* Only eq/gt/lt test instructions exist; each comparison is
	 mapped onto one of those three.  */
      switch (code)
	{
	case EQ:
	case UNEQ:
	case NE:
	case LTGT:
	  cmp = flag_unsafe_math_optimizations
	    ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1);
	  break;
	case GT:
	case GTU:
	case UNGT:
	case UNGE:
	case GE:
	case GEU:
	  cmp = flag_unsafe_math_optimizations
	    ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1);
	  break;
	case LT:
	case LTU:
	case UNLT:
	case UNLE:
	case LE:
	case LEU:
	  cmp = flag_unsafe_math_optimizations
	    ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1);
	  break;
	default:
	  abort ();
	}

      /* Synthesize LE and GE from LT/GT || EQ.  */
      if (code == LE || code == GE || code == LEU || code == GEU)
	{
	  /* Synthesize GE/LE from GT/LT || EQ.  */

	  emit_insn (cmp);

	  switch (code)
	    {
	    case LE: code = LT; break;
	    case GE: code = GT; break;
	    case LEU: code = LT; break;
	    case GEU: code = GT; break;
	    default: abort ();
	    }

	  or1 = gen_reg_rtx (SImode);
	  or2 = gen_reg_rtx (SImode);
	  or_result = gen_reg_rtx (CCEQmode);
	  compare_result2 = gen_reg_rtx (CCFPmode);

	  /* Do the EQ.  */
	  cmp = flag_unsafe_math_optimizations
	    ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
			       rs6000_compare_op1);
	  emit_insn (cmp);

	  /* The MC8540 FP compare instructions set the CR bits
	     differently than other PPC compare instructions.  For
	     that matter, there is no generic test instruction, but a
	     testgt, testlt, and testeq.  For a true condition, bit 2
	     is set (x1xx) in the CR.  Following the traditional CR
	     values:

	     LT    GT    EQ    OV
	     bit3  bit2  bit1  bit0

	     ... bit 2 would be a GT CR alias, so later on we
	     look in the GT bits for the branch instructions.
	     However, we must be careful to emit correct RTL in
	     the meantime, so optimizations don't get confused.  */

	  /* NOTE(review): these assignments overwrite the pseudos
	     allocated to or1/or2 just above, leaving those
	     gen_reg_rtx calls unused.  */
	  or1 = gen_rtx (NE, SImode, compare_result, const0_rtx);
	  or2 = gen_rtx (NE, SImode, compare_result2, const0_rtx);

	  /* OR them together.  */
	  cmp = gen_rtx_SET (VOIDmode, or_result,
			     gen_rtx_COMPARE (CCEQmode,
					      gen_rtx_IOR (SImode, or1, or2),
					      const_true_rtx));
	  compare_result = or_result;
	  code = EQ;
	}
      else
	{
	  /* We only care about 1 bit (x1xx), so map everything to NE to
	     maintain rtl sanity.  We'll get to the right bit (x1xx) at
	     code output time.  */
	  if (code == NE || code == LTGT)
	    /* Do the inverse here because we have no cmpne
	       instruction.  We use the cmpeq instruction and expect
	       to get a 0 instead.  */
	    code = EQ;
	  else
	    code = NE;
	}

      emit_insn (cmp);
    }
  else
    emit_insn (gen_rtx_SET (VOIDmode, compare_result,
			    gen_rtx_COMPARE (comp_mode,
					     rs6000_compare_op0,
					     rs6000_compare_op1)));

  /* Some kinds of FP comparisons need an OR operation;
     except for flag_unsafe_math_optimizations we don't bother.  */
  if (rs6000_compare_fp_p
      && ! flag_unsafe_math_optimizations
      && ! (TARGET_HARD_FLOAT && TARGET_SPE)
      && (code == LE || code == GE
	  || code == UNEQ || code == LTGT
	  || code == UNGT || code == UNLT))
    {
      enum rtx_code or1, or2;
      rtx or1_rtx, or2_rtx, compare2_rtx;
      rtx or_result = gen_reg_rtx (CCEQmode);

      /* Split the comparison into two simpler ones whose CR bits can
	 be ORed together.  */
      switch (code)
	{
	case LE: or1 = LT;  or2 = EQ;  break;
	case GE: or1 = GT;  or2 = EQ;  break;
	case UNEQ: or1 = UNORDERED;  or2 = EQ;  break;
	case LTGT: or1 = LT;  or2 = GT;  break;
	case UNGT: or1 = UNORDERED;  or2 = GT;  break;
	case UNLT: or1 = UNORDERED;  or2 = LT;  break;
	default: abort ();
	}
      validate_condition_mode (or1, comp_mode);
      validate_condition_mode (or2, comp_mode);
      or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
      or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
      compare2_rtx = gen_rtx_COMPARE (CCEQmode,
				      gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
				      const_true_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));

      compare_result = or_result;
      code = EQ;
    }

  validate_condition_mode (code, GET_MODE (compare_result));

  return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
}
8519
8520
8521 /* Emit the RTL for an sCOND pattern. */
8522
8523 void
8524 rs6000_emit_sCOND (code, result)
8525 enum rtx_code code;
8526 rtx result;
8527 {
8528 rtx condition_rtx;
8529 enum machine_mode op_mode;
8530
8531 condition_rtx = rs6000_generate_compare (code);
8532
8533 op_mode = GET_MODE (rs6000_compare_op0);
8534 if (op_mode == VOIDmode)
8535 op_mode = GET_MODE (rs6000_compare_op1);
8536
8537 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
8538 {
8539 PUT_MODE (condition_rtx, DImode);
8540 convert_move (result, condition_rtx, 0);
8541 }
8542 else
8543 {
8544 PUT_MODE (condition_rtx, SImode);
8545 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
8546 }
8547 }
8548
8549 /* Emit a branch of kind CODE to location LOC. */
8550
8551 void
8552 rs6000_emit_cbranch (code, loc)
8553 enum rtx_code code;
8554 rtx loc;
8555 {
8556 rtx condition_rtx, loc_ref;
8557
8558 condition_rtx = rs6000_generate_compare (code);
8559 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
8560 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
8561 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
8562 loc_ref, pc_rtx)));
8563 }
8564
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label, or -1 if the branch is really a
   conditional return.

   OP is the conditional expression.  XEXP (OP, 0) is assumed to be a
   condition code register and its mode specifies what kind of
   comparison we made.

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.

   The string is assembled in a static buffer, so the result is only
   valid until the next call.  */

char *
output_cbranch (op, label, reversed, insn)
     rtx op;
     const char * label;
     int reversed;
     rtx insn;
{
  static char string[64];
  enum rtx_code code = GET_CODE (op);
  rtx cc_reg = XEXP (op, 0);
  enum machine_mode mode = GET_MODE (cc_reg);
  int cc_regno = REGNO (cc_reg) - CR0_REGNO;
  /* A length of 8 means the target is out of range of one conditional
     branch insn, so we must branch around an unconditional branch.  */
  int need_longbranch = label != NULL && get_attr_length (insn) == 8;
  /* In the long-branch case the conditional jump skips the "b" insn,
     so its sense is inverted relative to what the caller asked for.  */
  int really_reversed = reversed ^ need_longbranch;
  char *s = string;
  const char *ccode;
  const char *pred;
  rtx note;

  validate_condition_mode (code, mode);

  /* Work out which way this really branches.  We could use
     reverse_condition_maybe_unordered here always but this
     makes the resulting assembler clearer.  */
  if (really_reversed)
    {
      /* Reversal of FP compares takes care -- an ordered compare
	 becomes an unordered compare and vice versa.  */
      if (mode == CCFPmode)
	code = reverse_condition_maybe_unordered (code);
      else
	code = reverse_condition (code);
    }

  if ((TARGET_SPE && TARGET_HARD_FLOAT) && mode == CCFPmode)
    {
      /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
	 to the GT bit.  Only EQ/NE are expected here because
	 rs6000_generate_compare maps all SPE FP comparisons to one of
	 those two codes.  */
      if (code == EQ)
	/* Opposite of GT.  */
	code = UNLE;
      else if (code == NE)
	code = GT;
      else
	abort ();
    }

  /* Map the RTL comparison code to the assembler condition suffix.  */
  switch (code)
    {
      /* Not all of these are actually distinct opcodes, but
	 we distinguish them for clarity of the resulting assembler.  */
    case NE: case LTGT:
      ccode = "ne"; break;
    case EQ: case UNEQ:
      ccode = "eq"; break;
    case GE: case GEU:
      ccode = "ge"; break;
    case GT: case GTU: case UNGT:
      ccode = "gt"; break;
    case LE: case LEU:
      ccode = "le"; break;
    case LT: case LTU: case UNLT:
      ccode = "lt"; break;
    case UNORDERED: ccode = "un"; break;
    case ORDERED: ccode = "nu"; break;
    case UNGE: ccode = "nl"; break;
    case UNLE: ccode = "ng"; break;
    default:
      abort ();
    }

  /* Maybe we have a guess as to how likely the branch is.
     The old mnemonics don't have a way to specify this information.  */
  pred = "";
  note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
  if (note != NULL_RTX)
    {
      /* PROB is the difference from 50%.  */
      int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
      bool always_hint = rs6000_cpu != PROCESSOR_POWER4;

      /* Only hint for highly probable/improbable branches on newer
	 cpus as static prediction overrides processor dynamic
	 prediction.  For older cpus we may as well always hint, but
	 assume not taken for branches that are very close to 50% as a
	 mispredicted taken branch is more expensive than a
	 mispredicted not-taken branch.  */
      if (always_hint
	  || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
	{
	  /* The hint is inverted for a long branch because the
	     conditional jump then goes the "wrong" way around the
	     unconditional one.  */
	  if (abs (prob) > REG_BR_PROB_BASE / 20
	      && ((prob > 0) ^ need_longbranch))
	    pred = "+";
	  else
	    pred = "-";
	}
    }

  /* The {old|new} braces select between mnemonic dialects at output
     time; a NULL LABEL means this is really a conditional return.  */
  if (label == NULL)
    s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
  else
    s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);

  /* We need to escape any '%' characters in the reg_names string.
     Assume they'd only be the first character...  */
  if (reg_names[cc_regno + CR0_REGNO][0] == '%')
    *s++ = '%';
  s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);

  if (label != NULL)
    {
      /* If the branch distance was too far, we may have to use an
	 unconditional branch to go the distance.  */
      if (need_longbranch)
	s += sprintf (s, ",$+8\n\tb %s", label);
      else
	s += sprintf (s, ",%s", label);
    }

  return string;
}
8698
8699 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
8700 operands of the last comparison is nonzero/true, FALSE_COND if it
8701 is zero/false. Return 0 if the hardware has no such operation. */
8702
8703 int
8704 rs6000_emit_cmove (dest, op, true_cond, false_cond)
8705 rtx dest;
8706 rtx op;
8707 rtx true_cond;
8708 rtx false_cond;
8709 {
8710 enum rtx_code code = GET_CODE (op);
8711 rtx op0 = rs6000_compare_op0;
8712 rtx op1 = rs6000_compare_op1;
8713 REAL_VALUE_TYPE c1;
8714 enum machine_mode compare_mode = GET_MODE (op0);
8715 enum machine_mode result_mode = GET_MODE (dest);
8716 rtx temp;
8717
8718 /* These modes should always match. */
8719 if (GET_MODE (op1) != compare_mode
8720 /* In the isel case however, we can use a compare immediate, so
8721 op1 may be a small constant. */
8722 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
8723 return 0;
8724 if (GET_MODE (true_cond) != result_mode)
8725 return 0;
8726 if (GET_MODE (false_cond) != result_mode)
8727 return 0;
8728
8729 /* First, work out if the hardware can do this at all, or
8730 if it's too slow... */
8731 if (! rs6000_compare_fp_p)
8732 {
8733 if (TARGET_ISEL)
8734 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
8735 return 0;
8736 }
8737
8738 /* Eliminate half of the comparisons by switching operands, this
8739 makes the remaining code simpler. */
8740 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
8741 || code == LTGT || code == LT)
8742 {
8743 code = reverse_condition_maybe_unordered (code);
8744 temp = true_cond;
8745 true_cond = false_cond;
8746 false_cond = temp;
8747 }
8748
8749 /* UNEQ and LTGT take four instructions for a comparison with zero,
8750 it'll probably be faster to use a branch here too. */
8751 if (code == UNEQ)
8752 return 0;
8753
8754 if (GET_CODE (op1) == CONST_DOUBLE)
8755 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
8756
8757 /* We're going to try to implement comparisons by performing
8758 a subtract, then comparing against zero. Unfortunately,
8759 Inf - Inf is NaN which is not zero, and so if we don't
8760 know that the operand is finite and the comparison
8761 would treat EQ different to UNORDERED, we can't do it. */
8762 if (! flag_unsafe_math_optimizations
8763 && code != GT && code != UNGE
8764 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
8765 /* Constructs of the form (a OP b ? a : b) are safe. */
8766 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
8767 || (! rtx_equal_p (op0, true_cond)
8768 && ! rtx_equal_p (op1, true_cond))))
8769 return 0;
8770 /* At this point we know we can use fsel. */
8771
8772 /* Reduce the comparison to a comparison against zero. */
8773 temp = gen_reg_rtx (compare_mode);
8774 emit_insn (gen_rtx_SET (VOIDmode, temp,
8775 gen_rtx_MINUS (compare_mode, op0, op1)));
8776 op0 = temp;
8777 op1 = CONST0_RTX (compare_mode);
8778
8779 /* If we don't care about NaNs we can reduce some of the comparisons
8780 down to faster ones. */
8781 if (flag_unsafe_math_optimizations)
8782 switch (code)
8783 {
8784 case GT:
8785 code = LE;
8786 temp = true_cond;
8787 true_cond = false_cond;
8788 false_cond = temp;
8789 break;
8790 case UNGE:
8791 code = GE;
8792 break;
8793 case UNEQ:
8794 code = EQ;
8795 break;
8796 default:
8797 break;
8798 }
8799
8800 /* Now, reduce everything down to a GE. */
8801 switch (code)
8802 {
8803 case GE:
8804 break;
8805
8806 case LE:
8807 temp = gen_reg_rtx (compare_mode);
8808 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8809 op0 = temp;
8810 break;
8811
8812 case ORDERED:
8813 temp = gen_reg_rtx (compare_mode);
8814 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
8815 op0 = temp;
8816 break;
8817
8818 case EQ:
8819 temp = gen_reg_rtx (compare_mode);
8820 emit_insn (gen_rtx_SET (VOIDmode, temp,
8821 gen_rtx_NEG (compare_mode,
8822 gen_rtx_ABS (compare_mode, op0))));
8823 op0 = temp;
8824 break;
8825
8826 case UNGE:
8827 temp = gen_reg_rtx (result_mode);
8828 emit_insn (gen_rtx_SET (VOIDmode, temp,
8829 gen_rtx_IF_THEN_ELSE (result_mode,
8830 gen_rtx_GE (VOIDmode,
8831 op0, op1),
8832 true_cond, false_cond)));
8833 false_cond = temp;
8834 true_cond = false_cond;
8835
8836 temp = gen_reg_rtx (compare_mode);
8837 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8838 op0 = temp;
8839 break;
8840
8841 case GT:
8842 temp = gen_reg_rtx (result_mode);
8843 emit_insn (gen_rtx_SET (VOIDmode, temp,
8844 gen_rtx_IF_THEN_ELSE (result_mode,
8845 gen_rtx_GE (VOIDmode,
8846 op0, op1),
8847 true_cond, false_cond)));
8848 true_cond = temp;
8849 false_cond = true_cond;
8850
8851 temp = gen_reg_rtx (compare_mode);
8852 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8853 op0 = temp;
8854 break;
8855
8856 default:
8857 abort ();
8858 }
8859
8860 emit_insn (gen_rtx_SET (VOIDmode, dest,
8861 gen_rtx_IF_THEN_ELSE (result_mode,
8862 gen_rtx_GE (VOIDmode,
8863 op0, op1),
8864 true_cond, false_cond)));
8865 return 1;
8866 }
8867
8868 /* Same as above, but for ints (isel). */
8869
8870 static int
8871 rs6000_emit_int_cmove (dest, op, true_cond, false_cond)
8872 rtx dest;
8873 rtx op;
8874 rtx true_cond;
8875 rtx false_cond;
8876 {
8877 rtx condition_rtx, cr;
8878
8879 /* All isel implementations thus far are 32-bits. */
8880 if (GET_MODE (rs6000_compare_op0) != SImode)
8881 return 0;
8882
8883 /* We still have to do the compare, because isel doesn't do a
8884 compare, it just looks at the CRx bits set by a previous compare
8885 instruction. */
8886 condition_rtx = rs6000_generate_compare (GET_CODE (op));
8887 cr = XEXP (condition_rtx, 0);
8888
8889 if (GET_MODE (cr) == CCmode)
8890 emit_insn (gen_isel_signed (dest, condition_rtx,
8891 true_cond, false_cond, cr));
8892 else
8893 emit_insn (gen_isel_unsigned (dest, condition_rtx,
8894 true_cond, false_cond, cr));
8895
8896 return 1;
8897 }
8898
8899 const char *
8900 output_isel (operands)
8901 rtx *operands;
8902 {
8903 enum rtx_code code;
8904
8905 code = GET_CODE (operands[1]);
8906 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
8907 {
8908 PUT_CODE (operands[1], reverse_condition (code));
8909 return "isel %0,%3,%2,%j1";
8910 }
8911 else
8912 return "isel %0,%2,%3,%j1";
8913 }
8914
8915 void
8916 rs6000_emit_minmax (dest, code, op0, op1)
8917 rtx dest;
8918 enum rtx_code code;
8919 rtx op0;
8920 rtx op1;
8921 {
8922 enum machine_mode mode = GET_MODE (op0);
8923 enum rtx_code c;
8924 rtx target;
8925
8926 if (code == SMAX || code == SMIN)
8927 c = GE;
8928 else
8929 c = GEU;
8930
8931 if (code == SMAX || code == UMAX)
8932 target = emit_conditional_move (dest, c, op0, op1, mode,
8933 op0, op1, mode, 0);
8934 else
8935 target = emit_conditional_move (dest, c, op0, op1, mode,
8936 op1, op0, mode, 0);
8937 if (target == NULL_RTX)
8938 abort ();
8939 if (target != dest)
8940 emit_move_insn (dest, target);
8941 }
8942 \f
8943 /* This page contains routines that are used to determine what the
8944 function prologue and epilogue code will do and write them out. */
8945
8946 /* Return the first fixed-point register that is required to be
8947 saved. 32 if none. */
8948
8949 int
8950 first_reg_to_save ()
8951 {
8952 int first_reg;
8953
8954 /* Find lowest numbered live register. */
8955 for (first_reg = 13; first_reg <= 31; first_reg++)
8956 if (regs_ever_live[first_reg]
8957 && (! call_used_regs[first_reg]
8958 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
8959 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
8960 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
8961 break;
8962
8963 #if TARGET_MACHO
8964 if (flag_pic
8965 && current_function_uses_pic_offset_table
8966 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
8967 return RS6000_PIC_OFFSET_TABLE_REGNUM;
8968 #endif
8969
8970 return first_reg;
8971 }
8972
8973 /* Similar, for FP regs. */
8974
8975 int
8976 first_fp_reg_to_save ()
8977 {
8978 int first_reg;
8979
8980 /* Find lowest numbered live register. */
8981 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
8982 if (regs_ever_live[first_reg])
8983 break;
8984
8985 return first_reg;
8986 }
8987
8988 /* Similar, for AltiVec regs. */
8989
8990 static int
8991 first_altivec_reg_to_save ()
8992 {
8993 int i;
8994
8995 /* Stack frame remains as is unless we are in AltiVec ABI. */
8996 if (! TARGET_ALTIVEC_ABI)
8997 return LAST_ALTIVEC_REGNO + 1;
8998
8999 /* Find lowest numbered live register. */
9000 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
9001 if (regs_ever_live[i])
9002 break;
9003
9004 return i;
9005 }
9006
9007 /* Return a 32-bit mask of the AltiVec registers we need to set in
9008 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
9009 the 32-bit word is 0. */
9010
9011 static unsigned int
9012 compute_vrsave_mask ()
9013 {
9014 unsigned int i, mask = 0;
9015
9016 /* First, find out if we use _any_ altivec registers. */
9017 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
9018 if (regs_ever_live[i])
9019 mask |= ALTIVEC_REG_BIT (i);
9020
9021 if (mask == 0)
9022 return mask;
9023
9024 /* Next, add all registers that are call-clobbered. We do this
9025 because post-reload register optimizers such as regrename_optimize
9026 may choose to use them. They never change the register class
9027 chosen by reload, so cannot create new uses of altivec registers
9028 if there were none before, so the early exit above is safe. */
9029 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
9030 altivec registers not saved in the mask, which might well make the
9031 adjustments below more effective in eliding the save/restore of
9032 VRSAVE in small functions. */
9033 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
9034 if (call_used_regs[i])
9035 mask |= ALTIVEC_REG_BIT (i);
9036
9037 /* Next, remove the argument registers from the set. These must
9038 be in the VRSAVE mask set by the caller, so we don't need to add
9039 them in again. More importantly, the mask we compute here is
9040 used to generate CLOBBERs in the set_vrsave insn, and we do not
9041 wish the argument registers to die. */
9042 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
9043 mask &= ~ALTIVEC_REG_BIT (i);
9044
9045 /* Similarly, remove the return value from the set. */
9046 {
9047 bool yes = false;
9048 diddle_return_value (is_altivec_return_reg, &yes);
9049 if (yes)
9050 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
9051 }
9052
9053 return mask;
9054 }
9055
9056 static void
9057 is_altivec_return_reg (reg, xyes)
9058 rtx reg;
9059 void *xyes;
9060 {
9061 bool *yes = (bool *) xyes;
9062 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
9063 *yes = true;
9064 }
9065
9066 \f
9067 /* Calculate the stack information for the current function. This is
9068 complicated by having two separate calling sequences, the AIX calling
9069 sequence and the V.4 calling sequence.
9070
9071 AIX (and Darwin/Mac OS X) stack frames look like:
9072 32-bit 64-bit
9073 SP----> +---------------------------------------+
9074 | back chain to caller | 0 0
9075 +---------------------------------------+
9076 | saved CR | 4 8 (8-11)
9077 +---------------------------------------+
9078 | saved LR | 8 16
9079 +---------------------------------------+
9080 | reserved for compilers | 12 24
9081 +---------------------------------------+
9082 | reserved for binders | 16 32
9083 +---------------------------------------+
9084 | saved TOC pointer | 20 40
9085 +---------------------------------------+
9086 | Parameter save area (P) | 24 48
9087 +---------------------------------------+
9088 | Alloca space (A) | 24+P etc.
9089 +---------------------------------------+
9090 | Local variable space (L) | 24+P+A
9091 +---------------------------------------+
9092 | Float/int conversion temporary (X) | 24+P+A+L
9093 +---------------------------------------+
9094 | Save area for AltiVec registers (W) | 24+P+A+L+X
9095 +---------------------------------------+
9096 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
9097 +---------------------------------------+
9098 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
9099 +---------------------------------------+
	| Save area for GP registers (G)	| 24+P+A+L+X+W+Y+Z
9101 +---------------------------------------+
	| Save area for FP registers (F)	| 24+P+A+L+X+W+Y+Z+G
9103 +---------------------------------------+
9104 old SP->| back chain to caller's caller |
9105 +---------------------------------------+
9106
9107 The required alignment for AIX configurations is two words (i.e., 8
9108 or 16 bytes).
9109
9110
9111 V.4 stack frames look like:
9112
9113 SP----> +---------------------------------------+
9114 | back chain to caller | 0
9115 +---------------------------------------+
9116 | caller's saved LR | 4
9117 +---------------------------------------+
9118 | Parameter save area (P) | 8
9119 +---------------------------------------+
9120 | Alloca space (A) | 8+P
9121 +---------------------------------------+
9122 | Varargs save area (V) | 8+P+A
9123 +---------------------------------------+
9124 | Local variable space (L) | 8+P+A+V
9125 +---------------------------------------+
9126 | Float/int conversion temporary (X) | 8+P+A+V+L
9127 +---------------------------------------+
9128 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
9129 +---------------------------------------+
9130 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
9131 +---------------------------------------+
9132 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
9133 +---------------------------------------+
9134 | SPE: area for 64-bit GP registers |
9135 +---------------------------------------+
9136 | SPE alignment padding |
9137 +---------------------------------------+
9138 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
9139 +---------------------------------------+
9140 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
9141 +---------------------------------------+
9142 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
9143 +---------------------------------------+
9144 old SP->| back chain to caller's caller |
9145 +---------------------------------------+
9146
9147 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
9148 given. (But note below and in sysv4.h that we require only 8 and
9149 may round up the size of our stack frame anyways. The historical
9150 reason is early versions of powerpc-linux which didn't properly
9151 align the stack at program startup. A happy side-effect is that
9152 -mno-eabi libraries can be used with -meabi programs.)
9153
9154 The EABI configuration defaults to the V.4 layout, unless
9155 -mcall-aix is used, in which case the AIX layout is used. However,
9156 the stack alignment requirements may differ. If -mno-eabi is not
9157 given, the required stack alignment is 8 bytes; if -mno-eabi is
9158 given, the required alignment is 16 bytes. (But see V.4 comment
9159 above.) */
9160
9161 #ifndef ABI_STACK_BOUNDARY
9162 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
9163 #endif
9164
9165 rs6000_stack_t *
9166 rs6000_stack_info ()
9167 {
9168 static rs6000_stack_t info, zero_info;
9169 rs6000_stack_t *info_ptr = &info;
9170 int reg_size = TARGET_POWERPC64 ? 8 : 4;
9171 int ehrd_size;
9172 int total_raw_size;
9173
9174 /* Zero all fields portably. */
9175 info = zero_info;
9176
9177 if (TARGET_SPE)
9178 {
9179 /* Cache value so we don't rescan instruction chain over and over. */
9180 if (cfun->machine->insn_chain_scanned_p == 0)
9181 {
9182 cfun->machine->insn_chain_scanned_p = 1;
9183 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
9184 }
9185 }
9186
9187 /* Select which calling sequence. */
9188 info_ptr->abi = DEFAULT_ABI;
9189
9190 /* Calculate which registers need to be saved & save area size. */
9191 info_ptr->first_gp_reg_save = first_reg_to_save ();
9192 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
9193 even if it currently looks like we won't. */
9194 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
9195 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
9196 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
9197 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
9198 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
9199 else
9200 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
9201
9202 /* For the SPE, we have an additional upper 32-bits on each GPR.
9203 Ideally we should save the entire 64-bits only when the upper
9204 half is used in SIMD instructions. Since we only record
9205 registers live (not the size they are used in), this proves
9206 difficult because we'd have to traverse the instruction chain at
9207 the right time, taking reload into account. This is a real pain,
9208 so we opt to save the GPRs in 64-bits always if but one register
9209 gets used in 64-bits. Otherwise, all the registers in the frame
9210 get saved in 32-bits.
9211
9212 So... since when we save all GPRs (except the SP) in 64-bits, the
9213 traditional GP save area will be empty. */
9214 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9215 info_ptr->gp_size = 0;
9216
9217 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
9218 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
9219
9220 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
9221 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
9222 - info_ptr->first_altivec_reg_save);
9223
9224 /* Does this function call anything? */
9225 info_ptr->calls_p = (! current_function_is_leaf
9226 || cfun->machine->ra_needs_full_frame);
9227
9228 /* Determine if we need to save the link register. */
9229 if (rs6000_ra_ever_killed ()
9230 || (DEFAULT_ABI == ABI_AIX
9231 && current_function_profile
9232 && !TARGET_PROFILE_KERNEL)
9233 #ifdef TARGET_RELOCATABLE
9234 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
9235 #endif
9236 || (info_ptr->first_fp_reg_save != 64
9237 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
9238 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
9239 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
9240 || (DEFAULT_ABI == ABI_DARWIN
9241 && flag_pic
9242 && current_function_uses_pic_offset_table)
9243 || info_ptr->calls_p)
9244 {
9245 info_ptr->lr_save_p = 1;
9246 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
9247 }
9248
9249 /* Determine if we need to save the condition code registers. */
9250 if (regs_ever_live[CR2_REGNO]
9251 || regs_ever_live[CR3_REGNO]
9252 || regs_ever_live[CR4_REGNO])
9253 {
9254 info_ptr->cr_save_p = 1;
9255 if (DEFAULT_ABI == ABI_V4)
9256 info_ptr->cr_size = reg_size;
9257 }
9258
9259 /* If the current function calls __builtin_eh_return, then we need
9260 to allocate stack space for registers that will hold data for
9261 the exception handler. */
9262 if (current_function_calls_eh_return)
9263 {
9264 unsigned int i;
9265 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
9266 continue;
9267
9268 /* SPE saves EH registers in 64-bits. */
9269 ehrd_size = i * (TARGET_SPE_ABI
9270 && info_ptr->spe_64bit_regs_used != 0
9271 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
9272 }
9273 else
9274 ehrd_size = 0;
9275
9276 /* Determine various sizes. */
9277 info_ptr->reg_size = reg_size;
9278 info_ptr->fixed_size = RS6000_SAVE_AREA;
9279 info_ptr->varargs_size = RS6000_VARARGS_AREA;
9280 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
9281 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
9282 8);
9283
9284 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9285 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
9286 else
9287 info_ptr->spe_gp_size = 0;
9288
9289 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
9290 {
9291 info_ptr->vrsave_mask = compute_vrsave_mask ();
9292 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
9293 }
9294 else
9295 {
9296 info_ptr->vrsave_mask = 0;
9297 info_ptr->vrsave_size = 0;
9298 }
9299
9300 /* Calculate the offsets. */
9301 switch (DEFAULT_ABI)
9302 {
9303 case ABI_NONE:
9304 default:
9305 abort ();
9306
9307 case ABI_AIX:
9308 case ABI_AIX_NODESC:
9309 case ABI_DARWIN:
9310 info_ptr->fp_save_offset = - info_ptr->fp_size;
9311 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9312
9313 if (TARGET_ALTIVEC_ABI)
9314 {
9315 info_ptr->vrsave_save_offset
9316 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
9317
9318 /* Align stack so vector save area is on a quadword boundary. */
9319 if (info_ptr->altivec_size != 0)
9320 info_ptr->altivec_padding_size
9321 = 16 - (-info_ptr->vrsave_save_offset % 16);
9322 else
9323 info_ptr->altivec_padding_size = 0;
9324
9325 info_ptr->altivec_save_offset
9326 = info_ptr->vrsave_save_offset
9327 - info_ptr->altivec_padding_size
9328 - info_ptr->altivec_size;
9329
9330 /* Adjust for AltiVec case. */
9331 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
9332 }
9333 else
9334 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
9335 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
9336 info_ptr->lr_save_offset = 2*reg_size;
9337 break;
9338
9339 case ABI_V4:
9340 info_ptr->fp_save_offset = - info_ptr->fp_size;
9341 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9342 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
9343
9344 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9345 {
9346 /* Align stack so SPE GPR save area is aligned on a
9347 double-word boundary. */
9348 if (info_ptr->spe_gp_size != 0)
9349 info_ptr->spe_padding_size
9350 = 8 - (-info_ptr->cr_save_offset % 8);
9351 else
9352 info_ptr->spe_padding_size = 0;
9353
9354 info_ptr->spe_gp_save_offset
9355 = info_ptr->cr_save_offset
9356 - info_ptr->spe_padding_size
9357 - info_ptr->spe_gp_size;
9358
9359 /* Adjust for SPE case. */
9360 info_ptr->toc_save_offset
9361 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
9362 }
9363 else if (TARGET_ALTIVEC_ABI)
9364 {
9365 info_ptr->vrsave_save_offset
9366 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
9367
9368 /* Align stack so vector save area is on a quadword boundary. */
9369 if (info_ptr->altivec_size != 0)
9370 info_ptr->altivec_padding_size
9371 = 16 - (-info_ptr->vrsave_save_offset % 16);
9372 else
9373 info_ptr->altivec_padding_size = 0;
9374
9375 info_ptr->altivec_save_offset
9376 = info_ptr->vrsave_save_offset
9377 - info_ptr->altivec_padding_size
9378 - info_ptr->altivec_size;
9379
9380 /* Adjust for AltiVec case. */
9381 info_ptr->toc_save_offset
9382 = info_ptr->altivec_save_offset - info_ptr->toc_size;
9383 }
9384 else
9385 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
9386 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
9387 info_ptr->lr_save_offset = reg_size;
9388 break;
9389 }
9390
9391 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
9392 + info_ptr->gp_size
9393 + info_ptr->altivec_size
9394 + info_ptr->altivec_padding_size
9395 + info_ptr->vrsave_size
9396 + info_ptr->spe_gp_size
9397 + info_ptr->spe_padding_size
9398 + ehrd_size
9399 + info_ptr->cr_size
9400 + info_ptr->lr_size
9401 + info_ptr->vrsave_size
9402 + info_ptr->toc_size,
9403 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
9404 ? 16 : 8);
9405
9406 total_raw_size = (info_ptr->vars_size
9407 + info_ptr->parm_size
9408 + info_ptr->save_size
9409 + info_ptr->varargs_size
9410 + info_ptr->fixed_size);
9411
9412 info_ptr->total_size =
9413 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
9414
9415 /* Determine if we need to allocate any stack frame:
9416
9417 For AIX we need to push the stack if a frame pointer is needed
9418 (because the stack might be dynamically adjusted), if we are
9419 debugging, if we make calls, or if the sum of fp_save, gp_save,
9420 and local variables are more than the space needed to save all
9421 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
9422 + 18*8 = 288 (GPR13 reserved).
9423
9424 For V.4 we don't have the stack cushion that AIX uses, but assume
9425 that the debugger can handle stackless frames. */
9426
9427 if (info_ptr->calls_p)
9428 info_ptr->push_p = 1;
9429
9430 else if (DEFAULT_ABI == ABI_V4)
9431 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
9432
9433 else if (frame_pointer_needed)
9434 info_ptr->push_p = 1;
9435
9436 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
9437 info_ptr->push_p = 1;
9438
9439 else
9440 info_ptr->push_p
9441 = total_raw_size - info_ptr->fixed_size > (TARGET_32BIT ? 220 : 288);
9442
9443 /* Zero offsets if we're not saving those registers. */
9444 if (info_ptr->fp_size == 0)
9445 info_ptr->fp_save_offset = 0;
9446
9447 if (info_ptr->gp_size == 0)
9448 info_ptr->gp_save_offset = 0;
9449
9450 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
9451 info_ptr->altivec_save_offset = 0;
9452
9453 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
9454 info_ptr->vrsave_save_offset = 0;
9455
9456 if (! TARGET_SPE_ABI
9457 || info_ptr->spe_64bit_regs_used == 0
9458 || info_ptr->spe_gp_size == 0)
9459 info_ptr->spe_gp_save_offset = 0;
9460
9461 if (! info_ptr->lr_save_p)
9462 info_ptr->lr_save_offset = 0;
9463
9464 if (! info_ptr->cr_save_p)
9465 info_ptr->cr_save_offset = 0;
9466
9467 if (! info_ptr->toc_save_p)
9468 info_ptr->toc_save_offset = 0;
9469
9470 return info_ptr;
9471 }
9472
9473 /* Return true if the current function uses any GPRs in 64-bit SIMD
9474 mode. */
9475
9476 static bool
9477 spe_func_has_64bit_regs_p ()
9478 {
9479 rtx insns, insn;
9480
9481 /* Functions that save and restore all the call-saved registers will
9482 need to save/restore the registers in 64-bits. */
9483 if (current_function_calls_eh_return
9484 || current_function_calls_setjmp
9485 || current_function_has_nonlocal_goto)
9486 return true;
9487
9488 insns = get_insns ();
9489
9490 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
9491 {
9492 if (INSN_P (insn))
9493 {
9494 rtx i;
9495
9496 i = PATTERN (insn);
9497 if (GET_CODE (i) == SET
9498 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
9499 return true;
9500 }
9501 }
9502
9503 return false;
9504 }
9505
/* Dump a human-readable description of the stack-frame layout record
   INFO to stderr, for use from within a debugger.  If INFO is null,
   the layout for the current function is computed via
   rs6000_stack_info and dumped instead.  Each field is printed only
   when it differs from its "nothing saved here" default, so the
   output shows just the interesting parts of the frame.  */

void
debug_stack_info (info)
     rs6000_stack_t *info;
{
  const char *abi_string;

  /* No record supplied: compute the current function's layout.  */
  if (! info)
    info = rs6000_stack_info ();

  fprintf (stderr, "\nStack information for function %s:\n",
	   ((current_function_decl && DECL_NAME (current_function_decl))
	    ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
	    : "<unknown>"));

  /* Which ABI's frame layout this is.  */
  switch (info->abi)
    {
    default:		 abi_string = "Unknown";	break;
    case ABI_NONE:	 abi_string = "NONE";		break;
    case ABI_AIX:
    case ABI_AIX_NODESC: abi_string = "AIX";		break;
    case ABI_DARWIN:	 abi_string = "Darwin";		break;
    case ABI_V4:	 abi_string = "V.4";		break;
    }

  fprintf (stderr, "\tABI                 = %5s\n", abi_string);

  if (TARGET_ALTIVEC_ABI)
    fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");

  if (TARGET_SPE_ABI)
    fprintf (stderr, "\tSPE ABI extensions enabled.\n");

  /* First register saved of each class; 32 / 64 / past-the-end mean
     "none saved".  */
  if (info->first_gp_reg_save != 32)
    fprintf (stderr, "\tfirst_gp_reg_save   = %5d\n", info->first_gp_reg_save);

  if (info->first_fp_reg_save != 64)
    fprintf (stderr, "\tfirst_fp_reg_save   = %5d\n", info->first_fp_reg_save);

  if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
    fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
	     info->first_altivec_reg_save);

  /* Flags: which special registers need saving.  */
  if (info->lr_save_p)
    fprintf (stderr, "\tlr_save_p           = %5d\n", info->lr_save_p);

  if (info->cr_save_p)
    fprintf (stderr, "\tcr_save_p           = %5d\n", info->cr_save_p);

  if (info->toc_save_p)
    fprintf (stderr, "\ttoc_save_p          = %5d\n", info->toc_save_p);

  if (info->vrsave_mask)
    fprintf (stderr, "\tvrsave_mask         = 0x%x\n", info->vrsave_mask);

  if (info->push_p)
    fprintf (stderr, "\tpush_p              = %5d\n", info->push_p);

  if (info->calls_p)
    fprintf (stderr, "\tcalls_p             = %5d\n", info->calls_p);

  /* Offsets (from the stack pointer) of each save area.  */
  if (info->gp_save_offset)
    fprintf (stderr, "\tgp_save_offset      = %5d\n", info->gp_save_offset);

  if (info->fp_save_offset)
    fprintf (stderr, "\tfp_save_offset      = %5d\n", info->fp_save_offset);

  if (info->altivec_save_offset)
    fprintf (stderr, "\taltivec_save_offset = %5d\n",
	     info->altivec_save_offset);

  if (info->spe_gp_save_offset)
    fprintf (stderr, "\tspe_gp_save_offset  = %5d\n",
	     info->spe_gp_save_offset);

  if (info->vrsave_save_offset)
    fprintf (stderr, "\tvrsave_save_offset  = %5d\n",
	     info->vrsave_save_offset);

  if (info->lr_save_offset)
    fprintf (stderr, "\tlr_save_offset      = %5d\n", info->lr_save_offset);

  if (info->cr_save_offset)
    fprintf (stderr, "\tcr_save_offset      = %5d\n", info->cr_save_offset);

  if (info->toc_save_offset)
    fprintf (stderr, "\ttoc_save_offset     = %5d\n", info->toc_save_offset);

  if (info->varargs_save_offset)
    fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);

  /* Sizes of the frame's components.  */
  if (info->total_size)
    fprintf (stderr, "\ttotal_size          = %5d\n", info->total_size);

  if (info->varargs_size)
    fprintf (stderr, "\tvarargs_size        = %5d\n", info->varargs_size);

  if (info->vars_size)
    fprintf (stderr, "\tvars_size           = %5d\n", info->vars_size);

  if (info->parm_size)
    fprintf (stderr, "\tparm_size           = %5d\n", info->parm_size);

  if (info->fixed_size)
    fprintf (stderr, "\tfixed_size          = %5d\n", info->fixed_size);

  if (info->gp_size)
    fprintf (stderr, "\tgp_size             = %5d\n", info->gp_size);

  if (info->spe_gp_size)
    fprintf (stderr, "\tspe_gp_size         = %5d\n", info->spe_gp_size);

  if (info->fp_size)
    fprintf (stderr, "\tfp_size             = %5d\n", info->fp_size);

  if (info->altivec_size)
    fprintf (stderr, "\taltivec_size        = %5d\n", info->altivec_size);

  if (info->vrsave_size)
    fprintf (stderr, "\tvrsave_size         = %5d\n", info->vrsave_size);

  if (info->altivec_padding_size)
    fprintf (stderr, "\taltivec_padding_size= %5d\n",
	     info->altivec_padding_size);

  if (info->spe_padding_size)
    fprintf (stderr, "\tspe_padding_size    = %5d\n",
	     info->spe_padding_size);

  if (info->lr_size)
    fprintf (stderr, "\tlr_size             = %5d\n", info->lr_size);

  if (info->cr_size)
    fprintf (stderr, "\tcr_size             = %5d\n", info->cr_size);

  if (info->toc_size)
    fprintf (stderr, "\ttoc_size            = %5d\n", info->toc_size);

  if (info->save_size)
    fprintf (stderr, "\tsave_size           = %5d\n", info->save_size);

  /* reg_size is 8 when GPRs are saved as 64-bit quantities.  */
  if (info->reg_size != 4)
    fprintf (stderr, "\treg_size            = %5d\n", info->reg_size);

  fprintf (stderr, "\n");
}
9651
9652 rtx
9653 rs6000_return_addr (count, frame)
9654 int count;
9655 rtx frame;
9656 {
9657 /* Currently we don't optimize very well between prolog and body
9658 code and for PIC code the code can be actually quite bad, so
9659 don't try to be too clever here. */
9660 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
9661 {
9662 cfun->machine->ra_needs_full_frame = 1;
9663
9664 return
9665 gen_rtx_MEM
9666 (Pmode,
9667 memory_address
9668 (Pmode,
9669 plus_constant (copy_to_reg
9670 (gen_rtx_MEM (Pmode,
9671 memory_address (Pmode, frame))),
9672 RETURN_ADDRESS_OFFSET)));
9673 }
9674
9675 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
9676 }
9677
9678 /* Say whether a function is a candidate for sibcall handling or not.
9679 We do not allow indirect calls to be optimized into sibling calls.
9680 Also, we can't do it if there are any vector parameters; there's
9681 nowhere to put the VRsave code so it works; note that functions with
9682 vector parameters are required to have a prototype, so the argument
9683 type info must be available here. (The tail recursion case can work
9684 with vector parameters, but there's no way to distinguish here.) */
9685 static bool
9686 rs6000_function_ok_for_sibcall (decl, exp)
9687 tree decl;
9688 tree exp ATTRIBUTE_UNUSED;
9689 {
9690 tree type;
9691 if (decl)
9692 {
9693 if (TARGET_ALTIVEC_VRSAVE)
9694 {
9695 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
9696 type; type = TREE_CHAIN (type))
9697 {
9698 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
9699 return false;
9700 }
9701 }
9702 if (DEFAULT_ABI == ABI_DARWIN
9703 || (*targetm.binds_local_p) (decl))
9704 {
9705 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
9706
9707 if (!lookup_attribute ("longcall", attr_list)
9708 || lookup_attribute ("shortcall", attr_list))
9709 return true;
9710 }
9711 }
9712 return false;
9713 }
9714
9715 static int
9716 rs6000_ra_ever_killed ()
9717 {
9718 rtx top;
9719 rtx reg;
9720 rtx insn;
9721
9722 /* Irritatingly, there are two kinds of thunks -- those created with
9723 TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
9724 through the regular part of the compiler. This is a very hacky
9725 way to tell them apart. */
9726 if (current_function_is_thunk && !no_new_pseudos)
9727 return 0;
9728
9729 /* regs_ever_live has LR marked as used if any sibcalls are present,
9730 but this should not force saving and restoring in the
9731 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
9732 clobbers LR, so that is inappropriate. */
9733
9734 /* Also, the prologue can generate a store into LR that
9735 doesn't really count, like this:
9736
9737 move LR->R0
9738 bcl to set PIC register
9739 move LR->R31
9740 move R0->LR
9741
9742 When we're called from the epilogue, we need to avoid counting
9743 this as a store. */
9744
9745 push_topmost_sequence ();
9746 top = get_insns ();
9747 pop_topmost_sequence ();
9748 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
9749
9750 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
9751 {
9752 if (INSN_P (insn))
9753 {
9754 if (FIND_REG_INC_NOTE (insn, reg))
9755 return 1;
9756 else if (GET_CODE (insn) == CALL_INSN
9757 && !SIBLING_CALL_P (insn))
9758 return 1;
9759 else if (set_of (reg, insn) != NULL_RTX
9760 && !prologue_epilogue_contains (insn))
9761 return 1;
9762 }
9763 }
9764 return 0;
9765 }
9766 \f
9767 /* Add a REG_MAYBE_DEAD note to the insn. */
9768 static void
9769 rs6000_maybe_dead (insn)
9770 rtx insn;
9771 {
9772 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
9773 const0_rtx,
9774 REG_NOTES (insn));
9775 }
9776
/* Emit instructions needed to load the TOC register.
   This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
   a constant pool; or for SVR4 -fpic.  FROMPROLOG is nonzero when we
   are called while emitting the prologue; in that case hard registers
   (LR, r0) are used as temporaries instead of fresh pseudos, and the
   emitted insns are tagged REG_MAYBE_DEAD so flow may delete them if
   the TOC turns out to be unused.  */

void
rs6000_emit_load_toc_table (fromprolog)
     int fromprolog;
{
  rtx dest, insn;
  dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);

  /* SVR4 -fpic: a single load_toc_v4_pic_si does the job.  */
  if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
    {
      rtx temp = (fromprolog
		  ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		  : gen_reg_rtx (Pmode));
      insn = emit_insn (gen_load_toc_v4_pic_si (temp));
      if (fromprolog)
	rs6000_maybe_dead (insn);
      insn = emit_move_insn (dest, temp);
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  /* SVR4 -fPIC: derive the TOC address from the program counter
     obtained via a bcl, plus a label-difference constant.  */
  else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
    {
      char buf[30];
      rtx tempLR = (fromprolog
		    ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		    : gen_reg_rtx (Pmode));
      rtx temp0 = (fromprolog
		   ? gen_rtx_REG (Pmode, 0)
		   : gen_reg_rtx (Pmode));
      rtx symF;

      /* possibly create the toc section */
      if (! toc_initialized)
	{
	  toc_section ();
	  function_section (current_function_decl);
	}

      if (fromprolog)
	{
	  rtx symL;

	  /* In the prologue, use the per-function LCF/LCL label pair
	     so the load_toc_v4_PIC_1/2 patterns can compute the TOC
	     base relative to the bcl return point.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
	  symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
							       symF)));
	  rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
							       symL,
							       symF)));
	}
      else
	{
	  /* Outside the prologue (e.g. when reloading the TOC), use a
	     fresh LCG label and load the TOC offset from memory.  */
	  rtx tocsym;
	  static int reload_toc_labelno = 0;

	  tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
	  emit_move_insn (dest, tempLR);
	  emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
	}
      /* Final TOC pointer = PC-relative base + offset.  */
      insn = emit_insn (gen_addsi3 (dest, temp0, dest));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
    {
      /* This is for AIX code running in non-PIC ELF32.  Load the TOC
	 address as an absolute high/low pair.  */
      char buf[30];
      rtx realsym;
      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
      realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

      insn = emit_insn (gen_elf_high (dest, realsym));
      if (fromprolog)
	rs6000_maybe_dead (insn);
      insn = emit_insn (gen_elf_low (dest, dest, realsym));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else if (DEFAULT_ABI == ABI_AIX)
    {
      /* On AIX proper, reload r2 from the TOC save slot in the stack
	 frame (see the load_toc_aix_* patterns).  */
      if (TARGET_32BIT)
	insn = emit_insn (gen_load_toc_aix_si (dest));
      else
	insn = emit_insn (gen_load_toc_aix_di (dest));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else
    abort ();
}
9880
/* Return the alias set used for TOC references, creating it lazily on
   first use.  */

int
get_TOC_alias_set ()
{
  static int set = -1;

  if (set == -1)
    set = new_alias_set ();

  return set;
}
9889
9890 /* This retuns nonzero if the current function uses the TOC. This is
9891 determined by the presence of (unspec ... 7), which is generated by
9892 the various load_toc_* patterns. */
9893
9894 int
9895 uses_TOC ()
9896 {
9897 rtx insn;
9898
9899 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9900 if (INSN_P (insn))
9901 {
9902 rtx pat = PATTERN (insn);
9903 int i;
9904
9905 if (GET_CODE (pat) == PARALLEL)
9906 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
9907 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
9908 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
9909 return 1;
9910 }
9911 return 0;
9912 }
9913
9914 rtx
9915 create_TOC_reference (symbol)
9916 rtx symbol;
9917 {
9918 return gen_rtx_PLUS (Pmode,
9919 gen_rtx_REG (Pmode, TOC_REGISTER),
9920 gen_rtx_CONST (Pmode,
9921 gen_rtx_MINUS (Pmode, symbol,
9922 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
9923 }
9924
9925 #if TARGET_AIX
9926 /* __throw will restore its own return address to be the same as the
9927 return address of the function that the throw is being made to.
9928 This is unfortunate, because we want to check the original
9929 return address to see if we need to restore the TOC.
9930 So we have to squirrel it away here.
9931 This is used only in compiling __throw and __rethrow.
9932
9933 Most of this code should be removed by CSE. */
9934 static rtx insn_after_throw;
9935
9936 /* This does the saving... */
9937 void
9938 rs6000_aix_emit_builtin_unwind_init ()
9939 {
9940 rtx mem;
9941 rtx stack_top = gen_reg_rtx (Pmode);
9942 rtx opcode_addr = gen_reg_rtx (Pmode);
9943
9944 insn_after_throw = gen_reg_rtx (SImode);
9945
9946 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
9947 emit_move_insn (stack_top, mem);
9948
9949 mem = gen_rtx_MEM (Pmode,
9950 gen_rtx_PLUS (Pmode, stack_top,
9951 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9952 emit_move_insn (opcode_addr, mem);
9953 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
9954 }
9955
/* Emit insns to _restore_ the TOC register, at runtime (specifically
   in _eh.o).  Only used on AIX.

   The idea is that on AIX, function calls look like this:
	bl  somefunction-trampoline
	lwz r2,20(sp)

   and later,
	somefunction-trampoline:
	stw r2,20(sp)
	 ... load function address in the count register ...
	bctr
   or like this, if the linker determines that this is not a cross-module call
   and so the TOC need not be restored:
	bl somefunction
	nop
   or like this, if the compiler could determine that this is not a
   cross-module call:
	bl somefunction
   now, the tricky bit here is that register 2 is saved and restored
   by the _linker_, so we can't readily generate debugging information
   for it.  So we need to go back up the call chain looking at the
   insns at return addresses to see which calls saved the TOC register
   and so see where it gets restored from.

   Oh, and all this gets done in RTL inside the eh_epilogue pattern,
   just before the actual epilogue.

   On the bright side, this incurs no space or time overhead unless an
   exception is thrown, except for the extra code in libgcc.a.

   The parameter STACKSIZE is a register containing (at runtime)
   the amount to be popped off the stack in addition to the stack frame
   of this routine (which will be __throw or __rethrow, and so is
   guaranteed to have a stack frame).  */

void
rs6000_emit_eh_toc_restore (stacksize)
     rtx stacksize;
{
  rtx top_of_stack;
  rtx bottom_of_stack = gen_reg_rtx (Pmode);
  rtx tocompare = gen_reg_rtx (SImode);
  rtx opcode = gen_reg_rtx (SImode);
  rtx opcode_addr = gen_reg_rtx (Pmode);
  rtx mem;
  rtx loop_start = gen_label_rtx ();
  rtx no_toc_restore_needed = gen_label_rtx ();
  rtx loop_exit = gen_label_rtx ();

  /* bottom_of_stack = back chain of the current frame.  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  /* The walk up the call chain stops once STACKSIZE bytes have been
     accounted for.  */
  top_of_stack = expand_binop (Pmode, add_optab,
			       bottom_of_stack, stacksize,
			       NULL_RTX, 1, OPTAB_WIDEN);

  /* The encoding of the TOC-restore instruction that follows a
     cross-module call: "lwz r2,20(r1)" (32-bit) or "ld r2,40(r1)"
     (64-bit).  */
  emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
					   : 0xE8410028, SImode));

  /* rs6000_aix_emit_builtin_unwind_init must have been emitted first
     to capture the insn at the original return address.  */
  if (insn_after_throw == NULL_RTX)
    abort ();
  emit_move_insn (opcode, insn_after_throw);

  emit_note (NULL, NOTE_INSN_LOOP_BEG);
  emit_label (loop_start);

  /* If the insn at this frame's return address is not the TOC-restore
     opcode, the call did not save r2 -- skip the reload.  */
  do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
			   SImode, NULL_RTX, NULL_RTX,
			   no_toc_restore_needed);

  /* Reload r2 from its save slot, five pointer-words above the back
     chain.  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (5 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (gen_rtx_REG (Pmode, 2), mem);

  /* Stop once the walk has consumed the whole STACKSIZE region.  */
  emit_label (no_toc_restore_needed);
  do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
			   Pmode, NULL_RTX, NULL_RTX,
			   loop_exit);

  /* Advance to the next outer frame via its back chain.  */
  mem = gen_rtx_MEM (Pmode, bottom_of_stack);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  /* Fetch the instruction word at that frame's return address for the
     next iteration's comparison.  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));

  emit_note (NULL, NOTE_INSN_LOOP_CONT);
  emit_jump (loop_start);
  emit_note (NULL, NOTE_INSN_LOOP_END);
  emit_label (loop_exit);
}
10053 #endif /* TARGET_AIX */
10054 \f
10055 /* This ties together stack memory (MEM with an alias set of
10056 rs6000_sr_alias_set) and the change to the stack pointer. */
10057
10058 static void
10059 rs6000_emit_stack_tie ()
10060 {
10061 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
10062
10063 set_mem_alias_set (mem, rs6000_sr_alias_set);
10064 emit_insn (gen_stack_tie (mem));
10065 }
10066
/* Emit the correct code for allocating stack space, as insns.
   If COPY_R12, make sure a copy of the old frame is left in r12.
   The generated code may use hard register 0 as a temporary.  */

static void
rs6000_emit_allocate_stack (size, copy_r12)
     HOST_WIDE_INT size;
     int copy_r12;
{
  rtx insn;
  rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx tmp_reg = gen_rtx_REG (Pmode, 0);
  rtx todec = GEN_INT (-size);

  /* For -fstack-limit, trap if the new stack pointer would fall below
     the limit.  */
  if (current_function_limit_stack)
    {
      if (REG_P (stack_limit_rtx)
	  && REGNO (stack_limit_rtx) > 1
	  && REGNO (stack_limit_rtx) <= 31)
	{
	  /* The limit lives in a GPR: compute limit+size into r0 and
	     trap when sp < r0 (unsigned).  */
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size))
		     : gen_adddi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size)));

	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
	       && TARGET_32BIT
	       && DEFAULT_ABI == ABI_V4)
	{
	  /* The limit is a symbol: materialize symbol+size via an
	     elf_high/elf_low pair, then trap as above.  */
	  rtx toload = gen_rtx_CONST (VOIDmode,
				      gen_rtx_PLUS (Pmode,
						    stack_limit_rtx,
						    GEN_INT (size)));

	  emit_insn (gen_elf_high (tmp_reg, toload));
	  emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else
	warning ("stack limit expression is not supported");
    }

  /* Keep the old stack pointer in r12 when the caller asked for it,
     or when we must store the back chain by hand below.  */
  if (copy_r12 || ! TARGET_UPDATE)
    emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);

  if (TARGET_UPDATE)
    {
      if (size > 32767)
	{
	  /* The decrement does not fit in a 16-bit immediate; load it
	     into r0 and split the move now.  */
	  /* Need a note here so that try_split doesn't get confused.  */
	  if (get_last_insn() == NULL_RTX)
	    emit_note (0, NOTE_INSN_DELETED);
	  insn = emit_move_insn (tmp_reg, todec);
	  try_split (PATTERN (insn), insn, 0);
	  todec = tmp_reg;
	}

      /* Single update-form store: decrement sp and store the back
	 chain in one insn.  */
      insn = emit_insn (TARGET_32BIT
			? gen_movsi_update (stack_reg, stack_reg,
					    todec, stack_reg)
			: gen_movdi_update (stack_reg, stack_reg,
					    todec, stack_reg));
    }
  else
    {
      /* No update forms: decrement sp, then store the old sp (saved
	 in r12 above) as the back chain.  */
      insn = emit_insn (TARGET_32BIT
			? gen_addsi3 (stack_reg, stack_reg, todec)
			: gen_adddi3 (stack_reg, stack_reg, todec));
      emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
		      gen_rtx_REG (Pmode, 12));
    }

  /* Record the stack adjustment for unwind/frame info.  */
  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) =
    gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
		       gen_rtx_SET (VOIDmode, stack_reg,
				    gen_rtx_PLUS (Pmode, stack_reg,
						  GEN_INT (-size))),
		       REG_NOTES (insn));
}
10154
10155 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
10156 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
10157 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
10158 deduce these equivalences by itself so it wasn't necessary to hold
10159 its hand so much. */
10160
10161 static void
10162 rs6000_frame_related (insn, reg, val, reg2, rreg)
10163 rtx insn;
10164 rtx reg;
10165 HOST_WIDE_INT val;
10166 rtx reg2;
10167 rtx rreg;
10168 {
10169 rtx real, temp;
10170
10171 /* copy_rtx will not make unique copies of registers, so we need to
10172 ensure we don't have unwanted sharing here. */
10173 if (reg == reg2)
10174 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10175
10176 if (reg == rreg)
10177 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10178
10179 real = copy_rtx (PATTERN (insn));
10180
10181 if (reg2 != NULL_RTX)
10182 real = replace_rtx (real, reg2, rreg);
10183
10184 real = replace_rtx (real, reg,
10185 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
10186 STACK_POINTER_REGNUM),
10187 GEN_INT (val)));
10188
10189 /* We expect that 'real' is either a SET or a PARALLEL containing
10190 SETs (and possibly other stuff). In a PARALLEL, all the SETs
10191 are important so they all have to be marked RTX_FRAME_RELATED_P. */
10192
10193 if (GET_CODE (real) == SET)
10194 {
10195 rtx set = real;
10196
10197 temp = simplify_rtx (SET_SRC (set));
10198 if (temp)
10199 SET_SRC (set) = temp;
10200 temp = simplify_rtx (SET_DEST (set));
10201 if (temp)
10202 SET_DEST (set) = temp;
10203 if (GET_CODE (SET_DEST (set)) == MEM)
10204 {
10205 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10206 if (temp)
10207 XEXP (SET_DEST (set), 0) = temp;
10208 }
10209 }
10210 else if (GET_CODE (real) == PARALLEL)
10211 {
10212 int i;
10213 for (i = 0; i < XVECLEN (real, 0); i++)
10214 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
10215 {
10216 rtx set = XVECEXP (real, 0, i);
10217
10218 temp = simplify_rtx (SET_SRC (set));
10219 if (temp)
10220 SET_SRC (set) = temp;
10221 temp = simplify_rtx (SET_DEST (set));
10222 if (temp)
10223 SET_DEST (set) = temp;
10224 if (GET_CODE (SET_DEST (set)) == MEM)
10225 {
10226 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10227 if (temp)
10228 XEXP (SET_DEST (set), 0) = temp;
10229 }
10230 RTX_FRAME_RELATED_P (set) = 1;
10231 }
10232 }
10233 else
10234 abort ();
10235
10236 if (TARGET_SPE)
10237 real = spe_synthesize_frame_save (real);
10238
10239 RTX_FRAME_RELATED_P (insn) = 1;
10240 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10241 real,
10242 REG_NOTES (insn));
10243 }
10244
/* Given an SPE frame note, return a PARALLEL of SETs with the
   original note, plus a synthetic register save.  REAL is returned
   unchanged unless it is a SET whose source has V2SImode (i.e. an SPE
   64-bit register save).  */

static rtx
spe_synthesize_frame_save (real)
     rtx real;
{
  rtx synth, offset, reg, real2;

  /* Only 64-bit SPE register stores need the synthetic companion.  */
  if (GET_CODE (real) != SET
      || GET_MODE (SET_SRC (real)) != V2SImode)
    return real;

  /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
     frame related note.  The parallel contains a set of the register
     being saved, and another set to a synthetic register (n+1200).
     This is so we can differentiate between 64-bit and 32-bit saves.
     Words cannot describe this nastiness.  */

  if (GET_CODE (SET_DEST (real)) != MEM
      || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
      || GET_CODE (SET_SRC (real)) != REG)
    abort ();

  /* Transform:
       (set (mem (plus (reg x) (const y)))
            (reg z))
     into:
       (set (mem (plus (reg x) (const y+4)))
            (reg z+1200))
  */

  /* real2: the SImode view of the low word of the store.  */
  real2 = copy_rtx (real);
  PUT_MODE (SET_DEST (real2), SImode);
  reg = SET_SRC (real2);
  real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
  synth = copy_rtx (real2);

  /* On big-endian the low word lives 4 bytes above the save slot.  */
  if (BYTES_BIG_ENDIAN)
    {
      offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
      real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
    }

  reg = SET_SRC (synth);

  /* synth: the same store expressed against the shadow register
     REGNO+1200, marking this as the upper half of a 64-bit save.  */
  synth = replace_rtx (synth, reg,
		       gen_rtx_REG (SImode, REGNO (reg) + 1200));

  offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
  synth = replace_rtx (synth, offset,
		       GEN_INT (INTVAL (offset)
				+ (BYTES_BIG_ENDIAN ? 0 : 4)));

  RTX_FRAME_RELATED_P (synth) = 1;
  RTX_FRAME_RELATED_P (real2) = 1;
  if (BYTES_BIG_ENDIAN)
    real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
  else
    real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));

  return real;
}
10308
10309 /* Returns an insn that has a vrsave set operation with the
10310 appropriate CLOBBERs. */
10311
10312 static rtx
10313 generate_set_vrsave (reg, info, epiloguep)
10314 rtx reg;
10315 rs6000_stack_t *info;
10316 int epiloguep;
10317 {
10318 int nclobs, i;
10319 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
10320 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
10321
10322 clobs[0]
10323 = gen_rtx_SET (VOIDmode,
10324 vrsave,
10325 gen_rtx_UNSPEC_VOLATILE (SImode,
10326 gen_rtvec (2, reg, vrsave),
10327 30));
10328
10329 nclobs = 1;
10330
10331 /* We need to clobber the registers in the mask so the scheduler
10332 does not move sets to VRSAVE before sets of AltiVec registers.
10333
10334 However, if the function receives nonlocal gotos, reload will set
10335 all call saved registers live. We will end up with:
10336
10337 (set (reg 999) (mem))
10338 (parallel [ (set (reg vrsave) (unspec blah))
10339 (clobber (reg 999))])
10340
10341 The clobber will cause the store into reg 999 to be dead, and
10342 flow will attempt to delete an epilogue insn. In this case, we
10343 need an unspec use/set of the register. */
10344
10345 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
10346 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
10347 {
10348 if (!epiloguep || call_used_regs [i])
10349 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
10350 gen_rtx_REG (V4SImode, i));
10351 else
10352 {
10353 rtx reg = gen_rtx_REG (V4SImode, i);
10354
10355 clobs[nclobs++]
10356 = gen_rtx_SET (VOIDmode,
10357 reg,
10358 gen_rtx_UNSPEC (V4SImode,
10359 gen_rtvec (1, reg), 27));
10360 }
10361 }
10362
10363 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
10364
10365 for (i = 0; i < nclobs; ++i)
10366 XVECEXP (insn, 0, i) = clobs[i];
10367
10368 return insn;
10369 }
10370
10371 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
10372 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
10373
10374 static void
10375 emit_frame_save (frame_reg, frame_ptr, mode, regno, offset, total_size)
10376 rtx frame_reg;
10377 rtx frame_ptr;
10378 enum machine_mode mode;
10379 unsigned int regno;
10380 int offset;
10381 int total_size;
10382 {
10383 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
10384 rtx replacea, replaceb;
10385
10386 int_rtx = GEN_INT (offset);
10387
10388 /* Some cases that need register indexed addressing. */
10389 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
10390 || (TARGET_SPE_ABI
10391 && SPE_VECTOR_MODE (mode)
10392 && !SPE_CONST_OFFSET_OK (offset)))
10393 {
10394 /* Whomever calls us must make sure r11 is available in the
10395 flow path of instructions in the prologue. */
10396 offset_rtx = gen_rtx_REG (Pmode, 11);
10397 emit_move_insn (offset_rtx, int_rtx);
10398
10399 replacea = offset_rtx;
10400 replaceb = int_rtx;
10401 }
10402 else
10403 {
10404 offset_rtx = int_rtx;
10405 replacea = NULL_RTX;
10406 replaceb = NULL_RTX;
10407 }
10408
10409 reg = gen_rtx_REG (mode, regno);
10410 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
10411 mem = gen_rtx_MEM (mode, addr);
10412 set_mem_alias_set (mem, rs6000_sr_alias_set);
10413
10414 insn = emit_move_insn (mem, reg);
10415
10416 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
10417 }
10418
10419 /* Emit an offset memory reference suitable for a frame store, while
10420 converting to a valid addressing mode. */
10421
10422 static rtx
10423 gen_frame_mem_offset (mode, reg, offset)
10424 enum machine_mode mode;
10425 rtx reg;
10426 int offset;
10427 {
10428 rtx int_rtx, offset_rtx;
10429
10430 int_rtx = GEN_INT (offset);
10431
10432 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
10433 {
10434 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10435 emit_move_insn (offset_rtx, int_rtx);
10436 }
10437 else
10438 offset_rtx = int_rtx;
10439
10440 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
10441 }
10442
10443 /* Emit function prologue as insns. */
10444
10445 void
10446 rs6000_emit_prologue ()
10447 {
10448 rs6000_stack_t *info = rs6000_stack_info ();
10449 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
10450 int reg_size = TARGET_POWERPC64 ? 8 : 4;
10451 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
10452 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
10453 rtx frame_reg_rtx = sp_reg_rtx;
10454 rtx cr_save_rtx = NULL;
10455 rtx insn;
10456 int saving_FPRs_inline;
10457 int using_store_multiple;
10458 HOST_WIDE_INT sp_offset = 0;
10459
10460 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
10461 {
10462 reg_mode = V2SImode;
10463 reg_size = 8;
10464 }
10465
10466 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
10467 && (!TARGET_SPE_ABI
10468 || info->spe_64bit_regs_used == 0)
10469 && info->first_gp_reg_save < 31);
10470 saving_FPRs_inline = (info->first_fp_reg_save == 64
10471 || FP_SAVE_INLINE (info->first_fp_reg_save));
10472
10473 /* For V.4, update stack before we do any saving and set back pointer. */
10474 if (info->push_p && DEFAULT_ABI == ABI_V4)
10475 {
10476 if (info->total_size < 32767)
10477 sp_offset = info->total_size;
10478 else
10479 frame_reg_rtx = frame_ptr_rtx;
10480 rs6000_emit_allocate_stack (info->total_size,
10481 (frame_reg_rtx != sp_reg_rtx
10482 && (info->cr_save_p
10483 || info->lr_save_p
10484 || info->first_fp_reg_save < 64
10485 || info->first_gp_reg_save < 32
10486 )));
10487 if (frame_reg_rtx != sp_reg_rtx)
10488 rs6000_emit_stack_tie ();
10489 }
10490
10491 /* Save AltiVec registers if needed. */
10492 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
10493 {
10494 int i;
10495
10496 /* There should be a non inline version of this, for when we
10497 are saving lots of vector registers. */
10498 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
10499 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
10500 {
10501 rtx areg, savereg, mem;
10502 int offset;
10503
10504 offset = info->altivec_save_offset + sp_offset
10505 + 16 * (i - info->first_altivec_reg_save);
10506
10507 savereg = gen_rtx_REG (V4SImode, i);
10508
10509 areg = gen_rtx_REG (Pmode, 0);
10510 emit_move_insn (areg, GEN_INT (offset));
10511
10512 /* AltiVec addressing mode is [reg+reg]. */
10513 mem = gen_rtx_MEM (V4SImode,
10514 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
10515
10516 set_mem_alias_set (mem, rs6000_sr_alias_set);
10517
10518 insn = emit_move_insn (mem, savereg);
10519
10520 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10521 areg, GEN_INT (offset));
10522 }
10523 }
10524
10525 /* VRSAVE is a bit vector representing which AltiVec registers
10526 are used. The OS uses this to determine which vector
10527 registers to save on a context switch. We need to save
10528 VRSAVE on the stack frame, add whatever AltiVec registers we
10529 used in this function, and do the corresponding magic in the
10530 epilogue. */
10531
10532 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
10533 {
10534 rtx reg, mem, vrsave;
10535 int offset;
10536
10537 /* Get VRSAVE onto a GPR. */
10538 reg = gen_rtx_REG (SImode, 12);
10539 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
10540 if (TARGET_MACHO)
10541 emit_insn (gen_get_vrsave_internal (reg));
10542 else
10543 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
10544
10545 /* Save VRSAVE. */
10546 offset = info->vrsave_save_offset + sp_offset;
10547 mem
10548 = gen_rtx_MEM (SImode,
10549 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
10550 set_mem_alias_set (mem, rs6000_sr_alias_set);
10551 insn = emit_move_insn (mem, reg);
10552
10553 /* Include the registers in the mask. */
10554 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
10555
10556 insn = emit_insn (generate_set_vrsave (reg, info, 0));
10557 }
10558
10559 /* If we use the link register, get it into r0. */
10560 if (info->lr_save_p)
10561 emit_move_insn (gen_rtx_REG (Pmode, 0),
10562 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10563
10564 /* If we need to save CR, put it into r12. */
10565 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
10566 {
10567 cr_save_rtx = gen_rtx_REG (SImode, 12);
10568 emit_insn (gen_movesi_from_cr (cr_save_rtx));
10569 }
10570
10571 /* Do any required saving of fpr's. If only one or two to save, do
10572 it ourselves. Otherwise, call function. */
10573 if (saving_FPRs_inline)
10574 {
10575 int i;
10576 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
10577 if ((regs_ever_live[info->first_fp_reg_save+i]
10578 && ! call_used_regs[info->first_fp_reg_save+i]))
10579 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
10580 info->first_fp_reg_save + i,
10581 info->fp_save_offset + sp_offset + 8 * i,
10582 info->total_size);
10583 }
10584 else if (info->first_fp_reg_save != 64)
10585 {
10586 int i;
10587 char rname[30];
10588 const char *alloc_rname;
10589 rtvec p;
10590 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
10591
10592 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
10593 gen_rtx_REG (Pmode,
10594 LINK_REGISTER_REGNUM));
10595 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
10596 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
10597 alloc_rname = ggc_strdup (rname);
10598 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
10599 gen_rtx_SYMBOL_REF (Pmode,
10600 alloc_rname));
10601 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
10602 {
10603 rtx addr, reg, mem;
10604 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
10605 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10606 GEN_INT (info->fp_save_offset
10607 + sp_offset + 8*i));
10608 mem = gen_rtx_MEM (DFmode, addr);
10609 set_mem_alias_set (mem, rs6000_sr_alias_set);
10610
10611 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
10612 }
10613 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
10614 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10615 NULL_RTX, NULL_RTX);
10616 }
10617
10618 /* Save GPRs. This is done as a PARALLEL if we are using
10619 the store-multiple instructions. */
10620 if (using_store_multiple)
10621 {
10622 rtvec p;
10623 int i;
10624 p = rtvec_alloc (32 - info->first_gp_reg_save);
10625 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10626 {
10627 rtx addr, reg, mem;
10628 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
10629 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10630 GEN_INT (info->gp_save_offset
10631 + sp_offset
10632 + reg_size * i));
10633 mem = gen_rtx_MEM (reg_mode, addr);
10634 set_mem_alias_set (mem, rs6000_sr_alias_set);
10635
10636 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
10637 }
10638 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
10639 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10640 NULL_RTX, NULL_RTX);
10641 }
10642 else
10643 {
10644 int i;
10645 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10646 if ((regs_ever_live[info->first_gp_reg_save+i]
10647 && ! call_used_regs[info->first_gp_reg_save+i])
10648 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
10649 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
10650 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
10651 {
10652 rtx addr, reg, mem;
10653 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
10654
10655 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
10656 {
10657 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
10658 rtx b;
10659
10660 if (!SPE_CONST_OFFSET_OK (offset))
10661 {
10662 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10663 emit_move_insn (b, GEN_INT (offset));
10664 }
10665 else
10666 b = GEN_INT (offset);
10667
10668 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
10669 mem = gen_rtx_MEM (V2SImode, addr);
10670 set_mem_alias_set (mem, rs6000_sr_alias_set);
10671 insn = emit_move_insn (mem, reg);
10672
10673 if (GET_CODE (b) == CONST_INT)
10674 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10675 NULL_RTX, NULL_RTX);
10676 else
10677 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10678 b, GEN_INT (offset));
10679 }
10680 else
10681 {
10682 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10683 GEN_INT (info->gp_save_offset
10684 + sp_offset
10685 + reg_size * i));
10686 mem = gen_rtx_MEM (reg_mode, addr);
10687 set_mem_alias_set (mem, rs6000_sr_alias_set);
10688
10689 insn = emit_move_insn (mem, reg);
10690 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10691 NULL_RTX, NULL_RTX);
10692 }
10693 }
10694 }
10695
10696 /* ??? There's no need to emit actual instructions here, but it's the
10697 easiest way to get the frame unwind information emitted. */
10698 if (current_function_calls_eh_return)
10699 {
10700 unsigned int i, regno;
10701
10702 for (i = 0; ; ++i)
10703 {
10704 regno = EH_RETURN_DATA_REGNO (i);
10705 if (regno == INVALID_REGNUM)
10706 break;
10707
10708 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
10709 info->ehrd_offset + sp_offset
10710 + reg_size * (int) i,
10711 info->total_size);
10712 }
10713 }
10714
10715 /* Save lr if we used it. */
10716 if (info->lr_save_p)
10717 {
10718 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10719 GEN_INT (info->lr_save_offset + sp_offset));
10720 rtx reg = gen_rtx_REG (Pmode, 0);
10721 rtx mem = gen_rtx_MEM (Pmode, addr);
10722 /* This should not be of rs6000_sr_alias_set, because of
10723 __builtin_return_address. */
10724
10725 insn = emit_move_insn (mem, reg);
10726 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10727 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10728 }
10729
10730 /* Save CR if we use any that must be preserved. */
10731 if (info->cr_save_p)
10732 {
10733 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10734 GEN_INT (info->cr_save_offset + sp_offset));
10735 rtx mem = gen_rtx_MEM (SImode, addr);
10736
10737 set_mem_alias_set (mem, rs6000_sr_alias_set);
10738
10739 /* If r12 was used to hold the original sp, copy cr into r0 now
10740 that it's free. */
10741 if (REGNO (frame_reg_rtx) == 12)
10742 {
10743 cr_save_rtx = gen_rtx_REG (SImode, 0);
10744 emit_insn (gen_movesi_from_cr (cr_save_rtx));
10745 }
10746 insn = emit_move_insn (mem, cr_save_rtx);
10747
10748 /* Now, there's no way that dwarf2out_frame_debug_expr is going
10749 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
10750 OK. All we have to do is specify that _one_ condition code
10751 register is saved in this stack slot. The thrower's epilogue
10752 will then restore all the call-saved registers.
10753 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
10754 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10755 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
10756 }
10757
10758 /* Update stack and set back pointer unless this is V.4,
10759 for which it was done previously. */
10760 if (info->push_p && DEFAULT_ABI != ABI_V4)
10761 rs6000_emit_allocate_stack (info->total_size, FALSE);
10762
10763 /* Set frame pointer, if needed. */
10764 if (frame_pointer_needed)
10765 {
10766 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
10767 sp_reg_rtx);
10768 RTX_FRAME_RELATED_P (insn) = 1;
10769 }
10770
10771 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
10772 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
10773 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
10774 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
10775 {
10776 /* If emit_load_toc_table will use the link register, we need to save
10777 it. We use R11 for this purpose because emit_load_toc_table
10778 can use register 0. This allows us to use a plain 'blr' to return
10779 from the procedure more often. */
10780 int save_LR_around_toc_setup = (TARGET_ELF
10781 && DEFAULT_ABI != ABI_AIX
10782 && flag_pic
10783 && ! info->lr_save_p
10784 && EXIT_BLOCK_PTR->pred != NULL);
10785 if (save_LR_around_toc_setup)
10786 emit_move_insn (gen_rtx_REG (Pmode, 11),
10787 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10788
10789 rs6000_emit_load_toc_table (TRUE);
10790
10791 if (save_LR_around_toc_setup)
10792 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
10793 gen_rtx_REG (Pmode, 11));
10794 }
10795
10796 #if TARGET_MACHO
10797 if (DEFAULT_ABI == ABI_DARWIN
10798 && flag_pic && current_function_uses_pic_offset_table)
10799 {
10800 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
10801 const char *picbase = machopic_function_base_name ();
10802 rtx src = gen_rtx_SYMBOL_REF (Pmode, ggc_alloc_string (picbase, -1));
10803
10804 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));
10805
10806 rs6000_maybe_dead (
10807 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
10808 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
10809 }
10810 #endif
10811 }
10812
10813 /* Write function prologue. */
10814
10815 static void
10816 rs6000_output_function_prologue (file, size)
10817 FILE *file;
10818 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
10819 {
  /* Emit the textual (assembler-level) prologue for the current function:
     .extern directives for the out-of-line FPR save/restore helpers and
     the AIX common-mode libcalls, and — on targets where no define_insn
     prologue exists (! HAVE_prologue) — generate the prologue insns here
     and print them directly with final ().  */
10820 rs6000_stack_t *info = rs6000_stack_info ();
10821
10822 if (TARGET_DEBUG_STACK)
10823 debug_stack_info (info);
10824
10825 /* Write .extern for any function we will call to save and restore
10826 fp values.  Only needed when FPRs are saved out of line
10827 (first_fp_reg_save < 64 and FP_SAVE_INLINE says no).  */
10827 if (info->first_fp_reg_save < 64
10828 && !FP_SAVE_INLINE (info->first_fp_reg_save))
10829 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
10830 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
10831 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
10832 RESTORE_FP_SUFFIX);
10833
10834 /* Write .extern for AIX common mode routines, if needed.  */
10835 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
10836 {
10837 fputs ("\t.extern __mulh\n", file);
10838 fputs ("\t.extern __mull\n", file);
10839 fputs ("\t.extern __divss\n", file);
10840 fputs ("\t.extern __divus\n", file);
10841 fputs ("\t.extern __quoss\n", file);
10842 fputs ("\t.extern __quous\n", file);
  /* Remember that the .externs were emitted so they are written only
     once per output file.  */
10843 common_mode_defined = 1;
10844 }
10845
  /* No prologue define_insn: build the prologue RTL in a detached
     sequence and print it straight into the asm file.  */
10846 if (! HAVE_prologue)
10847 {
10848 start_sequence ();
10849
10850 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
10851 the "toplevel" insn chain.  */
10852 emit_note (0, NOTE_INSN_DELETED);
10853 rs6000_emit_prologue ();
10854 emit_note (0, NOTE_INSN_DELETED);
10855
10856 /* Expand INSN_ADDRESSES so final() doesn't crash.  Each insn is
    given a dummy address 4 bytes after the previous one.  */
10857 {
10858 rtx insn;
10859 unsigned addr = 0;
10860 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
10861 {
10862 INSN_ADDRESSES_NEW (insn, addr);
10863 addr += 4;
10864 }
10865 }
10866
10867 if (TARGET_DEBUG_STACK)
10868 debug_rtx_list (get_insns (), 100);
10869 final (get_insns (), file, FALSE, FALSE);
10870 end_sequence ();
10871 }
10872
  /* Presumably gives each function a unique number for its PIC base
     label — TODO confirm against the label-emission code.  */
10873 rs6000_pic_labelno++;
10874 }
10875
10876 /* Emit function epilogue as insns.
10877
10878 At present, dwarf2out_frame_debug_expr doesn't understand
10879 register restores, so we don't bother setting RTX_FRAME_RELATED_P
10880 anywhere in the epilogue. Most of the insns below would in any case
10881 need special notes to explain where r11 is in relation to the stack. */
10882
10883 void
10884 rs6000_emit_epilogue (sibcall)
10885 int sibcall;
10886 {
  /* Emit the function epilogue as RTL: restore AltiVec/VRSAVE, LR, CR,
     EH data registers, GPRs, FPRs, unwind the stack pointer, and (unless
     SIBCALL is nonzero) emit the return itself.  SIBCALL nonzero means
     the caller will emit the sibling-call jump, so no return is emitted
     and FPRs must be restored inline (see restoring_FPRs_inline).  */
10887 rs6000_stack_t *info;
10888 int restoring_FPRs_inline;
10889 int using_load_multiple;
10890 int using_mfcr_multiple;
10891 int use_backchain_to_restore_sp;
  /* sp_offset: distance from the current r1 to the base of the old
     frame, for the V.4 path that delays the stack-pointer update.  */
10892 int sp_offset = 0;
10893 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
  /* frame_reg_rtx: register through which saved values are addressed;
     starts as r1 and may be switched to r11 below.  */
10894 rtx frame_reg_rtx = sp_reg_rtx;
10895 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
10896 int reg_size = TARGET_POWERPC64 ? 8 : 4;
10897 int i;
10898
10899 info = rs6000_stack_info ();
10900
  /* SPE saves GPRs as 64-bit (V2SImode) quantities.  */
10901 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
10902 {
10903 reg_mode = V2SImode;
10904 reg_size = 8;
10905 }
10906
10907 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
10908 && (!TARGET_SPE_ABI
10909 || info->spe_64bit_regs_used == 0)
10910 && info->first_gp_reg_save < 31);
  /* FPRs must be restored inline for sibcalls and eh_return (the
     out-of-line restore function would return to our caller).  */
10911 restoring_FPRs_inline = (sibcall
10912 || current_function_calls_eh_return
10913 || info->first_fp_reg_save == 64
10914 || FP_SAVE_INLINE (info->first_fp_reg_save))
10915 use_backchain_to_restore_sp = (frame_pointer_needed
10916 || current_function_calls_alloca
10917 || info->total_size > 32767);
10918 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
10919 || rs6000_cpu == PROCESSOR_PPC603
10920 || rs6000_cpu == PROCESSOR_PPC750
10921 || optimize_size);
10922
10923 /* If we have a frame pointer, a call to alloca, or a large stack
10924 frame, restore the old stack pointer using the backchain.  Otherwise,
10925 we know what size to update it with.  */
10926 if (use_backchain_to_restore_sp)
10927 {
10928 /* Under V.4, don't reset the stack pointer until after we're done
10929 loading the saved registers.  */
10930 if (DEFAULT_ABI == ABI_V4)
10931 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
10932
10933 emit_move_insn (frame_reg_rtx,
10934 gen_rtx_MEM (Pmode, sp_reg_rtx));
10935
10936 }
10937 else if (info->push_p)
10938 {
10939 if (DEFAULT_ABI == ABI_V4)
10940 sp_offset = info->total_size;
10941 else
10942 {
10943 emit_insn (TARGET_32BIT
10944 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
10945 GEN_INT (info->total_size))
10946 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
10947 GEN_INT (info->total_size)));
10948 }
10949 }
10950
10951 /* Restore AltiVec registers if needed.  */
10952 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
10953 {
10954 int i;
10955
10956 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
10957 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
10958 {
10959 rtx addr, areg, mem;
10960
  /* r0 holds the frame offset; AltiVec loads need [reg+reg].  */
10961 areg = gen_rtx_REG (Pmode, 0);
10962 emit_move_insn
10963 (areg, GEN_INT (info->altivec_save_offset
10964 + sp_offset
10965 + 16 * (i - info->first_altivec_reg_save)));
10966
10967 /* AltiVec addressing mode is [reg+reg].  */
10968 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
10969 mem = gen_rtx_MEM (V4SImode, addr);
10970 set_mem_alias_set (mem, rs6000_sr_alias_set);
10971
10972 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
10973 }
10974 }
10975
10976 /* Restore VRSAVE if needed, via r12 as a scratch.  */
10977 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
10978 {
10979 rtx addr, mem, reg;
10980
10981 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10982 GEN_INT (info->vrsave_save_offset + sp_offset));
10983 mem = gen_rtx_MEM (SImode, addr);
10984 set_mem_alias_set (mem, rs6000_sr_alias_set);
10985 reg = gen_rtx_REG (SImode, 12);
10986 emit_move_insn (reg, mem);
10987
10988 emit_insn (generate_set_vrsave (reg, info, 1));
10989 }
10990
10991 /* Get the old lr if we saved it, staged through r0.  */
10992 if (info->lr_save_p)
10993 {
10994 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
10995 info->lr_save_offset + sp_offset);
10996
10997 set_mem_alias_set (mem, rs6000_sr_alias_set);
10998
10999 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
11000 }
11001
11002 /* Get the old cr if we saved it, staged through r12.  */
11003 if (info->cr_save_p)
11004 {
11005 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11006 GEN_INT (info->cr_save_offset + sp_offset));
11007 rtx mem = gen_rtx_MEM (SImode, addr);
11008
11009 set_mem_alias_set (mem, rs6000_sr_alias_set);
11010
11011 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
11012 }
11013
11014 /* Set LR here to try to overlap restores below.  */
11015 if (info->lr_save_p)
11016 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
11017 gen_rtx_REG (Pmode, 0));
11018
11019 /* Load exception handler data registers, if needed.  */
11020 if (current_function_calls_eh_return)
11021 {
11022 unsigned int i, regno;
11023
11024 for (i = 0; ; ++i)
11025 {
11026 rtx mem;
11027
11028 regno = EH_RETURN_DATA_REGNO (i);
11029 if (regno == INVALID_REGNUM)
11030 break;
11031
11032 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
11033 info->ehrd_offset + sp_offset
11034 + reg_size * (int) i);
11035 set_mem_alias_set (mem, rs6000_sr_alias_set);
11036
11037 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
11038 }
11039 }
11040
11041 /* Restore GPRs.  This is done as a PARALLEL if we are using
11042 the load-multiple instructions.  */
11043 if (using_load_multiple)
11044 {
11045 rtvec p;
11046 p = rtvec_alloc (32 - info->first_gp_reg_save);
11047 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11048 {
11049 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11050 GEN_INT (info->gp_save_offset
11051 + sp_offset
11052 + reg_size * i));
11053 rtx mem = gen_rtx_MEM (reg_mode, addr);
11054
11055 set_mem_alias_set (mem, rs6000_sr_alias_set);
11056
11057 RTVEC_ELT (p, i) =
11058 gen_rtx_SET (VOIDmode,
11059 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
11060 mem);
11061 }
11062 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11063 }
11064 else
  /* One load per live call-saved GPR; the PIC register is also
     reloaded when PIC is in use on V.4 or Darwin.  */
11065 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11066 if ((regs_ever_live[info->first_gp_reg_save+i]
11067 && ! call_used_regs[info->first_gp_reg_save+i])
11068 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
11069 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11070 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
11071 {
11072 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11073 GEN_INT (info->gp_save_offset
11074 + sp_offset
11075 + reg_size * i));
11076 rtx mem = gen_rtx_MEM (reg_mode, addr);
11077
11078 /* Restore 64-bit quantities for SPE.  */
11079 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11080 {
11081 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
11082 rtx b;
11083
  /* Offsets the SPE load form cannot encode go through a
     scratch register.  */
11084 if (!SPE_CONST_OFFSET_OK (offset))
11085 {
11086 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11087 emit_move_insn (b, GEN_INT (offset));
11088 }
11089 else
11090 b = GEN_INT (offset);
11091
11092 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
11093 mem = gen_rtx_MEM (V2SImode, addr);
11094 }
11095
11096 set_mem_alias_set (mem, rs6000_sr_alias_set);
11097
11098 emit_move_insn (gen_rtx_REG (reg_mode,
11099 info->first_gp_reg_save + i), mem);
11100 }
11101
11102 /* Restore fpr's if we need to do it without calling a function.
    (Otherwise the out-of-line restore is folded into the return
    PARALLEL at the bottom.)  */
11103 if (restoring_FPRs_inline)
11104 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11105 if ((regs_ever_live[info->first_fp_reg_save+i]
11106 && ! call_used_regs[info->first_fp_reg_save+i]))
11107 {
11108 rtx addr, mem;
11109 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11110 GEN_INT (info->fp_save_offset
11111 + sp_offset
11112 + 8 * i));
11113 mem = gen_rtx_MEM (DFmode, addr);
11114 set_mem_alias_set (mem, rs6000_sr_alias_set);
11115
11116 emit_move_insn (gen_rtx_REG (DFmode,
11117 info->first_fp_reg_save + i),
11118 mem);
11119 }
11120
11121 /* If we saved cr, restore it here.  Just those that were used.  */
11122 if (info->cr_save_p)
11123 {
11124 rtx r12_rtx = gen_rtx_REG (SImode, 12);
11125 int count = 0;
11126
11127 if (using_mfcr_multiple)
11128 {
11129 for (i = 0; i < 8; i++)
11130 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11131 count++;
11132 if (count == 0)
11133 abort ();
11134 }
11135
  /* On CPUs where a multi-field mtcrf is cheap, restore all live
     CR fields with a single PARALLEL.  */
11136 if (using_mfcr_multiple && count > 1)
11137 {
11138 rtvec p;
11139 int ndx;
11140
11141 p = rtvec_alloc (count);
11142
11143 ndx = 0;
11144 for (i = 0; i < 8; i++)
11145 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11146 {
11147 rtvec r = rtvec_alloc (2);
11148 RTVEC_ELT (r, 0) = r12_rtx;
  /* Field select mask for mtcrf: bit 7-i selects CR field i.  */
11149 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
11150 RTVEC_ELT (p, ndx) =
11151 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
  /* NOTE(review): unspec number 20 presumably matches the
     mtcrf pattern in rs6000.md — confirm against the .md file.  */
11152 gen_rtx_UNSPEC (CCmode, r, 20));
11153 ndx++;
11154 }
11155 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11156 if (ndx != count)
11157 abort ();
11158 }
11159 else
11160 for (i = 0; i < 8; i++)
11161 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11162 {
11163 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
11164 CR0_REGNO+i),
11165 r12_rtx));
11166 }
11167 }
11168
11169 /* If this is V.4, unwind the stack pointer after all of the loads
11170 have been done.  We need to emit a block here so that sched
11171 doesn't decide to move the sp change before the register restores
11172 (which may not have any obvious dependency on the stack).  This
11173 doesn't hurt performance, because there is no scheduling that can
11174 be done after this point.  */
11175 if (DEFAULT_ABI == ABI_V4)
11176 {
11177 if (frame_reg_rtx != sp_reg_rtx)
11178 rs6000_emit_stack_tie ();
11179
11180 if (use_backchain_to_restore_sp)
11181 {
11182 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
11183 }
11184 else if (sp_offset != 0)
11185 {
11186 emit_insn (TARGET_32BIT
11187 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
11188 GEN_INT (sp_offset))
11189 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
11190 GEN_INT (sp_offset)));
11191 }
11192 }
11193
  /* eh_return adjusts the stack by an extra, runtime-computed amount.  */
11194 if (current_function_calls_eh_return)
11195 {
11196 rtx sa = EH_RETURN_STACKADJ_RTX;
11197 emit_insn (TARGET_32BIT
11198 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
11199 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
11200 }
11201
  /* Emit the return, unless this is a sibcall (in which case the
     caller emits the jump).  The PARALLEL is (return) + (use LR),
     plus — when FPRs are restored out of line — a USE of the restore
     routine's symbol and the SETs describing the FPR loads it does.  */
11202 if (!sibcall)
11203 {
11204 rtvec p;
11205 if (! restoring_FPRs_inline)
11206 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
11207 else
11208 p = rtvec_alloc (2);
11209
11210 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
11211 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
11212 gen_rtx_REG (Pmode,
11213 LINK_REGISTER_REGNUM));
11214
11215 /* If we have to restore more than two FP registers, branch to the
11216 restore function.  It will return to our caller.  */
11217 if (! restoring_FPRs_inline)
11218 {
11219 int i;
11220 char rname[30];
11221 const char *alloc_rname;
11222
11223 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
11224 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
11225 alloc_rname = ggc_strdup (rname);
11226 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
11227 gen_rtx_SYMBOL_REF (Pmode,
11228 alloc_rname));
11229
11230 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11231 {
11232 rtx addr, mem;
11233 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
11234 GEN_INT (info->fp_save_offset + 8*i));
11235 mem = gen_rtx_MEM (DFmode, addr);
11236 set_mem_alias_set (mem, rs6000_sr_alias_set);
11237
11238 RTVEC_ELT (p, i+3) =
11239 gen_rtx_SET (VOIDmode,
11240 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
11241 mem);
11242 }
11243 }
11244
11245 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
11246 }
11247 }
11248
11249 /* Write function epilogue. */
11250
11251 static void
11252 rs6000_output_function_epilogue (file, size)
11253 FILE *file;
11254 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
11255 {
  /* Emit the textual epilogue.  Two jobs: (1) when there is no
     define_insn epilogue (! HAVE_epilogue), generate the epilogue RTL
     here and print it with final (); (2) on AIX, emit the traceback
     table that follows the function body (format per sys/debug.h).  */
11256 rs6000_stack_t *info = rs6000_stack_info ();
11257
11258 if (! HAVE_epilogue)
11259 {
11260 rtx insn = get_last_insn ();
11261 /* If the last insn was a BARRIER, we don't have to write anything except
11262 the trace table.  */
11263 if (GET_CODE (insn) == NOTE)
11264 insn = prev_nonnote_insn (insn);
11265 if (insn == 0 || GET_CODE (insn) != BARRIER)
11266 {
11267 /* This is slightly ugly, but at least we don't have two
11268 copies of the epilogue-emitting code.  */
11269 start_sequence ();
11270
11271 /* A NOTE_INSN_DELETED is supposed to be at the start
11272 and end of the "toplevel" insn chain.  */
11273 emit_note (0, NOTE_INSN_DELETED);
11274 rs6000_emit_epilogue (FALSE);
11275 emit_note (0, NOTE_INSN_DELETED);
11276
11277 /* Expand INSN_ADDRESSES so final() doesn't crash.  Dummy
    addresses, 4 bytes apart.  */
11278 {
11279 rtx insn;
11280 unsigned addr = 0;
11281 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
11282 {
11283 INSN_ADDRESSES_NEW (insn, addr);
11284 addr += 4;
11285 }
11286 }
11287
11288 if (TARGET_DEBUG_STACK)
11289 debug_rtx_list (get_insns (), 100);
11290 final (get_insns (), file, FALSE, FALSE);
11291 end_sequence ();
11292 }
11293 }
11294
11295 /* Output a traceback table here.  See /usr/include/sys/debug.h for info
11296 on its format.
11297
11298 We don't output a traceback table if -finhibit-size-directive was
11299 used.  The documentation for -finhibit-size-directive reads
11300 ``don't output a @code{.size} assembler directive, or anything
11301 else that would cause trouble if the function is split in the
11302 middle, and the two halves are placed at locations far apart in
11303 memory.''  The traceback table has this property, since it
11304 includes the offset from the start of the function to the
11305 traceback table itself.
11306
11307 System V.4 Powerpc's (and the embedded ABI derived from it) use a
11308 different traceback table.  */
11309 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
11310 && rs6000_traceback != traceback_none)
11311 {
11312 const char *fname = NULL;
11313 const char *language_string = lang_hooks.name;
11314 int fixed_parms = 0, float_parms = 0, parm_info = 0;
11315 int i;
  /* optional_tbtab: nonzero when the variable-length optional fields
     (name, parameter info, alloca register) are emitted too.  */
11316 int optional_tbtab;
11317
11318 if (rs6000_traceback == traceback_full)
11319 optional_tbtab = 1;
11320 else if (rs6000_traceback == traceback_part)
11321 optional_tbtab = 0;
11322 else
11323 optional_tbtab = !optimize_size && !TARGET_ELF;
11324
11325 if (optional_tbtab)
11326 {
11327 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
11328 while (*fname == '.') /* V.4 encodes . in the name */
11329 fname++;
11330
11331 /* Need label immediately before tbtab, so we can compute
11332 its offset from the function start.  */
11333 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
11334 ASM_OUTPUT_LABEL (file, fname);
11335 }
11336
11337 /* The .tbtab pseudo-op can only be used for the first eight
11338 expressions, since it can't handle the possibly variable
11339 length fields that follow.  However, if you omit the optional
11340 fields, the assembler outputs zeros for all optional fields
11341 anyways, giving each variable length field is minimum length
11342 (as defined in sys/debug.h).  Thus we can not use the .tbtab
11343 pseudo-op at all.  */
11344
11345 /* An all-zero word flags the start of the tbtab, for debuggers
11346 that have to find it by searching forward from the entry
11347 point or from the current pc.  */
11348 fputs ("\t.long 0\n", file);
11349
11350 /* Tbtab format type.  Use format type 0.  */
11351 fputs ("\t.byte 0,", file);
11352
11353 /* Language type.  Unfortunately, there doesn't seem to be any
11354 official way to get this info, so we use language_string.  C
11355 is 0.  C++ is 9.  No number defined for Obj-C, so use the
11356 value for C for now.  There is no official value for Java,
11357 although IBM appears to be using 13.  There is no official value
11358 for Chill, so we've chosen 44 pseudo-randomly.  */
11359 if (! strcmp (language_string, "GNU C")
11360 || ! strcmp (language_string, "GNU Objective-C"))
11361 i = 0;
11362 else if (! strcmp (language_string, "GNU F77"))
11363 i = 1;
11364 else if (! strcmp (language_string, "GNU Ada"))
11365 i = 3;
11366 else if (! strcmp (language_string, "GNU Pascal"))
11367 i = 2;
11368 else if (! strcmp (language_string, "GNU C++"))
11369 i = 9;
11370 else if (! strcmp (language_string, "GNU Java"))
11371 i = 13;
11372 else if (! strcmp (language_string, "GNU CHILL"))
11373 i = 44;
11374 else
11375 abort ();
11376 fprintf (file, "%d,", i);
11377
11378 /* 8 single bit fields: global linkage (not set for C extern linkage,
11379 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
11380 from start of procedure stored in tbtab, internal function, function
11381 has controlled storage, function has no toc, function uses fp,
11382 function logs/aborts fp operations.  */
11383 /* Assume that fp operations are used if any fp reg must be saved.  */
11384 fprintf (file, "%d,",
11385 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
11386
11387 /* 6 bitfields: function is interrupt handler, name present in
11388 proc table, function calls alloca, on condition directives
11389 (controls stack walks, 3 bits), saves condition reg, saves
11390 link reg.  */
11391 /* The `function calls alloca' bit seems to be set whenever reg 31 is
11392 set up as a frame pointer, even when there is no alloca call.  */
  /* NOTE(review): `optional_tbtab & frame_pointer_needed' uses bitwise
     AND as logical AND — relies on both values being exactly 0 or 1.  */
11393 fprintf (file, "%d,",
11394 ((optional_tbtab << 6)
11395 | ((optional_tbtab & frame_pointer_needed) << 5)
11396 | (info->cr_save_p << 1)
11397 | (info->lr_save_p)));
11398
11399 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
11400 (6 bits).  */
11401 fprintf (file, "%d,",
11402 (info->push_p << 7) | (64 - info->first_fp_reg_save));
11403
11404 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits).  */
11405 fprintf (file, "%d,", (32 - first_reg_to_save ()));
11406
11407 if (optional_tbtab)
11408 {
11409 /* Compute the parameter info from the function decl argument
11410 list.  */
11411 tree decl;
  /* Bits are consumed from bit 31 downward: 1 bit per fixed parm,
     2 bits per float parm (10 = single, 11 = double).  */
11412 int next_parm_info_bit = 31;
11413
11414 for (decl = DECL_ARGUMENTS (current_function_decl);
11415 decl; decl = TREE_CHAIN (decl))
11416 {
11417 rtx parameter = DECL_INCOMING_RTL (decl);
11418 enum machine_mode mode = GET_MODE (parameter);
11419
11420 if (GET_CODE (parameter) == REG)
11421 {
11422 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
11423 {
11424 int bits;
11425
11426 float_parms++;
11427
11428 if (mode == SFmode)
11429 bits = 0x2;
11430 else if (mode == DFmode || mode == TFmode)
11431 bits = 0x3;
11432 else
11433 abort ();
11434
11435 /* If only one bit will fit, don't or in this entry.  */
11436 if (next_parm_info_bit > 0)
11437 parm_info |= (bits << (next_parm_info_bit - 1));
11438 next_parm_info_bit -= 2;
11439 }
11440 else
11441 {
11442 fixed_parms += ((GET_MODE_SIZE (mode)
11443 + (UNITS_PER_WORD - 1))
11444 / UNITS_PER_WORD);
11445 next_parm_info_bit -= 1;
11446 }
11447 }
11448 }
11449 }
11450
11451 /* Number of fixed point parameters.  */
11452 /* This is actually the number of words of fixed point parameters; thus
11453 an 8 byte struct counts as 2; and thus the maximum value is 8.  */
11454 fprintf (file, "%d,", fixed_parms);
11455
11456 /* 2 bitfields: number of floating point parameters (7 bits), parameters
11457 all on stack.  */
11458 /* This is actually the number of fp registers that hold parameters;
11459 and thus the maximum value is 13.  */
11460 /* Set parameters on stack bit if parameters are not in their original
11461 registers, regardless of whether they are on the stack?  Xlc
11462 seems to set the bit when not optimizing.  */
11463 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
11464
11465 if (! optional_tbtab)
11466 return;
11467
11468 /* Optional fields follow.  Some are variable length.  */
11469
11470 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
11471 11 double float.  */
11472 /* There is an entry for each parameter in a register, in the order that
11473 they occur in the parameter list.  Any intervening arguments on the
11474 stack are ignored.  If the list overflows a long (max possible length
11475 34 bits) then completely leave off all elements that don't fit.  */
11476 /* Only emit this long if there was at least one parameter.  */
11477 if (fixed_parms || float_parms)
11478 fprintf (file, "\t.long %d\n", parm_info);
11479
11480 /* Offset from start of code to tb table, as LT<fname> - .  */
11481 fputs ("\t.long ", file);
11482 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
11483 #if TARGET_AIX
11484 RS6000_OUTPUT_BASENAME (file, fname);
11485 #else
11486 assemble_name (file, fname);
11487 #endif
11488 fputs ("-.", file);
11489 #if TARGET_AIX
11490 RS6000_OUTPUT_BASENAME (file, fname);
11491 #else
11492 assemble_name (file, fname);
11493 #endif
11494 putc ('\n', file);
11495
11496 /* Interrupt handler mask.  */
11497 /* Omit this long, since we never set the interrupt handler bit
11498 above.  */
11499
11500 /* Number of CTL (controlled storage) anchors.  */
11501 /* Omit this long, since the has_ctl bit is never set above.  */
11502
11503 /* Displacement into stack of each CTL anchor.  */
11504 /* Omit this list of longs, because there are no CTL anchors.  */
11505
11506 /* Length of function name.  */
11507 if (*fname == '*')
11508 ++fname;
11509 fprintf (file, "\t.short %d\n", (int) strlen (fname));
11510
11511 /* Function name.  */
11512 assemble_string (fname, strlen (fname));
11513
11514 /* Register for alloca automatic storage; this is always reg 31.
11515 Only emit this if the alloca bit was set above.  */
11516 if (frame_pointer_needed)
11517 fputs ("\t.byte 31\n", file);
11518
11519 fputs ("\t.align 2\n", file);
11520 }
11521 }
11522 \f
11523 /* A C compound statement that outputs the assembler code for a thunk
11524 function, used to implement C++ virtual function calls with
11525 multiple inheritance. The thunk acts as a wrapper around a virtual
11526 function, adjusting the implicit object parameter before handing
11527 control off to the real function.
11528
11529 First, emit code to add the integer DELTA to the location that
11530 contains the incoming first argument. Assume that this argument
11531 contains a pointer, and is the one used to pass the `this' pointer
11532 in C++. This is the incoming argument *before* the function
11533 prologue, e.g. `%o0' on a sparc. The addition must preserve the
11534 values of all other incoming arguments.
11535
11536 After the addition, emit code to jump to FUNCTION, which is a
11537 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
11538 not touch the return address. Hence returning from FUNCTION will
11539 return to whoever called the current `thunk'.
11540
11541 The effect must be as if FUNCTION had been called directly with the
11542 adjusted first argument. This macro is responsible for emitting
11543 all of the code for a thunk function; output_function_prologue()
11544 and output_function_epilogue() are not invoked.
11545
11546 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
11547 been extracted from it.) It might possibly be useful on some
11548 targets, but probably not.
11549
11550 If you do not define this macro, the target-independent code in the
11551 C++ frontend will generate a less efficient heavyweight thunk that
11552 calls FUNCTION instead of jumping to it. The generic approach does
11553 not support varargs. */
11554
/* Emit the assembly for an MI thunk: adjust the incoming `this'
   pointer by DELTA (and, when VCALL_OFFSET is nonzero, by the value
   stored at that offset in the vtable), then tail-call FUNCTION.
   See the long commentary above for the full contract.  */

static void
rs6000_output_mi_thunk (file, thunk_fndecl, delta, vcall_offset, function)
     FILE *file;
     tree thunk_fndecl ATTRIBUTE_UNUSED;
     HOST_WIDE_INT delta;
     HOST_WIDE_INT vcall_offset;
     tree function;
{
  rtx this, insn, funexp;

  /* Pretend reload has already run so hard registers can be used
     directly; both flags are restored to 0 at the end.  */
  reload_completed = 1;
  no_new_pseudos = 1;

  /* Mark the end of the (empty) prologue.  */
  emit_note (NULL, NOTE_INSN_PROLOGUE_END);

  /* Find the "this" pointer.  If the function returns a structure,
     the structure return pointer is in r3, which pushes `this' to r4.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
    this = gen_rtx_REG (Pmode, 4);
  else
    this = gen_rtx_REG (Pmode, 3);

  /* Apply the constant offset, if required.  */
  if (delta)
    {
      rtx delta_rtx = GEN_INT (delta);
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (this, this, delta_rtx)
		 : gen_adddi3 (this, this, delta_rtx));
    }

  /* Apply the offset from the vtable, if required.  Load the vtable
     pointer from *this, then the adjustment at VCALL_OFFSET within
     it, and add that to `this'.  r12 is used as a scratch register.  */
  if (vcall_offset)
    {
      rtx vcall_offset_rtx = GEN_INT (vcall_offset);
      rtx tmp = gen_rtx_REG (Pmode, 12);

      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
		 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (this, this, tmp)
		 : gen_adddi3 (this, this, tmp));
    }

  /* Generate a tail call to the target function.  */
  if (!TREE_USED (function))
    {
      assemble_external (function);
      TREE_USED (function) = 1;
    }
  funexp = XEXP (DECL_RTL (function), 0);

  /* Mark the call target as local (SYMBOL_REF_FLAG) when it is defined
     in this file and is not forced through a long call.  */
  SYMBOL_REF_FLAG (funexp) = 0;
  if (current_file_function_operand (funexp, VOIDmode)
      && (! lookup_attribute ("longcall",
			      TYPE_ATTRIBUTES (TREE_TYPE (function)))
	  || lookup_attribute ("shortcall",
			       TYPE_ATTRIBUTES (TREE_TYPE (function)))))
    SYMBOL_REF_FLAG (funexp) = 1;

  funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);

#if TARGET_MACHO
  if (MACHOPIC_INDIRECT)
    funexp = machopic_indirect_call_target (funexp);
#endif

  /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
     generate sibcall RTL explicitly to avoid constraint abort.  */
  insn = emit_call_insn (
	   gen_rtx_PARALLEL (VOIDmode,
			     gen_rtvec (4,
					gen_rtx_CALL (VOIDmode,
						      funexp, const0_rtx),
					gen_rtx_USE (VOIDmode, const0_rtx),
					gen_rtx_USE (VOIDmode,
						     gen_rtx_REG (SImode,
								  LINK_REGISTER_REGNUM)),
					gen_rtx_RETURN (VOIDmode))));
  SIBLING_CALL_P (insn) = 1;
  emit_barrier ();

  /* Run just enough of rest_of_compilation to get the insns emitted.
     There's not really enough bulk here to make other passes such as
     instruction scheduling worth while.  Note that use_thunk calls
     assemble_start_function and assemble_end_function.  */
  insn = get_insns ();
  shorten_branches (insn);
  final_start_function (insn, file, 1);
  final (insn, file, 1, 0);
  final_end_function ();

  reload_completed = 0;
  no_new_pseudos = 0;
}
11654 \f
11655 /* A quick summary of the various types of 'constant-pool tables'
11656 under PowerPC:
11657
11658 Target Flags Name One table per
11659 AIX (none) AIX TOC object file
11660 AIX -mfull-toc AIX TOC object file
11661 AIX -mminimal-toc AIX minimal TOC translation unit
11662 SVR4/EABI (none) SVR4 SDATA object file
11663 SVR4/EABI -fpic SVR4 pic object file
11664 SVR4/EABI -fPIC SVR4 PIC translation unit
11665 SVR4/EABI -mrelocatable EABI TOC function
11666 SVR4/EABI -maix AIX TOC object file
11667 SVR4/EABI -maix -mminimal-toc
11668 AIX minimal TOC translation unit
11669
11670 Name Reg. Set by entries contains:
11671 made by addrs? fp? sum?
11672
11673 AIX TOC 2 crt0 as Y option option
11674 AIX minimal TOC 30 prolog gcc Y Y option
11675 SVR4 SDATA 13 crt0 gcc N Y N
11676 SVR4 pic 30 prolog ld Y not yet N
11677 SVR4 PIC 30 prolog gcc Y option option
11678 EABI TOC 30 prolog gcc Y option option
11679
11680 */
11681
11682 /* Hash functions for the hash table. */
11683
11684 static unsigned
11685 rs6000_hash_constant (k)
11686 rtx k;
11687 {
11688 enum rtx_code code = GET_CODE (k);
11689 enum machine_mode mode = GET_MODE (k);
11690 unsigned result = (code << 3) ^ mode;
11691 const char *format;
11692 int flen, fidx;
11693
11694 format = GET_RTX_FORMAT (code);
11695 flen = strlen (format);
11696 fidx = 0;
11697
11698 switch (code)
11699 {
11700 case LABEL_REF:
11701 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
11702
11703 case CONST_DOUBLE:
11704 if (mode != VOIDmode)
11705 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
11706 flen = 2;
11707 break;
11708
11709 case CODE_LABEL:
11710 fidx = 3;
11711 break;
11712
11713 default:
11714 break;
11715 }
11716
11717 for (; fidx < flen; fidx++)
11718 switch (format[fidx])
11719 {
11720 case 's':
11721 {
11722 unsigned i, len;
11723 const char *str = XSTR (k, fidx);
11724 len = strlen (str);
11725 result = result * 613 + len;
11726 for (i = 0; i < len; i++)
11727 result = result * 613 + (unsigned) str[i];
11728 break;
11729 }
11730 case 'u':
11731 case 'e':
11732 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
11733 break;
11734 case 'i':
11735 case 'n':
11736 result = result * 613 + (unsigned) XINT (k, fidx);
11737 break;
11738 case 'w':
11739 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
11740 result = result * 613 + (unsigned) XWINT (k, fidx);
11741 else
11742 {
11743 size_t i;
11744 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
11745 result = result * 613 + (unsigned) (XWINT (k, fidx)
11746 >> CHAR_BIT * i);
11747 }
11748 break;
11749 default:
11750 abort ();
11751 }
11752
11753 return result;
11754 }
11755
11756 static unsigned
11757 toc_hash_function (hash_entry)
11758 const void * hash_entry;
11759 {
11760 const struct toc_hash_struct *thc =
11761 (const struct toc_hash_struct *) hash_entry;
11762 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
11763 }
11764
11765 /* Compare H1 and H2 for equivalence. */
11766
11767 static int
11768 toc_hash_eq (h1, h2)
11769 const void * h1;
11770 const void * h2;
11771 {
11772 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
11773 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
11774
11775 if (((const struct toc_hash_struct *) h1)->key_mode
11776 != ((const struct toc_hash_struct *) h2)->key_mode)
11777 return 0;
11778
11779 return rtx_equal_p (r1, r2);
11780 }
11781
11782 /* These are the names given by the C++ front-end to vtables, and
11783 vtable-like objects. Ideally, this logic should not be here;
11784 instead, there should be some programmatic way of inquiring as
11785 to whether or not an object is a vtable. */
11786
/* Test the macro argument itself: the previous definition referred to
   a variable `name' and only worked when the call site happened to
   declare a local of that exact name.  */
#define VTABLE_NAME_P(NAME) \
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0	  \
  || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0	  \
  || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0	  \
  || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
11792
11793 void
11794 rs6000_output_symbol_ref (file, x)
11795 FILE *file;
11796 rtx x;
11797 {
11798 /* Currently C++ toc references to vtables can be emitted before it
11799 is decided whether the vtable is public or private. If this is
11800 the case, then the linker will eventually complain that there is
11801 a reference to an unknown section. Thus, for vtables only,
11802 we emit the TOC reference to reference the symbol and not the
11803 section. */
11804 const char *name = XSTR (x, 0);
11805
11806 if (VTABLE_NAME_P (name))
11807 {
11808 RS6000_OUTPUT_BASENAME (file, name);
11809 }
11810 else
11811 assemble_name (file, name);
11812 }
11813
11814 /* Output a TOC entry. We derive the entry name from what is being
11815 written. */
11816
/* Output the TOC entry labelled LC..LABELNO for constant X of MODE
   into FILE, deriving the entry name from what is being written.
   Duplicate entries are merged with `.set' when the linker will not
   eliminate them itself.  */

void
output_toc (file, x, labelno, mode)
     FILE *file;
     rtx x;
     int labelno;
     enum machine_mode mode;
{
  char buf[256];
  const char *name = buf;
  const char *real_name;
  rtx base = x;
  int offset = 0;

  if (TARGET_NO_TOC)
    abort ();

  /* When the linker won't eliminate them, don't output duplicate
     TOC entries (this happens on AIX if there is any kind of TOC,
     and on SVR4 under -fPIC or -mrelocatable).  Don't do this for
     CODE_LABELs.  */
  if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
    {
      struct toc_hash_struct *h;
      void * * found;

      /* Create toc_hash_table.  This can't be done at OVERRIDE_OPTIONS
	 time because GGC is not initialised at that point.  */
      if (toc_hash_table == NULL)
	toc_hash_table = htab_create_ggc (1021, toc_hash_function,
					  toc_hash_eq, NULL);

      h = ggc_alloc (sizeof (*h));
      h->key = x;
      h->key_mode = mode;
      h->labelno = labelno;

      found = htab_find_slot (toc_hash_table, h, 1);
      if (*found == NULL)
	*found = h;
      else  /* This is indeed a duplicate.
	       Set this label equal to that label.  */
	{
	  fputs ("\t.set ", file);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d,", labelno);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
				   found)->labelno));
	  return;
	}
    }

  /* If we're going to put a double constant in the TOC, make sure it's
     aligned properly when strict alignment is on.  */
  if (GET_CODE (x) == CONST_DOUBLE
      && STRICT_ALIGNMENT
      && GET_MODE_BITSIZE (mode) >= 64
      && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
    ASM_OUTPUT_ALIGN (file, 3);
  }

  (*targetm.asm_out.internal_label) (file, "LC", labelno);

  /* Handle FP constants specially.  Note that if we have a minimal
     TOC, things we put here aren't actually in the TOC, so we can allow
     FP constants.  */
  if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
    {
      /* 128-bit long double: four 32-bit target words in k[].  */
      REAL_VALUE_TYPE rv;
      long k[4];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff,
		     k[2] & 0xffffffff, k[3] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff,
		   k[2] & 0xffffffff, k[3] & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff,
		     k[2] & 0xffffffff, k[3] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff,
		   k[2] & 0xffffffff, k[3] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      /* 64-bit double: two 32-bit target words in k[].  */
      REAL_VALUE_TYPE rv;
      long k[2];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      /* 32-bit float: a single target word in l.  */
      REAL_VALUE_TYPE rv;
      long l;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  /* Pad to a full doubleword; the value sits in the high half.  */
	  fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx\n", l & 0xffffffff);
	  return;
	}
    }
  else if (GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      /* Integer constant: split into a low word and a sign-carrying
	 high word, regardless of the host's HOST_WIDE_INT width.  */
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;

      if (GET_CODE (x) == CONST_DOUBLE)
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}
      else
#if HOST_BITS_PER_WIDE_INT == 32
	{
	  low = INTVAL (x);
	  /* Sign-extend into the high word.  */
	  high = (low & 0x80000000) ? ~0 : 0;
	}
#else
	{
	  low = INTVAL (x) & 0xffffffff;
	  high = (HOST_WIDE_INT) INTVAL (x) >> 32;
	}
#endif

      /* TOC entries are always Pmode-sized, but since this
	 is a bigendian machine then if we're putting smaller
	 integer constants in the TOC we have to pad them.
	 (This is still a win over putting the constants in
	 a separate constant pool, because then we'd have
	 to have both a TOC entry _and_ the actual constant.)

	 For a 32-bit target, CONST_INT values are loaded and shifted
	 entirely within `low' and can be stored in one TOC entry.  */

      if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
	abort ();/* It would be easy to make this work, but it doesn't now.  */

      if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
	{
	  /* Left-justify the value in the Pmode-sized TOC slot.  */
#if HOST_BITS_PER_WIDE_INT == 32
	  lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
			 POINTER_SIZE, &low, &high, 0);
#else
	  low |= high << 32;
	  low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
	  high = (HOST_WIDE_INT) low >> 32;
	  low &= 0xffffffff;
#endif
	}

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc ID_%lx_%lx[TC],",
		     (long) high & 0xffffffff, (long) low & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   (long) high & 0xffffffff, (long) low & 0xffffffff);
	  return;
	}
      else
	{
	  if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
	    {
	      /* Double-word value on a 32-bit target: two .long's.  */
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc ID_%lx_%lx[TC],",
			 (long) high & 0xffffffff, (long) low & 0xffffffff);
	      fprintf (file, "0x%lx,0x%lx\n",
		       (long) high & 0xffffffff, (long) low & 0xffffffff);
	    }
	  else
	    {
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
	      fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
	    }
	  return;
	}
    }

  /* Anything else is a symbol or label, possibly inside a CONST with
     an added integer offset; pick out BASE and OFFSET.  */
  if (GET_CODE (x) == CONST)
    {
      if (GET_CODE (XEXP (x, 0)) != PLUS)
	abort ();

      base = XEXP (XEXP (x, 0), 0);
      offset = INTVAL (XEXP (XEXP (x, 0), 1));
    }

  if (GET_CODE (base) == SYMBOL_REF)
    name = XSTR (base, 0);
  else if (GET_CODE (base) == LABEL_REF)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
  else if (GET_CODE (base) == CODE_LABEL)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
  else
    abort ();

  real_name = (*targetm.strip_name_encoding) (name);
  if (TARGET_MINIMAL_TOC)
    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
  else
    {
      /* Encode any offset into the .tc entry name (.N for negative,
	 .P for positive) so distinct offsets get distinct entries.  */
      fprintf (file, "\t.tc %s", real_name);

      if (offset < 0)
	fprintf (file, ".N%d", - offset);
      else if (offset)
	fprintf (file, ".P%d", offset);

      fputs ("[TC],", file);
    }

  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a TOC reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  if (VTABLE_NAME_P (name))
    {
      RS6000_OUTPUT_BASENAME (file, name);
      if (offset < 0)
	fprintf (file, "%d", offset);
      else if (offset > 0)
	fprintf (file, "+%d", offset);
    }
  else
    output_addr_const (file, x);
  putc ('\n', file);
}
12112 \f
12113 /* Output an assembler pseudo-op to write an ASCII string of N characters
12114 starting at P to FILE.
12115
12116 On the RS/6000, we have to do this using the .byte operation and
12117 write out special characters outside the quoted string.
12118 Also, the assembler is broken; very long strings are truncated,
12119 so we must artificially break them up early. */
12120
/* Write N characters starting at P to FILE as assembler .byte
   directives, quoting printable runs and emitting other bytes as
   decimal operands.  Quoted runs are capped at 512 characters since
   the assembler truncates very long strings.  */

void
output_ascii (file, p, n)
     FILE *file;
     const char *p;
     int n;
{
  const char *open_string = "\t.byte \"";   /* prefix before next quoted char */
  const char *open_decimal = "\t.byte ";    /* prefix before next decimal byte */
  const char *pending_close = NULL;         /* text owed at end of output */
  int emitted_in_string = 0;                /* chars in the current quoted run */
  int idx;

  for (idx = 0; idx < n; idx++)
    {
      char ch = p[idx];

      if (ch < ' ' || ch >= 0177)
	{
	  /* Non-printing character: emit it as a decimal operand.  */
	  if (open_decimal)
	    fputs (open_decimal, file);
	  fprintf (file, "%d", ch);

	  open_string = "\n\t.byte \"";
	  open_decimal = ", ";
	  pending_close = "\n";
	  emitted_in_string = 0;
	  continue;
	}

      /* Printable character: emit it inside a quoted string.  */
      if (open_string)
	fputs (open_string, file);
      putc (ch, file);

      /* Write two quotes to get one.  */
      if (ch == '"')
	{
	  putc (ch, file);
	  ++emitted_in_string;
	}

      open_string = NULL;
      open_decimal = "\"\n\t.byte ";
      pending_close = "\"\n";
      ++emitted_in_string;

      /* Break overly long quoted runs before the assembler truncates
	 them.  */
      if (emitted_in_string >= 512)
	{
	  fputs (pending_close, file);

	  open_string = "\t.byte \"";
	  open_decimal = "\t.byte ";
	  pending_close = NULL;
	  emitted_in_string = 0;
	}
    }

  /* Now close the string if we have written one.  Then end the line.  */
  if (pending_close)
    fputs (pending_close, file);
}
12182 \f
12183 /* Generate a unique section name for FILENAME for a section type
12184 represented by SECTION_DESC. Output goes into BUF.
12185
12186 SECTION_DESC can be any string, as long as it is different for each
12187 possible section type.
12188
12189 We name the section in the same manner as xlc. The name begins with an
12190 underscore followed by the filename (after stripping any leading directory
12191 names) with the last period replaced by the string SECTION_DESC. If
12192 FILENAME does not contain a period, SECTION_DESC is appended to the end of
12193 the name. */
12194
/* Build an xlc-style section name into *BUF (xmalloc'd, caller frees):
   an underscore, then the alphanumeric characters of FILENAME's
   basename, with SECTION_DESC substituted at the last period (or
   appended when FILENAME contains no period at all).  */

void
rs6000_gen_section_name (buf, filename, section_desc)
     char **buf;
     const char *filename;
     const char *section_desc;
{
  const char *scan;
  const char *base = filename;		/* basename of FILENAME */
  const char *last_period = 0;		/* last '.' anywhere in FILENAME */
  char *out;
  int len;

  /* One pass to find both the basename and the final period.  Note
     the period search covers the whole path, matching the historic
     behavior of this function.  */
  for (scan = filename; *scan; scan++)
    {
      if (*scan == '/')
	base = scan + 1;
      else if (*scan == '.')
	last_period = scan;
    }

  /* Room for '_', the (possibly shortened) basename, SECTION_DESC,
     and the terminating NUL.  */
  len = strlen (base) + strlen (section_desc) + 2;
  *buf = (char *) xmalloc (len);

  out = *buf;
  *out++ = '_';

  for (scan = base; *scan; scan++)
    {
      if (scan == last_period)
	{
	  /* Replace everything from the last period on with the
	     section descriptor.  */
	  strcpy (out, section_desc);
	  out += strlen (section_desc);
	  break;
	}

      else if (ISALNUM (*scan))
	*out++ = *scan;
    }

  if (last_period == 0)
    strcpy (out, section_desc);
  else
    *out = '\0';
}
12238 \f
12239 /* Emit profile function. */
12240
/* Emit the RTL profiling call for ABIs that do it from RTL (AIX and
   Darwin).  LABELNO numbers the per-function profile counter label.  */
void
output_profile_hook (labelno)
     int labelno ATTRIBUTE_UNUSED;
{
  /* Kernel profiling is emitted in output_function_profiler instead.  */
  if (TARGET_PROFILE_KERNEL)
    return;

  if (DEFAULT_ABI == ABI_AIX)
    {
#ifdef NO_PROFILE_COUNTERS
      /* No counter word: call mcount with no arguments.  */
      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
#else
      char buf[30];
      const char *label_name;
      rtx fun;

      /* Pass the address of the LP<labelno> counter label to mcount.  */
      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
      fun = gen_rtx_SYMBOL_REF (Pmode, label_name);

      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
			 fun, Pmode);
#endif
    }
  else if (DEFAULT_ABI == ABI_DARWIN)
    {
      const char *mcount_name = RS6000_MCOUNT;
      int caller_addr_regno = LINK_REGISTER_REGNUM;

      /* Be conservative and always set this, at least for now.  */
      current_function_uses_pic_offset_table = 1;

#if TARGET_MACHO
      /* For PIC code, set up a stub and collect the caller's address
	 from r0, which is where the prologue puts it.  */
      if (MACHOPIC_INDIRECT)
	{
	  mcount_name = machopic_stub_name (mcount_name);
	  if (current_function_uses_pic_offset_table)
	    caller_addr_regno = 0;
	}
#endif
      /* mcount receives the caller's return address as its argument.  */
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
			 0, VOIDmode, 1,
			 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
    }
}
12288
12289 /* Write function profiler code. */
12290
/* Emit the per-function profiling code for label number LABELNO
   directly as assembly into FILE.  For ABI_AIX/ABI_DARWIN this is a
   no-op unless kernel profiling is enabled (the normal path goes
   through output_profile_hook).  */
void
output_function_profiler (file, labelno)
  FILE *file;
  int labelno;
{
  char buf[100];
  /* Stack offset of the LR save slot; 4 for the V4 ABI, 8 otherwise.  */
  int save_lr = 8;

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_V4:
      save_lr = 4;
      /* Fall through.  */

    case ABI_AIX_NODESC:
      if (!TARGET_32BIT)
	{
	  warning ("no profiling of 64-bit code for this ABI");
	  return;
	}
      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      fprintf (file, "\tmflr %s\n", reg_names[0]);
      if (flag_pic == 1)
	{
	  /* Small-model PIC: fetch the counter address via the GOT.  */
	  fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
	  asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@got(%s)\n", reg_names[12]);
	}
      else if (flag_pic > 1)
	{
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  /* Now, we need to get the address of the label.  */
	  fputs ("\tbl 1f\n\t.long ", file);
	  assemble_name (file, buf);
	  fputs ("-.\n1:", file);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
	  asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
		       reg_names[0], reg_names[11]);
	  asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
		       reg_names[0], reg_names[0], reg_names[11]);
	}
      else
	{
	  /* Non-PIC: materialize the counter address directly.  */
	  asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
	  assemble_name (file, buf);
	  fputs ("@ha\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@l(%s)\n", reg_names[12]);
	}

      if (current_function_needs_context && DEFAULT_ABI == ABI_AIX_NODESC)
	{
	  /* Preserve the static chain register across the mcount call.  */
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[STATIC_CHAIN_REGNUM],
		       12, reg_names[1]);
	  fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	  asm_fprintf (file, "\t{l|lwz} %s,%d(%s)\n",
		       reg_names[STATIC_CHAIN_REGNUM],
		       12, reg_names[1]);
	}
      else
	/* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH.  */
	fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
      break;

    case ABI_AIX:
    case ABI_DARWIN:
      if (!TARGET_PROFILE_KERNEL)
	{
	  /* Don't do anything, done in output_profile_hook ().  */
	}
      else
	{
	  /* Kernel profiling is only implemented for 64-bit here.  */
	  if (TARGET_32BIT)
	    abort ();

	  asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
	  asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);

	  if (current_function_needs_context)
	    {
	      /* Preserve the static chain register across the call.  */
	      asm_fprintf (file, "\tstd %s,24(%s)\n",
			   reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
	      fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	      asm_fprintf (file, "\tld %s,24(%s)\n",
			   reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
	    }
	  else
	    fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	}
      break;
    }
}
12395
12396 \f
/* Tell the scheduler that this port always uses the DFA pipeline
   description interface.  */
static int
rs6000_use_dfa_pipeline_interface ()
{
  return 1;
}
12402
12403 /* Power4 load update and store update instructions are cracked into a
12404 load or store and an integer insn which are executed in the same cycle.
12405 Branches have their own dispatch slot which does not count against the
12406 GCC issue rate, but it changes the program flow so there are no other
12407 instructions to issue in this cycle. */
12408
12409 static int
12410 rs6000_variable_issue (stream, verbose, insn, more)
12411 FILE *stream ATTRIBUTE_UNUSED;
12412 int verbose ATTRIBUTE_UNUSED;
12413 rtx insn;
12414 int more;
12415 {
12416 if (GET_CODE (PATTERN (insn)) == USE
12417 || GET_CODE (PATTERN (insn)) == CLOBBER)
12418 return more;
12419
12420 if (rs6000_cpu == PROCESSOR_POWER4)
12421 {
12422 enum attr_type type = get_attr_type (insn);
12423 if (type == TYPE_LOAD_EXT_U || type == TYPE_LOAD_EXT_UX
12424 || type == TYPE_LOAD_UX || type == TYPE_STORE_UX
12425 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX)
12426 return 0;
12427 else if (type == TYPE_LOAD_U || type == TYPE_STORE_U
12428 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
12429 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR)
12430 return more > 2 ? more - 2 : 0;
12431 }
12432
12433 return more - 1;
12434 }
12435
12436 /* Adjust the cost of a scheduling dependency. Return the new cost of
12437 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
12438
12439 static int
12440 rs6000_adjust_cost (insn, link, dep_insn, cost)
12441 rtx insn;
12442 rtx link;
12443 rtx dep_insn ATTRIBUTE_UNUSED;
12444 int cost;
12445 {
12446 if (! recog_memoized (insn))
12447 return 0;
12448
12449 if (REG_NOTE_KIND (link) != 0)
12450 return 0;
12451
12452 if (REG_NOTE_KIND (link) == 0)
12453 {
12454 /* Data dependency; DEP_INSN writes a register that INSN reads
12455 some cycles later. */
12456 switch (get_attr_type (insn))
12457 {
12458 case TYPE_JMPREG:
12459 /* Tell the first scheduling pass about the latency between
12460 a mtctr and bctr (and mtlr and br/blr). The first
12461 scheduling pass will not know about this latency since
12462 the mtctr instruction, which has the latency associated
12463 to it, will be generated by reload. */
12464 return TARGET_POWER ? 5 : 4;
12465 case TYPE_BRANCH:
12466 /* Leave some extra cycles between a compare and its
12467 dependent branch, to inhibit expensive mispredicts. */
12468 if ((rs6000_cpu_attr == CPU_PPC603
12469 || rs6000_cpu_attr == CPU_PPC604
12470 || rs6000_cpu_attr == CPU_PPC604E
12471 || rs6000_cpu_attr == CPU_PPC620
12472 || rs6000_cpu_attr == CPU_PPC630
12473 || rs6000_cpu_attr == CPU_PPC750
12474 || rs6000_cpu_attr == CPU_PPC7400
12475 || rs6000_cpu_attr == CPU_PPC7450
12476 || rs6000_cpu_attr == CPU_POWER4)
12477 && recog_memoized (dep_insn)
12478 && (INSN_CODE (dep_insn) >= 0)
12479 && (get_attr_type (dep_insn) == TYPE_CMP
12480 || get_attr_type (dep_insn) == TYPE_COMPARE
12481 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
12482 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
12483 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
12484 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
12485 return cost + 2;
12486 default:
12487 break;
12488 }
12489 /* Fall out to return default cost. */
12490 }
12491
12492 return cost;
12493 }
12494
12495 /* A C statement (sans semicolon) to update the integer scheduling
12496 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
12497 INSN earlier, increase the priority to execute INSN later. Do not
12498 define this macro if you do not need to adjust the scheduling
12499 priorities of insns. */
12500
/* Adjust the scheduling priority of INSN.  Currently a no-op: the
   multiply/divide priority experiment below is compiled out.  */
static int
rs6000_adjust_priority (insn, priority)
     rtx insn ATTRIBUTE_UNUSED;
     int priority;
{
  /* On machines (like the 750) which have asymmetric integer units,
     where one integer unit can do multiply and divides and the other
     can't, reduce the priority of multiply/divide so it is scheduled
     before other integer operations.  */

#if 0
  if (! INSN_P (insn))
    return priority;

  if (GET_CODE (PATTERN (insn)) == USE)
    return priority;

  switch (rs6000_cpu_attr) {
  case CPU_PPC750:
    switch (get_attr_type (insn))
      {
      default:
	break;

      case TYPE_IMUL:
      case TYPE_IDIV:
	fprintf (stderr, "priority was %#x (%d) before adjustment\n",
		 priority, priority);
	if (priority >= 0 && priority < 0x01000000)
	  priority >>= 3;
	break;
      }
  }
#endif

  return priority;
}
12538
12539 /* Return how many instructions the machine can issue per cycle. */
12540
12541 static int
12542 rs6000_issue_rate ()
12543 {
12544 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
12545 if (!reload_completed)
12546 return 1;
12547
12548 switch (rs6000_cpu_attr) {
12549 case CPU_RIOS1: /* ? */
12550 case CPU_RS64A:
12551 case CPU_PPC601: /* ? */
12552 case CPU_PPC7450:
12553 return 3;
12554 case CPU_PPC440:
12555 case CPU_PPC603:
12556 case CPU_PPC750:
12557 case CPU_PPC7400:
12558 return 2;
12559 case CPU_RIOS2:
12560 case CPU_PPC604:
12561 case CPU_PPC604E:
12562 case CPU_PPC620:
12563 case CPU_PPC630:
12564 case CPU_POWER4:
12565 return 4;
12566 default:
12567 return 1;
12568 }
12569 }
12570
12571 \f
12572 /* Length in units of the trampoline for entering a nested function. */
12573
12574 int
12575 rs6000_trampoline_size ()
12576 {
12577 int ret = 0;
12578
12579 switch (DEFAULT_ABI)
12580 {
12581 default:
12582 abort ();
12583
12584 case ABI_AIX:
12585 ret = (TARGET_32BIT) ? 12 : 24;
12586 break;
12587
12588 case ABI_DARWIN:
12589 case ABI_V4:
12590 case ABI_AIX_NODESC:
12591 ret = (TARGET_32BIT) ? 40 : 48;
12592 break;
12593 }
12594
12595 return ret;
12596 }
12597
12598 /* Emit RTL insns to initialize the variable parts of a trampoline.
12599 FNADDR is an RTX for the address of the function's pure code.
12600 CXT is an RTX for the static chain value for the function. */
12601
/* Fill in the variable parts of a trampoline at ADDR: FNADDR is the
   address of the function's pure code, CXT the static-chain value.  */
void
rs6000_initialize_trampoline (addr, fnaddr, cxt)
     rtx addr;
     rtx fnaddr;
     rtx cxt;
{
  enum machine_mode pmode = Pmode;
  int regsize = (TARGET_32BIT) ? 4 : 8;	/* bytes per pointer-sized slot */
  rtx ctx_reg = force_reg (pmode, cxt);

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

/* Macros to shorten the code expansions below.  */
#define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
#define MEM_PLUS(addr,offset) \
  gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))

    /* Under AIX, just build the 3 word function descriptor */
    case ABI_AIX:
      {
	/* Copy the entry point and TOC words out of FNADDR's own
	   descriptor, then append the static chain as the third word.  */
	rtx fn_reg = gen_reg_rtx (pmode);
	rtx toc_reg = gen_reg_rtx (pmode);
	emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
	emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
	emit_move_insn (MEM_DEREF (addr), fn_reg);
	emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
	emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
      }
      break;

    /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
    case ABI_DARWIN:
    case ABI_V4:
    case ABI_AIX_NODESC:
      emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
			 FALSE, VOIDmode, 4,
			 addr, pmode,
			 GEN_INT (rs6000_trampoline_size ()), SImode,
			 fnaddr, pmode,
			 ctx_reg, pmode);
      break;
    }

  return;
}
12650
12651 \f
12652 /* Table of valid machine attributes. */
12653
const struct attribute_spec rs6000_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  /* Both attributes apply to function types only and take no arguments;
     they share one handler which merely validates placement.  */
  { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
  { NULL, 0, 0, false, false, false, NULL }
};
12661
12662 /* Handle a "longcall" or "shortcall" attribute; arguments as in
12663 struct attribute_spec.handler. */
12664
12665 static tree
12666 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
12667 tree *node;
12668 tree name;
12669 tree args ATTRIBUTE_UNUSED;
12670 int flags ATTRIBUTE_UNUSED;
12671 bool *no_add_attrs;
12672 {
12673 if (TREE_CODE (*node) != FUNCTION_TYPE
12674 && TREE_CODE (*node) != FIELD_DECL
12675 && TREE_CODE (*node) != TYPE_DECL)
12676 {
12677 warning ("`%s' attribute only applies to functions",
12678 IDENTIFIER_POINTER (name));
12679 *no_add_attrs = true;
12680 }
12681
12682 return NULL_TREE;
12683 }
12684
12685 /* Set longcall attributes on all functions declared when
12686 rs6000_default_long_calls is true. */
12687 static void
12688 rs6000_set_default_type_attributes (type)
12689 tree type;
12690 {
12691 if (rs6000_default_long_calls
12692 && (TREE_CODE (type) == FUNCTION_TYPE
12693 || TREE_CODE (type) == METHOD_TYPE))
12694 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
12695 NULL_TREE,
12696 TYPE_ATTRIBUTES (type));
12697 }
12698
12699 /* Return a reference suitable for calling a function with the
12700 longcall attribute. */
12701
12702 struct rtx_def *
12703 rs6000_longcall_ref (call_ref)
12704 rtx call_ref;
12705 {
12706 const char *call_name;
12707 tree node;
12708
12709 if (GET_CODE (call_ref) != SYMBOL_REF)
12710 return call_ref;
12711
12712 /* System V adds '.' to the internal name, so skip them. */
12713 call_name = XSTR (call_ref, 0);
12714 if (*call_name == '.')
12715 {
12716 while (*call_name == '.')
12717 call_name++;
12718
12719 node = get_identifier (call_name);
12720 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
12721 }
12722
12723 return force_reg (Pmode, call_ref);
12724 }
12725
12726 \f
12727 #ifdef USING_ELFOS_H
12728
12729 /* A C statement or statements to switch to the appropriate section
12730 for output of RTX in mode MODE. You can assume that RTX is some
12731 kind of constant in RTL. The argument MODE is redundant except in
12732 the case of a `const_int' rtx. Select the section by calling
12733 `text_section' or one of the alternatives for other sections.
12734
12735 Do not define this macro if you put all constants in the read-only
12736 data section. */
12737
12738 static void
12739 rs6000_elf_select_rtx_section (mode, x, align)
12740 enum machine_mode mode;
12741 rtx x;
12742 unsigned HOST_WIDE_INT align;
12743 {
12744 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
12745 toc_section ();
12746 else
12747 default_elf_select_rtx_section (mode, x, align);
12748 }
12749
12750 /* A C statement or statements to switch to the appropriate
12751 section for output of DECL. DECL is either a `VAR_DECL' node
12752 or a constant of some sort. RELOC indicates whether forming
12753 the initial value of DECL requires link-time relocations. */
12754
12755 static void
12756 rs6000_elf_select_section (decl, reloc, align)
12757 tree decl;
12758 int reloc;
12759 unsigned HOST_WIDE_INT align;
12760 {
12761 /* Pretend that we're always building for a shared library when
12762 ABI_AIX, because otherwise we end up with dynamic relocations
12763 in read-only sections. This happens for function pointers,
12764 references to vtables in typeinfo, and probably other cases. */
12765 default_elf_select_section_1 (decl, reloc, align,
12766 flag_pic || DEFAULT_ABI == ABI_AIX);
12767 }
12768
12769 /* A C statement to build up a unique section name, expressed as a
12770 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
12771 RELOC indicates whether the initial value of EXP requires
12772 link-time relocations. If you do not define this macro, GCC will use
12773 the symbol name prefixed by `.' as the section name. Note - this
12774 macro can now be called for uninitialized data items as well as
12775 initialized data and functions. */
12776
12777 static void
12778 rs6000_elf_unique_section (decl, reloc)
12779 tree decl;
12780 int reloc;
12781 {
12782 /* As above, pretend that we're always building for a shared library
12783 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
12784 default_unique_section_1 (decl, reloc,
12785 flag_pic || DEFAULT_ABI == ABI_AIX);
12786 }
12787 \f
12788 /* If we are referencing a function that is static or is known to be
12789 in this file, make the SYMBOL_REF special. We can use this to indicate
12790 that we can branch to this function without emitting a no-op after the
12791 call. For real AIX calling sequences, we also replace the
12792 function name with the real name (1 or 2 leading .'s), rather than
12793 the function descriptor name. This saves a lot of overriding code
12794 to read the prefixes. */
12795
static void
rs6000_elf_encode_section_info (decl, first)
     tree decl;
     int first;
{
  /* Only encode on the first call for a decl; later calls would
     re-prefix an already-encoded name.  */
  if (!first)
    return;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      rtx sym_ref = XEXP (DECL_RTL (decl), 0);
      /* Locally-bound functions can be branched to directly (no no-op
	 after the call); flag the symbol to say so.  */
      if ((*targetm.binds_local_p) (decl))
	SYMBOL_REF_FLAG (sym_ref) = 1;

      if (!TARGET_AIX && DEFAULT_ABI == ABI_AIX)
	{
	  /* NOTE(review): inside this block DEFAULT_ABI == ABI_AIX always
	     holds, so len1 is always 1 and the str[1] = '.' store is
	     immediately overwritten by the memcpy — the two-dot path
	     looks like dead code kept from an older scheme.  */
	  size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
	  size_t len2 = strlen (XSTR (sym_ref, 0));
	  char *str = alloca (len1 + len2 + 1);
	  str[0] = '.';
	  str[1] = '.';
	  memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);

	  XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
	}
    }
  else if (rs6000_sdata != SDATA_NONE
	   && DEFAULT_ABI == ABI_V4
	   && TREE_CODE (decl) == VAR_DECL)
    {
      rtx sym_ref = XEXP (DECL_RTL (decl), 0);
      int size = int_size_in_bytes (TREE_TYPE (decl));
      tree section_name = DECL_SECTION_NAME (decl);
      const char *name = (char *)0;
      int len = 0;

      if ((*targetm.binds_local_p) (decl))
	SYMBOL_REF_FLAG (sym_ref) = 1;

      if (section_name)
	{
	  if (TREE_CODE (section_name) == STRING_CST)
	    {
	      name = TREE_STRING_POINTER (section_name);
	      len = TREE_STRING_LENGTH (section_name);
	    }
	  else
	    abort ();
	}

      /* A variable lives in small data if it was explicitly placed in
	 one of the small-data sections, or (with no explicit section)
	 if its size is within the -G threshold.  */
      if (name
	  ? ((len == sizeof (".sdata") - 1
	      && strcmp (name, ".sdata") == 0)
	     || (len == sizeof (".sdata2") - 1
		 && strcmp (name, ".sdata2") == 0)
	     || (len == sizeof (".sbss") - 1
		 && strcmp (name, ".sbss") == 0)
	     || (len == sizeof (".sbss2") - 1
		 && strcmp (name, ".sbss2") == 0)
	     || (len == sizeof (".PPC.EMB.sdata0") - 1
		 && strcmp (name, ".PPC.EMB.sdata0") == 0)
	     || (len == sizeof (".PPC.EMB.sbss0") - 1
		 && strcmp (name, ".PPC.EMB.sbss0") == 0))
	  : (size > 0 && size <= g_switch_value))
	{
	  /* Prefix the name with '@' to mark it as small data; the
	     prefix is removed again by rs6000_elf_strip_name_encoding.
	     (This inner `len' intentionally shadows the outer one.)  */
	  size_t len = strlen (XSTR (sym_ref, 0));
	  char *str = alloca (len + 2);

	  str[0] = '@';
	  memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
	  XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
	}
    }
}
12870
/* Undo the encoding done by rs6000_elf_encode_section_info: skip any
   leading '*' (no-prefix marker) or '@' (small-data marker) flag
   characters and return the bare name.  */
static const char *
rs6000_elf_strip_name_encoding (str)
     const char *str;
{
  for (; *str == '*' || *str == '@'; str++)
    ;
  return str;
}
12879
12880 static bool
12881 rs6000_elf_in_small_data_p (decl)
12882 tree decl;
12883 {
12884 if (rs6000_sdata == SDATA_NONE)
12885 return false;
12886
12887 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
12888 {
12889 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
12890 if (strcmp (section, ".sdata") == 0
12891 || strcmp (section, ".sdata2") == 0
12892 || strcmp (section, ".sbss") == 0)
12893 return true;
12894 }
12895 else
12896 {
12897 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
12898
12899 if (size > 0
12900 && size <= g_switch_value
12901 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
12902 return true;
12903 }
12904
12905 return false;
12906 }
12907
12908 #endif /* USING_ELFOS_H */
12909
12910 \f
12911 /* Return a REG that occurs in ADDR with coefficient 1.
12912 ADDR can be effectively incremented by incrementing REG.
12913
12914 r0 is special and we must not select it as an address
12915 register by this routine since our caller will try to
12916 increment the returned register via an "la" instruction. */
12917
12918 struct rtx_def *
12919 find_addr_reg (addr)
12920 rtx addr;
12921 {
12922 while (GET_CODE (addr) == PLUS)
12923 {
12924 if (GET_CODE (XEXP (addr, 0)) == REG
12925 && REGNO (XEXP (addr, 0)) != 0)
12926 addr = XEXP (addr, 0);
12927 else if (GET_CODE (XEXP (addr, 1)) == REG
12928 && REGNO (XEXP (addr, 1)) != 0)
12929 addr = XEXP (addr, 1);
12930 else if (CONSTANT_P (XEXP (addr, 0)))
12931 addr = XEXP (addr, 1);
12932 else if (CONSTANT_P (XEXP (addr, 1)))
12933 addr = XEXP (addr, 0);
12934 else
12935 abort ();
12936 }
12937 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
12938 return addr;
12939 abort ();
12940 }
12941
/* Report OP as an invalid address and abort compilation; used by
   insn output templates when they meet an operand they cannot emit.  */
void
rs6000_fatal_bad_address (op)
     rtx op;
{
  fatal_insn ("bad address", op);
}
12948
12949 #if TARGET_MACHO
12950
#if 0
/* Returns 1 if OP is either a symbol reference or a sum of a symbol
   reference and a constant.  */

/* NOTE(review): disabled code.  If ever revived, note the return
   expression relies on && binding tighter than ||; parenthesizing the
   (A || B) && C term would make the intent explicit.  */
int
symbolic_operand (op)
     rtx op;
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;
    case CONST:
      op = XEXP (op, 0);
      return (GET_CODE (op) == SYMBOL_REF ||
	      (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
	       || GET_CODE (XEXP (op, 0)) == LABEL_REF)
	      && GET_CODE (XEXP (op, 1)) == CONST_INT);
    default:
      return 0;
    }
}
#endif
12975
12976 #ifdef RS6000_LONG_BRANCH
12977
/* TREE_LIST chain of long-branch stubs still to be emitted; see
   add_compiler_stub and output_compiler_stub.  */
static tree stub_list = 0;
12979
12980 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
12981 procedure calls to the linked list. */
12982
12983 void
12984 add_compiler_stub (label_name, function_name, line_number)
12985 tree label_name;
12986 tree function_name;
12987 int line_number;
12988 {
12989 tree stub = build_tree_list (function_name, label_name);
12990 TREE_TYPE (stub) = build_int_2 (line_number, 0);
12991 TREE_CHAIN (stub) = stub_list;
12992 stub_list = stub;
12993 }
12994
/* Accessors for stub_list entries built by add_compiler_stub.  */
#define STUB_LABEL_NAME(STUB)     TREE_VALUE (STUB)
#define STUB_FUNCTION_NAME(STUB)  TREE_PURPOSE (STUB)
#define STUB_LINE_NUMBER(STUB)    TREE_INT_CST_LOW (TREE_TYPE (STUB))
12998
12999 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
13000 handling procedure calls from the linked list and initializes the
13001 linked list. */
13002
13003 void
13004 output_compiler_stub ()
13005 {
13006 char tmp_buf[256];
13007 char label_buf[256];
13008 tree stub;
13009
13010 if (!flag_pic)
13011 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13012 {
13013 fprintf (asm_out_file,
13014 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
13015
13016 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13017 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
13018 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
13019 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13020
13021 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
13022 strcpy (label_buf,
13023 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
13024 else
13025 {
13026 label_buf[0] = '_';
13027 strcpy (label_buf+1,
13028 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
13029 }
13030
13031 strcpy (tmp_buf, "lis r12,hi16(");
13032 strcat (tmp_buf, label_buf);
13033 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
13034 strcat (tmp_buf, label_buf);
13035 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
13036 output_asm_insn (tmp_buf, 0);
13037
13038 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13039 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
13040 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
13041 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13042 }
13043
13044 stub_list = 0;
13045 }
13046
13047 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
13048 already there or not. */
13049
13050 int
13051 no_previous_def (function_name)
13052 tree function_name;
13053 {
13054 tree stub;
13055 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13056 if (function_name == STUB_FUNCTION_NAME (stub))
13057 return 0;
13058 return 1;
13059 }
13060
13061 /* GET_PREV_LABEL gets the label name from the previous definition of
13062 the function. */
13063
13064 tree
13065 get_prev_label (function_name)
13066 tree function_name;
13067 {
13068 tree stub;
13069 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13070 if (function_name == STUB_FUNCTION_NAME (stub))
13071 return STUB_LABEL_NAME (stub);
13072 return 0;
13073 }
13074
13075 /* INSN is either a function call or a millicode call. It may have an
13076 unconditional jump in its delay slot.
13077
13078 CALL_DEST is the routine we are calling. */
13079
/* INSN is either a function call or a millicode call.  It may have an
   unconditional jump in its delay slot.

   CALL_DEST is the routine we are calling.  Returns the assembler
   template for the call (pointing into a static buffer, so the result
   must be consumed before the next call).  */
char *
output_call (insn, call_dest, operand_number)
     rtx insn;
     rtx call_dest;
     int operand_number;
{
  static char buf[256];
  /* Direct calls to a known symbol under -mlong-branch (non-PIC) go
     through a compiler-generated stub; everything else is a plain bl.  */
  if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
    {
      tree labelname;
      tree funname = get_identifier (XSTR (call_dest, 0));

      if (no_previous_def (funname))
	{
	  /* First call to this function: invent an internal label and
	     record a stub for it.  */
	  int line_number = 0;
	  rtx label_rtx = gen_label_rtx ();
	  char *label_buf, temp_buf[256];
	  ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
				       CODE_LABEL_NUMBER (label_rtx));
	  label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
	  labelname = get_identifier (label_buf);
	  /* Walk back to the nearest NOTE to pick up a source line
	     number for the stub's debug info.  */
	  for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
	  if (insn)
	    line_number = NOTE_LINE_NUMBER (insn);
	  add_compiler_stub (labelname, funname, line_number);
	}
      else
	labelname = get_prev_label (funname);

      /* "%.246s" bounds the label so buf cannot overflow.  */
      sprintf (buf, "jbsr %%z%d,%.246s",
	       operand_number, IDENTIFIER_POINTER (labelname));
      return buf;
    }
  else
    {
      sprintf (buf, "bl %%z%d", operand_number);
      return buf;
    }
}
13119
13120 #endif /* RS6000_LONG_BRANCH */
13121
/* Write into BUF a local label of the form "L<N>$<SYMBOL>", preserving
   or adding surrounding quotes when SYMBOL is quoted or contains
   characters that need quoting.  BUF must be large enough for the
   symbol plus the "L<N>$" prefix and possible quotes.  */
#define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N)		\
  do {								\
    const char *const symbol_ = (SYMBOL);			\
    char *buffer_ = (BUF);					\
    if (symbol_[0] == '"')					\
      {								\
        sprintf(buffer_, "\"L%d$%s", (N), symbol_+1);		\
      }								\
    else if (name_needs_quotes(symbol_))			\
      {								\
        sprintf(buffer_, "\"L%d$%s\"", (N), symbol_);		\
      }								\
    else							\
      {								\
        sprintf(buffer_, "L%d$%s", (N), symbol_);		\
      }								\
  } while (0)
13139
13140
13141 /* Generate PIC and indirect symbol stubs. */
13142
/* Generate PIC and indirect symbol stubs.  Emits a Darwin symbol stub
   for SYMB named STUB, plus the matching lazy pointer that initially
   points at dyld_stub_binding_helper.  */
void
machopic_output_stub (file, symb, stub)
     FILE *file;
     const char *symb, *stub;
{
  unsigned int length;
  char *symbol_name, *lazy_ptr_name;
  char *local_label_0;
  static int label = 0;		/* counter making each stub's local label unique */

  /* Lose our funky encoding stuff so it doesn't contaminate the stub.  */
  symb = (*targetm.strip_name_encoding) (symb);

  label += 1;

  /* The "+ 32" leaves room for the prefixes/quotes the GEN_* macros add.  */
  length = strlen (symb);
  symbol_name = alloca (length + 32);
  GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);

  lazy_ptr_name = alloca (length + 32);
  GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);

  local_label_0 = alloca (length + 32);
  GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);

  if (flag_pic == 2)
    machopic_picsymbol_stub1_section ();
  else
    machopic_symbol_stub1_section ();
  fprintf (file, "\t.align 2\n");

  fprintf (file, "%s:\n", stub);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

  if (flag_pic == 2)
    {
      /* Fully PIC stub: compute the lazy pointer's address relative to
	 a bcl-obtained PC, preserving LR around the trick.  */
      fprintf (file, "\tmflr r0\n");
      fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
      fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
      fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtlr r0\n");
      fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\tbctr\n");
    }
  else
    {
      /* Non-PIC stub: address the lazy pointer absolutely.  */
      fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
      fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\tbctr\n");
    }

  /* The lazy pointer the stub loads through; dyld rewrites it to the
     real target on first use.  */
  machopic_lazy_symbol_ptr_section ();
  fprintf (file, "%s:\n", lazy_ptr_name);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
  fprintf (file, "\t.long dyld_stub_binding_helper\n");
}
13203
13204 /* Legitimize PIC addresses. If the address is already
13205 position-independent, we return ORIG. Newly generated
13206 position-independent addresses go into a reg. This is REG if non
13207 zero, otherwise we allocate register(s) as necessary. */
13208
/* True iff the CONST_INT X fits in a signed 16-bit immediate field.  */
#define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
13210
/* Legitimize the PIC address ORIG of mode MODE.  If already
   position-independent, ORIG is returned; otherwise the result goes
   into REG, or a fresh register when REG is null (outside reload).  */
rtx
rs6000_machopic_legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  rtx base, offset;

  if (reg == NULL && ! reload_in_progress && ! reload_completed)
    reg = gen_reg_rtx (Pmode);

  if (GET_CODE (orig) == CONST)
    {
      /* Already a pic-base-relative sum: nothing to do.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      /* Legitimize the two halves of a (const (plus ...)) recursively.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  base =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
						    Pmode, reg);
	  offset =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
						    Pmode, reg);
	}
      else
	abort ();

      if (GET_CODE (offset) == CONST_INT)
	{
	  if (SMALL_INT (offset))
	    return plus_constant (base, INTVAL (offset));
	  else if (! reload_in_progress && ! reload_completed)
	    offset = force_reg (Pmode, offset);
	  else
	    {
	      /* During reload we cannot create pseudos; spill the whole
		 constant to memory instead.  */
 	      rtx mem = force_const_mem (Pmode, orig);
	      return machopic_legitimize_pic_address (mem, Pmode, reg);
	    }
	}
      return gen_rtx (PLUS, Pmode, base, offset);
    }

  /* Fall back on generic machopic code.  */
  return machopic_legitimize_pic_address (orig, mode, reg);
}
13258
13259 /* This is just a placeholder to make linking work without having to
13260 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
13261 ever needed for Darwin (not too likely!) this would have to get a
13262 real definition. */
13263
void
toc_section ()
{
  /* Intentionally empty: Darwin has no AIX-style TOC; this exists only
     so references to toc_section link.  */
}
13268
13269 #endif /* TARGET_MACHO */
13270
13271 #if TARGET_ELF
13272 static unsigned int
13273 rs6000_elf_section_type_flags (decl, name, reloc)
13274 tree decl;
13275 const char *name;
13276 int reloc;
13277 {
13278 unsigned int flags
13279 = default_section_type_flags_1 (decl, name, reloc,
13280 flag_pic || DEFAULT_ABI == ABI_AIX);
13281
13282 if (TARGET_RELOCATABLE)
13283 flags |= SECTION_WRITE;
13284
13285 return flags;
13286 }
13287
13288 /* Record an element in the table of global constructors. SYMBOL is
13289 a SYMBOL_REF of the function to be called; PRIORITY is a number
13290 between 0 and MAX_INIT_PRIORITY.
13291
13292 This differs from default_named_section_asm_out_constructor in
13293 that we have special handling for -mrelocatable. */
13294
13295 static void
13296 rs6000_elf_asm_out_constructor (symbol, priority)
13297 rtx symbol;
13298 int priority;
13299 {
13300 const char *section = ".ctors";
13301 char buf[16];
13302
13303 if (priority != DEFAULT_INIT_PRIORITY)
13304 {
13305 sprintf (buf, ".ctors.%.5u",
13306 /* Invert the numbering so the linker puts us in the proper
13307 order; constructors are run from right to left, and the
13308 linker sorts in increasing order. */
13309 MAX_INIT_PRIORITY - priority);
13310 section = buf;
13311 }
13312
13313 named_section_flags (section, SECTION_WRITE);
13314 assemble_align (POINTER_SIZE);
13315
13316 if (TARGET_RELOCATABLE)
13317 {
13318 fputs ("\t.long (", asm_out_file);
13319 output_addr_const (asm_out_file, symbol);
13320 fputs (")@fixup\n", asm_out_file);
13321 }
13322 else
13323 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13324 }
13325
13326 static void
13327 rs6000_elf_asm_out_destructor (symbol, priority)
13328 rtx symbol;
13329 int priority;
13330 {
13331 const char *section = ".dtors";
13332 char buf[16];
13333
13334 if (priority != DEFAULT_INIT_PRIORITY)
13335 {
13336 sprintf (buf, ".dtors.%.5u",
13337 /* Invert the numbering so the linker puts us in the proper
13338 order; constructors are run from right to left, and the
13339 linker sorts in increasing order. */
13340 MAX_INIT_PRIORITY - priority);
13341 section = buf;
13342 }
13343
13344 named_section_flags (section, SECTION_WRITE);
13345 assemble_align (POINTER_SIZE);
13346
13347 if (TARGET_RELOCATABLE)
13348 {
13349 fputs ("\t.long (", asm_out_file);
13350 output_addr_const (asm_out_file, symbol);
13351 fputs (")@fixup\n", asm_out_file);
13352 }
13353 else
13354 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13355 }
13356 #endif
13357
13358 #if TARGET_XCOFF
13359 static void
13360 rs6000_xcoff_asm_globalize_label (stream, name)
13361 FILE *stream;
13362 const char *name;
13363 {
13364 fputs (GLOBAL_ASM_OP, stream);
13365 RS6000_OUTPUT_BASENAME (stream, name);
13366 putc ('\n', stream);
13367 }
13368
13369 static void
13370 rs6000_xcoff_asm_named_section (name, flags)
13371 const char *name;
13372 unsigned int flags;
13373 {
13374 int smclass;
13375 static const char * const suffix[3] = { "PR", "RO", "RW" };
13376
13377 if (flags & SECTION_CODE)
13378 smclass = 0;
13379 else if (flags & SECTION_WRITE)
13380 smclass = 2;
13381 else
13382 smclass = 1;
13383
13384 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
13385 (flags & SECTION_CODE) ? "." : "",
13386 name, suffix[smclass], flags & SECTION_ENTSIZE);
13387 }
13388
13389 static void
13390 rs6000_xcoff_select_section (decl, reloc, align)
13391 tree decl;
13392 int reloc;
13393 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
13394 {
13395 if (decl_readonly_section_1 (decl, reloc, 1))
13396 {
13397 if (TREE_PUBLIC (decl))
13398 read_only_data_section ();
13399 else
13400 read_only_private_data_section ();
13401 }
13402 else
13403 {
13404 if (TREE_PUBLIC (decl))
13405 data_section ();
13406 else
13407 private_data_section ();
13408 }
13409 }
13410
13411 static void
13412 rs6000_xcoff_unique_section (decl, reloc)
13413 tree decl;
13414 int reloc ATTRIBUTE_UNUSED;
13415 {
13416 const char *name;
13417
13418 /* Use select_section for private and uninitialized data. */
13419 if (!TREE_PUBLIC (decl)
13420 || DECL_COMMON (decl)
13421 || DECL_INITIAL (decl) == NULL_TREE
13422 || DECL_INITIAL (decl) == error_mark_node
13423 || (flag_zero_initialized_in_bss
13424 && initializer_zerop (DECL_INITIAL (decl))))
13425 return;
13426
13427 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
13428 name = (*targetm.strip_name_encoding) (name);
13429 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
13430 }
13431
13432 /* Select section for constant in constant pool.
13433
13434 On RS/6000, all constants are in the private read-only data area.
13435 However, if this is being placed in the TOC it must be output as a
13436 toc entry. */
13437
13438 static void
13439 rs6000_xcoff_select_rtx_section (mode, x, align)
13440 enum machine_mode mode;
13441 rtx x;
13442 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
13443 {
13444 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
13445 toc_section ();
13446 else
13447 read_only_private_data_section ();
13448 }
13449
13450 /* Remove any trailing [DS] or the like from the symbol name. */
13451
/* Remove any trailing [DS] or the like from the symbol name, along
   with a leading '*'.  The stripped copy is GC-allocated.

   Guard against NAME being empty or shorter than a "[XX]" suffix:
   the previous code read name[len - 1] unconditionally (out of bounds
   for an empty name) and computed len - 4, which underflows size_t
   when len < 4.  */
static const char *
rs6000_xcoff_strip_name_encoding (name)
     const char *name;
{
  size_t len;
  if (*name == '*')
    name++;
  len = strlen (name);
  /* A mapping-class suffix is always four characters ("[XX]").  */
  if (len >= 4 && name[len - 1] == ']')
    return ggc_alloc_string (name, len - 4);
  else
    return name;
}
13465
13466 /* Section attributes. AIX is always PIC. */
13467
13468 static unsigned int
13469 rs6000_xcoff_section_type_flags (decl, name, reloc)
13470 tree decl;
13471 const char *name;
13472 int reloc;
13473 {
13474 unsigned int align;
13475 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
13476
13477 /* Align to at least UNIT size. */
13478 if (flags & SECTION_CODE)
13479 align = MIN_UNITS_PER_WORD;
13480 else
13481 /* Increase alignment of large objects if not already stricter. */
13482 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
13483 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
13484 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
13485
13486 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
13487 }
13488
13489 static void
13490 rs6000_xcoff_encode_section_info (decl, first)
13491 tree decl;
13492 int first ATTRIBUTE_UNUSED;
13493 {
13494 if (TREE_CODE (decl) == FUNCTION_DECL
13495 && (*targetm.binds_local_p) (decl))
13496 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
13497 }
13498 #endif /* TARGET_XCOFF */
13499
13500 #if TARGET_MACHO
13501 /* Cross-module name binding. Darwin does not support overriding
13502 functions at dynamic-link time. */
13503
/* Cross-module name binding: Darwin does not support overriding
   functions at dynamic-link time, so use the default policy with
   shlib == 0 (treat definitions in this module as locally bound).  */
static bool
rs6000_binds_local_p (decl)
     tree decl;
{
  return default_binds_local_p_1 (decl, 0);
}
13511
13512 /* Compute a (partial) cost for rtx X. Return true if the complete
13513 cost has been computed, and false if subexpressions should be
13514 scanned. In either case, *TOTAL contains the cost result. */
13515
/* Compute a (partial) cost for rtx X of code CODE.  Return true when
   the complete cost has been computed (subexpressions need not be
   scanned), false otherwise; *TOTAL receives the cost either way.
   Costs are in COSTS_N_INSNS units, scaled per processor below.  */
static bool
rs6000_rtx_costs (x, code, outer_code, total)
     rtx x;
     int code, outer_code ATTRIBUTE_UNUSED;
     int *total;
{
  switch (code)
    {
      /* On the RS/6000, if it is valid in the insn, it is free.
	 So this always returns 0.  */
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case HIGH:
      *total = 0;
      return true;

    case PLUS:
      /* An addend that fits neither addi (low 16 bits) nor addis
	 (high 16 bits) alone takes two instructions.  */
      *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
		 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
					       + 0x8000) >= 0x10000)
		 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
		? COSTS_N_INSNS (2)
		: COSTS_N_INSNS (1));
      return true;

    case AND:
    case IOR:
    case XOR:
      /* Likewise: a mask with bits in both halves needs the -i and
	 -is forms of the logical op.  */
      *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
		 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
		 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
		? COSTS_N_INSNS (2)
		: COSTS_N_INSNS (1));
      return true;

    case MULT:
      if (optimize_size)
	{
	  *total = COSTS_N_INSNS (2);
	  return true;
	}
      /* Per-processor multiply latencies; several CPUs are faster when
	 multiplying by a small (mulli-able) constant.  */
      switch (rs6000_cpu)
	{
	case PROCESSOR_RIOS1:
	case PROCESSOR_PPC405:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? COSTS_N_INSNS (5)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
	  return true;

	case PROCESSOR_RS64A:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
	  return true;

	case PROCESSOR_RIOS2:
	case PROCESSOR_MPCCORE:
	case PROCESSOR_PPC604e:
	  *total = COSTS_N_INSNS (2);
	  return true;

	case PROCESSOR_PPC601:
	  *total = COSTS_N_INSNS (5);
	  return true;

	case PROCESSOR_PPC603:
	case PROCESSOR_PPC7400:
	case PROCESSOR_PPC750:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? COSTS_N_INSNS (5)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
	  return true;

	case PROCESSOR_PPC7450:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? COSTS_N_INSNS (4)
		    : COSTS_N_INSNS (3));
	  return true;

	case PROCESSOR_PPC403:
	case PROCESSOR_PPC604:
	case PROCESSOR_PPC8540:
	  *total = COSTS_N_INSNS (4);
	  return true;

	case PROCESSOR_PPC620:
	case PROCESSOR_PPC630:
	case PROCESSOR_POWER4:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
	  return true;

	default:
	  abort ();
	}

    case DIV:
    case MOD:
      /* Signed division by a power of two is a shift-and-fix sequence.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	{
	  *total = COSTS_N_INSNS (2);
	  return true;
	}
      /* FALLTHRU */

    case UDIV:
    case UMOD:
      /* Per-processor divide latencies.  */
      switch (rs6000_cpu)
	{
	case PROCESSOR_RIOS1:
	  *total = COSTS_N_INSNS (19);
	  return true;

	case PROCESSOR_RIOS2:
	  *total = COSTS_N_INSNS (13);
	  return true;

	case PROCESSOR_RS64A:
	  *total = (GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (65)
		    : COSTS_N_INSNS (67));
	  return true;

	case PROCESSOR_MPCCORE:
	  *total = COSTS_N_INSNS (6);
	  return true;

	case PROCESSOR_PPC403:
	  *total = COSTS_N_INSNS (33);
	  return true;

	case PROCESSOR_PPC405:
	  *total = COSTS_N_INSNS (35);
	  return true;

	case PROCESSOR_PPC601:
	  *total = COSTS_N_INSNS (36);
	  return true;

	case PROCESSOR_PPC603:
	  *total = COSTS_N_INSNS (37);
	  return true;

	case PROCESSOR_PPC604:
	case PROCESSOR_PPC604e:
	  *total = COSTS_N_INSNS (20);
	  return true;

	case PROCESSOR_PPC620:
	case PROCESSOR_PPC630:
	case PROCESSOR_POWER4:
	  *total = (GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (21)
		    : COSTS_N_INSNS (37));
	  return true;

	case PROCESSOR_PPC750:
	case PROCESSOR_PPC8540:
	case PROCESSOR_PPC7400:
	  *total = COSTS_N_INSNS (19);
	  return true;

	case PROCESSOR_PPC7450:
	  *total = COSTS_N_INSNS (23);
	  return true;

	default:
	  abort ();
	}

    case FFS:
      *total = COSTS_N_INSNS (4);
      return true;

    case MEM:
      /* MEM should be slightly more expensive than (plus (reg) (const)) */
      *total = 5;
      return true;

    default:
      /* Let the caller scan subexpressions.  */
      return false;
    }
}
13715
13716 /* A C expression returning the cost of moving data from a register of class
13717 CLASS1 to one of CLASS2. */
13718
13719 int
13720 rs6000_register_move_cost (mode, from, to)
13721 enum machine_mode mode;
13722 enum reg_class from, to;
13723 {
13724 /* Moves from/to GENERAL_REGS. */
13725 if (reg_classes_intersect_p (to, GENERAL_REGS)
13726 || reg_classes_intersect_p (from, GENERAL_REGS))
13727 {
13728 if (! reg_classes_intersect_p (to, GENERAL_REGS))
13729 from = to;
13730
13731 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
13732 return (rs6000_memory_move_cost (mode, from, 0)
13733 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
13734
13735 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift...*/
13736 else if (from == CR_REGS)
13737 return 4;
13738
13739 else
13740 /* A move will cost one instruction per GPR moved. */
13741 return 2 * HARD_REGNO_NREGS (0, mode);
13742 }
13743
13744 /* Moving between two similar registers is just one instruction. */
13745 else if (reg_classes_intersect_p (to, from))
13746 return mode == TFmode ? 4 : 2;
13747
13748 /* Everything else has to go through GENERAL_REGS. */
13749 else
13750 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
13751 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
13752 }
13753
/* A C expression returning the cost of moving data of MODE from a register to
   or from memory.  */
13756
13757 int
13758 rs6000_memory_move_cost (mode, class, in)
13759 enum machine_mode mode;
13760 enum reg_class class;
13761 int in ATTRIBUTE_UNUSED;
13762 {
13763 if (reg_classes_intersect_p (class, GENERAL_REGS))
13764 return 4 * HARD_REGNO_NREGS (0, mode);
13765 else if (reg_classes_intersect_p (class, FLOAT_REGS))
13766 return 4 * HARD_REGNO_NREGS (32, mode);
13767 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
13768 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
13769 else
13770 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
13771 }
13772
13773 /* Return true if TYPE is of type __ev64_opaque__. */
13774
13775 static bool
13776 is_ev64_opaque_type (type)
13777 tree type;
13778 {
13779 return (TARGET_SPE
13780 && TREE_CODE (type) == VECTOR_TYPE
13781 && TYPE_NAME (type)
13782 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
13783 && DECL_NAME (TYPE_NAME (type))
13784 && strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))),
13785 "__ev64_opaque__") == 0);
13786 }
13787
13788 static rtx
13789 rs6000_dwarf_register_span (reg)
13790 rtx reg;
13791 {
13792 unsigned regno;
13793
13794 if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
13795 return NULL_RTX;
13796
13797 regno = REGNO (reg);
13798
13799 /* The duality of the SPE register size wreaks all kinds of havoc.
13800 This is a way of distinguishing r0 in 32-bits from r0 in
13801 64-bits. */
13802 return
13803 gen_rtx_PARALLEL (VOIDmode,
13804 BYTES_BIG_ENDIAN
13805 ? gen_rtvec (2,
13806 gen_rtx_REG (SImode, regno + 1200),
13807 gen_rtx_REG (SImode, regno))
13808 : gen_rtvec (2,
13809 gen_rtx_REG (SImode, regno),
13810 gen_rtx_REG (SImode, regno + 1200)));
13811 }
13812
13813 #include "gt-rs6000.h"