]> gcc.gnu.org Git - gcc.git/blob - gcc/config/rs6000/rs6000.c
2003-02-20 Aldy Hernandez <aldyh@redhat.com>
[gcc.git] / gcc / config / rs6000 / rs6000.c
1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
5
6 This file is part of GNU CC.
7
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
12
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
34 #include "flags.h"
35 #include "recog.h"
36 #include "obstack.h"
37 #include "tree.h"
38 #include "expr.h"
39 #include "optabs.h"
40 #include "except.h"
41 #include "function.h"
42 #include "output.h"
43 #include "basic-block.h"
44 #include "integrate.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "hashtab.h"
48 #include "tm_p.h"
49 #include "target.h"
50 #include "target-def.h"
51 #include "langhooks.h"
52 #include "reload.h"
53
/* Some subtargets do not define TARGET_NO_PROTOTYPE; default it off.  */
#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

/* Fully parenthesized min/max.  Each argument is evaluated twice, so
   arguments must be free of side effects.  */
#define min(A,B)	((A) < (B) ? (A) : (B))
#define max(A,B)	((A) > (B) ? (A) : (B))
60
/* Target cpu type, used to select scheduling and code generation
   defaults; set from the rs6000_select table below by
   rs6000_override_options.  */

enum processor_type rs6000_cpu;

/* The three sources of a CPU selection, in increasing priority order:
   the configure-time default, -mcpu= (sets tuning and architecture),
   and -mtune= (tuning only).  The `string' slots are filled in from
   the command line before rs6000_override_options scans the table.  */
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch		name,			tune	arch */
  { (const char *)0,	"--with-cpu=",		1,	1 },
  { (const char *)0,	"-mcpu=",		1,	1 },
  { (const char *)0,	"-mtune=",		1,	0 },
};
71
/* Size of long double: the raw -mlong-double-N switch text, and the
   parsed size in bits (64 or 128 -- see rs6000_override_options).  */
const char *rs6000_long_double_size_string;
int rs6000_long_double_type_size;

/* Whether -mabi=altivec has appeared.  */
int rs6000_altivec_abi;

/* Whether VRSAVE instructions should be generated.  */
int rs6000_altivec_vrsave;

/* String from -mvrsave= option.  */
const char *rs6000_altivec_vrsave_string;

/* Nonzero if we want SPE ABI extensions.  */
int rs6000_spe_abi;

/* Whether isel instructions should be generated.  */
int rs6000_isel;

/* Nonzero if we have FPRs.  */
int rs6000_fprs = 1;

/* String from -misel=.  */
const char *rs6000_isel_string;

/* Set to nonzero once AIX common-mode calls have been defined.  */
static GTY(()) int common_mode_defined;

/* Private copy of original value of flag_pic for ABI_AIX:
   rs6000_override_options stashes flag_pic here and clears flag_pic
   itself when targeting AIX.  */
static int rs6000_flag_pic;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  rs6000_compare_fp_p presumably flags a floating-point
   comparison -- confirm at the setters elsewhere in this file.  */
rtx rs6000_compare_op0, rs6000_compare_op1;
int rs6000_compare_fp_p;

/* Label number of label created for -mrelocatable, to call to so we can
   get the address of the GOT section.  */
int rs6000_pic_labelno;
111
#ifdef USING_ELFOS_H
/* Which abi to adhere to.  */
const char *rs6000_abi_name = RS6000_ABI_NAME;

/* Semantics of the small data area.  */
enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;

/* Which small data model to use (text of the -msdata= switch).  */
const char *rs6000_sdata_name = (char *)0;

/* Counter for labels which are to be placed in .fixup.  */
int fixuplabelno = 0;
#endif
125
/* ABI enumeration available for subtarget to use.  */
enum rs6000_abi rs6000_current_abi;

/* ABI string from -mabi= option.  */
const char *rs6000_abi_string;

/* Debug flags, decoded from -mdebug- by rs6000_override_options.  */
const char *rs6000_debug_name;
int rs6000_debug_stack;		/* debug stack applications */
int rs6000_debug_arg;		/* debug argument handling */

/* Traceback table style: raw -mtraceback= text and its decoded value.  */
const char *rs6000_traceback_name;
static enum {
  traceback_default = 0,
  traceback_none,
  traceback_part,
  traceback_full
} rs6000_traceback;

/* Flag to say the TOC is initialized, and the buffer holding the
   generated TOC label name.  */
int toc_initialized;
char toc_label_name[10];

/* Alias set for saves and restores from the rs6000 stack.  */
static int rs6000_sr_alias_set;

/* Call distance, overridden by -mlongcall and #pragma longcall(1).
   The only place that looks at this is rs6000_set_default_type_attributes;
   everywhere else should rely on the presence or absence of a longcall
   attribute on the function declaration.  */
int rs6000_default_long_calls;
const char *rs6000_longcall_switch;
158
/* Description of one built-in function: the target-flag mask under
   which it is enabled, the insn code used to expand it, its
   user-visible name, and its rs6000_builtins enum code.  */
struct builtin_description
{
  /* mask is not const because we're going to alter it below.  This
     nonsense will go away when we rewrite the -march infrastructure
     to give us more target flag bits.  */
  unsigned int mask;

  /* Insn code used to expand this builtin.  */
  const enum insn_code icode;

  /* User-visible name of the builtin.  */
  const char *const name;

  /* Enum value identifying this builtin.  */
  const enum rs6000_builtins code;
};
169
170 static bool rs6000_function_ok_for_sibcall PARAMS ((tree, tree));
171 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
172 static void validate_condition_mode
173 PARAMS ((enum rtx_code, enum machine_mode));
174 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
175 static void rs6000_maybe_dead PARAMS ((rtx));
176 static void rs6000_emit_stack_tie PARAMS ((void));
177 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
178 static void emit_frame_save PARAMS ((rtx, rtx, enum machine_mode,
179 unsigned int, int, int));
180 static rtx gen_frame_mem_offset PARAMS ((enum machine_mode, rtx, int));
181 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
182 static unsigned rs6000_hash_constant PARAMS ((rtx));
183 static unsigned toc_hash_function PARAMS ((const void *));
184 static int toc_hash_eq PARAMS ((const void *, const void *));
185 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
186 static struct machine_function * rs6000_init_machine_status PARAMS ((void));
187 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
188 #ifdef HAVE_GAS_HIDDEN
189 static void rs6000_assemble_visibility PARAMS ((tree, int));
190 #endif
191 static int rs6000_ra_ever_killed PARAMS ((void));
192 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
193 const struct attribute_spec rs6000_attribute_table[];
194 static void rs6000_set_default_type_attributes PARAMS ((tree));
195 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
196 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
197 static void rs6000_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT,
198 HOST_WIDE_INT, tree));
199 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
200 HOST_WIDE_INT, HOST_WIDE_INT));
201 #if TARGET_ELF
202 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
203 int));
204 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
205 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
206 static void rs6000_elf_select_section PARAMS ((tree, int,
207 unsigned HOST_WIDE_INT));
208 static void rs6000_elf_unique_section PARAMS ((tree, int));
209 static void rs6000_elf_select_rtx_section PARAMS ((enum machine_mode, rtx,
210 unsigned HOST_WIDE_INT));
211 static void rs6000_elf_encode_section_info PARAMS ((tree, int))
212 ATTRIBUTE_UNUSED;
213 static const char *rs6000_elf_strip_name_encoding PARAMS ((const char *));
214 static bool rs6000_elf_in_small_data_p PARAMS ((tree));
215 #endif
216 #if TARGET_XCOFF
217 static void rs6000_xcoff_asm_globalize_label PARAMS ((FILE *, const char *));
218 static void rs6000_xcoff_asm_named_section PARAMS ((const char *, unsigned int));
219 static void rs6000_xcoff_select_section PARAMS ((tree, int,
220 unsigned HOST_WIDE_INT));
221 static void rs6000_xcoff_unique_section PARAMS ((tree, int));
222 static void rs6000_xcoff_select_rtx_section PARAMS ((enum machine_mode, rtx,
223 unsigned HOST_WIDE_INT));
224 static const char * rs6000_xcoff_strip_name_encoding PARAMS ((const char *));
225 static unsigned int rs6000_xcoff_section_type_flags PARAMS ((tree, const char *, int));
226 #endif
227 static void rs6000_xcoff_encode_section_info PARAMS ((tree, int))
228 ATTRIBUTE_UNUSED;
229 static bool rs6000_binds_local_p PARAMS ((tree));
230 static int rs6000_use_dfa_pipeline_interface PARAMS ((void));
231 static int rs6000_multipass_dfa_lookahead PARAMS ((void));
232 static int rs6000_variable_issue PARAMS ((FILE *, int, rtx, int));
233 static bool rs6000_rtx_costs PARAMS ((rtx, int, int, int *));
234 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
235 static int rs6000_adjust_priority PARAMS ((rtx, int));
236 static int rs6000_issue_rate PARAMS ((void));
237
238 static void rs6000_init_builtins PARAMS ((void));
239 static rtx rs6000_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
240 static rtx rs6000_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
241 static rtx rs6000_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
242 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
243 static void altivec_init_builtins PARAMS ((void));
244 static void rs6000_common_init_builtins PARAMS ((void));
245
246 static void enable_mask_for_builtins PARAMS ((struct builtin_description *,
247 int, enum rs6000_builtins,
248 enum rs6000_builtins));
249 static void spe_init_builtins PARAMS ((void));
250 static rtx spe_expand_builtin PARAMS ((tree, rtx, bool *));
251 static rtx spe_expand_predicate_builtin PARAMS ((enum insn_code, tree, rtx));
252 static rtx spe_expand_evsel_builtin PARAMS ((enum insn_code, tree, rtx));
253 static int rs6000_emit_int_cmove PARAMS ((rtx, rtx, rtx, rtx));
254
255 static rtx altivec_expand_builtin PARAMS ((tree, rtx, bool *));
256 static rtx altivec_expand_ld_builtin PARAMS ((tree, rtx, bool *));
257 static rtx altivec_expand_st_builtin PARAMS ((tree, rtx, bool *));
258 static rtx altivec_expand_dst_builtin PARAMS ((tree, rtx, bool *));
259 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
260 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
261 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
262 static void rs6000_parse_abi_options PARAMS ((void));
263 static void rs6000_parse_vrsave_option PARAMS ((void));
264 static void rs6000_parse_isel_option PARAMS ((void));
265 static int first_altivec_reg_to_save PARAMS ((void));
266 static unsigned int compute_vrsave_mask PARAMS ((void));
267 static void is_altivec_return_reg PARAMS ((rtx, void *));
268 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
269 static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT));
270 static int easy_vector_constant PARAMS ((rtx));
271 static bool is_ev64_opaque_type PARAMS ((tree));
272
/* Hash table stuff for keeping track of TOC entries.  */

struct toc_hash_struct GTY(())
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  /* Machine mode of KEY.  */
  enum machine_mode key_mode;
  /* Label number associated with this TOC entry.  */
  int labelno;
};

/* The hash table of TOC entries; GC-traced via the param_is marker.  */
static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
285 \f
/* Default register names.  Groups parallel alt_reg_names below:
   32 GPRs, 32 FPRs, special registers, 8 CRs, XER, AltiVec, SPE.  */
char rs6000_reg_names[][8] =
{
  /* GPRs 0-31.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
  /* FPRs 0-31.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
  /* MQ, link register, count register, argument pointer.  */
     "mq", "lr", "ctr","ap",
  /* Condition registers 0-7, then XER.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
    "xer",
  /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "vrsave", "vscr",
  /* SPE registers.  */
     "spe_acc", "spefscr"
};
309
#ifdef TARGET_REGNAMES
/* Alternate (prefixed) register names.  Must parallel rs6000_reg_names
   entry for entry: rs6000_override_options memcpy's this whole array
   over rs6000_reg_names when -mregnames is in effect.  */
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
  /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6", "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr"
};
#endif
334 \f
/* Subtargets without strict-alignment support leave MASK_STRICT_ALIGN
   undefined; make it a no-op mask there.  */
#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif

/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
341 \f
/* Initialize the GCC target structure: override the default hook
   entries with the rs6000 implementations declared above, then emit
   the targetm definition at the end.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes

#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP

/* Default unaligned ops are only provided for ELF.  Find the ops needed
   for non-ELF systems.  */
#ifndef OBJECT_FORMAT_ELF
#if TARGET_XCOFF
/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
   64-bit targets.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
#else
/* For Darwin.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#endif
#endif

/* This hook deals with fixups for relocatable code and DI-mode objects
   in 64-bit code.  */
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER rs6000_assemble_integer

#ifdef HAVE_GAS_HIDDEN
#undef TARGET_ASM_ASSEMBLE_VISIBILITY
#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
#endif

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue

/* Scheduler hooks.  */
#undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
#define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_multipass_dfa_lookahead
#undef TARGET_SCHED_VARIABLE_ISSUE
#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority

/* Builtin handling hooks.  */
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS rs6000_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin

#undef TARGET_BINDS_LOCAL_P
#define TARGET_BINDS_LOCAL_P rs6000_binds_local_p

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk

#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS rs6000_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_0

#undef TARGET_VECTOR_OPAQUE_P
#define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type

/* The target structure, filled in from the hook macros above.  */
struct gcc_target targetm = TARGET_INITIALIZER;
428 \f
/* Override command line options.  Mostly we process the processor
   type and sometimes adjust other TARGET_ options.

   DEFAULT_CPU is the configure-time default processor name (may be
   null); explicit -mcpu=/-mtune= switches take priority via the
   rs6000_select table.  */

void
rs6000_override_options (default_cpu)
     const char *default_cpu;
{
  size_t i, j;
  struct rs6000_cpu_select *ptr;

  /* Simplify the entries below by making a mask for any POWER
     variant and any PowerPC variant.  */

#define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
#define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
		       | MASK_PPC_GFXOPT | MASK_POWERPC64)
#define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)

  /* Per-processor table of target flags to force on and force off.  */
  static struct ptt
    {
      const char *const name;		/* Canonical processor name.  */
      const enum processor_type processor; /* Processor type enum value.  */
      const int target_enable;	/* Target flags to enable.  */
      const int target_disable;	/* Target flags to disable.  */
    } const processor_target_table[]
      = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_MASKS},
	 {"power", PROCESSOR_POWER,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"power2", PROCESSOR_POWER,
	    MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
	    POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"power3", PROCESSOR_PPC630,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"power4", PROCESSOR_POWER4,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"powerpc", PROCESSOR_POWERPC,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"powerpc64", PROCESSOR_POWERPC64,
	    MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS},
	 {"rios", PROCESSOR_RIOS1,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rios1", PROCESSOR_RIOS1,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rsc", PROCESSOR_PPC601,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rsc1", PROCESSOR_PPC601,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rios2", PROCESSOR_RIOS2,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
	    POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rs64a", PROCESSOR_RS64A,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS},
	 {"401", PROCESSOR_PPC403,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"403", PROCESSOR_PPC403,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"405", PROCESSOR_PPC405,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"405f", PROCESSOR_PPC405,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"505", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"601", PROCESSOR_PPC601,
	    MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"602", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"603", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"603e", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"ec603e", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"604", PROCESSOR_PPC604,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"604e", PROCESSOR_PPC604e,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"620", PROCESSOR_PPC620,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"630", PROCESSOR_PPC630,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"740", PROCESSOR_PPC750,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"750", PROCESSOR_PPC750,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"7400", PROCESSOR_PPC7400,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"7450", PROCESSOR_PPC7450,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"8540", PROCESSOR_PPC8540,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"801", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"821", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"823", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"860", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};

  const size_t ptt_size = ARRAY_SIZE (processor_target_table);

  /* Save current -mmultiple/-mno-multiple status.  */
  int multiple = TARGET_MULTIPLE;
  /* Save current -mstring/-mno-string status.  */
  int string = TARGET_STRING;

  /* Identify the processor type.  */
  rs6000_select[0].string = default_cpu;
  rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;

  /* Scan the selection sources in priority order; later entries
     (-mcpu=, -mtune=) override the configure-time default.  */
  for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
    {
      ptr = &rs6000_select[i];
      if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	{
	  for (j = 0; j < ptt_size; j++)
	    if (! strcmp (ptr->string, processor_target_table[j].name))
	      {
		if (ptr->set_tune_p)
		  rs6000_cpu = processor_target_table[j].processor;

		if (ptr->set_arch_p)
		  {
		    target_flags |= processor_target_table[j].target_enable;
		    target_flags &= ~processor_target_table[j].target_disable;
		  }
		break;
	      }

	  /* Fell off the end of the table: unknown CPU name.  */
	  if (j == ptt_size)
	    error ("bad value (%s) for %s switch", ptr->string, ptr->name);
	}
    }

  /* The 8540 gets isel enabled by default.  */
  if (rs6000_cpu == PROCESSOR_PPC8540)
    rs6000_isel = 1;

  /* If we are optimizing big endian systems for space, use the load/store
     multiple and string instructions.  */
  if (BYTES_BIG_ENDIAN && optimize_size)
    target_flags |= MASK_MULTIPLE | MASK_STRING;

  /* If -mmultiple or -mno-multiple was explicitly used, don't
     override with the processor default.  */
  if ((target_flags_explicit & MASK_MULTIPLE) != 0)
    target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;

  /* If -mstring or -mno-string was explicitly used, don't override
     with the processor default.  */
  if ((target_flags_explicit & MASK_STRING) != 0)
    target_flags = (target_flags & ~MASK_STRING) | string;

  /* Don't allow -mmultiple or -mstring on little endian systems
     unless the cpu is a 750, because the hardware doesn't support the
     instructions used in little endian mode, and causes an alignment
     trap.  The 750 does not cause an alignment trap (except when the
     target is unaligned).  */

  if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
    {
      if (TARGET_MULTIPLE)
	{
	  target_flags &= ~MASK_MULTIPLE;
	  if ((target_flags_explicit & MASK_MULTIPLE) != 0)
	    warning ("-mmultiple is not supported on little endian systems");
	}

      if (TARGET_STRING)
	{
	  target_flags &= ~MASK_STRING;
	  if ((target_flags_explicit & MASK_STRING) != 0)
	    warning ("-mstring is not supported on little endian systems");
	}
    }

  /* On AIX, stash the user's PIC setting in rs6000_flag_pic and clear
     flag_pic itself.  */
  if (flag_pic != 0 && DEFAULT_ABI == ABI_AIX)
    {
      rs6000_flag_pic = flag_pic;
      flag_pic = 0;
    }

  /* For Darwin, always silently make -fpic and -fPIC identical.  */
  if (flag_pic == 1 && DEFAULT_ABI == ABI_DARWIN)
    flag_pic = 2;

  /* Set debug flags.  */
  if (rs6000_debug_name)
    {
      if (! strcmp (rs6000_debug_name, "all"))
	rs6000_debug_stack = rs6000_debug_arg = 1;
      else if (! strcmp (rs6000_debug_name, "stack"))
	rs6000_debug_stack = 1;
      else if (! strcmp (rs6000_debug_name, "arg"))
	rs6000_debug_arg = 1;
      else
	error ("unknown -mdebug-%s switch", rs6000_debug_name);
    }

  /* -mtraceback=: strncmp so that "partial" matches via "part" and
     both "no" and "none" match via "no".  */
  if (rs6000_traceback_name)
    {
      if (! strncmp (rs6000_traceback_name, "full", 4))
	rs6000_traceback = traceback_full;
      else if (! strncmp (rs6000_traceback_name, "part", 4))
	rs6000_traceback = traceback_part;
      else if (! strncmp (rs6000_traceback_name, "no", 2))
	rs6000_traceback = traceback_none;
      else
	error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
	       rs6000_traceback_name);
    }

  /* Set size of long double: 64 bits by default, 64 or 128 via
     -mlong-double-N.  */
  rs6000_long_double_type_size = 64;
  if (rs6000_long_double_size_string)
    {
      char *tail;
      int size = strtol (rs6000_long_double_size_string, &tail, 10);
      if (*tail != '\0' || (size != 64 && size != 128))
	error ("Unknown switch -mlong-double-%s",
	       rs6000_long_double_size_string);
      else
	rs6000_long_double_type_size = size;
    }

  /* Handle -mabi= options.  */
  rs6000_parse_abi_options ();

  /* Handle -mvrsave= option.  */
  rs6000_parse_vrsave_option ();

  /* Handle -misel= option.  */
  rs6000_parse_isel_option ();

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
  SUBSUBTARGET_OVERRIDE_OPTIONS;
#endif

  /* Handle -m(no-)longcall option.  This is a bit of a cheap hack,
     using TARGET_OPTIONS to handle a toggle switch, but we're out of
     bits in target_flags so TARGET_SWITCHES cannot be used.
     Assumption here is that rs6000_longcall_switch points into the
     text of the complete option, rather than being a copy, so we can
     scan back for the presence or absence of the no- modifier.  */
  if (rs6000_longcall_switch)
    {
      const char *base = rs6000_longcall_switch;
      while (base[-1] != 'm') base--;

      /* Anything left after the matched option text means garbage
	 was appended to the switch.  */
      if (*rs6000_longcall_switch != '\0')
	error ("invalid option `%s'", base);
      /* BASE now points just past the 'm'; 'n' means the no- form.  */
      rs6000_default_long_calls = (base[0] != 'n');
    }

#ifdef TARGET_REGNAMES
  /* If the user desires alternate register names, copy in the
     alternate names now.  */
  if (TARGET_REGNAMES)
    memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
#endif

  /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
     If -maix-struct-return or -msvr4-struct-return was explicitly
     used, don't override with the ABI default.  */
  if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
    {
      if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
	target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
      else
	target_flags |= MASK_AIX_STRUCT_RET;
    }

  /* 128-bit long double on AIX and Darwin uses the IBM extended
     format.  */
  if (TARGET_LONG_DOUBLE_128
      && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
    real_format_for_mode[TFmode - QFmode] = &ibm_extended_format;

  /* Allocate an alias set for register saves & restores from stack.  */
  rs6000_sr_alias_set = new_alias_set ();

  if (TARGET_TOC)
    ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);

  /* We can only guarantee the availability of DI pseudo-ops when
     assembling for 64-bit targets.  */
  if (!TARGET_64BIT)
    {
      targetm.asm_out.aligned_op.di = NULL;
      targetm.asm_out.unaligned_op.di = NULL;
    }

  /* Set maximum branch target alignment at two instructions, eight bytes.  */
  align_jumps_max_skip = 8;
  align_loops_max_skip = 8;

  /* Arrange to save and restore machine status around nested functions.  */
  init_machine_status = rs6000_init_machine_status;
}
762
763 /* Handle -misel= option. */
764 static void
765 rs6000_parse_isel_option ()
766 {
767 if (rs6000_isel_string == 0)
768 return;
769 else if (! strcmp (rs6000_isel_string, "yes"))
770 rs6000_isel = 1;
771 else if (! strcmp (rs6000_isel_string, "no"))
772 rs6000_isel = 0;
773 else
774 error ("unknown -misel= option specified: '%s'",
775 rs6000_isel_string);
776 }
777
778 /* Handle -mvrsave= options. */
779 static void
780 rs6000_parse_vrsave_option ()
781 {
782 /* Generate VRSAVE instructions by default. */
783 if (rs6000_altivec_vrsave_string == 0
784 || ! strcmp (rs6000_altivec_vrsave_string, "yes"))
785 rs6000_altivec_vrsave = 1;
786 else if (! strcmp (rs6000_altivec_vrsave_string, "no"))
787 rs6000_altivec_vrsave = 0;
788 else
789 error ("unknown -mvrsave= option specified: '%s'",
790 rs6000_altivec_vrsave_string);
791 }
792
793 /* Handle -mabi= options. */
794 static void
795 rs6000_parse_abi_options ()
796 {
797 if (rs6000_abi_string == 0)
798 return;
799 else if (! strcmp (rs6000_abi_string, "altivec"))
800 rs6000_altivec_abi = 1;
801 else if (! strcmp (rs6000_abi_string, "no-altivec"))
802 rs6000_altivec_abi = 0;
803 else if (! strcmp (rs6000_abi_string, "spe"))
804 {
805 rs6000_spe_abi = 1;
806 if (!TARGET_SPE_ABI)
807 error ("not configured for ABI: '%s'", rs6000_abi_string);
808 }
809
810 else if (! strcmp (rs6000_abi_string, "no-spe"))
811 rs6000_spe_abi = 0;
812 else
813 error ("unknown ABI specified: '%s'", rs6000_abi_string);
814 }
815
/* Optimization-level option hook.  Intentionally empty: this port
   makes no per-level adjustments here.  */
void
optimization_options (level, size)
     int level ATTRIBUTE_UNUSED;
     int size ATTRIBUTE_UNUSED;
{
}
822 \f
/* Do anything needed at the start of the asm file.  With
   -fverbose-asm, emit a one-line comment listing the rs6000/powerpc
   option switches in effect.  */

void
rs6000_file_start (file, default_cpu)
     FILE *file;
     const char *default_cpu;
{
  size_t i;
  char buffer[80];
  const char *start = buffer;
  struct rs6000_cpu_select *ptr;

  if (flag_verbose_asm)
    {
      sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
      rs6000_select[0].string = default_cpu;

      for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
	{
	  ptr = &rs6000_select[i];
	  if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	    {
	      /* The first fprintf emits the header stashed in BUFFER;
		 START is then "" so the header prints only once.  */
	      fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
	      start = "";
	    }
	}

#ifdef USING_ELFOS_H
      switch (rs6000_sdata)
	{
	case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
	case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
	case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
	case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
	}

      if (rs6000_sdata && g_switch_value)
	{
	  fprintf (file, "%s -G %d", start, g_switch_value);
	  start = "";
	}
#endif

      /* Terminate the comment line only if something was printed.  */
      if (*start == '\0')
	putc ('\n', file);
    }
}
870 \f
871 /* Return nonzero if this function is known to have a null epilogue. */
872
873 int
874 direct_return ()
875 {
876 if (reload_completed)
877 {
878 rs6000_stack_t *info = rs6000_stack_info ();
879
880 if (info->first_gp_reg_save == 32
881 && info->first_fp_reg_save == 64
882 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
883 && ! info->lr_save_p
884 && ! info->cr_save_p
885 && info->vrsave_mask == 0
886 && ! info->push_p)
887 return 1;
888 }
889
890 return 0;
891 }
892
/* Returns 1 always.  Trivial predicate for operands on which a
   pattern places no constraint at all.  */

int
any_operand (op, mode)
     rtx op ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return 1;
}
902
903 /* Returns 1 if op is the count register. */
904 int
905 count_register_operand (op, mode)
906 rtx op;
907 enum machine_mode mode ATTRIBUTE_UNUSED;
908 {
909 if (GET_CODE (op) != REG)
910 return 0;
911
912 if (REGNO (op) == COUNT_REGISTER_REGNUM)
913 return 1;
914
915 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
916 return 1;
917
918 return 0;
919 }
920
921 /* Returns 1 if op is an altivec register. */
922 int
923 altivec_register_operand (op, mode)
924 rtx op;
925 enum machine_mode mode ATTRIBUTE_UNUSED;
926 {
927
928 return (register_operand (op, mode)
929 && (GET_CODE (op) != REG
930 || REGNO (op) > FIRST_PSEUDO_REGISTER
931 || ALTIVEC_REGNO_P (REGNO (op))));
932 }
933
934 int
935 xer_operand (op, mode)
936 rtx op;
937 enum machine_mode mode ATTRIBUTE_UNUSED;
938 {
939 if (GET_CODE (op) != REG)
940 return 0;
941
942 if (XER_REGNO_P (REGNO (op)))
943 return 1;
944
945 return 0;
946 }
947
948 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
949 by such constants completes more quickly. */
950
951 int
952 s8bit_cint_operand (op, mode)
953 rtx op;
954 enum machine_mode mode ATTRIBUTE_UNUSED;
955 {
956 return ( GET_CODE (op) == CONST_INT
957 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
958 }
959
960 /* Return 1 if OP is a constant that can fit in a D field. */
961
962 int
963 short_cint_operand (op, mode)
964 rtx op;
965 enum machine_mode mode ATTRIBUTE_UNUSED;
966 {
967 return (GET_CODE (op) == CONST_INT
968 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
969 }
970
971 /* Similar for an unsigned D field. */
972
973 int
974 u_short_cint_operand (op, mode)
975 rtx op;
976 enum machine_mode mode ATTRIBUTE_UNUSED;
977 {
978 return (GET_CODE (op) == CONST_INT
979 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
980 }
981
982 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
983
984 int
985 non_short_cint_operand (op, mode)
986 rtx op;
987 enum machine_mode mode ATTRIBUTE_UNUSED;
988 {
989 return (GET_CODE (op) == CONST_INT
990 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
991 }
992
993 /* Returns 1 if OP is a CONST_INT that is a positive value
994 and an exact power of 2. */
995
996 int
997 exact_log2_cint_operand (op, mode)
998 rtx op;
999 enum machine_mode mode ATTRIBUTE_UNUSED;
1000 {
1001 return (GET_CODE (op) == CONST_INT
1002 && INTVAL (op) > 0
1003 && exact_log2 (INTVAL (op)) >= 0);
1004 }
1005
/* Returns 1 if OP is a register that is not special (i.e., not MQ,
   ctr, or lr).  */

int
gpc_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Any pseudo or non-REG that register_operand accepts is fine.  For
     hard registers, accept numbers below MQ_REGNO, and numbers from
     ARG_POINTER_REGNUM up except the XER -- i.e. reject exactly the
     range [MQ_REGNO, ARG_POINTER_REGNUM) (MQ, LR, CTR per the comment
     above) plus the XER.  */
  return (register_operand (op, mode)
	  && (GET_CODE (op) != REG
	      || (REGNO (op) >= ARG_POINTER_REGNUM
		  && !XER_REGNO_P (REGNO (op)))
	      || REGNO (op) < MQ_REGNO));
}
1020
1021 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1022 CR field. */
1023
1024 int
1025 cc_reg_operand (op, mode)
1026 rtx op;
1027 enum machine_mode mode;
1028 {
1029 return (register_operand (op, mode)
1030 && (GET_CODE (op) != REG
1031 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1032 || CR_REGNO_P (REGNO (op))));
1033 }
1034
1035 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1036 CR field that isn't CR0. */
1037
1038 int
1039 cc_reg_not_cr0_operand (op, mode)
1040 rtx op;
1041 enum machine_mode mode;
1042 {
1043 return (register_operand (op, mode)
1044 && (GET_CODE (op) != REG
1045 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1046 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1047 }
1048
1049 /* Returns 1 if OP is either a constant integer valid for a D-field or
1050 a non-special register. If a register, it must be in the proper
1051 mode unless MODE is VOIDmode. */
1052
1053 int
1054 reg_or_short_operand (op, mode)
1055 rtx op;
1056 enum machine_mode mode;
1057 {
1058 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1059 }
1060
1061 /* Similar, except check if the negation of the constant would be
1062 valid for a D-field. */
1063
1064 int
1065 reg_or_neg_short_operand (op, mode)
1066 rtx op;
1067 enum machine_mode mode;
1068 {
1069 if (GET_CODE (op) == CONST_INT)
1070 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1071
1072 return gpc_reg_operand (op, mode);
1073 }
1074
1075 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1076 a non-special register. If a register, it must be in the proper
1077 mode unless MODE is VOIDmode. */
1078
1079 int
1080 reg_or_aligned_short_operand (op, mode)
1081 rtx op;
1082 enum machine_mode mode;
1083 {
1084 if (gpc_reg_operand (op, mode))
1085 return 1;
1086 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1087 return 1;
1088
1089 return 0;
1090 }
1091
1092
1093 /* Return 1 if the operand is either a register or an integer whose
1094 high-order 16 bits are zero. */
1095
1096 int
1097 reg_or_u_short_operand (op, mode)
1098 rtx op;
1099 enum machine_mode mode;
1100 {
1101 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1102 }
1103
1104 /* Return 1 is the operand is either a non-special register or ANY
1105 constant integer. */
1106
1107 int
1108 reg_or_cint_operand (op, mode)
1109 rtx op;
1110 enum machine_mode mode;
1111 {
1112 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1113 }
1114
/* Return 1 is the operand is either a non-special register or ANY
   32-bit signed constant integer.  */

int
reg_or_arith_cint_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* When HOST_WIDE_INT is only 32 bits, every CONST_INT already fits;
     on wider hosts, bias by 0x80000000 and compare unsigned so that
     exactly the values in [-2^31, 2^31) are accepted.  */
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT != 32
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
		  < (unsigned HOST_WIDE_INT) 0x100000000ll)
#endif
	      ));
}
1131
/* Return 1 is the operand is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit addition.  */

int
reg_or_add_cint64_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* The window is [-0x80008000, 0x7fff7fff]: a value whose high and
     low halves each fit a signed 16-bit immediate, so it can be added
     with an addis/addi pair without carry into the upper 32 bits.  */
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT == 32
	      /* 32-bit host: only the upper bound needs checking.  */
	      && INTVAL (op) < 0x7fff8000
#else
	      /* Wider host: bias and compare unsigned to test both
		 bounds at once.  */
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
1150
/* Return 1 is the operand is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit subtraction (i.e. whose
   negation is valid for 64-bit addition, cf.
   reg_or_add_cint64_operand).  */

int
reg_or_sub_cint64_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* NOTE(review): negating the most negative HOST_WIDE_INT here
     overflows; this presumably relies on wrapping behavior of the
     host compiler -- confirm.  */
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT == 32
	      && (- INTVAL (op)) < 0x7fff8000
#else
	      && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
1169
/* Return 1 is the operand is either a non-special register or ANY
   32-bit unsigned constant integer.  */

int
reg_or_logical_cint_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	{
	  /* Such a CONST_INT can only arise for a 64-bit mode on a
	     32-bit host; a narrower mode here is a consistency bug.  */
	  if (GET_MODE_BITSIZE (mode) <= 32)
	    abort ();

	  /* A negative value would sign-extend past bit 31.  */
	  if (INTVAL (op) < 0)
	    return 0;
	}

      /* The value, masked to MODE's width, must have no bits set
	 above bit 31.  */
      return ((INTVAL (op) & GET_MODE_MASK (mode)
	       & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* Integral CONST_DOUBLEs only appear when the value doesn't fit
	 a HOST_WIDE_INT, which can only be DImode here.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  || mode != DImode)
	abort ();

      /* Unsigned 32-bit means the high word must be zero.  */
      return CONST_DOUBLE_HIGH (op) == 0;
    }
  else
    return gpc_reg_operand (op, mode);
}
1203
1204 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1205
1206 int
1207 got_operand (op, mode)
1208 rtx op;
1209 enum machine_mode mode ATTRIBUTE_UNUSED;
1210 {
1211 return (GET_CODE (op) == SYMBOL_REF
1212 || GET_CODE (op) == CONST
1213 || GET_CODE (op) == LABEL_REF);
1214 }
1215
1216 /* Return 1 if the operand is a simple references that can be loaded via
1217 the GOT (labels involving addition aren't allowed). */
1218
1219 int
1220 got_no_const_operand (op, mode)
1221 rtx op;
1222 enum machine_mode mode ATTRIBUTE_UNUSED;
1223 {
1224 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1225 }
1226
/* Return the number of instructions it takes to form the constant
   VALUE in an integer register.  */

static int
num_insns_constant_wide (value)
     HOST_WIDE_INT value;
{
  /* signed constant loadable with {cal|addi} */
  if (CONST_OK_FOR_LETTER_P (value, 'I'))
    return 1;

  /* constant loadable with {cau|addis} */
  else if (CONST_OK_FOR_LETTER_P (value, 'L'))
    return 1;

#if HOST_BITS_PER_WIDE_INT == 64
  else if (TARGET_POWERPC64)
    {
      /* Split VALUE into a sign-extended low 32-bit part and the
	 remaining upper part.  */
      HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
      HOST_WIDE_INT high = value >> 31;

      /* VALUE fits in 32 signed bits: two immediate insns suffice.  */
      if (high == 0 || high == -1)
	return 2;

      high >>= 1;		/* now the true upper 32 bits */

      if (low == 0)
	/* Build the high part, then one shift.  */
	return num_insns_constant_wide (high) + 1;
      else
	/* Build the high part, shift, then OR in the low part.  */
	return (num_insns_constant_wide (high)
		+ num_insns_constant_wide (low) + 1);
    }
#endif

  else
    return 2;
}
1264
/* Return the number of instructions needed to load constant OP of
   mode MODE into an integer register.  OP must be a CONST_INT or
   CONST_DOUBLE; anything else aborts.  */
int
num_insns_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
#if HOST_BITS_PER_WIDE_INT == 64
      /* A 64-bit rotate-and-mask constant can be built in two insns
	 even though it does not fit in 32 signed bits.  */
      if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
	  && mask64_operand (op, mode))
	return 2;
      else
#endif
	return num_insns_constant_wide (INTVAL (op));
    }

  else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
    {
      long l;
      REAL_VALUE_TYPE rv;

      /* Cost of materializing the 32-bit target image of the float.  */
      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);
      return num_insns_constant_wide ((HOST_WIDE_INT) l);
    }

  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      HOST_WIDE_INT low;
      HOST_WIDE_INT high;
      long l[2];
      REAL_VALUE_TYPE rv;
      int endian = (WORDS_BIG_ENDIAN == 0);

      /* An integral CONST_DOUBLE carries its value in its HIGH/LOW
	 fields; a float one must be converted to its target image.  */
      if (mode == VOIDmode || mode == DImode)
	{
	  high = CONST_DOUBLE_HIGH (op);
	  low = CONST_DOUBLE_LOW (op);
	}
      else
	{
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
	  REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
	  high = l[endian];
	  low = l[1 - endian];
	}

      if (TARGET_32BIT)
	/* One load per 32-bit half of the register pair.  */
	return (num_insns_constant_wide (low)
		+ num_insns_constant_wide (high));

      else
	{
	  /* 64-bit register: a value that sign-extends from the low
	     word costs only the low-word insns.  */
	  if (high == 0 && low >= 0)
	    return num_insns_constant_wide (low);

	  else if (high == -1 && low < 0)
	    return num_insns_constant_wide (low);

	  else if (mask64_operand (op, mode))
	    return 2;

	  else if (low == 0)
	    /* High part plus a shift.  */
	    return num_insns_constant_wide (high) + 1;

	  else
	    /* High part, shift, then the low part.  */
	    return (num_insns_constant_wide (high)
		    + num_insns_constant_wide (low) + 1);
	}
    }

  else
    abort ();
}
1339
/* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
   register with one instruction per word.  We only do this if we can
   safely read CONST_DOUBLE_{LOW,HIGH}.  */

int
easy_fp_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != CONST_DOUBLE
      || GET_MODE (op) != mode
      || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
    return 0;

  /* Consider all constants with -msoft-float to be easy.  */
  if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
      && mode != DImode)
    return 1;

  /* If we are using V.4 style PIC, consider all constants to be hard.  */
  if (flag_pic && DEFAULT_ABI == ABI_V4)
    return 0;

#ifdef TARGET_RELOCATABLE
  /* Similarly if we are using -mrelocatable, consider all constants
     to be hard.  */
  if (TARGET_RELOCATABLE)
    return 0;
#endif

  /* For each float width, convert to the target image and require
     every 32-bit word to be loadable with a single instruction.  */
  if (mode == TFmode)
    {
      long k[4];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
    }

  else if (mode == DFmode)
    {
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
    }

  else if (mode == SFmode)
    {
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      return num_insns_constant_wide (l) == 1;
    }

  /* DImode: easy if the low word is zero on a 64-bit target, or if
     the whole value costs at most two insns.  */
  else if (mode == DImode)
    return ((TARGET_POWERPC64
	     && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
	    || (num_insns_constant (op, DImode) <= 2));

  else if (mode == SImode)
    return 1;
  else
    abort ();
}
1417
1418 /* Return 1 if the operand is a CONST_INT and can be put into a
1419 register with one instruction. */
1420
1421 static int
1422 easy_vector_constant (op)
1423 rtx op;
1424 {
1425 rtx elt;
1426 int units, i;
1427
1428 if (GET_CODE (op) != CONST_VECTOR)
1429 return 0;
1430
1431 units = CONST_VECTOR_NUNITS (op);
1432
1433 /* We can generate 0 easily. Look for that. */
1434 for (i = 0; i < units; ++i)
1435 {
1436 elt = CONST_VECTOR_ELT (op, i);
1437
1438 /* We could probably simplify this by just checking for equality
1439 with CONST0_RTX for the current mode, but let's be safe
1440 instead. */
1441
1442 switch (GET_CODE (elt))
1443 {
1444 case CONST_INT:
1445 if (INTVAL (elt) != 0)
1446 return 0;
1447 break;
1448 case CONST_DOUBLE:
1449 if (CONST_DOUBLE_LOW (elt) != 0 || CONST_DOUBLE_HIGH (elt) != 0)
1450 return 0;
1451 break;
1452 default:
1453 return 0;
1454 }
1455 }
1456
1457 /* We could probably generate a few other constants trivially, but
1458 gcc doesn't generate them yet. FIXME later. */
1459 return 1;
1460 }
1461
1462 /* Return 1 if the operand is the constant 0. This works for scalars
1463 as well as vectors. */
1464 int
1465 zero_constant (op, mode)
1466 rtx op;
1467 enum machine_mode mode;
1468 {
1469 return op == CONST0_RTX (mode);
1470 }
1471
1472 /* Return 1 if the operand is 0.0. */
1473 int
1474 zero_fp_constant (op, mode)
1475 rtx op;
1476 enum machine_mode mode;
1477 {
1478 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1479 }
1480
1481 /* Return 1 if the operand is in volatile memory. Note that during
1482 the RTL generation phase, memory_operand does not return TRUE for
1483 volatile memory references. So this function allows us to
1484 recognize volatile references where its safe. */
1485
1486 int
1487 volatile_mem_operand (op, mode)
1488 rtx op;
1489 enum machine_mode mode;
1490 {
1491 if (GET_CODE (op) != MEM)
1492 return 0;
1493
1494 if (!MEM_VOLATILE_P (op))
1495 return 0;
1496
1497 if (mode != GET_MODE (op))
1498 return 0;
1499
1500 if (reload_completed)
1501 return memory_operand (op, mode);
1502
1503 if (reload_in_progress)
1504 return strict_memory_address_p (mode, XEXP (op, 0));
1505
1506 return memory_address_p (mode, XEXP (op, 0));
1507 }
1508
1509 /* Return 1 if the operand is an offsettable memory operand. */
1510
1511 int
1512 offsettable_mem_operand (op, mode)
1513 rtx op;
1514 enum machine_mode mode;
1515 {
1516 return ((GET_CODE (op) == MEM)
1517 && offsettable_address_p (reload_completed || reload_in_progress,
1518 mode, XEXP (op, 0)));
1519 }
1520
1521 /* Return 1 if the operand is either an easy FP constant (see above) or
1522 memory. */
1523
1524 int
1525 mem_or_easy_const_operand (op, mode)
1526 rtx op;
1527 enum machine_mode mode;
1528 {
1529 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1530 }
1531
1532 /* Return 1 if the operand is either a non-special register or an item
1533 that can be used as the operand of a `mode' add insn. */
1534
1535 int
1536 add_operand (op, mode)
1537 rtx op;
1538 enum machine_mode mode;
1539 {
1540 if (GET_CODE (op) == CONST_INT)
1541 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1542 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1543
1544 return gpc_reg_operand (op, mode);
1545 }
1546
1547 /* Return 1 if OP is a constant but not a valid add_operand. */
1548
1549 int
1550 non_add_cint_operand (op, mode)
1551 rtx op;
1552 enum machine_mode mode ATTRIBUTE_UNUSED;
1553 {
1554 return (GET_CODE (op) == CONST_INT
1555 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1556 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1557 }
1558
/* Return 1 if the operand is a non-special register or a constant that
   can be used as the operand of an OR or XOR insn on the RS/6000,
   i.e. all set bits lie entirely within the low 16 bits or entirely
   within the next 16 bits.  */

int
logical_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* OPH is only set (and only meaningful) in the CONST_DOUBLE case.  */
  HOST_WIDE_INT opl, oph;

  if (gpc_reg_operand (op, mode))
    return 1;

  if (GET_CODE (op) == CONST_INT)
    {
      opl = INTVAL (op) & GET_MODE_MASK (mode);

#if HOST_BITS_PER_WIDE_INT <= 32
      /* On a 32-bit host a negative masked value would have set bits
	 beyond bit 31 in a wider mode.  */
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
	return 0;
#endif
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	abort ();

      opl = CONST_DOUBLE_LOW (op);
      oph = CONST_DOUBLE_HIGH (op);
      /* Any bit in the high word is out of immediate range.  */
      if (oph != 0)
	return 0;
    }
  else
    return 0;

  /* Accept a value fitting wholly in the low or wholly in the high
     halfword (andi./andis.-style immediates).  */
  return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
	  || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
}
1597
1598 /* Return 1 if C is a constant that is not a logical operand (as
1599 above), but could be split into one. */
1600
1601 int
1602 non_logical_cint_operand (op, mode)
1603 rtx op;
1604 enum machine_mode mode;
1605 {
1606 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1607 && ! logical_operand (op, mode)
1608 && reg_or_logical_cint_operand (op, mode));
1609 }
1610
/* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
   RS/6000.  It is if there are no more than two 1->0 or 0->1 transitions.
   Reject all ones and all zeros, since these should have been optimized
   away and confuse the making of MB and ME.  */

int
mask_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Fail in 64-bit mode if the mask wraps around because the upper
     32-bits of the mask will all be 1s, contrary to GCC's internal view.  */
  if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
    return 0;

  /* We don't change the number of transitions by inverting,
     so make sure we start with the LS bit zero.  */
  if (c & 1)
    c = ~c;

  /* Reject all zeros or all ones.  */
  if (c == 0)
    return 0;

  /* Find the first transition.  */
  lsb = c & -c;

  /* Invert to look for a second transition.  */
  c = ~c;

  /* Erase first transition.  */
  c &= -lsb;

  /* Find the second transition (if any).  */
  lsb = c & -c;

  /* Match if all the bits above are 1's (or c is zero).  */
  return c == -lsb;
}
1657
/* Return 1 for the PowerPC64 rlwinm corner case: a mask whose low 32
   bits wrap around (both bit 0 and bit 31 set), which mask_operand
   deliberately rejects in 64-bit mode.  */

int
mask_operand_wrap (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Only wrapping masks are handled here.  */
  if ((c & 0x80000001) != 0x80000001)
    return 0;

  /* The LS bit is known set; invert so it is zero (this does not
     change the number of transitions), and reject all ones.  */
  c = ~c;
  if (c == 0)
    return 0;

  /* Find and erase the first transition, then require that at most
     one more remains -- the same technique as mask_operand.  */
  lsb = c & -c;
  c = ~c;
  c &= -lsb;
  lsb = c & -c;
  return c == -lsb;
}
1685
/* Return 1 if the operand is a constant that is a PowerPC64 mask.
   It is if there are no more than one 1->0 or 0->1 transitions.
   Reject all zeros, since zero should have been optimized away and
   confuses the making of MB and ME.  */

int
mask64_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb;

      c = INTVAL (op);

      /* Reject all zeros.  */
      if (c == 0)
	return 0;

      /* We don't change the number of transitions by inverting,
	 so make sure we start with the LS bit zero.  */
      if (c & 1)
	c = ~c;

      /* Find the transition, and check that all bits above are 1's.  */
      lsb = c & -c;

      /* Match if all the bits above are 1's (or c is zero).  */
      return c == -lsb;
    }
  return 0;
}
1719
/* Like mask64_operand, but allow up to three transitions.  This
   predicate is used by insn patterns that generate two rldicl or
   rldicr machine insns.  */

int
mask64_2_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb;

      c = INTVAL (op);

      /* Disallow all zeros.  */
      if (c == 0)
	return 0;

      /* We don't change the number of transitions by inverting,
	 so make sure we start with the LS bit zero.  */
      if (c & 1)
	c = ~c;

      /* Find the first transition.  */
      lsb = c & -c;

      /* Invert to look for a second transition.  */
      c = ~c;

      /* Erase first transition.  */
      c &= -lsb;

      /* Find the second transition.  */
      lsb = c & -c;

      /* Invert to look for a third transition.  */
      c = ~c;

      /* Erase second transition.  */
      c &= -lsb;

      /* Find the third transition (if any).  */
      lsb = c & -c;

      /* Match if all the bits above are 1's (or c is zero).  */
      return c == -lsb;
    }
  return 0;
}
1770
/* Generates shifts and masks for a pair of rldicl or rldicr insns to
   implement ANDing by the mask IN (a CONST_INT accepted by
   mask64_2_operand).  OUT receives four CONST_INT operands:
   OUT[0] = 64 - SHIFT and OUT[2] = SHIFT are the rotate counts, and
   OUT[1]/OUT[3] are the masks applied after the first and second
   rotation respectively.  Only usable on hosts with 64-bit
   HOST_WIDE_INT; aborts otherwise.  */
void
build_mask64_2_operands (in, out)
     rtx in;
     rtx *out;
{
#if HOST_BITS_PER_WIDE_INT >= 64
  unsigned HOST_WIDE_INT c, lsb, m1, m2;
  int shift;

  if (GET_CODE (in) != CONST_INT)
    abort ();

  c = INTVAL (in);
  if (c & 1)
    {
      /* Assume c initially something like 0x00fff000000fffff.  The idea
	 is to rotate the word so that the middle ^^^^^^ group of zeros
	 is at the MS end and can be cleared with an rldicl mask.  We then
	 rotate back and clear off the MS        ^^ group of zeros with a
	 second rldicl.  */
      c = ~c;			/*   c == 0xff000ffffff00000 */
      lsb = c & -c;		/* lsb == 0x0000000000100000 */
      m1 = -lsb;		/*  m1 == 0xfffffffffff00000 */
      c = ~c;			/*   c == 0x00fff000000fffff */
      c &= -lsb;		/*   c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/*   c == 0xff000fffffffffff */
      c &= -lsb;		/*   c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 <<= 64 - shift;	/*  m1 == 0xffffff0000000000 */
      m1 = ~m1;			/*  m1 == 0x000000ffffffffff */
      m2 = ~c;			/*  m2 == 0x00ffffffffffffff */
    }
  else
    {
      /* Assume c initially something like 0xff000f0000000000.  The idea
	 is to rotate the word so that the     ^^^  middle group of zeros
	 is at the LS end and can be cleared with an rldicr mask.  We then
	 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
	 a second rldicr.  */
      lsb = c & -c;		/* lsb == 0x0000010000000000 */
      m2 = -lsb;		/*  m2 == 0xffffff0000000000 */
      c = ~c;			/*   c == 0x00fff0ffffffffff */
      c &= -lsb;		/*   c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/*   c == 0xff000fffffffffff */
      c &= -lsb;		/*   c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 = ~c;			/*  m1 == 0x00ffffffffffffff */
      m1 >>= shift;		/*  m1 == 0x0000000000000fff */
      m1 = ~m1;			/*  m1 == 0xfffffffffffff000 */
    }

  /* Note that when we only have two 0->1 and 1->0 transitions, one of the
     masks will be all 1's.  We are guaranteed more than one transition.  */
  out[0] = GEN_INT (64 - shift);
  out[1] = GEN_INT (m1);
  out[2] = GEN_INT (shift);
  out[3] = GEN_INT (m2);
#else
  (void)in;
  (void)out;
  abort ();
#endif
}
1842
1843 /* Return 1 if the operand is either a non-special register or a constant
1844 that can be used as the operand of a PowerPC64 logical AND insn. */
1845
1846 int
1847 and64_operand (op, mode)
1848 rtx op;
1849 enum machine_mode mode;
1850 {
1851 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1852 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1853
1854 return (logical_operand (op, mode) || mask64_operand (op, mode));
1855 }
1856
1857 /* Like the above, but also match constants that can be implemented
1858 with two rldicl or rldicr insns. */
1859
1860 int
1861 and64_2_operand (op, mode)
1862 rtx op;
1863 enum machine_mode mode;
1864 {
1865 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1866 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
1867
1868 return logical_operand (op, mode) || mask64_2_operand (op, mode);
1869 }
1870
1871 /* Return 1 if the operand is either a non-special register or a
1872 constant that can be used as the operand of an RS/6000 logical AND insn. */
1873
1874 int
1875 and_operand (op, mode)
1876 rtx op;
1877 enum machine_mode mode;
1878 {
1879 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1880 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1881
1882 return (logical_operand (op, mode) || mask_operand (op, mode));
1883 }
1884
1885 /* Return 1 if the operand is a general register or memory operand. */
1886
1887 int
1888 reg_or_mem_operand (op, mode)
1889 rtx op;
1890 enum machine_mode mode;
1891 {
1892 return (gpc_reg_operand (op, mode)
1893 || memory_operand (op, mode)
1894 || volatile_mem_operand (op, mode));
1895 }
1896
/* Return 1 if the operand is a general register or memory operand without
   pre_inc or pre_dec which produces invalid form of PowerPC lwa
   instruction.  */

int
lwa_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  rtx inner = op;

  /* After reload, look through a SUBREG to the register or memory
     inside it.  */
  if (reload_completed && GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  /* A memory operand is acceptable only without auto-increment
     addressing and, when the address is reg+constant, with a
     displacement that is a multiple of 4 (lwa is a DS-form
     instruction whose low two offset bits are not encodable).  */
  return gpc_reg_operand (inner, mode)
    || (memory_operand (inner, mode)
	&& GET_CODE (XEXP (inner, 0)) != PRE_INC
	&& GET_CODE (XEXP (inner, 0)) != PRE_DEC
	&& (GET_CODE (XEXP (inner, 0)) != PLUS
	    || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
	    || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
}
1919
1920 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
1921
1922 int
1923 symbol_ref_operand (op, mode)
1924 rtx op;
1925 enum machine_mode mode;
1926 {
1927 if (mode != VOIDmode && GET_MODE (op) != mode)
1928 return 0;
1929
1930 return (GET_CODE (op) == SYMBOL_REF);
1931 }
1932
1933 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1934 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
1935
1936 int
1937 call_operand (op, mode)
1938 rtx op;
1939 enum machine_mode mode;
1940 {
1941 if (mode != VOIDmode && GET_MODE (op) != mode)
1942 return 0;
1943
1944 return (GET_CODE (op) == SYMBOL_REF
1945 || (GET_CODE (op) == REG
1946 && (REGNO (op) == LINK_REGISTER_REGNUM
1947 || REGNO (op) == COUNT_REGISTER_REGNUM
1948 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
1949 }
1950
/* Return 1 if the operand is a SYMBOL_REF for a function known to be in
   this file and the function is not weakly defined.  */

int
current_file_function_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  /* Accept symbols flagged as local (SYMBOL_REF_FLAG), or a direct
     reference to the current function itself -- but not if it is
     weak, since a weak definition may be overridden elsewhere.  */
  return (GET_CODE (op) == SYMBOL_REF
	  && (SYMBOL_REF_FLAG (op)
	      || (op == XEXP (DECL_RTL (current_function_decl), 0)
		  && ! DECL_WEAK (current_function_decl))));
}
1964
/* Return 1 if this operand is a valid input for a move insn of mode
   MODE: memory, certain constants, registers, and TOC/small-data
   references.  */

int
input_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Memory is always valid.  */
  if (memory_operand (op, mode))
    return 1;

  /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  /* For floating-point, easy constants are valid.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && CONSTANT_P (op)
      && easy_fp_constant (op, mode))
    return 1;

  /* Allow any integer constant.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && (GET_CODE (op) == CONST_INT
	  || GET_CODE (op) == CONST_DOUBLE))
    return 1;

  /* For floating-point or multi-word mode, the only remaining valid type
     is a register.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    return register_operand (op, mode);

  /* The only cases left are integral modes one word or smaller (we
     do not get called for MODE_CC values).  These can be in any
     register.  */
  if (register_operand (op, mode))
    return 1;

  /* A SYMBOL_REF referring to the TOC is valid.  */
  if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
    return 1;

  /* A constant pool expression (relative to the TOC) is valid */
  if (TOC_RELATIVE_EXPR_P (op))
    return 1;

  /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
     to be valid.  */
  if (DEFAULT_ABI == ABI_V4
      && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
      && small_data_operand (op, Pmode))
    return 1;

  return 0;
}
2021
/* Return 1 for an operand in small memory on V.4/eabi: a SYMBOL_REF
   (possibly plus a small constant offset) for an object placed in the
   small data area.  Always 0 on non-ELF targets.  */

int
small_data_operand (op, mode)
     rtx op ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
#if TARGET_ELF
  rtx sym_ref;

  /* Small-data addressing only applies with -msdata=sysv/eabi.  */
  if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
    return 0;

  if (DEFAULT_ABI != ABI_V4)
    return 0;

  if (GET_CODE (op) == SYMBOL_REF)
    sym_ref = op;

  /* Otherwise only (const (plus (symbol_ref) (const_int))) is
     acceptable.  */
  else if (GET_CODE (op) != CONST
	   || GET_CODE (XEXP (op, 0)) != PLUS
	   || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
	   || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
    return 0;

  else
    {
      rtx sum = XEXP (op, 0);
      HOST_WIDE_INT summand;

      /* We have to be careful here, because it is the referenced address
        that must be 32k from _SDA_BASE_, not just the symbol.  */
      summand = INTVAL (XEXP (sum, 1));
      if (summand < 0 || summand > g_switch_value)
	return 0;

      sym_ref = XEXP (sum, 0);
    }

  /* Small-data symbols carry a leading '@' in their name -- presumably
     applied by the section-encoding machinery; confirm against
     ENCODE_SECTION_INFO for this target.  */
  if (*XSTR (sym_ref, 0) != '@')
    return 0;

  return 1;

#else
  return 0;
#endif
}
2070 \f
2071 static int
2072 constant_pool_expr_1 (op, have_sym, have_toc)
2073 rtx op;
2074 int *have_sym;
2075 int *have_toc;
2076 {
2077 switch (GET_CODE(op))
2078 {
2079 case SYMBOL_REF:
2080 if (CONSTANT_POOL_ADDRESS_P (op))
2081 {
2082 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2083 {
2084 *have_sym = 1;
2085 return 1;
2086 }
2087 else
2088 return 0;
2089 }
2090 else if (! strcmp (XSTR (op, 0), toc_label_name))
2091 {
2092 *have_toc = 1;
2093 return 1;
2094 }
2095 else
2096 return 0;
2097 case PLUS:
2098 case MINUS:
2099 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2100 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2101 case CONST:
2102 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2103 case CONST_INT:
2104 return 1;
2105 default:
2106 return 0;
2107 }
2108 }
2109
2110 int
2111 constant_pool_expr_p (op)
2112 rtx op;
2113 {
2114 int have_sym = 0;
2115 int have_toc = 0;
2116 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2117 }
2118
2119 int
2120 toc_relative_expr_p (op)
2121 rtx op;
2122 {
2123 int have_sym = 0;
2124 int have_toc = 0;
2125 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2126 }
2127
/* Try machine-dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new, valid address.
   This is used from only one place: `memory_address' in explow.c.

   OLDX is the address as it was before break_out_memory_refs was
   called.  In some cases it is useful to look at this to decide what
   needs to be done.

   MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.

   It is always safe for this function to do nothing.  It exists to
   recognize opportunities to optimize the output.

   On RS/6000, first check for the sum of a register with a constant
   integer that is out of range.  If so, generate code to add the
   constant with the low-order 16 bits masked to the register and force
   this result into another register (this can be done with `cau').
   Then generate an address of REG+(CONST&0xffff), allowing for the
   possibility of bit 16 being a one.

   Then check for the sum of a register and something not constant, try to
   load the other things into a register and return the sum.  */
rtx
rs6000_legitimize_address (x, oldx, mode)
     rtx x;
     rtx oldx ATTRIBUTE_UNUSED;
     enum machine_mode mode;
{
  /* reg + out-of-range constant: split the constant into a high part
     added to the register and a sign-extended low 16 bits left in the
     address.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
    {
      HOST_WIDE_INT high_int, low_int;
      rtx sum;
      /* low_int is the constant's low 16 bits sign-extended; high_int
	 absorbs the remainder (including any carry out of bit 15).  */
      low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
      high_int = INTVAL (XEXP (x, 1)) - low_int;
      sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
					 GEN_INT (high_int)), 0);
      return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
    }
  /* reg + non-constant: force the second operand into a register so we
     get indexed (reg+reg) addressing, but only for modes whose access
     is a single machine operation on this target configuration.  */
  else if (GET_CODE (x) == PLUS
	   && GET_CODE (XEXP (x, 0)) == REG
	   && GET_CODE (XEXP (x, 1)) != CONST_INT
	   && GET_MODE_NUNITS (mode) == 1
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	       || TARGET_POWERPC64
	       || (mode != DFmode && mode != TFmode))
	   && (TARGET_POWERPC64 || mode != DImode)
	   && mode != TImode)
    {
      return gen_rtx_PLUS (Pmode, XEXP (x, 0),
			   force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
    }
  /* AltiVec modes: only reg or reg+reg addresses are produced.  */
  else if (ALTIVEC_VECTOR_MODE (mode))
    {
      rtx reg;

      /* Make sure both operands are registers.  */
      if (GET_CODE (x) == PLUS)
	return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
			     force_reg (Pmode, XEXP (x, 1)));

      reg = force_reg (Pmode, x);
      return reg;
    }
  else if (SPE_VECTOR_MODE (mode))
    {
      /* We accept [reg + reg] and [reg + OFFSET].  */

      if (GET_CODE (x) == PLUS)
	{
	  rtx op1 = XEXP (x, 0);
	  rtx op2 = XEXP (x, 1);

	  op1 = force_reg (Pmode, op1);

	  /* An offset outside the SPE-encodable range must live in a
	     register instead.  */
	  if (GET_CODE (op2) != REG
	      && (GET_CODE (op2) != CONST_INT
		  || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
	    op2 = force_reg (Pmode, op2);

	  return gen_rtx_PLUS (Pmode, op1, op2);
	}

      return force_reg (Pmode, x);
    }
  /* 32-bit ELF, no TOC, no PIC: build the constant address with an
     elf_high / LO_SUM pair.  */
  else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && GET_MODE_NUNITS (mode) == 1
	   && (GET_MODE_BITSIZE (mode) <= 32
	       || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_elf_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  /* Likewise on Darwin, via the macho_high pattern.  */
  else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
	   && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
	   && mode != DImode
	   && mode != TImode)
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_macho_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  /* A constant-pool reference whose entry lives in the TOC becomes a
     TOC-relative reference.  */
  else if (TARGET_TOC
	   && CONSTANT_POOL_EXPR_P (x)
	   && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
    {
      return create_TOC_reference (x);
    }
  else
    /* Nothing applies; the caller keeps the original address.  */
    return NULL_RTX;
}
2249
2250 /* The convention appears to be to define this wherever it is used.
2251 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2252 is now used here. */
2253 #ifndef REG_MODE_OK_FOR_BASE_P
2254 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2255 #endif
2256
/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS.  Returns a value to
   replace the input X, or the original X if no replacement is called for.
   The output parameter *WIN is 1 if the calling macro should goto WIN,
   0 if it should not.

   For RS/6000, we wish to handle large displacements off a base
   register by splitting the addend across an addi/addis and the mem insn.
   This cuts number of extra insns needed from 3 to 1.

   On Darwin, we use this to generate code for floating point constants.
   A movsf_low is generated so we wind up with 2 instructions rather than 3.
   The Darwin code is inside #if TARGET_MACHO because only then is
   machopic_function_base_name() defined.  */
rtx
rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
     rtx x;
     enum machine_mode mode;
     int opnum;
     int type;
     int ind_levels ATTRIBUTE_UNUSED;
     int *win;
{
  /* We must recognize output that we have already generated ourselves.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      /* Reload the inner reg+high sum into a base register; the low
	 displacement stays in the address itself.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN && flag_pic
      && GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
      && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
    {
      /* Result of previous invocation of this function on Darwin
	 floating point constant.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* hard reg + constant: split the constant so that the memory insn
     carries a sign-extended 16-bit displacement and the rest is folded
     into the reloaded base register.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && !SPE_VECTOR_MODE (mode)
      && !ALTIVEC_VECTOR_MODE (mode))
    {
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT high
	= (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
	{
	  *win = 0;
	  return x;
	}

      /* Reload the high part into a base reg; leave the low part
	 in the mem directly.  */

      x = gen_rtx_PLUS (GET_MODE (x),
			gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
				      GEN_INT (high)),
			GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#if TARGET_MACHO
  if (GET_CODE (x) == SYMBOL_REF
      && DEFAULT_ABI == ABI_DARWIN
      && !ALTIVEC_VECTOR_MODE (mode)
      && flag_pic)
    {
      /* Darwin load of floating point constant.  Build the
	 picbase-relative offset expression this function recognizes
	 above on re-entry, then reload its base sum.  */
      rtx offset = gen_rtx (CONST, Pmode,
		    gen_rtx (MINUS, Pmode, x,
		      gen_rtx (SYMBOL_REF, Pmode,
			machopic_function_base_name ())));
      x = gen_rtx (LO_SUM, GET_MODE (x),
	    gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
	      gen_rtx (HIGH, Pmode, offset)), offset);
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* A constant-pool expression whose entry is in the TOC needs no
     reload at all -- just rewrite it as a TOC reference.  */
  if (TARGET_TOC
      && CONSTANT_POOL_EXPR_P (x)
      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
    {
      (x) = create_TOC_reference (x);
      *win = 1;
      return x;
    }
  *win = 0;
  return x;
}
2380
/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
   that is a valid memory address for an instruction.
   The MODE argument is the machine mode for the MEM expression
   that wants to use this address.

   On the RS/6000, there are four valid addresses: a SYMBOL_REF that
   refers to a constant pool entry of an address (or the sum of it
   plus a constant), a short (16-bit signed) constant plus a register,
   the sum of two registers, or a register indirect, possibly with an
   auto-increment.  For DFmode and DImode with a constant plus register,
   we must ensure that both words are addressable or PowerPC64 with offset
   word aligned.

   For modes spanning multiple registers (DFmode in 32-bit GPRs,
   32-bit DImode, TImode), indexed addressing cannot be used because
   adjacent memory cells are accessed by adding word-sized offsets
   during assembly output.  */
int
rs6000_legitimate_address (mode, x, reg_ok_strict)
    enum machine_mode mode;
    rtx x;
    int reg_ok_strict;
{
  /* Plain register indirect.  */
  if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
    return 1;
  /* Pre-increment / pre-decrement, except for vector modes.  */
  if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
      && !ALTIVEC_VECTOR_MODE (mode)
      && !SPE_VECTOR_MODE (mode)
      && TARGET_UPDATE
      && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
    return 1;
  /* V.4 small data references.  */
  if (LEGITIMATE_SMALL_DATA_P (mode, x))
    return 1;
  /* TOC-based constant pool addresses.  */
  if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
    return 1;
  /* If not REG_OK_STRICT (before reload) let pass any stack offset.  */
  if (! reg_ok_strict
      && GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return 1;
  /* Register plus 16-bit signed displacement.  */
  if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
    return 1;
  /* Indexed (reg+reg), only for modes accessed in one operation here
     (see the multi-register caveat in the function comment).  */
  if (mode != TImode
      && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	  || TARGET_POWERPC64
	  || (mode != DFmode && mode != TFmode))
      && (TARGET_POWERPC64 || mode != DImode)
      && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
    return 1;
  /* LO_SUM addresses (low part of a HIGH/LO_SUM pair).  */
  if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
    return 1;
  return 0;
}
2436 \f
2437 /* Try to output insns to set TARGET equal to the constant C if it can
2438 be done in less than N insns. Do all computations in MODE.
2439 Returns the place where the output has been placed if it can be
2440 done and the insns have been emitted. If it would take more than N
2441 insns, zero is returned and no insns and emitted. */
2442
2443 rtx
2444 rs6000_emit_set_const (dest, mode, source, n)
2445 rtx dest, source;
2446 enum machine_mode mode;
2447 int n ATTRIBUTE_UNUSED;
2448 {
2449 rtx result, insn, set;
2450 HOST_WIDE_INT c0, c1;
2451
2452 if (mode == QImode || mode == HImode)
2453 {
2454 if (dest == NULL)
2455 dest = gen_reg_rtx (mode);
2456 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
2457 return dest;
2458 }
2459 else if (mode == SImode)
2460 {
2461 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
2462
2463 emit_insn (gen_rtx_SET (VOIDmode, result,
2464 GEN_INT (INTVAL (source)
2465 & (~ (HOST_WIDE_INT) 0xffff))));
2466 emit_insn (gen_rtx_SET (VOIDmode, dest,
2467 gen_rtx_IOR (SImode, result,
2468 GEN_INT (INTVAL (source) & 0xffff))));
2469 result = dest;
2470 }
2471 else if (mode == DImode)
2472 {
2473 if (GET_CODE (source) == CONST_INT)
2474 {
2475 c0 = INTVAL (source);
2476 c1 = -(c0 < 0);
2477 }
2478 else if (GET_CODE (source) == CONST_DOUBLE)
2479 {
2480 #if HOST_BITS_PER_WIDE_INT >= 64
2481 c0 = CONST_DOUBLE_LOW (source);
2482 c1 = -(c0 < 0);
2483 #else
2484 c0 = CONST_DOUBLE_LOW (source);
2485 c1 = CONST_DOUBLE_HIGH (source);
2486 #endif
2487 }
2488 else
2489 abort ();
2490
2491 result = rs6000_emit_set_long_const (dest, c0, c1);
2492 }
2493 else
2494 abort ();
2495
2496 insn = get_last_insn ();
2497 set = single_set (insn);
2498 if (! CONSTANT_P (SET_SRC (set)))
2499 set_unique_reg_note (insn, REG_EQUAL, source);
2500
2501 return result;
2502 }
2503
/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
   fall back to a straight forward decomposition.  We do this to avoid
   exponential run times encountered when looking for longer sequences
   with rs6000_emit_set_const.

   DEST receives the 64-bit constant whose low half is C1 and high half
   is C2 (on 64-bit hosts C2 is recomputed from C1).  */
static rtx
rs6000_emit_set_long_const (dest, c1, c2)
     rtx dest;
     HOST_WIDE_INT c1, c2;
{
  if (!TARGET_POWERPC64)
    {
      rtx operand1, operand2;

      /* 32-bit target: just move each constant half into the matching
	 subword of DEST.  */
      operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
					DImode);
      operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
					DImode);
      emit_move_insn (operand1, GEN_INT (c1));
      emit_move_insn (operand2, GEN_INT (c2));
    }
  else
    {
      HOST_WIDE_INT ud1, ud2, ud3, ud4;

      /* Split the constant into four 16-bit chunks, ud1 being the
	 least significant.  */
      ud1 = c1 & 0xffff;
      ud2 = (c1 & 0xffff0000) >> 16;
#if HOST_BITS_PER_WIDE_INT >= 64
      c2 = c1 >> 32;
#endif
      ud3 = c2 & 0xffff;
      ud4 = (c2 & 0xffff0000) >> 16;

      /* Constant fits in a sign-extended 16-bit value: one insn.  */
      if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
	  || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
	{
	  if (ud1 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) -  0x8000)));
	  else
	    emit_move_insn (dest, GEN_INT (ud1));
	}

      /* Constant fits in a sign-extended 32-bit value: at most two
	 insns (high 16 bits, then OR in the low 16 bits).  */
      else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
	       || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
	{
	  if (ud2 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud2 << 16));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      /* The top chunk is just a sign extension of ud3: build the upper
	 32 bits in the low half, shift left 16, then OR in ud1.  */
      else if ((ud4 == 0xffff && (ud3 & 0x8000))
	       || (ud4 == 0 && ! (ud3 & 0x8000)))
	{
	  if (ud3 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud3 << 16));

	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      /* General case: synthesize the high 32 bits, shift left 32, then
	 OR in the low two chunks.  */
      else
	{
	  if (ud4 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud4 << 16));

	  if (ud3 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));

	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
					       GEN_INT (ud2 << 16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
    }
  return dest;
}
2592
/* Emit a move from SOURCE to DEST in mode MODE.  Legitimizes the
   operands (forcing awkward constants into memory or the TOC, fixing
   addresses) and emits the final SET.  */
void
rs6000_emit_move (dest, source, mode)
     rtx dest;
     rtx source;
     enum machine_mode mode;
{
  rtx operands[2];
  operands[0] = dest;
  operands[1] = source;

  /* Sanity checks.  Check that we get CONST_DOUBLE only when we should.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      /* FIXME.  This should never happen.  */
      /* Since it seems that it does, do the safe thing and convert
	 to a CONST_INT.  */
      operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
    }
  /* An integer CONST_DOUBLE whose value would have fit in a CONST_INT
     is malformed at this point; treat it as an internal error.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && ((CONST_DOUBLE_HIGH (operands[1]) == 0
	   && CONST_DOUBLE_LOW (operands[1]) >= 0)
	  || (CONST_DOUBLE_HIGH (operands[1]) == -1
	      && CONST_DOUBLE_LOW (operands[1]) < 0)))
    abort ();

  /* Check if GCC is setting up a block move that will end up using FP
     registers as temporaries.  We must make sure this is acceptable.  */
  if (GET_CODE (operands[0]) == MEM
      && GET_CODE (operands[1]) == MEM
      && mode == DImode
      && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
	  || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
      && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
					    ? 32 : MEM_ALIGN (operands[0])))
	    || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
					       ? 32
					       : MEM_ALIGN (operands[1]))))
      && ! MEM_VOLATILE_P (operands [0])
      && ! MEM_VOLATILE_P (operands [1]))
    {
      /* Split the DImode mem-to-mem copy into two SImode copies so no
	 FP temporary is involved.  */
      emit_move_insn (adjust_address (operands[0], SImode, 0),
		      adjust_address (operands[1], SImode, 0));
      emit_move_insn (adjust_address (operands[0], SImode, 4),
		      adjust_address (operands[1], SImode, 4));
      return;
    }

  if (!no_new_pseudos)
    {
      /* Load narrow memory operands zero-extended into a full word and
	 use the low part, when optimizing.  */
      if (GET_CODE (operands[1]) == MEM && optimize > 0
	  && (mode == QImode || mode == HImode || mode == SImode)
	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
	{
	  rtx reg = gen_reg_rtx (word_mode);

	  emit_insn (gen_rtx_SET (word_mode, reg,
				  gen_rtx_ZERO_EXTEND (word_mode,
						       operands[1])));
	  operands[1] = gen_lowpart (mode, reg);
	}
      /* Avoid mem-to-mem moves: route the source through a register.  */
      if (GET_CODE (operands[0]) != REG)
	operands[1] = force_reg (mode, operands[1]);
    }

  /* Storing SFmode from an FP register on pre-PowerPC (POWER)
     hardware: the register may hold double-precision data.  */
  if (mode == SFmode && ! TARGET_POWERPC
      && TARGET_HARD_FLOAT && TARGET_FPRS
      && GET_CODE (operands[0]) == MEM)
    {
      int regnum;

      if (reload_in_progress || reload_completed)
	regnum = true_regnum (operands[1]);
      else if (GET_CODE (operands[1]) == REG)
	regnum = REGNO (operands[1]);
      else
	regnum = -1;

      /* If operands[1] is a register, on POWER it may have
	 double-precision data in it, so truncate it to single
	 precision.  */
      if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
	{
	  rtx newreg;
	  newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
	  emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
	  operands[1] = newreg;
	}
    }

  /* Handle the case where reload calls us with an invalid address;
     and the case of CONSTANT_P_RTX.  */
  if (!ALTIVEC_VECTOR_MODE (mode)
      && (! general_operand (operands[1], mode)
	  || ! nonimmediate_operand (operands[0], mode)
	  || GET_CODE (operands[1]) == CONSTANT_P_RTX))
    {
      emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
      return;
    }

  /* FIXME:  In the long term, this switch statement should go away
     and be replaced by a sequence of tests based on things like
     mode == Pmode.  */
  switch (mode)
    {
    case HImode:
    case QImode:
      /* Non-CONST_INT constants go to the constant pool.  */
      if (CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != CONST_INT)
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case TFmode:
    case DFmode:
    case SFmode:
      /* FP constants that can't be generated cheaply go to the pool.  */
      if (CONSTANT_P (operands[1])
	  && ! easy_fp_constant (operands[1], mode))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case V16QImode:
    case V8HImode:
    case V4SFmode:
    case V4SImode:
    case V4HImode:
    case V2SFmode:
    case V2SImode:
    case V1DImode:
      /* Vector constants that can't be synthesized go to the pool.  */
      if (CONSTANT_P (operands[1])
	  && !easy_vector_constant (operands[1]))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case SImode:
    case DImode:
      /* Use default pattern for address of ELF small data */
      if (TARGET_ELF
	  && mode == Pmode
	  && DEFAULT_ABI == ABI_V4
	  && (GET_CODE (operands[1]) == SYMBOL_REF
	      || GET_CODE (operands[1]) == CONST)
	  && small_data_operand (operands[1], mode))
	{
	  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	  return;
	}

      /* 32-bit V.4 with -fpic: load the address through the GOT.  */
      if (DEFAULT_ABI == ABI_V4
	  && mode == Pmode && mode == SImode
	  && flag_pic == 1 && got_operand (operands[1], mode))
	{
	  emit_insn (gen_movsi_got (operands[0], operands[1]));
	  return;
	}

      /* No TOC, no PIC: materialize constant addresses with a
	 high/low pair.  */
      if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
	  && TARGET_NO_TOC && ! flag_pic
	  && mode == Pmode
	  && CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != HIGH
	  && GET_CODE (operands[1]) != CONST_INT)
	{
	  rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));

	  /* If this is a function address on -mcall-aixdesc,
	     convert it to the address of the descriptor.  */
	  if (DEFAULT_ABI == ABI_AIX
	      && GET_CODE (operands[1]) == SYMBOL_REF
	      && XSTR (operands[1], 0)[0] == '.')
	    {
	      const char *name = XSTR (operands[1], 0);
	      rtx new_ref;
	      /* Strip the leading dots and copy the symbol's flags.  */
	      while (*name == '.')
		name++;
	      new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
	      CONSTANT_POOL_ADDRESS_P (new_ref)
		= CONSTANT_POOL_ADDRESS_P (operands[1]);
	      SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
	      SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
	      operands[1] = new_ref;
	    }

	  if (DEFAULT_ABI == ABI_DARWIN)
	    {
	      emit_insn (gen_macho_high (target, operands[1]));
	      emit_insn (gen_macho_low (operands[0], target, operands[1]));
	      return;
	    }

	  emit_insn (gen_elf_high (target, operands[1]));
	  emit_insn (gen_elf_low (operands[0], target, operands[1]));
	  return;
	}

      /* If this is a SYMBOL_REF that refers to a constant pool entry,
	 and we have put it in the TOC, we just need to make a TOC-relative
	 reference to it.  */
      if (TARGET_TOC
	  && GET_CODE (operands[1]) == SYMBOL_REF
	  && CONSTANT_POOL_EXPR_P (operands[1])
	  && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
					      get_pool_mode (operands[1])))
	{
	  operands[1] = create_TOC_reference (operands[1]);
	}
      /* Otherwise, constants too expensive to build inline go to the
	 constant pool (and from there possibly into the TOC).  */
      else if (mode == Pmode
	       && CONSTANT_P (operands[1])
	       && ((GET_CODE (operands[1]) != CONST_INT
		    && ! easy_fp_constant (operands[1], mode))
		   || (GET_CODE (operands[1]) == CONST_INT
		       && num_insns_constant (operands[1], mode) > 2)
		   || (GET_CODE (operands[0]) == REG
		       && FP_REGNO_P (REGNO (operands[0]))))
	       && GET_CODE (operands[1]) != HIGH
	       && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
	       && ! TOC_RELATIVE_EXPR_P (operands[1]))
	{
	  /* Emit a USE operation so that the constant isn't deleted if
	     expensive optimizations are turned on because nobody
	     references it.  This should only be done for operands that
	     contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
	     This should not be done for operands that contain LABEL_REFs.
	     For now, we just handle the obvious case.  */
	  if (GET_CODE (operands[1]) != LABEL_REF)
	    emit_insn (gen_rtx_USE (VOIDmode, operands[1]));

#if TARGET_MACHO
	  /* Darwin uses a special PIC legitimizer.  */
	  if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
	    {
	      operands[1] =
		rs6000_machopic_legitimize_pic_address (operands[1], mode,
							operands[0]);
	      if (operands[0] != operands[1])
		emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	      return;
	    }
#endif

	  /* If we are to limit the number of things we put in the TOC and
	     this is a symbol plus a constant we can add in one insn,
	     just put the symbol in the TOC and add the constant.  Don't do
	     this if reload is in progress.  */
	  if (GET_CODE (operands[1]) == CONST
	      && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
	      && GET_CODE (XEXP (operands[1], 0)) == PLUS
	      && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
	      && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
		  || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
	      && ! side_effects_p (operands[0]))
	    {
	      rtx sym =
		force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
	      rtx other = XEXP (XEXP (operands[1], 0), 1);

	      sym = force_reg (mode, sym);
	      if (mode == SImode)
		emit_insn (gen_addsi3 (operands[0], sym, other));
	      else
		emit_insn (gen_adddi3 (operands[0], sym, other));
	      return;
	    }

	  operands[1] = force_const_mem (mode, operands[1]);

	  /* If the constant landed in the TOC, reference it relative
	     to the TOC register and mark the memory unchanging.  */
	  if (TARGET_TOC
	      && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
	      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
		get_pool_constant (XEXP (operands[1], 0)),
		get_pool_mode (XEXP (operands[1], 0))))
	    {
	      operands[1]
		= gen_rtx_MEM (mode,
			       create_TOC_reference (XEXP (operands[1], 0)));
	      set_mem_alias_set (operands[1], get_TOC_alias_set ());
	      RTX_UNCHANGING_P (operands[1]) = 1;
	    }
	}
      break;

    case TImode:
      /* Force TImode memory addresses to plain register indirect.  */
      if (GET_CODE (operands[0]) == MEM
	  && GET_CODE (XEXP (operands[0], 0)) != REG
	  && ! reload_in_progress)
	operands[0]
	  = replace_equiv_address (operands[0],
				   copy_addr_to_reg (XEXP (operands[0], 0)));

      if (GET_CODE (operands[1]) == MEM
	  && GET_CODE (XEXP (operands[1], 0)) != REG
	  && ! reload_in_progress)
	operands[1]
	  = replace_equiv_address (operands[1],
				   copy_addr_to_reg (XEXP (operands[1], 0)));
      /* On POWER the TImode move pattern clobbers a scratch register;
	 emit the SET/CLOBBER parallel explicitly.  */
      if (TARGET_POWER)
	{
	  emit_insn (gen_rtx_PARALLEL (VOIDmode,
				       gen_rtvec (2,
						  gen_rtx_SET (VOIDmode,
							       operands[0], operands[1]),
						  gen_rtx_CLOBBER (VOIDmode,
								   gen_rtx_SCRATCH (SImode)))));
	  return;
	}
      break;

    default:
      abort ();
    }

  /* Above, we may have called force_const_mem which may have returned
     an invalid address.  If we can, fix this up; otherwise, reload will
     have to deal with it.  */
  if (GET_CODE (operands[1]) == MEM
      && ! memory_address_p (mode, XEXP (operands[1], 0))
      && ! reload_in_progress)
    operands[1] = adjust_address (operands[1], mode, 0);

  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
  return;
}
2918 \f
2919 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2920 for a call to a function whose data type is FNTYPE.
2921 For a library call, FNTYPE is 0.
2922
2923 For incoming args we set the number of arguments in the prototype large
2924 so we never return a PARALLEL. */
2925
2926 void
2927 init_cumulative_args (cum, fntype, libname, incoming)
2928 CUMULATIVE_ARGS *cum;
2929 tree fntype;
2930 rtx libname ATTRIBUTE_UNUSED;
2931 int incoming;
2932 {
2933 static CUMULATIVE_ARGS zero_cumulative;
2934
2935 *cum = zero_cumulative;
2936 cum->words = 0;
2937 cum->fregno = FP_ARG_MIN_REG;
2938 cum->vregno = ALTIVEC_ARG_MIN_REG;
2939 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
2940 cum->call_cookie = CALL_NORMAL;
2941 cum->sysv_gregno = GP_ARG_MIN_REG;
2942
2943 if (incoming)
2944 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
2945
2946 else if (cum->prototype)
2947 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
2948 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
2949 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
2950
2951 else
2952 cum->nargs_prototype = 0;
2953
2954 cum->orig_nargs = cum->nargs_prototype;
2955
2956 /* Check for a longcall attribute. */
2957 if (fntype
2958 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
2959 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
2960 cum->call_cookie = CALL_LONG;
2961
2962 if (TARGET_DEBUG_ARG)
2963 {
2964 fprintf (stderr, "\ninit_cumulative_args:");
2965 if (fntype)
2966 {
2967 tree ret_type = TREE_TYPE (fntype);
2968 fprintf (stderr, " ret code = %s,",
2969 tree_code_name[ (int)TREE_CODE (ret_type) ]);
2970 }
2971
2972 if (cum->call_cookie & CALL_LONG)
2973 fprintf (stderr, " longcall,");
2974
2975 fprintf (stderr, " proto = %d, nargs = %d\n",
2976 cum->prototype, cum->nargs_prototype);
2977 }
2978 }
2979 \f
2980 /* If defined, a C expression which determines whether, and in which
2981 direction, to pad out an argument with extra space. The value
2982 should be of type `enum direction': either `upward' to pad above
2983 the argument, `downward' to pad below, or `none' to inhibit
2984 padding.
2985
2986 For the AIX ABI structs are always stored left shifted in their
2987 argument slot. */
2988
2989 enum direction
2990 function_arg_padding (mode, type)
2991 enum machine_mode mode;
2992 tree type;
2993 {
2994 if (type != 0 && AGGREGATE_TYPE_P (type))
2995 return upward;
2996
2997 /* This is the default definition. */
2998 return (! BYTES_BIG_ENDIAN
2999 ? upward
3000 : ((mode == BLKmode
3001 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
3002 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
3003 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
3004 ? downward : upward));
3005 }
3006
3007 /* If defined, a C expression that gives the alignment boundary, in bits,
3008 of an argument with the specified mode and type. If it is not defined,
3009 PARM_BOUNDARY is used for all arguments.
3010
3011 V.4 wants long longs to be double word aligned. */
3012
3013 int
3014 function_arg_boundary (mode, type)
3015 enum machine_mode mode;
3016 tree type ATTRIBUTE_UNUSED;
3017 {
3018 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
3019 return 64;
3020 else if (SPE_VECTOR_MODE (mode))
3021 return 64;
3022 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3023 return 128;
3024 else
3025 return PARM_BOUNDARY;
3026 }
3027 \f
3028 /* Update the data in CUM to advance over an argument
3029 of mode MODE and data type TYPE.
3030 (TYPE is null for libcalls where that information may not be available.) */
3031
void
function_arg_advance (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;
     tree type;
     int named;
{
  /* One fewer prototyped argument remains; this can go negative once we
     are past the named arguments of a prototyped call.  */
  cum->nargs_prototype--;

  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      /* AltiVec args consume a vector register while any remain and we are
	 still within the prototype; otherwise they take stack words.  */
      if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
	cum->vregno++;
      else
	cum->words += RS6000_ARG_SIZE (mode, type);
    }
  else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
	   && named && cum->sysv_gregno <= GP_ARG_MAX_REG)
    /* A named SPE vector occupies a single (64-bit) GPR.  */
    cum->sysv_gregno++;
  else if (DEFAULT_ABI == ABI_V4)
    {
      if (TARGET_HARD_FLOAT && TARGET_FPRS
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    cum->fregno++;
	  else
	    {
	      /* FPRs exhausted: the value goes on the stack; a double is
		 doubleword aligned there, so pad an odd word first.  */
	      if (mode == DFmode)
		cum->words += cum->words & 1;
	      cum->words += RS6000_ARG_SIZE (mode, type);
	    }
	}
      else
	{
	  int n_words;
	  int gregno = cum->sysv_gregno;

	  /* Aggregates and IEEE quad get passed by reference.  */
	  if ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode)
	    n_words = 1;
	  else
	    n_words = RS6000_ARG_SIZE (mode, type);

	  /* Long long and SPE vectors are put in odd registers.  */
	  if (n_words == 2 && (gregno & 1) == 0)
	    gregno += 1;

	  /* Long long and SPE vectors are not split between registers
	     and stack.  */
	  if (gregno + n_words - 1 > GP_ARG_MAX_REG)
	    {
	      /* Long long is aligned on the stack.  */
	      if (n_words == 2)
		cum->words += cum->words & 1;
	      cum->words += n_words;
	    }

	  /* Note: continuing to accumulate gregno past when we've started
	     spilling to the stack indicates the fact that we've started
	     spilling to the stack to expand_builtin_saveregs.  */
	  cum->sysv_gregno = gregno + n_words;
	}

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
		   cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
	  fprintf (stderr, "mode = %4s, named = %d\n",
		   GET_MODE_NAME (mode), named);
	}
    }
  else
    {
      /* AIX/Darwin style: args live at fixed word offsets.  Pad an odd
	 word when a 32-bit target needs 64-bit alignment for this arg.  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
		   && function_arg_boundary (mode, type) == 64) ? 1 : 0;

      cum->words += align + RS6000_ARG_SIZE (mode, type);

      /* FP args advance the FP register counter as well (TFmode takes
	 two FPRs).  */
      if (GET_MODE_CLASS (mode) == MODE_FLOAT
	  && TARGET_HARD_FLOAT && TARGET_FPRS)
	cum->fregno += (mode == TFmode ? 2 : 1);

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
		   cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
	  fprintf (stderr, "named = %d, align = %d\n", named, align);
	}
    }
}
3128 \f
3129 /* Determine where to put an argument to a function.
3130 Value is zero to push the argument on the stack,
3131 or a hard register in which to store the argument.
3132
3133 MODE is the argument's machine mode.
3134 TYPE is the data type of the argument (as a tree).
3135 This is null for libcalls where that information may
3136 not be available.
3137 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3138 the preceding args and about the function being called.
3139 NAMED is nonzero if this argument is a named parameter
3140 (otherwise it is an extra parameter matching an ellipsis).
3141
3142 On RS/6000 the first eight words of non-FP are normally in registers
3143 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3144 Under V.4, the first 8 FP args are in registers.
3145
3146 If this is floating-point and no prototype is specified, we use
3147 both an FP and integer register (or possibly FP reg and stack). Library
3148 functions (when TYPE is zero) always have the proper types for args,
3149 so we can pass the FP value just in one register. emit_library_function
3150 doesn't support PARALLEL anyway. */
3151
struct rtx_def *
function_arg (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;
     tree type;
     int named;
{
  enum rs6000_abi abi = DEFAULT_ABI;

  /* Return a marker to indicate whether CR1 needs to set or clear the
     bit that V.4 uses to say fp args were passed in registers.
     Assume that we don't need the marker for software floating point,
     or compiler generated library calls.  */
  if (mode == VOIDmode)
    {
      if (abi == ABI_V4
	  && cum->nargs_prototype < 0
	  && type && (cum->prototype || TARGET_NO_PROTOTYPE))
	{
	  /* For the SPE, we need to crxor CR6 always.  */
	  if (TARGET_SPE_ABI)
	    return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
	  else if (TARGET_HARD_FLOAT && TARGET_FPRS)
	    /* fregno still at the minimum means no FP arg was assigned
	       a register, so tell the callee to clear the bit.  */
	    return GEN_INT (cum->call_cookie
			    | ((cum->fregno == FP_ARG_MIN_REG)
			       ? CALL_V4_SET_FP_ARGS
			       : CALL_V4_CLEAR_FP_ARGS));
	}

      return GEN_INT (cum->call_cookie);
    }

  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      /* Named AltiVec args in a vector register; unnamed ones (and any
	 overflow) go on the stack.  */
      if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
	return gen_rtx_REG (mode, cum->vregno);
      else
	return NULL;
    }
  else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode) && named)
    {
      if (cum->sysv_gregno <= GP_ARG_MAX_REG)
	return gen_rtx_REG (mode, cum->sysv_gregno);
      else
	return NULL;
    }
  else if (abi == ABI_V4)
    {
      if (TARGET_HARD_FLOAT && TARGET_FPRS
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    return gen_rtx_REG (mode, cum->fregno);
	  else
	    return NULL;
	}
      else
	{
	  int n_words;
	  int gregno = cum->sysv_gregno;

	  /* Aggregates and IEEE quad get passed by reference.  */
	  if ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode)
	    n_words = 1;
	  else
	    n_words = RS6000_ARG_SIZE (mode, type);

	  /* Long long and SPE vectors are put in odd registers.  */
	  if (n_words == 2 && (gregno & 1) == 0)
	    gregno += 1;

	  /* Long long and SPE vectors are not split between registers
	     and stack.  */
	  if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
	    {
	      /* SPE vectors in ... get split into 2 registers.  */
	      if (TARGET_SPE && TARGET_SPE_ABI
		  && SPE_VECTOR_MODE (mode) && !named)
		{
		  /* Describe the two SImode halves at offsets 0 and 4.  */
		  rtx r1, r2;
		  enum machine_mode m = SImode;

		  r1 = gen_rtx_REG (m, gregno);
		  r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
		  r2 = gen_rtx_REG (m, gregno + 1);
		  r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
		  return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
		}
	      return gen_rtx_REG (mode, gregno);
	    }
	  else
	    return NULL;
	}
    }
  else
    {
      /* AIX/Darwin style.  Pad to an even word when this 32-bit arg needs
	 doubleword alignment.  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
		   && function_arg_boundary (mode, type) == 64) ? 1 : 0;
      int align_words = cum->words + align;

      /* Variable-sized types are passed on the stack.  */
      if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	return NULL_RTX;

      if (USE_FP_FOR_ARG_P (*cum, mode, type))
	{
	  if (! type
	      || ((cum->nargs_prototype > 0)
		  /* IBM AIX extended its linkage convention definition always
		     to require FP args after register save area hole on the
		     stack.  */
		  && (DEFAULT_ABI != ABI_AIX
		      || ! TARGET_XL_CALL
		      || (align_words < GP_ARG_NUM_REG))))
	    return gen_rtx_REG (mode, cum->fregno);

	  /* Unprototyped (or XL-convention) FP arg: pass it in both the
	     FP register and the corresponding GP register/stack slot.  */
	  return gen_rtx_PARALLEL (mode,
	    gen_rtvec (2,
		       gen_rtx_EXPR_LIST (VOIDmode,
				((align_words >= GP_ARG_NUM_REG)
				 ? NULL_RTX
				 : (align_words
				    + RS6000_ARG_SIZE (mode, type)
				    > GP_ARG_NUM_REG
				    /* If this is partially on the stack, then
				       we only include the portion actually
				       in registers here.  */
				    ? gen_rtx_REG (SImode,
					       GP_ARG_MIN_REG + align_words)
				    : gen_rtx_REG (mode,
					       GP_ARG_MIN_REG + align_words))),
				const0_rtx),
		       gen_rtx_EXPR_LIST (VOIDmode,
					  gen_rtx_REG (mode, cum->fregno),
					  const0_rtx)));
	}
      else if (align_words < GP_ARG_NUM_REG)
	return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
      else
	return NULL_RTX;
    }
}
3294 \f
3295 /* For an arg passed partly in registers and partly in memory,
3296 this is the number of registers used.
3297 For args passed entirely in registers or entirely in memory, zero. */
3298
3299 int
3300 function_arg_partial_nregs (cum, mode, type, named)
3301 CUMULATIVE_ARGS *cum;
3302 enum machine_mode mode;
3303 tree type;
3304 int named ATTRIBUTE_UNUSED;
3305 {
3306 if (DEFAULT_ABI == ABI_V4)
3307 return 0;
3308
3309 if (USE_FP_FOR_ARG_P (*cum, mode, type)
3310 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
3311 {
3312 if (cum->nargs_prototype >= 0)
3313 return 0;
3314 }
3315
3316 if (cum->words < GP_ARG_NUM_REG
3317 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
3318 {
3319 int ret = GP_ARG_NUM_REG - cum->words;
3320 if (ret && TARGET_DEBUG_ARG)
3321 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
3322
3323 return ret;
3324 }
3325
3326 return 0;
3327 }
3328 \f
3329 /* A C expression that indicates when an argument must be passed by
3330 reference. If nonzero for an argument, a copy of that argument is
3331 made in memory and a pointer to the argument is passed instead of
3332 the argument itself. The pointer is passed in whatever way is
3333 appropriate for passing a pointer to that type.
3334
3335 Under V.4, structures and unions are passed by reference.
3336
3337 As an extension to all ABIs, variable sized types are passed by
3338 reference. */
3339
3340 int
3341 function_arg_pass_by_reference (cum, mode, type, named)
3342 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
3343 enum machine_mode mode ATTRIBUTE_UNUSED;
3344 tree type;
3345 int named ATTRIBUTE_UNUSED;
3346 {
3347 if (DEFAULT_ABI == ABI_V4
3348 && ((type && AGGREGATE_TYPE_P (type))
3349 || mode == TFmode))
3350 {
3351 if (TARGET_DEBUG_ARG)
3352 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
3353
3354 return 1;
3355 }
3356 return type && int_size_in_bytes (type) <= 0;
3357 }
3358 \f
3359 /* Perform any needed actions needed for a function that is receiving a
3360 variable number of arguments.
3361
3362 CUM is as above.
3363
3364 MODE and TYPE are the mode and type of the current parameter.
3365
3366 PRETEND_SIZE is a variable that should be set to the amount of stack
3367 that must be pushed by the prolog to pretend that our caller pushed
3368 it.
3369
3370 Normally, this macro will push all remaining incoming registers on the
3371 stack and set PRETEND_SIZE to the length of the registers pushed. */
3372
3373 void
3374 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
3375 CUMULATIVE_ARGS *cum;
3376 enum machine_mode mode;
3377 tree type;
3378 int *pretend_size ATTRIBUTE_UNUSED;
3379 int no_rtl;
3380
3381 {
3382 CUMULATIVE_ARGS next_cum;
3383 int reg_size = TARGET_32BIT ? 4 : 8;
3384 rtx save_area = NULL_RTX, mem;
3385 int first_reg_offset, set;
3386 tree fntype;
3387 int stdarg_p;
3388
3389 fntype = TREE_TYPE (current_function_decl);
3390 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
3391 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3392 != void_type_node));
3393
3394 /* For varargs, we do not want to skip the dummy va_dcl argument.
3395 For stdargs, we do want to skip the last named argument. */
3396 next_cum = *cum;
3397 if (stdarg_p)
3398 function_arg_advance (&next_cum, mode, type, 1);
3399
3400 if (DEFAULT_ABI == ABI_V4)
3401 {
3402 /* Indicate to allocate space on the stack for varargs save area. */
3403 cfun->machine->sysv_varargs_p = 1;
3404 if (! no_rtl)
3405 save_area = plus_constant (virtual_stack_vars_rtx,
3406 - RS6000_VARARGS_SIZE);
3407
3408 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
3409 }
3410 else
3411 {
3412 first_reg_offset = next_cum.words;
3413 save_area = virtual_incoming_args_rtx;
3414 cfun->machine->sysv_varargs_p = 0;
3415
3416 if (MUST_PASS_IN_STACK (mode, type))
3417 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
3418 }
3419
3420 set = get_varargs_alias_set ();
3421 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
3422 {
3423 mem = gen_rtx_MEM (BLKmode,
3424 plus_constant (save_area,
3425 first_reg_offset * reg_size)),
3426 set_mem_alias_set (mem, set);
3427 set_mem_align (mem, BITS_PER_WORD);
3428
3429 move_block_from_reg
3430 (GP_ARG_MIN_REG + first_reg_offset, mem,
3431 GP_ARG_NUM_REG - first_reg_offset,
3432 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
3433 }
3434
3435 /* Save FP registers if needed. */
3436 if (DEFAULT_ABI == ABI_V4
3437 && TARGET_HARD_FLOAT && TARGET_FPRS
3438 && ! no_rtl
3439 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
3440 {
3441 int fregno = next_cum.fregno;
3442 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
3443 rtx lab = gen_label_rtx ();
3444 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
3445
3446 emit_jump_insn (gen_rtx_SET (VOIDmode,
3447 pc_rtx,
3448 gen_rtx_IF_THEN_ELSE (VOIDmode,
3449 gen_rtx_NE (VOIDmode, cr1,
3450 const0_rtx),
3451 gen_rtx_LABEL_REF (VOIDmode, lab),
3452 pc_rtx)));
3453
3454 while (fregno <= FP_ARG_V4_MAX_REG)
3455 {
3456 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
3457 set_mem_alias_set (mem, set);
3458 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
3459 fregno++;
3460 off += 8;
3461 }
3462
3463 emit_label (lab);
3464 }
3465 }
3466
3467 /* Create the va_list data type. */
3468
3469 tree
3470 rs6000_build_va_list ()
3471 {
3472 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
3473
3474 /* For AIX, prefer 'char *' because that's what the system
3475 header files like. */
3476 if (DEFAULT_ABI != ABI_V4)
3477 return build_pointer_type (char_type_node);
3478
3479 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3480 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
3481
3482 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
3483 unsigned_char_type_node);
3484 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
3485 unsigned_char_type_node);
3486 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
3487 ptr_type_node);
3488 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
3489 ptr_type_node);
3490
3491 DECL_FIELD_CONTEXT (f_gpr) = record;
3492 DECL_FIELD_CONTEXT (f_fpr) = record;
3493 DECL_FIELD_CONTEXT (f_ovf) = record;
3494 DECL_FIELD_CONTEXT (f_sav) = record;
3495
3496 TREE_CHAIN (record) = type_decl;
3497 TYPE_NAME (record) = type_decl;
3498 TYPE_FIELDS (record) = f_gpr;
3499 TREE_CHAIN (f_gpr) = f_fpr;
3500 TREE_CHAIN (f_fpr) = f_ovf;
3501 TREE_CHAIN (f_ovf) = f_sav;
3502
3503 layout_type (record);
3504
3505 /* The correct type is an array type of one element. */
3506 return build_array_type (record, build_index_type (size_zero_node));
3507 }
3508
3509 /* Implement va_start. */
3510
void
rs6000_va_start (valist, nextarg)
     tree valist;
     rtx nextarg;
{
  HOST_WIDE_INT words, n_gpr, n_fpr;
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;

  /* Only SVR4 needs something special.  */
  if (DEFAULT_ABI != ABI_V4)
    {
      std_expand_builtin_va_start (valist, nextarg);
      return;
    }

  /* Field decls of the va_list record, in the order laid out by
     rs6000_build_va_list: gpr, fpr, overflow_arg_area, reg_save_area.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  /* COMPONENT_REFs for each field of the user's va_list object.  */
  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Count number of gp and fp argument registers used.  */
  words = current_function_args_info.words;
  n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
  n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;

  if (TARGET_DEBUG_ARG)
    {
      fputs ("va_start: words = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
      fputs (", n_gpr = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
      fputs (", n_fpr = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
      putc ('\n', stderr);
    }

  /* Initialize the gpr and fpr counters from the registers already
     consumed by the named arguments.  */
  t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the overflow area.  */
  t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
  if (words != 0)
    t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
	       build_int_2 (words * UNITS_PER_WORD, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the register save area.  */
  t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
  t = build (PLUS_EXPR, TREE_TYPE (sav), t,
	     build_int_2 (-RS6000_VARARGS_SIZE, -1));
  t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
3579
3580 /* Implement va_arg. */
3581
rtx
rs6000_va_arg (valist, type)
     tree valist, type;
{
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
  rtx lab_false, lab_over, addr_rtx, r;

  if (DEFAULT_ABI != ABI_V4)
    {
      /* Variable sized types are passed by reference.  */
      if (int_size_in_bytes (type) <= 0)
	{
	  u = build_pointer_type (type);

	  /* Args grow upward.  */
	  t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
		     build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
	  TREE_SIDE_EFFECTS (t) = 1;

	  t = build1 (NOP_EXPR, build_pointer_type (u), t);
	  TREE_SIDE_EFFECTS (t) = 1;

	  /* Dereference the stored pointer to reach the actual object.  */
	  t = build1 (INDIRECT_REF, u, t);
	  TREE_SIDE_EFFECTS (t) = 1;

	  return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
	}
      else
	return std_expand_builtin_va_arg (valist, type);
    }

  /* V.4: fetch the four fields of the va_list record (see
     rs6000_build_va_list): gpr, fpr, overflow_arg_area, reg_save_area.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Size in bytes and in words of the value being fetched.  */
  size = int_size_in_bytes (type);
  rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  /* Pick the register class, save-area offset and scaling used to
     locate the value in the register save area.  */
  if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
    {
      /* Aggregates and long doubles are passed by reference.  */
      indirect_p = 1;
      reg = gpr;
      n_reg = 1;
      sav_ofs = 0;
      sav_scale = 4;
      /* Only a pointer-sized slot is consumed.  */
      size = UNITS_PER_WORD;
      rsize = 1;
    }
  else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
    {
      /* FP args go in FP registers, if present.  */
      indirect_p = 0;
      reg = fpr;
      n_reg = 1;
      /* FP save area follows the 8 GPR slots (8 words of 4 bytes).  */
      sav_ofs = 8*4;
      sav_scale = 8;
    }
  else
    {
      /* Otherwise into GP registers.  */
      indirect_p = 0;
      reg = gpr;
      n_reg = rsize;
      sav_ofs = 0;
      sav_scale = 4;
    }

  /* Pull the value out of the saved registers ...  */

  lab_false = gen_label_rtx ();
  lab_over = gen_label_rtx ();
  addr_rtx = gen_reg_rtx (Pmode);

  /* AltiVec vectors never go in registers.  */
  if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
    {
      TREE_THIS_VOLATILE (reg) = 1;
      /* Branch to the overflow path when fewer than n_reg registers
	 remain (counter >= 8 - n_reg + 1).  */
      emit_cmp_and_jump_insns
	(expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
	 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
	 lab_false);

      /* Long long is aligned in the registers.  */
      if (n_reg > 1)
	{
	  /* Round the register counter up to a multiple of n_reg.  */
	  u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
		     build_int_2 (n_reg - 1, 0));
	  u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
	  u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
	  TREE_SIDE_EFFECTS (u) = 1;
	  expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
	}

      if (sav_ofs)
	t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
      else
	t = sav;

      /* Post-increment the counter by the registers consumed, and use the
	 old value scaled by sav_scale as the offset into the save area.  */
      u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
		 build_int_2 (n_reg, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      u = build1 (CONVERT_EXPR, integer_type_node, u);
      TREE_SIDE_EFFECTS (u) = 1;

      u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      t = build (PLUS_EXPR, ptr_type_node, t, u);
      TREE_SIDE_EFFECTS (t) = 1;

      r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
      if (r != addr_rtx)
	emit_move_insn (addr_rtx, r);

      emit_jump_insn (gen_jump (lab_over));
      emit_barrier ();
    }

  emit_label (lab_false);

  /* ... otherwise out of the overflow area.  */

  /* Make sure we don't find reg 7 for the next int arg.

     All AltiVec vectors go in the overflow area.  So in the AltiVec
     case we need to get the vectors from the overflow area, but
     remember where the GPRs and FPRs are.  */
  if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
		    || !TARGET_ALTIVEC))
    {
      t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  /* Care for on-stack alignment if needed.  */
  if (rsize <= 1)
    t = ovf;
  else
    {
      int align;

      /* AltiVec vectors are 16 byte aligned.  */
      if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
	align = 15;
      else
	align = 7;

      /* Round ovf up: (ovf + align) & ~align.  */
      t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
      t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
    }
  t = save_expr (t);

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);

  /* Advance the overflow pointer past the consumed bytes.  */
  t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_over);

  if (indirect_p)
    {
      /* For by-reference values, addr_rtx holds the address of a
	 pointer; load that pointer to get the object's address.  */
      r = gen_rtx_MEM (Pmode, addr_rtx);
      set_mem_alias_set (r, get_varargs_alias_set ());
      emit_move_insn (addr_rtx, r);
    }

  return addr_rtx;
}
3766
3767 /* Builtins. */
3768
/* Register builtin NAME with function type TYPE and builtin code CODE,
   but only when one of the target flag bits in MASK is enabled for the
   current compilation.  */
#define def_builtin(MASK, NAME, TYPE, CODE)			\
do {								\
  if ((MASK) & target_flags)					\
    builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
		      NULL, NULL_TREE);				\
} while (0)
3775
3776 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3777
/* Each entry: target mask gating the builtin, insn code implementing it,
   builtin function name, and its builtin enum code.  */
static const struct builtin_description bdesc_3arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
};
3804
3805 /* DST operations: void foo (void *, const int, const char). */
3806
/* Each entry: target mask, insn code, builtin name, builtin enum code
   (same layout as bdesc_3arg).  */
static const struct builtin_description bdesc_dst[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
};
3814
3815 /* Simple binary operations: VECc = foo (VECa, VECb). */
3816
3817 static struct builtin_description bdesc_2arg[] =
3818 {
3819 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3820 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3821 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3822 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3823 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3824 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3825 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3826 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3827 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3828 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3829 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3830 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3831 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3832 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3833 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3834 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3835 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3836 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3837 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3838 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3839 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3840 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3841 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3842 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3843 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3844 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3845 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3846 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3847 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3848 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3849 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3850 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3851 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3852 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3853 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3854 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3855 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3856 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3857 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3858 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3859 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3860 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3861 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3862 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3863 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3864 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3865 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3866 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3867 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3868 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3869 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3870 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3871 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3872 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3873 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3874 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3875 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3876 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3877 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3878 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3879 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3880 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3881 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3882 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3883 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3884 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3885 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3886 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3887 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3888 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3889 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3890 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3891 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3892 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3893 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3894 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3895 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3896 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3897 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3898 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3899 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3900 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3901 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3902 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3903 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3904 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3905 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3906 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3907 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3908 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3909 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3910 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3911 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3912 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3913 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3914 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3915 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3916 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3917 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3918 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3919 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3920 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3921 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3922 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3923 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3924 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3925 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3926 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3927 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3928 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3929 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3930 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3931 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3932
3933 /* Place holder, leave as first spe builtin. */
3934 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
3935 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
3936 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
3937 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
3938 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
3939 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
3940 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
3941 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
3942 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
3943 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
3944 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
3945 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
3946 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
3947 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
3948 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
3949 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
3950 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
3951 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
3952 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
3953 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
3954 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
3955 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
3956 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
3957 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
3958 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
3959 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
3960 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
3961 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
3962 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
3963 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
3964 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
3965 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
3966 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
3967 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
3968 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
3969 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
3970 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
3971 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
3972 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
3973 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
3974 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
3975 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
3976 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
3977 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
3978 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
3979 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
3980 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
3981 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
3982 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
3983 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
3984 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
3985 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
3986 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
3987 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
3988 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
3989 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
3990 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
3991 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
3992 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
3993 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
3994 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
3995 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
3996 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
3997 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
3998 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
3999 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
4000 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
4001 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
4002 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
4003 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
4004 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
4005 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
4006 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
4007 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
4008 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
4009 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
4010 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
4011 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
4012 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
4013 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
4014 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
4015 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
4016 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
4017 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
4018 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
4019 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
4020 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
4021 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
4022 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
4023 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
4024 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
4025 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
4026 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
4027 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
4028 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
4029 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
4030 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
4031 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
4032 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
4033 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
4034 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
4035 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
4036 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
4037 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
4038 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
4039 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
4040 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
4041 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
4042 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
4043
4044 /* SPE binary operations expecting a 5-bit unsigned literal. */
4045 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
4046
4047 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
4048 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
4049 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
4050 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
4051 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
4052 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
4053 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
4054 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
4055 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
4056 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
4057 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
4058 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
4059 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
4060 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
4061 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
4062 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
4063 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
4064 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
4065 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
4066 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
4067 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
4068 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
4069 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
4070 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
4071 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
4072 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
4073
4074 /* Place-holder. Leave as last binary SPE builtin. */
4075 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
4076 };
4077
4078 /* AltiVec predicates. */
4079
struct builtin_description_predicates
{
  /* TARGET_* mask (e.g. MASK_ALTIVEC) that must be set for this
     builtin to be available.  */
  const unsigned int mask;
  /* Insn pattern used to expand the comparison.  */
  const enum insn_code icode;
  /* Assembler opcode string; handed to the predicate pattern as a
     SYMBOL_REF operand (see altivec_expand_predicate_builtin).  */
  const char *opcode;
  /* User-visible builtin function name.  */
  const char *const name;
  /* The rs6000_builtins enumerator for this builtin.  */
  const enum rs6000_builtins code;
};
4088
/* Each entry: target mask, comparison insn pattern, assembler opcode
   string (passed through to the pattern as a SYMBOL_REF), builtin
   name, and builtin code.  */
static const struct builtin_description_predicates bdesc_altivec_preds[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
};
4105
4106 /* SPE predicates. */
/* Each entry: target mask (0 = no extra mask required), comparison
   insn pattern, builtin name, and builtin code.  The first and last
   entries delimit the range and must stay in place.  */
static struct builtin_description bdesc_spe_predicates[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
};
4123
4124 /* SPE evsel predicates. */
/* evsel builtins: each pairs a comparison insn pattern with a
   __builtin_spe_evsel_* name.  The first and last entries delimit the
   range and must stay in place.  */
static struct builtin_description bdesc_spe_evsel[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
};
4141
4142 /* ABS* operations. */
4143
/* AltiVec absolute-value builtins.  NOTE(review): presumably expanded
   via altivec_expand_abs_builtin, which supplies the two scratch
   registers these patterns take -- confirm in the expansion loop.  */
static const struct builtin_description bdesc_abs[] =
{
  { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
  { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
};
4154
4155 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
4156 foo (VECa). */
4157
/* One entry per unary builtin: target mask, expander insn pattern,
   builtin name, builtin code.  The vspltis* and evsplat* entries take
   a 5-bit signed literal rather than a vector operand; that literal is
   range-checked in rs6000_expand_unop_builtin.  */
static struct builtin_description bdesc_1arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
  { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },

  /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
     end with SPE_BUILTIN_EVSUBFUSIAAW.  */
  { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
  { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
  { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
  { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
  { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
  { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
  { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
  { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
  { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
  { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
  { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
  { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
  { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
  { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
  { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
  { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
  { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
  { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
  { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
  { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
  { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
  { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
  { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
  { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
  { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
  { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
  { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
  { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
  { 0, CODE_FOR_spe_evsplatfi, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI },
  { 0, CODE_FOR_spe_evsplati, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI },

  /* Place-holder.  Leave as last unary SPE builtin.  */
  { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
};
4214
4215 static rtx
4216 rs6000_expand_unop_builtin (icode, arglist, target)
4217 enum insn_code icode;
4218 tree arglist;
4219 rtx target;
4220 {
4221 rtx pat;
4222 tree arg0 = TREE_VALUE (arglist);
4223 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4224 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4225 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4226
4227 if (icode == CODE_FOR_nothing)
4228 /* Builtin not supported on this processor. */
4229 return 0;
4230
4231 /* If we got invalid arguments bail out before generating bad rtl. */
4232 if (arg0 == error_mark_node)
4233 return const0_rtx;
4234
4235 if (icode == CODE_FOR_altivec_vspltisb
4236 || icode == CODE_FOR_altivec_vspltish
4237 || icode == CODE_FOR_altivec_vspltisw
4238 || icode == CODE_FOR_spe_evsplatfi
4239 || icode == CODE_FOR_spe_evsplati)
4240 {
4241 /* Only allow 5-bit *signed* literals. */
4242 if (GET_CODE (op0) != CONST_INT
4243 || INTVAL (op0) > 0x1f
4244 || INTVAL (op0) < -0x1f)
4245 {
4246 error ("argument 1 must be a 5-bit signed literal");
4247 return const0_rtx;
4248 }
4249 }
4250
4251 if (target == 0
4252 || GET_MODE (target) != tmode
4253 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4254 target = gen_reg_rtx (tmode);
4255
4256 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4257 op0 = copy_to_mode_reg (mode0, op0);
4258
4259 pat = GEN_FCN (icode) (target, op0);
4260 if (! pat)
4261 return 0;
4262 emit_insn (pat);
4263
4264 return target;
4265 }
4266
4267 static rtx
4268 altivec_expand_abs_builtin (icode, arglist, target)
4269 enum insn_code icode;
4270 tree arglist;
4271 rtx target;
4272 {
4273 rtx pat, scratch1, scratch2;
4274 tree arg0 = TREE_VALUE (arglist);
4275 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4276 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4277 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4278
4279 /* If we have invalid arguments, bail out before generating bad rtl. */
4280 if (arg0 == error_mark_node)
4281 return const0_rtx;
4282
4283 if (target == 0
4284 || GET_MODE (target) != tmode
4285 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4286 target = gen_reg_rtx (tmode);
4287
4288 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4289 op0 = copy_to_mode_reg (mode0, op0);
4290
4291 scratch1 = gen_reg_rtx (mode0);
4292 scratch2 = gen_reg_rtx (mode0);
4293
4294 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
4295 if (! pat)
4296 return 0;
4297 emit_insn (pat);
4298
4299 return target;
4300 }
4301
4302 static rtx
4303 rs6000_expand_binop_builtin (icode, arglist, target)
4304 enum insn_code icode;
4305 tree arglist;
4306 rtx target;
4307 {
4308 rtx pat;
4309 tree arg0 = TREE_VALUE (arglist);
4310 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4311 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4312 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4313 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4314 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4315 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4316
4317 if (icode == CODE_FOR_nothing)
4318 /* Builtin not supported on this processor. */
4319 return 0;
4320
4321 /* If we got invalid arguments bail out before generating bad rtl. */
4322 if (arg0 == error_mark_node || arg1 == error_mark_node)
4323 return const0_rtx;
4324
4325 if (icode == CODE_FOR_altivec_vcfux
4326 || icode == CODE_FOR_altivec_vcfsx
4327 || icode == CODE_FOR_altivec_vctsxs
4328 || icode == CODE_FOR_altivec_vctuxs
4329 || icode == CODE_FOR_altivec_vspltb
4330 || icode == CODE_FOR_altivec_vsplth
4331 || icode == CODE_FOR_altivec_vspltw
4332 || icode == CODE_FOR_spe_evaddiw
4333 || icode == CODE_FOR_spe_evldd
4334 || icode == CODE_FOR_spe_evldh
4335 || icode == CODE_FOR_spe_evldw
4336 || icode == CODE_FOR_spe_evlhhesplat
4337 || icode == CODE_FOR_spe_evlhhossplat
4338 || icode == CODE_FOR_spe_evlhhousplat
4339 || icode == CODE_FOR_spe_evlwhe
4340 || icode == CODE_FOR_spe_evlwhos
4341 || icode == CODE_FOR_spe_evlwhou
4342 || icode == CODE_FOR_spe_evlwhsplat
4343 || icode == CODE_FOR_spe_evlwwsplat
4344 || icode == CODE_FOR_spe_evrlwi
4345 || icode == CODE_FOR_spe_evslwi
4346 || icode == CODE_FOR_spe_evsrwis
4347 || icode == CODE_FOR_spe_evsrwiu)
4348 {
4349 /* Only allow 5-bit unsigned literals. */
4350 if (TREE_CODE (arg1) != INTEGER_CST
4351 || TREE_INT_CST_LOW (arg1) & ~0x1f)
4352 {
4353 error ("argument 2 must be a 5-bit unsigned literal");
4354 return const0_rtx;
4355 }
4356 }
4357
4358 if (target == 0
4359 || GET_MODE (target) != tmode
4360 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4361 target = gen_reg_rtx (tmode);
4362
4363 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4364 op0 = copy_to_mode_reg (mode0, op0);
4365 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4366 op1 = copy_to_mode_reg (mode1, op1);
4367
4368 pat = GEN_FCN (icode) (target, op0, op1);
4369 if (! pat)
4370 return 0;
4371 emit_insn (pat);
4372
4373 return target;
4374 }
4375
/* Expand an AltiVec vector-comparison predicate builtin.  ICODE is the
   comparison insn to emit, OPCODE is the AltiVec mnemonic passed through
   as a SYMBOL_REF, ARGLIST holds (CR6 form, vector, vector), and TARGET
   is a suggested result register.  Returns the SImode 0/1 result.  */
static rtx
altivec_expand_predicate_builtin (icode, opcode, arglist, target)
     enum insn_code icode;
     const char *opcode;
     tree arglist;
     rtx target;
{
  rtx pat, scratch;
  /* Argument 1 selects which CR6 bit to test; it must be a constant.  */
  tree cr6_form = TREE_VALUE (arglist);
  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  /* Predicates always produce an int result.  */
  enum machine_mode tmode = SImode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int cr6_form_int;

  if (TREE_CODE (cr6_form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_altivec_predicate must be a constant");
      return const0_rtx;
    }
  else
    cr6_form_int = TREE_INT_CST_LOW (cr6_form);

  /* Both vector operands of the comparison must share a mode.  */
  if (mode0 != mode1)
    abort ();

  /* If we have invalid arguments, bail out before generating bad rtl.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  /* The vector comparison result itself is discarded; the insn is
     emitted only for its effect on CR6.  */
  scratch = gen_reg_rtx (mode0);

  pat = GEN_FCN (icode) (scratch, op0, op1,
			 gen_rtx (SYMBOL_REF, Pmode, opcode));
  if (! pat)
    return 0;
  emit_insn (pat);

  /* The vec_any* and vec_all* predicates use the same opcodes for two
     different operations, but the bits in CR6 will be different
     depending on what information we want.  So we have to play tricks
     with CR6 to get the right bits out.

     If you think this is disgusting, look at the specs for the
     AltiVec predicates.  */

  switch (cr6_form_int)
    {
    case 0:
      emit_insn (gen_cr6_test_for_zero (target));
      break;
    case 1:
      emit_insn (gen_cr6_test_for_zero_reverse (target));
      break;
    case 2:
      emit_insn (gen_cr6_test_for_lt (target));
      break;
    case 3:
      emit_insn (gen_cr6_test_for_lt_reverse (target));
      break;
    default:
      error ("argument 1 of __builtin_altivec_predicate is out of range");
      break;
    }

  return target;
}
4456
/* Expand a vector-store builtin.  ICODE is the store insn; ARGLIST
   carries its three arguments.  Stores produce no value, so this
   always returns NULL_RTX.  */
static rtx
altivec_expand_stv_builtin (icode, arglist)
     enum insn_code icode;
     tree arglist;
{
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
  rtx pat;
  enum machine_mode mode0 = insn_data[icode].operand[0].mode;
  enum machine_mode mode1 = insn_data[icode].operand[1].mode;
  enum machine_mode mode2 = insn_data[icode].operand[2].mode;

  /* Invalid arguments.  Bail before doing anything stoopid!  */
  if (arg0 == error_mark_node
      || arg1 == error_mark_node
      || arg2 == error_mark_node)
    return const0_rtx;

  /* NOTE(review): the builtin's argument order is permuted relative to
     the insn's operand order — op0 (first builtin argument) is matched
     against insn operand 2, while op1/op2 map to operands 0/1; the
     GEN_FCN call below mirrors that permutation.  Confirm against the
     stvx patterns in altivec.md before touching this.  */
  if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
    op0 = copy_to_mode_reg (mode2, op0);
  if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
    op1 = copy_to_mode_reg (mode0, op1);
  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
    op2 = copy_to_mode_reg (mode1, op2);

  pat = GEN_FCN (icode) (op1, op2, op0);
  if (pat)
    emit_insn (pat);
  return NULL_RTX;
}
4491
4492 static rtx
4493 rs6000_expand_ternop_builtin (icode, arglist, target)
4494 enum insn_code icode;
4495 tree arglist;
4496 rtx target;
4497 {
4498 rtx pat;
4499 tree arg0 = TREE_VALUE (arglist);
4500 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4501 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4502 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4503 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4504 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4505 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4506 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4507 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4508 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
4509
4510 if (icode == CODE_FOR_nothing)
4511 /* Builtin not supported on this processor. */
4512 return 0;
4513
4514 /* If we got invalid arguments bail out before generating bad rtl. */
4515 if (arg0 == error_mark_node
4516 || arg1 == error_mark_node
4517 || arg2 == error_mark_node)
4518 return const0_rtx;
4519
4520 if (icode == CODE_FOR_altivec_vsldoi_4sf
4521 || icode == CODE_FOR_altivec_vsldoi_4si
4522 || icode == CODE_FOR_altivec_vsldoi_8hi
4523 || icode == CODE_FOR_altivec_vsldoi_16qi)
4524 {
4525 /* Only allow 4-bit unsigned literals. */
4526 if (TREE_CODE (arg2) != INTEGER_CST
4527 || TREE_INT_CST_LOW (arg2) & ~0xf)
4528 {
4529 error ("argument 3 must be a 4-bit unsigned literal");
4530 return const0_rtx;
4531 }
4532 }
4533
4534 if (target == 0
4535 || GET_MODE (target) != tmode
4536 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4537 target = gen_reg_rtx (tmode);
4538
4539 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4540 op0 = copy_to_mode_reg (mode0, op0);
4541 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4542 op1 = copy_to_mode_reg (mode1, op1);
4543 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
4544 op2 = copy_to_mode_reg (mode2, op2);
4545
4546 pat = GEN_FCN (icode) (target, op0, op1, op2);
4547 if (! pat)
4548 return 0;
4549 emit_insn (pat);
4550
4551 return target;
4552 }
4553
4554 /* Expand the lvx builtins. */
4555 static rtx
4556 altivec_expand_ld_builtin (exp, target, expandedp)
4557 tree exp;
4558 rtx target;
4559 bool *expandedp;
4560 {
4561 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4562 tree arglist = TREE_OPERAND (exp, 1);
4563 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4564 tree arg0;
4565 enum machine_mode tmode, mode0;
4566 rtx pat, op0;
4567 enum insn_code icode;
4568
4569 switch (fcode)
4570 {
4571 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
4572 icode = CODE_FOR_altivec_lvx_16qi;
4573 break;
4574 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
4575 icode = CODE_FOR_altivec_lvx_8hi;
4576 break;
4577 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
4578 icode = CODE_FOR_altivec_lvx_4si;
4579 break;
4580 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
4581 icode = CODE_FOR_altivec_lvx_4sf;
4582 break;
4583 default:
4584 *expandedp = false;
4585 return NULL_RTX;
4586 }
4587
4588 *expandedp = true;
4589
4590 arg0 = TREE_VALUE (arglist);
4591 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4592 tmode = insn_data[icode].operand[0].mode;
4593 mode0 = insn_data[icode].operand[1].mode;
4594
4595 if (target == 0
4596 || GET_MODE (target) != tmode
4597 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4598 target = gen_reg_rtx (tmode);
4599
4600 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4601 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4602
4603 pat = GEN_FCN (icode) (target, op0);
4604 if (! pat)
4605 return 0;
4606 emit_insn (pat);
4607 return target;
4608 }
4609
4610 /* Expand the stvx builtins. */
4611 static rtx
4612 altivec_expand_st_builtin (exp, target, expandedp)
4613 tree exp;
4614 rtx target ATTRIBUTE_UNUSED;
4615 bool *expandedp;
4616 {
4617 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4618 tree arglist = TREE_OPERAND (exp, 1);
4619 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4620 tree arg0, arg1;
4621 enum machine_mode mode0, mode1;
4622 rtx pat, op0, op1;
4623 enum insn_code icode;
4624
4625 switch (fcode)
4626 {
4627 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
4628 icode = CODE_FOR_altivec_stvx_16qi;
4629 break;
4630 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
4631 icode = CODE_FOR_altivec_stvx_8hi;
4632 break;
4633 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
4634 icode = CODE_FOR_altivec_stvx_4si;
4635 break;
4636 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
4637 icode = CODE_FOR_altivec_stvx_4sf;
4638 break;
4639 default:
4640 *expandedp = false;
4641 return NULL_RTX;
4642 }
4643
4644 arg0 = TREE_VALUE (arglist);
4645 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4646 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4647 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4648 mode0 = insn_data[icode].operand[0].mode;
4649 mode1 = insn_data[icode].operand[1].mode;
4650
4651 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4652 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4653 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
4654 op1 = copy_to_mode_reg (mode1, op1);
4655
4656 pat = GEN_FCN (icode) (op0, op1);
4657 if (pat)
4658 emit_insn (pat);
4659
4660 *expandedp = true;
4661 return NULL_RTX;
4662 }
4663
4664 /* Expand the dst builtins. */
4665 static rtx
4666 altivec_expand_dst_builtin (exp, target, expandedp)
4667 tree exp;
4668 rtx target ATTRIBUTE_UNUSED;
4669 bool *expandedp;
4670 {
4671 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4672 tree arglist = TREE_OPERAND (exp, 1);
4673 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4674 tree arg0, arg1, arg2;
4675 enum machine_mode mode0, mode1, mode2;
4676 rtx pat, op0, op1, op2;
4677 struct builtin_description *d;
4678 size_t i;
4679
4680 *expandedp = false;
4681
4682 /* Handle DST variants. */
4683 d = (struct builtin_description *) bdesc_dst;
4684 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
4685 if (d->code == fcode)
4686 {
4687 arg0 = TREE_VALUE (arglist);
4688 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4689 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4690 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4691 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4692 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4693 mode0 = insn_data[d->icode].operand[0].mode;
4694 mode1 = insn_data[d->icode].operand[1].mode;
4695 mode2 = insn_data[d->icode].operand[2].mode;
4696
4697 /* Invalid arguments, bail out before generating bad rtl. */
4698 if (arg0 == error_mark_node
4699 || arg1 == error_mark_node
4700 || arg2 == error_mark_node)
4701 return const0_rtx;
4702
4703 if (TREE_CODE (arg2) != INTEGER_CST
4704 || TREE_INT_CST_LOW (arg2) & ~0x3)
4705 {
4706 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
4707 return const0_rtx;
4708 }
4709
4710 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
4711 op0 = copy_to_mode_reg (mode0, op0);
4712 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
4713 op1 = copy_to_mode_reg (mode1, op1);
4714
4715 pat = GEN_FCN (d->icode) (op0, op1, op2);
4716 if (pat != 0)
4717 emit_insn (pat);
4718
4719 *expandedp = true;
4720 return NULL_RTX;
4721 }
4722
4723 return NULL_RTX;
4724 }
4725
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.  */
static rtx
altivec_expand_builtin (exp, target, expandedp)
     tree exp;
     rtx target;
     bool *expandedp;
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  enum insn_code icode;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0;
  rtx op0, pat;
  enum machine_mode tmode, mode0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);

  /* Give the load, store and data-stream expanders first crack; each
     sets *EXPANDEDP when it recognizes FCODE.  */
  target = altivec_expand_ld_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_st_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_dst_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  /* Assume success until the fall-through at the bottom says
     otherwise.  */
  *expandedp = true;

  switch (fcode)
    {
    case ALTIVEC_BUILTIN_STVX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
    case ALTIVEC_BUILTIN_STVEBX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
    case ALTIVEC_BUILTIN_STVEHX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
    case ALTIVEC_BUILTIN_STVEWX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
    case ALTIVEC_BUILTIN_STVXL:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);

    case ALTIVEC_BUILTIN_MFVSCR:
      /* Move from VSCR: no arguments, one result.  */
      icode = CODE_FOR_altivec_mfvscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    case ALTIVEC_BUILTIN_MTVSCR:
      /* Move to VSCR: one argument, no result.  */
      icode = CODE_FOR_altivec_mtvscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSSALL:
      /* Stop all data streams.  */
      emit_insn (gen_altivec_dssall ());
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSS:
      /* Stop one data stream; the stream id must be a 2-bit literal.  */
      icode = CODE_FOR_altivec_dss;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (arg0) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg0) & ~0x3)
	{
	  error ("argument to dss must be a 2-bit unsigned literal");
	  return const0_rtx;
	}

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      emit_insn (gen_altivec_dss (op0));
      return NULL_RTX;
    }

  /* Expand abs* operations.  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
    if (d->code == fcode)
      return altivec_expand_abs_builtin (d->icode, arglist, target);

  /* Expand the AltiVec predicates.  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
    if (dp->code == fcode)
      return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);

  /* LV* are funky.  We initialized them differently.  */
  switch (fcode)
    {
    case ALTIVEC_BUILTIN_LVSL:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVSR:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEBX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEHX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEWX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVXL:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx,
					  arglist, target);
    default:
      break;
    }

  /* FCODE was not an AltiVec builtin after all.  */
  *expandedp = false;
  return NULL_RTX;
}
4877
/* Binops that need to be initialized manually, but can be expanded
   automagically by rs6000_expand_binop_builtin.  Each entry is
   { mask, insn code, builtin name, builtin enum }; the mask starts as
   0 here — presumably filled in elsewhere (see
   enable_mask_for_builtins) — TODO confirm.  */
static struct builtin_description bdesc_2arg_spe[] =
{
  { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
  { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
  { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
  { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
  { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
  { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
  { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
  { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
  { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
  { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
  { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
  { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
  { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
  { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
  { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
  { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
  { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
  { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
  { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
  { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
  { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
  { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
};
4905
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.

   This expands the SPE builtins that are not simple unary and binary
   operations.  */
static rtx
spe_expand_builtin (exp, target, expandedp)
     tree exp;
     rtx target;
     bool *expandedp;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg1, arg0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  enum insn_code icode;
  enum machine_mode tmode, mode0;
  rtx pat, op0;
  struct builtin_description *d;
  size_t i;

  /* Assume success; the fall-through at the bottom clears this for
     unrecognized FCODEs.  */
  *expandedp = true;

  /* Syntax check for a 5-bit unsigned immediate.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDD:
    case SPE_BUILTIN_EVSTDH:
    case SPE_BUILTIN_EVSTDW:
    case SPE_BUILTIN_EVSTWHE:
    case SPE_BUILTIN_EVSTWHO:
    case SPE_BUILTIN_EVSTWWE:
    case SPE_BUILTIN_EVSTWWO:
      /* The third argument is the immediate offset.  */
      arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      if (TREE_CODE (arg1) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg1) & ~0x1f)
	{
	  error ("argument 2 must be a 5-bit unsigned literal");
	  return const0_rtx;
	}
      break;
    default:
      break;
    }

  /* Simple binops registered in the SPE-only table.  */
  d = (struct builtin_description *) bdesc_2arg_spe;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
    if (d->code == fcode)
      return rs6000_expand_binop_builtin (d->icode, arglist, target);

  /* SPE comparison predicates.  */
  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_predicate_builtin (d->icode, arglist, target);

  /* Conditional-select (evsel) builtins.  */
  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_evsel_builtin (d->icode, arglist, target);

  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDDX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
    case SPE_BUILTIN_EVSTDHX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
    case SPE_BUILTIN_EVSTDWX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
    case SPE_BUILTIN_EVSTWHEX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
    case SPE_BUILTIN_EVSTWHOX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
    case SPE_BUILTIN_EVSTWWEX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
    case SPE_BUILTIN_EVSTWWOX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
    case SPE_BUILTIN_EVSTDD:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
    case SPE_BUILTIN_EVSTDH:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
    case SPE_BUILTIN_EVSTDW:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
    case SPE_BUILTIN_EVSTWHE:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
    case SPE_BUILTIN_EVSTWHO:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
    case SPE_BUILTIN_EVSTWWE:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
    case SPE_BUILTIN_EVSTWWO:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
    case SPE_BUILTIN_MFSPEFSCR:
      /* Move from the SPEFSCR: no arguments, one result.  */
      icode = CODE_FOR_spe_mfspefscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;
    case SPE_BUILTIN_MTSPEFSCR:
      /* Move to the SPEFSCR: one argument, no result.  */
      icode = CODE_FOR_spe_mtspefscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      if (arg0 == error_mark_node)
	return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;
    default:
      break;
    }

  /* FCODE was not an SPE builtin after all.  */
  *expandedp = false;
  return NULL_RTX;
}
5033
/* Expand an SPE comparison-predicate builtin.  ICODE is the compare
   insn, ARGLIST holds (form, vector, vector) where FORM selects which
   CR bit to extract, and TARGET is a suggested SImode result register.  */
static rtx
spe_expand_predicate_builtin (icode, arglist, target)
     enum insn_code icode;
     tree arglist;
     rtx target;
{
  rtx pat, scratch, tmp;
  /* Argument 1 selects the predicate variant; it must be a constant.  */
  tree form = TREE_VALUE (arglist);
  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int form_int;
  enum rtx_code code;

  if (TREE_CODE (form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_spe_predicate must be a constant");
      return const0_rtx;
    }
  else
    form_int = TREE_INT_CST_LOW (form);

  /* Both compare operands must share a mode.  */
  if (mode0 != mode1)
    abort ();

  /* Bail out before generating bad rtl.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != SImode
      || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
    target = gen_reg_rtx (SImode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  /* The compare deposits its result in a condition register.  */
  scratch = gen_reg_rtx (CCmode);

  pat = GEN_FCN (icode) (scratch, op0, op1);
  if (! pat)
    return const0_rtx;
  emit_insn (pat);

  /* There are 4 variants for each predicate: _any_, _all_, _upper_,
     _lower_.  We use one compare, but look in different bits of the
     CR for each variant.

     There are 2 elements in each SPE simd type (upper/lower).  The CR
     bits are set as follows:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     U     |   L    | (U | L) | (U & L)

     So, for an "all" relationship, BIT 3 would be set.
     For an "any" relationship, BIT 2 would be set.  Etc.

     Following traditional nomenclature, these bits map to:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     LT    | GT     | EQ      | OV

     Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
  */

  switch (form_int)
    {
      /* All variant.  OV bit.  */
    case 0:
      /* We need to get to the OV bit, which is the ORDERED bit.  We
	 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
	 that's ugly and will trigger a validate_condition_mode abort.
	 So let's just use another pattern.  */
      emit_insn (gen_move_from_CR_ov_bit (target, scratch));
      return target;
      /* Any variant.  EQ bit.  */
    case 1:
      code = EQ;
      break;
      /* Upper variant.  LT bit.  */
    case 2:
      code = LT;
      break;
      /* Lower variant.  GT bit.  */
    case 3:
      code = GT;
      break;
    default:
      error ("argument 1 of __builtin_spe_predicate is out of range");
      return const0_rtx;
    }

  /* Extract the chosen CR bit as a 0/1 SImode value.  */
  tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
  emit_move_insn (target, tmp);

  return target;
}
5135
/* The evsel builtins look like this:

     e = __builtin_spe_evsel_OP (a, b, c, d);

   and work like this:

     e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
     e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
*/

/* ICODE is the comparison insn used for a *OP* b; ARGLIST holds the
   four vector arguments; TARGET is a suggested result register.  */
static rtx
spe_expand_evsel_builtin (icode, arglist, target)
     enum insn_code icode;
     tree arglist;
     rtx target;
{
  rtx pat, scratch;
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
  rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  /* Both compare operands must share a mode.  */
  if (mode0 != mode1)
    abort ();

  /* Bail out before generating bad rtl.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node
      || arg2 == error_mark_node || arg3 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != mode0
      || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
    target = gen_reg_rtx (mode0);

  /* NOTE(review): operand[1]'s predicate is reused for all four
     operands below; presumably all operands share the same vector
     register predicate since mode0 == mode1 — confirm against the
     SPE patterns before changing.  */
  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode0, op1);
  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
    op2 = copy_to_mode_reg (mode0, op2);
  if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
    op3 = copy_to_mode_reg (mode0, op3);

  /* Generate the compare.  */
  scratch = gen_reg_rtx (CCmode);
  pat = GEN_FCN (icode) (scratch, op0, op1);
  if (! pat)
    return const0_rtx;
  emit_insn (pat);

  /* Select between op2 and op3 based on the compare result.  */
  if (mode0 == V2SImode)
    emit_insn (gen_spe_evsel (target, op2, op3, scratch));
  else
    emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));

  return target;
}
5199
5200 /* Expand an expression EXP that calls a built-in function,
5201 with result going to TARGET if that's convenient
5202 (and in mode MODE if that's convenient).
5203 SUBTARGET may be used as the target for computing one of EXP's operands.
5204 IGNORE is nonzero if the value is to be ignored. */
5205
5206 static rtx
5207 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
5208 tree exp;
5209 rtx target;
5210 rtx subtarget ATTRIBUTE_UNUSED;
5211 enum machine_mode mode ATTRIBUTE_UNUSED;
5212 int ignore ATTRIBUTE_UNUSED;
5213 {
5214 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5215 tree arglist = TREE_OPERAND (exp, 1);
5216 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5217 struct builtin_description *d;
5218 size_t i;
5219 rtx ret;
5220 bool success;
5221
5222 if (TARGET_ALTIVEC)
5223 {
5224 ret = altivec_expand_builtin (exp, target, &success);
5225
5226 if (success)
5227 return ret;
5228 }
5229 if (TARGET_SPE)
5230 {
5231 ret = spe_expand_builtin (exp, target, &success);
5232
5233 if (success)
5234 return ret;
5235 }
5236
5237 if (TARGET_ALTIVEC || TARGET_SPE)
5238 {
5239 /* Handle simple unary operations. */
5240 d = (struct builtin_description *) bdesc_1arg;
5241 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5242 if (d->code == fcode)
5243 return rs6000_expand_unop_builtin (d->icode, arglist, target);
5244
5245 /* Handle simple binary operations. */
5246 d = (struct builtin_description *) bdesc_2arg;
5247 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5248 if (d->code == fcode)
5249 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5250
5251 /* Handle simple ternary operations. */
5252 d = (struct builtin_description *) bdesc_3arg;
5253 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
5254 if (d->code == fcode)
5255 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
5256 }
5257
5258 abort ();
5259 return NULL_RTX;
5260 }
5261
/* Target hook: register the machine-specific builtins.  Unit-specific
   builtins (SPE, then AltiVec) are registered first; the simple
   operations both units share are registered last.  */
static void
rs6000_init_builtins ()
{
  if (TARGET_SPE)
    spe_init_builtins ();
  if (TARGET_ALTIVEC)
    altivec_init_builtins ();
  if (TARGET_ALTIVEC || TARGET_SPE)
    rs6000_common_init_builtins ();
}
5272
5273 /* Search through a set of builtins and enable the mask bits.
5274 DESC is an array of builtins.
5275 SIZE is the total number of builtins.
5276 START is the builtin enum at which to start.
5277 END is the builtin enum at which to end. */
5278 static void
5279 enable_mask_for_builtins (desc, size, start, end)
5280 struct builtin_description *desc;
5281 int size;
5282 enum rs6000_builtins start, end;
5283 {
5284 int i;
5285
5286 for (i = 0; i < size; ++i)
5287 if (desc[i].code == start)
5288 break;
5289
5290 if (i == size)
5291 return;
5292
5293 for (; i < size; ++i)
5294 {
5295 /* Flip all the bits on. */
5296 desc[i].mask = target_flags;
5297 if (desc[i].code == end)
5298 break;
5299 }
5300 }
5301
/* Create the SPE builtin functions.  The simple unary/binary entries
   live in the shared tables (see rs6000_common_init_builtins); here we
   enable their mask bits and define the irregular SPE builtins.  */
static void
spe_init_builtins ()
{
  tree endlink = void_list_node;
  /* Pointer types used by the load/store builtins below.  */
  tree puint_type_node = build_pointer_type (unsigned_type_node);
  tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
  tree pv2si_type_node = build_pointer_type (V2SI_type_node);
  struct builtin_description *d;
  size_t i;

  /* Function type: v2si f (v2si, v2si, v2si, v2si).  */
  tree v2si_ftype_4_v2si
    = build_function_type
    (V2SI_type_node,
     tree_cons (NULL_TREE, V2SI_type_node,
		tree_cons (NULL_TREE, V2SI_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, V2SI_type_node,
						 endlink)))));

  /* Function type: v2sf f (v2sf, v2sf, v2sf, v2sf).  */
  tree v2sf_ftype_4_v2sf
    = build_function_type
    (V2SF_type_node,
     tree_cons (NULL_TREE, V2SF_type_node,
		tree_cons (NULL_TREE, V2SF_type_node,
			   tree_cons (NULL_TREE, V2SF_type_node,
				      tree_cons (NULL_TREE, V2SF_type_node,
						 endlink)))));

  /* Function type: int f (int, v2si, v2si) -- used by the predicates.  */
  tree int_ftype_int_v2si_v2si
    = build_function_type
    (integer_type_node,
     tree_cons (NULL_TREE, integer_type_node,
		tree_cons (NULL_TREE, V2SI_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      endlink))));

  /* Function type: int f (int, v2sf, v2sf) -- used by the predicates.  */
  tree int_ftype_int_v2sf_v2sf
    = build_function_type
    (integer_type_node,
     tree_cons (NULL_TREE, integer_type_node,
		tree_cons (NULL_TREE, V2SF_type_node,
			   tree_cons (NULL_TREE, V2SF_type_node,
				      endlink))));

  /* Function type: void f (v2si, unsigned *, int).  */
  tree void_ftype_v2si_puint_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, puint_type_node,
						 tree_cons (NULL_TREE,
							    integer_type_node,
							    endlink))));

  /* Function type: void f (v2si, unsigned *, char).  */
  tree void_ftype_v2si_puint_char
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, puint_type_node,
						 tree_cons (NULL_TREE,
							    char_type_node,
							    endlink))));

  /* Function type: void f (v2si, v2si *, int).  */
  tree void_ftype_v2si_pv2si_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, pv2si_type_node,
						 tree_cons (NULL_TREE,
							    integer_type_node,
							    endlink))));

  /* Function type: void f (v2si, v2si *, char).  */
  tree void_ftype_v2si_pv2si_char
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, pv2si_type_node,
						 tree_cons (NULL_TREE,
							    char_type_node,
							    endlink))));

  /* Function type: void f (int).  */
  tree void_ftype_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, integer_type_node, endlink));

  /* Function type: int f (void).  */
  tree int_ftype_void
    = build_function_type (integer_type_node,
			   tree_cons (NULL_TREE, void_type_node, endlink));

  /* Function type: v2si f (v2si *, int).  */
  tree v2si_ftype_pv2si_int
    = build_function_type (V2SI_type_node,
			   tree_cons (NULL_TREE, pv2si_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));

  /* Function type: v2si f (unsigned *, int).  */
  tree v2si_ftype_puint_int
    = build_function_type (V2SI_type_node,
			   tree_cons (NULL_TREE, puint_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));

  /* Function type: v2si f (unsigned short *, int).  */
  tree v2si_ftype_pushort_int
    = build_function_type (V2SI_type_node,
			   tree_cons (NULL_TREE, pushort_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));

  /* The initialization of the simple binary and unary builtins is
     done in rs6000_common_init_builtins, but we have to enable the
     mask bits here manually because we have run out of `target_flags'
     bits.  We really need to redesign this mask business.  */

  enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
			    ARRAY_SIZE (bdesc_2arg),
			    SPE_BUILTIN_EVADDW,
			    SPE_BUILTIN_EVXOR);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
			    ARRAY_SIZE (bdesc_1arg),
			    SPE_BUILTIN_EVABS,
			    SPE_BUILTIN_EVSUBFUSIAAW);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
			    ARRAY_SIZE (bdesc_spe_predicates),
			    SPE_BUILTIN_EVCMPEQ,
			    SPE_BUILTIN_EVFSTSTLT);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
			    ARRAY_SIZE (bdesc_spe_evsel),
			    SPE_BUILTIN_EVSEL_CMPGTS,
			    SPE_BUILTIN_EVSEL_FSTSTEQ);

  /* Initialize irregular SPE builtins.  */

  /* SPEFSCR accessors and indexed stores.  */
  def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
  def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
  def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
  def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
  def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
  def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
  def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
  def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
  def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
  /* Stores with a small literal offset (char-typed last operand).  */
  def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
  def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
  def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
  def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
  def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
  def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
  def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);

  /* Loads.  */
  def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
  def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
  def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
  def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
  def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
  def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
  def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
  def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
  def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
  def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
  def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
  def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
  def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
  def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
  def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);

  /* Predicates.  Pick the function type from the mode of the insn's
     second operand (the first data operand).  */
  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
    {
      tree type;

      switch (insn_data[d->icode].operand[1].mode)
	{
	case V2SImode:
	  type = int_ftype_int_v2si_v2si;
	  break;
	case V2SFmode:
	  type = int_ftype_int_v2sf_v2sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (d->mask, d->name, type, d->code);
    }

  /* Evsel predicates.  Same mode-driven type selection as above, but
     these take four vector operands.  */
  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
    {
      tree type;

      switch (insn_data[d->icode].operand[1].mode)
	{
	case V2SImode:
	  type = v2si_ftype_4_v2si;
	  break;
	case V2SFmode:
	  type = v2sf_ftype_4_v2sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (d->mask, d->name, type, d->code);
    }
}
5511
/* Create the AltiVec builtin functions: the internal load/store
   builtins, the irregular builtins (mtvscr, dss, lvx, stvx, ...), the
   DST variants, the predicates, and the abs* operators.  The simple
   unary/binary builtins are handled in rs6000_common_init_builtins.  */
static void
altivec_init_builtins ()
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  /* Pointer types for the load/store builtin signatures.  */
  tree pfloat_type_node = build_pointer_type (float_type_node);
  tree pint_type_node = build_pointer_type (integer_type_node);
  tree pshort_type_node = build_pointer_type (short_integer_type_node);
  tree pchar_type_node = build_pointer_type (char_type_node);

  tree pvoid_type_node = build_pointer_type (void_type_node);

  /* Pointer-to-const variants, used for the load builtins.  */
  tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
  tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
  tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
  tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));

  tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));

  /* Function types, named <return>_ftype_<args>.  */
  tree int_ftype_int_v4si_v4si
    = build_function_type_list (integer_type_node,
				integer_type_node, V4SI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_pcfloat
    = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
  tree void_ftype_pfloat_v4sf
    = build_function_type_list (void_type_node,
				pfloat_type_node, V4SF_type_node, NULL_TREE);
  tree v4si_ftype_pcint
    = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
  tree void_ftype_pint_v4si
    = build_function_type_list (void_type_node,
				pint_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_pcshort
    = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
  tree void_ftype_pshort_v8hi
    = build_function_type_list (void_type_node,
				pshort_type_node, V8HI_type_node, NULL_TREE);
  tree v16qi_ftype_pcchar
    = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
  tree void_ftype_pchar_v16qi
    = build_function_type_list (void_type_node,
				pchar_type_node, V16QI_type_node, NULL_TREE);
  tree void_ftype_v4si
    = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_void
    = build_function_type (V8HI_type_node, void_list_node);
  tree void_ftype_void
    = build_function_type (void_type_node, void_list_node);
  tree void_ftype_qi
    = build_function_type_list (void_type_node, char_type_node, NULL_TREE);

  tree v16qi_ftype_int_pcvoid
    = build_function_type_list (V16QI_type_node,
				integer_type_node, pcvoid_type_node, NULL_TREE);
  tree v8hi_ftype_int_pcvoid
    = build_function_type_list (V8HI_type_node,
				integer_type_node, pcvoid_type_node, NULL_TREE);
  tree v4si_ftype_int_pcvoid
    = build_function_type_list (V4SI_type_node,
				integer_type_node, pcvoid_type_node, NULL_TREE);

  tree void_ftype_v4si_int_pvoid
    = build_function_type_list (void_type_node,
				V4SI_type_node, integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree void_ftype_v16qi_int_pvoid
    = build_function_type_list (void_type_node,
				V16QI_type_node, integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree void_ftype_v8hi_int_pvoid
    = build_function_type_list (void_type_node,
				V8HI_type_node, integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree int_ftype_int_v8hi_v8hi
    = build_function_type_list (integer_type_node,
				integer_type_node, V8HI_type_node,
				V8HI_type_node, NULL_TREE);
  tree int_ftype_int_v16qi_v16qi
    = build_function_type_list (integer_type_node,
				integer_type_node, V16QI_type_node,
				V16QI_type_node, NULL_TREE);
  tree int_ftype_int_v4sf_v4sf
    = build_function_type_list (integer_type_node,
				integer_type_node, V4SF_type_node,
				V4SF_type_node, NULL_TREE);
  tree v4si_ftype_v4si
    = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi
    = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi
    = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf
    = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree void_ftype_pcvoid_int_char
    = build_function_type_list (void_type_node,
				pcvoid_type_node, integer_type_node,
				char_type_node, NULL_TREE);

  /* Internal load/store builtins used by the vector extensions.  */
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
	       ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
	       ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
	       ALTIVEC_BUILTIN_LD_INTERNAL_4si);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
	       ALTIVEC_BUILTIN_ST_INTERNAL_4si);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
	       ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
	       ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
	       ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
	       ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
  /* VSCR access, data-stream control, and the lv*/
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEHX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVXL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);

  /* Add the DST variants.  */
  d = (struct builtin_description *) bdesc_dst;
  for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
    def_builtin (d->mask, d->name, void_ftype_pcvoid_int_char, d->code);

  /* Initialize the predicates.  The function type is selected from
     the mode of the insn's second operand (the first data operand).  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
    {
      enum machine_mode mode1;
      tree type;

      mode1 = insn_data[dp->icode].operand[1].mode;

      switch (mode1)
	{
	case V4SImode:
	  type = int_ftype_int_v4si_v4si;
	  break;
	case V8HImode:
	  type = int_ftype_int_v8hi_v8hi;
	  break;
	case V16QImode:
	  type = int_ftype_int_v16qi_v16qi;
	  break;
	case V4SFmode:
	  type = int_ftype_int_v4sf_v4sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (dp->mask, dp->name, type, dp->code);
    }

  /* Initialize the abs* operators.  These are unary, so the type is
     selected from the mode of the insn's output operand.  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
    {
      enum machine_mode mode0;
      tree type;

      mode0 = insn_data[d->icode].operand[0].mode;

      switch (mode0)
	{
	case V4SImode:
	  type = v4si_ftype_v4si;
	  break;
	case V8HImode:
	  type = v8hi_ftype_v8hi;
	  break;
	case V16QImode:
	  type = v16qi_ftype_v16qi;
	  break;
	case V4SFmode:
	  type = v4sf_ftype_v4sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (d->mask, d->name, type, d->code);
    }
}
5710
/* Create the builtin functions shared between AltiVec and SPE: the
   simple ternary, binary, and unary operators from the bdesc_3arg,
   bdesc_2arg, and bdesc_1arg tables.  The function type for each
   builtin is derived from the modes of its insn's operands.  */
static void
rs6000_common_init_builtins ()
{
  struct builtin_description *d;
  size_t i;

  /* Function types, named <return>_ftype_<args>.  A trailing "char"
     argument is used for the small literal operands.  */
  tree v4sf_ftype_v4sf_v4sf_v16qi
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node,
				V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v4si_v4si_v16qi
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, V4SI_type_node,
				V16QI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi_v16qi
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, V8HI_type_node,
				V16QI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_v16qi_v16qi
    = build_function_type_list (V16QI_type_node,
				V16QI_type_node, V16QI_type_node,
				V16QI_type_node, NULL_TREE);
  tree v4si_ftype_char
    = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
  tree v8hi_ftype_char
    = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
  tree v16qi_ftype_char
    = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
  tree v8hi_ftype_v16qi
    = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf
    = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);

  tree v2si_ftype_v2si_v2si
    = build_function_type_list (V2SI_type_node,
				V2SI_type_node, V2SI_type_node, NULL_TREE);

  tree v2sf_ftype_v2sf_v2sf
    = build_function_type_list (V2SF_type_node,
				V2SF_type_node, V2SF_type_node, NULL_TREE);

  tree v2si_ftype_int_int
    = build_function_type_list (V2SI_type_node,
				integer_type_node, integer_type_node,
				NULL_TREE);

  tree v2si_ftype_v2si
    = build_function_type_list (V2SI_type_node, V2SI_type_node, NULL_TREE);

  tree v2sf_ftype_v2sf
    = build_function_type_list (V2SF_type_node,
				V2SF_type_node, NULL_TREE);

  tree v2sf_ftype_v2si
    = build_function_type_list (V2SF_type_node,
				V2SI_type_node, NULL_TREE);

  tree v2si_ftype_v2sf
    = build_function_type_list (V2SI_type_node,
				V2SF_type_node, NULL_TREE);

  tree v2si_ftype_v2si_char
    = build_function_type_list (V2SI_type_node,
				V2SI_type_node, char_type_node, NULL_TREE);

  tree v2si_ftype_int_char
    = build_function_type_list (V2SI_type_node,
				integer_type_node, char_type_node, NULL_TREE);

  tree v2si_ftype_char
    = build_function_type_list (V2SI_type_node, char_type_node, NULL_TREE);

  tree int_ftype_int_int
    = build_function_type_list (integer_type_node,
				integer_type_node, integer_type_node,
				NULL_TREE);

  tree v4si_ftype_v4si_v4si
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_v4si_char
    = build_function_type_list (V4SF_type_node,
				V4SI_type_node, char_type_node, NULL_TREE);
  tree v4si_ftype_v4sf_char
    = build_function_type_list (V4SI_type_node,
				V4SF_type_node, char_type_node, NULL_TREE);
  tree v4si_ftype_v4si_char
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, char_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_char
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, char_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_char
    = build_function_type_list (V16QI_type_node,
				V16QI_type_node, char_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_v16qi_char
    = build_function_type_list (V16QI_type_node,
				V16QI_type_node, V16QI_type_node,
				char_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi_char
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, V8HI_type_node,
				char_type_node, NULL_TREE);
  tree v4si_ftype_v4si_v4si_char
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, V4SI_type_node,
				char_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf_char
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node,
				char_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf_v4si
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node,
				V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf_v4sf
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node,
				V4SF_type_node, NULL_TREE);
  tree v4si_ftype_v4si_v4si_v4si
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, V4SI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi_v8hi
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, V8HI_type_node,
				V8HI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi_v8hi_v4si
    = build_function_type_list (V4SI_type_node,
				V8HI_type_node, V8HI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v4si_ftype_v16qi_v16qi_v4si
    = build_function_type_list (V4SI_type_node,
				V16QI_type_node, V16QI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_v16qi
    = build_function_type_list (V16QI_type_node,
				V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v4sf_v4sf
    = build_function_type_list (V4SI_type_node,
				V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree v8hi_ftype_v16qi_v16qi
    = build_function_type_list (V8HI_type_node,
				V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi_v8hi
    = build_function_type_list (V4SI_type_node,
				V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v8hi_ftype_v4si_v4si
    = build_function_type_list (V8HI_type_node,
				V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v16qi_ftype_v8hi_v8hi
    = build_function_type_list (V16QI_type_node,
				V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v4si_ftype_v16qi_v4si
    = build_function_type_list (V4SI_type_node,
				V16QI_type_node, V4SI_type_node, NULL_TREE);
  tree v4si_ftype_v16qi_v16qi
    = build_function_type_list (V4SI_type_node,
				V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi_v4si
    = build_function_type_list (V4SI_type_node,
				V8HI_type_node, V4SI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi
    = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
  tree int_ftype_v4si_v4si
    = build_function_type_list (integer_type_node,
				V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree int_ftype_v4sf_v4sf
    = build_function_type_list (integer_type_node,
				V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree int_ftype_v16qi_v16qi
    = build_function_type_list (integer_type_node,
				V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree int_ftype_v8hi_v8hi
    = build_function_type_list (integer_type_node,
				V8HI_type_node, V8HI_type_node, NULL_TREE);

  /* Add the simple ternary operators.  */
  d = (struct builtin_description *) bdesc_3arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
    {

      enum machine_mode mode0, mode1, mode2, mode3;
      tree type;

      /* Skip table entries with no builtin or no insn pattern.  */
      if (d->name == 0 || d->icode == CODE_FOR_nothing)
	continue;

      mode0 = insn_data[d->icode].operand[0].mode;
      mode1 = insn_data[d->icode].operand[1].mode;
      mode2 = insn_data[d->icode].operand[2].mode;
      mode3 = insn_data[d->icode].operand[3].mode;

      /* When all four are of the same mode.  */
      if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
	{
	  switch (mode0)
	    {
	    case V4SImode:
	      type = v4si_ftype_v4si_v4si_v4si;
	      break;
	    case V4SFmode:
	      type = v4sf_ftype_v4sf_v4sf_v4sf;
	      break;
	    case V8HImode:
	      type = v8hi_ftype_v8hi_v8hi_v8hi;
	      break;
	    case V16QImode:
	      type = v16qi_ftype_v16qi_v16qi_v16qi;
	      break;
	    default:
	      abort();
	    }
	}
      /* First three operands share a mode, last is a v16qi (e.g. the
	 vperm permute-control vector).  */
      else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
	{
	  switch (mode0)
	    {
	    case V4SImode:
	      type = v4si_ftype_v4si_v4si_v16qi;
	      break;
	    case V4SFmode:
	      type = v4sf_ftype_v4sf_v4sf_v16qi;
	      break;
	    case V8HImode:
	      type = v8hi_ftype_v8hi_v8hi_v16qi;
	      break;
	    case V16QImode:
	      type = v16qi_ftype_v16qi_v16qi_v16qi;
	      break;
	    default:
	      abort();
	    }
	}
      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
	       && mode3 == V4SImode)
	type = v4si_ftype_v16qi_v16qi_v4si;
      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
	       && mode3 == V4SImode)
	type = v4si_ftype_v8hi_v8hi_v4si;
      else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
	       && mode3 == V4SImode)
	type = v4sf_ftype_v4sf_v4sf_v4si;

      /* vchar, vchar, vchar, 4 bit literal.  */
      else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
	       && mode3 == QImode)
	type = v16qi_ftype_v16qi_v16qi_char;

      /* vshort, vshort, vshort, 4 bit literal.  */
      else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
	       && mode3 == QImode)
	type = v8hi_ftype_v8hi_v8hi_char;

      /* vint, vint, vint, 4 bit literal.  */
      else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
	       && mode3 == QImode)
	type = v4si_ftype_v4si_v4si_char;

      /* vfloat, vfloat, vfloat, 4 bit literal.  */
      else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
	       && mode3 == QImode)
	type = v4sf_ftype_v4sf_v4sf_char;

      else
	abort ();

      def_builtin (d->mask, d->name, type, d->code);
    }

  /* Add the simple binary operators.  */
  d = (struct builtin_description *) bdesc_2arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    {
      enum machine_mode mode0, mode1, mode2;
      tree type;

      /* Skip table entries with no builtin or no insn pattern.  */
      if (d->name == 0 || d->icode == CODE_FOR_nothing)
	continue;

      mode0 = insn_data[d->icode].operand[0].mode;
      mode1 = insn_data[d->icode].operand[1].mode;
      mode2 = insn_data[d->icode].operand[2].mode;

      /* When all three operands are of the same mode.  */
      if (mode0 == mode1 && mode1 == mode2)
	{
	  switch (mode0)
	    {
	    case V4SFmode:
	      type = v4sf_ftype_v4sf_v4sf;
	      break;
	    case V4SImode:
	      type = v4si_ftype_v4si_v4si;
	      break;
	    case V16QImode:
	      type = v16qi_ftype_v16qi_v16qi;
	      break;
	    case V8HImode:
	      type = v8hi_ftype_v8hi_v8hi;
	      break;
	    case V2SImode:
	      type = v2si_ftype_v2si_v2si;
	      break;
	    case V2SFmode:
	      type = v2sf_ftype_v2sf_v2sf;
	      break;
	    case SImode:
	      type = int_ftype_int_int;
	      break;
	    default:
	      abort ();
	    }
	}

      /* A few other combos we really don't want to do manually.  */

      /* vint, vfloat, vfloat.  */
      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
	type = v4si_ftype_v4sf_v4sf;

      /* vshort, vchar, vchar.  */
      else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
	type = v8hi_ftype_v16qi_v16qi;

      /* vint, vshort, vshort.  */
      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
	type = v4si_ftype_v8hi_v8hi;

      /* vshort, vint, vint.  */
      else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
	type = v8hi_ftype_v4si_v4si;

      /* vchar, vshort, vshort.  */
      else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
	type = v16qi_ftype_v8hi_v8hi;

      /* vint, vchar, vint.  */
      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
	type = v4si_ftype_v16qi_v4si;

      /* vint, vchar, vchar.  */
      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
	type = v4si_ftype_v16qi_v16qi;

      /* vint, vshort, vint.  */
      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
	type = v4si_ftype_v8hi_v4si;

      /* vint, vint, 5 bit literal.  */
      else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
	type = v4si_ftype_v4si_char;

      /* vshort, vshort, 5 bit literal.  */
      else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
	type = v8hi_ftype_v8hi_char;

      /* vchar, vchar, 5 bit literal.  */
      else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
	type = v16qi_ftype_v16qi_char;

      /* vfloat, vint, 5 bit literal.  */
      else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
	type = v4sf_ftype_v4si_char;

      /* vint, vfloat, 5 bit literal.  */
      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
	type = v4si_ftype_v4sf_char;

      /* SPE combinations: v2si from two ints, v2si with literal, etc.  */
      else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
	type = v2si_ftype_int_int;

      else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
	type = v2si_ftype_v2si_char;

      else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
	type = v2si_ftype_int_char;

      /* int, x, x.  */
      else if (mode0 == SImode)
	{
	  switch (mode1)
	    {
	    case V4SImode:
	      type = int_ftype_v4si_v4si;
	      break;
	    case V4SFmode:
	      type = int_ftype_v4sf_v4sf;
	      break;
	    case V16QImode:
	      type = int_ftype_v16qi_v16qi;
	      break;
	    case V8HImode:
	      type = int_ftype_v8hi_v8hi;
	      break;
	    default:
	      abort ();
	    }
	}

      else
	abort ();

      def_builtin (d->mask, d->name, type, d->code);
    }

  /* Add the simple unary operators.  */
  d = (struct builtin_description *) bdesc_1arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
    {
      enum machine_mode mode0, mode1;
      tree type;

      /* Skip table entries with no builtin or no insn pattern.  */
      if (d->name == 0 || d->icode == CODE_FOR_nothing)
	continue;

      mode0 = insn_data[d->icode].operand[0].mode;
      mode1 = insn_data[d->icode].operand[1].mode;

      if (mode0 == V4SImode && mode1 == QImode)
	type = v4si_ftype_char;
      else if (mode0 == V8HImode && mode1 == QImode)
	type = v8hi_ftype_char;
      else if (mode0 == V16QImode && mode1 == QImode)
	type = v16qi_ftype_char;
      else if (mode0 == V4SFmode && mode1 == V4SFmode)
	type = v4sf_ftype_v4sf;
      else if (mode0 == V8HImode && mode1 == V16QImode)
	type = v8hi_ftype_v16qi;
      else if (mode0 == V4SImode && mode1 == V8HImode)
	type = v4si_ftype_v8hi;
      else if (mode0 == V2SImode && mode1 == V2SImode)
	type = v2si_ftype_v2si;
      else if (mode0 == V2SFmode && mode1 == V2SFmode)
	type = v2sf_ftype_v2sf;
      else if (mode0 == V2SFmode && mode1 == V2SImode)
	type = v2sf_ftype_v2si;
      else if (mode0 == V2SImode && mode1 == V2SFmode)
	type = v2si_ftype_v2sf;
      else if (mode0 == V2SImode && mode1 == QImode)
	type = v2si_ftype_char;
      else
	abort ();

      def_builtin (d->mask, d->name, type, d->code);
    }
}
6164
6165 \f
6166 /* Expand a block move operation, and return 1 if successful. Return 0
6167 if we should let the compiler generate normal code.
6168
6169 operands[0] is the destination
6170 operands[1] is the source
6171 operands[2] is the length
6172 operands[3] is the alignment */
6173
6174 #define MAX_MOVE_REG 4
6175
int
expand_block_move (operands)
     rtx operands[];
{
  rtx orig_dest = operands[0];
  rtx orig_src = operands[1];
  rtx bytes_rtx = operands[2];
  rtx align_rtx = operands[3];
  int constp = (GET_CODE (bytes_rtx) == CONST_INT);
  int align;
  int bytes;
  int offset;
  int move_bytes;

  /* If this is not a fixed size move, just call memcpy */
  if (! constp)
    return 0;

  /* If this is not a fixed size alignment, abort */
  if (GET_CODE (align_rtx) != CONST_INT)
    abort ();
  align = INTVAL (align_rtx);

  /* Anything to move? */
  bytes = INTVAL (bytes_rtx);
  if (bytes <= 0)
    return 1;

  /* store_one_arg depends on expand_block_move to handle at least the size of
     reg_parm_stack_space.  */
  if (bytes > (TARGET_POWERPC64 ? 64 : 32))
    return 0;

  if (TARGET_STRING)    /* string instructions are available */
    {
      for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
        {
          /* Depending on the chunk selected below, we emit either a
             string-move pattern (movstrsi variant, taking dest, src,
             length, alignment) or an ordinary register-sized move (mov
             variant, taking dest and src).  The union holds whichever
             generator function applies; MODE stays BLKmode for the
             string-move cases.  */
          union {
            rtx (*movstrsi) PARAMS ((rtx, rtx, rtx, rtx));
            rtx (*mov) PARAMS ((rtx, rtx));
          } gen_func;
          enum machine_mode mode = BLKmode;
          rtx src, dest;

          /* The multi-register string moves clobber r5..r12, so they
             may only be used when none of those registers is fixed.  */
          if (bytes > 24        /* move up to 32 bytes at a time */
              && ! fixed_regs[5]
              && ! fixed_regs[6]
              && ! fixed_regs[7]
              && ! fixed_regs[8]
              && ! fixed_regs[9]
              && ! fixed_regs[10]
              && ! fixed_regs[11]
              && ! fixed_regs[12])
            {
              move_bytes = (bytes > 32) ? 32 : bytes;
              gen_func.movstrsi = gen_movstrsi_8reg;
            }
          else if (bytes > 16   /* move up to 24 bytes at a time */
                   && ! fixed_regs[5]
                   && ! fixed_regs[6]
                   && ! fixed_regs[7]
                   && ! fixed_regs[8]
                   && ! fixed_regs[9]
                   && ! fixed_regs[10])
            {
              move_bytes = (bytes > 24) ? 24 : bytes;
              gen_func.movstrsi = gen_movstrsi_6reg;
            }
          else if (bytes > 8    /* move up to 16 bytes at a time */
                   && ! fixed_regs[5]
                   && ! fixed_regs[6]
                   && ! fixed_regs[7]
                   && ! fixed_regs[8])
            {
              move_bytes = (bytes > 16) ? 16 : bytes;
              gen_func.movstrsi = gen_movstrsi_4reg;
            }
          else if (bytes >= 8 && TARGET_POWERPC64
                   /* 64-bit loads and stores require word-aligned
                      displacements.  */
                   && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
            {
              move_bytes = 8;
              mode = DImode;
              gen_func.mov = gen_movdi;
            }
          else if (bytes > 4 && !TARGET_POWERPC64)
            {                   /* move up to 8 bytes at a time */
              move_bytes = (bytes > 8) ? 8 : bytes;
              gen_func.movstrsi = gen_movstrsi_2reg;
            }
          else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
            {                   /* move 4 bytes */
              move_bytes = 4;
              mode = SImode;
              gen_func.mov = gen_movsi;
            }
          else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
            {                   /* move 2 bytes */
              move_bytes = 2;
              mode = HImode;
              gen_func.mov = gen_movhi;
            }
          else if (bytes == 1)  /* move 1 byte */
            {
              move_bytes = 1;
              mode = QImode;
              gen_func.mov = gen_movqi;
            }
          else
            {                   /* move up to 4 bytes at a time */
              move_bytes = (bytes > 4) ? 4 : bytes;
              gen_func.movstrsi = gen_movstrsi_1reg;
            }

          src = adjust_address (orig_src, mode, offset);
          dest = adjust_address (orig_dest, mode, offset);

          if (mode == BLKmode)
            {
              /* Move the address into scratch registers.  The movstrsi
                 patterns require zero offset.  */
              if (!REG_P (XEXP (src, 0)))
                {
                  rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
                  src = replace_equiv_address (src, src_reg);
                }
              set_mem_size (src, GEN_INT (move_bytes));

              if (!REG_P (XEXP (dest, 0)))
                {
                  rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
                  dest = replace_equiv_address (dest, dest_reg);
                }
              set_mem_size (dest, GEN_INT (move_bytes));

              emit_insn ((*gen_func.movstrsi) (dest, src,
                                               GEN_INT (move_bytes & 31),
                                               align_rtx));
            }
          else
            {
              /* Register-sized chunk: load into a fresh temporary, then
                 store it out.  */
              rtx tmp_reg = gen_reg_rtx (mode);

              emit_insn ((*gen_func.mov) (tmp_reg, src));
              emit_insn ((*gen_func.mov) (dest, tmp_reg));
            }
        }
    }

  else /* string instructions not available */
    {
      rtx stores[MAX_MOVE_REG];
      int num_reg = 0;
      int i;

      for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
        {
          rtx (*gen_mov_func) PARAMS ((rtx, rtx));
          enum machine_mode mode;
          rtx src, dest, tmp_reg;

          /* Generate the appropriate load and store, saving the stores
             for later.  Batching up to MAX_MOVE_REG stores after their
             loads keeps the loads schedulable together.  */
          if (bytes >= 8 && TARGET_POWERPC64
              /* 64-bit loads and stores require word-aligned
                 displacements.  */
              && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
            {
              move_bytes = 8;
              mode = DImode;
              gen_mov_func = gen_movdi;
            }
          else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
            {
              move_bytes = 4;
              mode = SImode;
              gen_mov_func = gen_movsi;
            }
          else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
            {
              move_bytes = 2;
              mode = HImode;
              gen_mov_func = gen_movhi;
            }
          else
            {
              move_bytes = 1;
              mode = QImode;
              gen_mov_func = gen_movqi;
            }

          src = adjust_address (orig_src, mode, offset);
          dest = adjust_address (orig_dest, mode, offset);
          tmp_reg = gen_reg_rtx (mode);

          emit_insn ((*gen_mov_func) (tmp_reg, src));
          stores[num_reg++] = (*gen_mov_func) (dest, tmp_reg);

          if (num_reg >= MAX_MOVE_REG)
            {
              for (i = 0; i < num_reg; i++)
                emit_insn (stores[i]);
              num_reg = 0;
            }
        }

      /* Flush any stores left over from the final partial batch.  */
      for (i = 0; i < num_reg; i++)
        emit_insn (stores[i]);
    }

  return 1;
}
6389
6390 \f
6391 /* Return 1 if OP is a load multiple operation. It is known to be a
6392 PARALLEL and the first section will be tested. */
6393
6394 int
6395 load_multiple_operation (op, mode)
6396 rtx op;
6397 enum machine_mode mode ATTRIBUTE_UNUSED;
6398 {
6399 int count = XVECLEN (op, 0);
6400 unsigned int dest_regno;
6401 rtx src_addr;
6402 int i;
6403
6404 /* Perform a quick check so we don't blow up below. */
6405 if (count <= 1
6406 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6407 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6408 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
6409 return 0;
6410
6411 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6412 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
6413
6414 for (i = 1; i < count; i++)
6415 {
6416 rtx elt = XVECEXP (op, 0, i);
6417
6418 if (GET_CODE (elt) != SET
6419 || GET_CODE (SET_DEST (elt)) != REG
6420 || GET_MODE (SET_DEST (elt)) != SImode
6421 || REGNO (SET_DEST (elt)) != dest_regno + i
6422 || GET_CODE (SET_SRC (elt)) != MEM
6423 || GET_MODE (SET_SRC (elt)) != SImode
6424 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
6425 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
6426 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
6427 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
6428 return 0;
6429 }
6430
6431 return 1;
6432 }
6433
6434 /* Similar, but tests for store multiple. Here, the second vector element
6435 is a CLOBBER. It will be tested later. */
6436
6437 int
6438 store_multiple_operation (op, mode)
6439 rtx op;
6440 enum machine_mode mode ATTRIBUTE_UNUSED;
6441 {
6442 int count = XVECLEN (op, 0) - 1;
6443 unsigned int src_regno;
6444 rtx dest_addr;
6445 int i;
6446
6447 /* Perform a quick check so we don't blow up below. */
6448 if (count <= 1
6449 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6450 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
6451 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
6452 return 0;
6453
6454 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6455 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
6456
6457 for (i = 1; i < count; i++)
6458 {
6459 rtx elt = XVECEXP (op, 0, i + 1);
6460
6461 if (GET_CODE (elt) != SET
6462 || GET_CODE (SET_SRC (elt)) != REG
6463 || GET_MODE (SET_SRC (elt)) != SImode
6464 || REGNO (SET_SRC (elt)) != src_regno + i
6465 || GET_CODE (SET_DEST (elt)) != MEM
6466 || GET_MODE (SET_DEST (elt)) != SImode
6467 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
6468 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
6469 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
6470 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
6471 return 0;
6472 }
6473
6474 return 1;
6475 }
6476
6477 /* Return a string to perform a load_multiple operation.
6478 operands[0] is the vector.
6479 operands[1] is the source address.
6480 operands[2] is the first destination register. */
6481
const char *
rs6000_output_load_multiple (operands)
     rtx operands[3];
{
  /* We have to handle the case where the pseudo used to contain the address
     is assigned to one of the output registers.  */
  int i, j;
  int words = XVECLEN (operands[0], 0);
  rtx xop[10];

  /* A single word needs no load-string insn at all.  */
  if (XVECLEN (operands[0], 0) == 1)
    return "{l|lwz} %2,0(%1)";

  for (i = 0; i < words; i++)
    if (refers_to_regno_p (REGNO (operands[2]) + i,
                           REGNO (operands[2]) + i + 1, operands[1], 0))
      {
        if (i == words-1)
          {
            /* The address register is the last destination: lswi the
               first words-1 registers, then load the final word (which
               clobbers the address) with a separate lwz.  */
            xop[0] = GEN_INT (4 * (words-1));
            xop[1] = operands[1];
            xop[2] = operands[2];
            output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
            return "";
          }
        else if (i == 0)
          {
            /* The address register is the first destination: advance
               the address past word 0, lswi the remaining words, then
               load word 0 last (clobbering the address register).  */
            xop[0] = GEN_INT (4 * (words-1));
            xop[1] = operands[1];
            xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
            output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
            return "";
          }
        else
          {
            /* The address register lands somewhere in the middle: emit
               individual loads, deferring the one that clobbers the
               address register until the very end.  */
            for (j = 0; j < words; j++)
              if (j != i)
                {
                  xop[0] = GEN_INT (j * 4);
                  xop[1] = operands[1];
                  xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
                  output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
                }
            xop[0] = GEN_INT (i * 4);
            xop[1] = operands[1];
            output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
            return "";
          }
      }

  /* The address register is not among the destinations; a single lswi
     does the whole job.  */
  return "{lsi|lswi} %2,%1,%N0";
}
6534
6535 /* Return 1 for a parallel vrsave operation. */
6536
6537 int
6538 vrsave_operation (op, mode)
6539 rtx op;
6540 enum machine_mode mode ATTRIBUTE_UNUSED;
6541 {
6542 int count = XVECLEN (op, 0);
6543 unsigned int dest_regno, src_regno;
6544 int i;
6545
6546 if (count <= 1
6547 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6548 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6549 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
6550 return 0;
6551
6552 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6553 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6554
6555 if (dest_regno != VRSAVE_REGNO
6556 && src_regno != VRSAVE_REGNO)
6557 return 0;
6558
6559 for (i = 1; i < count; i++)
6560 {
6561 rtx elt = XVECEXP (op, 0, i);
6562
6563 if (GET_CODE (elt) != CLOBBER
6564 && GET_CODE (elt) != SET)
6565 return 0;
6566 }
6567
6568 return 1;
6569 }
6570
/* Return 1 for a PARALLEL suitable for mtcrf.  */
6572
int
mtcrf_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  int i;
  rtx src_reg;

  /* Perform a quick check so we don't blow up below.  */
  if (count < 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
      || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
    return 0;
  src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);

  /* The value moved into the CR fields must come from a single SImode
     general-purpose register.  */
  if (GET_CODE (src_reg) != REG
      || GET_MODE (src_reg) != SImode
      || ! INT_REGNO_P (REGNO (src_reg)))
    return 0;

  for (i = 0; i < count; i++)
    {
      rtx exp = XVECEXP (op, 0, i);
      rtx unspec;
      int maskval;

      /* Every element must set one CCmode CR field...  */
      if (GET_CODE (exp) != SET
          || GET_CODE (SET_DEST (exp)) != REG
          || GET_MODE (SET_DEST (exp)) != CCmode
          || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
        return 0;
      unspec = SET_SRC (exp);
      /* ...from an unspec of SRC_REG paired with the FXM mask bit
         selecting that CR field.  */
      maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));

      /* NOTE(review): 20 is presumably the unspec number used by the
         mtcrf pattern in rs6000.md -- confirm and keep in sync.  */
      if (GET_CODE (unspec) != UNSPEC
          || XINT (unspec, 1) != 20
          || XVECLEN (unspec, 0) != 2
          || XVECEXP (unspec, 0, 0) != src_reg
          || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
          || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
        return 0;
    }
  return 1;
}
6619
/* Return 1 for a PARALLEL suitable for lmw.  */
6621
int
lmw_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int dest_regno;
  rtx src_addr;
  unsigned int base_regno;
  HOST_WIDE_INT offset;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);

  /* lmw loads from the starting register through r31, so the element
     count is fixed by the starting register number.  */
  if (dest_regno > 31
      || count != 32 - (int) dest_regno)
    return 0;

  /* Decompose the first address into base register + constant offset.
     A base of r0 is rejected (r0 as a base reads as literal zero).  */
  if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
    {
      offset = 0;
      base_regno = REGNO (src_addr);
      if (base_regno == 0)
        return 0;
    }
  else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
    {
      offset = INTVAL (XEXP (src_addr, 1));
      base_regno = REGNO (XEXP (src_addr, 0));
    }
  else
    return 0;

  for (i = 0; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);
      rtx newaddr;
      rtx addr_reg;
      HOST_WIDE_INT newoffset;

      /* Element I must load SImode register DEST_REGNO + I from
         memory at the same base register, offset stepping by 4.  */
      if (GET_CODE (elt) != SET
          || GET_CODE (SET_DEST (elt)) != REG
          || GET_MODE (SET_DEST (elt)) != SImode
          || REGNO (SET_DEST (elt)) != dest_regno + i
          || GET_CODE (SET_SRC (elt)) != MEM
          || GET_MODE (SET_SRC (elt)) != SImode)
        return 0;
      newaddr = XEXP (SET_SRC (elt), 0);
      if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
        {
          newoffset = 0;
          addr_reg = newaddr;
        }
      else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
        {
          addr_reg = XEXP (newaddr, 0);
          newoffset = INTVAL (XEXP (newaddr, 1));
        }
      else
        return 0;
      if (REGNO (addr_reg) != base_regno
          || newoffset != offset + 4 * i)
        return 0;
    }

  return 1;
}
6697
/* Return 1 for a PARALLEL suitable for stmw.  */
6699
int
stmw_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int src_regno;
  rtx dest_addr;
  unsigned int base_regno;
  HOST_WIDE_INT offset;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);

  /* stmw stores from the starting register through r31, so the element
     count is fixed by the starting register number.  */
  if (src_regno > 31
      || count != 32 - (int) src_regno)
    return 0;

  /* Decompose the first address into base register + constant offset.
     A base of r0 is rejected (r0 as a base reads as literal zero).  */
  if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
    {
      offset = 0;
      base_regno = REGNO (dest_addr);
      if (base_regno == 0)
        return 0;
    }
  else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
    {
      offset = INTVAL (XEXP (dest_addr, 1));
      base_regno = REGNO (XEXP (dest_addr, 0));
    }
  else
    return 0;

  for (i = 0; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);
      rtx newaddr;
      rtx addr_reg;
      HOST_WIDE_INT newoffset;

      /* Element I must store SImode register SRC_REGNO + I to memory
         at the same base register, offset stepping by 4.  */
      if (GET_CODE (elt) != SET
          || GET_CODE (SET_SRC (elt)) != REG
          || GET_MODE (SET_SRC (elt)) != SImode
          || REGNO (SET_SRC (elt)) != src_regno + i
          || GET_CODE (SET_DEST (elt)) != MEM
          || GET_MODE (SET_DEST (elt)) != SImode)
        return 0;
      newaddr = XEXP (SET_DEST (elt), 0);
      if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
        {
          newoffset = 0;
          addr_reg = newaddr;
        }
      else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
        {
          addr_reg = XEXP (newaddr, 0);
          newoffset = INTVAL (XEXP (newaddr, 1));
        }
      else
        return 0;
      if (REGNO (addr_reg) != base_regno
          || newoffset != offset + 4 * i)
        return 0;
    }

  return 1;
}
6775 \f
6776 /* A validation routine: say whether CODE, a condition code, and MODE
6777 match. The other alternatives either don't make sense or should
6778 never be generated. */
6779
static void
validate_condition_mode (code, mode)
     enum rtx_code code;
     enum machine_mode mode;
{
  /* CODE must be a comparison code and MODE a condition-code mode.  */
  if (GET_RTX_CLASS (code) != '<'
      || GET_MODE_CLASS (mode) != MODE_CC)
    abort ();

  /* These don't make sense: signed comparisons on an unsigned CC.  */
  if ((code == GT || code == LT || code == GE || code == LE)
      && mode == CCUNSmode)
    abort ();

  /* ...or unsigned comparisons on anything but an unsigned CC.  */
  if ((code == GTU || code == LTU || code == GEU || code == LEU)
      && mode != CCUNSmode)
    abort ();

  /* The ordered/unordered codes only exist for floating point.  */
  if (mode != CCFPmode
      && (code == ORDERED || code == UNORDERED
          || code == UNEQ || code == LTGT
          || code == UNGT || code == UNLT
          || code == UNGE || code == UNLE))
    abort ();

  /* These should never be generated except for
     flag_unsafe_math_optimizations and flag_finite_math_only.  */
  if (mode == CCFPmode
      && ! flag_unsafe_math_optimizations
      && ! flag_finite_math_only
      && (code == LE || code == GE
          || code == UNEQ || code == LTGT
          || code == UNGT || code == UNLT))
    abort ();

  /* These are invalid; the information is not there.  */
  if (mode == CCEQmode
      && code != EQ && code != NE)
    abort ();
}
6820
6821 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
6822 We only check the opcode against the mode of the CC value here. */
6823
6824 int
6825 branch_comparison_operator (op, mode)
6826 rtx op;
6827 enum machine_mode mode ATTRIBUTE_UNUSED;
6828 {
6829 enum rtx_code code = GET_CODE (op);
6830 enum machine_mode cc_mode;
6831
6832 if (GET_RTX_CLASS (code) != '<')
6833 return 0;
6834
6835 cc_mode = GET_MODE (XEXP (op, 0));
6836 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
6837 return 0;
6838
6839 validate_condition_mode (code, cc_mode);
6840
6841 return 1;
6842 }
6843
6844 /* Return 1 if OP is a comparison operation that is valid for a branch
6845 insn and which is true if the corresponding bit in the CC register
6846 is set. */
6847
6848 int
6849 branch_positive_comparison_operator (op, mode)
6850 rtx op;
6851 enum machine_mode mode;
6852 {
6853 enum rtx_code code;
6854
6855 if (! branch_comparison_operator (op, mode))
6856 return 0;
6857
6858 code = GET_CODE (op);
6859 return (code == EQ || code == LT || code == GT
6860 || (TARGET_SPE && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
6861 || code == LTU || code == GTU
6862 || code == UNORDERED);
6863 }
6864
6865 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
6866 We check the opcode against the mode of the CC value and disallow EQ or
6867 NE comparisons for integers. */
6868
6869 int
6870 scc_comparison_operator (op, mode)
6871 rtx op;
6872 enum machine_mode mode;
6873 {
6874 enum rtx_code code = GET_CODE (op);
6875 enum machine_mode cc_mode;
6876
6877 if (GET_MODE (op) != mode && mode != VOIDmode)
6878 return 0;
6879
6880 if (GET_RTX_CLASS (code) != '<')
6881 return 0;
6882
6883 cc_mode = GET_MODE (XEXP (op, 0));
6884 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
6885 return 0;
6886
6887 validate_condition_mode (code, cc_mode);
6888
6889 if (code == NE && cc_mode != CCFPmode)
6890 return 0;
6891
6892 return 1;
6893 }
6894
6895 int
6896 trap_comparison_operator (op, mode)
6897 rtx op;
6898 enum machine_mode mode;
6899 {
6900 if (mode != VOIDmode && mode != GET_MODE (op))
6901 return 0;
6902 return GET_RTX_CLASS (GET_CODE (op)) == '<';
6903 }
6904
6905 int
6906 boolean_operator (op, mode)
6907 rtx op;
6908 enum machine_mode mode ATTRIBUTE_UNUSED;
6909 {
6910 enum rtx_code code = GET_CODE (op);
6911 return (code == AND || code == IOR || code == XOR);
6912 }
6913
6914 int
6915 boolean_or_operator (op, mode)
6916 rtx op;
6917 enum machine_mode mode ATTRIBUTE_UNUSED;
6918 {
6919 enum rtx_code code = GET_CODE (op);
6920 return (code == IOR || code == XOR);
6921 }
6922
6923 int
6924 min_max_operator (op, mode)
6925 rtx op;
6926 enum machine_mode mode ATTRIBUTE_UNUSED;
6927 {
6928 enum rtx_code code = GET_CODE (op);
6929 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
6930 }
6931 \f
6932 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
6933 mask required to convert the result of a rotate insn into a shift
6934 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
6935
6936 int
6937 includes_lshift_p (shiftop, andop)
6938 rtx shiftop;
6939 rtx andop;
6940 {
6941 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
6942
6943 shift_mask <<= INTVAL (shiftop);
6944
6945 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
6946 }
6947
6948 /* Similar, but for right shift. */
6949
6950 int
6951 includes_rshift_p (shiftop, andop)
6952 rtx shiftop;
6953 rtx andop;
6954 {
6955 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
6956
6957 shift_mask >>= INTVAL (shiftop);
6958
6959 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
6960 }
6961
6962 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
6963 to perform a left shift. It must have exactly SHIFTOP least
6964 significant 0's, then one or more 1's, then zero or more 0's. */
6965
int
includes_rldic_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      c = INTVAL (andop);
      /* All-zeros and all-ones masks are never valid here.  */
      if (c == 0 || c == ~0)
        return 0;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must coincide with the LSB of the shift mask.  */
      if (-lsb != shift_mask)
        return 0;

      /* Invert to look for the next transition (if any).  */
      c = ~c;

      /* Remove the low group of ones (originally low group of zeros).  */
      c &= -lsb;

      /* Again find the lsb, and check we have all 1's above.  */
      lsb = c & -c;
      return c == -lsb;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
           && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* A 64-bit constant that does not fit a CONST_INT: on a 32-bit
         host it is split into high and low HOST_WIDE_INT halves.  */
      HOST_WIDE_INT low, high, lsb;
      HOST_WIDE_INT shift_mask_low, shift_mask_high;

      low = CONST_DOUBLE_LOW (andop);
      if (HOST_BITS_PER_WIDE_INT < 64)
        high = CONST_DOUBLE_HIGH (andop);

      /* Reject all-zeros and all-ones masks across both halves.  */
      if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
          || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
        return 0;

      /* All interesting bits lie in the high half: run the CONST_INT
         check there, with the shift amount reduced by 32.  */
      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
        {
          shift_mask_high = ~0;
          if (INTVAL (shiftop) > 32)
            shift_mask_high <<= INTVAL (shiftop) - 32;

          lsb = high & -high;

          if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
            return 0;

          high = ~high;
          high &= -lsb;

          lsb = high & -high;
          return high == -lsb;
        }

      /* The mask begins in the low half.  */
      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      /* The mask's LSB must coincide with the shift mask's LSB.  */
      if (-lsb != shift_mask_low)
        return 0;

      if (HOST_BITS_PER_WIDE_INT < 64)
        high = ~high;
      low = ~low;
      low &= -lsb;

      /* The run of ones may extend into the high half.  */
      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
        {
          lsb = high & -high;
          return high == -lsb;
        }

      lsb = low & -low;
      return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
    }
  else
    return 0;
}
7056
/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
   to perform a left shift.  It must have SHIFTOP or more least
   significant 0's, with the remainder of the word 1's.  */
7060
int
includes_rldicr_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);
      c = INTVAL (andop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must be covered by the shift mask.
         This test also rejects c == 0.  */
      if ((lsb & shift_mask) == 0)
        return 0;

      /* Check we have all 1's above the transition, and reject all 1's.  */
      return c == -lsb && lsb != 1;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
           && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* A 64-bit constant that does not fit a CONST_INT: on a 32-bit
         host it is split into high and low HOST_WIDE_INT halves.  */
      HOST_WIDE_INT low, lsb, shift_mask_low;

      low = CONST_DOUBLE_LOW (andop);

      if (HOST_BITS_PER_WIDE_INT < 64)
        {
          HOST_WIDE_INT high, shift_mask_high;

          high = CONST_DOUBLE_HIGH (andop);

          if (low == 0)
            {
              /* The mask lies entirely in the high half; redo the
                 CONST_INT check there, shift amount reduced by 32.  */
              shift_mask_high = ~0;
              if (INTVAL (shiftop) > 32)
                shift_mask_high <<= INTVAL (shiftop) - 32;

              lsb = high & -high;

              if ((lsb & shift_mask_high) == 0)
                return 0;

              return high == -lsb;
            }
          /* Otherwise the high half must be all ones.  */
          if (high != ~0)
            return 0;
        }

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if ((lsb & shift_mask_low) == 0)
        return 0;

      return low == -lsb && lsb != 1;
    }
  else
    return 0;
}
7128
7129 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
7130 for lfq and stfq insns.
7131
7132 Note reg1 and reg2 *must* be hard registers. To be sure we will
7133 abort if we are passed pseudo registers. */
7134
7135 int
7136 registers_ok_for_quad_peep (reg1, reg2)
7137 rtx reg1, reg2;
7138 {
7139 /* We might have been passed a SUBREG. */
7140 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
7141 return 0;
7142
7143 return (REGNO (reg1) == REGNO (reg2) - 1);
7144 }
7145
7146 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
7147 addr1 and addr2 must be in consecutive memory locations
7148 (addr2 == addr1 + 8). */
7149
7150 int
7151 addrs_ok_for_quad_peep (addr1, addr2)
7152 rtx addr1;
7153 rtx addr2;
7154 {
7155 unsigned int reg1;
7156 int offset1;
7157
7158 /* Extract an offset (if used) from the first addr. */
7159 if (GET_CODE (addr1) == PLUS)
7160 {
7161 /* If not a REG, return zero. */
7162 if (GET_CODE (XEXP (addr1, 0)) != REG)
7163 return 0;
7164 else
7165 {
7166 reg1 = REGNO (XEXP (addr1, 0));
7167 /* The offset must be constant! */
7168 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
7169 return 0;
7170 offset1 = INTVAL (XEXP (addr1, 1));
7171 }
7172 }
7173 else if (GET_CODE (addr1) != REG)
7174 return 0;
7175 else
7176 {
7177 reg1 = REGNO (addr1);
7178 /* This was a simple (mem (reg)) expression. Offset is 0. */
7179 offset1 = 0;
7180 }
7181
7182 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
7183 if (GET_CODE (addr2) != PLUS)
7184 return 0;
7185
7186 if (GET_CODE (XEXP (addr2, 0)) != REG
7187 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
7188 return 0;
7189
7190 if (reg1 != REGNO (XEXP (addr2, 0)))
7191 return 0;
7192
7193 /* The offset for the second addr must be 8 more than the first addr. */
7194 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
7195 return 0;
7196
7197 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7198 instructions. */
7199 return 1;
7200 }
7201 \f
7202 /* Return the register class of a scratch register needed to copy IN into
7203 or out of a register in CLASS in MODE. If it can be done directly,
7204 NO_REGS is returned. */
7205
enum reg_class
secondary_reload_class (class, mode, in)
     enum reg_class class;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx in;
{
  int regno;

  if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
    {
      /* We cannot copy a symbolic operand directly into anything
         other than BASE_REGS for TARGET_ELF.  So indicate that a
         register from BASE_REGS is needed as an intermediate
         register.

         On Darwin, pic addresses require a load from memory, which
         needs a base register.  */
      if (class != BASE_REGS
          && (GET_CODE (in) == SYMBOL_REF
              || GET_CODE (in) == HIGH
              || GET_CODE (in) == LABEL_REF
              || GET_CODE (in) == CONST))
        return BASE_REGS;
    }

  /* Reduce IN to a hard register number, or -1 when it is not (known
     to be) a hard register.  */
  if (GET_CODE (in) == REG)
    {
      regno = REGNO (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
        {
          regno = true_regnum (in);
          if (regno >= FIRST_PSEUDO_REGISTER)
            regno = -1;
        }
    }
  else if (GET_CODE (in) == SUBREG)
    {
      regno = true_regnum (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
        regno = -1;
    }
  else
    regno = -1;

  /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
     into anything.  */
  if (class == GENERAL_REGS || class == BASE_REGS
      || (regno >= 0 && INT_REGNO_P (regno)))
    return NO_REGS;

  /* Constants, memory, and FP registers can go into FP registers.  */
  if ((regno == -1 || FP_REGNO_P (regno))
      && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
    return NO_REGS;

  /* Memory, and AltiVec registers can go into AltiVec registers.  */
  if ((regno == -1 || ALTIVEC_REGNO_P (regno))
      && class == ALTIVEC_REGS)
    return NO_REGS;

  /* We can copy among the CR registers.  */
  if ((class == CR_REGS || class == CR0_REGS)
      && regno >= 0 && CR_REGNO_P (regno))
    return NO_REGS;

  /* Otherwise, we need GENERAL_REGS.  */
  return GENERAL_REGS;
}
7274 \f
7275 /* Given a comparison operation, return the bit number in CCR to test. We
7276 know this is a valid comparison.
7277
7278 SCC_P is 1 if this is for an scc. That means that %D will have been
7279 used instead of %C, so the bits will be in different places.
7280
7281 Return -1 if OP isn't a valid comparison for some reason. */
7282
int
ccr_bit (op, scc_p)
     rtx op;
     int scc_p;
{
  enum rtx_code code = GET_CODE (op);
  enum machine_mode cc_mode;
  int cc_regnum;
  int base_bit;
  rtx reg;

  if (GET_RTX_CLASS (code) != '<')
    return -1;

  reg = XEXP (op, 0);

  /* The first operand must be a CR register.  */
  if (GET_CODE (reg) != REG
      || ! CR_REGNO_P (REGNO (reg)))
    abort ();

  cc_mode = GET_MODE (reg);
  cc_regnum = REGNO (reg);
  /* Each CR field is 4 bits wide; BASE_BIT is the first bit of the
     field being tested.  */
  base_bit = 4 * (cc_regnum - CR0_REGNO);

  validate_condition_mode (code, cc_mode);

  switch (code)
    {
    case NE:
      /* SPE floating-point comparisons set only one bit (the GT
         position) of the CR field.  */
      if (TARGET_SPE && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
        return base_bit + 1;
      return scc_p ? base_bit + 3 : base_bit + 2;
    case EQ:
      if (TARGET_SPE && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
        return base_bit + 1;
      return base_bit + 2;
    case GT:  case GTU:  case UNLE:
      return base_bit + 1;
    case LT:  case LTU:  case UNGE:
      return base_bit;
    case ORDERED:  case UNORDERED:
      return base_bit + 3;

    case GE:  case GEU:
      /* If scc, we will have done a cror to put the bit in the
         unordered position.  So test that bit.  For integer, this is ! LT
         unless this is an scc insn.  */
      return scc_p ? base_bit + 3 : base_bit;

    case LE:  case LEU:
      return scc_p ? base_bit + 3 : base_bit + 1;

    default:
      abort ();
    }
}
7339 \f
7340 /* Return the GOT register. */
7341
struct rtx_def *
rs6000_got_register (value)
     rtx value ATTRIBUTE_UNUSED;
{
  /* The second flow pass currently (June 1999) can't update
     regs_ever_live without disturbing other parts of the compiler, so
     update it here to make the prolog/epilogue code happy.  */
  if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
    regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;

  /* Record the use so the prologue knows to set up the GOT pointer.  */
  current_function_uses_pic_offset_table = 1;

  return pic_offset_table_rtx;
}
7356 \f
7357 /* Function to init struct machine_function.
7358 This will be called, via a pointer variable,
7359 from push_function_context. */
7360
static struct machine_function *
rs6000_init_machine_status ()
{
  /* Allocate a machine_function in GC-managed storage, with every
     field zero-initialized.  */
  return ggc_alloc_cleared (sizeof (machine_function));
}
7366 \f
/* These macros test for integers and extract the low-order bits.  */

/* Nonzero if X is a VOIDmode integer constant: either a CONST_INT, or
   a CONST_DOUBLE carrying a wide integer (VOIDmode distinguishes it
   from a floating-point CONST_DOUBLE).  */
#define INT_P(X)  \
((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE)	\
&& GET_MODE (X) == VOIDmode)

/* The low-order word of integer constant X, as a HOST_WIDE_INT.  */
#define INT_LOWPART(X) \
  (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
7374
7375 int
7376 extract_MB (op)
7377 rtx op;
7378 {
7379 int i;
7380 unsigned long val = INT_LOWPART (op);
7381
7382 /* If the high bit is zero, the value is the first 1 bit we find
7383 from the left. */
7384 if ((val & 0x80000000) == 0)
7385 {
7386 if ((val & 0xffffffff) == 0)
7387 abort ();
7388
7389 i = 1;
7390 while (((val <<= 1) & 0x80000000) == 0)
7391 ++i;
7392 return i;
7393 }
7394
7395 /* If the high bit is set and the low bit is not, or the mask is all
7396 1's, the value is zero. */
7397 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
7398 return 0;
7399
7400 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7401 from the right. */
7402 i = 31;
7403 while (((val >>= 1) & 1) != 0)
7404 --i;
7405
7406 return i;
7407 }
7408
7409 int
7410 extract_ME (op)
7411 rtx op;
7412 {
7413 int i;
7414 unsigned long val = INT_LOWPART (op);
7415
7416 /* If the low bit is zero, the value is the first 1 bit we find from
7417 the right. */
7418 if ((val & 1) == 0)
7419 {
7420 if ((val & 0xffffffff) == 0)
7421 abort ();
7422
7423 i = 30;
7424 while (((val >>= 1) & 1) == 0)
7425 --i;
7426
7427 return i;
7428 }
7429
7430 /* If the low bit is set and the high bit is not, or the mask is all
7431 1's, the value is 31. */
7432 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
7433 return 31;
7434
7435 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7436 from the left. */
7437 i = 0;
7438 while (((val <<= 1) & 0x80000000) != 0)
7439 ++i;
7440
7441 return i;
7442 }
7443
7444 /* Print an operand. Recognize special options, documented below. */
7445
/* Relocation name and base-register number used when printing a
   small-data operand.  On ELF, the EABI variant selects the "sda21"
   reloc with register 0, other sdata models "sdarel" with register
   13; non-ELF targets always use "sda21"/0.  */
#if TARGET_ELF
#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
#else
#define SMALL_DATA_RELOC "sda21"
#define SMALL_DATA_REG 0
#endif
7453
/* Print operand X (an rtx) in assembler syntax to FILE.
   CODE is the letter following '%' in the operand specification and
   selects one of the target-specific formats below; CODE 0 means no
   letter was given and the operand is printed in its natural form.
   Invalid operands are reported through output_operand_lossage; note
   that several cases fall through and still touch X afterwards, on
   the assumption that lossage terminates or poisons the output.  */

void
print_operand (file, x, code)
     FILE *file;
     rtx x;
     int code;
{
  int i;
  HOST_WIDE_INT val;
  unsigned HOST_WIDE_INT uval;

  switch (code)
    {
    case '.':
      /* Write out an instruction after the call which may be replaced
	 with glue code by the loader.  This depends on the AIX version.  */
      asm_fprintf (file, RS6000_CALL_GLUE);
      return;

      /* %a is output_address.  */

    case 'A':
      /* If X is a constant integer whose low-order 5 bits are zero,
	 write 'l'.  Otherwise, write 'r'.  This is a kludge to fix a bug
	 in the AIX assembler where "sri" with a zero shift count
	 writes a trash instruction.  */
      if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
	putc ('l', file);
      else
	putc ('r', file);
      return;

    case 'b':
      /* If constant, low-order 16 bits of constant, unsigned.
	 Otherwise, write normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
      else
	print_operand (file, x, 0);
      return;

    case 'B':
      /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
	 for 64-bit mask direction.  */
      putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
      return;

      /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
	 output_operand.  */

    case 'D':
      /* There used to be a comment for 'C' reading "This is an
	 optional cror needed for certain floating-point
	 comparisons.  Otherwise write nothing."  */

      /* Similar, except that this is for an scc, so we must be able to
	 encode the test in a single bit that is one.  We do the above
	 for any LE, GE, GEU, or LEU and invert the bit for NE.  */
      if (GET_CODE (x) == LE || GET_CODE (x) == GE
	  || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
	{
	  int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);

	  fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
		   base_bit + 2,
		   base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
	}

      else if (GET_CODE (x) == NE)
	{
	  int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);

	  fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
		   base_bit + 2, base_bit + 2);
	}
      else if (TARGET_SPE && TARGET_HARD_FLOAT
	       && GET_CODE (x) == EQ
	       && GET_MODE (XEXP (x, 0)) == CCFPmode)
	{
	  /* SPE FP compares leave the result in the "GT" slot
	     (base_bit + 1); invert it in place for EQ.  */
	  int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);

	  fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 1,
		   base_bit + 1, base_bit + 1);
	}
      return;

    case 'E':
      /* X is a CR register.  Print the number of the EQ bit of the CR */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%E value");
      else
	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
      return;

    case 'f':
      /* X is a CR register.  Print the shift count needed to move it
	 to the high-order four bits.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%f value");
      else
	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
      return;

    case 'F':
      /* Similar, but print the count for the rotate in the opposite
	 direction.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%F value");
      else
	fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
      return;

    case 'G':
      /* X is a constant integer.  If it is negative, print "m",
	 otherwise print "z".  This is to make an aze or ame insn.  */
      if (GET_CODE (x) != CONST_INT)
	output_operand_lossage ("invalid %%G value");
      else if (INTVAL (x) >= 0)
	putc ('z', file);
      else
	putc ('m', file);
      return;

    case 'h':
      /* If constant, output low-order five bits.  Otherwise, write
	 normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
      else
	print_operand (file, x, 0);
      return;

    case 'H':
      /* If constant, output low-order six bits.  Otherwise, write
	 normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
      else
	print_operand (file, x, 0);
      return;

    case 'I':
      /* Print `i' if this is a constant, else nothing.  */
      if (INT_P (x))
	putc ('i', file);
      return;

    case 'j':
      /* Write the bit number in CCR for jump.  */
      i = ccr_bit (x, 0);
      if (i == -1)
	output_operand_lossage ("invalid %%j code");
      else
	fprintf (file, "%d", i);
      return;

    case 'J':
      /* Similar, but add one for shift count in rlinm for scc and pass
	 scc flag to `ccr_bit'.  */
      i = ccr_bit (x, 1);
      if (i == -1)
	output_operand_lossage ("invalid %%J code");
      else
	/* If we want bit 31, write a shift count of zero, not 32.  */
	fprintf (file, "%d", i == 31 ? 0 : i + 1);
      return;

    case 'k':
      /* X must be a constant.  Write the 1's complement of the
	 constant.  */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%k value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
      return;

    case 'K':
      /* X must be a symbolic constant on ELF.  Write an
	 expression suitable for an 'addi' that adds in the low 16
	 bits of the MEM.  */
      if (GET_CODE (x) != CONST)
	{
	  print_operand_address (file, x);
	  fputs ("@l", file);
	}
      else
	{
	  if (GET_CODE (XEXP (x, 0)) != PLUS
	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
		  && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
	      || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
	    output_operand_lossage ("invalid %%K value");
	  /* NOTE(review): output falls through after lossage above and
	     still dereferences X's operands -- presumably safe because
	     lossage marks the output invalid; confirm.  */
	  print_operand_address (file, XEXP (XEXP (x, 0), 0));
	  fputs ("@l", file);
	  /* For GNU as, there must be a non-alphanumeric character
	     between 'l' and the number.  The '-' is added by
	     print_operand() already.  */
	  if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
	    fputs ("+", file);
	  print_operand (file, XEXP (XEXP (x, 0), 1), 0);
	}
      return;

      /* %l is output_asm_label.  */

    case 'L':
      /* Write second word of DImode or DFmode reference.  Works on register
	 or non-indexed memory only.  */
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x) + 1]);
      else if (GET_CODE (x) == MEM)
	{
	  /* Handle possible auto-increment.  Since it is pre-increment and
	     we have already done it, we can just use an offset of word.  */
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0),
					   UNITS_PER_WORD));
	  else
	    output_address (XEXP (adjust_address_nv (x, SImode,
						     UNITS_PER_WORD),
				  0));

	  if (small_data_operand (x, GET_MODE (x)))
	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		     reg_names[SMALL_DATA_REG]);
	}
      return;

    case 'm':
      /* MB value for a mask operand.  */
      if (! mask_operand (x, SImode))
	output_operand_lossage ("invalid %%m value");

      fprintf (file, "%d", extract_MB (x));
      return;

    case 'M':
      /* ME value for a mask operand.  */
      if (! mask_operand (x, SImode))
	output_operand_lossage ("invalid %%M value");

      fprintf (file, "%d", extract_ME (x));
      return;

      /* %n outputs the negative of its operand.  */

    case 'N':
      /* Write the number of elements in the vector times 4.  */
      if (GET_CODE (x) != PARALLEL)
	output_operand_lossage ("invalid %%N value");
      else
	fprintf (file, "%d", XVECLEN (x, 0) * 4);
      return;

    case 'O':
      /* Similar, but subtract 1 first.  */
      if (GET_CODE (x) != PARALLEL)
	output_operand_lossage ("invalid %%O value");
      else
	fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
      return;

    case 'p':
      /* X is a CONST_INT that is a power of two.  Output the logarithm.  */
      if (! INT_P (x)
	  || INT_LOWPART (x) < 0
	  || (i = exact_log2 (INT_LOWPART (x))) < 0)
	output_operand_lossage ("invalid %%p value");
      else
	fprintf (file, "%d", i);
      return;

    case 'P':
      /* The operand must be an indirect memory reference.  The result
	 is the register number.  */
      if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
	  || REGNO (XEXP (x, 0)) >= 32)
	output_operand_lossage ("invalid %%P value");
      else
	fprintf (file, "%d", REGNO (XEXP (x, 0)));
      return;

    case 'q':
      /* This outputs the logical code corresponding to a boolean
	 expression.  The expression may have one or both operands
	 negated (if one, only the first one).  For condition register
	 logical operations, it will also treat the negated
	 CR codes as NOTs, but not handle NOTs of them.  */
      {
	const char *const *t = 0;
	const char *s;
	enum rtx_code code = GET_CODE (x);
	/* Row = operator; column = which operand(s) are negated
	   (none / first / both).  */
	static const char * const tbl[3][3] = {
	  { "and", "andc", "nor" },
	  { "or", "orc", "nand" },
	  { "xor", "eqv", "xor" } };

	if (code == AND)
	  t = tbl[0];
	else if (code == IOR)
	  t = tbl[1];
	else if (code == XOR)
	  t = tbl[2];
	else
	  output_operand_lossage ("invalid %%q value");

	if (GET_CODE (XEXP (x, 0)) != NOT)
	  s = t[0];
	else
	  {
	    if (GET_CODE (XEXP (x, 1)) == NOT)
	      s = t[2];
	    else
	      s = t[1];
	  }

	fputs (s, file);
      }
      return;

    case 'R':
      /* X is a CR register.  Print the mask for `mtcrf'.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%R value");
      else
	fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
      return;

    case 's':
      /* Low 5 bits of 32 - value */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%s value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
      return;

    case 'S':
      /* PowerPC64 mask position.  All 0's is excluded.
	 CONST_INT 32-bit mask is considered sign-extended so any
	 transition must occur within the CONST_INT, not on the boundary.  */
      if (! mask64_operand (x, DImode))
	output_operand_lossage ("invalid %%S value");

      uval = INT_LOWPART (x);

      if (uval & 1)	/* Clear Left */
	{
#if HOST_BITS_PER_WIDE_INT > 64
	  uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
#endif
	  i = 64;
	}
      else		/* Clear Right */
	{
	  uval = ~uval;
#if HOST_BITS_PER_WIDE_INT > 64
	  uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
#endif
	  i = 63;
	}
      /* Count bits down from the top; I ends as the index of the
	 highest set bit of the (possibly complemented) mask.  */
      while (uval != 0)
	--i, uval >>= 1;
      if (i < 0)
	abort ();
      fprintf (file, "%d", i);
      return;

    case 't':
      /* Like 'J' but get to the OVERFLOW/UNORDERED bit.  */
      if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
	abort ();

      /* Bit 3 is OV bit.  */
      i = 4 * (REGNO (x) - CR0_REGNO) + 3;

      /* If we want bit 31, write a shift count of zero, not 32.  */
      fprintf (file, "%d", i == 31 ? 0 : i + 1);
      return;

    case 'T':
      /* Print the symbolic name of a branch target register.  */
      if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
				  && REGNO (x) != COUNT_REGISTER_REGNUM))
	output_operand_lossage ("invalid %%T value");
      else if (REGNO (x) == LINK_REGISTER_REGNUM)
	fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
      else
	fputs ("ctr", file);
      return;

    case 'u':
      /* High-order 16 bits of constant for use in unsigned operand.  */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%u value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
		 (INT_LOWPART (x) >> 16) & 0xffff);
      return;

    case 'v':
      /* High-order 16 bits of constant for use in signed operand.  */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%v value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
		 (INT_LOWPART (x) >> 16) & 0xffff);
      return;

    case 'U':
      /* Print `u' if this has an auto-increment or auto-decrement.  */
      if (GET_CODE (x) == MEM
	  && (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC))
	putc ('u', file);
      return;

    case 'V':
      /* Print the trap code for this operand.  */
      switch (GET_CODE (x))
	{
	case EQ:
	  fputs ("eq", file);   /* 4 */
	  break;
	case NE:
	  fputs ("ne", file);   /* 24 */
	  break;
	case LT:
	  fputs ("lt", file);   /* 16 */
	  break;
	case LE:
	  fputs ("le", file);   /* 20 */
	  break;
	case GT:
	  fputs ("gt", file);   /* 8 */
	  break;
	case GE:
	  fputs ("ge", file);   /* 12 */
	  break;
	case LTU:
	  fputs ("llt", file);  /* 2 */
	  break;
	case LEU:
	  fputs ("lle", file);  /* 6 */
	  break;
	case GTU:
	  fputs ("lgt", file);  /* 1 */
	  break;
	case GEU:
	  fputs ("lge", file);  /* 5 */
	  break;
	default:
	  abort ();
	}
      /* break (not return): falls out of the outer switch to the end
	 of the function, which is equivalent here.  */
      break;

    case 'w':
      /* If constant, low-order 16 bits of constant, signed.  Otherwise, write
	 normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC,
		 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
      else
	print_operand (file, x, 0);
      return;

    case 'W':
      /* MB value for a PowerPC64 rldic operand.  */
      val = (GET_CODE (x) == CONST_INT
	     ? INTVAL (x) : CONST_DOUBLE_HIGH (x));

      if (val < 0)
	i = -1;
      else
	for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
	  if ((val <<= 1) < 0)
	    break;

#if HOST_BITS_PER_WIDE_INT == 32
      if (GET_CODE (x) == CONST_INT && i >= 0)
	i += 32;  /* zero-extend high-part was all 0's */
      else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
	{
	  val = CONST_DOUBLE_LOW (x);

	  if (val == 0)
	    abort ();
	  else if (val < 0)
	    --i;
	  else
	    for ( ; i < 64; i++)
	      if ((val <<= 1) < 0)
		break;
	}
#endif

      fprintf (file, "%d", i + 1);
      return;

    case 'X':
      /* Print 'x' if this is an indexed (reg+reg) memory address.  */
      if (GET_CODE (x) == MEM
	  && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
	putc ('x', file);
      return;

    case 'Y':
      /* Like 'L', for third word of TImode  */
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x) + 2]);
      else if (GET_CODE (x) == MEM)
	{
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
	  else
	    output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
	  if (small_data_operand (x, GET_MODE (x)))
	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		     reg_names[SMALL_DATA_REG]);
	}
      return;

    case 'z':
      /* X is a SYMBOL_REF.  Write out the name preceded by a
	 period and without any trailing data in brackets.  Used for function
	 names.  If we are configured for System V (or the embedded ABI) on
	 the PowerPC, do not emit the period, since those systems do not use
	 TOCs and the like.  */
      if (GET_CODE (x) != SYMBOL_REF)
	abort ();

      if (XSTR (x, 0)[0] != '.')
	{
	  switch (DEFAULT_ABI)
	    {
	    default:
	      abort ();

	    case ABI_AIX:
	      putc ('.', file);
	      break;

	    case ABI_V4:
	    case ABI_AIX_NODESC:
	    case ABI_DARWIN:
	      break;
	    }
	}
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
#else
      assemble_name (file, XSTR (x, 0));
#endif
      return;

    case 'Z':
      /* Like 'L', for last word of TImode.  */
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x) + 3]);
      else if (GET_CODE (x) == MEM)
	{
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
	  else
	    output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
	  if (small_data_operand (x, GET_MODE (x)))
	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		     reg_names[SMALL_DATA_REG]);
	}
      return;

      /* Print AltiVec or SPE memory operand.  */
    case 'y':
      {
	rtx tmp;

	if (GET_CODE (x) != MEM)
	  abort ();

	tmp = XEXP (x, 0);

	if (TARGET_SPE)
	  {
	    /* Handle [reg].  */
	    if (GET_CODE (tmp) == REG)
	      {
		fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
		break;
	      }
	    /* Handle [reg+UIMM].  */
	    else if (GET_CODE (tmp) == PLUS &&
		     GET_CODE (XEXP (tmp, 1)) == CONST_INT)
	      {
		/* NB: this local `x' shadows the operand parameter.  */
		int x;

		if (GET_CODE (XEXP (tmp, 0)) != REG)
		  abort ();

		x = INTVAL (XEXP (tmp, 1));
		fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
		break;
	      }

	    /* Fall through.  Must be [reg+reg].  */
	  }
	if (GET_CODE (tmp) == REG)
	  fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
	else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
	  {
	    /* If the base register is r0, swap the operands so a real
	       register is used as the base.  */
	    if (REGNO (XEXP (tmp, 0)) == 0)
	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
		       reg_names[ REGNO (XEXP (tmp, 0)) ]);
	    else
	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
		       reg_names[ REGNO (XEXP (tmp, 1)) ]);
	  }
	else
	  abort ();
	break;
      }

    case 0:
      /* No code letter: print the operand in its natural form.  */
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x)]);
      else if (GET_CODE (x) == MEM)
	{
	  /* We need to handle PRE_INC and PRE_DEC here, since we need to
	     know the width from the mode.  */
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC)
	    fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
	  else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
	  else
	    output_address (XEXP (x, 0));
	}
      else
	output_addr_const (file, x);
      return;

    default:
      output_operand_lossage ("invalid %%xn code");
    }
}
8099 \f
8100 /* Print the address of an operand. */
8101
/* Print the address of an operand.
   X is a memory address in one of the forms this backend emits:
   plain register, symbolic constant, reg+reg, reg+const, ELF/Mach-O
   LO_SUM, or a constant-pool (TOC) reference.  Aborts on anything
   else.  */

void
print_operand_address (file, x)
     FILE *file;
     rtx x;
{
  if (GET_CODE (x) == REG)
    fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
  else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
	   || GET_CODE (x) == LABEL_REF)
    {
      output_addr_const (file, x);
      if (small_data_operand (x, GET_MODE (x)))
	fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		 reg_names[SMALL_DATA_REG]);
      else if (TARGET_TOC)
	/* Bare symbolic addresses are never valid with a TOC.  */
	abort ();
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
    {
      /* Indexed form; if the first register is r0 swap the operands
	 so that a real register is printed as the base.  */
      if (REGNO (XEXP (x, 0)) == 0)
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
		 reg_names[ REGNO (XEXP (x, 0)) ]);
      else
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
		 reg_names[ REGNO (XEXP (x, 1)) ]);
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
      fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#if TARGET_ELF
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
	   && CONSTANT_P (XEXP (x, 1)))
    {
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
#if TARGET_MACHO
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
	   && CONSTANT_P (XEXP (x, 1)))
    {
      fprintf (file, "lo16(");
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
  else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
    {
      if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
	{
	  rtx contains_minus = XEXP (x, 1);
	  rtx minus, symref;
	  const char *name;

	  /* Find the (minus (sym) (toc)) buried in X, and temporarily
	     turn it into (sym) for output_addr_const.  */
	  while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
	    contains_minus = XEXP (contains_minus, 0);

	  minus = XEXP (contains_minus, 0);
	  symref = XEXP (minus, 0);
	  XEXP (contains_minus, 0) = symref;
	  if (TARGET_ELF)
	    {
	      /* On ELF, also temporarily append "@toc" to the symbol
		 name; both mutations are undone below, so X is
		 unchanged on return.  */
	      char *newname;

	      name = XSTR (symref, 0);
	      newname = alloca (strlen (name) + sizeof ("@toc"));
	      strcpy (newname, name);
	      strcat (newname, "@toc");
	      XSTR (symref, 0) = newname;
	    }
	  output_addr_const (file, XEXP (x, 1));
	  if (TARGET_ELF)
	    XSTR (symref, 0) = name;
	  XEXP (contains_minus, 0) = minus;
	}
      else
	output_addr_const (file, XEXP (x, 1));

      fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
    }
  else
    abort ();
}
8189 \f
8190 /* Target hook for assembling integer objects. The PowerPC version has
8191 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
8192 is defined. It also needs to handle DI-mode objects on 64-bit
8193 targets. */
8194
static bool
rs6000_assemble_integer (x, size, aligned_p)
     rtx x;
     unsigned int size;
     int aligned_p;
{
#ifdef RELOCATABLE_NEEDS_FIXUP
  /* Special handling for SI values.  */
  if (size == 4 && aligned_p)
    {
      extern int in_toc_section PARAMS ((void));
      /* Guard against recursing when output_addr_const below triggers
	 another integer to be assembled.  */
      static int recurse = 0;

      /* For -mrelocatable, we mark all addresses that need to be fixed up
	 in the .fixup section.  */
      if (TARGET_RELOCATABLE
	  && !in_toc_section ()
	  && !in_text_section ()
	  && !recurse
	  && GET_CODE (x) != CONST_INT
	  && GET_CODE (x) != CONST_DOUBLE
	  && CONSTANT_P (x))
	{
	  char buf[256];

	  recurse = 1;
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
	  fixuplabelno++;
	  ASM_OUTPUT_LABEL (asm_out_file, buf);
	  /* Emit the word itself, tagged @fixup ...  */
	  fprintf (asm_out_file, "\t.long\t(");
	  output_addr_const (asm_out_file, x);
	  fprintf (asm_out_file, ")@fixup\n");
	  /* ... then record its label in the .fixup section, restoring
	     the previous section afterwards.  */
	  fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
	  ASM_OUTPUT_ALIGN (asm_out_file, 2);
	  fprintf (asm_out_file, "\t.long\t");
	  assemble_name (asm_out_file, buf);
	  fprintf (asm_out_file, "\n\t.previous\n");
	  recurse = 0;
	  return true;
	}
      /* Remove initial .'s to turn a -mcall-aixdesc function
	 address into the address of the descriptor, not the function
	 itself.  */
      else if (GET_CODE (x) == SYMBOL_REF
	       && XSTR (x, 0)[0] == '.'
	       && DEFAULT_ABI == ABI_AIX)
	{
	  const char *name = XSTR (x, 0);
	  while (*name == '.')
	    name++;

	  fprintf (asm_out_file, "\t.long\t%s\n", name);
	  return true;
	}
    }
#endif /* RELOCATABLE_NEEDS_FIXUP */
  /* All other cases get the generic handling.  */
  return default_assemble_integer (x, size, aligned_p);
}
8253
8254 #ifdef HAVE_GAS_HIDDEN
8255 /* Emit an assembler directive to set symbol visibility for DECL to
8256 VISIBILITY_TYPE. */
8257
static void
rs6000_assemble_visibility (decl, vis)
     tree decl;
     int vis;
{
  /* Functions need to have their entry point symbol visibility set as
     well as their descriptor symbol visibility.  */
  if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
    {
      /* Indexed by the VISIBILITY_* enum value in VIS; the default
	 visibility (index 0) never reaches here as a directive.  */
      static const char * const visibility_types[] = {
	NULL, "internal", "hidden", "protected"
      };

      const char *name, *type;

      name = ((* targetm.strip_name_encoding)
	      (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
      type = visibility_types[vis];

      /* Emit the directive for the descriptor symbol and again for the
	 dot-prefixed entry-point symbol.  */
      fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
      fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
    }
  else
    default_assemble_visibility (decl, vis);
}
8283 #endif
8284 \f
8285 enum rtx_code
8286 rs6000_reverse_condition (mode, code)
8287 enum machine_mode mode;
8288 enum rtx_code code;
8289 {
8290 /* Reversal of FP compares takes care -- an ordered compare
8291 becomes an unordered compare and vice versa. */
8292 if (mode == CCFPmode && !flag_unsafe_math_optimizations)
8293 return reverse_condition_maybe_unordered (code);
8294 else
8295 return reverse_condition (code);
8296 }
8297
8298 /* Generate a compare for CODE. Return a brand-new rtx that
8299 represents the result of the compare. */
8300
static rtx
rs6000_generate_compare (code)
     enum rtx_code code;
{
  enum machine_mode comp_mode;
  rtx compare_result;

  /* Pick the CC mode: CCFP for FP compares, CCUNS for unsigned
     integer compares, plain CC otherwise.  */
  if (rs6000_compare_fp_p)
    comp_mode = CCFPmode;
  else if (code == GTU || code == LTU
	   || code == GEU || code == LEU)
    comp_mode = CCUNSmode;
  else
    comp_mode = CCmode;

  /* First, the compare.  */
  compare_result = gen_reg_rtx (comp_mode);

  /* SPE FP compare instructions on the GPRs.  Yuck!  */
  if ((TARGET_SPE && TARGET_HARD_FLOAT) && rs6000_compare_fp_p)
    {
      rtx cmp, or1, or2, or_result, compare_result2;

      /* SPE has only eq/gt/lt tests (cmp* trap on NaN; tst* don't),
	 so map each rtx code onto one of those three.  */
      switch (code)
	{
	case EQ:
	case UNEQ:
	case NE:
	case LTGT:
	  cmp = flag_unsafe_math_optimizations
	    ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1);
	  break;
	case GT:
	case GTU:
	case UNGT:
	case UNGE:
	case GE:
	case GEU:
	  cmp = flag_unsafe_math_optimizations
	    ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1);
	  break;
	case LT:
	case LTU:
	case UNLT:
	case UNLE:
	case LE:
	case LEU:
	  cmp = flag_unsafe_math_optimizations
	    ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1);
	  break;
	default:
	  abort ();
	}

      /* Synthesize LE and GE from LT/GT || EQ.  */
      if (code == LE || code == GE || code == LEU || code == GEU)
	{
	  /* Synthesize GE/LE from GT/LT || EQ.  */

	  emit_insn (cmp);

	  switch (code)
	    {
	    case LE: code = LT; break;
	    case GE: code = GT; break;
	    case LEU: code = LT; break;
	    case GEU: code = GT; break;
	    default: abort ();
	    }

	  or1 = gen_reg_rtx (SImode);
	  or2 = gen_reg_rtx (SImode);
	  or_result = gen_reg_rtx (CCEQmode);
	  compare_result2 = gen_reg_rtx (CCFPmode);

	  /* Do the EQ.  */
	  cmp = flag_unsafe_math_optimizations
	    ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
			       rs6000_compare_op1);
	  emit_insn (cmp);

	  /* The MC8540 FP compare instructions set the CR bits
	     differently than other PPC compare instructions.  For
	     that matter, there is no generic test instruction, but a
	     testgt, testlt, and testeq.  For a true condition, bit 2
	     is set (x1xx) in the CR.  Following the traditional CR
	     values:

	     LT    GT    EQ    OV
	     bit3  bit2  bit1  bit0

	     ... bit 2 would be a GT CR alias, so later on we
	     look in the GT bits for the branch instructions.
	     However, we must be careful to emit correct RTL in
	     the meantime, so optimizations don't get confused.  */

	  or1 = gen_rtx (NE, SImode, compare_result, const0_rtx);
	  or2 = gen_rtx (NE, SImode, compare_result2, const0_rtx);

	  /* OR them together.  */
	  cmp = gen_rtx_SET (VOIDmode, or_result,
			     gen_rtx_COMPARE (CCEQmode,
					      gen_rtx_IOR (SImode, or1, or2),
					      const_true_rtx));
	  compare_result = or_result;
	  code = EQ;
	}
      else
	{
	  /* We only care about 1 bit (x1xx), so map everything to NE to
	     maintain rtl sanity.  We'll get to the right bit (x1xx) at
	     code output time.  */
	  if (code == NE || code == LTGT)
	    /* Do the inverse here because we have no cmpne
	       instruction.  We use the cmpeq instruction and expect
	       to get a 0 instead.  */
	    code = EQ;
	  else
	    code = NE;
	}

      emit_insn (cmp);
    }
  else
    emit_insn (gen_rtx_SET (VOIDmode, compare_result,
			    gen_rtx_COMPARE (comp_mode,
					     rs6000_compare_op0,
					     rs6000_compare_op1)));

  /* Some kinds of FP comparisons need an OR operation;
     except for flag_unsafe_math_optimizations we don't bother.  */
  if (rs6000_compare_fp_p
      && ! flag_unsafe_math_optimizations
      && ! (TARGET_HARD_FLOAT && TARGET_SPE)
      && (code == LE || code == GE
	  || code == UNEQ || code == LTGT
	  || code == UNGT || code == UNLT))
    {
      /* Split the composite condition into two primitive CR-bit tests
	 and OR them into a CCEQ result.  */
      enum rtx_code or1, or2;
      rtx or1_rtx, or2_rtx, compare2_rtx;
      rtx or_result = gen_reg_rtx (CCEQmode);

      switch (code)
	{
	case LE: or1 = LT;  or2 = EQ;  break;
	case GE: or1 = GT;  or2 = EQ;  break;
	case UNEQ: or1 = UNORDERED;  or2 = EQ;  break;
	case LTGT: or1 = LT;  or2 = GT;  break;
	case UNGT: or1 = UNORDERED;  or2 = GT;  break;
	case UNLT: or1 = UNORDERED;  or2 = LT;  break;
	default: abort ();
	}
      validate_condition_mode (or1, comp_mode);
      validate_condition_mode (or2, comp_mode);
      or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
      or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
      compare2_rtx = gen_rtx_COMPARE (CCEQmode,
				      gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
				      const_true_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));

      compare_result = or_result;
      code = EQ;
    }

  validate_condition_mode (code, GET_MODE (compare_result));

  return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
}
8481
8482
8483 /* Emit the RTL for an sCOND pattern. */
8484
8485 void
8486 rs6000_emit_sCOND (code, result)
8487 enum rtx_code code;
8488 rtx result;
8489 {
8490 rtx condition_rtx;
8491 enum machine_mode op_mode;
8492
8493 condition_rtx = rs6000_generate_compare (code);
8494
8495 op_mode = GET_MODE (rs6000_compare_op0);
8496 if (op_mode == VOIDmode)
8497 op_mode = GET_MODE (rs6000_compare_op1);
8498
8499 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
8500 {
8501 PUT_MODE (condition_rtx, DImode);
8502 convert_move (result, condition_rtx, 0);
8503 }
8504 else
8505 {
8506 PUT_MODE (condition_rtx, SImode);
8507 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
8508 }
8509 }
8510
8511 /* Emit a branch of kind CODE to location LOC. */
8512
8513 void
8514 rs6000_emit_cbranch (code, loc)
8515 enum rtx_code code;
8516 rtx loc;
8517 {
8518 rtx condition_rtx, loc_ref;
8519
8520 condition_rtx = rs6000_generate_compare (code);
8521 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
8522 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
8523 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
8524 loc_ref, pc_rtx)));
8525 }
8526
8527 /* Return the string to output a conditional branch to LABEL, which is
8528 the operand number of the label, or -1 if the branch is really a
8529 conditional return.
8530
8531 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
8532 condition code register and its mode specifies what kind of
8533 comparison we made.
8534
8535 REVERSED is nonzero if we should reverse the sense of the comparison.
8536
8537 INSN is the insn. */
8538
char *
output_cbranch (op, label, reversed, insn)
     rtx op;
     const char * label;
     int reversed;
     rtx insn;
{
  /* NOTE: result points into a static buffer, so it is only valid
     until the next call.  */
  static char string[64];
  enum rtx_code code = GET_CODE (op);
  rtx cc_reg = XEXP (op, 0);
  enum machine_mode mode = GET_MODE (cc_reg);
  int cc_regno = REGNO (cc_reg) - CR0_REGNO;
  /* A length attribute of 8 (two insns) means the target is out of
     range of one conditional branch, so we must branch around an
     unconditional jump.  */
  int need_longbranch = label != NULL && get_attr_length (insn) == 8;
  /* A long branch inverts the tested condition (we branch AROUND the
     unconditional jump), so fold that into the requested reversal.  */
  int really_reversed = reversed ^ need_longbranch;
  char *s = string;
  const char *ccode;
  const char *pred;
  rtx note;

  validate_condition_mode (code, mode);

  /* Work out which way this really branches.  We could use
     reverse_condition_maybe_unordered here always but this
     makes the resulting assembler clearer.  */
  if (really_reversed)
    {
      /* Reversal of FP compares takes care -- an ordered compare
	 becomes an unordered compare and vice versa.  */
      if (mode == CCFPmode)
	code = reverse_condition_maybe_unordered (code);
      else
	code = reverse_condition (code);
    }

  if ((TARGET_SPE && TARGET_HARD_FLOAT) && mode == CCFPmode)
    {
      /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
	 to the GT bit.  */
      if (code == EQ)
	/* Opposite of GT.  */
	code = UNLE;
      else if (code == NE)
	code = GT;
      else
	abort ();
    }

  /* Map the rtx comparison code to the branch condition mnemonic.  */
  switch (code)
    {
      /* Not all of these are actually distinct opcodes, but
	 we distinguish them for clarity of the resulting assembler.  */
    case NE: case LTGT:
      ccode = "ne"; break;
    case EQ: case UNEQ:
      ccode = "eq"; break;
    case GE: case GEU:
      ccode = "ge"; break;
    case GT: case GTU: case UNGT:
      ccode = "gt"; break;
    case LE: case LEU:
      ccode = "le"; break;
    case LT: case LTU: case UNLT:
      ccode = "lt"; break;
    case UNORDERED: ccode = "un"; break;
    case ORDERED: ccode = "nu"; break;
    case UNGE: ccode = "nl"; break;
    case UNLE: ccode = "ng"; break;
    default:
      abort ();
    }

  /* Maybe we have a guess as to how likely the branch is.
     The old mnemonics don't have a way to specify this information.  */
  pred = "";
  note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
  if (note != NULL_RTX)
    {
      /* PROB is the difference from 50%.  */
      int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
      bool always_hint = rs6000_cpu != PROCESSOR_POWER4;

      /* Only hint for highly probable/improbable branches on newer
	 cpus as static prediction overrides processor dynamic
	 prediction.  For older cpus we may as well always hint, but
	 assume not taken for branches that are very close to 50% as a
	 mispredicted taken branch is more expensive than a
	 mispredicted not-taken branch.  */
      if (always_hint
	  || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
	{
	  /* For a long branch the hardware branch is the inverted
	     one, hence the XOR with need_longbranch.  */
	  if (abs (prob) > REG_BR_PROB_BASE / 20
	      && ((prob > 0) ^ need_longbranch))
	    pred = "+";
	  else
	    pred = "-";
	}
    }

  /* The {old|new} braces select between the POWER and PowerPC
     mnemonic spellings (presumably expanded later by the asm output
     machinery -- see the port's template handling).  */
  if (label == NULL)
    s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
  else
    s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);

  /* We need to escape any '%' characters in the reg_names string.
     Assume they'd only be the first character...  */
  if (reg_names[cc_regno + CR0_REGNO][0] == '%')
    *s++ = '%';
  s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);

  if (label != NULL)
    {
      /* If the branch distance was too far, we may have to use an
	 unconditional branch to go the distance.  */
      if (need_longbranch)
	s += sprintf (s, ",$+8\n\tb %s", label);
      else
	s += sprintf (s, ",%s", label);
    }

  return string;
}
8660
8661 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
8662 operands of the last comparison is nonzero/true, FALSE_COND if it
8663 is zero/false. Return 0 if the hardware has no such operation. */
8664
8665 int
8666 rs6000_emit_cmove (dest, op, true_cond, false_cond)
8667 rtx dest;
8668 rtx op;
8669 rtx true_cond;
8670 rtx false_cond;
8671 {
8672 enum rtx_code code = GET_CODE (op);
8673 rtx op0 = rs6000_compare_op0;
8674 rtx op1 = rs6000_compare_op1;
8675 REAL_VALUE_TYPE c1;
8676 enum machine_mode compare_mode = GET_MODE (op0);
8677 enum machine_mode result_mode = GET_MODE (dest);
8678 rtx temp;
8679
8680 /* These modes should always match. */
8681 if (GET_MODE (op1) != compare_mode
8682 /* In the isel case however, we can use a compare immediate, so
8683 op1 may be a small constant. */
8684 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
8685 return 0;
8686 if (GET_MODE (true_cond) != result_mode)
8687 return 0;
8688 if (GET_MODE (false_cond) != result_mode)
8689 return 0;
8690
8691 /* First, work out if the hardware can do this at all, or
8692 if it's too slow... */
8693 if (! rs6000_compare_fp_p)
8694 {
8695 if (TARGET_ISEL)
8696 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
8697 return 0;
8698 }
8699
8700 /* Eliminate half of the comparisons by switching operands, this
8701 makes the remaining code simpler. */
8702 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
8703 || code == LTGT || code == LT)
8704 {
8705 code = reverse_condition_maybe_unordered (code);
8706 temp = true_cond;
8707 true_cond = false_cond;
8708 false_cond = temp;
8709 }
8710
8711 /* UNEQ and LTGT take four instructions for a comparison with zero,
8712 it'll probably be faster to use a branch here too. */
8713 if (code == UNEQ)
8714 return 0;
8715
8716 if (GET_CODE (op1) == CONST_DOUBLE)
8717 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
8718
8719 /* We're going to try to implement comparisons by performing
8720 a subtract, then comparing against zero. Unfortunately,
8721 Inf - Inf is NaN which is not zero, and so if we don't
8722 know that the operand is finite and the comparison
8723 would treat EQ different to UNORDERED, we can't do it. */
8724 if (! flag_unsafe_math_optimizations
8725 && code != GT && code != UNGE
8726 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
8727 /* Constructs of the form (a OP b ? a : b) are safe. */
8728 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
8729 || (! rtx_equal_p (op0, true_cond)
8730 && ! rtx_equal_p (op1, true_cond))))
8731 return 0;
8732 /* At this point we know we can use fsel. */
8733
8734 /* Reduce the comparison to a comparison against zero. */
8735 temp = gen_reg_rtx (compare_mode);
8736 emit_insn (gen_rtx_SET (VOIDmode, temp,
8737 gen_rtx_MINUS (compare_mode, op0, op1)));
8738 op0 = temp;
8739 op1 = CONST0_RTX (compare_mode);
8740
8741 /* If we don't care about NaNs we can reduce some of the comparisons
8742 down to faster ones. */
8743 if (flag_unsafe_math_optimizations)
8744 switch (code)
8745 {
8746 case GT:
8747 code = LE;
8748 temp = true_cond;
8749 true_cond = false_cond;
8750 false_cond = temp;
8751 break;
8752 case UNGE:
8753 code = GE;
8754 break;
8755 case UNEQ:
8756 code = EQ;
8757 break;
8758 default:
8759 break;
8760 }
8761
8762 /* Now, reduce everything down to a GE. */
8763 switch (code)
8764 {
8765 case GE:
8766 break;
8767
8768 case LE:
8769 temp = gen_reg_rtx (compare_mode);
8770 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8771 op0 = temp;
8772 break;
8773
8774 case ORDERED:
8775 temp = gen_reg_rtx (compare_mode);
8776 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
8777 op0 = temp;
8778 break;
8779
8780 case EQ:
8781 temp = gen_reg_rtx (compare_mode);
8782 emit_insn (gen_rtx_SET (VOIDmode, temp,
8783 gen_rtx_NEG (compare_mode,
8784 gen_rtx_ABS (compare_mode, op0))));
8785 op0 = temp;
8786 break;
8787
8788 case UNGE:
8789 temp = gen_reg_rtx (result_mode);
8790 emit_insn (gen_rtx_SET (VOIDmode, temp,
8791 gen_rtx_IF_THEN_ELSE (result_mode,
8792 gen_rtx_GE (VOIDmode,
8793 op0, op1),
8794 true_cond, false_cond)));
8795 false_cond = temp;
8796 true_cond = false_cond;
8797
8798 temp = gen_reg_rtx (compare_mode);
8799 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8800 op0 = temp;
8801 break;
8802
8803 case GT:
8804 temp = gen_reg_rtx (result_mode);
8805 emit_insn (gen_rtx_SET (VOIDmode, temp,
8806 gen_rtx_IF_THEN_ELSE (result_mode,
8807 gen_rtx_GE (VOIDmode,
8808 op0, op1),
8809 true_cond, false_cond)));
8810 true_cond = temp;
8811 false_cond = true_cond;
8812
8813 temp = gen_reg_rtx (compare_mode);
8814 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8815 op0 = temp;
8816 break;
8817
8818 default:
8819 abort ();
8820 }
8821
8822 emit_insn (gen_rtx_SET (VOIDmode, dest,
8823 gen_rtx_IF_THEN_ELSE (result_mode,
8824 gen_rtx_GE (VOIDmode,
8825 op0, op1),
8826 true_cond, false_cond)));
8827 return 1;
8828 }
8829
8830 /* Same as above, but for ints (isel). */
8831
8832 static int
8833 rs6000_emit_int_cmove (dest, op, true_cond, false_cond)
8834 rtx dest;
8835 rtx op;
8836 rtx true_cond;
8837 rtx false_cond;
8838 {
8839 rtx condition_rtx, cr;
8840
8841 /* All isel implementations thus far are 32-bits. */
8842 if (GET_MODE (rs6000_compare_op0) != SImode)
8843 return 0;
8844
8845 /* We still have to do the compare, because isel doesn't do a
8846 compare, it just looks at the CRx bits set by a previous compare
8847 instruction. */
8848 condition_rtx = rs6000_generate_compare (GET_CODE (op));
8849 cr = XEXP (condition_rtx, 0);
8850
8851 if (GET_MODE (cr) == CCmode)
8852 emit_insn (gen_isel_signed (dest, condition_rtx,
8853 true_cond, false_cond, cr));
8854 else
8855 emit_insn (gen_isel_unsigned (dest, condition_rtx,
8856 true_cond, false_cond, cr));
8857
8858 return 1;
8859 }
8860
8861 const char *
8862 output_isel (operands)
8863 rtx *operands;
8864 {
8865 enum rtx_code code;
8866
8867 code = GET_CODE (operands[1]);
8868 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
8869 {
8870 PUT_CODE (operands[1], reverse_condition (code));
8871 return "isel %0,%3,%2,%j1";
8872 }
8873 else
8874 return "isel %0,%2,%3,%j1";
8875 }
8876
8877 void
8878 rs6000_emit_minmax (dest, code, op0, op1)
8879 rtx dest;
8880 enum rtx_code code;
8881 rtx op0;
8882 rtx op1;
8883 {
8884 enum machine_mode mode = GET_MODE (op0);
8885 enum rtx_code c;
8886 rtx target;
8887
8888 if (code == SMAX || code == SMIN)
8889 c = GE;
8890 else
8891 c = GEU;
8892
8893 if (code == SMAX || code == UMAX)
8894 target = emit_conditional_move (dest, c, op0, op1, mode,
8895 op0, op1, mode, 0);
8896 else
8897 target = emit_conditional_move (dest, c, op0, op1, mode,
8898 op1, op0, mode, 0);
8899 if (target == NULL_RTX)
8900 abort ();
8901 if (target != dest)
8902 emit_move_insn (dest, target);
8903 }
8904 \f
8905 /* This page contains routines that are used to determine what the
8906 function prologue and epilogue code will do and write them out. */
8907
8908 /* Return the first fixed-point register that is required to be
8909 saved. 32 if none. */
8910
8911 int
8912 first_reg_to_save ()
8913 {
8914 int first_reg;
8915
8916 /* Find lowest numbered live register. */
8917 for (first_reg = 13; first_reg <= 31; first_reg++)
8918 if (regs_ever_live[first_reg]
8919 && (! call_used_regs[first_reg]
8920 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
8921 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8922 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
8923 break;
8924
8925 #if TARGET_MACHO
8926 if (flag_pic
8927 && current_function_uses_pic_offset_table
8928 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
8929 return RS6000_PIC_OFFSET_TABLE_REGNUM;
8930 #endif
8931
8932 return first_reg;
8933 }
8934
8935 /* Similar, for FP regs. */
8936
8937 int
8938 first_fp_reg_to_save ()
8939 {
8940 int first_reg;
8941
8942 /* Find lowest numbered live register. */
8943 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
8944 if (regs_ever_live[first_reg])
8945 break;
8946
8947 return first_reg;
8948 }
8949
8950 /* Similar, for AltiVec regs. */
8951
8952 static int
8953 first_altivec_reg_to_save ()
8954 {
8955 int i;
8956
8957 /* Stack frame remains as is unless we are in AltiVec ABI. */
8958 if (! TARGET_ALTIVEC_ABI)
8959 return LAST_ALTIVEC_REGNO + 1;
8960
8961 /* Find lowest numbered live register. */
8962 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
8963 if (regs_ever_live[i])
8964 break;
8965
8966 return i;
8967 }
8968
8969 /* Return a 32-bit mask of the AltiVec registers we need to set in
8970 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
8971 the 32-bit word is 0. */
8972
8973 static unsigned int
8974 compute_vrsave_mask ()
8975 {
8976 unsigned int i, mask = 0;
8977
8978 /* First, find out if we use _any_ altivec registers. */
8979 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8980 if (regs_ever_live[i])
8981 mask |= ALTIVEC_REG_BIT (i);
8982
8983 if (mask == 0)
8984 return mask;
8985
8986 /* Next, add all registers that are call-clobbered. We do this
8987 because post-reload register optimizers such as regrename_optimize
8988 may choose to use them. They never change the register class
8989 chosen by reload, so cannot create new uses of altivec registers
8990 if there were none before, so the early exit above is safe. */
8991 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
8992 altivec registers not saved in the mask, which might well make the
8993 adjustments below more effective in eliding the save/restore of
8994 VRSAVE in small functions. */
8995 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8996 if (call_used_regs[i])
8997 mask |= ALTIVEC_REG_BIT (i);
8998
8999 /* Next, remove the argument registers from the set. These must
9000 be in the VRSAVE mask set by the caller, so we don't need to add
9001 them in again. More importantly, the mask we compute here is
9002 used to generate CLOBBERs in the set_vrsave insn, and we do not
9003 wish the argument registers to die. */
9004 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
9005 mask &= ~ALTIVEC_REG_BIT (i);
9006
9007 /* Similarly, remove the return value from the set. */
9008 {
9009 bool yes = false;
9010 diddle_return_value (is_altivec_return_reg, &yes);
9011 if (yes)
9012 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
9013 }
9014
9015 return mask;
9016 }
9017
9018 static void
9019 is_altivec_return_reg (reg, xyes)
9020 rtx reg;
9021 void *xyes;
9022 {
9023 bool *yes = (bool *) xyes;
9024 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
9025 *yes = true;
9026 }
9027
9028 \f
9029 /* Calculate the stack information for the current function. This is
9030 complicated by having two separate calling sequences, the AIX calling
9031 sequence and the V.4 calling sequence.
9032
9033 AIX (and Darwin/Mac OS X) stack frames look like:
9034 32-bit 64-bit
9035 SP----> +---------------------------------------+
9036 | back chain to caller | 0 0
9037 +---------------------------------------+
9038 | saved CR | 4 8 (8-11)
9039 +---------------------------------------+
9040 | saved LR | 8 16
9041 +---------------------------------------+
9042 | reserved for compilers | 12 24
9043 +---------------------------------------+
9044 | reserved for binders | 16 32
9045 +---------------------------------------+
9046 | saved TOC pointer | 20 40
9047 +---------------------------------------+
9048 | Parameter save area (P) | 24 48
9049 +---------------------------------------+
9050 | Alloca space (A) | 24+P etc.
9051 +---------------------------------------+
9052 | Local variable space (L) | 24+P+A
9053 +---------------------------------------+
9054 | Float/int conversion temporary (X) | 24+P+A+L
9055 +---------------------------------------+
9056 | Save area for AltiVec registers (W) | 24+P+A+L+X
9057 +---------------------------------------+
9058 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
9059 +---------------------------------------+
9060 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
9061 +---------------------------------------+
9062 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
9063 +---------------------------------------+
9064 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
9065 +---------------------------------------+
9066 old SP->| back chain to caller's caller |
9067 +---------------------------------------+
9068
9069 The required alignment for AIX configurations is two words (i.e., 8
9070 or 16 bytes).
9071
9072
9073 V.4 stack frames look like:
9074
9075 SP----> +---------------------------------------+
9076 | back chain to caller | 0
9077 +---------------------------------------+
9078 | caller's saved LR | 4
9079 +---------------------------------------+
9080 | Parameter save area (P) | 8
9081 +---------------------------------------+
9082 | Alloca space (A) | 8+P
9083 +---------------------------------------+
9084 | Varargs save area (V) | 8+P+A
9085 +---------------------------------------+
9086 | Local variable space (L) | 8+P+A+V
9087 +---------------------------------------+
9088 | Float/int conversion temporary (X) | 8+P+A+V+L
9089 +---------------------------------------+
9090 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
9091 +---------------------------------------+
9092 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
9093 +---------------------------------------+
9094 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
9095 +---------------------------------------+
9096 | SPE: area for 64-bit GP registers |
9097 +---------------------------------------+
9098 | SPE alignment padding |
9099 +---------------------------------------+
9100 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
9101 +---------------------------------------+
9102 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
9103 +---------------------------------------+
9104 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
9105 +---------------------------------------+
9106 old SP->| back chain to caller's caller |
9107 +---------------------------------------+
9108
9109 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
9110 given. (But note below and in sysv4.h that we require only 8 and
9111 may round up the size of our stack frame anyways. The historical
9112 reason is early versions of powerpc-linux which didn't properly
9113 align the stack at program startup. A happy side-effect is that
9114 -mno-eabi libraries can be used with -meabi programs.)
9115
9116 The EABI configuration defaults to the V.4 layout, unless
9117 -mcall-aix is used, in which case the AIX layout is used. However,
9118 the stack alignment requirements may differ. If -mno-eabi is not
9119 given, the required stack alignment is 8 bytes; if -mno-eabi is
9120 given, the required alignment is 16 bytes. (But see V.4 comment
9121 above.) */
9122
9123 #ifndef ABI_STACK_BOUNDARY
9124 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
9125 #endif
9126
rs6000_stack_t *
rs6000_stack_info ()
{
  /* INFO is handed back to the caller; ZERO_INFO stays all-zero and
     is only used to reset INFO on each call.  The returned pointer is
     to static storage and is overwritten by the next call.  */
  static rs6000_stack_t info, zero_info;
  rs6000_stack_t *info_ptr = &info;
  int reg_size = TARGET_POWERPC64 ? 8 : 4;
  enum rs6000_abi abi;
  int ehrd_size;
  int total_raw_size;

  /* Zero all fields portably.  */
  info = zero_info;

  /* Select which calling sequence.  */
  info_ptr->abi = abi = DEFAULT_ABI;

  /* Calculate which registers need to be saved & save area size.  */
  info_ptr->first_gp_reg_save = first_reg_to_save ();
  /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
     even if it currently looks like we won't.  */
  if (((TARGET_TOC && TARGET_MINIMAL_TOC)
       || (flag_pic == 1 && abi == ABI_V4)
       || (flag_pic && abi == ABI_DARWIN))
      && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
    info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
  else
    info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);

  /* For the SPE, we have an additional upper 32-bits on each GPR.
     Ideally we should save the entire 64-bits only when the upper
     half is used in SIMD instructions.  Since we only record
     registers live (not the size they are used in), this proves
     difficult because we'd have to traverse the instruction chain at
     the right time, taking reload into account.  This is a real pain,
     so we opt to save the GPRs in 64-bits always.  Anyone overly
     concerned with frame size can fix this.  ;-).

     So... since we save all GPRs (except the SP) in 64-bits, the
     traditional GP save area will be empty.  */
  if (TARGET_SPE_ABI)
    info_ptr->gp_size = 0;

  info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
  info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);

  info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
  info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
				 - info_ptr->first_altivec_reg_save);

  /* Does this function call anything?  */
  info_ptr->calls_p = (! current_function_is_leaf
		       || cfun->machine->ra_needs_full_frame);

  /* Determine if we need to save the link register.  */
  if (rs6000_ra_ever_killed ()
      || (DEFAULT_ABI == ABI_AIX && current_function_profile)
#ifdef TARGET_RELOCATABLE
      || (TARGET_RELOCATABLE && (get_pool_size () != 0))
#endif
      || (info_ptr->first_fp_reg_save != 64
	  && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
      || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
      || (abi == ABI_V4 && current_function_calls_alloca)
      || (DEFAULT_ABI == ABI_DARWIN
	  && flag_pic
	  && current_function_uses_pic_offset_table)
      || info_ptr->calls_p)
    {
      info_ptr->lr_save_p = 1;
      regs_ever_live[LINK_REGISTER_REGNUM] = 1;
    }

  /* Determine if we need to save the condition code registers.  */
  if (regs_ever_live[CR2_REGNO]
      || regs_ever_live[CR3_REGNO]
      || regs_ever_live[CR4_REGNO])
    {
      info_ptr->cr_save_p = 1;
      if (abi == ABI_V4)
	info_ptr->cr_size = reg_size;
    }

  /* If the current function calls __builtin_eh_return, then we need
     to allocate stack space for registers that will hold data for
     the exception handler.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i;
      /* Count the EH data registers; the loop body is intentionally
	 empty.  */
      for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
	continue;

      /* SPE saves EH registers in 64-bits.  */
      ehrd_size = i * (TARGET_SPE_ABI ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
    }
  else
    ehrd_size = 0;

  /* Determine various sizes.  */
  info_ptr->reg_size = reg_size;
  info_ptr->fixed_size = RS6000_SAVE_AREA;
  info_ptr->varargs_size = RS6000_VARARGS_AREA;
  info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
  info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
				      8);

  if (TARGET_SPE_ABI)
    info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
  else
    info_ptr->spe_gp_size = 0;

  if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
    {
      info_ptr->vrsave_mask = compute_vrsave_mask ();
      info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
    }
  else
    {
      info_ptr->vrsave_mask = 0;
      info_ptr->vrsave_size = 0;
    }

  /* Calculate the offsets.  All offsets below are relative to the
     incoming stack pointer (negative offsets grow the frame
     downward); see the layout diagrams above this function.  */
  switch (abi)
    {
    case ABI_NONE:
    default:
      abort ();

    case ABI_AIX:
    case ABI_AIX_NODESC:
    case ABI_DARWIN:
      info_ptr->fp_save_offset = - info_ptr->fp_size;
      info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;

      if (TARGET_ALTIVEC_ABI)
	{
	  info_ptr->vrsave_save_offset
	    = info_ptr->gp_save_offset - info_ptr->vrsave_size;

	  /* Align stack so vector save area is on a quadword boundary.  */
	  if (info_ptr->altivec_size != 0)
	    info_ptr->altivec_padding_size
	      = 16 - (-info_ptr->vrsave_save_offset % 16);
	  else
	    info_ptr->altivec_padding_size = 0;

	  info_ptr->altivec_save_offset
	    = info_ptr->vrsave_save_offset
	    - info_ptr->altivec_padding_size
	    - info_ptr->altivec_size;

	  /* Adjust for AltiVec case.  */
	  info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
	}
      else
	info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
      /* CR and LR live in the caller-provided fixed area above SP on
	 AIX-style frames.  */
      info_ptr->cr_save_offset = reg_size; /* first word when 64-bit.  */
      info_ptr->lr_save_offset = 2*reg_size;
      break;

    case ABI_V4:
      info_ptr->fp_save_offset = - info_ptr->fp_size;
      info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
      info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;

      if (TARGET_SPE_ABI)
	{
	  /* Align stack so SPE GPR save area is aligned on a
	     double-word boundary.  */
	  if (info_ptr->spe_gp_size != 0)
	    info_ptr->spe_padding_size
	      = 8 - (-info_ptr->cr_save_offset % 8);
	  else
	    info_ptr->spe_padding_size = 0;

	  info_ptr->spe_gp_save_offset
	    = info_ptr->cr_save_offset
	    - info_ptr->spe_padding_size
	    - info_ptr->spe_gp_size;

	  /* Adjust for SPE case.  */
	  info_ptr->toc_save_offset
	    = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
	}
      else if (TARGET_ALTIVEC_ABI)
	{
	  info_ptr->vrsave_save_offset
	    = info_ptr->cr_save_offset - info_ptr->vrsave_size;

	  /* Align stack so vector save area is on a quadword boundary.  */
	  if (info_ptr->altivec_size != 0)
	    info_ptr->altivec_padding_size
	      = 16 - (-info_ptr->vrsave_save_offset % 16);
	  else
	    info_ptr->altivec_padding_size = 0;

	  info_ptr->altivec_save_offset
	    = info_ptr->vrsave_save_offset
	    - info_ptr->altivec_padding_size
	    - info_ptr->altivec_size;

	  /* Adjust for AltiVec case.  */
	  info_ptr->toc_save_offset
	    = info_ptr->altivec_save_offset - info_ptr->toc_size;
	}
      else
	info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
      info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
      info_ptr->lr_save_offset = reg_size;
      break;
    }

  /* NOTE(review): vrsave_size appears twice in this sum, and
     ABI_DARWIN is a nonzero enum constant, so the alignment operand
     is always 16 -- presumably DEFAULT_ABI == ABI_DARWIN was meant.
     Both quirks are preserved verbatim here; verify against history
     before changing.  */
  info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
				      + info_ptr->gp_size
				      + info_ptr->altivec_size
				      + info_ptr->altivec_padding_size
				      + info_ptr->vrsave_size
				      + info_ptr->spe_gp_size
				      + info_ptr->spe_padding_size
				      + ehrd_size
				      + info_ptr->cr_size
				      + info_ptr->lr_size
				      + info_ptr->vrsave_size
				      + info_ptr->toc_size,
				      (TARGET_ALTIVEC_ABI || ABI_DARWIN)
				      ? 16 : 8);

  total_raw_size = (info_ptr->vars_size
		    + info_ptr->parm_size
		    + info_ptr->save_size
		    + info_ptr->varargs_size
		    + info_ptr->fixed_size);

  info_ptr->total_size =
    RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);

  /* Determine if we need to allocate any stack frame:

     For AIX we need to push the stack if a frame pointer is needed
     (because the stack might be dynamically adjusted), if we are
     debugging, if we make calls, or if the sum of fp_save, gp_save,
     and local variables are more than the space needed to save all
     non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
     + 18*8 = 288 (GPR13 reserved).

     For V.4 we don't have the stack cushion that AIX uses, but assume
     that the debugger can handle stackless frames.  */

  if (info_ptr->calls_p)
    info_ptr->push_p = 1;

  else if (abi == ABI_V4)
    info_ptr->push_p = total_raw_size > info_ptr->fixed_size;

  else
    info_ptr->push_p = (frame_pointer_needed
			|| (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
			|| ((total_raw_size - info_ptr->fixed_size)
			    > (TARGET_32BIT ? 220 : 288)));

  /* Zero offsets if we're not saving those registers.  */
  if (info_ptr->fp_size == 0)
    info_ptr->fp_save_offset = 0;

  if (info_ptr->gp_size == 0)
    info_ptr->gp_save_offset = 0;

  if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
    info_ptr->altivec_save_offset = 0;

  if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
    info_ptr->vrsave_save_offset = 0;

  if (! TARGET_SPE_ABI || info_ptr->spe_gp_size == 0)
    info_ptr->spe_gp_save_offset = 0;

  if (! info_ptr->lr_save_p)
    info_ptr->lr_save_offset = 0;

  if (! info_ptr->cr_save_p)
    info_ptr->cr_save_offset = 0;

  /* NOTE(review): toc_save_p is read here but not assigned anywhere
     in this function -- it is presumably set elsewhere in the file;
     confirm before relying on it.  */
  if (! info_ptr->toc_save_p)
    info_ptr->toc_save_offset = 0;

  return info_ptr;
}
9414
/* Dump the frame-layout descriptor INFO to stderr for debugging.
   If INFO is null, recompute the layout for the current function via
   rs6000_stack_info ().  Most fields are printed only when they
   differ from their "nothing saved" defaults (32 for
   first_gp_reg_save, 64 for first_fp_reg_save, 0 for sizes/offsets,
   4 for reg_size), so a minimal frame prints almost nothing.  */
void
debug_stack_info (info)
     rs6000_stack_t *info;
{
  const char *abi_string;

  if (! info)
    info = rs6000_stack_info ();

  fprintf (stderr, "\nStack information for function %s:\n",
	   ((current_function_decl && DECL_NAME (current_function_decl))
	    ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
	    : "<unknown>"));

  switch (info->abi)
    {
    default: abi_string = "Unknown"; break;
    case ABI_NONE: abi_string = "NONE"; break;
    case ABI_AIX:
    case ABI_AIX_NODESC: abi_string = "AIX"; break;
    case ABI_DARWIN: abi_string = "Darwin"; break;
    case ABI_V4: abi_string = "V.4"; break;
    }

  fprintf (stderr, "\tABI = %5s\n", abi_string);

  if (TARGET_ALTIVEC_ABI)
    fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");

  if (TARGET_SPE_ABI)
    fprintf (stderr, "\tSPE ABI extensions enabled.\n");

  /* 32 means "no GPR is saved"; anything lower is the first saved GPR.  */
  if (info->first_gp_reg_save != 32)
    fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);

  /* 64 means "no FPR is saved" (FPRs are numbered 32..63 + 32).  */
  if (info->first_fp_reg_save != 64)
    fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);

  if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
    fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
	     info->first_altivec_reg_save);

  if (info->lr_save_p)
    fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);

  if (info->cr_save_p)
    fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);

  if (info->toc_save_p)
    fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);

  if (info->vrsave_mask)
    fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);

  if (info->push_p)
    fprintf (stderr, "\tpush_p = %5d\n", info->push_p);

  if (info->calls_p)
    fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);

  /* Save-area offsets, relative to the stack pointer.  */
  if (info->gp_save_offset)
    fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);

  if (info->fp_save_offset)
    fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);

  if (info->altivec_save_offset)
    fprintf (stderr, "\taltivec_save_offset = %5d\n",
	     info->altivec_save_offset);

  if (info->spe_gp_save_offset)
    fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
	     info->spe_gp_save_offset);

  if (info->vrsave_save_offset)
    fprintf (stderr, "\tvrsave_save_offset = %5d\n",
	     info->vrsave_save_offset);

  if (info->lr_save_offset)
    fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);

  if (info->cr_save_offset)
    fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);

  if (info->toc_save_offset)
    fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);

  if (info->varargs_save_offset)
    fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);

  /* Aggregate sizes of the various frame regions.  */
  if (info->total_size)
    fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);

  if (info->varargs_size)
    fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);

  if (info->vars_size)
    fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);

  if (info->parm_size)
    fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);

  if (info->fixed_size)
    fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);

  if (info->gp_size)
    fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);

  if (info->spe_gp_size)
    fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);

  if (info->fp_size)
    fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);

  if (info->altivec_size)
    fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);

  if (info->vrsave_size)
    fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);

  if (info->altivec_padding_size)
    fprintf (stderr, "\taltivec_padding_size= %5d\n",
	     info->altivec_padding_size);

  if (info->spe_padding_size)
    fprintf (stderr, "\tspe_padding_size = %5d\n",
	     info->spe_padding_size);

  if (info->lr_size)
    fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);

  if (info->cr_size)
    fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);

  if (info->toc_size)
    fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);

  if (info->save_size)
    fprintf (stderr, "\tsave_size = %5d\n", info->save_size);

  /* reg_size is 4 for 32-bit and 8 for 64-bit targets.  */
  if (info->reg_size != 4)
    fprintf (stderr, "\treg_size = %5d\n", info->reg_size);

  fprintf (stderr, "\n");
}
9560
/* Implement RETURN_ADDR_RTX: return an rtx for the return address
   COUNT frames up from the current one, where FRAME is the frame
   pointer rtx of that frame.  */
rtx
rs6000_return_addr (count, frame)
     int count;
     rtx frame;
{
  /* Currently we don't optimize very well between prolog and body
     code and for PIC code the code can be actually quite bad, so
     don't try to be too clever here.  */
  if (count != 0 || flag_pic != 0)
    {
      /* Force a full stack frame so the return address is at a known
	 slot, then load it from the back chain's saved-LR location
	 (RETURN_ADDRESS_OFFSET past the frame's back pointer).  */
      cfun->machine->ra_needs_full_frame = 1;

      return
	gen_rtx_MEM
	  (Pmode,
	   memory_address
	   (Pmode,
	    plus_constant (copy_to_reg
			   (gen_rtx_MEM (Pmode,
					 memory_address (Pmode, frame))),
			   RETURN_ADDRESS_OFFSET)));
    }

  /* COUNT == 0, non-PIC: the value LR had on entry is the answer.  */
  return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
}
9586
9587 /* Say whether a function is a candidate for sibcall handling or not.
9588 We do not allow indirect calls to be optimized into sibling calls.
9589 Also, we can't do it if there are any vector parameters; there's
9590 nowhere to put the VRsave code so it works; note that functions with
9591 vector parameters are required to have a prototype, so the argument
9592 type info must be available here. (The tail recursion case can work
9593 with vector parameters, but there's no way to distinguish here.) */
/* Implement TARGET_FUNCTION_OK_FOR_SIBCALL.  DECL is the callee's
   declaration (null for an indirect call, which is rejected by
   falling through to "return false"); EXP is unused here.  */
static bool
rs6000_function_ok_for_sibcall (decl, exp)
     tree decl;
     tree exp ATTRIBUTE_UNUSED;
{
  tree type;
  if (decl)
    {
      if (TARGET_ALTIVEC_VRSAVE)
        {
	  /* Reject any callee taking a vector argument -- see the
	     VRsave discussion in the comment above.  */
	  for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
	       type; type = TREE_CHAIN (type))
	    {
	      if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
		return false;
	    }
	}
      /* On Darwin, or when the callee binds locally, a sibcall is OK
	 unless the call must go through a long-call sequence.  An
	 explicit "shortcall" attribute overrides "longcall".  */
      if (DEFAULT_ABI == ABI_DARWIN
	  || (*targetm.binds_local_p) (decl))
	{
	  tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));

	  if (!lookup_attribute ("longcall", attr_list)
	      || lookup_attribute ("shortcall", attr_list))
	    return true;
	}
    }
  return false;
}
9623
/* Return nonzero if something in the current function's insn stream
   kills the link register, i.e. LR must be saved/restored by the
   prologue and epilogue.  Calls (other than sibcalls) and explicit
   sets of LR outside the prologue/epilogue count as kills.  */
static int
rs6000_ra_ever_killed ()
{
  rtx top;
  rtx reg;
  rtx insn;

  /* Irritatingly, there are two kinds of thunks -- those created with
     TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
     through the regular part of the compiler.  This is a very hacky
     way to tell them apart.  */
  if (current_function_is_thunk && !no_new_pseudos)
    return 0;

  /* regs_ever_live has LR marked as used if any sibcalls are present,
     but this should not force saving and restoring in the
     pro/epilogue.  Likewise, reg_set_between_p thinks a sibcall
     clobbers LR, so that is inappropriate.  */

  /* Also, the prologue can generate a store into LR that
     doesn't really count, like this:

        move LR->R0
        bcl to set PIC register
        move LR->R31
        move R0->LR

     When we're called from the epilogue, we need to avoid counting
     this as a store.  */

  /* Walk the whole function body, including any pending sequence.  */
  push_topmost_sequence ();
  top = get_insns ();
  pop_topmost_sequence ();
  reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);

  for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	{
	  if (FIND_REG_INC_NOTE (insn, reg))
	    return 1;
	  else if (GET_CODE (insn) == CALL_INSN
		   && !SIBLING_CALL_P (insn))
	    return 1;
	  else if (set_of (reg, insn) != NULL_RTX
		   && !prologue_epilogue_contains (insn))
	    return 1;
	}
    }
  return 0;
}
9675 \f
/* Add a REG_MAYBE_DEAD note to INSN, telling the flow pass that the
   value INSN sets may never be used and so should not be complained
   about if it turns out to be dead.  Used for TOC-register setup,
   whose result is only needed if the function references the TOC.  */
static void
rs6000_maybe_dead (insn)
     rtx insn;
{
  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
					const0_rtx,
					REG_NOTES (insn));
}
9685
9686 /* Emit instructions needed to load the TOC register.
9687 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9688 a constant pool; or for SVR4 -fpic. */
9689
/* Emit the insns that initialize the TOC/GOT register for the current
   ABI and PIC level.  FROMPROLOG is nonzero when called while
   emitting the prologue, in which case we must use hard registers
   (LR, r0) instead of fresh pseudos.  All emitted insns get
   REG_MAYBE_DEAD notes, since the TOC setup may turn out unused.  */
void
rs6000_emit_load_toc_table (fromprolog)
     int fromprolog;
{
  rtx dest;
  dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);

  /* SVR4 -fpic: a single bcl-based sequence loads the GOT pointer.  */
  if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
    {
      rtx temp = (fromprolog
		  ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		  : gen_reg_rtx (Pmode));
      rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp)));
      rs6000_maybe_dead (emit_move_insn (dest, temp));
    }
  /* SVR4 -fPIC: compute the TOC address as "label address (from bcl)
     plus the label-to-TOC displacement".  */
  else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
    {
      char buf[30];
      rtx tempLR = (fromprolog
		    ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		    : gen_reg_rtx (Pmode));
      rtx temp0 = (fromprolog
		   ? gen_rtx_REG (Pmode, 0)
		   : gen_reg_rtx (Pmode));
      rtx symF;

      /* possibly create the toc section */
      if (! toc_initialized)
	{
	  toc_section ();
	  function_section (current_function_decl);
	}

      if (fromprolog)
	{
	  rtx symL;

	  /* LCF = the bcl landing label; LCL = the TOC-displacement
	     label.  Both are per-function (rs6000_pic_labelno).  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
	  symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
							       symF)));
	  rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
							       symL,
							       symF)));
	}
      else
	{
	  /* Not in the prologue (e.g. reload needs the TOC again):
	     use a fresh "LCG" label each time.  */
	  rtx tocsym;
	  static int reload_toc_labelno = 0;

	  tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR,
								symF,
								tocsym)));
	  rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	  rs6000_maybe_dead (emit_move_insn (temp0,
					     gen_rtx_MEM (Pmode, dest)));
	}
      /* dest = label address + displacement.  */
      rs6000_maybe_dead (emit_insn (gen_addsi3 (dest, temp0, dest)));
    }
  else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
    {
      /* This is for AIX code running in non-PIC ELF32.  Materialize
	 the absolute address of the TOC anchor with lis/la.  */
      char buf[30];
      rtx realsym;
      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
      realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

      rs6000_maybe_dead (emit_insn (gen_elf_high (dest, realsym)));
      rs6000_maybe_dead (emit_insn (gen_elf_low (dest, dest, realsym)));
    }
  else if (DEFAULT_ABI == ABI_AIX)
    {
      /* AIX: reload the TOC pointer from its fixed stack slot.  */
      if (TARGET_32BIT)
	rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest)));
      else
	rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest)));
    }
  else
    abort ();
}
9780
/* Return the alias set used for TOC-relative memory references,
   allocating it lazily on the first call.  */

int
get_TOC_alias_set ()
{
  static int toc_alias_set = -1;

  if (toc_alias_set == -1)
    toc_alias_set = new_alias_set ();

  return toc_alias_set;
}
9789
9790 /* This retuns nonzero if the current function uses the TOC. This is
9791 determined by the presence of (unspec ... 7), which is generated by
9792 the various load_toc_* patterns. */
9793
9794 int
9795 uses_TOC ()
9796 {
9797 rtx insn;
9798
9799 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9800 if (INSN_P (insn))
9801 {
9802 rtx pat = PATTERN (insn);
9803 int i;
9804
9805 if (GET_CODE (pat) == PARALLEL)
9806 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
9807 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
9808 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
9809 return 1;
9810 }
9811 return 0;
9812 }
9813
9814 rtx
9815 create_TOC_reference (symbol)
9816 rtx symbol;
9817 {
9818 return gen_rtx_PLUS (Pmode,
9819 gen_rtx_REG (Pmode, TOC_REGISTER),
9820 gen_rtx_CONST (Pmode,
9821 gen_rtx_MINUS (Pmode, symbol,
9822 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
9823 }
9824
9825 #if TARGET_AIX
9826 /* __throw will restore its own return address to be the same as the
9827 return address of the function that the throw is being made to.
9828 This is unfortunate, because we want to check the original
9829 return address to see if we need to restore the TOC.
9830 So we have to squirrel it away here.
9831 This is used only in compiling __throw and __rethrow.
9832
9833 Most of this code should be removed by CSE. */
9834 static rtx insn_after_throw;
9835
9836 /* This does the saving... */
void
rs6000_aix_emit_builtin_unwind_init ()
{
  rtx mem;
  rtx stack_top = gen_reg_rtx (Pmode);
  rtx opcode_addr = gen_reg_rtx (Pmode);

  insn_after_throw = gen_reg_rtx (SImode);

  /* Follow the back chain at 0(r1) to the caller's frame.  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  emit_move_insn (stack_top, mem);

  /* Load the word 2 pointers into the frame -- per the comment in
     rs6000_emit_eh_toc_restore this is where the return address is
     found -- then squirrel away the instruction at that address so
     the eh_epilogue code can check it against the TOC-restore
     opcode.  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, stack_top,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
}
9855
9856 /* Emit insns to _restore_ the TOC register, at runtime (specifically
9857 in _eh.o). Only used on AIX.
9858
9859 The idea is that on AIX, function calls look like this:
9860 bl somefunction-trampoline
9861 lwz r2,20(sp)
9862
9863 and later,
9864 somefunction-trampoline:
9865 stw r2,20(sp)
9866 ... load function address in the count register ...
9867 bctr
9868 or like this, if the linker determines that this is not a cross-module call
9869 and so the TOC need not be restored:
9870 bl somefunction
9871 nop
9872 or like this, if the compiler could determine that this is not a
9873 cross-module call:
9874 bl somefunction
9875 now, the tricky bit here is that register 2 is saved and restored
9876 by the _linker_, so we can't readily generate debugging information
9877 for it. So we need to go back up the call chain looking at the
9878 insns at return addresses to see which calls saved the TOC register
9879 and so see where it gets restored from.
9880
9881 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
9882 just before the actual epilogue.
9883
9884 On the bright side, this incurs no space or time overhead unless an
9885 exception is thrown, except for the extra code in libgcc.a.
9886
9887 The parameter STACKSIZE is a register containing (at runtime)
9888 the amount to be popped off the stack in addition to the stack frame
9889 of this routine (which will be __throw or __rethrow, and so is
9890 guaranteed to have a stack frame). */
9891
void
rs6000_emit_eh_toc_restore (stacksize)
     rtx stacksize;
{
  rtx top_of_stack;
  rtx bottom_of_stack = gen_reg_rtx (Pmode);
  rtx tocompare = gen_reg_rtx (SImode);
  rtx opcode = gen_reg_rtx (SImode);
  rtx opcode_addr = gen_reg_rtx (Pmode);
  rtx mem;
  rtx loop_start = gen_label_rtx ();
  rtx no_toc_restore_needed = gen_label_rtx ();
  rtx loop_exit = gen_label_rtx ();

  /* bottom_of_stack = back chain of the current frame.  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  /* top_of_stack marks where the walk stops: STACKSIZE bytes above.  */
  top_of_stack = expand_binop (Pmode, add_optab,
			       bottom_of_stack, stacksize,
			       NULL_RTX, 1, OPTAB_WIDEN);

  /* The encoding of "restore r2 after a call": lwz r2,20(r1) on
     32-bit, ld r2,40(r1) on 64-bit.  */
  emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
					   : 0xE8410028, SImode));

  /* rs6000_aix_emit_builtin_unwind_init must have run first.  */
  if (insn_after_throw == NULL_RTX)
    abort ();
  emit_move_insn (opcode, insn_after_throw);

  emit_note (NULL, NOTE_INSN_LOOP_BEG);
  emit_label (loop_start);

  /* If the instruction after this frame's call site is the
     TOC-restore opcode, reload r2 from that frame's TOC slot.  */
  do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
			   SImode, NULL_RTX, NULL_RTX,
			   no_toc_restore_needed);

  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (5 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (gen_rtx_REG (Pmode, 2), mem);

  emit_label (no_toc_restore_needed);
  do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
			   Pmode, NULL_RTX, NULL_RTX,
			   loop_exit);

  /* Step one frame up the call chain and fetch the instruction at
     that frame's return address for the next iteration's test.  */
  mem = gen_rtx_MEM (Pmode, bottom_of_stack);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));

  emit_note (NULL, NOTE_INSN_LOOP_CONT);
  emit_jump (loop_start);
  emit_note (NULL, NOTE_INSN_LOOP_END);
  emit_label (loop_exit);
}
9953 #endif /* TARGET_AIX */
9954 \f
9955 /* This ties together stack memory (MEM with an alias set of
9956 rs6000_sr_alias_set) and the change to the stack pointer. */
9957
static void
rs6000_emit_stack_tie ()
{
  /* A BLKmode MEM based on the stack pointer, in the stack alias set;
     the stack_tie pattern ties it to the stack pointer change so the
     two cannot be reordered with respect to each other.  */
  rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));

  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_insn (gen_stack_tie (mem));
}
9966
9967 /* Emit the correct code for allocating stack space, as insns.
9968 If COPY_R12, make sure a copy of the old frame is left in r12.
9969 The generated code may use hard register 0 as a temporary. */
9970
static void
rs6000_emit_allocate_stack (size, copy_r12)
     HOST_WIDE_INT size;
     int copy_r12;
{
  rtx insn;
  rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx tmp_reg = gen_rtx_REG (Pmode, 0);
  rtx todec = GEN_INT (-size);

  /* -fstack-limit support: trap before the stack pointer would drop
     below the limit.  */
  if (current_function_limit_stack)
    {
      if (REG_P (stack_limit_rtx)
	  && REGNO (stack_limit_rtx) > 1
	  && REGNO (stack_limit_rtx) <= 31)
	{
	  /* Limit in a register: tmp = limit + size; trap if sp < tmp.  */
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size))
		     : gen_adddi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size)));

	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
	       && TARGET_32BIT
	       && DEFAULT_ABI == ABI_V4)
	{
	  /* Symbolic limit (32-bit V.4 only): materialize limit+size
	     with lis/la, then trap as above.  */
	  rtx toload = gen_rtx_CONST (VOIDmode,
				      gen_rtx_PLUS (Pmode,
						    stack_limit_rtx,
						    GEN_INT (size)));

	  emit_insn (gen_elf_high (tmp_reg, toload));
	  emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else
	warning ("stack limit expression is not supported");
    }

  /* Keep the old stack pointer in r12 if the caller asked for it, or
     if we will need it below to store the back chain by hand.  */
  if (copy_r12 || ! TARGET_UPDATE)
    emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);

  if (TARGET_UPDATE)
    {
      if (size > 32767)
	{
	  /* The decrement won't fit in a 16-bit displacement; build
	     it in r0 first.  */
	  /* Need a note here so that try_split doesn't get confused.  */
	  if (get_last_insn() == NULL_RTX)
	    emit_note (0, NOTE_INSN_DELETED);
	  insn = emit_move_insn (tmp_reg, todec);
	  try_split (PATTERN (insn), insn, 0);
	  todec = tmp_reg;
	}

      /* stwu/stdu: decrement sp and store the back chain atomically.  */
      insn = emit_insn (TARGET_32BIT
			? gen_movsi_update (stack_reg, stack_reg,
					    todec, stack_reg)
			: gen_movdi_update (stack_reg, stack_reg,
					    todec, stack_reg));
    }
  else
    {
      /* No update forms: decrement sp, then store the old sp (in r12)
	 as the back chain.  */
      insn = emit_insn (TARGET_32BIT
			? gen_addsi3 (stack_reg, stack_reg, todec)
			: gen_adddi3 (stack_reg, stack_reg, todec));
      emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
		      gen_rtx_REG (Pmode, 12));
    }

  /* Describe the net effect (sp = sp - size) for the unwinder.  */
  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) =
    gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
		       gen_rtx_SET (VOIDmode, stack_reg,
				    gen_rtx_PLUS (Pmode, stack_reg,
						  GEN_INT (-size))),
		       REG_NOTES (insn));
}
10054
10055 /* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
10056 knows that:
10057
10058 (mem (plus (blah) (regXX)))
10059
10060 is really:
10061
10062 (mem (plus (blah) (const VALUE_OF_REGXX))). */
10063
10064 static void
10065 altivec_frame_fixup (insn, reg, val)
10066 rtx insn, reg;
10067 HOST_WIDE_INT val;
10068 {
10069 rtx real;
10070
10071 real = copy_rtx (PATTERN (insn));
10072
10073 real = replace_rtx (real, reg, GEN_INT (val));
10074
10075 RTX_FRAME_RELATED_P (insn) = 1;
10076 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10077 real,
10078 REG_NOTES (insn));
10079 }
10080
10081 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
10082 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
10083 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
10084 deduce these equivalences by itself so it wasn't necessary to hold
10085 its hand so much. */
10086
10087 static void
10088 rs6000_frame_related (insn, reg, val, reg2, rreg)
10089 rtx insn;
10090 rtx reg;
10091 HOST_WIDE_INT val;
10092 rtx reg2;
10093 rtx rreg;
10094 {
10095 rtx real, temp;
10096
10097 /* copy_rtx will not make unique copies of registers, so we need to
10098 ensure we don't have unwanted sharing here. */
10099 if (reg == reg2)
10100 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10101
10102 if (reg == rreg)
10103 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10104
10105 real = copy_rtx (PATTERN (insn));
10106
10107 if (reg2 != NULL_RTX)
10108 real = replace_rtx (real, reg2, rreg);
10109
10110 real = replace_rtx (real, reg,
10111 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
10112 STACK_POINTER_REGNUM),
10113 GEN_INT (val)));
10114
10115 /* We expect that 'real' is either a SET or a PARALLEL containing
10116 SETs (and possibly other stuff). In a PARALLEL, all the SETs
10117 are important so they all have to be marked RTX_FRAME_RELATED_P. */
10118
10119 if (GET_CODE (real) == SET)
10120 {
10121 rtx set = real;
10122
10123 temp = simplify_rtx (SET_SRC (set));
10124 if (temp)
10125 SET_SRC (set) = temp;
10126 temp = simplify_rtx (SET_DEST (set));
10127 if (temp)
10128 SET_DEST (set) = temp;
10129 if (GET_CODE (SET_DEST (set)) == MEM)
10130 {
10131 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10132 if (temp)
10133 XEXP (SET_DEST (set), 0) = temp;
10134 }
10135 }
10136 else if (GET_CODE (real) == PARALLEL)
10137 {
10138 int i;
10139 for (i = 0; i < XVECLEN (real, 0); i++)
10140 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
10141 {
10142 rtx set = XVECEXP (real, 0, i);
10143
10144 temp = simplify_rtx (SET_SRC (set));
10145 if (temp)
10146 SET_SRC (set) = temp;
10147 temp = simplify_rtx (SET_DEST (set));
10148 if (temp)
10149 SET_DEST (set) = temp;
10150 if (GET_CODE (SET_DEST (set)) == MEM)
10151 {
10152 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10153 if (temp)
10154 XEXP (SET_DEST (set), 0) = temp;
10155 }
10156 RTX_FRAME_RELATED_P (set) = 1;
10157 }
10158 }
10159 else
10160 abort ();
10161
10162 RTX_FRAME_RELATED_P (insn) = 1;
10163 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10164 real,
10165 REG_NOTES (insn));
10166 }
10167
10168 /* Returns an insn that has a vrsave set operation with the
10169 appropriate CLOBBERs. */
10170
10171 static rtx
10172 generate_set_vrsave (reg, info, epiloguep)
10173 rtx reg;
10174 rs6000_stack_t *info;
10175 int epiloguep;
10176 {
10177 int nclobs, i;
10178 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
10179 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
10180
10181 clobs[0]
10182 = gen_rtx_SET (VOIDmode,
10183 vrsave,
10184 gen_rtx_UNSPEC_VOLATILE (SImode,
10185 gen_rtvec (2, reg, vrsave),
10186 30));
10187
10188 nclobs = 1;
10189
10190 /* We need to clobber the registers in the mask so the scheduler
10191 does not move sets to VRSAVE before sets of AltiVec registers.
10192
10193 However, if the function receives nonlocal gotos, reload will set
10194 all call saved registers live. We will end up with:
10195
10196 (set (reg 999) (mem))
10197 (parallel [ (set (reg vrsave) (unspec blah))
10198 (clobber (reg 999))])
10199
10200 The clobber will cause the store into reg 999 to be dead, and
10201 flow will attempt to delete an epilogue insn. In this case, we
10202 need an unspec use/set of the register. */
10203
10204 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
10205 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
10206 {
10207 if (!epiloguep || call_used_regs [i])
10208 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
10209 gen_rtx_REG (V4SImode, i));
10210 else
10211 {
10212 rtx reg = gen_rtx_REG (V4SImode, i);
10213
10214 clobs[nclobs++]
10215 = gen_rtx_SET (VOIDmode,
10216 reg,
10217 gen_rtx_UNSPEC (V4SImode,
10218 gen_rtvec (1, reg), 27));
10219 }
10220 }
10221
10222 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
10223
10224 for (i = 0; i < nclobs; ++i)
10225 XVECEXP (insn, 0, i) = clobs[i];
10226
10227 return insn;
10228 }
10229
10230 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
10231 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
10232
static void
emit_frame_save (frame_reg, frame_ptr, mode, regno, offset, total_size)
     rtx frame_reg;
     rtx frame_ptr;
     enum machine_mode mode;
     unsigned int regno;
     int offset;
     int total_size;
{
  rtx reg, offset_rtx, insn, mem, addr, int_rtx;
  rtx replacea, replaceb;

  int_rtx = GEN_INT (offset);

  /* Some cases that need register indexed addressing.  AltiVec
     stores always use [reg+reg]; SPE stores do when the offset does
     not fit the SPE constant-offset form.  The frame note must then
     map the offset register back to the constant (replacea/replaceb
     passed to rs6000_frame_related below).  */
  if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
      || (TARGET_SPE_ABI
	  && SPE_VECTOR_MODE (mode)
	  && !SPE_CONST_OFFSET_OK (offset)))
    {
      /* Whomever calls us must make sure r11 is available in the
         flow path of instructions in the prologue.  */
      offset_rtx = gen_rtx_REG (Pmode, 11);
      emit_move_insn (offset_rtx, int_rtx);

      replacea = offset_rtx;
      replaceb = int_rtx;
    }
  else
    {
      offset_rtx = int_rtx;
      replacea = NULL_RTX;
      replaceb = NULL_RTX;
    }

  /* Store REGNO at [FRAME_REG + OFFSET] in the stack alias set.  */
  reg = gen_rtx_REG (mode, regno);
  addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
  mem = gen_rtx_MEM (mode, addr);
  set_mem_alias_set (mem, rs6000_sr_alias_set);

  insn = emit_move_insn (mem, reg);

  rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
}
10277
10278 /* Emit an offset memory reference suitable for a frame store, while
10279 converting to a valid addressing mode. */
10280
10281 static rtx
10282 gen_frame_mem_offset (mode, reg, offset)
10283 enum machine_mode mode;
10284 rtx reg;
10285 int offset;
10286 {
10287 rtx int_rtx, offset_rtx;
10288
10289 int_rtx = GEN_INT (offset);
10290
10291 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
10292 {
10293 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10294 emit_move_insn (offset_rtx, int_rtx);
10295 }
10296 else
10297 offset_rtx = int_rtx;
10298
10299 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
10300 }
10301
10302 /* Emit function prologue as insns. */
10303
10304 void
10305 rs6000_emit_prologue ()
10306 {
10307 rs6000_stack_t *info = rs6000_stack_info ();
10308 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
10309 int reg_size = TARGET_POWERPC64 ? 8 : 4;
10310 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
10311 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
10312 rtx frame_reg_rtx = sp_reg_rtx;
10313 rtx cr_save_rtx = NULL;
10314 rtx insn;
10315 int saving_FPRs_inline;
10316 int using_store_multiple;
10317 HOST_WIDE_INT sp_offset = 0;
10318
10319 if (TARGET_SPE_ABI)
10320 {
10321 reg_mode = V2SImode;
10322 reg_size = 8;
10323 }
10324
10325 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
10326 && !TARGET_SPE_ABI
10327 && info->first_gp_reg_save < 31);
10328 saving_FPRs_inline = (info->first_fp_reg_save == 64
10329 || FP_SAVE_INLINE (info->first_fp_reg_save));
10330
10331 /* For V.4, update stack before we do any saving and set back pointer. */
10332 if (info->push_p && DEFAULT_ABI == ABI_V4)
10333 {
10334 if (info->total_size < 32767)
10335 sp_offset = info->total_size;
10336 else
10337 frame_reg_rtx = frame_ptr_rtx;
10338 rs6000_emit_allocate_stack (info->total_size,
10339 (frame_reg_rtx != sp_reg_rtx
10340 && (info->cr_save_p
10341 || info->lr_save_p
10342 || info->first_fp_reg_save < 64
10343 || info->first_gp_reg_save < 32
10344 )));
10345 if (frame_reg_rtx != sp_reg_rtx)
10346 rs6000_emit_stack_tie ();
10347 }
10348
10349 /* Save AltiVec registers if needed. */
10350 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
10351 {
10352 int i;
10353
10354 /* There should be a non inline version of this, for when we
10355 are saving lots of vector registers. */
10356 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
10357 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
10358 {
10359 rtx areg, savereg, mem;
10360 int offset;
10361
10362 offset = info->altivec_save_offset + sp_offset
10363 + 16 * (i - info->first_altivec_reg_save);
10364
10365 savereg = gen_rtx_REG (V4SImode, i);
10366
10367 areg = gen_rtx_REG (Pmode, 0);
10368 emit_move_insn (areg, GEN_INT (offset));
10369
10370 /* AltiVec addressing mode is [reg+reg]. */
10371 mem = gen_rtx_MEM (V4SImode,
10372 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
10373
10374 set_mem_alias_set (mem, rs6000_sr_alias_set);
10375
10376 insn = emit_move_insn (mem, savereg);
10377
10378 altivec_frame_fixup (insn, areg, offset);
10379 }
10380 }
10381
10382 /* VRSAVE is a bit vector representing which AltiVec registers
10383 are used. The OS uses this to determine which vector
10384 registers to save on a context switch. We need to save
10385 VRSAVE on the stack frame, add whatever AltiVec registers we
10386 used in this function, and do the corresponding magic in the
10387 epilogue. */
10388
10389 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
10390 {
10391 rtx reg, mem, vrsave;
10392 int offset;
10393
10394 /* Get VRSAVE onto a GPR. */
10395 reg = gen_rtx_REG (SImode, 12);
10396 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
10397 if (TARGET_MACHO)
10398 emit_insn (gen_get_vrsave_internal (reg));
10399 else
10400 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
10401
10402 /* Save VRSAVE. */
10403 offset = info->vrsave_save_offset + sp_offset;
10404 mem
10405 = gen_rtx_MEM (SImode,
10406 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
10407 set_mem_alias_set (mem, rs6000_sr_alias_set);
10408 insn = emit_move_insn (mem, reg);
10409
10410 /* Include the registers in the mask. */
10411 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
10412
10413 insn = emit_insn (generate_set_vrsave (reg, info, 0));
10414 }
10415
10416 /* If we use the link register, get it into r0. */
10417 if (info->lr_save_p)
10418 emit_move_insn (gen_rtx_REG (Pmode, 0),
10419 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10420
10421 /* If we need to save CR, put it into r12. */
10422 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
10423 {
10424 cr_save_rtx = gen_rtx_REG (SImode, 12);
10425 emit_insn (gen_movesi_from_cr (cr_save_rtx));
10426 }
10427
10428 /* Do any required saving of fpr's. If only one or two to save, do
10429 it ourselves. Otherwise, call function. */
10430 if (saving_FPRs_inline)
10431 {
10432 int i;
10433 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
10434 if ((regs_ever_live[info->first_fp_reg_save+i]
10435 && ! call_used_regs[info->first_fp_reg_save+i]))
10436 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
10437 info->first_fp_reg_save + i,
10438 info->fp_save_offset + sp_offset + 8 * i,
10439 info->total_size);
10440 }
10441 else if (info->first_fp_reg_save != 64)
10442 {
10443 int i;
10444 char rname[30];
10445 const char *alloc_rname;
10446 rtvec p;
10447 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
10448
10449 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
10450 gen_rtx_REG (Pmode,
10451 LINK_REGISTER_REGNUM));
10452 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
10453 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
10454 alloc_rname = ggc_strdup (rname);
10455 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
10456 gen_rtx_SYMBOL_REF (Pmode,
10457 alloc_rname));
10458 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
10459 {
10460 rtx addr, reg, mem;
10461 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
10462 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10463 GEN_INT (info->fp_save_offset
10464 + sp_offset + 8*i));
10465 mem = gen_rtx_MEM (DFmode, addr);
10466 set_mem_alias_set (mem, rs6000_sr_alias_set);
10467
10468 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
10469 }
10470 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
10471 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10472 NULL_RTX, NULL_RTX);
10473 }
10474
10475 /* Save GPRs. This is done as a PARALLEL if we are using
10476 the store-multiple instructions. */
10477 if (using_store_multiple)
10478 {
10479 rtvec p;
10480 int i;
10481 p = rtvec_alloc (32 - info->first_gp_reg_save);
10482 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10483 {
10484 rtx addr, reg, mem;
10485 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
10486 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10487 GEN_INT (info->gp_save_offset
10488 + sp_offset
10489 + reg_size * i));
10490 mem = gen_rtx_MEM (reg_mode, addr);
10491 set_mem_alias_set (mem, rs6000_sr_alias_set);
10492
10493 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
10494 }
10495 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
10496 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10497 NULL_RTX, NULL_RTX);
10498 }
10499 else
10500 {
10501 int i;
10502 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10503 if ((regs_ever_live[info->first_gp_reg_save+i]
10504 && ! call_used_regs[info->first_gp_reg_save+i])
10505 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
10506 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
10507 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
10508 {
10509 rtx addr, reg, mem;
10510 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
10511
10512 if (TARGET_SPE_ABI)
10513 {
10514 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
10515 rtx b;
10516
10517 if (!SPE_CONST_OFFSET_OK (offset))
10518 {
10519 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10520 emit_move_insn (b, GEN_INT (offset));
10521 }
10522 else
10523 b = GEN_INT (offset);
10524
10525 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
10526 mem = gen_rtx_MEM (V2SImode, addr);
10527 set_mem_alias_set (mem, rs6000_sr_alias_set);
10528 insn = emit_move_insn (mem, reg);
10529
10530 if (GET_CODE (b) == CONST_INT)
10531 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10532 NULL_RTX, NULL_RTX);
10533 else
10534 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10535 b, GEN_INT (offset));
10536 }
10537 else
10538 {
10539 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10540 GEN_INT (info->gp_save_offset
10541 + sp_offset
10542 + reg_size * i));
10543 mem = gen_rtx_MEM (reg_mode, addr);
10544 set_mem_alias_set (mem, rs6000_sr_alias_set);
10545
10546 insn = emit_move_insn (mem, reg);
10547 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10548 NULL_RTX, NULL_RTX);
10549 }
10550 }
10551 }
10552
10553 /* ??? There's no need to emit actual instructions here, but it's the
10554 easiest way to get the frame unwind information emitted. */
10555 if (current_function_calls_eh_return)
10556 {
10557 unsigned int i, regno;
10558
10559 for (i = 0; ; ++i)
10560 {
10561 regno = EH_RETURN_DATA_REGNO (i);
10562 if (regno == INVALID_REGNUM)
10563 break;
10564
10565 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
10566 info->ehrd_offset + sp_offset
10567 + reg_size * (int) i,
10568 info->total_size);
10569 }
10570 }
10571
10572 /* Save lr if we used it. */
10573 if (info->lr_save_p)
10574 {
10575 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10576 GEN_INT (info->lr_save_offset + sp_offset));
10577 rtx reg = gen_rtx_REG (Pmode, 0);
10578 rtx mem = gen_rtx_MEM (Pmode, addr);
10579 /* This should not be of rs6000_sr_alias_set, because of
10580 __builtin_return_address. */
10581
10582 insn = emit_move_insn (mem, reg);
10583 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10584 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10585 }
10586
10587 /* Save CR if we use any that must be preserved. */
10588 if (info->cr_save_p)
10589 {
10590 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10591 GEN_INT (info->cr_save_offset + sp_offset));
10592 rtx mem = gen_rtx_MEM (SImode, addr);
10593
10594 set_mem_alias_set (mem, rs6000_sr_alias_set);
10595
10596 /* If r12 was used to hold the original sp, copy cr into r0 now
10597 that it's free. */
10598 if (REGNO (frame_reg_rtx) == 12)
10599 {
10600 cr_save_rtx = gen_rtx_REG (SImode, 0);
10601 emit_insn (gen_movesi_from_cr (cr_save_rtx));
10602 }
10603 insn = emit_move_insn (mem, cr_save_rtx);
10604
10605 /* Now, there's no way that dwarf2out_frame_debug_expr is going
10606 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
10607 OK. All we have to do is specify that _one_ condition code
10608 register is saved in this stack slot. The thrower's epilogue
10609 will then restore all the call-saved registers.
10610 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
10611 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10612 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
10613 }
10614
10615 /* Update stack and set back pointer unless this is V.4,
10616 for which it was done previously. */
10617 if (info->push_p && DEFAULT_ABI != ABI_V4)
10618 rs6000_emit_allocate_stack (info->total_size, FALSE);
10619
10620 /* Set frame pointer, if needed. */
10621 if (frame_pointer_needed)
10622 {
10623 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
10624 sp_reg_rtx);
10625 RTX_FRAME_RELATED_P (insn) = 1;
10626 }
10627
10628 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
10629 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
10630 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
10631 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
10632 {
10633 /* If emit_load_toc_table will use the link register, we need to save
10634 it. We use R11 for this purpose because emit_load_toc_table
10635 can use register 0. This allows us to use a plain 'blr' to return
10636 from the procedure more often. */
10637 int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
10638 && ! info->lr_save_p
10639 && EXIT_BLOCK_PTR->pred != NULL);
10640 if (save_LR_around_toc_setup)
10641 emit_move_insn (gen_rtx_REG (Pmode, 11),
10642 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10643
10644 rs6000_emit_load_toc_table (TRUE);
10645
10646 if (save_LR_around_toc_setup)
10647 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
10648 gen_rtx_REG (Pmode, 11));
10649 }
10650
10651 #if TARGET_MACHO
10652 if (DEFAULT_ABI == ABI_DARWIN
10653 && flag_pic && current_function_uses_pic_offset_table)
10654 {
10655 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
10656 const char *picbase = machopic_function_base_name ();
10657 rtx src = gen_rtx_SYMBOL_REF (Pmode, ggc_alloc_string (picbase, -1));
10658
10659 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));
10660
10661 rs6000_maybe_dead (
10662 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
10663 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
10664 }
10665 #endif
10666 }
10667
10668 /* Write function prologue. */
10669
/* Target hook: write the assembler text for the function prologue to
   FILE.  SIZE is the frame size computed by the middle end; it is
   unused because the layout is taken from rs6000_stack_info instead.

   Normally the prologue was already emitted as RTL by
   rs6000_emit_prologue, so this routine only writes the .extern
   directives that the out-of-line save/restore paths may reference.
   When there is no `prologue' insn pattern (! HAVE_prologue), the
   prologue RTL is generated and passed through final () here.  */

static void
rs6000_output_function_prologue (file, size)
     FILE *file;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  rs6000_stack_t *info = rs6000_stack_info ();

  if (TARGET_DEBUG_STACK)
    debug_stack_info (info);

  /* Write .extern for any function we will call to save and restore
     fp values.  Only needed when some FPRs are saved out of line
     (first_fp_reg_save < 64 means at least one FPR is saved).  */
  if (info->first_fp_reg_save < 64
      && !FP_SAVE_INLINE (info->first_fp_reg_save))
    fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
	     SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
	     RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
	     RESTORE_FP_SUFFIX);

  /* Write .extern for AIX common mode routines, if needed.  These are
     the software multiply/divide helpers used when neither POWER nor
     PowerPC instructions are available.  Emitted at most once per
     file (guarded by common_mode_defined).  */
  if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
    {
      fputs ("\t.extern __mulh\n", file);
      fputs ("\t.extern __mull\n", file);
      fputs ("\t.extern __divss\n", file);
      fputs ("\t.extern __divus\n", file);
      fputs ("\t.extern __quoss\n", file);
      fputs ("\t.extern __quous\n", file);
      common_mode_defined = 1;
    }

  if (! HAVE_prologue)
    {
      start_sequence ();

      /* A NOTE_INSN_DELETED is supposed to be at the start and end of
	 the "toplevel" insn chain.  */
      emit_note (0, NOTE_INSN_DELETED);
      rs6000_emit_prologue ();
      emit_note (0, NOTE_INSN_DELETED);

      /* Expand INSN_ADDRESSES so final() doesn't crash.  The addresses
	 need not be accurate; 4 bytes per insn is a safe placeholder.  */
      {
	rtx insn;
	unsigned addr = 0;
	for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	  {
	    INSN_ADDRESSES_NEW (insn, addr);
	    addr += 4;
	  }
      }

      if (TARGET_DEBUG_STACK)
	debug_rtx_list (get_insns (), 100);
      final (get_insns (), file, FALSE, FALSE);
      end_sequence ();
    }

  /* Bump the label counter used for PIC base labels so the next
     function gets a distinct label.  */
  rs6000_pic_labelno++;
}
10730
10731 /* Emit function epilogue as insns.
10732
10733 At present, dwarf2out_frame_debug_expr doesn't understand
10734 register restores, so we don't bother setting RTX_FRAME_RELATED_P
10735 anywhere in the epilogue. Most of the insns below would in any case
10736 need special notes to explain where r11 is in relation to the stack. */
10737
/* Emit RTL for the function epilogue.  SIBCALL is nonzero when the
   epilogue precedes a sibling call, in which case no return insn is
   emitted (the sibcall itself returns to our caller).  */

void
rs6000_emit_epilogue (sibcall)
     int sibcall;
{
  rs6000_stack_t *info;
  int restoring_FPRs_inline;
  int using_load_multiple;
  int using_mfcr_multiple;
  int use_backchain_to_restore_sp;
  int sp_offset = 0;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);	/* r1 is the stack pointer.  */
  rtx frame_reg_rtx = sp_reg_rtx;		/* Register frame slots are addressed from.  */
  enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
  int reg_size = TARGET_POWERPC64 ? 8 : 4;
  int i;

  /* Under the SPE ABI, GPR save slots are 64 bits wide regardless of
     TARGET_POWERPC64, and are accessed as V2SImode.  */
  if (TARGET_SPE_ABI)
    {
      reg_mode = V2SImode;
      reg_size = 8;
    }

  info = rs6000_stack_info ();
  /* lmw is a 32-bit-only instruction; it is also not used for SPE,
     whose save slots are 64 bits, and only pays off when more than
     one GPR needs restoring (first_gp_reg_save < 31).  */
  using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			 && !TARGET_SPE_ABI
			 && info->first_gp_reg_save < 31);
  /* The out-of-line FPR restore routine performs the function return
     itself, so it cannot be used for sibcalls or eh_return, and is
     pointless when no FPRs are saved (first_fp_reg_save == 64).  */
  restoring_FPRs_inline = (sibcall
			   || current_function_calls_eh_return
			   || info->first_fp_reg_save == 64
			   || FP_SAVE_INLINE (info->first_fp_reg_save));
  use_backchain_to_restore_sp = (frame_pointer_needed
				 || current_function_calls_alloca
				 || info->total_size > 32767);
  /* mfcr of all fields is cheap on these processors, or we are
     optimizing for size; otherwise restore CR fields one at a time.  */
  using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
			 || rs6000_cpu == PROCESSOR_PPC603
			 || rs6000_cpu == PROCESSOR_PPC750
			 || optimize_size);

  /* If we have a frame pointer, a call to alloca, or a large stack
     frame, restore the old stack pointer using the backchain.  Otherwise,
     we know what size to update it with.  */
  if (use_backchain_to_restore_sp)
    {
      /* Under V.4, don't reset the stack pointer until after we're done
	 loading the saved registers.  Load the backchain into r11 and
	 address the frame from there instead.  */
      if (DEFAULT_ABI == ABI_V4)
	frame_reg_rtx = gen_rtx_REG (Pmode, 11);

      emit_move_insn (frame_reg_rtx,
		      gen_rtx_MEM (Pmode, sp_reg_rtx));

    }
  else if (info->push_p)
    {
      /* For V.4 the stack pointer is popped last (below); elsewhere we
	 can pop the frame immediately with a simple add.  */
      if (DEFAULT_ABI == ABI_V4)
	sp_offset = info->total_size;
      else
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size)));
	}
    }

  /* Restore AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx addr, areg, mem;

	    /* r0 holds the (possibly large) frame offset, since
	       AltiVec loads cannot take an immediate displacement.  */
	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn
	      (areg, GEN_INT (info->altivec_save_offset
			      + sp_offset
			      + 16 * (i - info->first_altivec_reg_save)));

	    /* AltiVec addressing mode is [reg+reg].  */
	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
	    mem = gen_rtx_MEM (V4SImode, addr);
	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
	  }
    }

  /* Restore VRSAVE if needed.  The saved value is loaded into r12 and
     then moved to the VRSAVE SPR via generate_set_vrsave.  */
  if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
    {
      rtx addr, mem, reg;

      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			   GEN_INT (info->vrsave_save_offset + sp_offset));
      mem = gen_rtx_MEM (SImode, addr);
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      reg = gen_rtx_REG (SImode, 12);
      emit_move_insn (reg, mem);

      emit_insn (generate_set_vrsave (reg, info, 1));
    }

  /* Get the old lr if we saved it.  Loaded into r0 here; moved to LR
     below, after the CR load, to overlap latencies.  */
  if (info->lr_save_p)
    {
      rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
				      info->lr_save_offset + sp_offset);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
    }

  /* Get the old cr if we saved it.  Loaded into r12; moved into the
     CR fields at the end of the epilogue.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      emit_move_insn (gen_rtx_REG (SImode, 12), mem);
    }

  /* Set LR here to try to overlap restores below.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
		    gen_rtx_REG (Pmode, 0));

  /* Load exception handler data registers, if needed.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      for (i = 0; ; ++i)
	{
	  rtx mem;

	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
				      info->ehrd_offset + sp_offset
				      + reg_size * (int) i);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
	}
    }

  /* Restore GPRs.  This is done as a PARALLEL if we are using
     the load-multiple instructions.  */
  if (using_load_multiple)
    {
      rtvec p;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) =
	    gen_rtx_SET (VOIDmode,
			 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
			 mem);
	}
      emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
  else
    /* Otherwise restore only the live call-saved GPRs individually.
       The PIC offset table register is restored even if call-used,
       when the ABI/flag_pic combination requires it.  */
    for (i = 0; i < 32 - info->first_gp_reg_save; i++)
      if ((regs_ever_live[info->first_gp_reg_save+i]
	   && ! call_used_regs[info->first_gp_reg_save+i])
	  || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
	      && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
		  || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  /* Restore 64-bit quantities for SPE.  The SPE save area has
	     its own offset, and evldd-style accesses only allow small
	     constant displacements, hence the FIXED_SCRATCH fallback.  */
	  if (TARGET_SPE_ABI)
	    {
	      int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
	      rtx b;

	      if (!SPE_CONST_OFFSET_OK (offset))
		{
		  b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
		  emit_move_insn (b, GEN_INT (offset));
		}
	      else
		b = GEN_INT (offset);

	      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
	      mem = gen_rtx_MEM (V2SImode, addr);
	    }

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode,
				       info->first_gp_reg_save + i), mem);
	}

  /* Restore fpr's if we need to do it without calling a function.  */
  if (restoring_FPRs_inline)
    for (i = 0; i < 64 - info->first_fp_reg_save; i++)
      if ((regs_ever_live[info->first_fp_reg_save+i]
	   && ! call_used_regs[info->first_fp_reg_save+i]))
	{
	  rtx addr, mem;
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset
					+ 8 * i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (DFmode,
				       info->first_fp_reg_save + i),
			  mem);
	}

  /* If we saved cr, restore it here.  Just those that were used.  */
  if (info->cr_save_p)
    {
      rtx r12_rtx = gen_rtx_REG (SImode, 12);
      int count = 0;

      if (using_mfcr_multiple)
	{
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      count++;
	  /* cr_save_p without any live call-saved CR field would mean
	     the save was pointless; treat it as an internal error.  */
	  if (count == 0)
	    abort ();
	}

      if (using_mfcr_multiple && count > 1)
	{
	  rtvec p;
	  int ndx;

	  p = rtvec_alloc (count);

	  /* One SET per live CR field: each is an UNSPEC 20 taking r12
	     and a mask selecting that field (bit 7-i).  */
	  ndx = 0;
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      {
		rtvec r = rtvec_alloc (2);
		RTVEC_ELT (r, 0) = r12_rtx;
		RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
		RTVEC_ELT (p, ndx) =
		  gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
			       gen_rtx_UNSPEC (CCmode, r, 20));
		ndx++;
	      }
	  emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
	  if (ndx != count)
	    abort ();
	}
      else
	for (i = 0; i < 8; i++)
	  if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	    {
	      emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
							   CR0_REGNO+i),
					      r12_rtx));
	    }
    }

  /* If this is V.4, unwind the stack pointer after all of the loads
     have been done.  We need to emit a block here so that sched
     doesn't decide to move the sp change before the register restores
     (which may not have any obvious dependency on the stack).  This
     doesn't hurt performance, because there is no scheduling that can
     be done after this point.  */
  if (DEFAULT_ABI == ABI_V4)
    {
      if (frame_reg_rtx != sp_reg_rtx)
	rs6000_emit_stack_tie ();

      if (use_backchain_to_restore_sp)
	{
	  emit_move_insn (sp_reg_rtx, frame_reg_rtx);
	}
      else if (sp_offset != 0)
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset)));
	}
    }

  /* For eh_return, add the dynamic stack adjustment computed by the
     exception machinery on top of the pop above.  */
  if (current_function_calls_eh_return)
    {
      rtx sa = EH_RETURN_STACKADJ_RTX;
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
		 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
    }

  /* Emit the return (a PARALLEL of the RETURN, a USE of LR, and — when
     tail-calling the FP restore routine — a USE of its symbol plus the
     FPR restore SETs).  Skipped entirely for sibcalls.  */
  if (!sibcall)
    {
      rtvec p;
      if (! restoring_FPRs_inline)
	p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
      else
	p = rtvec_alloc (2);

      RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_REG (Pmode,
						   LINK_REGISTER_REGNUM));

      /* If we have to restore more than two FP registers, branch to the
	 restore function.  It will return to our caller.  */
      if (! restoring_FPRs_inline)
	{
	  int i;
	  char rname[30];
	  const char *alloc_rname;

	  sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
		   info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
	  alloc_rname = ggc_strdup (rname);
	  RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
					  gen_rtx_SYMBOL_REF (Pmode,
							      alloc_rname));

	  /* Describe the FPR restores the out-of-line routine will do,
	     so the RTL correctly records them as part of this insn.  */
	  for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	    {
	      rtx addr, mem;
	      addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
				   GEN_INT (info->fp_save_offset + 8*i));
	      mem = gen_rtx_MEM (DFmode, addr);
	      set_mem_alias_set (mem, rs6000_sr_alias_set);

	      RTVEC_ELT (p, i+3) =
		gen_rtx_SET (VOIDmode,
			     gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
			     mem);
	    }
	}

      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
}
11101
11102 /* Write function epilogue. */
11103
/* Target hook: write the assembler text for the function epilogue to
   FILE.  SIZE is the frame size computed by the middle end; unused
   here because the layout comes from rs6000_stack_info.

   When there is no `epilogue' insn pattern (! HAVE_epilogue), the
   epilogue RTL is generated and passed through final () here.  On
   AIX, this routine additionally emits the traceback table that
   follows the function's code.  */

static void
rs6000_output_function_epilogue (file, size)
     FILE *file;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  rs6000_stack_t *info = rs6000_stack_info ();

  if (! HAVE_epilogue)
    {
      rtx insn = get_last_insn ();
      /* If the last insn was a BARRIER, we don't have to write anything except
	 the trace table.  */
      if (GET_CODE (insn) == NOTE)
	insn = prev_nonnote_insn (insn);
      if (insn == 0 || GET_CODE (insn) != BARRIER)
	{
	  /* This is slightly ugly, but at least we don't have two
	     copies of the epilogue-emitting code.  */
	  start_sequence ();

	  /* A NOTE_INSN_DELETED is supposed to be at the start
	     and end of the "toplevel" insn chain.  */
	  emit_note (0, NOTE_INSN_DELETED);
	  rs6000_emit_epilogue (FALSE);
	  emit_note (0, NOTE_INSN_DELETED);

	  /* Expand INSN_ADDRESSES so final() doesn't crash.  The
	     addresses need not be accurate.  */
	  {
	    rtx insn;
	    unsigned addr = 0;
	    for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	      {
		INSN_ADDRESSES_NEW (insn, addr);
		addr += 4;
	      }
	  }

	  if (TARGET_DEBUG_STACK)
	    debug_rtx_list (get_insns (), 100);
	  final (get_insns (), file, FALSE, FALSE);
	  end_sequence ();
	}
    }

  /* Output a traceback table here.  See /usr/include/sys/debug.h for info
     on its format.

     We don't output a traceback table if -finhibit-size-directive was
     used.  The documentation for -finhibit-size-directive reads
     ``don't output a @code{.size} assembler directive, or anything
     else that would cause trouble if the function is split in the
     middle, and the two halves are placed at locations far apart in
     memory.''  The traceback table has this property, since it
     includes the offset from the start of the function to the
     traceback table itself.

     System V.4 PowerPCs (and the embedded ABI derived from it) use a
     different traceback table.  */
  if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
      && rs6000_traceback != traceback_none)
    {
      const char *fname = NULL;
      const char *language_string = lang_hooks.name;
      int fixed_parms = 0, float_parms = 0, parm_info = 0;
      int i;
      int optional_tbtab;

      /* Decide whether to emit the optional (variable-length) part of
	 the table, based on -mtraceback= and size/target defaults.  */
      if (rs6000_traceback == traceback_full)
	optional_tbtab = 1;
      else if (rs6000_traceback == traceback_part)
	optional_tbtab = 0;
      else
	optional_tbtab = !optimize_size && !TARGET_ELF;

      if (optional_tbtab)
	{
	  fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
	  while (*fname == '.')	/* V.4 encodes . in the name */
	    fname++;

	  /* Need label immediately before tbtab, so we can compute
	     its offset from the function start.  */
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
	  ASM_OUTPUT_LABEL (file, fname);
	}

      /* The .tbtab pseudo-op can only be used for the first eight
	 expressions, since it can't handle the possibly variable
	 length fields that follow.  However, if you omit the optional
	 fields, the assembler outputs zeros for all optional fields
	 anyways, giving each variable length field its minimum length
	 (as defined in sys/debug.h).  Thus we can not use the .tbtab
	 pseudo-op at all.  */

      /* An all-zero word flags the start of the tbtab, for debuggers
	 that have to find it by searching forward from the entry
	 point or from the current pc.  */
      fputs ("\t.long 0\n", file);

      /* Tbtab format type.  Use format type 0.  */
      fputs ("\t.byte 0,", file);

      /* Language type.  Unfortunately, there doesn't seem to be any
	 official way to get this info, so we use language_string.  C
	 is 0.  C++ is 9.  No number defined for Obj-C, so use the
	 value for C for now.  There is no official value for Java,
	 although IBM appears to be using 13.  There is no official value
	 for Chill, so we've chosen 44 pseudo-randomly.  */
      if (! strcmp (language_string, "GNU C")
	  || ! strcmp (language_string, "GNU Objective-C"))
	i = 0;
      else if (! strcmp (language_string, "GNU F77"))
	i = 1;
      else if (! strcmp (language_string, "GNU Ada"))
	i = 3;
      else if (! strcmp (language_string, "GNU Pascal"))
	i = 2;
      else if (! strcmp (language_string, "GNU C++"))
	i = 9;
      else if (! strcmp (language_string, "GNU Java"))
	i = 13;
      else if (! strcmp (language_string, "GNU CHILL"))
	i = 44;
      else
	abort ();
      fprintf (file, "%d,", i);

      /* 8 single bit fields: global linkage (not set for C extern linkage,
	 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
	 from start of procedure stored in tbtab, internal function, function
	 has controlled storage, function has no toc, function uses fp,
	 function logs/aborts fp operations.  */
      /* Assume that fp operations are used if any fp reg must be saved.  */
      fprintf (file, "%d,",
	       (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));

      /* 6 bitfields: function is interrupt handler, name present in
	 proc table, function calls alloca, on condition directives
	 (controls stack walks, 3 bits), saves condition reg, saves
	 link reg.  */
      /* The `function calls alloca' bit seems to be set whenever reg 31 is
	 set up as a frame pointer, even when there is no alloca call.  */
      fprintf (file, "%d,",
	       ((optional_tbtab << 6)
		| ((optional_tbtab & frame_pointer_needed) << 5)
		| (info->cr_save_p << 1)
		| (info->lr_save_p)));

      /* 3 bitfields: saves backchain, fixup code, number of fpr saved
	 (6 bits).  */
      fprintf (file, "%d,",
	       (info->push_p << 7) | (64 - info->first_fp_reg_save));

      /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits).  */
      fprintf (file, "%d,", (32 - first_reg_to_save ()));

      if (optional_tbtab)
	{
	  /* Compute the parameter info from the function decl argument
	     list.  */
	  tree decl;
	  int next_parm_info_bit = 31;

	  for (decl = DECL_ARGUMENTS (current_function_decl);
	       decl; decl = TREE_CHAIN (decl))
	    {
	      rtx parameter = DECL_INCOMING_RTL (decl);
	      enum machine_mode mode = GET_MODE (parameter);

	      /* Only parameters arriving in registers are described;
		 stack parameters are skipped.  */
	      if (GET_CODE (parameter) == REG)
		{
		  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
		    {
		      int bits;

		      float_parms++;

		      if (mode == SFmode)
			bits = 0x2;
		      else if (mode == DFmode || mode == TFmode)
			bits = 0x3;
		      else
			abort ();

		      /* If only one bit will fit, don't or in this entry.  */
		      if (next_parm_info_bit > 0)
			parm_info |= (bits << (next_parm_info_bit - 1));
		      next_parm_info_bit -= 2;
		    }
		  else
		    {
		      fixed_parms += ((GET_MODE_SIZE (mode)
				       + (UNITS_PER_WORD - 1))
				      / UNITS_PER_WORD);
		      next_parm_info_bit -= 1;
		    }
		}
	    }
	}

      /* Number of fixed point parameters.  */
      /* This is actually the number of words of fixed point parameters; thus
	 an 8 byte struct counts as 2; and thus the maximum value is 8.  */
      fprintf (file, "%d,", fixed_parms);

      /* 2 bitfields: number of floating point parameters (7 bits), parameters
	 all on stack.  */
      /* This is actually the number of fp registers that hold parameters;
	 and thus the maximum value is 13.  */
      /* Set parameters on stack bit if parameters are not in their original
	 registers, regardless of whether they are on the stack?  Xlc
	 seems to set the bit when not optimizing.  */
      fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));

      if (! optional_tbtab)
	return;

      /* Optional fields follow.  Some are variable length.  */

      /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
	 11 double float.  */
      /* There is an entry for each parameter in a register, in the order that
	 they occur in the parameter list.  Any intervening arguments on the
	 stack are ignored.  If the list overflows a long (max possible length
	 34 bits) then completely leave off all elements that don't fit.  */
      /* Only emit this long if there was at least one parameter.  */
      if (fixed_parms || float_parms)
	fprintf (file, "\t.long %d\n", parm_info);

      /* Offset from start of code to tb table.  */
      fputs ("\t.long ", file);
      ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      fputs ("-.", file);
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      putc ('\n', file);

      /* Interrupt handler mask.  */
      /* Omit this long, since we never set the interrupt handler bit
	 above.  */

      /* Number of CTL (controlled storage) anchors.  */
      /* Omit this long, since the has_ctl bit is never set above.  */

      /* Displacement into stack of each CTL anchor.  */
      /* Omit this list of longs, because there are no CTL anchors.  */

      /* Length of function name.  */
      if (*fname == '*')
	++fname;
      fprintf (file, "\t.short %d\n", (int) strlen (fname));

      /* Function name.  */
      assemble_string (fname, strlen (fname));

      /* Register for alloca automatic storage; this is always reg 31.
	 Only emit this if the alloca bit was set above.  */
      if (frame_pointer_needed)
	fputs ("\t.byte 31\n", file);

      fputs ("\t.align 2\n", file);
    }
}
11375 \f
11376 /* A C compound statement that outputs the assembler code for a thunk
11377 function, used to implement C++ virtual function calls with
11378 multiple inheritance. The thunk acts as a wrapper around a virtual
11379 function, adjusting the implicit object parameter before handing
11380 control off to the real function.
11381
11382 First, emit code to add the integer DELTA to the location that
11383 contains the incoming first argument. Assume that this argument
11384 contains a pointer, and is the one used to pass the `this' pointer
11385 in C++. This is the incoming argument *before* the function
11386 prologue, e.g. `%o0' on a sparc. The addition must preserve the
11387 values of all other incoming arguments.
11388
11389 After the addition, emit code to jump to FUNCTION, which is a
11390 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
11391 not touch the return address. Hence returning from FUNCTION will
11392 return to whoever called the current `thunk'.
11393
11394 The effect must be as if FUNCTION had been called directly with the
11395 adjusted first argument. This macro is responsible for emitting
11396 all of the code for a thunk function; output_function_prologue()
11397 and output_function_epilogue() are not invoked.
11398
11399 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
11400 been extracted from it.) It might possibly be useful on some
11401 targets, but probably not.
11402
11403 If you do not define this macro, the target-independent code in the
11404 C++ frontend will generate a less efficient heavyweight thunk that
11405 calls FUNCTION instead of jumping to it. The generic approach does
11406 not support varargs. */
11407
/* Emit assembler for a C++ MI thunk directly to FILE.  The thunk adds
   DELTA (and, if nonzero, the vtable entry at VCALL_OFFSET) to the
   incoming `this' pointer, then tail-calls FUNCTION.  THUNK_FNDECL is
   unused.  Runs final () itself; the normal prologue/epilogue hooks
   are not invoked for thunks.  */

static void
rs6000_output_mi_thunk (file, thunk_fndecl, delta, vcall_offset, function)
     FILE *file;
     tree thunk_fndecl ATTRIBUTE_UNUSED;
     HOST_WIDE_INT delta;
     HOST_WIDE_INT vcall_offset;
     tree function;
{
  rtx this, insn, funexp;

  /* Pretend register allocation is complete so we can emit hard-reg
     RTL directly; restored at the end of this function.  */
  reload_completed = 1;
  no_new_pseudos = 1;

  /* Mark the end of the (empty) prologue.  */
  emit_note (NULL, NOTE_INSN_PROLOGUE_END);

  /* Find the "this" pointer.  If the function returns a structure,
     the structure return pointer is in r3, pushing `this' to r4.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
    this = gen_rtx_REG (Pmode, 4);
  else
    this = gen_rtx_REG (Pmode, 3);

  /* Apply the constant offset, if required.  */
  if (delta)
    {
      rtx delta_rtx = GEN_INT (delta);
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (this, this, delta_rtx)
		 : gen_adddi3 (this, this, delta_rtx));
    }

  /* Apply the offset from the vtable, if required: load the vtable
     pointer from *this into r12, load the adjustment stored at
     vtable + VCALL_OFFSET, and add it to `this'.  */
  if (vcall_offset)
    {
      rtx vcall_offset_rtx = GEN_INT (vcall_offset);
      rtx tmp = gen_rtx_REG (Pmode, 12);

      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
		 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (this, this, tmp)
		 : gen_adddi3 (this, this, tmp));
    }

  /* Generate a tail call to the target function.  */
  if (!TREE_USED (function))
    {
      assemble_external (function);
      TREE_USED (function) = 1;
    }
  funexp = XEXP (DECL_RTL (function), 0);

  /* Mark the call as local (SYMBOL_REF_FLAG) when the target is in
     this file and not forced through a long call.  */
  SYMBOL_REF_FLAG (funexp) = 0;
  if (current_file_function_operand (funexp, VOIDmode)
      && (! lookup_attribute ("longcall",
			      TYPE_ATTRIBUTES (TREE_TYPE (function)))
	  || lookup_attribute ("shortcall",
			       TYPE_ATTRIBUTES (TREE_TYPE (function)))))
    SYMBOL_REF_FLAG (funexp) = 1;

  funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);

#if TARGET_MACHO
  if (flag_pic)
    funexp = machopic_indirect_call_target (funexp);
#endif

  /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
     generate sibcall RTL explicitly to avoid constraint abort.  */
  insn = emit_call_insn (
	   gen_rtx_PARALLEL (VOIDmode,
			     gen_rtvec (4,
					gen_rtx_CALL (VOIDmode,
						      funexp, const0_rtx),
					gen_rtx_USE (VOIDmode, const0_rtx),
					gen_rtx_USE (VOIDmode,
						     gen_rtx_REG (SImode,
								  LINK_REGISTER_REGNUM)),
					gen_rtx_RETURN (VOIDmode))));
  SIBLING_CALL_P (insn) = 1;
  emit_barrier ();

  /* Run just enough of rest_of_compilation to get the insns emitted.
     There's not really enough bulk here to make other passes such as
     instruction scheduling worth while.  Note that use_thunk calls
     assemble_start_function and assemble_end_function.  */
  insn = get_insns ();
  shorten_branches (insn);
  final_start_function (insn, file, 1);
  final (insn, file, 1, 0);
  final_end_function ();

  /* Restore the global state disturbed above.  */
  reload_completed = 0;
  no_new_pseudos = 0;
}
11507 \f
11508 /* A quick summary of the various types of 'constant-pool tables'
11509 under PowerPC:
11510
11511 Target Flags Name One table per
11512 AIX (none) AIX TOC object file
11513 AIX -mfull-toc AIX TOC object file
11514 AIX -mminimal-toc AIX minimal TOC translation unit
11515 SVR4/EABI (none) SVR4 SDATA object file
11516 SVR4/EABI -fpic SVR4 pic object file
11517 SVR4/EABI -fPIC SVR4 PIC translation unit
11518 SVR4/EABI -mrelocatable EABI TOC function
11519 SVR4/EABI -maix AIX TOC object file
11520 SVR4/EABI -maix -mminimal-toc
11521 AIX minimal TOC translation unit
11522
11523 Name Reg. Set by entries contains:
11524 made by addrs? fp? sum?
11525
11526 AIX TOC 2 crt0 as Y option option
11527 AIX minimal TOC 30 prolog gcc Y Y option
11528 SVR4 SDATA 13 crt0 gcc N Y N
11529 SVR4 pic 30 prolog ld Y not yet N
11530 SVR4 PIC 30 prolog gcc Y option option
11531 EABI TOC 30 prolog gcc Y option option
11532
11533 */
11534
11535 /* Hash functions for the hash table. */
11536
/* Compute a hash value for constant-pool entry K, mixing its rtx
   code, machine mode and every operand (recursing into sub-rtxes).
   Used by toc_hash_function below.  */

static unsigned
rs6000_hash_constant (k)
     rtx k;
{
  enum rtx_code code = GET_CODE (k);
  enum machine_mode mode = GET_MODE (k);
  unsigned result = (code << 3) ^ mode;
  const char *format;
  int flen, fidx;

  format = GET_RTX_FORMAT (code);
  flen = strlen (format);
  fidx = 0;

  switch (code)
    {
    case LABEL_REF:
      /* Hash label references by the UID of the referenced label.  */
      return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));

    case CONST_DOUBLE:
      if (mode != VOIDmode)
	return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
      /* A VOIDmode CONST_DOUBLE is an integer pair; hash only its
	 first two operands in the loop below.  */
      flen = 2;
      break;

    case CODE_LABEL:
      /* Skip the first three operands of a CODE_LABEL.  */
      fidx = 3;
      break;

    default:
      break;
    }

  /* Fold each remaining operand into the hash according to its
     rtx format letter.  */
  for (; fidx < flen; fidx++)
    switch (format[fidx])
      {
      case 's':
	{
	  unsigned i, len;
	  const char *str = XSTR (k, fidx);
	  len = strlen (str);
	  result = result * 613 + len;
	  for (i = 0; i < len; i++)
	    result = result * 613 + (unsigned) str[i];
	  break;
	}
      case 'u':
      case 'e':
	result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
	break;
      case 'i':
      case 'n':
	result = result * 613 + (unsigned) XINT (k, fidx);
	break;
      case 'w':
	if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
	  result = result * 613 + (unsigned) XWINT (k, fidx);
	else
	  {
	    size_t i;
	    /* NOTE(review): the loop count is in `unsigned'-sized
	       words but the shift advances only CHAR_BIT bits per
	       step, so the high bits of a wide int barely mix in.
	       Harmless for a hash, but looks unintended -- confirm
	       before relying on its distribution.  */
	    for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
	      result = result * 613 + (unsigned) (XWINT (k, fidx)
						  >> CHAR_BIT * i);
	  }
	break;
      default:
	abort ();
      }

  return result;
}
11608
11609 static unsigned
11610 toc_hash_function (hash_entry)
11611 const void * hash_entry;
11612 {
11613 const struct toc_hash_struct *thc =
11614 (const struct toc_hash_struct *) hash_entry;
11615 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
11616 }
11617
11618 /* Compare H1 and H2 for equivalence. */
11619
11620 static int
11621 toc_hash_eq (h1, h2)
11622 const void * h1;
11623 const void * h2;
11624 {
11625 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
11626 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
11627
11628 if (((const struct toc_hash_struct *) h1)->key_mode
11629 != ((const struct toc_hash_struct *) h2)->key_mode)
11630 return 0;
11631
11632 return rtx_equal_p (r1, r2);
11633 }
11634
/* These are the names given by the C++ front-end to vtables, and
   vtable-like objects.  Ideally, this logic should not be here;
   instead, there should be some programmatic way of inquiring as
   to whether or not an object is a vtable.  "_vt." is the old ABI
   prefix; "_ZTV", "_ZTT" and "_ZTC" are the new (Itanium) ABI
   manglings for vtables, VTTs and construction vtables.

   Note: the macro now tests its NAME argument; the previous version
   referenced a lowercase `name', silently capturing whatever local
   variable of that name the caller happened to have in scope.  */

#define VTABLE_NAME_P(NAME)				  \
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0	  \
   || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0	  \
   || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0	  \
   || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
11645
11646 void
11647 rs6000_output_symbol_ref (file, x)
11648 FILE *file;
11649 rtx x;
11650 {
11651 /* Currently C++ toc references to vtables can be emitted before it
11652 is decided whether the vtable is public or private. If this is
11653 the case, then the linker will eventually complain that there is
11654 a reference to an unknown section. Thus, for vtables only,
11655 we emit the TOC reference to reference the symbol and not the
11656 section. */
11657 const char *name = XSTR (x, 0);
11658
11659 if (VTABLE_NAME_P (name))
11660 {
11661 RS6000_OUTPUT_BASENAME (file, name);
11662 }
11663 else
11664 assemble_name (file, name);
11665 }
11666
11667 /* Output a TOC entry. We derive the entry name from what is being
11668 written. */
11669
void
output_toc (file, x, labelno, mode)
     FILE *file;
     rtx x;
     int labelno;
     enum machine_mode mode;
{
  char buf[256];
  const char *name = buf;
  const char *real_name;
  rtx base = x;
  int offset = 0;

  /* A TOC entry cannot be emitted when there is no TOC at all.  */
  if (TARGET_NO_TOC)
    abort ();

  /* When the linker won't eliminate them, don't output duplicate
     TOC entries (this happens on AIX if there is any kind of TOC,
     and on SVR4 under -fPIC or -mrelocatable).  Don't do this for
     CODE_LABELs.  */
  if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
    {
      struct toc_hash_struct *h;
      void * * found;

      /* Create toc_hash_table.  This can't be done at OVERRIDE_OPTIONS
	 time because GGC is not initialised at that point.  */
      if (toc_hash_table == NULL)
	toc_hash_table = htab_create_ggc (1021, toc_hash_function,
					  toc_hash_eq, NULL);

      h = ggc_alloc (sizeof (*h));
      h->key = x;
      h->key_mode = mode;
      h->labelno = labelno;

      found = htab_find_slot (toc_hash_table, h, 1);
      if (*found == NULL)
	*found = h;
      else  /* This is indeed a duplicate.
	       Set this label equal to that label.  */
	{
	  fputs ("\t.set ", file);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d,", labelno);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
				   found)->labelno));
	  return;
	}
    }

  /* If we're going to put a double constant in the TOC, make sure it's
     aligned properly when strict alignment is on.  */
  if (GET_CODE (x) == CONST_DOUBLE
      && STRICT_ALIGNMENT
      && GET_MODE_BITSIZE (mode) >= 64
      && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
    ASM_OUTPUT_ALIGN (file, 3);
  }

  (*targetm.asm_out.internal_label) (file, "LC", labelno);

  /* Handle FP constants specially.  Note that if we have a minimal
     TOC, things we put here aren't actually in the TOC, so we can allow
     FP constants.  */
  if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
    {
      REAL_VALUE_TYPE rv;
      long k[4];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff,
		     k[2] & 0xffffffff, k[3] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff,
		   k[2] & 0xffffffff, k[3] & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff,
		     k[2] & 0xffffffff, k[3] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff,
		   k[2] & 0xffffffff, k[3] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      REAL_VALUE_TYPE rv;
      long k[2];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      REAL_VALUE_TYPE rv;
      long l;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  /* An SFmode value is padded out to a full 64-bit slot.  */
	  fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx\n", l & 0xffffffff);
	  return;
	}
    }
  else if (GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;

      /* Split the integer value into two 32-bit halves, sign-extending
	 a CONST_INT into the high half.  */
      if (GET_CODE (x) == CONST_DOUBLE)
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}
      else
#if HOST_BITS_PER_WIDE_INT == 32
	{
	  low = INTVAL (x);
	  high = (low & 0x80000000) ? ~0 : 0;
	}
#else
	{
	  low = INTVAL (x) & 0xffffffff;
	  high = (HOST_WIDE_INT) INTVAL (x) >> 32;
	}
#endif

      /* TOC entries are always Pmode-sized, but since this
	 is a bigendian machine then if we're putting smaller
	 integer constants in the TOC we have to pad them.
	 (This is still a win over putting the constants in
	 a separate constant pool, because then we'd have
	 to have both a TOC entry _and_ the actual constant.)

	 For a 32-bit target, CONST_INT values are loaded and shifted
	 entirely within `low' and can be stored in one TOC entry.  */

      if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
	abort ();/* It would be easy to make this work, but it doesn't now.  */

      if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
	{
	  /* Shift the value left so it occupies the most-significant
	     bits of the Pmode-sized slot.  */
#if HOST_BITS_PER_WIDE_INT == 32
	  lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
			 POINTER_SIZE, &low, &high, 0);
#else
	  low |= high << 32;
	  low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
	  high = (HOST_WIDE_INT) low >> 32;
	  low &= 0xffffffff;
#endif
	}

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc ID_%lx_%lx[TC],",
		     (long) high & 0xffffffff, (long) low & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   (long) high & 0xffffffff, (long) low & 0xffffffff);
	  return;
	}
      else
	{
	  if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
	    {
	      /* Double-word value on a 32-bit target: two entries.  */
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc ID_%lx_%lx[TC],",
			 (long) high & 0xffffffff, (long) low & 0xffffffff);
	      fprintf (file, "0x%lx,0x%lx\n",
		       (long) high & 0xffffffff, (long) low & 0xffffffff);
	    }
	  else
	    {
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
	      fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
	    }
	  return;
	}
    }

  /* Anything else is an address: either a bare SYMBOL_REF/LABEL_REF/
     CODE_LABEL, or a CONST wrapping symbol-plus-offset.  */
  if (GET_CODE (x) == CONST)
    {
      if (GET_CODE (XEXP (x, 0)) != PLUS)
	abort ();

      base = XEXP (XEXP (x, 0), 0);
      offset = INTVAL (XEXP (XEXP (x, 0), 1));
    }

  if (GET_CODE (base) == SYMBOL_REF)
    name = XSTR (base, 0);
  else if (GET_CODE (base) == LABEL_REF)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
  else if (GET_CODE (base) == CODE_LABEL)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
  else
    abort ();

  real_name = (*targetm.strip_name_encoding) (name);
  if (TARGET_MINIMAL_TOC)
    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
  else
    {
      fprintf (file, "\t.tc %s", real_name);

      /* Encode a nonzero offset into the entry name (.Nn / .Pn) so
	 distinct offsets get distinct TOC entries.  */
      if (offset < 0)
	fprintf (file, ".N%d", - offset);
      else if (offset)
	fprintf (file, ".P%d", offset);

      fputs ("[TC],", file);
    }

  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a TOC reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  if (VTABLE_NAME_P (name))
    {
      RS6000_OUTPUT_BASENAME (file, name);
      if (offset < 0)
	fprintf (file, "%d", offset);
      else if (offset > 0)
	fprintf (file, "+%d", offset);
    }
  else
    output_addr_const (file, x);
  putc ('\n', file);
}
11965 \f
11966 /* Output an assembler pseudo-op to write an ASCII string of N characters
11967 starting at P to FILE.
11968
11969 On the RS/6000, we have to do this using the .byte operation and
11970 write out special characters outside the quoted string.
11971 Also, the assembler is broken; very long strings are truncated,
11972 so we must artificially break them up early. */
11973
void
output_ascii (file, p, n)
     FILE *file;
     const char *p;
     int n;
{
  char ch;
  int pos, in_string_count;
  const char *open_string = "\t.byte \"";
  const char *open_decimal = "\t.byte ";
  const char *close_pending = NULL;

  in_string_count = 0;
  for (pos = 0; pos < n; pos++)
    {
      ch = *p++;

      if (ch < ' ' || ch >= 0177)
	{
	  /* Unprintable character: emit it as a decimal .byte operand,
	     first closing any quoted string in progress.  */
	  if (open_decimal)
	    fputs (open_decimal, file);
	  fprintf (file, "%d", ch);

	  open_string = "\n\t.byte \"";
	  open_decimal = ", ";
	  close_pending = "\n";
	  in_string_count = 0;
	  continue;
	}

      /* Printable character: emit it inside a quoted .byte string.  */
      if (open_string)
	fputs (open_string, file);
      putc (ch, file);

      /* The assembler wants a doubled quote for a literal one.  */
      if (ch == '"')
	{
	  putc (ch, file);
	  ++in_string_count;
	}

      open_string = NULL;
      open_decimal = "\"\n\t.byte ";
      close_pending = "\"\n";
      ++in_string_count;

      /* The assembler truncates very long strings, so split them up
	 well before that happens.  */
      if (in_string_count >= 512)
	{
	  fputs (close_pending, file);

	  open_string = "\t.byte \"";
	  open_decimal = "\t.byte ";
	  close_pending = NULL;
	  in_string_count = 0;
	}
    }

  /* Now close the string if we have written one.  Then end the line.  */
  if (close_pending)
    fputs (close_pending, file);
}
12035 \f
12036 /* Generate a unique section name for FILENAME for a section type
12037 represented by SECTION_DESC. Output goes into BUF.
12038
12039 SECTION_DESC can be any string, as long as it is different for each
12040 possible section type.
12041
12042 We name the section in the same manner as xlc. The name begins with an
12043 underscore followed by the filename (after stripping any leading directory
12044 names) with the last period replaced by the string SECTION_DESC. If
12045 FILENAME does not contain a period, SECTION_DESC is appended to the end of
12046 the name. */
12047
void
rs6000_gen_section_name (buf, filename, section_desc)
     char **buf;
     const char *filename;
     const char *section_desc;
{
  const char *q, *after_last_slash, *last_period = 0;
  char *p;
  int len;

  /* Find the start of the basename, and the last period within the
     basename.  Reset the period on every slash: a period inside a
     directory component (e.g. "foo.d/bar") must not count, or
     SECTION_DESC would be dropped from the generated name.  */
  after_last_slash = filename;
  for (q = filename; *q; q++)
    {
      if (*q == '/')
	{
	  after_last_slash = q + 1;
	  last_period = 0;
	}
      else if (*q == '.')
	last_period = q;
    }

  /* '_' + basename + SECTION_DESC + terminating NUL.  */
  len = strlen (after_last_slash) + strlen (section_desc) + 2;
  *buf = (char *) xmalloc (len);

  p = *buf;
  *p++ = '_';

  /* Copy the alphanumeric characters of the basename, substituting
     SECTION_DESC for everything from the last period onward.  */
  for (q = after_last_slash; *q; q++)
    {
      if (q == last_period)
	{
	  strcpy (p, section_desc);
	  p += strlen (section_desc);
	  break;
	}

      else if (ISALNUM (*q))
	*p++ = *q;
    }

  /* No period in the basename: append SECTION_DESC at the end.  */
  if (last_period == 0)
    strcpy (p, section_desc);
  else
    *p = '\0';
}
12091 \f
/* Emit RTL to call the profiling routine (RS6000_MCOUNT) at function
   entry, for the ABIs that do it from RTL rather than from
   output_function_profiler below.  */

void
output_profile_hook (labelno)
     int labelno ATTRIBUTE_UNUSED;
{
  if (DEFAULT_ABI == ABI_AIX)
    {
#ifdef NO_PROFILE_COUNTERS
      /* No per-function counter word: call mcount with no argument.  */
      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
#else
      char buf[30];
      const char *label_name;
      rtx fun;

      /* Pass the address of the per-function "LP" counter label to
	 mcount.  */
      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
      fun = gen_rtx_SYMBOL_REF (Pmode, label_name);

      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
			 fun, Pmode);
#endif
    }
  else if (DEFAULT_ABI == ABI_DARWIN)
    {
      const char *mcount_name = RS6000_MCOUNT;
      int caller_addr_regno = LINK_REGISTER_REGNUM;

      /* Be conservative and always set this, at least for now.  */
      current_function_uses_pic_offset_table = 1;

#if TARGET_MACHO
      /* For PIC code, set up a stub and collect the caller's address
	 from r0, which is where the prologue puts it.  */
      if (flag_pic)
	{
	  mcount_name = machopic_stub_name (mcount_name);
	  if (current_function_uses_pic_offset_table)
	    caller_addr_regno = 0;
	}
#endif
      /* mcount receives the caller's return address as its argument.  */
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
			 0, VOIDmode, 1,
			 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
    }
}
12138
/* Write function profiler code.  Emits the assembly that saves LR,
   loads the address of the per-function "LP" label (directly, via the
   GOT, or PC-relative, depending on -fpic/-fPIC) and calls mcount,
   for the ABIs not handled by output_profile_hook.  */

void
output_function_profiler (file, labelno)
  FILE *file;
  int labelno;
{
  char buf[100];
  int save_lr = 8;	/* stack offset at which LR is stored */

  /* BUF gets the assembler name of the per-function profile label.  */
  ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_V4:
      save_lr = 4;
      /* Fall through.  */

    case ABI_AIX_NODESC:
      if (!TARGET_32BIT)
	{
	  warning ("no profiling of 64-bit code for this ABI");
	  return;
	}
      fprintf (file, "\tmflr %s\n", reg_names[0]);
      if (flag_pic == 1)
	{
	  /* -fpic: fetch the label's address from the GOT via r12.  */
	  fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
	  asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@got(%s)\n", reg_names[12]);
	}
      else if (flag_pic > 1)
	{
	  /* -fPIC: compute the label's address PC-relatively.  */
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  /* Now, we need to get the address of the label.  */
	  fputs ("\tbl 1f\n\t.long ", file);
	  assemble_name (file, buf);
	  fputs ("-.\n1:", file);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
	  asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
		       reg_names[0], reg_names[11]);
	  asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
		       reg_names[0], reg_names[0], reg_names[11]);
	}
      else
	{
	  /* Non-PIC: load the label's address with lis/la via r12.  */
	  asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
	  assemble_name (file, buf);
	  fputs ("@ha\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@l(%s)\n", reg_names[12]);
	}

      if (current_function_needs_context && DEFAULT_ABI == ABI_AIX_NODESC)
	{
	  /* Save and restore the static chain register around the
	     mcount call -- presumably because mcount may clobber it;
	     confirm against the mcount implementation.  */
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[STATIC_CHAIN_REGNUM],
		       12, reg_names[1]);
	  fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	  asm_fprintf (file, "\t{l|lwz} %s,%d(%s)\n",
		       reg_names[STATIC_CHAIN_REGNUM],
		       12, reg_names[1]);
	}
      else
	/* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH.  */
	fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
      break;

    case ABI_AIX:
    case ABI_DARWIN:
      /* Don't do anything, done in output_profile_hook ().  */
      break;
    }
}
12223
12224 \f
/* Tell the scheduler to use the DFA (automaton based) pipeline
   description for this port rather than the old function-unit
   interface.  */

static int
rs6000_use_dfa_pipeline_interface ()
{
  return 1;
}
12230
12231 static int
12232 rs6000_multipass_dfa_lookahead ()
12233 {
12234 if (rs6000_cpu == PROCESSOR_POWER4)
12235 return 4;
12236 else
12237 return 1;
12238 }
12239
12240 /* Power4 load update and store update instructions are cracked into a
12241 load or store and an integer insn which are executed in the same cycle.
12242 Branches have their own dispatch slot which does not count against the
12243 GCC issue rate, but it changes the program flow so there are no other
12244 instructions to issue in this cycle. */
12245
12246 static int
12247 rs6000_variable_issue (stream, verbose, insn, more)
12248 FILE *stream ATTRIBUTE_UNUSED;
12249 int verbose ATTRIBUTE_UNUSED;
12250 rtx insn;
12251 int more;
12252 {
12253 if (GET_CODE (PATTERN (insn)) == USE
12254 || GET_CODE (PATTERN (insn)) == CLOBBER)
12255 return more;
12256
12257 if (rs6000_cpu == PROCESSOR_POWER4)
12258 {
12259 enum attr_type type = get_attr_type (insn);
12260 if (type == TYPE_LOAD_EXT_U || type == TYPE_LOAD_EXT_UX
12261 || type == TYPE_LOAD_UX || type == TYPE_STORE_UX
12262 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX)
12263 return 0;
12264 else if (type == TYPE_LOAD_U || type == TYPE_STORE_U
12265 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
12266 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR)
12267 return more - 2;
12268 else
12269 return more - 1;
12270 }
12271 else
12272 return more - 1;
12273 }
12274
12275 /* Adjust the cost of a scheduling dependency. Return the new cost of
12276 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
12277
12278 static int
12279 rs6000_adjust_cost (insn, link, dep_insn, cost)
12280 rtx insn;
12281 rtx link;
12282 rtx dep_insn ATTRIBUTE_UNUSED;
12283 int cost;
12284 {
12285 if (! recog_memoized (insn))
12286 return 0;
12287
12288 if (REG_NOTE_KIND (link) != 0)
12289 return 0;
12290
12291 if (REG_NOTE_KIND (link) == 0)
12292 {
12293 /* Data dependency; DEP_INSN writes a register that INSN reads
12294 some cycles later. */
12295 switch (get_attr_type (insn))
12296 {
12297 case TYPE_JMPREG:
12298 /* Tell the first scheduling pass about the latency between
12299 a mtctr and bctr (and mtlr and br/blr). The first
12300 scheduling pass will not know about this latency since
12301 the mtctr instruction, which has the latency associated
12302 to it, will be generated by reload. */
12303 return TARGET_POWER ? 5 : 4;
12304 case TYPE_BRANCH:
12305 /* Leave some extra cycles between a compare and its
12306 dependent branch, to inhibit expensive mispredicts. */
12307 if ((rs6000_cpu_attr == CPU_PPC603
12308 || rs6000_cpu_attr == CPU_PPC604
12309 || rs6000_cpu_attr == CPU_PPC604E
12310 || rs6000_cpu_attr == CPU_PPC620
12311 || rs6000_cpu_attr == CPU_PPC630
12312 || rs6000_cpu_attr == CPU_PPC750
12313 || rs6000_cpu_attr == CPU_PPC7400
12314 || rs6000_cpu_attr == CPU_PPC7450
12315 || rs6000_cpu_attr == CPU_POWER4)
12316 && recog_memoized (dep_insn)
12317 && (INSN_CODE (dep_insn) >= 0)
12318 && (get_attr_type (dep_insn) == TYPE_CMP
12319 || get_attr_type (dep_insn) == TYPE_COMPARE
12320 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
12321 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
12322 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
12323 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
12324 return cost + 2;
12325 default:
12326 break;
12327 }
12328 /* Fall out to return default cost. */
12329 }
12330
12331 return cost;
12332 }
12333
12334 /* A C statement (sans semicolon) to update the integer scheduling
12335 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
12336 INSN earlier, increase the priority to execute INSN later. Do not
12337 define this macro if you do not need to adjust the scheduling
12338 priorities of insns. */
12339
static int
rs6000_adjust_priority (insn, priority)
     rtx insn ATTRIBUTE_UNUSED;
     int priority;
{
  /* On machines (like the 750) which have asymmetric integer units,
     where one integer unit can do multiply and divides and the other
     can't, reduce the priority of multiply/divide so it is scheduled
     before other integer operations.  */

  /* The experiment below is disabled; the hook currently returns
     PRIORITY unchanged.  The block is kept for reference (it also
     prints to stderr, so it must not be enabled as-is).  */
#if 0
  if (! INSN_P (insn))
    return priority;

  if (GET_CODE (PATTERN (insn)) == USE)
    return priority;

  switch (rs6000_cpu_attr) {
  case CPU_PPC750:
    switch (get_attr_type (insn))
      {
      default:
	break;

      case TYPE_IMUL:
      case TYPE_IDIV:
	fprintf (stderr, "priority was %#x (%d) before adjustment\n",
		 priority, priority);
	if (priority >= 0 && priority < 0x01000000)
	  priority >>= 3;
	break;
      }
  }
#endif

  return priority;
}
12377
12378 /* Return how many instructions the machine can issue per cycle. */
12379
12380 static int
12381 rs6000_issue_rate ()
12382 {
12383 switch (rs6000_cpu_attr) {
12384 case CPU_RIOS1: /* ? */
12385 case CPU_RS64A:
12386 case CPU_PPC601: /* ? */
12387 case CPU_PPC7450:
12388 return 3;
12389 case CPU_PPC440:
12390 case CPU_PPC603:
12391 case CPU_PPC750:
12392 case CPU_PPC7400:
12393 return 2;
12394 case CPU_RIOS2:
12395 case CPU_PPC604:
12396 case CPU_PPC604E:
12397 case CPU_PPC620:
12398 case CPU_PPC630:
12399 case CPU_POWER4:
12400 return 4;
12401 default:
12402 return 1;
12403 }
12404 }
12405
12406 \f
12407 /* Length in units of the trampoline for entering a nested function. */
12408
12409 int
12410 rs6000_trampoline_size ()
12411 {
12412 int ret = 0;
12413
12414 switch (DEFAULT_ABI)
12415 {
12416 default:
12417 abort ();
12418
12419 case ABI_AIX:
12420 ret = (TARGET_32BIT) ? 12 : 24;
12421 break;
12422
12423 case ABI_DARWIN:
12424 case ABI_V4:
12425 case ABI_AIX_NODESC:
12426 ret = (TARGET_32BIT) ? 40 : 48;
12427 break;
12428 }
12429
12430 return ret;
12431 }
12432
12433 /* Emit RTL insns to initialize the variable parts of a trampoline.
12434 FNADDR is an RTX for the address of the function's pure code.
12435 CXT is an RTX for the static chain value for the function. */
12436
void
rs6000_initialize_trampoline (addr, fnaddr, cxt)
     rtx addr;
     rtx fnaddr;
     rtx cxt;
{
  enum machine_mode pmode = Pmode;
  int regsize = (TARGET_32BIT) ? 4 : 8;	/* width of one saved word */
  rtx ctx_reg = force_reg (pmode, cxt);

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

/* Macros to shorten the code expansions below.  */
#define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
#define MEM_PLUS(addr,offset) \
  gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))

    /* Under AIX, just build the 3 word function descriptor */
    case ABI_AIX:
      {
	rtx fn_reg = gen_reg_rtx (pmode);
	rtx toc_reg = gen_reg_rtx (pmode);
	/* Copy the target's entry point and TOC pointer from its
	   descriptor, then store the static chain as the third word.  */
	emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
	emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
	emit_move_insn (MEM_DEREF (addr), fn_reg);
	emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
	emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
      }
      break;

    /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
    case ABI_DARWIN:
    case ABI_V4:
    case ABI_AIX_NODESC:
      emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
			 FALSE, VOIDmode, 4,
			 addr, pmode,
			 GEN_INT (rs6000_trampoline_size ()), SImode,
			 fnaddr, pmode,
			 ctx_reg, pmode);
      break;
    }

  return;
}
12485
12486 \f
12487 /* Table of valid machine attributes. */
12488
const struct attribute_spec rs6000_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  /* "longcall" forces an indirect (long) call sequence; "shortcall"
     overrides a -mlongcall default.  Both share one handler.  */
  { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
  { NULL, 0, 0, false, false, false, NULL }
};
12496
12497 /* Handle a "longcall" or "shortcall" attribute; arguments as in
12498 struct attribute_spec.handler. */
12499
12500 static tree
12501 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
12502 tree *node;
12503 tree name;
12504 tree args ATTRIBUTE_UNUSED;
12505 int flags ATTRIBUTE_UNUSED;
12506 bool *no_add_attrs;
12507 {
12508 if (TREE_CODE (*node) != FUNCTION_TYPE
12509 && TREE_CODE (*node) != FIELD_DECL
12510 && TREE_CODE (*node) != TYPE_DECL)
12511 {
12512 warning ("`%s' attribute only applies to functions",
12513 IDENTIFIER_POINTER (name));
12514 *no_add_attrs = true;
12515 }
12516
12517 return NULL_TREE;
12518 }
12519
12520 /* Set longcall attributes on all functions declared when
12521 rs6000_default_long_calls is true. */
12522 static void
12523 rs6000_set_default_type_attributes (type)
12524 tree type;
12525 {
12526 if (rs6000_default_long_calls
12527 && (TREE_CODE (type) == FUNCTION_TYPE
12528 || TREE_CODE (type) == METHOD_TYPE))
12529 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
12530 NULL_TREE,
12531 TYPE_ATTRIBUTES (type));
12532 }
12533
12534 /* Return a reference suitable for calling a function with the
12535 longcall attribute. */
12536
12537 struct rtx_def *
12538 rs6000_longcall_ref (call_ref)
12539 rtx call_ref;
12540 {
12541 const char *call_name;
12542 tree node;
12543
12544 if (GET_CODE (call_ref) != SYMBOL_REF)
12545 return call_ref;
12546
12547 /* System V adds '.' to the internal name, so skip them. */
12548 call_name = XSTR (call_ref, 0);
12549 if (*call_name == '.')
12550 {
12551 while (*call_name == '.')
12552 call_name++;
12553
12554 node = get_identifier (call_name);
12555 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
12556 }
12557
12558 return force_reg (Pmode, call_ref);
12559 }
12560
12561 \f
12562 #ifdef USING_ELFOS_H
12563
12564 /* A C statement or statements to switch to the appropriate section
12565 for output of RTX in mode MODE. You can assume that RTX is some
12566 kind of constant in RTL. The argument MODE is redundant except in
12567 the case of a `const_int' rtx. Select the section by calling
12568 `text_section' or one of the alternatives for other sections.
12569
12570 Do not define this macro if you put all constants in the read-only
12571 data section. */
12572
12573 static void
12574 rs6000_elf_select_rtx_section (mode, x, align)
12575 enum machine_mode mode;
12576 rtx x;
12577 unsigned HOST_WIDE_INT align;
12578 {
12579 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
12580 toc_section ();
12581 else
12582 default_elf_select_rtx_section (mode, x, align);
12583 }
12584
12585 /* A C statement or statements to switch to the appropriate
12586 section for output of DECL. DECL is either a `VAR_DECL' node
12587 or a constant of some sort. RELOC indicates whether forming
12588 the initial value of DECL requires link-time relocations. */
12589
12590 static void
12591 rs6000_elf_select_section (decl, reloc, align)
12592 tree decl;
12593 int reloc;
12594 unsigned HOST_WIDE_INT align;
12595 {
12596 default_elf_select_section_1 (decl, reloc, align,
12597 flag_pic || DEFAULT_ABI == ABI_AIX);
12598 }
12599
12600 /* A C statement to build up a unique section name, expressed as a
12601 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
12602 RELOC indicates whether the initial value of EXP requires
12603 link-time relocations. If you do not define this macro, GCC will use
12604 the symbol name prefixed by `.' as the section name. Note - this
12605 macro can now be called for uninitialized data items as well as
12606 initialized data and functions. */
12607
12608 static void
12609 rs6000_elf_unique_section (decl, reloc)
12610 tree decl;
12611 int reloc;
12612 {
12613 default_unique_section_1 (decl, reloc,
12614 flag_pic || DEFAULT_ABI == ABI_AIX);
12615 }
12616
12617 \f
/* If we are referencing a function that is static or is known to be
   in this file, make the SYMBOL_REF special.  We can use this to indicate
   that we can branch to this function without emitting a no-op after the
   call.  For real AIX calling sequences, we also replace the
   function name with the real name (1 or 2 leading .'s), rather than
   the function descriptor name.  This saves a lot of overriding code
   to read the prefixes.  */

static void
rs6000_elf_encode_section_info (decl, first)
     tree decl;
     int first;
{
  /* Only encode on the first invocation for a given decl.  */
  if (!first)
    return;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      rtx sym_ref = XEXP (DECL_RTL (decl), 0);
      /* SYMBOL_REF_FLAG marks locally-binding symbols; see the comment
	 above about branching without a no-op.  */
      if ((*targetm.binds_local_p) (decl))
	SYMBOL_REF_FLAG (sym_ref) = 1;

      if (DEFAULT_ABI == ABI_AIX)
	{
	  /* NOTE(review): len1 is always 1 here, since this branch is
	     only reached when DEFAULT_ABI == ABI_AIX; the ternary and
	     the second '.' stored below (overwritten by the memcpy when
	     len1 == 1) look like leftovers from a two-dot variant.  */
	  size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
	  size_t len2 = strlen (XSTR (sym_ref, 0));
	  char *str = alloca (len1 + len2 + 1);
	  str[0] = '.';
	  str[1] = '.';
	  /* Copy the original name, including its NUL, after the dots.  */
	  memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);

	  XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
	}
    }
  else if (rs6000_sdata != SDATA_NONE
	   && DEFAULT_ABI == ABI_V4
	   && TREE_CODE (decl) == VAR_DECL)
    {
      rtx sym_ref = XEXP (DECL_RTL (decl), 0);
      int size = int_size_in_bytes (TREE_TYPE (decl));
      tree section_name = DECL_SECTION_NAME (decl);
      const char *name = (char *)0;
      int len = 0;

      if ((*targetm.binds_local_p) (decl))
	SYMBOL_REF_FLAG (sym_ref) = 1;

      if (section_name)
	{
	  if (TREE_CODE (section_name) == STRING_CST)
	    {
	      name = TREE_STRING_POINTER (section_name);
	      len = TREE_STRING_LENGTH (section_name);
	    }
	  else
	    abort ();
	}

      /* A variable counts as small data when it is explicitly placed
	 in one of the small-data sections, or — lacking a section
	 attribute — when its size fits within the -G threshold.  */
      if (name
	  ? ((len == sizeof (".sdata") - 1
	      && strcmp (name, ".sdata") == 0)
	     || (len == sizeof (".sdata2") - 1
		 && strcmp (name, ".sdata2") == 0)
	     || (len == sizeof (".sbss") - 1
		 && strcmp (name, ".sbss") == 0)
	     || (len == sizeof (".sbss2") - 1
		 && strcmp (name, ".sbss2") == 0)
	     || (len == sizeof (".PPC.EMB.sdata0") - 1
		 && strcmp (name, ".PPC.EMB.sdata0") == 0)
	     || (len == sizeof (".PPC.EMB.sbss0") - 1
		 && strcmp (name, ".PPC.EMB.sbss0") == 0))
	  : (size > 0 && size <= g_switch_value))
	{
	  /* Prefix the symbol name with '@' to mark it as small data;
	     rs6000_elf_strip_name_encoding removes the prefix again.  */
	  size_t len = strlen (XSTR (sym_ref, 0));
	  char *str = alloca (len + 2);

	  str[0] = '@';
	  memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
	  XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
	}
    }
}
12700
/* Strip the leading '*' and '@' encoding characters (added by
   rs6000_elf_encode_section_info) from STR and return a pointer to
   the real name.  */

static const char *
rs6000_elf_strip_name_encoding (str)
     const char *str;
{
  return str + strspn (str, "*@");
}
12709
12710 static bool
12711 rs6000_elf_in_small_data_p (decl)
12712 tree decl;
12713 {
12714 if (rs6000_sdata == SDATA_NONE)
12715 return false;
12716
12717 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
12718 {
12719 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
12720 if (strcmp (section, ".sdata") == 0
12721 || strcmp (section, ".sdata2") == 0
12722 || strcmp (section, ".sbss") == 0)
12723 return true;
12724 }
12725 else
12726 {
12727 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
12728
12729 if (size > 0
12730 && size <= g_switch_value
12731 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
12732 return true;
12733 }
12734
12735 return false;
12736 }
12737
12738 #endif /* USING_ELFOS_H */
12739
12740 \f
12741 /* Return a REG that occurs in ADDR with coefficient 1.
12742 ADDR can be effectively incremented by incrementing REG.
12743
12744 r0 is special and we must not select it as an address
12745 register by this routine since our caller will try to
12746 increment the returned register via an "la" instruction. */
12747
12748 struct rtx_def *
12749 find_addr_reg (addr)
12750 rtx addr;
12751 {
12752 while (GET_CODE (addr) == PLUS)
12753 {
12754 if (GET_CODE (XEXP (addr, 0)) == REG
12755 && REGNO (XEXP (addr, 0)) != 0)
12756 addr = XEXP (addr, 0);
12757 else if (GET_CODE (XEXP (addr, 1)) == REG
12758 && REGNO (XEXP (addr, 1)) != 0)
12759 addr = XEXP (addr, 1);
12760 else if (CONSTANT_P (XEXP (addr, 0)))
12761 addr = XEXP (addr, 1);
12762 else if (CONSTANT_P (XEXP (addr, 1)))
12763 addr = XEXP (addr, 0);
12764 else
12765 abort ();
12766 }
12767 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
12768 return addr;
12769 abort ();
12770 }
12771
/* Report a fatal "bad address" error, attaching OP so the offending
   address appears in the diagnostic.  Does not return.  */
void
rs6000_fatal_bad_address (op)
     rtx op;
{
  fatal_insn ("bad address", op);
}
12778
12779 #if TARGET_MACHO
12780
#if 0
/* Returns 1 if OP is either a symbol reference or a sum of a symbol
   reference and a constant.

   NOTE(review): this whole predicate is compiled out (#if 0).  If it
   is ever re-enabled, check the CONST case below: the `||' / `&&'
   mix is unparenthesized, so it parses as
   SYMBOL_REF || ((SYMBOL_REF-or-LABEL_REF) && CONST_INT) — confirm
   that is the intended grouping.  */

int
symbolic_operand (op)
     rtx op;
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;
    case CONST:
      /* Look inside the CONST wrapper.  */
      op = XEXP (op, 0);
      return (GET_CODE (op) == SYMBOL_REF ||
	      (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
	       || GET_CODE (XEXP (op, 0)) == LABEL_REF)
	      && GET_CODE (XEXP (op, 1)) == CONST_INT);
    default:
      return 0;
    }
}
#endif
12805
12806 #ifdef RS6000_LONG_BRANCH
12807
/* Linked list of compiler-generated long-branch stubs, built from
   TREE_LIST nodes; consumed and cleared by output_compiler_stub.  */
static tree stub_list = 0;

/* ADD_COMPILER_STUB adds the compiler generated stub for handling
   procedure calls to the linked list.  */

void
add_compiler_stub (label_name, function_name, line_number)
     tree label_name;
     tree function_name;
     int line_number;
{
  /* Node layout: PURPOSE = function name, VALUE = label name,
     TYPE = source line number (see the STUB_* accessors below).  */
  tree stub = build_tree_list (function_name, label_name);
  TREE_TYPE (stub) = build_int_2 (line_number, 0);
  TREE_CHAIN (stub) = stub_list;
  stub_list = stub;
}
12824
/* Accessors for the stub TREE_LIST nodes built by add_compiler_stub.  */
#define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
#define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
#define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
12828
/* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
   handling procedure calls from the linked list and initializes the
   linked list.

   NOTE(review): label_buf/tmp_buf are fixed 256-byte buffers filled
   with unbounded strcpy/strcat of identifier text; very long function
   names would overflow them — confirm an upstream length limit.  */

void
output_compiler_stub ()
{
  char tmp_buf[256];
  char label_buf[256];
  tree stub;

  if (!flag_pic)
    for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
      {
	/* Emit the stub's label.  */
	fprintf (asm_out_file,
		 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));

#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
	if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	  fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */

	/* Build the assembler-level target name: a '*'-prefixed
	   identifier is used verbatim (minus the '*'), otherwise an
	   '_' is prepended.  */
	if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
	  strcpy (label_buf,
		  IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
	else
	  {
	    label_buf[0] = '_';
	    strcpy (label_buf+1,
		    IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
	  }

	/* Load the full 32-bit target address into r12 and branch
	   through the count register.  */
	strcpy (tmp_buf, "lis r12,hi16(");
	strcat (tmp_buf, label_buf);
	strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
	strcat (tmp_buf, label_buf);
	strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
	output_asm_insn (tmp_buf, 0);

#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
	if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	  fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
      }

  /* Reset the list so stubs are emitted only once.  */
  stub_list = 0;
}
12876
12877 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
12878 already there or not. */
12879
12880 int
12881 no_previous_def (function_name)
12882 tree function_name;
12883 {
12884 tree stub;
12885 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
12886 if (function_name == STUB_FUNCTION_NAME (stub))
12887 return 0;
12888 return 1;
12889 }
12890
12891 /* GET_PREV_LABEL gets the label name from the previous definition of
12892 the function. */
12893
12894 tree
12895 get_prev_label (function_name)
12896 tree function_name;
12897 {
12898 tree stub;
12899 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
12900 if (function_name == STUB_FUNCTION_NAME (stub))
12901 return STUB_LABEL_NAME (stub);
12902 return 0;
12903 }
12904
/* INSN is either a function call or a millicode call.  It may have an
   unconditional jump in its delay slot.

   CALL_DEST is the routine we are calling.  Returns the assembler
   template for the call (static buffer, overwritten per call).  */

char *
output_call (insn, call_dest, operand_number)
     rtx insn;
     rtx call_dest;
     int operand_number;
{
  static char buf[256];
  /* Long-branch calls to a known symbol are routed through a compiler
     stub (jbsr); everything else is a plain bl.  */
  if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
    {
      tree labelname;
      tree funname = get_identifier (XSTR (call_dest, 0));

      if (no_previous_def (funname))
	{
	  /* First call to this function: invent a local label and
	     register a new stub.  */
	  int line_number = 0;
	  rtx label_rtx = gen_label_rtx ();
	  char *label_buf, temp_buf[256];
	  ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
				       CODE_LABEL_NUMBER (label_rtx));
	  label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
	  labelname = get_identifier (label_buf);
	  /* Walk backwards to the nearest NOTE to find a source line
	     number for the stub's debug output.  */
	  for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
	  if (insn)
	    line_number = NOTE_LINE_NUMBER (insn);
	  add_compiler_stub (labelname, funname, line_number);
	}
      else
	labelname = get_prev_label (funname);

      /* %.246s bounds the label text so BUF cannot overflow.  */
      sprintf (buf, "jbsr %%z%d,%.246s",
	       operand_number, IDENTIFIER_POINTER (labelname));
      return buf;
    }
  else
    {
      sprintf (buf, "bl %%z%d", operand_number);
      return buf;
    }
}
12949
12950 #endif /* RS6000_LONG_BRANCH */
12951
/* Write into BUF the local label "L<N>$<SYMBOL>", preserving an
   existing leading quote or adding surrounding quotes when SYMBOL
   contains characters that need quoting.  LENGTH is unused here; it
   is kept for symmetry with the companion GEN_*_NAME_FOR_SYMBOL
   macros.  BUF must be large enough for the label (callers allocate
   strlen (SYMBOL) + 32).  */
#define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N)		\
  do {								\
    const char *const symbol_ = (SYMBOL);			\
    char *buffer_ = (BUF);					\
    if (symbol_[0] == '"')					\
      {								\
	sprintf(buffer_, "\"L%d$%s", (N), symbol_+1);		\
      }								\
    else if (name_needs_quotes(symbol_))			\
      {								\
	sprintf(buffer_, "\"L%d$%s\"", (N), symbol_);		\
      }								\
    else							\
      {								\
	sprintf(buffer_, "L%d$%s", (N), symbol_);		\
      }								\
  } while (0)
12969
12970
/* Generate PIC and indirect symbol stubs.  FILE is the assembler
   output stream, SYMB the target symbol and STUB the stub's own
   symbol name.  */

void
machopic_output_stub (file, symb, stub)
     FILE *file;
     const char *symb, *stub;
{
  unsigned int length;
  char *symbol_name, *lazy_ptr_name;
  char *local_label_0;
  static int label = 0;

  /* Lose our funky encoding stuff so it doesn't contaminate the stub.  */
  symb = (*targetm.strip_name_encoding) (symb);

  /* Bump the per-stub counter (its value is not used below).  */
  label += 1;

  /* Derive the decorated names; each buffer leaves 32 bytes of
     headroom for the added decoration.  */
  length = strlen (symb);
  symbol_name = alloca (length + 32);
  GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);

  lazy_ptr_name = alloca (length + 32);
  GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);

  local_label_0 = alloca (length + 32);
  GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);

  /* Pick the stub section; flag_pic == 2 selects the PIC variant.  */
  if (flag_pic == 2)
    machopic_picsymbol_stub1_section ();
  else
    machopic_symbol_stub1_section ();
  fprintf (file, "\t.align 2\n");

  fprintf (file, "%s:\n", stub);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

  if (flag_pic == 2)
    {
      /* Compute the lazy pointer's address pc-relatively, load the
	 bound address through it, and jump via the count register.  */
      fprintf (file, "\tmflr r0\n");
      fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
      fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
      fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtlr r0\n");
      fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\tbctr\n");
    }
  else
    fprintf (file, "non-pure not supported\n");

  /* Emit the lazy pointer, initially resolving to the dyld binding
     helper.  */
  machopic_lazy_symbol_ptr_section ();
  fprintf (file, "%s:\n", lazy_ptr_name);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
  fprintf (file, "\t.long dyld_stub_binding_helper\n");
}
13028
/* Legitimize PIC addresses.  If the address is already
   position-independent, we return ORIG.  Newly generated
   position-independent addresses go into a reg.  This is REG if non
   zero, otherwise we allocate register(s) as necessary.  */

/* True iff X (a CONST_INT) fits in a signed 16-bit immediate.  */
#define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)

rtx
rs6000_machopic_legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  rtx base, offset;

  /* Allocate a scratch register unless reload is running (no new
     pseudos may be created then).  */
  if (reg == NULL && ! reload_in_progress && ! reload_completed)
    reg = gen_reg_rtx (Pmode);

  if (GET_CODE (orig) == CONST)
    {
      /* (const (plus pic_offset_table_rtx ...)) is already PIC.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  /* Legitimize both halves of the sum recursively.  */
	  base =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
						    Pmode, reg);
	  offset =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
						    Pmode, reg);
	}
      else
	abort ();

      if (GET_CODE (offset) == CONST_INT)
	{
	  if (SMALL_INT (offset))
	    return plus_constant (base, INTVAL (offset));
	  else if (! reload_in_progress && ! reload_completed)
	    offset = force_reg (Pmode, offset);
	  else
	    {
	      /* During reload a large offset cannot go in a new
		 register, so take the constant from memory instead.  */
 	      rtx mem = force_const_mem (Pmode, orig);
	      return machopic_legitimize_pic_address (mem, Pmode, reg);
	    }
	}
      return gen_rtx (PLUS, Pmode, base, offset);
    }

  /* Fall back on generic machopic code.  */
  return machopic_legitimize_pic_address (orig, mode, reg);
}
13083
/* This is just a placeholder to make linking work without having to
   add this to the generic Darwin EXTRA_SECTIONS.  If -mcall-aix is
   ever needed for Darwin (not too likely!) this would have to get a
   real definition.  */

void
toc_section ()
{
  /* Intentionally empty; see the comment above.  */
}
13093
13094 #endif /* TARGET_MACHO */
13095
13096 #if TARGET_ELF
13097 static unsigned int
13098 rs6000_elf_section_type_flags (decl, name, reloc)
13099 tree decl;
13100 const char *name;
13101 int reloc;
13102 {
13103 unsigned int flags
13104 = default_section_type_flags_1 (decl, name, reloc,
13105 flag_pic || DEFAULT_ABI == ABI_AIX);
13106
13107 if (TARGET_RELOCATABLE)
13108 flags |= SECTION_WRITE;
13109
13110 return flags;
13111 }
13112
13113 /* Record an element in the table of global constructors. SYMBOL is
13114 a SYMBOL_REF of the function to be called; PRIORITY is a number
13115 between 0 and MAX_INIT_PRIORITY.
13116
13117 This differs from default_named_section_asm_out_constructor in
13118 that we have special handling for -mrelocatable. */
13119
13120 static void
13121 rs6000_elf_asm_out_constructor (symbol, priority)
13122 rtx symbol;
13123 int priority;
13124 {
13125 const char *section = ".ctors";
13126 char buf[16];
13127
13128 if (priority != DEFAULT_INIT_PRIORITY)
13129 {
13130 sprintf (buf, ".ctors.%.5u",
13131 /* Invert the numbering so the linker puts us in the proper
13132 order; constructors are run from right to left, and the
13133 linker sorts in increasing order. */
13134 MAX_INIT_PRIORITY - priority);
13135 section = buf;
13136 }
13137
13138 named_section_flags (section, SECTION_WRITE);
13139 assemble_align (POINTER_SIZE);
13140
13141 if (TARGET_RELOCATABLE)
13142 {
13143 fputs ("\t.long (", asm_out_file);
13144 output_addr_const (asm_out_file, symbol);
13145 fputs (")@fixup\n", asm_out_file);
13146 }
13147 else
13148 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13149 }
13150
13151 static void
13152 rs6000_elf_asm_out_destructor (symbol, priority)
13153 rtx symbol;
13154 int priority;
13155 {
13156 const char *section = ".dtors";
13157 char buf[16];
13158
13159 if (priority != DEFAULT_INIT_PRIORITY)
13160 {
13161 sprintf (buf, ".dtors.%.5u",
13162 /* Invert the numbering so the linker puts us in the proper
13163 order; constructors are run from right to left, and the
13164 linker sorts in increasing order. */
13165 MAX_INIT_PRIORITY - priority);
13166 section = buf;
13167 }
13168
13169 named_section_flags (section, SECTION_WRITE);
13170 assemble_align (POINTER_SIZE);
13171
13172 if (TARGET_RELOCATABLE)
13173 {
13174 fputs ("\t.long (", asm_out_file);
13175 output_addr_const (asm_out_file, symbol);
13176 fputs (")@fixup\n", asm_out_file);
13177 }
13178 else
13179 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13180 }
13181 #endif
13182
13183 #if TARGET_XCOFF
/* Emit the assembler directive making NAME global: GLOBAL_ASM_OP,
   then the name (via RS6000_OUTPUT_BASENAME), then a newline.  */
static void
rs6000_xcoff_asm_globalize_label (stream, name)
     FILE *stream;
     const char *name;
{
  fputs (GLOBAL_ASM_OP, stream);
  RS6000_OUTPUT_BASENAME (stream, name);
  putc ('\n', stream);
}
13193
13194 static void
13195 rs6000_xcoff_asm_named_section (name, flags)
13196 const char *name;
13197 unsigned int flags;
13198 {
13199 int smclass;
13200 static const char * const suffix[3] = { "PR", "RO", "RW" };
13201
13202 if (flags & SECTION_CODE)
13203 smclass = 0;
13204 else if (flags & SECTION_WRITE)
13205 smclass = 2;
13206 else
13207 smclass = 1;
13208
13209 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
13210 (flags & SECTION_CODE) ? "." : "",
13211 name, suffix[smclass], flags & SECTION_ENTSIZE);
13212 }
13213
13214 static void
13215 rs6000_xcoff_select_section (decl, reloc, align)
13216 tree decl;
13217 int reloc;
13218 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
13219 {
13220 if (decl_readonly_section_1 (decl, reloc, 1))
13221 {
13222 if (TREE_PUBLIC (decl))
13223 read_only_data_section ();
13224 else
13225 read_only_private_data_section ();
13226 }
13227 else
13228 {
13229 if (TREE_PUBLIC (decl))
13230 data_section ();
13231 else
13232 private_data_section ();
13233 }
13234 }
13235
13236 static void
13237 rs6000_xcoff_unique_section (decl, reloc)
13238 tree decl;
13239 int reloc ATTRIBUTE_UNUSED;
13240 {
13241 const char *name;
13242
13243 /* Use select_section for private and uninitialized data. */
13244 if (!TREE_PUBLIC (decl)
13245 || DECL_COMMON (decl)
13246 || DECL_INITIAL (decl) == NULL_TREE
13247 || DECL_INITIAL (decl) == error_mark_node
13248 || (flag_zero_initialized_in_bss
13249 && initializer_zerop (DECL_INITIAL (decl))))
13250 return;
13251
13252 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
13253 name = (*targetm.strip_name_encoding) (name);
13254 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
13255 }
13256
13257 /* Select section for constant in constant pool.
13258
13259 On RS/6000, all constants are in the private read-only data area.
13260 However, if this is being placed in the TOC it must be output as a
13261 toc entry. */
13262
13263 static void
13264 rs6000_xcoff_select_rtx_section (mode, x, align)
13265 enum machine_mode mode;
13266 rtx x;
13267 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
13268 {
13269 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
13270 toc_section ();
13271 else
13272 read_only_private_data_section ();
13273 }
13274
/* Remove any trailing [DS] or the like from the symbol name; also
   drops a leading '*'.  Returns NAME itself when there is nothing to
   strip, otherwise a freshly GC-allocated copy.

   Guard against names shorter than the four-character "[XX]" suffix:
   the original code read name[len - 1] even for an empty name
   (out-of-bounds) and computed len - 4, which underflows size_t for
   len < 4.  */

static const char *
rs6000_xcoff_strip_name_encoding (name)
     const char *name;
{
  size_t len;
  if (*name == '*')
    name++;
  len = strlen (name);
  /* A mapping-class suffix is always four characters: "[XX]".  */
  if (len >= 4 && name[len - 1] == ']')
    return ggc_alloc_string (name, len - 4);
  else
    return name;
}
13290
13291 /* Section attributes. AIX is always PIC. */
13292
13293 static unsigned int
13294 rs6000_xcoff_section_type_flags (decl, name, reloc)
13295 tree decl;
13296 const char *name;
13297 int reloc;
13298 {
13299 unsigned int align;
13300 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
13301
13302 /* Align to at least UNIT size. */
13303 if (flags & SECTION_CODE)
13304 align = MIN_UNITS_PER_WORD;
13305 else
13306 /* Increase alignment of large objects if not already stricter. */
13307 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
13308 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
13309 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
13310
13311 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
13312 }
13313
13314 #endif /* TARGET_XCOFF */
13315
13316 /* Note that this is also used for PPC64 Linux. */
13317
13318 static void
13319 rs6000_xcoff_encode_section_info (decl, first)
13320 tree decl;
13321 int first ATTRIBUTE_UNUSED;
13322 {
13323 if (TREE_CODE (decl) == FUNCTION_DECL
13324 && (*targetm.binds_local_p) (decl))
13325 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
13326 }
13327
/* Cross-module name binding.  For AIX and PPC64 Linux, which always are
   PIC, use private copy of flag_pic.  */

static bool
rs6000_binds_local_p (decl)
     tree decl;
{
  /* NOTE(review): rs6000_flag_pic appears to be a saved copy of the
     -fpic setting (per the comment above); confirm where it is set.  */
  return default_binds_local_p_1 (decl, flag_pic || rs6000_flag_pic);
}
13337
/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
rs6000_rtx_costs (x, code, outer_code, total)
     rtx x;
     int code, outer_code ATTRIBUTE_UNUSED;
     int *total;
{
  switch (code)
    {
      /* On the RS/6000, if it is valid in the insn, it is free.
	 So this always returns 0.  */
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case HIGH:
      *total = 0;
      return true;

    case PLUS:
      /* Adding a constant that does not fit in a signed 16-bit
	 immediate and is not a multiple of 0x10000 takes two insns,
	 otherwise one.  */
      *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
		 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
					       + 0x8000) >= 0x10000)
		 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
		? COSTS_N_INSNS (2)
		: COSTS_N_INSNS (1));
      return true;

    case AND:
    case IOR:
    case XOR:
      /* Two insns when the constant has bits set both inside and
	 outside the low 16 bits; otherwise one.  */
      *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
		 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
		 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
		? COSTS_N_INSNS (2)
		: COSTS_N_INSNS (1));
      return true;

    case MULT:
      /* Multiply latency varies by processor, and on several CPUs by
	 whether one operand is a small (signed 9-bit) constant.  */
      if (optimize_size)
	{
	  *total = COSTS_N_INSNS (2);
	  return true;
	}
      switch (rs6000_cpu)
	{
	case PROCESSOR_RIOS1:
	case PROCESSOR_PPC405:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? COSTS_N_INSNS (5)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
	  return true;

	case PROCESSOR_RS64A:
	  /* DImode multiplies are markedly slower on RS64A.  */
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
	  return true;

	case PROCESSOR_RIOS2:
	case PROCESSOR_MPCCORE:
	case PROCESSOR_PPC604e:
	  *total = COSTS_N_INSNS (2);
	  return true;

	case PROCESSOR_PPC601:
	  *total = COSTS_N_INSNS (5);
	  return true;

	case PROCESSOR_PPC603:
	case PROCESSOR_PPC7400:
	case PROCESSOR_PPC750:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? COSTS_N_INSNS (5)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
	  return true;

	case PROCESSOR_PPC7450:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? COSTS_N_INSNS (4)
		    : COSTS_N_INSNS (3));
	  return true;

	case PROCESSOR_PPC403:
	case PROCESSOR_PPC604:
	case PROCESSOR_PPC8540:
	  *total = COSTS_N_INSNS (4);
	  return true;

	case PROCESSOR_PPC620:
	case PROCESSOR_PPC630:
	case PROCESSOR_POWER4:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
	  return true;

	default:
	  abort ();
	}

    case DIV:
    case MOD:
      /* Division by a power of two reduces to a shift sequence.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	{
	  *total = COSTS_N_INSNS (2);
	  return true;
	}
      /* FALLTHRU */

    case UDIV:
    case UMOD:
      /* Full divide latency, per processor.  */
      switch (rs6000_cpu)
	{
	case PROCESSOR_RIOS1:
	  *total = COSTS_N_INSNS (19);
	  return true;

	case PROCESSOR_RIOS2:
	  *total = COSTS_N_INSNS (13);
	  return true;

	case PROCESSOR_RS64A:
	  *total = (GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (65)
		    : COSTS_N_INSNS (67));
	  return true;

	case PROCESSOR_MPCCORE:
	  *total = COSTS_N_INSNS (6);
	  return true;

	case PROCESSOR_PPC403:
	  *total = COSTS_N_INSNS (33);
	  return true;

	case PROCESSOR_PPC405:
	  *total = COSTS_N_INSNS (35);
	  return true;

	case PROCESSOR_PPC601:
	  *total = COSTS_N_INSNS (36);
	  return true;

	case PROCESSOR_PPC603:
	  *total = COSTS_N_INSNS (37);
	  return true;

	case PROCESSOR_PPC604:
	case PROCESSOR_PPC604e:
	  *total = COSTS_N_INSNS (20);
	  return true;

	case PROCESSOR_PPC620:
	case PROCESSOR_PPC630:
	case PROCESSOR_POWER4:
	  *total = (GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (21)
		    : COSTS_N_INSNS (37));
	  return true;

	case PROCESSOR_PPC750:
	case PROCESSOR_PPC8540:
	case PROCESSOR_PPC7400:
	  *total = COSTS_N_INSNS (19);
	  return true;

	case PROCESSOR_PPC7450:
	  *total = COSTS_N_INSNS (23);
	  return true;

	default:
	  abort ();
	}

    case FFS:
      *total = COSTS_N_INSNS (4);
      return true;

    case MEM:
      /* MEM should be slightly more expensive than (plus (reg) (const)) */
      *total = 5;
      return true;

    default:
      /* Let the caller scan subexpressions.  */
      return false;
    }
}
13541
13542 /* A C expression returning the cost of moving data from a register of class
13543 CLASS1 to one of CLASS2. */
13544
13545 int
13546 rs6000_register_move_cost (mode, from, to)
13547 enum machine_mode mode;
13548 enum reg_class from, to;
13549 {
13550 /* Moves from/to GENERAL_REGS. */
13551 if (reg_classes_intersect_p (to, GENERAL_REGS)
13552 || reg_classes_intersect_p (from, GENERAL_REGS))
13553 {
13554 if (! reg_classes_intersect_p (to, GENERAL_REGS))
13555 from = to;
13556
13557 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
13558 return (rs6000_memory_move_cost (mode, from, 0)
13559 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
13560
13561 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift...*/
13562 else if (from == CR_REGS)
13563 return 4;
13564
13565 else
13566 /* A move will cost one instruction per GPR moved. */
13567 return 2 * HARD_REGNO_NREGS (0, mode);
13568 }
13569
13570 /* Moving between two similar registers is just one instruction. */
13571 else if (reg_classes_intersect_p (to, from))
13572 return mode == TFmode ? 4 : 2;
13573
13574 /* Everything else has to go through GENERAL_REGS. */
13575 else
13576 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
13577 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
13578 }
13579
13580 /* A C expressions returning the cost of moving data of MODE from a register to
13581 or from memory. */
13582
13583 int
13584 rs6000_memory_move_cost (mode, class, in)
13585 enum machine_mode mode;
13586 enum reg_class class;
13587 int in ATTRIBUTE_UNUSED;
13588 {
13589 if (reg_classes_intersect_p (class, GENERAL_REGS))
13590 return 4 * HARD_REGNO_NREGS (0, mode);
13591 else if (reg_classes_intersect_p (class, FLOAT_REGS))
13592 return 4 * HARD_REGNO_NREGS (32, mode);
13593 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
13594 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
13595 else
13596 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
13597 }
13598
13599 /* Return true if TYPE is of type __ev64_opaque__. */
13600
13601 static bool
13602 is_ev64_opaque_type (type)
13603 tree type;
13604 {
13605 return (TARGET_SPE
13606 && TREE_CODE (type) == VECTOR_TYPE
13607 && TYPE_NAME (type)
13608 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
13609 && DECL_NAME (TYPE_NAME (type))
13610 && strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))),
13611 "__ev64_opaque__") == 0);
13612 }
13613
13614 #include "gt-rs6000.h"