1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
34 #include "flags.h"
35 #include "recog.h"
36 #include "obstack.h"
37 #include "tree.h"
38 #include "expr.h"
39 #include "optabs.h"
40 #include "except.h"
41 #include "function.h"
42 #include "output.h"
43 #include "basic-block.h"
44 #include "integrate.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "hashtab.h"
48 #include "tm_p.h"
49 #include "target.h"
50 #include "target-def.h"
51 #include "langhooks.h"
52 #include "reload.h"
53
54 #ifndef TARGET_NO_PROTOTYPE
55 #define TARGET_NO_PROTOTYPE 0
56 #endif
57
58 #define EASY_VECTOR_15(n, x, y) ((n) >= -16 && (n) <= 15 \
59 && easy_vector_same (x, y))
60
61 #define EASY_VECTOR_15_ADD_SELF(n, x, y) ((n) >= 0x10 && (n) <= 0x1e \
62 && !((n) & 1) \
63 && easy_vector_same (x, y))
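/* For illustration (a sketch, not part of the generated code): the
   AltiVec vspltis[bhw] instructions take a 5-bit signed immediate, so
   -16..15 is directly loadable, while an even value in 0x10..0x1e can
   be synthesized by splatting half the value and adding the register
   to itself, e.g. for n == 24 in V4SImode:

       vspltisw %v0,12
       vadduwm  %v0,%v0,%v0

   which is the case EASY_VECTOR_15_ADD_SELF accepts.  */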
64
65 #define min(A,B) ((A) < (B) ? (A) : (B))
66 #define max(A,B) ((A) > (B) ? (A) : (B))
67
68 /* Target cpu type */
69
70 enum processor_type rs6000_cpu;
71 struct rs6000_cpu_select rs6000_select[3] =
72 {
73 /* switch name, tune arch */
74 { (const char *)0, "--with-cpu=", 1, 1 },
75 { (const char *)0, "-mcpu=", 1, 1 },
76 { (const char *)0, "-mtune=", 1, 0 },
77 };
78
79 /* Size of long double */
80 const char *rs6000_long_double_size_string;
81 int rs6000_long_double_type_size;
82
83 /* Whether -mabi=altivec has appeared */
84 int rs6000_altivec_abi;
85
86 /* Whether VRSAVE instructions should be generated. */
87 int rs6000_altivec_vrsave;
88
89 /* String from -mvrsave= option. */
90 const char *rs6000_altivec_vrsave_string;
91
92 /* Nonzero if we want SPE ABI extensions. */
93 int rs6000_spe_abi;
94
95 /* Whether isel instructions should be generated. */
96 int rs6000_isel;
97
98 /* Whether SPE simd instructions should be generated. */
99 int rs6000_spe;
100
101 /* Nonzero if floating point operations are done in the GPRs. */
102 int rs6000_float_gprs = 0;
103
104 /* String from -mfloat-gprs=. */
105 const char *rs6000_float_gprs_string;
106
107 /* String from -misel=. */
108 const char *rs6000_isel_string;
109
110 /* String from -mspe=. */
111 const char *rs6000_spe_string;
112
113 /* Set to nonzero once AIX common-mode calls have been defined. */
114 static GTY(()) int common_mode_defined;
115
116 /* Save information from a "cmpxx" operation until the branch or scc is
117 emitted. */
118 rtx rs6000_compare_op0, rs6000_compare_op1;
119 int rs6000_compare_fp_p;
120
121 /* Label number of the label created for -mrelocatable, which we call
122 so we can get the address of the GOT section. */
123 int rs6000_pic_labelno;
124
125 #ifdef USING_ELFOS_H
126 /* Which abi to adhere to */
127 const char *rs6000_abi_name = RS6000_ABI_NAME;
128
129 /* Semantics of the small data area */
130 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
131
132 /* Which small data model to use */
133 const char *rs6000_sdata_name = (char *)0;
134
135 /* Counter for labels which are to be placed in .fixup. */
136 int fixuplabelno = 0;
137 #endif
138
139 /* ABI enumeration available for subtarget to use. */
140 enum rs6000_abi rs6000_current_abi;
141
142 /* ABI string from -mabi= option. */
143 const char *rs6000_abi_string;
144
145 /* Debug flags */
146 const char *rs6000_debug_name;
147 int rs6000_debug_stack; /* debug stack applications */
148 int rs6000_debug_arg; /* debug argument handling */
149
150 /* A copy of V2SI_type_node to be used as an opaque type. */
151 static GTY(()) tree opaque_V2SI_type_node;
152
153 /* Same, but for V2SF. */
154 static GTY(()) tree opaque_V2SF_type_node;
155
156 const char *rs6000_traceback_name;
157 static enum {
158 traceback_default = 0,
159 traceback_none,
160 traceback_part,
161 traceback_full
162 } rs6000_traceback;
163
164 /* Flag to say the TOC is initialized */
165 int toc_initialized;
166 char toc_label_name[10];
167
168 /* Alias set for saves and restores from the rs6000 stack. */
169 static int rs6000_sr_alias_set;
170
171 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
172 The only place that looks at this is rs6000_set_default_type_attributes;
173 everywhere else should rely on the presence or absence of a longcall
174 attribute on the function declaration. */
175 int rs6000_default_long_calls;
176 const char *rs6000_longcall_switch;
177
178 struct builtin_description
179 {
180 /* mask is not const because we're going to alter it below. This
181 nonsense will go away when we rewrite the -march infrastructure
182 to give us more target flag bits. */
183 unsigned int mask;
184 const enum insn_code icode;
185 const char *const name;
186 const enum rs6000_builtins code;
187 };
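/* For illustration, a sketch of how a table of these descriptions is
   typically populated (the icode and builtin-code names here are
   hypothetical placeholders, not real entries):

     static const struct builtin_description bdesc_example[] =
       { { MASK_ALTIVEC, CODE_FOR_example_insn,
           "__builtin_altivec_example", ALTIVEC_BUILTIN_EXAMPLE } };

   The mask field gates each builtin on target flags such as
   MASK_ALTIVEC or MASK_SPE.  */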
188
189 static bool rs6000_function_ok_for_sibcall PARAMS ((tree, tree));
190 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
191 static void validate_condition_mode
192 PARAMS ((enum rtx_code, enum machine_mode));
193 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
194 static void rs6000_maybe_dead PARAMS ((rtx));
195 static void rs6000_emit_stack_tie PARAMS ((void));
196 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
197 static rtx spe_synthesize_frame_save PARAMS ((rtx));
198 static bool spe_func_has_64bit_regs_p PARAMS ((void));
199 static void emit_frame_save PARAMS ((rtx, rtx, enum machine_mode,
200 unsigned int, int, int));
201 static rtx gen_frame_mem_offset PARAMS ((enum machine_mode, rtx, int));
202 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
203 static unsigned rs6000_hash_constant PARAMS ((rtx));
204 static unsigned toc_hash_function PARAMS ((const void *));
205 static int toc_hash_eq PARAMS ((const void *, const void *));
206 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
207 static struct machine_function * rs6000_init_machine_status PARAMS ((void));
208 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
209 #ifdef HAVE_GAS_HIDDEN
210 static void rs6000_assemble_visibility PARAMS ((tree, int));
211 #endif
212 static int rs6000_ra_ever_killed PARAMS ((void));
213 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
214 const struct attribute_spec rs6000_attribute_table[];
215 static void rs6000_set_default_type_attributes PARAMS ((tree));
216 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
217 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
218 static void rs6000_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT,
219 HOST_WIDE_INT, tree));
220 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
221 HOST_WIDE_INT, HOST_WIDE_INT));
222 #if TARGET_ELF
223 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
224 int));
225 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
226 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
227 static void rs6000_elf_select_section PARAMS ((tree, int,
228 unsigned HOST_WIDE_INT));
229 static void rs6000_elf_unique_section PARAMS ((tree, int));
230 static void rs6000_elf_select_rtx_section PARAMS ((enum machine_mode, rtx,
231 unsigned HOST_WIDE_INT));
232 static void rs6000_elf_encode_section_info PARAMS ((tree, rtx, int))
233 ATTRIBUTE_UNUSED;
234 static bool rs6000_elf_in_small_data_p PARAMS ((tree));
235 #endif
236 #if TARGET_XCOFF
237 static void rs6000_xcoff_asm_globalize_label PARAMS ((FILE *, const char *));
238 static void rs6000_xcoff_asm_named_section PARAMS ((const char *, unsigned int));
239 static void rs6000_xcoff_select_section PARAMS ((tree, int,
240 unsigned HOST_WIDE_INT));
241 static void rs6000_xcoff_unique_section PARAMS ((tree, int));
242 static void rs6000_xcoff_select_rtx_section PARAMS ((enum machine_mode, rtx,
243 unsigned HOST_WIDE_INT));
244 static const char * rs6000_xcoff_strip_name_encoding PARAMS ((const char *));
245 static unsigned int rs6000_xcoff_section_type_flags PARAMS ((tree, const char *, int));
246 #endif
247 #if TARGET_MACHO
248 static bool rs6000_binds_local_p PARAMS ((tree));
249 #endif
250 static int rs6000_use_dfa_pipeline_interface PARAMS ((void));
251 static int rs6000_variable_issue PARAMS ((FILE *, int, rtx, int));
252 static bool rs6000_rtx_costs PARAMS ((rtx, int, int, int *));
253 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
254 static int rs6000_adjust_priority PARAMS ((rtx, int));
255 static int rs6000_issue_rate PARAMS ((void));
256 static int rs6000_use_sched_lookahead PARAMS ((void));
257
258 static void rs6000_init_builtins PARAMS ((void));
259 static rtx rs6000_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
260 static rtx rs6000_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
261 static rtx rs6000_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
262 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
263 static void altivec_init_builtins PARAMS ((void));
264 static void rs6000_common_init_builtins PARAMS ((void));
265
266 static void enable_mask_for_builtins PARAMS ((struct builtin_description *,
267 int, enum rs6000_builtins,
268 enum rs6000_builtins));
269 static void spe_init_builtins PARAMS ((void));
270 static rtx spe_expand_builtin PARAMS ((tree, rtx, bool *));
271 static rtx spe_expand_predicate_builtin PARAMS ((enum insn_code, tree, rtx));
272 static rtx spe_expand_evsel_builtin PARAMS ((enum insn_code, tree, rtx));
273 static int rs6000_emit_int_cmove PARAMS ((rtx, rtx, rtx, rtx));
274
275 static rtx altivec_expand_builtin PARAMS ((tree, rtx, bool *));
276 static rtx altivec_expand_ld_builtin PARAMS ((tree, rtx, bool *));
277 static rtx altivec_expand_st_builtin PARAMS ((tree, rtx, bool *));
278 static rtx altivec_expand_dst_builtin PARAMS ((tree, rtx, bool *));
279 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
280 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
281 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
282 static void rs6000_parse_abi_options PARAMS ((void));
283 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
284 static int first_altivec_reg_to_save PARAMS ((void));
285 static unsigned int compute_vrsave_mask PARAMS ((void));
286 static void is_altivec_return_reg PARAMS ((rtx, void *));
287 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
288 int easy_vector_constant PARAMS ((rtx, enum machine_mode));
289 static int easy_vector_same PARAMS ((rtx, enum machine_mode));
290 static bool is_ev64_opaque_type PARAMS ((tree));
291 static rtx rs6000_dwarf_register_span PARAMS ((rtx));
292
293 /* Hash table stuff for keeping track of TOC entries. */
294
295 struct toc_hash_struct GTY(())
296 {
297 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
298 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
299 rtx key;
300 enum machine_mode key_mode;
301 int labelno;
302 };
303
304 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
305 \f
306 /* Default register names. */
307 char rs6000_reg_names[][8] =
308 {
309 "0", "1", "2", "3", "4", "5", "6", "7",
310 "8", "9", "10", "11", "12", "13", "14", "15",
311 "16", "17", "18", "19", "20", "21", "22", "23",
312 "24", "25", "26", "27", "28", "29", "30", "31",
313 "0", "1", "2", "3", "4", "5", "6", "7",
314 "8", "9", "10", "11", "12", "13", "14", "15",
315 "16", "17", "18", "19", "20", "21", "22", "23",
316 "24", "25", "26", "27", "28", "29", "30", "31",
317 "mq", "lr", "ctr","ap",
318 "0", "1", "2", "3", "4", "5", "6", "7",
319 "xer",
320 /* AltiVec registers. */
321 "0", "1", "2", "3", "4", "5", "6", "7",
322 "8", "9", "10", "11", "12", "13", "14", "15",
323 "16", "17", "18", "19", "20", "21", "22", "23",
324 "24", "25", "26", "27", "28", "29", "30", "31",
325 "vrsave", "vscr",
326 /* SPE registers. */
327 "spe_acc", "spefscr"
328 };
329
330 #ifdef TARGET_REGNAMES
331 static const char alt_reg_names[][8] =
332 {
333 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
334 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
335 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
336 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
337 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
338 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
339 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
340 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
341 "mq", "lr", "ctr", "ap",
342 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
343 "xer",
344 /* AltiVec registers. */
345 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
346 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
347 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
348 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
349 "vrsave", "vscr",
350 /* SPE registers. */
351 "spe_acc", "spefscr"
352 };
353 #endif
354 \f
355 #ifndef MASK_STRICT_ALIGN
356 #define MASK_STRICT_ALIGN 0
357 #endif
358 #ifndef TARGET_PROFILE_KERNEL
359 #define TARGET_PROFILE_KERNEL 0
360 #endif
361
362 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
363 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
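/* For example, ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO) is 0x80000000 and
   ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO + 31) is 0x00000001, matching
   the VRSAVE bit order described above.  */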
364 \f
365 /* Initialize the GCC target structure. */
366 #undef TARGET_ATTRIBUTE_TABLE
367 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
368 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
369 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
370
371 #undef TARGET_ASM_ALIGNED_DI_OP
372 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
373
374 /* Default unaligned ops are only provided for ELF. Find the ops needed
375 for non-ELF systems. */
376 #ifndef OBJECT_FORMAT_ELF
377 #if TARGET_XCOFF
378 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
379 64-bit targets. */
380 #undef TARGET_ASM_UNALIGNED_HI_OP
381 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
382 #undef TARGET_ASM_UNALIGNED_SI_OP
383 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
384 #undef TARGET_ASM_UNALIGNED_DI_OP
385 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
386 #else
387 /* For Darwin. */
388 #undef TARGET_ASM_UNALIGNED_HI_OP
389 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
390 #undef TARGET_ASM_UNALIGNED_SI_OP
391 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
392 #endif
393 #endif
394
395 /* This hook deals with fixups for relocatable code and DI-mode objects
396 in 64-bit code. */
397 #undef TARGET_ASM_INTEGER
398 #define TARGET_ASM_INTEGER rs6000_assemble_integer
399
400 #ifdef HAVE_GAS_HIDDEN
401 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
402 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
403 #endif
404
405 #undef TARGET_ASM_FUNCTION_PROLOGUE
406 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
407 #undef TARGET_ASM_FUNCTION_EPILOGUE
408 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
409
410 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
411 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
412 #undef TARGET_SCHED_VARIABLE_ISSUE
413 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
414
415 #undef TARGET_SCHED_ISSUE_RATE
416 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
417 #undef TARGET_SCHED_ADJUST_COST
418 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
419 #undef TARGET_SCHED_ADJUST_PRIORITY
420 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
421
422 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
423 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
424
425 #undef TARGET_INIT_BUILTINS
426 #define TARGET_INIT_BUILTINS rs6000_init_builtins
427
428 #undef TARGET_EXPAND_BUILTIN
429 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
430
431 #if TARGET_MACHO
432 #undef TARGET_BINDS_LOCAL_P
433 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
434 #endif
435
436 #undef TARGET_ASM_OUTPUT_MI_THUNK
437 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
438
439 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
440 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
441
442 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
443 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
444
445 #undef TARGET_RTX_COSTS
446 #define TARGET_RTX_COSTS rs6000_rtx_costs
447 #undef TARGET_ADDRESS_COST
448 #define TARGET_ADDRESS_COST hook_int_rtx_0
449
450 #undef TARGET_VECTOR_OPAQUE_P
451 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
452
453 #undef TARGET_DWARF_REGISTER_SPAN
454 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
455
456 struct gcc_target targetm = TARGET_INITIALIZER;
457 \f
458 /* Override command line options. Mostly we process the processor
459 type and sometimes adjust other TARGET_ options. */
460
461 void
462 rs6000_override_options (default_cpu)
463 const char *default_cpu;
464 {
465 size_t i, j;
466 struct rs6000_cpu_select *ptr;
467
468 /* Simplify the entries below by making a mask for any POWER
469 variant and any PowerPC variant. */
470
471 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
472 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
473 | MASK_PPC_GFXOPT | MASK_POWERPC64)
474 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
475
476 static struct ptt
477 {
478 const char *const name; /* Canonical processor name. */
479 const enum processor_type processor; /* Processor type enum value. */
480 const int target_enable; /* Target flags to enable. */
481 const int target_disable; /* Target flags to disable. */
482 } const processor_target_table[]
483 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
484 POWER_MASKS | POWERPC_MASKS},
485 {"power", PROCESSOR_POWER,
486 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
487 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
488 {"power2", PROCESSOR_POWER,
489 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
490 POWERPC_MASKS | MASK_NEW_MNEMONICS},
491 {"power3", PROCESSOR_PPC630,
492 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
493 POWER_MASKS},
494 {"power4", PROCESSOR_POWER4,
495 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
496 POWER_MASKS},
497 {"powerpc", PROCESSOR_POWERPC,
498 MASK_POWERPC | MASK_NEW_MNEMONICS,
499 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
500 {"powerpc64", PROCESSOR_POWERPC64,
501 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
502 POWER_MASKS | POWERPC_OPT_MASKS},
503 {"rios", PROCESSOR_RIOS1,
504 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
505 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
506 {"rios1", PROCESSOR_RIOS1,
507 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
508 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
509 {"rsc", PROCESSOR_PPC601,
510 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
511 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
512 {"rsc1", PROCESSOR_PPC601,
513 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
514 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
515 {"rios2", PROCESSOR_RIOS2,
516 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
517 POWERPC_MASKS | MASK_NEW_MNEMONICS},
518 {"rs64a", PROCESSOR_RS64A,
519 MASK_POWERPC | MASK_NEW_MNEMONICS,
520 POWER_MASKS | POWERPC_OPT_MASKS},
521 {"401", PROCESSOR_PPC403,
522 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
523 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
524 {"403", PROCESSOR_PPC403,
525 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
526 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
527 {"405", PROCESSOR_PPC405,
528 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
529 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
530 {"405f", PROCESSOR_PPC405,
531 MASK_POWERPC | MASK_NEW_MNEMONICS,
532 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
533 {"505", PROCESSOR_MPCCORE,
534 MASK_POWERPC | MASK_NEW_MNEMONICS,
535 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
536 {"601", PROCESSOR_PPC601,
537 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
538 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
539 {"602", PROCESSOR_PPC603,
540 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
541 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
542 {"603", PROCESSOR_PPC603,
543 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
544 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
545 {"603e", PROCESSOR_PPC603,
546 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
547 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
548 {"ec603e", PROCESSOR_PPC603,
549 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
550 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
551 {"604", PROCESSOR_PPC604,
552 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
553 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
554 {"604e", PROCESSOR_PPC604e,
555 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
556 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
557 {"620", PROCESSOR_PPC620,
558 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
559 POWER_MASKS},
560 {"630", PROCESSOR_PPC630,
561 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
562 POWER_MASKS},
563 {"740", PROCESSOR_PPC750,
564 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
565 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
566 {"750", PROCESSOR_PPC750,
567 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
568 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
569 {"7400", PROCESSOR_PPC7400,
570 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
571 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
572 {"7450", PROCESSOR_PPC7450,
573 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
574 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
575 {"8540", PROCESSOR_PPC8540,
576 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
577 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
578 {"801", PROCESSOR_MPCCORE,
579 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
580 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
581 {"821", PROCESSOR_MPCCORE,
582 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
583 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
584 {"823", PROCESSOR_MPCCORE,
585 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
586 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
587 {"860", PROCESSOR_MPCCORE,
588 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
589 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};
590
591 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
592
593 /* Save current -mmultiple/-mno-multiple status. */
594 int multiple = TARGET_MULTIPLE;
595 /* Save current -mstring/-mno-string status. */
596 int string = TARGET_STRING;
597
598 /* Identify the processor type. */
599 rs6000_select[0].string = default_cpu;
600 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
601
602 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
603 {
604 ptr = &rs6000_select[i];
605 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
606 {
607 for (j = 0; j < ptt_size; j++)
608 if (! strcmp (ptr->string, processor_target_table[j].name))
609 {
610 if (ptr->set_tune_p)
611 rs6000_cpu = processor_target_table[j].processor;
612
613 if (ptr->set_arch_p)
614 {
615 target_flags |= processor_target_table[j].target_enable;
616 target_flags &= ~processor_target_table[j].target_disable;
617 }
618 break;
619 }
620
621 if (j == ptt_size)
622 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
623 }
624 }
625
626 if (TARGET_E500)
627 rs6000_isel = 1;
628
629 /* If we are optimizing big endian systems for space, use the load/store
630 multiple and string instructions. */
631 if (BYTES_BIG_ENDIAN && optimize_size)
632 target_flags |= MASK_MULTIPLE | MASK_STRING;
633
634 /* If -mmultiple or -mno-multiple was explicitly used, don't
635 override with the processor default */
636 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
637 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
638
639 /* If -mstring or -mno-string was explicitly used, don't override
640 with the processor default. */
641 if ((target_flags_explicit & MASK_STRING) != 0)
642 target_flags = (target_flags & ~MASK_STRING) | string;
643
644 /* Don't allow -mmultiple or -mstring on little endian systems
645 unless the cpu is a 750, because the hardware doesn't support the
646 instructions used in little endian mode, and causes an alignment
647 trap. The 750 does not cause an alignment trap (except when the
648 target is unaligned). */
649
650 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
651 {
652 if (TARGET_MULTIPLE)
653 {
654 target_flags &= ~MASK_MULTIPLE;
655 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
656 warning ("-mmultiple is not supported on little endian systems");
657 }
658
659 if (TARGET_STRING)
660 {
661 target_flags &= ~MASK_STRING;
662 if ((target_flags_explicit & MASK_STRING) != 0)
663 warning ("-mstring is not supported on little endian systems");
664 }
665 }
666
667 /* Set debug flags */
668 if (rs6000_debug_name)
669 {
670 if (! strcmp (rs6000_debug_name, "all"))
671 rs6000_debug_stack = rs6000_debug_arg = 1;
672 else if (! strcmp (rs6000_debug_name, "stack"))
673 rs6000_debug_stack = 1;
674 else if (! strcmp (rs6000_debug_name, "arg"))
675 rs6000_debug_arg = 1;
676 else
677 error ("unknown -mdebug-%s switch", rs6000_debug_name);
678 }
679
680 if (rs6000_traceback_name)
681 {
682 if (! strncmp (rs6000_traceback_name, "full", 4))
683 rs6000_traceback = traceback_full;
684 else if (! strncmp (rs6000_traceback_name, "part", 4))
685 rs6000_traceback = traceback_part;
686 else if (! strncmp (rs6000_traceback_name, "no", 2))
687 rs6000_traceback = traceback_none;
688 else
689 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
690 rs6000_traceback_name);
691 }
692
693 /* Set size of long double */
694 rs6000_long_double_type_size = 64;
695 if (rs6000_long_double_size_string)
696 {
697 char *tail;
698 int size = strtol (rs6000_long_double_size_string, &tail, 10);
699 if (*tail != '\0' || (size != 64 && size != 128))
700 error ("Unknown switch -mlong-double-%s",
701 rs6000_long_double_size_string);
702 else
703 rs6000_long_double_type_size = size;
704 }
705
706 /* Handle -mabi= options. */
707 rs6000_parse_abi_options ();
708
709 /* Handle generic -mFOO=YES/NO options. */
710 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
711 &rs6000_altivec_vrsave);
712 rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
713 &rs6000_isel);
714 rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
715 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string,
716 &rs6000_float_gprs);
717
718 #ifdef SUBTARGET_OVERRIDE_OPTIONS
719 SUBTARGET_OVERRIDE_OPTIONS;
720 #endif
721 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
722 SUBSUBTARGET_OVERRIDE_OPTIONS;
723 #endif
724
725 if (TARGET_E500)
726 {
727 /* The e500 does not have string instructions, and we set
728 MASK_STRING above when optimizing for size. */
729 if ((target_flags & MASK_STRING) != 0)
730 target_flags = target_flags & ~MASK_STRING;
731
732 /* No SPE means 64-bit long doubles, even if an E500. */
733 if (rs6000_spe_string != 0
734 && !strcmp (rs6000_spe_string, "no"))
735 rs6000_long_double_type_size = 64;
736 }
737 else if (rs6000_select[1].string != NULL)
738 {
739 /* For the powerpc-eabispe configuration, we set all these by
740 default, so let's unset them if we manually set another
741 CPU that is not the E500. */
742 if (rs6000_abi_string == 0)
743 rs6000_spe_abi = 0;
744 if (rs6000_spe_string == 0)
745 rs6000_spe = 0;
746 if (rs6000_float_gprs_string == 0)
747 rs6000_float_gprs = 0;
748 if (rs6000_isel_string == 0)
749 rs6000_isel = 0;
750 if (rs6000_long_double_size_string == 0)
751 rs6000_long_double_type_size = 64;
752 }
753
754 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
755 using TARGET_OPTIONS to handle a toggle switch, but we're out of
756 bits in target_flags so TARGET_SWITCHES cannot be used.
757 Assumption here is that rs6000_longcall_switch points into the
758 text of the complete option, rather than being a copy, so we can
759 scan back for the presence or absence of the no- modifier. */
760 if (rs6000_longcall_switch)
761 {
762 const char *base = rs6000_longcall_switch;
763 while (base[-1] != 'm') base--;
764
765 if (*rs6000_longcall_switch != '\0')
766 error ("invalid option `%s'", base);
767 rs6000_default_long_calls = (base[0] != 'n');
768 }
769
770 #ifdef TARGET_REGNAMES
771 /* If the user desires alternate register names, copy in the
772 alternate names now. */
773 if (TARGET_REGNAMES)
774 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
775 #endif
776
777 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
778 If -maix-struct-return or -msvr4-struct-return was explicitly
779 used, don't override with the ABI default. */
780 if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
781 {
782 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
783 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
784 else
785 target_flags |= MASK_AIX_STRUCT_RET;
786 }
787
788 if (TARGET_LONG_DOUBLE_128
789 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
790 real_format_for_mode[TFmode - QFmode] = &ibm_extended_format;
791
792 /* Allocate an alias set for register saves & restores from stack. */
793 rs6000_sr_alias_set = new_alias_set ();
794
795 if (TARGET_TOC)
796 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
797
798 /* We can only guarantee the availability of DI pseudo-ops when
799 assembling for 64-bit targets. */
800 if (!TARGET_64BIT)
801 {
802 targetm.asm_out.aligned_op.di = NULL;
803 targetm.asm_out.unaligned_op.di = NULL;
804 }
805
806 /* Set maximum branch target alignment at two instructions, eight bytes. */
807 align_jumps_max_skip = 8;
808 align_loops_max_skip = 8;
809
810 /* Arrange to save and restore machine status around nested functions. */
811 init_machine_status = rs6000_init_machine_status;
812 }
813
814 /* Handle generic options of the form -mfoo=yes/no.
815 NAME is the option name.
816 VALUE is the option value.
817 FLAG is the pointer to the flag where to store a 1 or 0, depending on
818 whether the option value is 'yes' or 'no' respectively. */
819 static void
820 rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
821 {
822 if (value == 0)
823 return;
824 else if (!strcmp (value, "yes"))
825 *flag = 1;
826 else if (!strcmp (value, "no"))
827 *flag = 0;
828 else
829 error ("unknown -m%s= option specified: '%s'", name, value);
830 }
831
832 /* Handle -mabi= options. */
833 static void
834 rs6000_parse_abi_options ()
835 {
836 if (rs6000_abi_string == 0)
837 return;
838 else if (! strcmp (rs6000_abi_string, "altivec"))
839 rs6000_altivec_abi = 1;
840 else if (! strcmp (rs6000_abi_string, "no-altivec"))
841 rs6000_altivec_abi = 0;
842 else if (! strcmp (rs6000_abi_string, "spe"))
843 {
844 rs6000_spe_abi = 1;
845 if (!TARGET_SPE_ABI)
846 error ("not configured for ABI: '%s'", rs6000_abi_string);
847 }
848
849 else if (! strcmp (rs6000_abi_string, "no-spe"))
850 rs6000_spe_abi = 0;
851 else
852 error ("unknown ABI specified: '%s'", rs6000_abi_string);
853 }
854
855 void
856 optimization_options (level, size)
857 int level ATTRIBUTE_UNUSED;
858 int size ATTRIBUTE_UNUSED;
859 {
860 }
861 \f
862 /* Do anything needed at the start of the asm file. */
863
864 void
865 rs6000_file_start (file, default_cpu)
866 FILE *file;
867 const char *default_cpu;
868 {
869 size_t i;
870 char buffer[80];
871 const char *start = buffer;
872 struct rs6000_cpu_select *ptr;
873
874 if (flag_verbose_asm)
875 {
876 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
877 rs6000_select[0].string = default_cpu;
878
879 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
880 {
881 ptr = &rs6000_select[i];
882 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
883 {
884 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
885 start = "";
886 }
887 }
888
889 #ifdef USING_ELFOS_H
890 switch (rs6000_sdata)
891 {
892 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
893 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
894 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
895 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
896 }
897
898 if (rs6000_sdata && g_switch_value)
899 {
900 fprintf (file, "%s -G %d", start, g_switch_value);
901 start = "";
902 }
903 #endif
904
905 if (*start == '\0')
906 putc ('\n', file);
907 }
908 }
909 \f
910 /* Return nonzero if this function is known to have a null epilogue. */
911
912 int
913 direct_return ()
914 {
915 if (reload_completed)
916 {
917 rs6000_stack_t *info = rs6000_stack_info ();
918
919 if (info->first_gp_reg_save == 32
920 && info->first_fp_reg_save == 64
921 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
922 && ! info->lr_save_p
923 && ! info->cr_save_p
924 && info->vrsave_mask == 0
925 && ! info->push_p)
926 return 1;
927 }
928
929 return 0;
930 }
931
932 /* Returns 1 always. */
933
934 int
935 any_operand (op, mode)
936 rtx op ATTRIBUTE_UNUSED;
937 enum machine_mode mode ATTRIBUTE_UNUSED;
938 {
939 return 1;
940 }
941
942 /* Returns 1 if op is the count register. */
943 int
944 count_register_operand (op, mode)
945 rtx op;
946 enum machine_mode mode ATTRIBUTE_UNUSED;
947 {
948 if (GET_CODE (op) != REG)
949 return 0;
950
951 if (REGNO (op) == COUNT_REGISTER_REGNUM)
952 return 1;
953
954 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
955 return 1;
956
957 return 0;
958 }
959
960 /* Returns 1 if op is an altivec register. */
961 int
962 altivec_register_operand (op, mode)
963 rtx op;
964 enum machine_mode mode ATTRIBUTE_UNUSED;
965 {
966
967 return (register_operand (op, mode)
968 && (GET_CODE (op) != REG
969 || REGNO (op) > FIRST_PSEUDO_REGISTER
970 || ALTIVEC_REGNO_P (REGNO (op))));
971 }
972
973 int
974 xer_operand (op, mode)
975 rtx op;
976 enum machine_mode mode ATTRIBUTE_UNUSED;
977 {
978 if (GET_CODE (op) != REG)
979 return 0;
980
981 if (XER_REGNO_P (REGNO (op)))
982 return 1;
983
984 return 0;
985 }
986
987 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
988 by such constants completes more quickly. */
989
990 int
991 s8bit_cint_operand (op, mode)
992 rtx op;
993 enum machine_mode mode ATTRIBUTE_UNUSED;
994 {
995 return ( GET_CODE (op) == CONST_INT
996 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
997 }
998
999 /* Return 1 if OP is a constant that can fit in a D field. */
1000
1001 int
1002 short_cint_operand (op, mode)
1003 rtx op;
1004 enum machine_mode mode ATTRIBUTE_UNUSED;
1005 {
1006 return (GET_CODE (op) == CONST_INT
1007 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1008 }
1009
1010 /* Similar for an unsigned D field. */
1011
1012 int
1013 u_short_cint_operand (op, mode)
1014 rtx op;
1015 enum machine_mode mode ATTRIBUTE_UNUSED;
1016 {
1017 return (GET_CODE (op) == CONST_INT
1018 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1019 }
1020
1021 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1022
1023 int
1024 non_short_cint_operand (op, mode)
1025 rtx op;
1026 enum machine_mode mode ATTRIBUTE_UNUSED;
1027 {
1028 return (GET_CODE (op) == CONST_INT
1029 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1030 }
1031
1032 /* Returns 1 if OP is a CONST_INT that is a positive value
1033 and an exact power of 2. */
1034
1035 int
1036 exact_log2_cint_operand (op, mode)
1037 rtx op;
1038 enum machine_mode mode ATTRIBUTE_UNUSED;
1039 {
1040 return (GET_CODE (op) == CONST_INT
1041 && INTVAL (op) > 0
1042 && exact_log2 (INTVAL (op)) >= 0);
1043 }
1044
1045 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1046 ctr, or lr). */
1047
1048 int
1049 gpc_reg_operand (op, mode)
1050 rtx op;
1051 enum machine_mode mode;
1052 {
1053 return (register_operand (op, mode)
1054 && (GET_CODE (op) != REG
1055 || (REGNO (op) >= ARG_POINTER_REGNUM
1056 && !XER_REGNO_P (REGNO (op)))
1057 || REGNO (op) < MQ_REGNO));
1058 }
1059
1060 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1061 CR field. */
1062
1063 int
1064 cc_reg_operand (op, mode)
1065 rtx op;
1066 enum machine_mode mode;
1067 {
1068 return (register_operand (op, mode)
1069 && (GET_CODE (op) != REG
1070 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1071 || CR_REGNO_P (REGNO (op))));
1072 }
1073
1074 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1075 CR field that isn't CR0. */
1076
1077 int
1078 cc_reg_not_cr0_operand (op, mode)
1079 rtx op;
1080 enum machine_mode mode;
1081 {
1082 return (register_operand (op, mode)
1083 && (GET_CODE (op) != REG
1084 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1085 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1086 }
1087
1088 /* Returns 1 if OP is either a constant integer valid for a D-field or
1089 a non-special register. If a register, it must be in the proper
1090 mode unless MODE is VOIDmode. */
1091
1092 int
1093 reg_or_short_operand (op, mode)
1094 rtx op;
1095 enum machine_mode mode;
1096 {
1097 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1098 }
1099
1100 /* Similar, except check if the negation of the constant would be
1101 valid for a D-field. */
1102
1103 int
1104 reg_or_neg_short_operand (op, mode)
1105 rtx op;
1106 enum machine_mode mode;
1107 {
1108 if (GET_CODE (op) == CONST_INT)
1109 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1110
1111 return gpc_reg_operand (op, mode);
1112 }
1113
1114 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1115 a non-special register. If a register, it must be in the proper
1116 mode unless MODE is VOIDmode. */
1117
1118 int
1119 reg_or_aligned_short_operand (op, mode)
1120 rtx op;
1121 enum machine_mode mode;
1122 {
1123 if (gpc_reg_operand (op, mode))
1124 return 1;
1125 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1126 return 1;
1127
1128 return 0;
1129 }
1130
1131
1132 /* Return 1 if the operand is either a register or an integer whose
1133 high-order 16 bits are zero. */
1134
1135 int
1136 reg_or_u_short_operand (op, mode)
1137 rtx op;
1138 enum machine_mode mode;
1139 {
1140 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1141 }
1142
1143 /* Return 1 if the operand is either a non-special register or ANY
1144 constant integer. */
1145
1146 int
1147 reg_or_cint_operand (op, mode)
1148 rtx op;
1149 enum machine_mode mode;
1150 {
1151 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1152 }
1153
1154 /* Return 1 if the operand is either a non-special register or ANY
1155 32-bit signed constant integer. */
1156
1157 int
1158 reg_or_arith_cint_operand (op, mode)
1159 rtx op;
1160 enum machine_mode mode;
1161 {
1162 return (gpc_reg_operand (op, mode)
1163 || (GET_CODE (op) == CONST_INT
1164 #if HOST_BITS_PER_WIDE_INT != 32
1165 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
1166 < (unsigned HOST_WIDE_INT) 0x100000000ll)
1167 #endif
1168 ));
1169 }
1170
1171 /* Return 1 if the operand is either a non-special register or a 32-bit
1172 signed constant integer valid for 64-bit addition. */
1173
1174 int
1175 reg_or_add_cint64_operand (op, mode)
1176 rtx op;
1177 enum machine_mode mode;
1178 {
1179 return (gpc_reg_operand (op, mode)
1180 || (GET_CODE (op) == CONST_INT
1181 #if HOST_BITS_PER_WIDE_INT == 32
1182 && INTVAL (op) < 0x7fff8000
1183 #else
1184 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1185 < 0x100000000ll)
1186 #endif
1187 ));
1188 }
1189
1190 /* Return 1 if the operand is either a non-special register or a 32-bit
1191 signed constant integer valid for 64-bit subtraction. */
1192
1193 int
1194 reg_or_sub_cint64_operand (op, mode)
1195 rtx op;
1196 enum machine_mode mode;
1197 {
1198 return (gpc_reg_operand (op, mode)
1199 || (GET_CODE (op) == CONST_INT
1200 #if HOST_BITS_PER_WIDE_INT == 32
1201 && (- INTVAL (op)) < 0x7fff8000
1202 #else
1203 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1204 < 0x100000000ll)
1205 #endif
1206 ));
1207 }
1208
1209 /* Return 1 if the operand is either a non-special register or ANY
1210 32-bit unsigned constant integer. */
1211
1212 int
1213 reg_or_logical_cint_operand (op, mode)
1214 rtx op;
1215 enum machine_mode mode;
1216 {
1217 if (GET_CODE (op) == CONST_INT)
1218 {
1219 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1220 {
1221 if (GET_MODE_BITSIZE (mode) <= 32)
1222 abort ();
1223
1224 if (INTVAL (op) < 0)
1225 return 0;
1226 }
1227
1228 return ((INTVAL (op) & GET_MODE_MASK (mode)
1229 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1230 }
1231 else if (GET_CODE (op) == CONST_DOUBLE)
1232 {
1233 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1234 || mode != DImode)
1235 abort ();
1236
1237 return CONST_DOUBLE_HIGH (op) == 0;
1238 }
1239 else
1240 return gpc_reg_operand (op, mode);
1241 }
1242
1243 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1244
1245 int
1246 got_operand (op, mode)
1247 rtx op;
1248 enum machine_mode mode ATTRIBUTE_UNUSED;
1249 {
1250 return (GET_CODE (op) == SYMBOL_REF
1251 || GET_CODE (op) == CONST
1252 || GET_CODE (op) == LABEL_REF);
1253 }
1254
1255 /* Return 1 if the operand is a simple reference that can be loaded via
1256 the GOT (labels involving addition aren't allowed). */
1257
1258 int
1259 got_no_const_operand (op, mode)
1260 rtx op;
1261 enum machine_mode mode ATTRIBUTE_UNUSED;
1262 {
1263 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1264 }
1265
1266 /* Return the number of instructions it takes to form a constant in an
1267 integer register. */
1268
1269 static int
1270 num_insns_constant_wide (value)
1271 HOST_WIDE_INT value;
1272 {
1273 /* signed constant loadable with {cal|addi} */
1274 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1275 return 1;
1276
1277 /* constant loadable with {cau|addis} */
1278 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1279 return 1;
1280
1281 #if HOST_BITS_PER_WIDE_INT == 64
1282 else if (TARGET_POWERPC64)
1283 {
1284 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1285 HOST_WIDE_INT high = value >> 31;
1286
1287 if (high == 0 || high == -1)
1288 return 2;
1289
1290 high >>= 1;
1291
1292 if (low == 0)
1293 return num_insns_constant_wide (high) + 1;
1294 else
1295 return (num_insns_constant_wide (high)
1296 + num_insns_constant_wide (low) + 1);
1297 }
1298 #endif
1299
1300 else
1301 return 2;
1302 }
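/* Illustrative examples of the above, for 32-bit values:
     0x00001234  ->  1 insn   ({cal|addi}: fits a signed 16-bit field)
     0x12340000  ->  1 insn   ({cau|addis}: low 16 bits are zero)
     0x12345678  ->  2 insns  (addis followed by ori)
   For 64-bit values on PowerPC64 the function recurses on the high
   part, and the "+ 1" terms account for the insn that combines the
   two halves.  */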
1303
1304 int
1305 num_insns_constant (op, mode)
1306 rtx op;
1307 enum machine_mode mode;
1308 {
1309 if (GET_CODE (op) == CONST_INT)
1310 {
1311 #if HOST_BITS_PER_WIDE_INT == 64
1312 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1313 && mask64_operand (op, mode))
1314 return 2;
1315 else
1316 #endif
1317 return num_insns_constant_wide (INTVAL (op));
1318 }
1319
1320 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1321 {
1322 long l;
1323 REAL_VALUE_TYPE rv;
1324
1325 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1326 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1327 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1328 }
1329
1330 else if (GET_CODE (op) == CONST_DOUBLE)
1331 {
1332 HOST_WIDE_INT low;
1333 HOST_WIDE_INT high;
1334 long l[2];
1335 REAL_VALUE_TYPE rv;
1336 int endian = (WORDS_BIG_ENDIAN == 0);
1337
1338 if (mode == VOIDmode || mode == DImode)
1339 {
1340 high = CONST_DOUBLE_HIGH (op);
1341 low = CONST_DOUBLE_LOW (op);
1342 }
1343 else
1344 {
1345 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1346 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1347 high = l[endian];
1348 low = l[1 - endian];
1349 }
1350
1351 if (TARGET_32BIT)
1352 return (num_insns_constant_wide (low)
1353 + num_insns_constant_wide (high));
1354
1355 else
1356 {
1357 if (high == 0 && low >= 0)
1358 return num_insns_constant_wide (low);
1359
1360 else if (high == -1 && low < 0)
1361 return num_insns_constant_wide (low);
1362
1363 else if (mask64_operand (op, mode))
1364 return 2;
1365
1366 else if (low == 0)
1367 return num_insns_constant_wide (high) + 1;
1368
1369 else
1370 return (num_insns_constant_wide (high)
1371 + num_insns_constant_wide (low) + 1);
1372 }
1373 }
1374
1375 else
1376 abort ();
1377 }
1378
1379 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1380 register with one instruction per word. We only do this if we can
1381 safely read CONST_DOUBLE_{LOW,HIGH}. */
1382
1383 int
1384 easy_fp_constant (op, mode)
1385 rtx op;
1386 enum machine_mode mode;
1387 {
1388 if (GET_CODE (op) != CONST_DOUBLE
1389 || GET_MODE (op) != mode
1390 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1391 return 0;
1392
1393 /* Consider all constants with -msoft-float to be easy. */
1394 if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
1395 && mode != DImode)
1396 return 1;
1397
1398 /* If we are using V.4 style PIC, consider all constants to be hard. */
1399 if (flag_pic && DEFAULT_ABI == ABI_V4)
1400 return 0;
1401
1402 #ifdef TARGET_RELOCATABLE
1403 /* Similarly if we are using -mrelocatable, consider all constants
1404 to be hard. */
1405 if (TARGET_RELOCATABLE)
1406 return 0;
1407 #endif
1408
1409 if (mode == TFmode)
1410 {
1411 long k[4];
1412 REAL_VALUE_TYPE rv;
1413
1414 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1415 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1416
1417 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1418 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
1419 && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
1420 && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
1421 }
1422
1423 else if (mode == DFmode)
1424 {
1425 long k[2];
1426 REAL_VALUE_TYPE rv;
1427
1428 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1429 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1430
1431 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1432 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1433 }
1434
1435 else if (mode == SFmode)
1436 {
1437 long l;
1438 REAL_VALUE_TYPE rv;
1439
1440 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1441 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1442
1443 return num_insns_constant_wide (l) == 1;
1444 }
1445
1446 else if (mode == DImode)
1447 return ((TARGET_POWERPC64
1448 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1449 || (num_insns_constant (op, DImode) <= 2));
1450
1451 else if (mode == SImode)
1452 return 1;
1453 else
1454 abort ();
1455 }
1456
1457 /* Return nonzero if all elements of a vector have the same value. */
1458
1459 static int
1460 easy_vector_same (op, mode)
1461 rtx op;
1462 enum machine_mode mode ATTRIBUTE_UNUSED;
1463 {
1464 int units, i, cst;
1465
1466 units = CONST_VECTOR_NUNITS (op);
1467
1468 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1469 for (i = 1; i < units; ++i)
1470 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
1471 break;
1472 if (i == units)
1473 return 1;
1474 return 0;
1475 }
1476
1477 /* Return 1 if the operand is a CONST_INT and can be put into a
1478 register without using memory. */
1479
1480 int
1481 easy_vector_constant (op, mode)
1482 rtx op;
1483 enum machine_mode mode;
1484 {
1485 int cst, cst2;
1486
1487 if (GET_CODE (op) != CONST_VECTOR
1488 || (!TARGET_ALTIVEC
1489 && !TARGET_SPE))
1490 return 0;
1491
1492 if (zero_constant (op, mode)
1493 && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
1494 || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
1495 return 1;
1496
1497 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
1498 return 0;
1499
1500 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1501 cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));
1502
1503 /* Limit SPE vectors to 15 bits signed. These we can generate with:
1504 li r0, CONSTANT1
1505 evmergelo r0, r0, r0
1506 li r0, CONSTANT2
1507
1508 I don't know how efficient it would be to allow bigger constants,
1509 considering we'll have an extra 'ori' for every 'li'. I doubt 5
1510 instructions is better than a 64-bit memory load, but I don't
1511 have the e500 timing specs. */
1512 if (TARGET_SPE && mode == V2SImode
1513 && cst >= -0x7fff && cst <= 0x7fff
1514 && cst2 >= -0x7fff && cst2 <= 0x7fff)
1515 return 1;
1516
1517 if (TARGET_ALTIVEC && EASY_VECTOR_15 (cst, op, mode))
1518 return 1;
1519
1520 if (TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode))
1521 return 1;
1522
1523 return 0;
1524 }
1525
1526 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
1527
1528 int
1529 easy_vector_constant_add_self (op, mode)
1530 rtx op;
1531 enum machine_mode mode;
1532 {
1533 int cst;
1534
1535 if (!easy_vector_constant (op, mode))
1536 return 0;
1537
1538 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1539
1540 return TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode);
1541 }
1542
1543 const char *
1544 output_vec_const_move (operands)
1545 rtx *operands;
1546 {
1547 int cst, cst2;
1548 enum machine_mode mode;
1549 rtx dest, vec;
1550
1551 dest = operands[0];
1552 vec = operands[1];
1553
1554 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
1555 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
1556 mode = GET_MODE (dest);
1557
1558 if (TARGET_ALTIVEC)
1559 {
1560 if (zero_constant (vec, mode))
1561 return "vxor %0,%0,%0";
1562 else if (EASY_VECTOR_15 (cst, vec, mode))
1563 {
1564 operands[1] = GEN_INT (cst);
1565 switch (mode)
1566 {
1567 case V4SImode:
1568 return "vspltisw %0,%1";
1569 case V8HImode:
1570 return "vspltish %0,%1";
1571 case V16QImode:
1572 return "vspltisb %0,%1";
1573 default:
1574 abort ();
1575 }
1576 }
1577 else if (EASY_VECTOR_15_ADD_SELF (cst, vec, mode))
1578 return "#";
1579 else
1580 abort ();
1581 }
1582
1583 if (TARGET_SPE)
1584 {
1585 /* Vector constant 0 is handled as a splitter of V2SI, and in the
1586 pattern of V1DI, V4HI, and V2SF.
1587
1588 FIXME: We should probably return # and add post-reload
1589 splitters for these, but this way is so easy ;-).
1590 */
1591 operands[1] = GEN_INT (cst);
1592 operands[2] = GEN_INT (cst2);
1593 if (cst == cst2)
1594 return "li %0,%1\n\tevmergelo %0,%0,%0";
1595 else
1596 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
1597 }
1598
1599 abort ();
1600 }
1601
1602 /* Return 1 if the operand is the constant 0. This works for scalars
1603 as well as vectors. */
1604 int
1605 zero_constant (op, mode)
1606 rtx op;
1607 enum machine_mode mode;
1608 {
1609 return op == CONST0_RTX (mode);
1610 }
1611
1612 /* Return 1 if the operand is 0.0. */
1613 int
1614 zero_fp_constant (op, mode)
1615 rtx op;
1616 enum machine_mode mode;
1617 {
1618 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1619 }
1620
1621 /* Return 1 if the operand is in volatile memory. Note that during
1622 the RTL generation phase, memory_operand does not return TRUE for
1623 volatile memory references. So this function allows us to
1624 recognize volatile references where it's safe. */
1625
1626 int
1627 volatile_mem_operand (op, mode)
1628 rtx op;
1629 enum machine_mode mode;
1630 {
1631 if (GET_CODE (op) != MEM)
1632 return 0;
1633
1634 if (!MEM_VOLATILE_P (op))
1635 return 0;
1636
1637 if (mode != GET_MODE (op))
1638 return 0;
1639
1640 if (reload_completed)
1641 return memory_operand (op, mode);
1642
1643 if (reload_in_progress)
1644 return strict_memory_address_p (mode, XEXP (op, 0));
1645
1646 return memory_address_p (mode, XEXP (op, 0));
1647 }
1648
1649 /* Return 1 if the operand is an offsettable memory operand. */
1650
1651 int
1652 offsettable_mem_operand (op, mode)
1653 rtx op;
1654 enum machine_mode mode;
1655 {
1656 return ((GET_CODE (op) == MEM)
1657 && offsettable_address_p (reload_completed || reload_in_progress,
1658 mode, XEXP (op, 0)));
1659 }
1660
1661 /* Return 1 if the operand is either an easy FP constant (see above) or
1662 memory. */
1663
1664 int
1665 mem_or_easy_const_operand (op, mode)
1666 rtx op;
1667 enum machine_mode mode;
1668 {
1669 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1670 }
1671
1672 /* Return 1 if the operand is either a non-special register or an item
1673 that can be used as the operand of a `mode' add insn. */
1674
1675 int
1676 add_operand (op, mode)
1677 rtx op;
1678 enum machine_mode mode;
1679 {
1680 if (GET_CODE (op) == CONST_INT)
1681 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1682 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1683
1684 return gpc_reg_operand (op, mode);
1685 }
1686
1687 /* Return 1 if OP is a constant but not a valid add_operand. */
1688
1689 int
1690 non_add_cint_operand (op, mode)
1691 rtx op;
1692 enum machine_mode mode ATTRIBUTE_UNUSED;
1693 {
1694 return (GET_CODE (op) == CONST_INT
1695 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1696 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1697 }
1698
1699 /* Return 1 if the operand is a non-special register or a constant that
1700 can be used as the operand of an OR or XOR insn on the RS/6000. */
1701
1702 int
1703 logical_operand (op, mode)
1704 rtx op;
1705 enum machine_mode mode;
1706 {
1707 HOST_WIDE_INT opl, oph;
1708
1709 if (gpc_reg_operand (op, mode))
1710 return 1;
1711
1712 if (GET_CODE (op) == CONST_INT)
1713 {
1714 opl = INTVAL (op) & GET_MODE_MASK (mode);
1715
1716 #if HOST_BITS_PER_WIDE_INT <= 32
1717 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1718 return 0;
1719 #endif
1720 }
1721 else if (GET_CODE (op) == CONST_DOUBLE)
1722 {
1723 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1724 abort ();
1725
1726 opl = CONST_DOUBLE_LOW (op);
1727 oph = CONST_DOUBLE_HIGH (op);
1728 if (oph != 0)
1729 return 0;
1730 }
1731 else
1732 return 0;
1733
1734 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1735 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1736 }
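/* For instance (illustrative only): 0x00005678 satisfies the first
   test above and fits the immediate field of ori/xori, 0x56780000
   satisfies the second and fits oris/xoris, while 0x12345678 fails
   both and must be split (see non_logical_cint_operand below).  */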
1737
1738 /* Return 1 if C is a constant that is not a logical operand (as
1739 above), but could be split into one. */
1740
1741 int
1742 non_logical_cint_operand (op, mode)
1743 rtx op;
1744 enum machine_mode mode;
1745 {
1746 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1747 && ! logical_operand (op, mode)
1748 && reg_or_logical_cint_operand (op, mode));
1749 }
1750
1751 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1752 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1753 Reject all ones and all zeros, since these should have been optimized
1754 away and confuse the making of MB and ME. */
1755
1756 int
1757 mask_operand (op, mode)
1758 rtx op;
1759 enum machine_mode mode ATTRIBUTE_UNUSED;
1760 {
1761 HOST_WIDE_INT c, lsb;
1762
1763 if (GET_CODE (op) != CONST_INT)
1764 return 0;
1765
1766 c = INTVAL (op);
1767
1768 /* Fail in 64-bit mode if the mask wraps around because the upper
1769 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1770 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
1771 return 0;
1772
1773 /* We don't change the number of transitions by inverting,
1774 so make sure we start with the LS bit zero. */
1775 if (c & 1)
1776 c = ~c;
1777
1778 /* Reject all zeros or all ones. */
1779 if (c == 0)
1780 return 0;
1781
1782 /* Find the first transition. */
1783 lsb = c & -c;
1784
1785 /* Invert to look for a second transition. */
1786 c = ~c;
1787
1788 /* Erase first transition. */
1789 c &= -lsb;
1790
1791 /* Find the second transition (if any). */
1792 lsb = c & -c;
1793
1794 /* Match if all the bits above are 1's (or c is zero). */
1795 return c == -lsb;
1796 }
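/* A worked example of the transition test above (illustrative only),
   with c = 0x0ff0 (LS bit already zero, so no initial inversion):
     lsb = c & -c         ->  0x0010         (first transition)
     c = ~c; c &= -lsb    ->  0x...fffff000
     lsb = c & -c         ->  0x1000         (second transition)
   and c == -lsb holds, so 0x0ff0 is accepted: exactly two
   transitions, i.e. one contiguous run of 1s.  */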
1797
1798 /* Return 1 for the PowerPC64 rlwinm corner case. */
1799
1800 int
1801 mask_operand_wrap (op, mode)
1802 rtx op;
1803 enum machine_mode mode ATTRIBUTE_UNUSED;
1804 {
1805 HOST_WIDE_INT c, lsb;
1806
1807 if (GET_CODE (op) != CONST_INT)
1808 return 0;
1809
1810 c = INTVAL (op);
1811
1812 if ((c & 0x80000001) != 0x80000001)
1813 return 0;
1814
1815 c = ~c;
1816 if (c == 0)
1817 return 0;
1818
1819 lsb = c & -c;
1820 c = ~c;
1821 c &= -lsb;
1822 lsb = c & -c;
1823 return c == -lsb;
1824 }
1825
1826 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1827 It is if there is no more than one 1->0 or 0->1 transition.
1828 Reject all zeros, since zero should have been optimized away and
1829 confuses the making of MB and ME. */
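/* Illustration (added for exposition): with a single transition the
   test below reduces to one c == -lsb check.  E.g. 0xffffffffffff0000
   and 0x000000000000ffff are accepted (rldicr- and rldicl-style
   masks), while 0x0000ffff0000ffff is rejected.  */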
1830
1831 int
1832 mask64_operand (op, mode)
1833 rtx op;
1834 enum machine_mode mode ATTRIBUTE_UNUSED;
1835 {
1836 if (GET_CODE (op) == CONST_INT)
1837 {
1838 HOST_WIDE_INT c, lsb;
1839
1840 c = INTVAL (op);
1841
1842 /* Reject all zeros. */
1843 if (c == 0)
1844 return 0;
1845
1846 /* We don't change the number of transitions by inverting,
1847 so make sure we start with the LS bit zero. */
1848 if (c & 1)
1849 c = ~c;
1850
1851 /* Find the transition, and check that all bits above are 1's. */
1852 lsb = c & -c;
1853
1854 /* Match if all the bits above are 1's (or c is zero). */
1855 return c == -lsb;
1856 }
1857 return 0;
1858 }
1859
1860 /* Like mask64_operand, but allow up to three transitions. This
1861 predicate is used by insn patterns that generate two rldicl or
1862 rldicr machine insns. */
1863
1864 int
1865 mask64_2_operand (op, mode)
1866 rtx op;
1867 enum machine_mode mode ATTRIBUTE_UNUSED;
1868 {
1869 if (GET_CODE (op) == CONST_INT)
1870 {
1871 HOST_WIDE_INT c, lsb;
1872
1873 c = INTVAL (op);
1874
1875 /* Disallow all zeros. */
1876 if (c == 0)
1877 return 0;
1878
1879 /* We don't change the number of transitions by inverting,
1880 so make sure we start with the LS bit zero. */
1881 if (c & 1)
1882 c = ~c;
1883
1884 /* Find the first transition. */
1885 lsb = c & -c;
1886
1887 /* Invert to look for a second transition. */
1888 c = ~c;
1889
1890 /* Erase first transition. */
1891 c &= -lsb;
1892
1893 /* Find the second transition. */
1894 lsb = c & -c;
1895
1896 /* Invert to look for a third transition. */
1897 c = ~c;
1898
1899 /* Erase second transition. */
1900 c &= -lsb;
1901
1902 /* Find the third transition (if any). */
1903 lsb = c & -c;
1904
1905 /* Match if all the bits above are 1's (or c is zero). */
1906 return c == -lsb;
1907 }
1908 return 0;
1909 }
1910
1911 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
1912 implement ANDing by the mask IN. */
1913 void
1914 build_mask64_2_operands (in, out)
1915 rtx in;
1916 rtx *out;
1917 {
1918 #if HOST_BITS_PER_WIDE_INT >= 64
1919 unsigned HOST_WIDE_INT c, lsb, m1, m2;
1920 int shift;
1921
1922 if (GET_CODE (in) != CONST_INT)
1923 abort ();
1924
1925 c = INTVAL (in);
1926 if (c & 1)
1927 {
1928 /* Assume c initially something like 0x00fff000000fffff. The idea
1929 is to rotate the word so that the middle ^^^^^^ group of zeros
1930 is at the MS end and can be cleared with an rldicl mask. We then
1931 rotate back and clear off the MS ^^ group of zeros with a
1932 second rldicl. */
1933 c = ~c; /* c == 0xff000ffffff00000 */
1934 lsb = c & -c; /* lsb == 0x0000000000100000 */
1935 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
1936 c = ~c; /* c == 0x00fff000000fffff */
1937 c &= -lsb; /* c == 0x00fff00000000000 */
1938 lsb = c & -c; /* lsb == 0x0000100000000000 */
1939 c = ~c; /* c == 0xff000fffffffffff */
1940 c &= -lsb; /* c == 0xff00000000000000 */
1941 shift = 0;
1942 while ((lsb >>= 1) != 0)
1943 shift++; /* shift == 44 on exit from loop */
1944 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
1945 m1 = ~m1; /* m1 == 0x000000ffffffffff */
1946 m2 = ~c; /* m2 == 0x00ffffffffffffff */
1947 }
1948 else
1949 {
1950 /* Assume c initially something like 0xff000f0000000000. The idea
1951 is to rotate the word so that the ^^^ middle group of zeros
1952 is at the LS end and can be cleared with an rldicr mask. We then
1953 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
1954 a second rldicr. */
1955 lsb = c & -c; /* lsb == 0x0000010000000000 */
1956 m2 = -lsb; /* m2 == 0xffffff0000000000 */
1957 c = ~c; /* c == 0x00fff0ffffffffff */
1958 c &= -lsb; /* c == 0x00fff00000000000 */
1959 lsb = c & -c; /* lsb == 0x0000100000000000 */
1960 c = ~c; /* c == 0xff000fffffffffff */
1961 c &= -lsb; /* c == 0xff00000000000000 */
1962 shift = 0;
1963 while ((lsb >>= 1) != 0)
1964 shift++; /* shift == 44 on exit from loop */
1965 m1 = ~c; /* m1 == 0x00ffffffffffffff */
1966 m1 >>= shift; /* m1 == 0x0000000000000fff */
1967 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
1968 }
1969
1970 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
1971 masks will be all 1's. We are guaranteed more than one transition. */
1972 out[0] = GEN_INT (64 - shift);
1973 out[1] = GEN_INT (m1);
1974 out[2] = GEN_INT (shift);
1975 out[3] = GEN_INT (m2);
1976 #else
1977 (void)in;
1978 (void)out;
1979 abort ();
1980 #endif
1981 }
1982
1983 /* Return 1 if the operand is either a non-special register or a constant
1984 that can be used as the operand of a PowerPC64 logical AND insn. */
1985
1986 int
1987 and64_operand (op, mode)
1988 rtx op;
1989 enum machine_mode mode;
1990 {
1991 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1992 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1993
1994 return (logical_operand (op, mode) || mask64_operand (op, mode));
1995 }
1996
1997 /* Like the above, but also match constants that can be implemented
1998 with two rldicl or rldicr insns. */
1999
2000 int
2001 and64_2_operand (op, mode)
2002 rtx op;
2003 enum machine_mode mode;
2004 {
2005 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2006 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2007
2008 return logical_operand (op, mode) || mask64_2_operand (op, mode);
2009 }
2010
2011 /* Return 1 if the operand is either a non-special register or a
2012 constant that can be used as the operand of an RS/6000 logical AND insn. */
2013
2014 int
2015 and_operand (op, mode)
2016 rtx op;
2017 enum machine_mode mode;
2018 {
2019 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2020 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2021
2022 return (logical_operand (op, mode) || mask_operand (op, mode));
2023 }
2024
2025 /* Return 1 if the operand is a general register or memory operand. */
2026
2027 int
2028 reg_or_mem_operand (op, mode)
2029 rtx op;
2030 enum machine_mode mode;
2031 {
2032 return (gpc_reg_operand (op, mode)
2033 || memory_operand (op, mode)
2034 || volatile_mem_operand (op, mode));
2035 }
2036
2037 /* Return 1 if the operand is a general register or a memory operand
2038 without pre_inc, pre_dec, or a non-word-aligned offset, any of which
2039 would produce an invalid form of the (DS-form) PowerPC lwa instruction. */
2040
2041 int
2042 lwa_operand (op, mode)
2043 rtx op;
2044 enum machine_mode mode;
2045 {
2046 rtx inner = op;
2047
2048 if (reload_completed && GET_CODE (inner) == SUBREG)
2049 inner = SUBREG_REG (inner);
2050
2051 return gpc_reg_operand (inner, mode)
2052 || (memory_operand (inner, mode)
2053 && GET_CODE (XEXP (inner, 0)) != PRE_INC
2054 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
2055 && (GET_CODE (XEXP (inner, 0)) != PLUS
2056 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2057 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2058 }
2059
2060 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2061
2062 int
2063 symbol_ref_operand (op, mode)
2064 rtx op;
2065 enum machine_mode mode;
2066 {
2067 if (mode != VOIDmode && GET_MODE (op) != mode)
2068 return 0;
2069
2070 return (GET_CODE (op) == SYMBOL_REF);
2071 }
2072
2073 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2074 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2075
2076 int
2077 call_operand (op, mode)
2078 rtx op;
2079 enum machine_mode mode;
2080 {
2081 if (mode != VOIDmode && GET_MODE (op) != mode)
2082 return 0;
2083
2084 return (GET_CODE (op) == SYMBOL_REF
2085 || (GET_CODE (op) == REG
2086 && (REGNO (op) == LINK_REGISTER_REGNUM
2087 || REGNO (op) == COUNT_REGISTER_REGNUM
2088 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2089 }
2090
2091 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2092 this file. */
2093
2094 int
2095 current_file_function_operand (op, mode)
2096 rtx op;
2097 enum machine_mode mode ATTRIBUTE_UNUSED;
2098 {
2099 if (GET_CODE (op) == SYMBOL_REF
2100 && (SYMBOL_REF_LOCAL_P (op)
2101 || (op == XEXP (DECL_RTL (current_function_decl), 0))))
2102 {
2103 #ifdef ENABLE_CHECKING
2104 if (!SYMBOL_REF_FUNCTION_P (op))
2105 abort ();
2106 #endif
2107 return 1;
2108 }
2109 return 0;
2110 }
2111
2112 /* Return 1 if this operand is a valid input for a move insn. */
2113
2114 int
2115 input_operand (op, mode)
2116 rtx op;
2117 enum machine_mode mode;
2118 {
2119 /* Memory is always valid. */
2120 if (memory_operand (op, mode))
2121 return 1;
2122
2123 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
2124 if (GET_CODE (op) == CONSTANT_P_RTX)
2125 return 1;
2126
2127 /* For floating-point, easy constants are valid. */
2128 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2129 && CONSTANT_P (op)
2130 && easy_fp_constant (op, mode))
2131 return 1;
2132
2133 /* Allow any integer constant. */
2134 if (GET_MODE_CLASS (mode) == MODE_INT
2135 && (GET_CODE (op) == CONST_INT
2136 || GET_CODE (op) == CONST_DOUBLE))
2137 return 1;
2138
2139 /* Allow easy vector constants. */
2140 if (GET_CODE (op) == CONST_VECTOR
2141 && easy_vector_constant (op, mode))
2142 return 1;
2143
2144 /* For floating-point or multi-word mode, the only remaining valid type
2145 is a register. */
2146 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2147 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2148 return register_operand (op, mode);
2149
2150 /* The only cases left are integral modes one word or smaller (we
2151 do not get called for MODE_CC values). These can be in any
2152 register. */
2153 if (register_operand (op, mode))
2154 return 1;
2155
2156 /* A SYMBOL_REF referring to the TOC is valid. */
2157 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
2158 return 1;
2159
2160 /* A constant pool expression (relative to the TOC) is valid. */
2161 if (TOC_RELATIVE_EXPR_P (op))
2162 return 1;
2163
2164 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2165 to be valid. */
2166 if (DEFAULT_ABI == ABI_V4
2167 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
2168 && small_data_operand (op, Pmode))
2169 return 1;
2170
2171 return 0;
2172 }
2173
2174 /* Return 1 for an operand in small memory on V.4/eabi. */
2175
2176 int
2177 small_data_operand (op, mode)
2178 rtx op ATTRIBUTE_UNUSED;
2179 enum machine_mode mode ATTRIBUTE_UNUSED;
2180 {
2181 #if TARGET_ELF
2182 rtx sym_ref;
2183
2184 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2185 return 0;
2186
2187 if (DEFAULT_ABI != ABI_V4)
2188 return 0;
2189
2190 if (GET_CODE (op) == SYMBOL_REF)
2191 sym_ref = op;
2192
2193 else if (GET_CODE (op) != CONST
2194 || GET_CODE (XEXP (op, 0)) != PLUS
2195 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2196 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2197 return 0;
2198
2199 else
2200 {
2201 rtx sum = XEXP (op, 0);
2202 HOST_WIDE_INT summand;
2203
2204 /* We have to be careful here, because it is the referenced address
2205 that must be within 32k of _SDA_BASE_, not just the symbol. */
2206 summand = INTVAL (XEXP (sum, 1));
2207 if (summand < 0 || summand > g_switch_value)
2208 return 0;
2209
2210 sym_ref = XEXP (sum, 0);
2211 }
2212
2213 return SYMBOL_REF_SMALL_V4_P (sym_ref);
2214 #else
2215 return 0;
2216 #endif
2217 }
2218 \f
2219 static int
2220 constant_pool_expr_1 (op, have_sym, have_toc)
2221 rtx op;
2222 int *have_sym;
2223 int *have_toc;
2224 {
2225 switch (GET_CODE(op))
2226 {
2227 case SYMBOL_REF:
2228 if (CONSTANT_POOL_ADDRESS_P (op))
2229 {
2230 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2231 {
2232 *have_sym = 1;
2233 return 1;
2234 }
2235 else
2236 return 0;
2237 }
2238 else if (! strcmp (XSTR (op, 0), toc_label_name))
2239 {
2240 *have_toc = 1;
2241 return 1;
2242 }
2243 else
2244 return 0;
2245 case PLUS:
2246 case MINUS:
2247 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2248 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2249 case CONST:
2250 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2251 case CONST_INT:
2252 return 1;
2253 default:
2254 return 0;
2255 }
2256 }
2257
2258 int
2259 constant_pool_expr_p (op)
2260 rtx op;
2261 {
2262 int have_sym = 0;
2263 int have_toc = 0;
2264 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2265 }
2266
2267 int
2268 toc_relative_expr_p (op)
2269 rtx op;
2270 {
2271 int have_sym = 0;
2272 int have_toc = 0;
2273 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2274 }
2275
2276 /* Try machine-dependent ways of modifying an illegitimate address
2277 to be legitimate. If we find one, return the new, valid address.
2278 This is used from only one place: `memory_address' in explow.c.
2279
2280 OLDX is the address as it was before break_out_memory_refs was
2281 called. In some cases it is useful to look at this to decide what
2282 needs to be done.
2283
2284 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2285
2286 It is always safe for this function to do nothing. It exists to
2287 recognize opportunities to optimize the output.
2288
2289 On RS/6000, first check for the sum of a register with a constant
2290 integer that is out of range. If so, generate code to add the
2291 constant with the low-order 16 bits masked to the register and force
2292 this result into another register (this can be done with `cau').
2293 Then generate an address of REG+(CONST&0xffff), allowing for the
2294 possibility of bit 16 being a one.
2295
2296 Then check for the sum of a register and something that is not constant;
2297 try to load the non-constant part into a register and return the sum. */
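/* Worked example (added for exposition): for reg + 0x1234abcd the low
   16 bits sign-extend to low_int = -0x5433, so high_int becomes
   0x12350000; an addis-style add of high_int followed by the
   remaining reg+low_int displacement reconstructs the original
   offset even though bit 0x8000 of the constant is set.  */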
2298 rtx
2299 rs6000_legitimize_address (x, oldx, mode)
2300 rtx x;
2301 rtx oldx ATTRIBUTE_UNUSED;
2302 enum machine_mode mode;
2303 {
2304 if (GET_CODE (x) == PLUS
2305 && GET_CODE (XEXP (x, 0)) == REG
2306 && GET_CODE (XEXP (x, 1)) == CONST_INT
2307 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
2308 {
2309 HOST_WIDE_INT high_int, low_int;
2310 rtx sum;
2311 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2312 high_int = INTVAL (XEXP (x, 1)) - low_int;
2313 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2314 GEN_INT (high_int)), 0);
2315 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2316 }
2317 else if (GET_CODE (x) == PLUS
2318 && GET_CODE (XEXP (x, 0)) == REG
2319 && GET_CODE (XEXP (x, 1)) != CONST_INT
2320 && GET_MODE_NUNITS (mode) == 1
2321 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2322 || TARGET_POWERPC64
2323 || (mode != DFmode && mode != TFmode))
2324 && (TARGET_POWERPC64 || mode != DImode)
2325 && mode != TImode)
2326 {
2327 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2328 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2329 }
2330 else if (ALTIVEC_VECTOR_MODE (mode))
2331 {
2332 rtx reg;
2333
2334 /* Make sure both operands are registers. */
2335 if (GET_CODE (x) == PLUS)
2336 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
2337 force_reg (Pmode, XEXP (x, 1)));
2338
2339 reg = force_reg (Pmode, x);
2340 return reg;
2341 }
2342 else if (SPE_VECTOR_MODE (mode))
2343 {
2344 /* We accept [reg + reg] and [reg + OFFSET]. */
2345
2346 if (GET_CODE (x) == PLUS)
2347 {
2348 rtx op1 = XEXP (x, 0);
2349 rtx op2 = XEXP (x, 1);
2350
2351 op1 = force_reg (Pmode, op1);
2352
2353 if (GET_CODE (op2) != REG
2354 && (GET_CODE (op2) != CONST_INT
2355 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2356 op2 = force_reg (Pmode, op2);
2357
2358 return gen_rtx_PLUS (Pmode, op1, op2);
2359 }
2360
2361 return force_reg (Pmode, x);
2362 }
2363 else if (TARGET_ELF
2364 && TARGET_32BIT
2365 && TARGET_NO_TOC
2366 && ! flag_pic
2367 && GET_CODE (x) != CONST_INT
2368 && GET_CODE (x) != CONST_DOUBLE
2369 && CONSTANT_P (x)
2370 && GET_MODE_NUNITS (mode) == 1
2371 && (GET_MODE_BITSIZE (mode) <= 32
2372 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
2373 {
2374 rtx reg = gen_reg_rtx (Pmode);
2375 emit_insn (gen_elf_high (reg, (x)));
2376 return gen_rtx_LO_SUM (Pmode, reg, (x));
2377 }
2378 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2379 && ! flag_pic
2380 #if TARGET_MACHO
2381 && ! MACHO_DYNAMIC_NO_PIC_P
2382 #endif
2383 && GET_CODE (x) != CONST_INT
2384 && GET_CODE (x) != CONST_DOUBLE
2385 && CONSTANT_P (x)
2386 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
2387 && mode != DImode
2388 && mode != TImode)
2389 {
2390 rtx reg = gen_reg_rtx (Pmode);
2391 emit_insn (gen_macho_high (reg, (x)));
2392 return gen_rtx_LO_SUM (Pmode, reg, (x));
2393 }
2394 else if (TARGET_TOC
2395 && CONSTANT_POOL_EXPR_P (x)
2396 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
2397 {
2398 return create_TOC_reference (x);
2399 }
2400 else
2401 return NULL_RTX;
2402 }
2403
2404 /* The convention appears to be to define this wherever it is used.
2405 Since legitimize_reload_address is now defined here,
2406 REG_MODE_OK_FOR_BASE_P is used here as well. */
2407 #ifndef REG_MODE_OK_FOR_BASE_P
2408 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2409 #endif
2410
2411 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2412 replace the input X, or the original X if no replacement is called for.
2413 The output parameter *WIN is 1 if the calling macro should goto WIN,
2414 0 if it should not.
2415
2416 For RS/6000, we wish to handle large displacements off a base
2417 register by splitting the addend across an addis and the mem insn.
2418 This cuts the number of extra insns needed from 3 to 1.
2419
2420 On Darwin, we use this to generate code for floating point constants.
2421 A movsf_low is generated so we wind up with 2 instructions rather than 3.
2422 The Darwin code is inside #if TARGET_MACHO because only then is
2423 machopic_function_base_name() defined. */
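/* Worked example of the 32-bit overflow check below (added for
   exposition): for val = 0x7fff8000 the split gives low = -0x8000
   and, after the sign-wrapping adjustment, high = -0x80000000, so
   high + low != val and the function declines to split the address,
   returning with *win = 0.  */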
2424 rtx
2425 rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
2426 rtx x;
2427 enum machine_mode mode;
2428 int opnum;
2429 int type;
2430 int ind_levels ATTRIBUTE_UNUSED;
2431 int *win;
2432 {
2433 /* We must recognize output that we have already generated ourselves. */
2434 if (GET_CODE (x) == PLUS
2435 && GET_CODE (XEXP (x, 0)) == PLUS
2436 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2437 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2438 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2439 {
2440 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2441 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2442 opnum, (enum reload_type)type);
2443 *win = 1;
2444 return x;
2445 }
2446
2447 #if TARGET_MACHO
2448 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
2449 && GET_CODE (x) == LO_SUM
2450 && GET_CODE (XEXP (x, 0)) == PLUS
2451 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
2452 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
2453 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
2454 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
2455 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
2456 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
2457 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
2458 {
2459 /* Result of previous invocation of this function on Darwin
2460 floating point constant. */
2461 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2462 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2463 opnum, (enum reload_type)type);
2464 *win = 1;
2465 return x;
2466 }
2467 #endif
2468 if (GET_CODE (x) == PLUS
2469 && GET_CODE (XEXP (x, 0)) == REG
2470 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2471 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
2472 && GET_CODE (XEXP (x, 1)) == CONST_INT
2473 && !SPE_VECTOR_MODE (mode)
2474 && !ALTIVEC_VECTOR_MODE (mode))
2475 {
2476 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
2477 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
2478 HOST_WIDE_INT high
2479 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2480
2481 /* Check for 32-bit overflow. */
2482 if (high + low != val)
2483 {
2484 *win = 0;
2485 return x;
2486 }
2487
2488 /* Reload the high part into a base reg; leave the low part
2489 in the mem directly. */
2490
2491 x = gen_rtx_PLUS (GET_MODE (x),
2492 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
2493 GEN_INT (high)),
2494 GEN_INT (low));
2495
2496 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2497 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2498 opnum, (enum reload_type)type);
2499 *win = 1;
2500 return x;
2501 }
2502 #if TARGET_MACHO
2503 if (GET_CODE (x) == SYMBOL_REF
2504 && DEFAULT_ABI == ABI_DARWIN
2505 && !ALTIVEC_VECTOR_MODE (mode)
2506 && flag_pic)
2507 {
2508 /* Darwin load of floating point constant. */
2509 rtx offset = gen_rtx (CONST, Pmode,
2510 gen_rtx (MINUS, Pmode, x,
2511 gen_rtx (SYMBOL_REF, Pmode,
2512 machopic_function_base_name ())));
2513 x = gen_rtx (LO_SUM, GET_MODE (x),
2514 gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
2515 gen_rtx (HIGH, Pmode, offset)), offset);
2516 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2517 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2518 opnum, (enum reload_type)type);
2519 *win = 1;
2520 return x;
2521 }
2522 if (GET_CODE (x) == SYMBOL_REF
2523 && DEFAULT_ABI == ABI_DARWIN
2524 && !ALTIVEC_VECTOR_MODE (mode)
2525 && MACHO_DYNAMIC_NO_PIC_P)
2526 {
2527 /* Darwin load of floating point constant. */
2528 x = gen_rtx (LO_SUM, GET_MODE (x),
2529 gen_rtx (HIGH, Pmode, x), x);
2530 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2531 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2532 opnum, (enum reload_type)type);
2533 *win = 1;
2534 return x;
2535 }
2536 #endif
2537 if (TARGET_TOC
2538 && CONSTANT_POOL_EXPR_P (x)
2539 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
2540 {
2541 (x) = create_TOC_reference (x);
2542 *win = 1;
2543 return x;
2544 }
2545 *win = 0;
2546 return x;
2547 }
2548
2549 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2550 that is a valid memory address for an instruction.
2551 The MODE argument is the machine mode for the MEM expression
2552 that wants to use this address.
2553
2554 On the RS/6000, there are four valid addresses: a SYMBOL_REF that
2555 refers to a constant pool entry of an address (or the sum of it
2556 plus a constant), a short (16-bit signed) constant plus a register,
2557 the sum of two registers, or a register indirect, possibly with an
2558 auto-increment. For DFmode and DImode with a constant plus register,
2559 we must ensure that both words are addressable, or on PowerPC64 that
2560 the offset is word aligned.
2561
2562 For modes spanning multiple registers (DFmode in 32-bit GPRs,
2563 32-bit DImode, TImode), indexed addressing cannot be used because
2564 adjacent memory cells are accessed by adding word-sized offsets
2565 during assembly output. */
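/* Rough assembler sketch of the four forms (added for exposition;
   the mnemonics are illustrative): a TOC-relative constant pool
   reference such as "lwz r3,LC..0(r2)"; a D-form offset,
   "lwz r3,8(r9)"; an X-form indexed sum, "lwzx r3,r9,r10"; and
   register indirect, "lwz r3,0(r9)", possibly with update ("lwzu").  */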
2566 int
2567 rs6000_legitimate_address (mode, x, reg_ok_strict)
2568 enum machine_mode mode;
2569 rtx x;
2570 int reg_ok_strict;
2571 {
2572 if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
2573 return 1;
2574 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2575 && !ALTIVEC_VECTOR_MODE (mode)
2576 && !SPE_VECTOR_MODE (mode)
2577 && TARGET_UPDATE
2578 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
2579 return 1;
2580 if (LEGITIMATE_SMALL_DATA_P (mode, x))
2581 return 1;
2582 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
2583 return 1;
2584 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2585 if (! reg_ok_strict
2586 && GET_CODE (x) == PLUS
2587 && GET_CODE (XEXP (x, 0)) == REG
2588 && XEXP (x, 0) == virtual_stack_vars_rtx
2589 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2590 return 1;
2591 if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
2592 return 1;
2593 if (mode != TImode
2594 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2595 || TARGET_POWERPC64
2596 || (mode != DFmode && mode != TFmode))
2597 && (TARGET_POWERPC64 || mode != DImode)
2598 && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
2599 return 1;
2600 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
2601 return 1;
2602 return 0;
2603 }
2604 \f
2605 /* Try to output insns to set TARGET equal to the constant C if it can
2606 be done in less than N insns. Do all computations in MODE.
2607 Returns the place where the output has been placed if it can be
2608 done and the insns have been emitted. If it would take more than N
2609 insns, zero is returned and no insns are emitted. */
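/* Illustration of the SImode path below (added for exposition):
   0x12345678 is emitted as dest = 0x12340000 followed by
   dest |= 0x5678, the usual lis/ori pair; QImode and HImode
   constants are always emitted as a single set.  */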
2610
2611 rtx
2612 rs6000_emit_set_const (dest, mode, source, n)
2613 rtx dest, source;
2614 enum machine_mode mode;
2615 int n ATTRIBUTE_UNUSED;
2616 {
2617 rtx result, insn, set;
2618 HOST_WIDE_INT c0, c1;
2619
2620 if (mode == QImode || mode == HImode)
2621 {
2622 if (dest == NULL)
2623 dest = gen_reg_rtx (mode);
2624 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
2625 return dest;
2626 }
2627 else if (mode == SImode)
2628 {
2629 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
2630
2631 emit_insn (gen_rtx_SET (VOIDmode, result,
2632 GEN_INT (INTVAL (source)
2633 & (~ (HOST_WIDE_INT) 0xffff))));
2634 emit_insn (gen_rtx_SET (VOIDmode, dest,
2635 gen_rtx_IOR (SImode, result,
2636 GEN_INT (INTVAL (source) & 0xffff))));
2637 result = dest;
2638 }
2639 else if (mode == DImode)
2640 {
2641 if (GET_CODE (source) == CONST_INT)
2642 {
2643 c0 = INTVAL (source);
2644 c1 = -(c0 < 0);
2645 }
2646 else if (GET_CODE (source) == CONST_DOUBLE)
2647 {
2648 #if HOST_BITS_PER_WIDE_INT >= 64
2649 c0 = CONST_DOUBLE_LOW (source);
2650 c1 = -(c0 < 0);
2651 #else
2652 c0 = CONST_DOUBLE_LOW (source);
2653 c1 = CONST_DOUBLE_HIGH (source);
2654 #endif
2655 }
2656 else
2657 abort ();
2658
2659 result = rs6000_emit_set_long_const (dest, c0, c1);
2660 }
2661 else
2662 abort ();
2663
2664 insn = get_last_insn ();
2665 set = single_set (insn);
2666 if (! CONSTANT_P (SET_SRC (set)))
2667 set_unique_reg_note (insn, REG_EQUAL, source);
2668
2669 return result;
2670 }
2671
2672 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2673 fall back to a straightforward decomposition. We do this to avoid
2674 exponential run times encountered when looking for longer sequences
2675 with rs6000_emit_set_const. */
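/* Illustrative trace of the TARGET_POWERPC64 path below (added for
   exposition): 0x123456789abcdef0 splits into ud4..ud1 = 0x1234,
   0x5678, 0x9abc, 0xdef0 and is built as dest = 0x12340000;
   dest |= 0x5678; dest <<= 32; dest |= 0x9abc0000; dest |= 0xdef0,
   i.e. five insns, a lis/ori/sldi/oris/ori sequence.  */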
2676 static rtx
2677 rs6000_emit_set_long_const (dest, c1, c2)
2678 rtx dest;
2679 HOST_WIDE_INT c1, c2;
2680 {
2681 if (!TARGET_POWERPC64)
2682 {
2683 rtx operand1, operand2;
2684
2685 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
2686 DImode);
2687 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
2688 DImode);
2689 emit_move_insn (operand1, GEN_INT (c1));
2690 emit_move_insn (operand2, GEN_INT (c2));
2691 }
2692 else
2693 {
2694 HOST_WIDE_INT ud1, ud2, ud3, ud4;
2695
2696 ud1 = c1 & 0xffff;
2697 ud2 = (c1 & 0xffff0000) >> 16;
2698 #if HOST_BITS_PER_WIDE_INT >= 64
2699 c2 = c1 >> 32;
2700 #endif
2701 ud3 = c2 & 0xffff;
2702 ud4 = (c2 & 0xffff0000) >> 16;
2703
2704 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
2705 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2706 {
2707 if (ud1 & 0x8000)
2708 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
2709 else
2710 emit_move_insn (dest, GEN_INT (ud1));
2711 }
2712
2713 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
2714 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
2715 {
2716 if (ud2 & 0x8000)
2717 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
2718 - 0x80000000));
2719 else
2720 emit_move_insn (dest, GEN_INT (ud2 << 16));
2721 if (ud1 != 0)
2722 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2723 }
2724 else if ((ud4 == 0xffff && (ud3 & 0x8000))
2725 || (ud4 == 0 && ! (ud3 & 0x8000)))
2726 {
2727 if (ud3 & 0x8000)
2728 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
2729 - 0x80000000));
2730 else
2731 emit_move_insn (dest, GEN_INT (ud3 << 16));
2732
2733 if (ud2 != 0)
2734 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
2735 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
2736 if (ud1 != 0)
2737 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2738 }
2739 else
2740 {
2741 if (ud4 & 0x8000)
2742 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
2743 - 0x80000000));
2744 else
2745 emit_move_insn (dest, GEN_INT (ud4 << 16));
2746
2747 if (ud3 != 0)
2748 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
2749
2750 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
2751 if (ud2 != 0)
2752 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
2753 GEN_INT (ud2 << 16)));
2754 if (ud1 != 0)
2755 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2756 }
2757 }
2758 return dest;
2759 }
2760
2761 /* Emit a move from SOURCE to DEST in mode MODE. */
2762 void
2763 rs6000_emit_move (dest, source, mode)
2764 rtx dest;
2765 rtx source;
2766 enum machine_mode mode;
2767 {
2768 rtx operands[2];
2769 operands[0] = dest;
2770 operands[1] = source;
2771
2772 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
2773 if (GET_CODE (operands[1]) == CONST_DOUBLE
2774 && ! FLOAT_MODE_P (mode)
2775 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2776 {
2777 /* FIXME. This should never happen. */
2778 /* Since it seems that it does, do the safe thing and convert
2779 to a CONST_INT. */
2780 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
2781 }
2782 if (GET_CODE (operands[1]) == CONST_DOUBLE
2783 && ! FLOAT_MODE_P (mode)
2784 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
2785 && CONST_DOUBLE_LOW (operands[1]) >= 0)
2786 || (CONST_DOUBLE_HIGH (operands[1]) == -1
2787 && CONST_DOUBLE_LOW (operands[1]) < 0)))
2788 abort ();
2789
2790 /* Check if GCC is setting up a block move that will end up using FP
2791 registers as temporaries. We must make sure this is acceptable. */
2792 if (GET_CODE (operands[0]) == MEM
2793 && GET_CODE (operands[1]) == MEM
2794 && mode == DImode
2795 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
2796 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
2797 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
2798 ? 32 : MEM_ALIGN (operands[0])))
2799 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
2800 ? 32
2801 : MEM_ALIGN (operands[1]))))
2802 && ! MEM_VOLATILE_P (operands [0])
2803 && ! MEM_VOLATILE_P (operands [1]))
2804 {
2805 emit_move_insn (adjust_address (operands[0], SImode, 0),
2806 adjust_address (operands[1], SImode, 0));
2807 emit_move_insn (adjust_address (operands[0], SImode, 4),
2808 adjust_address (operands[1], SImode, 4));
2809 return;
2810 }
2811
2812 if (!no_new_pseudos)
2813 {
2814 if (GET_CODE (operands[1]) == MEM && optimize > 0
2815 && (mode == QImode || mode == HImode || mode == SImode)
2816 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2817 {
2818 rtx reg = gen_reg_rtx (word_mode);
2819
2820 emit_insn (gen_rtx_SET (word_mode, reg,
2821 gen_rtx_ZERO_EXTEND (word_mode,
2822 operands[1])));
2823 operands[1] = gen_lowpart (mode, reg);
2824 }
2825 if (GET_CODE (operands[0]) != REG)
2826 operands[1] = force_reg (mode, operands[1]);
2827 }
2828
2829 if (mode == SFmode && ! TARGET_POWERPC
2830 && TARGET_HARD_FLOAT && TARGET_FPRS
2831 && GET_CODE (operands[0]) == MEM)
2832 {
2833 int regnum;
2834
2835 if (reload_in_progress || reload_completed)
2836 regnum = true_regnum (operands[1]);
2837 else if (GET_CODE (operands[1]) == REG)
2838 regnum = REGNO (operands[1]);
2839 else
2840 regnum = -1;
2841
2842 /* If operands[1] is a register, on POWER it may have
2843 double-precision data in it, so truncate it to single
2844 precision. */
2845 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
2846 {
2847 rtx newreg;
2848 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
2849 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
2850 operands[1] = newreg;
2851 }
2852 }
2853
2854 /* Handle the case where reload calls us with an invalid address. */
2855 if (reload_in_progress && mode == Pmode
2856 && (! general_operand (operands[1], mode)
2857 || ! nonimmediate_operand (operands[0], mode)))
2858 goto emit_set;
2859
2860 /* Handle the case of CONSTANT_P_RTX. */
2861 if (GET_CODE (operands[1]) == CONSTANT_P_RTX)
2862 goto emit_set;
2863
2864 /* FIXME: In the long term, this switch statement should go away
2865 and be replaced by a sequence of tests based on things like
2866 mode == Pmode. */
2867 switch (mode)
2868 {
2869 case HImode:
2870 case QImode:
2871 if (CONSTANT_P (operands[1])
2872 && GET_CODE (operands[1]) != CONST_INT)
2873 operands[1] = force_const_mem (mode, operands[1]);
2874 break;
2875
2876 case TFmode:
2877 case DFmode:
2878 case SFmode:
2879 if (CONSTANT_P (operands[1])
2880 && ! easy_fp_constant (operands[1], mode))
2881 operands[1] = force_const_mem (mode, operands[1]);
2882 break;
2883
2884 case V16QImode:
2885 case V8HImode:
2886 case V4SFmode:
2887 case V4SImode:
2888 case V4HImode:
2889 case V2SFmode:
2890 case V2SImode:
2891 case V1DImode:
2892 if (CONSTANT_P (operands[1])
2893 && !easy_vector_constant (operands[1], mode))
2894 operands[1] = force_const_mem (mode, operands[1]);
2895 break;
2896
2897 case SImode:
2898 case DImode:
2899 /* Use the default pattern for the address of ELF small data. */
2900 if (TARGET_ELF
2901 && mode == Pmode
2902 && DEFAULT_ABI == ABI_V4
2903 && (GET_CODE (operands[1]) == SYMBOL_REF
2904 || GET_CODE (operands[1]) == CONST)
2905 && small_data_operand (operands[1], mode))
2906 {
2907 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2908 return;
2909 }
2910
2911 if (DEFAULT_ABI == ABI_V4
2912 && mode == Pmode && mode == SImode
2913 && flag_pic == 1 && got_operand (operands[1], mode))
2914 {
2915 emit_insn (gen_movsi_got (operands[0], operands[1]));
2916 return;
2917 }
2918
2919 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
2920 && TARGET_NO_TOC
2921 && ! flag_pic
2922 && mode == Pmode
2923 && CONSTANT_P (operands[1])
2924 && GET_CODE (operands[1]) != HIGH
2925 && GET_CODE (operands[1]) != CONST_INT)
2926 {
2927 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
2928
2929 /* If this is a function address on -mcall-aixdesc,
2930 convert it to the address of the descriptor. */
2931 if (DEFAULT_ABI == ABI_AIX
2932 && GET_CODE (operands[1]) == SYMBOL_REF
2933 && XSTR (operands[1], 0)[0] == '.')
2934 {
2935 const char *name = XSTR (operands[1], 0);
2936 rtx new_ref;
2937 while (*name == '.')
2938 name++;
2939 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
2940 CONSTANT_POOL_ADDRESS_P (new_ref)
2941 = CONSTANT_POOL_ADDRESS_P (operands[1]);
2942 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
2943 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
2944 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
2945 operands[1] = new_ref;
2946 }
2947
2948 if (DEFAULT_ABI == ABI_DARWIN)
2949 {
2950 #if TARGET_MACHO
2951 if (MACHO_DYNAMIC_NO_PIC_P)
2952 {
2953 /* Take care of any required data indirection. */
2954 operands[1] = rs6000_machopic_legitimize_pic_address (
2955 operands[1], mode, operands[0]);
2956 if (operands[0] != operands[1])
2957 emit_insn (gen_rtx_SET (VOIDmode,
2958 operands[0], operands[1]));
2959 return;
2960 }
2961 #endif
2962 emit_insn (gen_macho_high (target, operands[1]));
2963 emit_insn (gen_macho_low (operands[0], target, operands[1]));
2964 return;
2965 }
2966
2967 emit_insn (gen_elf_high (target, operands[1]));
2968 emit_insn (gen_elf_low (operands[0], target, operands[1]));
2969 return;
2970 }
2971
2972 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2973 and we have put it in the TOC, we just need to make a TOC-relative
2974 reference to it. */
2975 if (TARGET_TOC
2976 && GET_CODE (operands[1]) == SYMBOL_REF
2977 && CONSTANT_POOL_EXPR_P (operands[1])
2978 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
2979 get_pool_mode (operands[1])))
2980 {
2981 operands[1] = create_TOC_reference (operands[1]);
2982 }
2983 else if (mode == Pmode
2984 && CONSTANT_P (operands[1])
2985 && ((GET_CODE (operands[1]) != CONST_INT
2986 && ! easy_fp_constant (operands[1], mode))
2987 || (GET_CODE (operands[1]) == CONST_INT
2988 && num_insns_constant (operands[1], mode) > 2)
2989 || (GET_CODE (operands[0]) == REG
2990 && FP_REGNO_P (REGNO (operands[0]))))
2991 && GET_CODE (operands[1]) != HIGH
2992 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
2993 && ! TOC_RELATIVE_EXPR_P (operands[1]))
2994 {
2995 /* Emit a USE operation so that the constant isn't deleted if
2996 expensive optimizations are turned on because nobody
2997 references it. This should only be done for operands that
2998 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2999 This should not be done for operands that contain LABEL_REFs.
3000 For now, we just handle the obvious case. */
3001 if (GET_CODE (operands[1]) != LABEL_REF)
3002 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
3003
3004 #if TARGET_MACHO
3005 /* Darwin uses a special PIC legitimizer. */
3006 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
3007 {
3008 operands[1] =
3009 rs6000_machopic_legitimize_pic_address (operands[1], mode,
3010 operands[0]);
3011 if (operands[0] != operands[1])
3012 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3013 return;
3014 }
3015 #endif
3016
3017 /* If we are to limit the number of things we put in the TOC and
3018 this is a symbol plus a constant we can add in one insn,
3019 just put the symbol in the TOC and add the constant. Don't do
3020 this if reload is in progress. */
3021 if (GET_CODE (operands[1]) == CONST
3022 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
3023 && GET_CODE (XEXP (operands[1], 0)) == PLUS
3024 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
3025 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
3026 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
3027 && ! side_effects_p (operands[0]))
3028 {
3029 rtx sym =
3030 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
3031 rtx other = XEXP (XEXP (operands[1], 0), 1);
3032
3033 sym = force_reg (mode, sym);
3034 if (mode == SImode)
3035 emit_insn (gen_addsi3 (operands[0], sym, other));
3036 else
3037 emit_insn (gen_adddi3 (operands[0], sym, other));
3038 return;
3039 }
3040
3041 operands[1] = force_const_mem (mode, operands[1]);
3042
3043 if (TARGET_TOC
3044 && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
3045 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3046 get_pool_constant (XEXP (operands[1], 0)),
3047 get_pool_mode (XEXP (operands[1], 0))))
3048 {
3049 operands[1]
3050 = gen_rtx_MEM (mode,
3051 create_TOC_reference (XEXP (operands[1], 0)));
3052 set_mem_alias_set (operands[1], get_TOC_alias_set ());
3053 RTX_UNCHANGING_P (operands[1]) = 1;
3054 }
3055 }
3056 break;
3057
3058 case TImode:
3059 if (GET_CODE (operands[0]) == MEM
3060 && GET_CODE (XEXP (operands[0], 0)) != REG
3061 && ! reload_in_progress)
3062 operands[0]
3063 = replace_equiv_address (operands[0],
3064 copy_addr_to_reg (XEXP (operands[0], 0)));
3065
3066 if (GET_CODE (operands[1]) == MEM
3067 && GET_CODE (XEXP (operands[1], 0)) != REG
3068 && ! reload_in_progress)
3069 operands[1]
3070 = replace_equiv_address (operands[1],
3071 copy_addr_to_reg (XEXP (operands[1], 0)));
3072 if (TARGET_POWER)
3073 {
3074 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3075 gen_rtvec (2,
3076 gen_rtx_SET (VOIDmode,
3077 operands[0], operands[1]),
3078 gen_rtx_CLOBBER (VOIDmode,
3079 gen_rtx_SCRATCH (SImode)))));
3080 return;
3081 }
3082 break;
3083
3084 default:
3085 abort ();
3086 }
3087
3088 /* Above, we may have called force_const_mem which may have returned
3089 an invalid address. If we can, fix this up; otherwise, reload will
3090 have to deal with it. */
3091 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
3092 operands[1] = validize_mem (operands[1]);
3093
3094 emit_set:
3095 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3096 }
3097 \f
3098 /* Initialize a variable CUM of type CUMULATIVE_ARGS
3099 for a call to a function whose data type is FNTYPE.
3100 For a library call, FNTYPE is 0.
3101
3102 For incoming args we set the number of prototyped arguments large
3103 so that we never return a PARALLEL. */
3104
3105 void
3106 init_cumulative_args (cum, fntype, libname, incoming)
3107 CUMULATIVE_ARGS *cum;
3108 tree fntype;
3109 rtx libname ATTRIBUTE_UNUSED;
3110 int incoming;
3111 {
3112 static CUMULATIVE_ARGS zero_cumulative;
3113
3114 *cum = zero_cumulative;
3115 cum->words = 0;
3116 cum->fregno = FP_ARG_MIN_REG;
3117 cum->vregno = ALTIVEC_ARG_MIN_REG;
3118 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
3119 cum->call_cookie = CALL_NORMAL;
3120 cum->sysv_gregno = GP_ARG_MIN_REG;
3121
3122 if (incoming)
3123 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
3124
3125 else if (cum->prototype)
3126 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
3127 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
3128 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
3129
3130 else
3131 cum->nargs_prototype = 0;
3132
3133 cum->orig_nargs = cum->nargs_prototype;
3134
3135 /* Check for a longcall attribute. */
3136 if (fntype
3137 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
3138 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
3139 cum->call_cookie = CALL_LONG;
3140
3141 if (TARGET_DEBUG_ARG)
3142 {
3143 fprintf (stderr, "\ninit_cumulative_args:");
3144 if (fntype)
3145 {
3146 tree ret_type = TREE_TYPE (fntype);
3147 fprintf (stderr, " ret code = %s,",
3148 tree_code_name[ (int)TREE_CODE (ret_type) ]);
3149 }
3150
3151 if (cum->call_cookie & CALL_LONG)
3152 fprintf (stderr, " longcall,");
3153
3154 fprintf (stderr, " proto = %d, nargs = %d\n",
3155 cum->prototype, cum->nargs_prototype);
3156 }
3157 }
3158 \f
3159 /* If defined, a C expression which determines whether, and in which
3160 direction, to pad out an argument with extra space. The value
3161 should be of type `enum direction': either `upward' to pad above
3162 the argument, `downward' to pad below, or `none' to inhibit
3163 padding.
3164
3165 For the AIX ABI, structs are always stored left-shifted in their
3166 argument slot. */
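/* Note (added for exposition): "upward" places padding at higher
   addresses, so on big-endian AIX a 3-byte struct occupies the first
   three bytes of its word-sized slot, while small scalars pad
   downward and so end up in the least significant end of the slot.  */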
3167
3168 enum direction
3169 function_arg_padding (mode, type)
3170 enum machine_mode mode;
3171 tree type;
3172 {
3173 if (type != 0 && AGGREGATE_TYPE_P (type))
3174 return upward;
3175
3176 /* This is the default definition. */
3177 return (! BYTES_BIG_ENDIAN
3178 ? upward
3179 : ((mode == BLKmode
3180 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
3181 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
3182 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
3183 ? downward : upward));
3184 }
3185
3186 /* If defined, a C expression that gives the alignment boundary, in bits,
3187 of an argument with the specified mode and type. If it is not defined,
3188 PARM_BOUNDARY is used for all arguments.
3189
3190 V.4 wants long longs to be double word aligned. */
3191
3192 int
3193 function_arg_boundary (mode, type)
3194 enum machine_mode mode;
3195 tree type ATTRIBUTE_UNUSED;
3196 {
3197 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
3198 return 64;
3199 else if (SPE_VECTOR_MODE (mode))
3200 return 64;
3201 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3202 return 128;
3203 else
3204 return PARM_BOUNDARY;
3205 }
3206 \f
3207 /* Update the data in CUM to advance over an argument
3208 of mode MODE and data type TYPE.
3209 (TYPE is null for libcalls where that information may not be available.) */
3210
3211 void
3212 function_arg_advance (cum, mode, type, named)
3213 CUMULATIVE_ARGS *cum;
3214 enum machine_mode mode;
3215 tree type;
3216 int named;
3217 {
3218 cum->nargs_prototype--;
3219
3220 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3221 {
3222 if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
3223 cum->vregno++;
3224 else
3225 cum->words += RS6000_ARG_SIZE (mode, type);
3226 }
3227 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
3228 && named && cum->sysv_gregno <= GP_ARG_MAX_REG)
3229 cum->sysv_gregno++;
3230 else if (DEFAULT_ABI == ABI_V4)
3231 {
3232 if (TARGET_HARD_FLOAT && TARGET_FPRS
3233 && (mode == SFmode || mode == DFmode))
3234 {
3235 if (cum->fregno <= FP_ARG_V4_MAX_REG)
3236 cum->fregno++;
3237 else
3238 {
3239 if (mode == DFmode)
3240 cum->words += cum->words & 1;
3241 cum->words += RS6000_ARG_SIZE (mode, type);
3242 }
3243 }
3244 else
3245 {
3246 int n_words;
3247 int gregno = cum->sysv_gregno;
3248
3249 /* Aggregates and IEEE quad get passed by reference. */
3250 if ((type && AGGREGATE_TYPE_P (type))
3251 || mode == TFmode)
3252 n_words = 1;
3253 else
3254 n_words = RS6000_ARG_SIZE (mode, type);
3255
3256 /* Long long and SPE vectors are put in odd registers. */
3257 if (n_words == 2 && (gregno & 1) == 0)
3258 gregno += 1;
3259
3260 /* Long long and SPE vectors are not split between registers
3261 and stack. */
3262 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
3263 {
3264 /* Long long is aligned on the stack. */
3265 if (n_words == 2)
3266 cum->words += cum->words & 1;
3267 cum->words += n_words;
3268 }
3269
3270 /* Note: we keep accumulating gregno even after it passes
3271 GP_ARG_MAX_REG; this is how expand_builtin_saveregs learns
3272 that we have started spilling to the stack. */
3273 cum->sysv_gregno = gregno + n_words;
3274 }
3275
3276 if (TARGET_DEBUG_ARG)
3277 {
3278 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
3279 cum->words, cum->fregno);
3280 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
3281 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
3282 fprintf (stderr, "mode = %4s, named = %d\n",
3283 GET_MODE_NAME (mode), named);
3284 }
3285 }
3286 else
3287 {
3288 int align = (TARGET_32BIT && (cum->words & 1) != 0
3289 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
3290
3291 cum->words += align + RS6000_ARG_SIZE (mode, type);
3292
3293 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3294 && TARGET_HARD_FLOAT && TARGET_FPRS)
3295 cum->fregno += (mode == TFmode ? 2 : 1);
3296
3297 if (TARGET_DEBUG_ARG)
3298 {
3299 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
3300 cum->words, cum->fregno);
3301 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
3302 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
3303 fprintf (stderr, "named = %d, align = %d\n", named, align);
3304 }
3305 }
3306 }
3307 \f
3308 /* Determine where to put an argument to a function.
3309 Value is zero to push the argument on the stack,
3310 or a hard register in which to store the argument.
3311
3312 MODE is the argument's machine mode.
3313 TYPE is the data type of the argument (as a tree).
3314 This is null for libcalls where that information may
3315 not be available.
3316 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3317 the preceding args and about the function being called.
3318 NAMED is nonzero if this argument is a named parameter
3319 (otherwise it is an extra parameter matching an ellipsis).
3320
3321 On RS/6000 the first eight words of non-FP are normally in registers
3322 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3323 Under V.4, the first 8 FP args are in registers.
3324
3325 If this is floating-point and no prototype is specified, we use
3326 both an FP and integer register (or possibly FP reg and stack). Library
3327 functions (when TYPE is zero) always have the proper types for args,
3328 so we can pass the FP value in just one register. emit_library_call
3329 doesn't support PARALLEL anyway. */
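/* Hypothetical example (added for exposition): for an unprototyped
   callee taking a double under the AIX ABI, the code below returns a
   PARALLEL naming both the FP register and the corresponding GPR
   slot, so the value is available whichever way the callee reads it;
   with a prototype in scope only the FP register is used.  */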
3330
3331 struct rtx_def *
3332 function_arg (cum, mode, type, named)
3333 CUMULATIVE_ARGS *cum;
3334 enum machine_mode mode;
3335 tree type;
3336 int named;
3337 {
3338 enum rs6000_abi abi = DEFAULT_ABI;
3339
3340 /* Return a marker indicating whether we need to set or clear in CR1
3341 the bit that V.4 uses to say fp args were passed in registers.
3342 Assume that we don't need the marker for software floating point,
3343 or compiler generated library calls. */
3344 if (mode == VOIDmode)
3345 {
3346 if (abi == ABI_V4
3347 && cum->nargs_prototype < 0
3348 && type && (cum->prototype || TARGET_NO_PROTOTYPE))
3349 {
3350 /* For the SPE, we need to crxor CR6 always. */
3351 if (TARGET_SPE_ABI)
3352 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
3353 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
3354 return GEN_INT (cum->call_cookie
3355 | ((cum->fregno == FP_ARG_MIN_REG)
3356 ? CALL_V4_SET_FP_ARGS
3357 : CALL_V4_CLEAR_FP_ARGS));
3358 }
3359
3360 return GEN_INT (cum->call_cookie);
3361 }
3362
3363 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3364 {
3365 if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
3366 return gen_rtx_REG (mode, cum->vregno);
3367 else
3368 return NULL;
3369 }
3370 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode) && named)
3371 {
3372 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
3373 return gen_rtx_REG (mode, cum->sysv_gregno);
3374 else
3375 return NULL;
3376 }
3377 else if (abi == ABI_V4)
3378 {
3379 if (TARGET_HARD_FLOAT && TARGET_FPRS
3380 && (mode == SFmode || mode == DFmode))
3381 {
3382 if (cum->fregno <= FP_ARG_V4_MAX_REG)
3383 return gen_rtx_REG (mode, cum->fregno);
3384 else
3385 return NULL;
3386 }
3387 else
3388 {
3389 int n_words;
3390 int gregno = cum->sysv_gregno;
3391
3392 /* Aggregates and IEEE quad get passed by reference. */
3393 if ((type && AGGREGATE_TYPE_P (type))
3394 || mode == TFmode)
3395 n_words = 1;
3396 else
3397 n_words = RS6000_ARG_SIZE (mode, type);
3398
3399 /* Long long and SPE vectors are put in odd registers. */
3400 if (n_words == 2 && (gregno & 1) == 0)
3401 gregno += 1;
3402
3403 /* Long long and SPE vectors are not split between registers
3404 and stack. */
3405 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
3406 {
3407 /* SPE vectors in ... get split into 2 registers. */
3408 if (TARGET_SPE && TARGET_SPE_ABI
3409 && SPE_VECTOR_MODE (mode) && !named)
3410 {
3411 rtx r1, r2;
3412 enum machine_mode m = SImode;
3413
3414 r1 = gen_rtx_REG (m, gregno);
3415 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
3416 r2 = gen_rtx_REG (m, gregno + 1);
3417 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
3418 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
3419 }
3420 return gen_rtx_REG (mode, gregno);
3421 }
3422 else
3423 return NULL;
3424 }
3425 }
3426 else
3427 {
3428 int align = (TARGET_32BIT && (cum->words & 1) != 0
3429 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
3430 int align_words = cum->words + align;
3431
3432 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3433 return NULL_RTX;
3434
3435 if (USE_FP_FOR_ARG_P (*cum, mode, type))
3436 {
3437 if (! type
3438 || ((cum->nargs_prototype > 0)
3439 /* IBM AIX extended its linkage convention definition to
3440 always require FP args after the register save area hole
3441 on the stack. */
3442 && (DEFAULT_ABI != ABI_AIX
3443 || ! TARGET_XL_CALL
3444 || (align_words < GP_ARG_NUM_REG))))
3445 return gen_rtx_REG (mode, cum->fregno);
3446
3447 return gen_rtx_PARALLEL (mode,
3448 gen_rtvec (2,
3449 gen_rtx_EXPR_LIST (VOIDmode,
3450 ((align_words >= GP_ARG_NUM_REG)
3451 ? NULL_RTX
3452 : (align_words
3453 + RS6000_ARG_SIZE (mode, type)
3454 > GP_ARG_NUM_REG
3455 /* If this is partially on the stack, then
3456 we only include the portion actually
3457 in registers here. */
3458 ? gen_rtx_REG (SImode,
3459 GP_ARG_MIN_REG + align_words)
3460 : gen_rtx_REG (mode,
3461 GP_ARG_MIN_REG + align_words))),
3462 const0_rtx),
3463 gen_rtx_EXPR_LIST (VOIDmode,
3464 gen_rtx_REG (mode, cum->fregno),
3465 const0_rtx)));
3466 }
3467 else if (align_words < GP_ARG_NUM_REG)
3468 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
3469 else
3470 return NULL_RTX;
3471 }
3472 }
3473 \f
3474 /* For an arg passed partly in registers and partly in memory,
3475 this is the number of registers used.
3476 For args passed entirely in registers or entirely in memory, zero. */
3477
3478 int
3479 function_arg_partial_nregs (cum, mode, type, named)
3480 CUMULATIVE_ARGS *cum;
3481 enum machine_mode mode;
3482 tree type;
3483 int named ATTRIBUTE_UNUSED;
3484 {
3485 if (DEFAULT_ABI == ABI_V4)
3486 return 0;
3487
3488 if (USE_FP_FOR_ARG_P (*cum, mode, type)
3489 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
3490 {
3491 if (cum->nargs_prototype >= 0)
3492 return 0;
3493 }
3494
3495 if (cum->words < GP_ARG_NUM_REG
3496 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
3497 {
3498 int ret = GP_ARG_NUM_REG - cum->words;
3499 if (ret && TARGET_DEBUG_ARG)
3500 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
3501
3502 return ret;
3503 }
3504
3505 return 0;
3506 }
3507 \f
3508 /* A C expression that indicates when an argument must be passed by
3509 reference. If nonzero for an argument, a copy of that argument is
3510 made in memory and a pointer to the argument is passed instead of
3511 the argument itself. The pointer is passed in whatever way is
3512 appropriate for passing a pointer to that type.
3513
3514 Under V.4, structures and unions are passed by reference.
3515
3516 As an extension to all ABIs, variable sized types are passed by
3517 reference. */
3518
3519 int
3520 function_arg_pass_by_reference (cum, mode, type, named)
3521 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
3522 enum machine_mode mode ATTRIBUTE_UNUSED;
3523 tree type;
3524 int named ATTRIBUTE_UNUSED;
3525 {
3526 if (DEFAULT_ABI == ABI_V4
3527 && ((type && AGGREGATE_TYPE_P (type))
3528 || mode == TFmode))
3529 {
3530 if (TARGET_DEBUG_ARG)
3531 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
3532
3533 return 1;
3534 }
3535 return type && int_size_in_bytes (type) <= 0;
3536 }
3537 \f
3538 /* Perform any actions needed for a function that is receiving a
3539 variable number of arguments.
3540
3541 CUM is as above.
3542
3543 MODE and TYPE are the mode and type of the current parameter.
3544
3545 PRETEND_SIZE is a variable that should be set to the amount of stack
3546 that must be pushed by the prolog to pretend that our caller pushed
3547 it.
3548
3549 Normally, this macro will push all remaining incoming registers on the
3550 stack and set PRETEND_SIZE to the length of the registers pushed. */
3551
3552 void
3553 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
3554 CUMULATIVE_ARGS *cum;
3555 enum machine_mode mode;
3556 tree type;
3557 int *pretend_size ATTRIBUTE_UNUSED;
3558 int no_rtl;
3559
3560 {
3561 CUMULATIVE_ARGS next_cum;
3562 int reg_size = TARGET_32BIT ? 4 : 8;
3563 rtx save_area = NULL_RTX, mem;
3564 int first_reg_offset, set;
3565 tree fntype;
3566 int stdarg_p;
3567
3568 fntype = TREE_TYPE (current_function_decl);
3569 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
3570 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3571 != void_type_node));
3572
3573 /* For varargs, we do not want to skip the dummy va_dcl argument.
3574 For stdargs, we do want to skip the last named argument. */
3575 next_cum = *cum;
3576 if (stdarg_p)
3577 function_arg_advance (&next_cum, mode, type, 1);
3578
3579 if (DEFAULT_ABI == ABI_V4)
3580 {
3581 /* Indicate that space must be allocated on the stack for the varargs save area. */
3582 cfun->machine->sysv_varargs_p = 1;
3583 if (! no_rtl)
3584 save_area = plus_constant (virtual_stack_vars_rtx,
3585 - RS6000_VARARGS_SIZE);
3586
3587 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
3588 }
3589 else
3590 {
3591 first_reg_offset = next_cum.words;
3592 save_area = virtual_incoming_args_rtx;
3593 cfun->machine->sysv_varargs_p = 0;
3594
3595 if (MUST_PASS_IN_STACK (mode, type))
3596 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
3597 }
3598
3599 set = get_varargs_alias_set ();
3600 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
3601 {
3602 mem = gen_rtx_MEM (BLKmode,
3603 plus_constant (save_area,
3604 first_reg_offset * reg_size)),
3605 set_mem_alias_set (mem, set);
3606 set_mem_align (mem, BITS_PER_WORD);
3607
3608 move_block_from_reg
3609 (GP_ARG_MIN_REG + first_reg_offset, mem,
3610 GP_ARG_NUM_REG - first_reg_offset,
3611 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
3612 }
3613
3614 /* Save FP registers if needed. */
3615 if (DEFAULT_ABI == ABI_V4
3616 && TARGET_HARD_FLOAT && TARGET_FPRS
3617 && ! no_rtl
3618 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
3619 {
3620 int fregno = next_cum.fregno;
3621 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
3622 rtx lab = gen_label_rtx ();
3623 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
3624
3625 emit_jump_insn (gen_rtx_SET (VOIDmode,
3626 pc_rtx,
3627 gen_rtx_IF_THEN_ELSE (VOIDmode,
3628 gen_rtx_NE (VOIDmode, cr1,
3629 const0_rtx),
3630 gen_rtx_LABEL_REF (VOIDmode, lab),
3631 pc_rtx)));
3632
3633 while (fregno <= FP_ARG_V4_MAX_REG)
3634 {
3635 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
3636 set_mem_alias_set (mem, set);
3637 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
3638 fregno++;
3639 off += 8;
3640 }
3641
3642 emit_label (lab);
3643 }
3644 }
3645
3646 /* Create the va_list data type. */
3647
3648 tree
3649 rs6000_build_va_list ()
3650 {
3651 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
3652
3653 /* For AIX, prefer 'char *' because that's what the system
3654 header files like. */
3655 if (DEFAULT_ABI != ABI_V4)
3656 return build_pointer_type (char_type_node);
3657
3658 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3659 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
3660
3661 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
3662 unsigned_char_type_node);
3663 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
3664 unsigned_char_type_node);
3665 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
3666 ptr_type_node);
3667 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
3668 ptr_type_node);
3669
3670 DECL_FIELD_CONTEXT (f_gpr) = record;
3671 DECL_FIELD_CONTEXT (f_fpr) = record;
3672 DECL_FIELD_CONTEXT (f_ovf) = record;
3673 DECL_FIELD_CONTEXT (f_sav) = record;
3674
3675 TREE_CHAIN (record) = type_decl;
3676 TYPE_NAME (record) = type_decl;
3677 TYPE_FIELDS (record) = f_gpr;
3678 TREE_CHAIN (f_gpr) = f_fpr;
3679 TREE_CHAIN (f_fpr) = f_ovf;
3680 TREE_CHAIN (f_ovf) = f_sav;
3681
3682 layout_type (record);
3683
3684 /* The correct type is an array type of one element. */
3685 return build_array_type (record, build_index_type (size_zero_node));
3686 }
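
/* For illustration only (not compiled): under the V4 ABI the record
   built above corresponds roughly to the C declaration below.  The
   field names match the FIELD_DECLs above; the typedef name itself is
   hypothetical.  */
#if 0
typedef struct __va_list_tag
{
  unsigned char gpr;            /* GP argument registers consumed.  */
  unsigned char fpr;            /* FP argument registers consumed.  */
  void *overflow_arg_area;      /* Next argument in the overflow area.  */
  void *reg_save_area;          /* Base of the register save area.  */
} va_list_sketch[1];            /* Array of one element, as built above.  */
#endif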
3687
3688 /* Implement va_start. */
3689
3690 void
3691 rs6000_va_start (valist, nextarg)
3692 tree valist;
3693 rtx nextarg;
3694 {
3695 HOST_WIDE_INT words, n_gpr, n_fpr;
3696 tree f_gpr, f_fpr, f_ovf, f_sav;
3697 tree gpr, fpr, ovf, sav, t;
3698
3699 /* Only SVR4 needs something special. */
3700 if (DEFAULT_ABI != ABI_V4)
3701 {
3702 std_expand_builtin_va_start (valist, nextarg);
3703 return;
3704 }
3705
3706 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3707 f_fpr = TREE_CHAIN (f_gpr);
3708 f_ovf = TREE_CHAIN (f_fpr);
3709 f_sav = TREE_CHAIN (f_ovf);
3710
3711 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3712 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3713 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3714 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3715 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3716
3717 /* Count number of gp and fp argument registers used. */
3718 words = current_function_args_info.words;
3719 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
3720 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
3721
3722 if (TARGET_DEBUG_ARG)
3723 {
3724 fputs ("va_start: words = ", stderr);
3725 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
3726 fputs (", n_gpr = ", stderr);
3727 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
3728 fputs (", n_fpr = ", stderr);
3729 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
3730 putc ('\n', stderr);
3731 }
3732
3733 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
3734 TREE_SIDE_EFFECTS (t) = 1;
3735 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3736
3737 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
3738 TREE_SIDE_EFFECTS (t) = 1;
3739 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3740
3741 /* Find the overflow area. */
3742 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
3743 if (words != 0)
3744 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
3745 build_int_2 (words * UNITS_PER_WORD, 0));
3746 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3747 TREE_SIDE_EFFECTS (t) = 1;
3748 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3749
3750 /* Find the register save area. */
3751 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
3752 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
3753 build_int_2 (-RS6000_VARARGS_SIZE, -1));
3754 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
3755 TREE_SIDE_EFFECTS (t) = 1;
3756 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3757 }
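
/* In pseudo-C, the expansion above amounts to the following for the V4
   ABI (a sketch; AP denotes the single __va_list_tag element):

     AP->gpr = n_gpr;
     AP->fpr = n_fpr;
     AP->overflow_arg_area = incoming_args + words * UNITS_PER_WORD;
     AP->reg_save_area = frame_base - RS6000_VARARGS_SIZE;

   where "incoming_args" and "frame_base" stand for the two virtual
   registers used above.  */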
3758
3759 /* Implement va_arg. */
3760
3761 rtx
3762 rs6000_va_arg (valist, type)
3763 tree valist, type;
3764 {
3765 tree f_gpr, f_fpr, f_ovf, f_sav;
3766 tree gpr, fpr, ovf, sav, reg, t, u;
3767 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
3768 rtx lab_false, lab_over, addr_rtx, r;
3769
3770 if (DEFAULT_ABI != ABI_V4)
3771 {
3772 /* Variable-sized types are passed by reference. */
3773 if (int_size_in_bytes (type) <= 0)
3774 {
3775 u = build_pointer_type (type);
3776
3777 /* Args grow upward. */
3778 t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
3779 build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
3780 TREE_SIDE_EFFECTS (t) = 1;
3781
3782 t = build1 (NOP_EXPR, build_pointer_type (u), t);
3783 TREE_SIDE_EFFECTS (t) = 1;
3784
3785 t = build1 (INDIRECT_REF, u, t);
3786 TREE_SIDE_EFFECTS (t) = 1;
3787
3788 return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
3789 }
3790 else
3791 return std_expand_builtin_va_arg (valist, type);
3792 }
3793
3794 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3795 f_fpr = TREE_CHAIN (f_gpr);
3796 f_ovf = TREE_CHAIN (f_fpr);
3797 f_sav = TREE_CHAIN (f_ovf);
3798
3799 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3800 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3801 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3802 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3803 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3804
3805 size = int_size_in_bytes (type);
3806 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3807
3808 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
3809 {
3810 /* Aggregates and long doubles are passed by reference. */
3811 indirect_p = 1;
3812 reg = gpr;
3813 n_reg = 1;
3814 sav_ofs = 0;
3815 sav_scale = 4;
3816 size = UNITS_PER_WORD;
3817 rsize = 1;
3818 }
3819 else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
3820 {
3821 /* FP args go in FP registers, if present. */
3822 indirect_p = 0;
3823 reg = fpr;
3824 n_reg = 1;
3825 sav_ofs = 8*4;
3826 sav_scale = 8;
3827 }
3828 else
3829 {
3830 /* Otherwise into GP registers. */
3831 indirect_p = 0;
3832 reg = gpr;
3833 n_reg = rsize;
3834 sav_ofs = 0;
3835 sav_scale = 4;
3836 }
3837
3838 /* Pull the value out of the saved registers ... */
3839
3840 lab_false = gen_label_rtx ();
3841 lab_over = gen_label_rtx ();
3842 addr_rtx = gen_reg_rtx (Pmode);
3843
3844 /* AltiVec vectors never go in registers. */
3845 if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
3846 {
3847 TREE_THIS_VOLATILE (reg) = 1;
3848 emit_cmp_and_jump_insns
3849 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
3850 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
3851 lab_false);
3852
3853 /* Long long is aligned in the registers. */
3854 if (n_reg > 1)
3855 {
3856 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
3857 build_int_2 (n_reg - 1, 0));
3858 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
3859 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
3860 TREE_SIDE_EFFECTS (u) = 1;
3861 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
3862 }
3863
3864 if (sav_ofs)
3865 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
3866 else
3867 t = sav;
3868
3869 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
3870 build_int_2 (n_reg, 0));
3871 TREE_SIDE_EFFECTS (u) = 1;
3872
3873 u = build1 (CONVERT_EXPR, integer_type_node, u);
3874 TREE_SIDE_EFFECTS (u) = 1;
3875
3876 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
3877 TREE_SIDE_EFFECTS (u) = 1;
3878
3879 t = build (PLUS_EXPR, ptr_type_node, t, u);
3880 TREE_SIDE_EFFECTS (t) = 1;
3881
3882 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3883 if (r != addr_rtx)
3884 emit_move_insn (addr_rtx, r);
3885
3886 emit_jump_insn (gen_jump (lab_over));
3887 emit_barrier ();
3888 }
3889
3890 emit_label (lab_false);
3891
3892 /* ... otherwise out of the overflow area. */
3893
3894 /* Make sure we don't find reg 7 for the next int arg.
3895
3896 All AltiVec vectors go in the overflow area. So in the AltiVec
3897 case we need to get the vectors from the overflow area, but
3898 remember where the GPRs and FPRs are. */
3899 if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
3900 || !TARGET_ALTIVEC))
3901 {
3902 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
3903 TREE_SIDE_EFFECTS (t) = 1;
3904 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3905 }
3906
3907 /* Care for on-stack alignment if needed. */
3908 if (rsize <= 1)
3909 t = ovf;
3910 else
3911 {
3912 int align;
3913
3914 /* AltiVec vectors are 16-byte aligned. */
3915 if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
3916 align = 15;
3917 else
3918 align = 7;
3919
3920 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
3921 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
3922 }
3923 t = save_expr (t);
3924
3925 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3926 if (r != addr_rtx)
3927 emit_move_insn (addr_rtx, r);
3928
3929 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
3930 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3931 TREE_SIDE_EFFECTS (t) = 1;
3932 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3933
3934 emit_label (lab_over);
3935
3936 if (indirect_p)
3937 {
3938 r = gen_rtx_MEM (Pmode, addr_rtx);
3939 set_mem_alias_set (r, get_varargs_alias_set ());
3940 emit_move_insn (addr_rtx, r);
3941 }
3942
3943 return addr_rtx;
3944 }
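
/* Taken together, the RTL above implements roughly this pseudo-C per
   argument (a sketch; "counter" is AP->gpr or AP->fpr as selected
   above, GP_ARG_NUM_REG is 8, and the long-long counter rounding is
   omitted):

     if (counter + n_reg <= 8)
       {
         addr = AP->reg_save_area + sav_ofs + counter * sav_scale;
         counter += n_reg;
       }
     else
       {
         counter = 8;                    (no register slots remain)
         addr = align (AP->overflow_arg_area, 8 or 16);
         AP->overflow_arg_area = addr + size;
       }
     if (indirect_p)
       addr = *(void **) addr;           (pass-by-reference case)
     return addr;  */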
3945
3946 /* Builtins. */
3947
3948 #define def_builtin(MASK, NAME, TYPE, CODE) \
3949 do { \
3950 if ((MASK) & target_flags) \
3951 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
3952 NULL, NULL_TREE); \
3953 } while (0)
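
/* For example (illustrative; the type node follows the
   <mode>_ftype_<modes> naming convention used elsewhere in this file):

     def_builtin (MASK_ALTIVEC, "__builtin_altivec_vaddfp",
                  v4sf_ftype_v4sf_v4sf, ALTIVEC_BUILTIN_VADDFP);

   registers the builtin only when MASK_ALTIVEC is set in target_flags,
   i.e. when -maltivec is in effect.  */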
3954
3955 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3956
3957 static const struct builtin_description bdesc_3arg[] =
3958 {
3959 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
3960 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
3961 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
3962 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
3963 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
3964 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
3965 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
3966 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
3967 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
3968 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
3969 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
3970 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
3971 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
3972 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
3973 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
3974 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
3975 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
3976 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
3977 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
3978 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
3979 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
3980 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
3981 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
3982 };
3983
3984 /* DST operations: void foo (void *, const int, const char). */
3985
3986 static const struct builtin_description bdesc_dst[] =
3987 {
3988 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
3989 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
3990 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
3991 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
3992 };
3993
3994 /* Simple binary operations: VECc = foo (VECa, VECb). */
3995
3996 static struct builtin_description bdesc_2arg[] =
3997 {
3998 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3999 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
4000 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
4001 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
4002 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
4003 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
4004 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
4005 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
4006 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
4007 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
4008 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
4009 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
4010 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
4011 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
4012 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
4013 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
4014 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
4015 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
4016 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
4017 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
4018 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
4019 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
4020 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
4021 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
4022 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
4023 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
4024 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
4025 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
4026 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
4027 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
4028 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
4029 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
4030 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
4031 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
4032 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
4033 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
4034 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
4035 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
4036 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
4037 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
4038 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
4039 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
4040 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
4041 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
4042 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
4043 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
4044 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
4045 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
4046 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
4047 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
4048 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
4049 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
4050 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
4051 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
4052 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
4053 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
4054 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
4055 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
4056 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
4057 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
4058 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
4059 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
4060 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
4061 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
4062 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
4063 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
4064 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
4065 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
4066 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
4067 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
4068 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
4069 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
4070 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
4071 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
4072 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
4073 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
4074 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
4075 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
4076 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
4077 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
4078 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
4079 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
4080 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
4081 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
4082 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
4083 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
4084 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
4085 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
4086 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
4087 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
4088 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
4089 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
4090 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
4091 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
4092 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
4093 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
4094 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
4095 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
4096 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
4097 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
4098 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
4099 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
4100 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
4101 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
4102 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
4103 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
4104 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
4105 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
4106 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
4107 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
4108 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
4109 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
4110 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
4111
4112 /* Place-holder. Leave as first SPE builtin. */
4113 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
4114 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
4115 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
4116 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
4117 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
4118 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
4119 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
4120 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
4121 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
4122 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
4123 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
4124 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
4125 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
4126 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
4127 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
4128 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
4129 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
4130 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
4131 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
4132 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
4133 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
4134 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
4135 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
4136 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
4137 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
4138 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
4139 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
4140 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
4141 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
4142 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
4143 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
4144 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
4145 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
4146 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
4147 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
4148 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
4149 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
4150 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
4151 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
4152 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
4153 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
4154 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
4155 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
4156 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
4157 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
4158 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
4159 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
4160 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
4161 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
4162 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
4163 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
4164 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
4165 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
4166 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
4167 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
4168 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
4169 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
4170 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
4171 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
4172 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
4173 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
4174 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
4175 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
4176 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
4177 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
4178 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
4179 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
4180 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
4181 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
4182 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
4183 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
4184 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
4185 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
4186 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
4187 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
4188 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
4189 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
4190 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
4191 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
4192 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
4193 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
4194 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
4195 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
4196 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
4197 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
4198 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
4199 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
4200 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
4201 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
4202 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
4203 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
4204 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
4205 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
4206 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
4207 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
4208 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
4209 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
4210 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
4211 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
4212 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
4213 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
4214 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
4215 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
4216 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
4217 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
4218 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
4219 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
4220 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
4221 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
4222
4223 /* SPE binary operations expecting a 5-bit unsigned literal. */
4224 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
4225
4226 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
4227 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
4228 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
4229 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
4230 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
4231 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
4232 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
4233 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
4234 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
4235 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
4236 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
4237 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
4238 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
4239 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
4240 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
4241 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
4242 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
4243 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
4244 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
4245 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
4246 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
4247 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
4248 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
4249 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
4250 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
4251 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
4252
4253 /* Place-holder. Leave as last binary SPE builtin. */
4254 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
4255 };
4256
4257 /* AltiVec predicates. */
4258
4259 struct builtin_description_predicates
4260 {
4261 const unsigned int mask;
4262 const enum insn_code icode;
4263 const char *opcode;
4264 const char *const name;
4265 const enum rs6000_builtins code;
4266 };
4267
4268 static const struct builtin_description_predicates bdesc_altivec_preds[] =
4269 {
4270 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
4271 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
4272 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
4273 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
4274 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
4275 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
4276 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
4277 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
4278 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
4279 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
4280 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
4281 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
4282 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
4283 };
4284
4285 /* SPE predicates. */
4286 static struct builtin_description bdesc_spe_predicates[] =
4287 {
4288 /* Place-holder. Leave as first. */
4289 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
4290 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
4291 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
4292 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
4293 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
4294 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
4295 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
4296 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
4297 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
4298 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
4299 /* Place-holder. Leave as last. */
4300 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
4301 };
4302
4303 /* SPE evsel predicates. */
4304 static struct builtin_description bdesc_spe_evsel[] =
4305 {
4306 /* Place-holder. Leave as first. */
4307 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
4308 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
4309 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
4310 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
4311 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
4312 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
4313 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
4314 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
4315 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
4316 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
4317 /* Place-holder. Leave as last. */
4318 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
4319 };
4320
4321 /* ABS* operations. */
4322
4323 static const struct builtin_description bdesc_abs[] =
4324 {
4325 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
4326 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
4327 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
4328 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
4329 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
4330 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
4331 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
4332 };
4333
4334 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
4335 foo (VECa). */
4336
4337 static struct builtin_description bdesc_1arg[] =
4338 {
4339 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
4340 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
4341 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
4342 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
4343 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
4344 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
4345 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
4346 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
4347 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
4348 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
4349 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
4350 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
4351 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
4352 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
4353 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
4354 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
4355 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
4356
4357 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
4358 end with SPE_BUILTIN_EVSUBFUSIAAW. */
4359 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
4360 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
4361 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
4362 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
4363 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
4364 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
4365 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
4366 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
4367 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
4368 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
4369 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
4370 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
4371 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
4372 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
4373 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
4374 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
4375 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
4376 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
4377 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
4378 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
4379 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
4380 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
4381 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
4382 { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
4383 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
4384 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
4385 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
4386 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
4387 { 0, CODE_FOR_spe_evsplatfi, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI },
4388 { 0, CODE_FOR_spe_evsplati, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI },
4389
4390 /* Place-holder. Leave as last unary SPE builtin. */
4391 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
4392 };
4393
4394 static rtx
4395 rs6000_expand_unop_builtin (icode, arglist, target)
4396 enum insn_code icode;
4397 tree arglist;
4398 rtx target;
4399 {
4400 rtx pat;
4401 tree arg0 = TREE_VALUE (arglist);
4402 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4403 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4404 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4405
4406 if (icode == CODE_FOR_nothing)
4407 /* Builtin not supported on this processor. */
4408 return 0;
4409
4410 /* If we got invalid arguments, bail out before generating bad rtl. */
4411 if (arg0 == error_mark_node)
4412 return const0_rtx;
4413
4414 if (icode == CODE_FOR_altivec_vspltisb
4415 || icode == CODE_FOR_altivec_vspltish
4416 || icode == CODE_FOR_altivec_vspltisw
4417 || icode == CODE_FOR_spe_evsplatfi
4418 || icode == CODE_FOR_spe_evsplati)
4419 {
4420 /* Only allow 5-bit *signed* literals. */
4421 if (GET_CODE (op0) != CONST_INT
4422 || INTVAL (op0) > 0xf
4423 || INTVAL (op0) < -0x10)
4424 {
4425 error ("argument 1 must be a 5-bit signed literal");
4426 return const0_rtx;
4427 }
4428 }
4429
4430 if (target == 0
4431 || GET_MODE (target) != tmode
4432 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4433 target = gen_reg_rtx (tmode);
4434
4435 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4436 op0 = copy_to_mode_reg (mode0, op0);
4437
4438 pat = GEN_FCN (icode) (target, op0);
4439 if (! pat)
4440 return 0;
4441 emit_insn (pat);
4442
4443 return target;
4444 }
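
/* E.g. "__builtin_altivec_vspltisb (-7)" reaches this function with a
   CONST_INT operand and is accepted; a non-constant argument, or one
   outside [-16, 15], is diagnosed here rather than at assembly time.  */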
4445
4446 static rtx
4447 altivec_expand_abs_builtin (icode, arglist, target)
4448 enum insn_code icode;
4449 tree arglist;
4450 rtx target;
4451 {
4452 rtx pat, scratch1, scratch2;
4453 tree arg0 = TREE_VALUE (arglist);
4454 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4455 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4456 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4457
4458 /* If we have invalid arguments, bail out before generating bad rtl. */
4459 if (arg0 == error_mark_node)
4460 return const0_rtx;
4461
4462 if (target == 0
4463 || GET_MODE (target) != tmode
4464 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4465 target = gen_reg_rtx (tmode);
4466
4467 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4468 op0 = copy_to_mode_reg (mode0, op0);
4469
4470 scratch1 = gen_reg_rtx (mode0);
4471 scratch2 = gen_reg_rtx (mode0);
4472
4473 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
4474 if (! pat)
4475 return 0;
4476 emit_insn (pat);
4477
4478 return target;
4479 }
4480
4481 static rtx
4482 rs6000_expand_binop_builtin (icode, arglist, target)
4483 enum insn_code icode;
4484 tree arglist;
4485 rtx target;
4486 {
4487 rtx pat;
4488 tree arg0 = TREE_VALUE (arglist);
4489 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4490 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4491 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4492 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4493 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4494 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4495
4496 if (icode == CODE_FOR_nothing)
4497 /* Builtin not supported on this processor. */
4498 return 0;
4499
4500 /* If we got invalid arguments, bail out before generating bad rtl. */
4501 if (arg0 == error_mark_node || arg1 == error_mark_node)
4502 return const0_rtx;
4503
4504 if (icode == CODE_FOR_altivec_vcfux
4505 || icode == CODE_FOR_altivec_vcfsx
4506 || icode == CODE_FOR_altivec_vctsxs
4507 || icode == CODE_FOR_altivec_vctuxs
4508 || icode == CODE_FOR_altivec_vspltb
4509 || icode == CODE_FOR_altivec_vsplth
4510 || icode == CODE_FOR_altivec_vspltw
4511 || icode == CODE_FOR_spe_evaddiw
4512 || icode == CODE_FOR_spe_evldd
4513 || icode == CODE_FOR_spe_evldh
4514 || icode == CODE_FOR_spe_evldw
4515 || icode == CODE_FOR_spe_evlhhesplat
4516 || icode == CODE_FOR_spe_evlhhossplat
4517 || icode == CODE_FOR_spe_evlhhousplat
4518 || icode == CODE_FOR_spe_evlwhe
4519 || icode == CODE_FOR_spe_evlwhos
4520 || icode == CODE_FOR_spe_evlwhou
4521 || icode == CODE_FOR_spe_evlwhsplat
4522 || icode == CODE_FOR_spe_evlwwsplat
4523 || icode == CODE_FOR_spe_evrlwi
4524 || icode == CODE_FOR_spe_evslwi
4525 || icode == CODE_FOR_spe_evsrwis
4526 || icode == CODE_FOR_spe_evsrwiu)
4527 {
4528 /* Only allow 5-bit unsigned literals. */
4529 if (TREE_CODE (arg1) != INTEGER_CST
4530 || TREE_INT_CST_LOW (arg1) & ~0x1f)
4531 {
4532 error ("argument 2 must be a 5-bit unsigned literal");
4533 return const0_rtx;
4534 }
4535 }
4536
4537 if (target == 0
4538 || GET_MODE (target) != tmode
4539 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4540 target = gen_reg_rtx (tmode);
4541
4542 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4543 op0 = copy_to_mode_reg (mode0, op0);
4544 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4545 op1 = copy_to_mode_reg (mode1, op1);
4546
4547 pat = GEN_FCN (icode) (target, op0, op1);
4548 if (! pat)
4549 return 0;
4550 emit_insn (pat);
4551
4552 return target;
4553 }
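
/* E.g. "__builtin_altivec_vcfsx (v, 4)" converts with a scale factor
   given by its second argument; the literal must fit in 5 unsigned
   bits, as checked above.  */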
4554
4555 static rtx
4556 altivec_expand_predicate_builtin (icode, opcode, arglist, target)
4557 enum insn_code icode;
4558 const char *opcode;
4559 tree arglist;
4560 rtx target;
4561 {
4562 rtx pat, scratch;
4563 tree cr6_form = TREE_VALUE (arglist);
4564 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
4565 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4566 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4567 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4568 enum machine_mode tmode = SImode;
4569 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4570 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4571 int cr6_form_int;
4572
4573 if (TREE_CODE (cr6_form) != INTEGER_CST)
4574 {
4575 error ("argument 1 of __builtin_altivec_predicate must be a constant");
4576 return const0_rtx;
4577 }
4578 else
4579 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
4580
4581 if (mode0 != mode1)
4582 abort ();
4583
4584 /* If we have invalid arguments, bail out before generating bad rtl. */
4585 if (arg0 == error_mark_node || arg1 == error_mark_node)
4586 return const0_rtx;
4587
4588 if (target == 0
4589 || GET_MODE (target) != tmode
4590 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4591 target = gen_reg_rtx (tmode);
4592
4593 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4594 op0 = copy_to_mode_reg (mode0, op0);
4595 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4596 op1 = copy_to_mode_reg (mode1, op1);
4597
4598 scratch = gen_reg_rtx (mode0);
4599
4600 pat = GEN_FCN (icode) (scratch, op0, op1,
4601 gen_rtx (SYMBOL_REF, Pmode, opcode));
4602 if (! pat)
4603 return 0;
4604 emit_insn (pat);
4605
4606 /* The vec_any* and vec_all* predicates use the same opcodes for two
4607 different operations, but the bits in CR6 will be different
4608 depending on what information we want. So we have to play tricks
4609 with CR6 to get the right bits out.
4610
4611 If you think this is disgusting, look at the specs for the
4612 AltiVec predicates. */
4613
4614 switch (cr6_form_int)
4615 {
4616 case 0:
4617 emit_insn (gen_cr6_test_for_zero (target));
4618 break;
4619 case 1:
4620 emit_insn (gen_cr6_test_for_zero_reverse (target));
4621 break;
4622 case 2:
4623 emit_insn (gen_cr6_test_for_lt (target));
4624 break;
4625 case 3:
4626 emit_insn (gen_cr6_test_for_lt_reverse (target));
4627 break;
4628 default:
4629 error ("argument 1 of __builtin_altivec_predicate is out of range");
4630 break;
4631 }
4632
4633 return target;
4634 }
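
/* For instance (an illustration of the cr6_form encoding; these are
   the values <altivec.h> is expected to pass):

     vec_all_eq (a, b) -> __builtin_altivec_vcmpequw_p (2, a, b)
     vec_any_eq (a, b) -> __builtin_altivec_vcmpequw_p (1, a, b)

   Both use the same vcmpequw. pattern; only the CR6 test emitted above
   differs.  */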
4635
4636 static rtx
4637 altivec_expand_stv_builtin (icode, arglist)
4638 enum insn_code icode;
4639 tree arglist;
4640 {
4641 tree arg0 = TREE_VALUE (arglist);
4642 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4643 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4644 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4645 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4646 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4647 rtx pat;
4648 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
4649 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
4650 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
4651
4652 /* If we have invalid arguments, bail out before generating bad rtl. */
4653 if (arg0 == error_mark_node
4654 || arg1 == error_mark_node
4655 || arg2 == error_mark_node)
4656 return const0_rtx;
4657
4658 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
4659 op0 = copy_to_mode_reg (mode2, op0);
4660 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
4661 op1 = copy_to_mode_reg (mode0, op1);
4662 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
4663 op2 = copy_to_mode_reg (mode1, op2);
4664
4665 pat = GEN_FCN (icode) (op1, op2, op0);
4666 if (pat)
4667 emit_insn (pat);
4668 return NULL_RTX;
4669 }
4670
4671 static rtx
4672 rs6000_expand_ternop_builtin (icode, arglist, target)
4673 enum insn_code icode;
4674 tree arglist;
4675 rtx target;
4676 {
4677 rtx pat;
4678 tree arg0 = TREE_VALUE (arglist);
4679 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4680 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4681 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4682 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4683 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4684 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4685 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4686 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4687 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
4688
4689 if (icode == CODE_FOR_nothing)
4690 /* Builtin not supported on this processor. */
4691 return 0;
4692
4693 /* If we got invalid arguments, bail out before generating bad rtl. */
4694 if (arg0 == error_mark_node
4695 || arg1 == error_mark_node
4696 || arg2 == error_mark_node)
4697 return const0_rtx;
4698
4699 if (icode == CODE_FOR_altivec_vsldoi_4sf
4700 || icode == CODE_FOR_altivec_vsldoi_4si
4701 || icode == CODE_FOR_altivec_vsldoi_8hi
4702 || icode == CODE_FOR_altivec_vsldoi_16qi)
4703 {
4704 /* Only allow 4-bit unsigned literals. */
4705 if (TREE_CODE (arg2) != INTEGER_CST
4706 || TREE_INT_CST_LOW (arg2) & ~0xf)
4707 {
4708 error ("argument 3 must be a 4-bit unsigned literal");
4709 return const0_rtx;
4710 }
4711 }
4712
4713 if (target == 0
4714 || GET_MODE (target) != tmode
4715 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4716 target = gen_reg_rtx (tmode);
4717
4718 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4719 op0 = copy_to_mode_reg (mode0, op0);
4720 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4721 op1 = copy_to_mode_reg (mode1, op1);
4722 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
4723 op2 = copy_to_mode_reg (mode2, op2);
4724
4725 pat = GEN_FCN (icode) (target, op0, op1, op2);
4726 if (! pat)
4727 return 0;
4728 emit_insn (pat);
4729
4730 return target;
4731 }
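/* A usage sketch for the 4-bit literal check above. The builtin
spelling is inferred from the insn names (CODE_FOR_altivec_vsldoi_*)
and is an assumption; the constraint itself is from the code:

vector signed int a, b;
vector signed int ok = __builtin_altivec_vsldoi_4si (a, b, 4);
vector signed int bad = __builtin_altivec_vsldoi_4si (a, b, 16);

The first call satisfies the INTEGER_CST check (0 <= 4 <= 15); the
second draws "argument 3 must be a 4-bit unsigned literal". */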
4732
4733 /* Expand the lvx builtins. */
4734 static rtx
4735 altivec_expand_ld_builtin (exp, target, expandedp)
4736 tree exp;
4737 rtx target;
4738 bool *expandedp;
4739 {
4740 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4741 tree arglist = TREE_OPERAND (exp, 1);
4742 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4743 tree arg0;
4744 enum machine_mode tmode, mode0;
4745 rtx pat, op0;
4746 enum insn_code icode;
4747
4748 switch (fcode)
4749 {
4750 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
4751 icode = CODE_FOR_altivec_lvx_16qi;
4752 break;
4753 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
4754 icode = CODE_FOR_altivec_lvx_8hi;
4755 break;
4756 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
4757 icode = CODE_FOR_altivec_lvx_4si;
4758 break;
4759 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
4760 icode = CODE_FOR_altivec_lvx_4sf;
4761 break;
4762 default:
4763 *expandedp = false;
4764 return NULL_RTX;
4765 }
4766
4767 *expandedp = true;
4768
4769 arg0 = TREE_VALUE (arglist);
4770 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4771 tmode = insn_data[icode].operand[0].mode;
4772 mode0 = insn_data[icode].operand[1].mode;
4773
4774 if (target == 0
4775 || GET_MODE (target) != tmode
4776 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4777 target = gen_reg_rtx (tmode);
4778
4779 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4780 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4781
4782 pat = GEN_FCN (icode) (target, op0);
4783 if (! pat)
4784 return 0;
4785 emit_insn (pat);
4786 return target;
4787 }
4788
4789 /* Expand the stvx builtins. */
4790 static rtx
4791 altivec_expand_st_builtin (exp, target, expandedp)
4792 tree exp;
4793 rtx target ATTRIBUTE_UNUSED;
4794 bool *expandedp;
4795 {
4796 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4797 tree arglist = TREE_OPERAND (exp, 1);
4798 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4799 tree arg0, arg1;
4800 enum machine_mode mode0, mode1;
4801 rtx pat, op0, op1;
4802 enum insn_code icode;
4803
4804 switch (fcode)
4805 {
4806 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
4807 icode = CODE_FOR_altivec_stvx_16qi;
4808 break;
4809 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
4810 icode = CODE_FOR_altivec_stvx_8hi;
4811 break;
4812 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
4813 icode = CODE_FOR_altivec_stvx_4si;
4814 break;
4815 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
4816 icode = CODE_FOR_altivec_stvx_4sf;
4817 break;
4818 default:
4819 *expandedp = false;
4820 return NULL_RTX;
4821 }
4822
4823 arg0 = TREE_VALUE (arglist);
4824 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4825 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4826 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4827 mode0 = insn_data[icode].operand[0].mode;
4828 mode1 = insn_data[icode].operand[1].mode;
4829
4830 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4831 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4832 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
4833 op1 = copy_to_mode_reg (mode1, op1);
4834
4835 pat = GEN_FCN (icode) (op0, op1);
4836 if (pat)
4837 emit_insn (pat);
4838
4839 *expandedp = true;
4840 return NULL_RTX;
4841 }
4842
4843 /* Expand the dst builtins. */
4844 static rtx
4845 altivec_expand_dst_builtin (exp, target, expandedp)
4846 tree exp;
4847 rtx target ATTRIBUTE_UNUSED;
4848 bool *expandedp;
4849 {
4850 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4851 tree arglist = TREE_OPERAND (exp, 1);
4852 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4853 tree arg0, arg1, arg2;
4854 enum machine_mode mode0, mode1, mode2;
4855 rtx pat, op0, op1, op2;
4856 struct builtin_description *d;
4857 size_t i;
4858
4859 *expandedp = false;
4860
4861 /* Handle DST variants. */
4862 d = (struct builtin_description *) bdesc_dst;
4863 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
4864 if (d->code == fcode)
4865 {
4866 arg0 = TREE_VALUE (arglist);
4867 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4868 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4869 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4870 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4871 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4872 mode0 = insn_data[d->icode].operand[0].mode;
4873 mode1 = insn_data[d->icode].operand[1].mode;
4874 mode2 = insn_data[d->icode].operand[2].mode;
4875
4876 /* Invalid arguments, bail out before generating bad rtl. */
4877 if (arg0 == error_mark_node
4878 || arg1 == error_mark_node
4879 || arg2 == error_mark_node)
4880 return const0_rtx;
4881
4882 if (TREE_CODE (arg2) != INTEGER_CST
4883 || TREE_INT_CST_LOW (arg2) & ~0x3)
4884 {
4885 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
4886 return const0_rtx;
4887 }
4888
4889 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
4890 op0 = copy_to_mode_reg (mode0, op0);
4891 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
4892 op1 = copy_to_mode_reg (mode1, op1);
4893
4894 pat = GEN_FCN (d->icode) (op0, op1, op2);
4895 if (pat != 0)
4896 emit_insn (pat);
4897
4898 *expandedp = true;
4899 return NULL_RTX;
4900 }
4901
4902 return NULL_RTX;
4903 }
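/* A usage sketch for the DST handling above. The builtin names live
in bdesc_dst, so the spelling here is an assumption; the 2-bit tag
constraint on the last argument is from the check above:

const void *p;
__builtin_altivec_dst (p, (16 << 16) | 1, 0);

A non-constant tag, or one outside 0..3, is rejected with the
"2-bit unsigned literal" error before any rtl is generated. */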
4904
4905 /* Expand the builtin in EXP and store the result in TARGET. Store
4906 true in *EXPANDEDP if we found a builtin to expand. */
4907 static rtx
4908 altivec_expand_builtin (exp, target, expandedp)
4909 tree exp;
4910 rtx target;
4911 bool *expandedp;
4912 {
4913 struct builtin_description *d;
4914 struct builtin_description_predicates *dp;
4915 size_t i;
4916 enum insn_code icode;
4917 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4918 tree arglist = TREE_OPERAND (exp, 1);
4919 tree arg0;
4920 rtx op0, pat;
4921 enum machine_mode tmode, mode0;
4922 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4923
4924 target = altivec_expand_ld_builtin (exp, target, expandedp);
4925 if (*expandedp)
4926 return target;
4927
4928 target = altivec_expand_st_builtin (exp, target, expandedp);
4929 if (*expandedp)
4930 return target;
4931
4932 target = altivec_expand_dst_builtin (exp, target, expandedp);
4933 if (*expandedp)
4934 return target;
4935
4936 *expandedp = true;
4937
4938 switch (fcode)
4939 {
4940 case ALTIVEC_BUILTIN_STVX:
4941 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
4942 case ALTIVEC_BUILTIN_STVEBX:
4943 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
4944 case ALTIVEC_BUILTIN_STVEHX:
4945 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
4946 case ALTIVEC_BUILTIN_STVEWX:
4947 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
4948 case ALTIVEC_BUILTIN_STVXL:
4949 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
4950
4951 case ALTIVEC_BUILTIN_MFVSCR:
4952 icode = CODE_FOR_altivec_mfvscr;
4953 tmode = insn_data[icode].operand[0].mode;
4954
4955 if (target == 0
4956 || GET_MODE (target) != tmode
4957 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4958 target = gen_reg_rtx (tmode);
4959
4960 pat = GEN_FCN (icode) (target);
4961 if (! pat)
4962 return 0;
4963 emit_insn (pat);
4964 return target;
4965
4966 case ALTIVEC_BUILTIN_MTVSCR:
4967 icode = CODE_FOR_altivec_mtvscr;
4968 arg0 = TREE_VALUE (arglist);
4969 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4970 mode0 = insn_data[icode].operand[0].mode;
4971
4972 /* If we got invalid arguments bail out before generating bad rtl. */
4973 if (arg0 == error_mark_node)
4974 return const0_rtx;
4975
4976 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4977 op0 = copy_to_mode_reg (mode0, op0);
4978
4979 pat = GEN_FCN (icode) (op0);
4980 if (pat)
4981 emit_insn (pat);
4982 return NULL_RTX;
4983
4984 case ALTIVEC_BUILTIN_DSSALL:
4985 emit_insn (gen_altivec_dssall ());
4986 return NULL_RTX;
4987
4988 case ALTIVEC_BUILTIN_DSS:
4989 icode = CODE_FOR_altivec_dss;
4990 arg0 = TREE_VALUE (arglist);
4991 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4992 mode0 = insn_data[icode].operand[0].mode;
4993
4994 /* If we got invalid arguments bail out before generating bad rtl. */
4995 if (arg0 == error_mark_node)
4996 return const0_rtx;
4997
4998 if (TREE_CODE (arg0) != INTEGER_CST
4999 || TREE_INT_CST_LOW (arg0) & ~0x3)
5000 {
5001 error ("argument to dss must be a 2-bit unsigned literal");
5002 return const0_rtx;
5003 }
5004
5005 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5006 op0 = copy_to_mode_reg (mode0, op0);
5007
5008 emit_insn (gen_altivec_dss (op0));
5009 return NULL_RTX;
5010 }
5011
5012 /* Expand abs* operations. */
5013 d = (struct builtin_description *) bdesc_abs;
5014 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
5015 if (d->code == fcode)
5016 return altivec_expand_abs_builtin (d->icode, arglist, target);
5017
5018 /* Expand the AltiVec predicates. */
5019 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
5020 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
5021 if (dp->code == fcode)
5022 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
5023
5024 /* The LV* builtins were initialized differently from the rest, so expand them as plain binops here. */
5025 switch (fcode)
5026 {
5027 case ALTIVEC_BUILTIN_LVSL:
5028 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl,
5029 arglist, target);
5030 case ALTIVEC_BUILTIN_LVSR:
5031 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr,
5032 arglist, target);
5033 case ALTIVEC_BUILTIN_LVEBX:
5034 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx,
5035 arglist, target);
5036 case ALTIVEC_BUILTIN_LVEHX:
5037 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx,
5038 arglist, target);
5039 case ALTIVEC_BUILTIN_LVEWX:
5040 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx,
5041 arglist, target);
5042 case ALTIVEC_BUILTIN_LVXL:
5043 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl,
5044 arglist, target);
5045 case ALTIVEC_BUILTIN_LVX:
5046 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx,
5047 arglist, target);
5048 default:
5049 /* No special handling; fall out of the switch. */
5050 break;
5051 }
5052
5053 *expandedp = false;
5054 return NULL_RTX;
5055 }
5056
5057 /* Binops that need to be initialized manually, but can be expanded
5058 automatically by rs6000_expand_binop_builtin. */
5059 static struct builtin_description bdesc_2arg_spe[] =
5060 {
5061 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
5062 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
5063 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
5064 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
5065 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
5066 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
5067 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
5068 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
5069 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
5070 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
5071 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
5072 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
5073 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
5074 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
5075 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
5076 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
5077 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
5078 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
5079 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
5080 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
5081 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
5082 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
5083 };
5084
5085 /* Expand the builtin in EXP and store the result in TARGET. Store
5086 true in *EXPANDEDP if we found a builtin to expand.
5087
5088 This expands the SPE builtins that are not simple unary and binary
5089 operations. */
5090 static rtx
5091 spe_expand_builtin (exp, target, expandedp)
5092 tree exp;
5093 rtx target;
5094 bool *expandedp;
5095 {
5096 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5097 tree arglist = TREE_OPERAND (exp, 1);
5098 tree arg1, arg0;
5099 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5100 enum insn_code icode;
5101 enum machine_mode tmode, mode0;
5102 rtx pat, op0;
5103 struct builtin_description *d;
5104 size_t i;
5105
5106 *expandedp = true;
5107
5108 /* Syntax check for a 5-bit unsigned immediate. */
5109 switch (fcode)
5110 {
5111 case SPE_BUILTIN_EVSTDD:
5112 case SPE_BUILTIN_EVSTDH:
5113 case SPE_BUILTIN_EVSTDW:
5114 case SPE_BUILTIN_EVSTWHE:
5115 case SPE_BUILTIN_EVSTWHO:
5116 case SPE_BUILTIN_EVSTWWE:
5117 case SPE_BUILTIN_EVSTWWO:
5118 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5119 if (TREE_CODE (arg1) != INTEGER_CST
5120 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5121 {
5122 error ("argument 2 must be a 5-bit unsigned literal");
5123 return const0_rtx;
5124 }
5125 break;
5126 default:
5127 break;
5128 }
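/* For instance, with the builtin names defined later in this file,
__builtin_spe_evstdd (v, p, 31) passes the check just made, while an
offset of 32, or one not known at compile time, draws the 5-bit
literal error. */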
5129
5130 d = (struct builtin_description *) bdesc_2arg_spe;
5131 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
5132 if (d->code == fcode)
5133 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5134
5135 d = (struct builtin_description *) bdesc_spe_predicates;
5136 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
5137 if (d->code == fcode)
5138 return spe_expand_predicate_builtin (d->icode, arglist, target);
5139
5140 d = (struct builtin_description *) bdesc_spe_evsel;
5141 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
5142 if (d->code == fcode)
5143 return spe_expand_evsel_builtin (d->icode, arglist, target);
5144
5145 switch (fcode)
5146 {
5147 case SPE_BUILTIN_EVSTDDX:
5148 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
5149 case SPE_BUILTIN_EVSTDHX:
5150 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
5151 case SPE_BUILTIN_EVSTDWX:
5152 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
5153 case SPE_BUILTIN_EVSTWHEX:
5154 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
5155 case SPE_BUILTIN_EVSTWHOX:
5156 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
5157 case SPE_BUILTIN_EVSTWWEX:
5158 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
5159 case SPE_BUILTIN_EVSTWWOX:
5160 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
5161 case SPE_BUILTIN_EVSTDD:
5162 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
5163 case SPE_BUILTIN_EVSTDH:
5164 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
5165 case SPE_BUILTIN_EVSTDW:
5166 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
5167 case SPE_BUILTIN_EVSTWHE:
5168 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
5169 case SPE_BUILTIN_EVSTWHO:
5170 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
5171 case SPE_BUILTIN_EVSTWWE:
5172 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
5173 case SPE_BUILTIN_EVSTWWO:
5174 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
5175 case SPE_BUILTIN_MFSPEFSCR:
5176 icode = CODE_FOR_spe_mfspefscr;
5177 tmode = insn_data[icode].operand[0].mode;
5178
5179 if (target == 0
5180 || GET_MODE (target) != tmode
5181 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5182 target = gen_reg_rtx (tmode);
5183
5184 pat = GEN_FCN (icode) (target);
5185 if (! pat)
5186 return 0;
5187 emit_insn (pat);
5188 return target;
5189 case SPE_BUILTIN_MTSPEFSCR:
5190 icode = CODE_FOR_spe_mtspefscr;
5191 arg0 = TREE_VALUE (arglist);
5192 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5193 mode0 = insn_data[icode].operand[0].mode;
5194
5195 if (arg0 == error_mark_node)
5196 return const0_rtx;
5197
5198 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5199 op0 = copy_to_mode_reg (mode0, op0);
5200
5201 pat = GEN_FCN (icode) (op0);
5202 if (pat)
5203 emit_insn (pat);
5204 return NULL_RTX;
5205 default:
5206 break;
5207 }
5208
5209 *expandedp = false;
5210 return NULL_RTX;
5211 }
5212
5213 static rtx
5214 spe_expand_predicate_builtin (icode, arglist, target)
5215 enum insn_code icode;
5216 tree arglist;
5217 rtx target;
5218 {
5219 rtx pat, scratch, tmp;
5220 tree form = TREE_VALUE (arglist);
5221 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5222 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5223 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5224 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5225 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5226 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5227 int form_int;
5228 enum rtx_code code;
5229
5230 if (TREE_CODE (form) != INTEGER_CST)
5231 {
5232 error ("argument 1 of __builtin_spe_predicate must be a constant");
5233 return const0_rtx;
5234 }
5235 else
5236 form_int = TREE_INT_CST_LOW (form);
5237
5238 if (mode0 != mode1)
5239 abort ();
5240
5241 if (arg0 == error_mark_node || arg1 == error_mark_node)
5242 return const0_rtx;
5243
5244 if (target == 0
5245 || GET_MODE (target) != SImode
5246 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
5247 target = gen_reg_rtx (SImode);
5248
5249 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5250 op0 = copy_to_mode_reg (mode0, op0);
5251 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5252 op1 = copy_to_mode_reg (mode1, op1);
5253
5254 scratch = gen_reg_rtx (CCmode);
5255
5256 pat = GEN_FCN (icode) (scratch, op0, op1);
5257 if (! pat)
5258 return const0_rtx;
5259 emit_insn (pat);
5260
5261 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
5262 _lower_. We use one compare, but look in different bits of the
5263 CR for each variant.
5264
5265 There are 2 elements in each SPE simd type (upper/lower). The CR
5266 bits are set as follows:
5267
5268 BIT 0 | BIT 1 | BIT 2 | BIT 3
5269 U | L | (U | L) | (U & L)
5270
5271 So, for an "all" relationship, BIT 3 would be set.
5272 For an "any" relationship, BIT 2 would be set. Etc.
5273
5274 Following traditional nomenclature, these bits map to:
5275
5276 BIT 0 | BIT 1 | BIT 2 | BIT 3
5277 LT | GT | EQ | OV
5278
5279 Later, we will generate rtl to look in the LT/GT/EQ/OV bits.
5280 */
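/* A worked instance of the table above: if the compare holds in the
upper element only, then U=1 and L=0, so BIT 0 (LT) and BIT 2 (EQ,
i.e. U | L) are set while BIT 3 (OV, i.e. U & L) is clear; the "any"
form (1) therefore succeeds and the "all" form (0) fails. */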
5281
5282 switch (form_int)
5283 {
5284 /* All variant. OV bit. */
5285 case 0:
5286 /* We need to get to the OV bit, which is the ORDERED bit. We
5287 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
5288 that's ugly and will trigger a validate_condition_mode abort.
5289 So let's just use another pattern. */
5290 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
5291 return target;
5292 /* Any variant. EQ bit. */
5293 case 1:
5294 code = EQ;
5295 break;
5296 /* Upper variant. LT bit. */
5297 case 2:
5298 code = LT;
5299 break;
5300 /* Lower variant. GT bit. */
5301 case 3:
5302 code = GT;
5303 break;
5304 default:
5305 error ("argument 1 of __builtin_spe_predicate is out of range");
5306 return const0_rtx;
5307 }
5308
5309 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
5310 emit_move_insn (target, tmp);
5311
5312 return target;
5313 }
5314
5315 /* The evsel builtins look like this:
5316
5317 e = __builtin_spe_evsel_OP (a, b, c, d);
5318
5319 and work like this:
5320
5321 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
5322 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
5323 */
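/* For instance, with a hypothetical greater-than variant (the
concrete names live in bdesc_spe_evsel, cf. SPE_BUILTIN_EVSEL_CMPGTS;
the spelling below is an assumption):

e = __builtin_spe_evsel_gts (a, b, c, d);

with a = {5, 1} and b = {3, 9} gives e[upper] = c[upper], since
5 > 3 holds, and e[lower] = d[lower], since 1 > 9 fails. */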
5324
5325 static rtx
5326 spe_expand_evsel_builtin (icode, arglist, target)
5327 enum insn_code icode;
5328 tree arglist;
5329 rtx target;
5330 {
5331 rtx pat, scratch;
5332 tree arg0 = TREE_VALUE (arglist);
5333 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5334 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5335 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
5336 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5337 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5338 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5339 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
5340 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5341 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5342
5343 if (mode0 != mode1)
5344 abort ();
5345
5346 if (arg0 == error_mark_node || arg1 == error_mark_node
5347 || arg2 == error_mark_node || arg3 == error_mark_node)
5348 return const0_rtx;
5349
5350 if (target == 0
5351 || GET_MODE (target) != mode0
5352 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
5353 target = gen_reg_rtx (mode0);
5354
5355 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5356 op0 = copy_to_mode_reg (mode0, op0);
5357 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5358 op1 = copy_to_mode_reg (mode0, op1);
5359 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5360 op2 = copy_to_mode_reg (mode0, op2);
5361 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
5362 op3 = copy_to_mode_reg (mode0, op3);
5363
5364 /* Generate the compare. */
5365 scratch = gen_reg_rtx (CCmode);
5366 pat = GEN_FCN (icode) (scratch, op0, op1);
5367 if (! pat)
5368 return const0_rtx;
5369 emit_insn (pat);
5370
5371 if (mode0 == V2SImode)
5372 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
5373 else
5374 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
5375
5376 return target;
5377 }
5378
5379 /* Expand an expression EXP that calls a built-in function,
5380 with result going to TARGET if that's convenient
5381 (and in mode MODE if that's convenient).
5382 SUBTARGET may be used as the target for computing one of EXP's operands.
5383 IGNORE is nonzero if the value is to be ignored. */
5384
5385 static rtx
5386 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
5387 tree exp;
5388 rtx target;
5389 rtx subtarget ATTRIBUTE_UNUSED;
5390 enum machine_mode mode ATTRIBUTE_UNUSED;
5391 int ignore ATTRIBUTE_UNUSED;
5392 {
5393 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5394 tree arglist = TREE_OPERAND (exp, 1);
5395 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5396 struct builtin_description *d;
5397 size_t i;
5398 rtx ret;
5399 bool success;
5400
5401 if (TARGET_ALTIVEC)
5402 {
5403 ret = altivec_expand_builtin (exp, target, &success);
5404
5405 if (success)
5406 return ret;
5407 }
5408 if (TARGET_SPE)
5409 {
5410 ret = spe_expand_builtin (exp, target, &success);
5411
5412 if (success)
5413 return ret;
5414 }
5415
5416 if (TARGET_ALTIVEC || TARGET_SPE)
5417 {
5418 /* Handle simple unary operations. */
5419 d = (struct builtin_description *) bdesc_1arg;
5420 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5421 if (d->code == fcode)
5422 return rs6000_expand_unop_builtin (d->icode, arglist, target);
5423
5424 /* Handle simple binary operations. */
5425 d = (struct builtin_description *) bdesc_2arg;
5426 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5427 if (d->code == fcode)
5428 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5429
5430 /* Handle simple ternary operations. */
5431 d = (struct builtin_description *) bdesc_3arg;
5432 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
5433 if (d->code == fcode)
5434 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
5435 }
5436
5437 abort ();
5438 return NULL_RTX;
5439 }
5440
5441 static void
5442 rs6000_init_builtins ()
5443 {
5444 opaque_V2SI_type_node = copy_node (V2SI_type_node);
5445 opaque_V2SF_type_node = copy_node (V2SF_type_node);
5446
5447 if (TARGET_SPE)
5448 spe_init_builtins ();
5449 if (TARGET_ALTIVEC)
5450 altivec_init_builtins ();
5451 if (TARGET_ALTIVEC || TARGET_SPE)
5452 rs6000_common_init_builtins ();
5453 }
5454
5455 /* Search through a set of builtins and enable the mask bits.
5456 DESC is an array of builtins.
5457 SIZE is the total number of builtins.
5458 START is the builtin enum at which to start.
5459 END is the builtin enum at which to end. */
5460 static void
5461 enable_mask_for_builtins (desc, size, start, end)
5462 struct builtin_description *desc;
5463 int size;
5464 enum rs6000_builtins start, end;
5465 {
5466 int i;
5467
5468 for (i = 0; i < size; ++i)
5469 if (desc[i].code == start)
5470 break;
5471
5472 if (i == size)
5473 return;
5474
5475 for (; i < size; ++i)
5476 {
5477 /* Copy the current target_flags so the builtin is enabled. */
5478 desc[i].mask = target_flags;
5479 if (desc[i].code == end)
5480 break;
5481 }
5482 }
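/* A sketch of the contiguity assumption above: for the call made in
spe_init_builtins with START = SPE_BUILTIN_EVADDW and END =
SPE_BUILTIN_EVXOR, every bdesc_2arg entry from the one whose code is
EVADDW through the one whose code is EVXOR, inclusive, gets its mask
set to target_flags. The builtins between the two enum values must
therefore be contiguous in the table, or the loop flips the wrong
entries. */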
5483
5484 static void
5485 spe_init_builtins ()
5486 {
5487 tree endlink = void_list_node;
5488 tree puint_type_node = build_pointer_type (unsigned_type_node);
5489 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
5490 tree pv2si_type_node = build_pointer_type (opaque_V2SI_type_node);
5491 struct builtin_description *d;
5492 size_t i;
5493
5494 tree v2si_ftype_4_v2si
5495 = build_function_type
5496 (opaque_V2SI_type_node,
5497 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5498 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5499 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5500 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5501 endlink)))));
5502
5503 tree v2sf_ftype_4_v2sf
5504 = build_function_type
5505 (opaque_V2SF_type_node,
5506 tree_cons (NULL_TREE, opaque_V2SF_type_node,
5507 tree_cons (NULL_TREE, opaque_V2SF_type_node,
5508 tree_cons (NULL_TREE, opaque_V2SF_type_node,
5509 tree_cons (NULL_TREE, opaque_V2SF_type_node,
5510 endlink)))));
5511
5512 tree int_ftype_int_v2si_v2si
5513 = build_function_type
5514 (integer_type_node,
5515 tree_cons (NULL_TREE, integer_type_node,
5516 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5517 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5518 endlink))));
5519
5520 tree int_ftype_int_v2sf_v2sf
5521 = build_function_type
5522 (integer_type_node,
5523 tree_cons (NULL_TREE, integer_type_node,
5524 tree_cons (NULL_TREE, opaque_V2SF_type_node,
5525 tree_cons (NULL_TREE, opaque_V2SF_type_node,
5526 endlink))));
5527
5528 tree void_ftype_v2si_puint_int
5529 = build_function_type (void_type_node,
5530 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5531 tree_cons (NULL_TREE, puint_type_node,
5532 tree_cons (NULL_TREE,
5533 integer_type_node,
5534 endlink))));
5535
5536 tree void_ftype_v2si_puint_char
5537 = build_function_type (void_type_node,
5538 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5539 tree_cons (NULL_TREE, puint_type_node,
5540 tree_cons (NULL_TREE,
5541 char_type_node,
5542 endlink))));
5543
5544 tree void_ftype_v2si_pv2si_int
5545 = build_function_type (void_type_node,
5546 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5547 tree_cons (NULL_TREE, pv2si_type_node,
5548 tree_cons (NULL_TREE,
5549 integer_type_node,
5550 endlink))));
5551
5552 tree void_ftype_v2si_pv2si_char
5553 = build_function_type (void_type_node,
5554 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5555 tree_cons (NULL_TREE, pv2si_type_node,
5556 tree_cons (NULL_TREE,
5557 char_type_node,
5558 endlink))));
5559
5560 tree void_ftype_int
5561 = build_function_type (void_type_node,
5562 tree_cons (NULL_TREE, integer_type_node, endlink));
5563
5564 tree int_ftype_void
5565 = build_function_type (integer_type_node,
5566 tree_cons (NULL_TREE, void_type_node, endlink));
5567
5568 tree v2si_ftype_pv2si_int
5569 = build_function_type (opaque_V2SI_type_node,
5570 tree_cons (NULL_TREE, pv2si_type_node,
5571 tree_cons (NULL_TREE, integer_type_node,
5572 endlink)));
5573
5574 tree v2si_ftype_puint_int
5575 = build_function_type (opaque_V2SI_type_node,
5576 tree_cons (NULL_TREE, puint_type_node,
5577 tree_cons (NULL_TREE, integer_type_node,
5578 endlink)));
5579
5580 tree v2si_ftype_pushort_int
5581 = build_function_type (opaque_V2SI_type_node,
5582 tree_cons (NULL_TREE, pushort_type_node,
5583 tree_cons (NULL_TREE, integer_type_node,
5584 endlink)));
5585
5586 /* The initialization of the simple binary and unary builtins is
5587 done in rs6000_common_init_builtins, but we have to enable the
5588 mask bits here manually because we have run out of `target_flags'
5589 bits. We really need to redesign this mask business. */
5590
5591 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
5592 ARRAY_SIZE (bdesc_2arg),
5593 SPE_BUILTIN_EVADDW,
5594 SPE_BUILTIN_EVXOR);
5595 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
5596 ARRAY_SIZE (bdesc_1arg),
5597 SPE_BUILTIN_EVABS,
5598 SPE_BUILTIN_EVSUBFUSIAAW);
5599 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
5600 ARRAY_SIZE (bdesc_spe_predicates),
5601 SPE_BUILTIN_EVCMPEQ,
5602 SPE_BUILTIN_EVFSTSTLT);
5603 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
5604 ARRAY_SIZE (bdesc_spe_evsel),
5605 SPE_BUILTIN_EVSEL_CMPGTS,
5606 SPE_BUILTIN_EVSEL_FSTSTEQ);
5607
5608 /* Initialize irregular SPE builtins. */
5609
5610 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
5611 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
5612 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
5613 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
5614 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
5615 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
5616 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
5617 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
5618 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
5619 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
5620 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
5621 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
5622 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
5623 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
5624 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
5625 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
5626
5627 /* Loads. */
5628 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
5629 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
5630 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
5631 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
5632 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
5633 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
5634 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
5635 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
5636 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
5637 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
5638 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
5639 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
5640 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
5641 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
5642 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
5643 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
5644 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
5645 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
5646 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
5647 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
5648 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
5649 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
5650
5651 /* Predicates. */
5652 d = (struct builtin_description *) bdesc_spe_predicates;
5653 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
5654 {
5655 tree type;
5656
5657 switch (insn_data[d->icode].operand[1].mode)
5658 {
5659 case V2SImode:
5660 type = int_ftype_int_v2si_v2si;
5661 break;
5662 case V2SFmode:
5663 type = int_ftype_int_v2sf_v2sf;
5664 break;
5665 default:
5666 abort ();
5667 }
5668
5669 def_builtin (d->mask, d->name, type, d->code);
5670 }
5671
5672 /* Evsel predicates. */
5673 d = (struct builtin_description *) bdesc_spe_evsel;
5674 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
5675 {
5676 tree type;
5677
5678 switch (insn_data[d->icode].operand[1].mode)
5679 {
5680 case V2SImode:
5681 type = v2si_ftype_4_v2si;
5682 break;
5683 case V2SFmode:
5684 type = v2sf_ftype_4_v2sf;
5685 break;
5686 default:
5687 abort ();
5688 }
5689
5690 def_builtin (d->mask, d->name, type, d->code);
5691 }
5692 }
5693
5694 static void
5695 altivec_init_builtins ()
5696 {
5697 struct builtin_description *d;
5698 struct builtin_description_predicates *dp;
5699 size_t i;
5700 tree pfloat_type_node = build_pointer_type (float_type_node);
5701 tree pint_type_node = build_pointer_type (integer_type_node);
5702 tree pshort_type_node = build_pointer_type (short_integer_type_node);
5703 tree pchar_type_node = build_pointer_type (char_type_node);
5704
5705 tree pvoid_type_node = build_pointer_type (void_type_node);
5706
5707 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
5708 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
5709 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
5710 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
5711
5712 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
5713
5714 tree int_ftype_int_v4si_v4si
5715 = build_function_type_list (integer_type_node,
5716 integer_type_node, V4SI_type_node,
5717 V4SI_type_node, NULL_TREE);
5718 tree v4sf_ftype_pcfloat
5719 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
5720 tree void_ftype_pfloat_v4sf
5721 = build_function_type_list (void_type_node,
5722 pfloat_type_node, V4SF_type_node, NULL_TREE);
5723 tree v4si_ftype_pcint
5724 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
5725 tree void_ftype_pint_v4si
5726 = build_function_type_list (void_type_node,
5727 pint_type_node, V4SI_type_node, NULL_TREE);
5728 tree v8hi_ftype_pcshort
5729 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
5730 tree void_ftype_pshort_v8hi
5731 = build_function_type_list (void_type_node,
5732 pshort_type_node, V8HI_type_node, NULL_TREE);
5733 tree v16qi_ftype_pcchar
5734 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
5735 tree void_ftype_pchar_v16qi
5736 = build_function_type_list (void_type_node,
5737 pchar_type_node, V16QI_type_node, NULL_TREE);
5738 tree void_ftype_v4si
5739 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
5740 tree v8hi_ftype_void
5741 = build_function_type (V8HI_type_node, void_list_node);
5742 tree void_ftype_void
5743 = build_function_type (void_type_node, void_list_node);
5744 tree void_ftype_qi
5745 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
5746
5747 tree v16qi_ftype_int_pcvoid
5748 = build_function_type_list (V16QI_type_node,
5749 integer_type_node, pcvoid_type_node, NULL_TREE);
5750 tree v8hi_ftype_int_pcvoid
5751 = build_function_type_list (V8HI_type_node,
5752 integer_type_node, pcvoid_type_node, NULL_TREE);
5753 tree v4si_ftype_int_pcvoid
5754 = build_function_type_list (V4SI_type_node,
5755 integer_type_node, pcvoid_type_node, NULL_TREE);
5756
5757 tree void_ftype_v4si_int_pvoid
5758 = build_function_type_list (void_type_node,
5759 V4SI_type_node, integer_type_node,
5760 pvoid_type_node, NULL_TREE);
5761 tree void_ftype_v16qi_int_pvoid
5762 = build_function_type_list (void_type_node,
5763 V16QI_type_node, integer_type_node,
5764 pvoid_type_node, NULL_TREE);
5765 tree void_ftype_v8hi_int_pvoid
5766 = build_function_type_list (void_type_node,
5767 V8HI_type_node, integer_type_node,
5768 pvoid_type_node, NULL_TREE);
5769 tree int_ftype_int_v8hi_v8hi
5770 = build_function_type_list (integer_type_node,
5771 integer_type_node, V8HI_type_node,
5772 V8HI_type_node, NULL_TREE);
5773 tree int_ftype_int_v16qi_v16qi
5774 = build_function_type_list (integer_type_node,
5775 integer_type_node, V16QI_type_node,
5776 V16QI_type_node, NULL_TREE);
5777 tree int_ftype_int_v4sf_v4sf
5778 = build_function_type_list (integer_type_node,
5779 integer_type_node, V4SF_type_node,
5780 V4SF_type_node, NULL_TREE);
5781 tree v4si_ftype_v4si
5782 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
5783 tree v8hi_ftype_v8hi
5784 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
5785 tree v16qi_ftype_v16qi
5786 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
5787 tree v4sf_ftype_v4sf
5788 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
5789 tree void_ftype_pcvoid_int_char
5790 = build_function_type_list (void_type_node,
5791 pcvoid_type_node, integer_type_node,
5792 char_type_node, NULL_TREE);
5793
5794 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
5795 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
5796 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
5797 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
5798 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
5799 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
5800 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
5801 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
5802 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
5803 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
5804 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
5805 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
5806 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
5807 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
5808 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
5809 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
5810 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
5811 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
5812 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
5813 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
5814 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSL);
5815 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSR);
5816 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEBX);
5817 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEHX);
5818 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEWX);
5819 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVXL);
5820 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVX);
5821 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
5822 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
5823 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
5824 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
5825 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
5826
5827 /* Add the DST variants. */
5828 d = (struct builtin_description *) bdesc_dst;
5829 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
5830 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_char, d->code);
5831
5832 /* Initialize the predicates. */
5833 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
5834 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
5835 {
5836 enum machine_mode mode1;
5837 tree type;
5838
5839 mode1 = insn_data[dp->icode].operand[1].mode;
5840
5841 switch (mode1)
5842 {
5843 case V4SImode:
5844 type = int_ftype_int_v4si_v4si;
5845 break;
5846 case V8HImode:
5847 type = int_ftype_int_v8hi_v8hi;
5848 break;
5849 case V16QImode:
5850 type = int_ftype_int_v16qi_v16qi;
5851 break;
5852 case V4SFmode:
5853 type = int_ftype_int_v4sf_v4sf;
5854 break;
5855 default:
5856 abort ();
5857 }
5858
5859 def_builtin (dp->mask, dp->name, type, dp->code);
5860 }
5861
5862 /* Initialize the abs* operators. */
5863 d = (struct builtin_description *) bdesc_abs;
5864 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
5865 {
5866 enum machine_mode mode0;
5867 tree type;
5868
5869 mode0 = insn_data[d->icode].operand[0].mode;
5870
5871 switch (mode0)
5872 {
5873 case V4SImode:
5874 type = v4si_ftype_v4si;
5875 break;
5876 case V8HImode:
5877 type = v8hi_ftype_v8hi;
5878 break;
5879 case V16QImode:
5880 type = v16qi_ftype_v16qi;
5881 break;
5882 case V4SFmode:
5883 type = v4sf_ftype_v4sf;
5884 break;
5885 default:
5886 abort ();
5887 }
5888
5889 def_builtin (d->mask, d->name, type, d->code);
5890 }
5891 }
5892
5893 static void
5894 rs6000_common_init_builtins ()
5895 {
5896 struct builtin_description *d;
5897 size_t i;
5898
5899 tree v4sf_ftype_v4sf_v4sf_v16qi
5900 = build_function_type_list (V4SF_type_node,
5901 V4SF_type_node, V4SF_type_node,
5902 V16QI_type_node, NULL_TREE);
5903 tree v4si_ftype_v4si_v4si_v16qi
5904 = build_function_type_list (V4SI_type_node,
5905 V4SI_type_node, V4SI_type_node,
5906 V16QI_type_node, NULL_TREE);
5907 tree v8hi_ftype_v8hi_v8hi_v16qi
5908 = build_function_type_list (V8HI_type_node,
5909 V8HI_type_node, V8HI_type_node,
5910 V16QI_type_node, NULL_TREE);
5911 tree v16qi_ftype_v16qi_v16qi_v16qi
5912 = build_function_type_list (V16QI_type_node,
5913 V16QI_type_node, V16QI_type_node,
5914 V16QI_type_node, NULL_TREE);
5915 tree v4si_ftype_char
5916 = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
5917 tree v8hi_ftype_char
5918 = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
5919 tree v16qi_ftype_char
5920 = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
5921 tree v8hi_ftype_v16qi
5922 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
5923 tree v4sf_ftype_v4sf
5924 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
5925
5926 tree v2si_ftype_v2si_v2si
5927 = build_function_type_list (opaque_V2SI_type_node,
5928 opaque_V2SI_type_node,
5929 opaque_V2SI_type_node, NULL_TREE);
5930
5931 tree v2sf_ftype_v2sf_v2sf
5932 = build_function_type_list (opaque_V2SF_type_node,
5933 opaque_V2SF_type_node,
5934 opaque_V2SF_type_node, NULL_TREE);
5935
5936 tree v2si_ftype_int_int
5937 = build_function_type_list (opaque_V2SI_type_node,
5938 integer_type_node, integer_type_node,
5939 NULL_TREE);
5940
5941 tree v2si_ftype_v2si
5942 = build_function_type_list (opaque_V2SI_type_node,
5943 opaque_V2SI_type_node, NULL_TREE);
5944
5945 tree v2sf_ftype_v2sf
5946 = build_function_type_list (opaque_V2SF_type_node,
5947 opaque_V2SF_type_node, NULL_TREE);
5948
5949 tree v2sf_ftype_v2si
5950 = build_function_type_list (opaque_V2SF_type_node,
5951 opaque_V2SI_type_node, NULL_TREE);
5952
5953 tree v2si_ftype_v2sf
5954 = build_function_type_list (opaque_V2SI_type_node,
5955 opaque_V2SF_type_node, NULL_TREE);
5956
5957 tree v2si_ftype_v2si_char
5958 = build_function_type_list (opaque_V2SI_type_node,
5959 opaque_V2SI_type_node,
5960 char_type_node, NULL_TREE);
5961
5962 tree v2si_ftype_int_char
5963 = build_function_type_list (opaque_V2SI_type_node,
5964 integer_type_node, char_type_node, NULL_TREE);
5965
5966 tree v2si_ftype_char
5967 = build_function_type_list (opaque_V2SI_type_node,
5968 char_type_node, NULL_TREE);
5969
5970 tree int_ftype_int_int
5971 = build_function_type_list (integer_type_node,
5972 integer_type_node, integer_type_node,
5973 NULL_TREE);
5974
5975 tree v4si_ftype_v4si_v4si
5976 = build_function_type_list (V4SI_type_node,
5977 V4SI_type_node, V4SI_type_node, NULL_TREE);
5978 tree v4sf_ftype_v4si_char
5979 = build_function_type_list (V4SF_type_node,
5980 V4SI_type_node, char_type_node, NULL_TREE);
5981 tree v4si_ftype_v4sf_char
5982 = build_function_type_list (V4SI_type_node,
5983 V4SF_type_node, char_type_node, NULL_TREE);
5984 tree v4si_ftype_v4si_char
5985 = build_function_type_list (V4SI_type_node,
5986 V4SI_type_node, char_type_node, NULL_TREE);
5987 tree v8hi_ftype_v8hi_char
5988 = build_function_type_list (V8HI_type_node,
5989 V8HI_type_node, char_type_node, NULL_TREE);
5990 tree v16qi_ftype_v16qi_char
5991 = build_function_type_list (V16QI_type_node,
5992 V16QI_type_node, char_type_node, NULL_TREE);
5993 tree v16qi_ftype_v16qi_v16qi_char
5994 = build_function_type_list (V16QI_type_node,
5995 V16QI_type_node, V16QI_type_node,
5996 char_type_node, NULL_TREE);
5997 tree v8hi_ftype_v8hi_v8hi_char
5998 = build_function_type_list (V8HI_type_node,
5999 V8HI_type_node, V8HI_type_node,
6000 char_type_node, NULL_TREE);
6001 tree v4si_ftype_v4si_v4si_char
6002 = build_function_type_list (V4SI_type_node,
6003 V4SI_type_node, V4SI_type_node,
6004 char_type_node, NULL_TREE);
6005 tree v4sf_ftype_v4sf_v4sf_char
6006 = build_function_type_list (V4SF_type_node,
6007 V4SF_type_node, V4SF_type_node,
6008 char_type_node, NULL_TREE);
6009 tree v4sf_ftype_v4sf_v4sf
6010 = build_function_type_list (V4SF_type_node,
6011 V4SF_type_node, V4SF_type_node, NULL_TREE);
6012 tree v4sf_ftype_v4sf_v4sf_v4si
6013 = build_function_type_list (V4SF_type_node,
6014 V4SF_type_node, V4SF_type_node,
6015 V4SI_type_node, NULL_TREE);
6016 tree v4sf_ftype_v4sf_v4sf_v4sf
6017 = build_function_type_list (V4SF_type_node,
6018 V4SF_type_node, V4SF_type_node,
6019 V4SF_type_node, NULL_TREE);
6020 tree v4si_ftype_v4si_v4si_v4si
6021 = build_function_type_list (V4SI_type_node,
6022 V4SI_type_node, V4SI_type_node,
6023 V4SI_type_node, NULL_TREE);
6024 tree v8hi_ftype_v8hi_v8hi
6025 = build_function_type_list (V8HI_type_node,
6026 V8HI_type_node, V8HI_type_node, NULL_TREE);
6027 tree v8hi_ftype_v8hi_v8hi_v8hi
6028 = build_function_type_list (V8HI_type_node,
6029 V8HI_type_node, V8HI_type_node,
6030 V8HI_type_node, NULL_TREE);
6031 tree v4si_ftype_v8hi_v8hi_v4si
6032 = build_function_type_list (V4SI_type_node,
6033 V8HI_type_node, V8HI_type_node,
6034 V4SI_type_node, NULL_TREE);
6035 tree v4si_ftype_v16qi_v16qi_v4si
6036 = build_function_type_list (V4SI_type_node,
6037 V16QI_type_node, V16QI_type_node,
6038 V4SI_type_node, NULL_TREE);
6039 tree v16qi_ftype_v16qi_v16qi
6040 = build_function_type_list (V16QI_type_node,
6041 V16QI_type_node, V16QI_type_node, NULL_TREE);
6042 tree v4si_ftype_v4sf_v4sf
6043 = build_function_type_list (V4SI_type_node,
6044 V4SF_type_node, V4SF_type_node, NULL_TREE);
6045 tree v8hi_ftype_v16qi_v16qi
6046 = build_function_type_list (V8HI_type_node,
6047 V16QI_type_node, V16QI_type_node, NULL_TREE);
6048 tree v4si_ftype_v8hi_v8hi
6049 = build_function_type_list (V4SI_type_node,
6050 V8HI_type_node, V8HI_type_node, NULL_TREE);
6051 tree v8hi_ftype_v4si_v4si
6052 = build_function_type_list (V8HI_type_node,
6053 V4SI_type_node, V4SI_type_node, NULL_TREE);
6054 tree v16qi_ftype_v8hi_v8hi
6055 = build_function_type_list (V16QI_type_node,
6056 V8HI_type_node, V8HI_type_node, NULL_TREE);
6057 tree v4si_ftype_v16qi_v4si
6058 = build_function_type_list (V4SI_type_node,
6059 V16QI_type_node, V4SI_type_node, NULL_TREE);
6060 tree v4si_ftype_v16qi_v16qi
6061 = build_function_type_list (V4SI_type_node,
6062 V16QI_type_node, V16QI_type_node, NULL_TREE);
6063 tree v4si_ftype_v8hi_v4si
6064 = build_function_type_list (V4SI_type_node,
6065 V8HI_type_node, V4SI_type_node, NULL_TREE);
6066 tree v4si_ftype_v8hi
6067 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
6068 tree int_ftype_v4si_v4si
6069 = build_function_type_list (integer_type_node,
6070 V4SI_type_node, V4SI_type_node, NULL_TREE);
6071 tree int_ftype_v4sf_v4sf
6072 = build_function_type_list (integer_type_node,
6073 V4SF_type_node, V4SF_type_node, NULL_TREE);
6074 tree int_ftype_v16qi_v16qi
6075 = build_function_type_list (integer_type_node,
6076 V16QI_type_node, V16QI_type_node, NULL_TREE);
6077 tree int_ftype_v8hi_v8hi
6078 = build_function_type_list (integer_type_node,
6079 V8HI_type_node, V8HI_type_node, NULL_TREE);
6080
6081 /* Add the simple ternary operators. */
6082 d = (struct builtin_description *) bdesc_3arg;
6083 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
6084 {
6085
6086 enum machine_mode mode0, mode1, mode2, mode3;
6087 tree type;
6088
6089 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6090 continue;
6091
6092 mode0 = insn_data[d->icode].operand[0].mode;
6093 mode1 = insn_data[d->icode].operand[1].mode;
6094 mode2 = insn_data[d->icode].operand[2].mode;
6095 mode3 = insn_data[d->icode].operand[3].mode;
6096
6097 /* When all four are of the same mode. */
6098 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
6099 {
6100 switch (mode0)
6101 {
6102 case V4SImode:
6103 type = v4si_ftype_v4si_v4si_v4si;
6104 break;
6105 case V4SFmode:
6106 type = v4sf_ftype_v4sf_v4sf_v4sf;
6107 break;
6108 case V8HImode:
6109 type = v8hi_ftype_v8hi_v8hi_v8hi;
6110 break;
6111 case V16QImode:
6112 type = v16qi_ftype_v16qi_v16qi_v16qi;
6113 break;
6114 default:
6115 abort ();
6116 }
6117 }
6118 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
6119 {
6120 switch (mode0)
6121 {
6122 case V4SImode:
6123 type = v4si_ftype_v4si_v4si_v16qi;
6124 break;
6125 case V4SFmode:
6126 type = v4sf_ftype_v4sf_v4sf_v16qi;
6127 break;
6128 case V8HImode:
6129 type = v8hi_ftype_v8hi_v8hi_v16qi;
6130 break;
6131 case V16QImode:
6132 type = v16qi_ftype_v16qi_v16qi_v16qi;
6133 break;
6134 default:
6135 abort ();
6136 }
6137 }
6138 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
6139 && mode3 == V4SImode)
6140 type = v4si_ftype_v16qi_v16qi_v4si;
6141 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
6142 && mode3 == V4SImode)
6143 type = v4si_ftype_v8hi_v8hi_v4si;
6144 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
6145 && mode3 == V4SImode)
6146 type = v4sf_ftype_v4sf_v4sf_v4si;
6147
6148 /* vchar, vchar, vchar, 4 bit literal. */
6149 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
6150 && mode3 == QImode)
6151 type = v16qi_ftype_v16qi_v16qi_char;
6152
6153 /* vshort, vshort, vshort, 4 bit literal. */
6154 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
6155 && mode3 == QImode)
6156 type = v8hi_ftype_v8hi_v8hi_char;
6157
6158 /* vint, vint, vint, 4 bit literal. */
6159 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
6160 && mode3 == QImode)
6161 type = v4si_ftype_v4si_v4si_char;
6162
6163 /* vfloat, vfloat, vfloat, 4 bit literal. */
6164 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
6165 && mode3 == QImode)
6166 type = v4sf_ftype_v4sf_v4sf_char;
6167
6168 else
6169 abort ();
6170
6171 def_builtin (d->mask, d->name, type, d->code);
6172 }
6173
6174 /* Add the simple binary operators. */
6175 d = (struct builtin_description *) bdesc_2arg;
6176 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6177 {
6178 enum machine_mode mode0, mode1, mode2;
6179 tree type;
6180
6181 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6182 continue;
6183
6184 mode0 = insn_data[d->icode].operand[0].mode;
6185 mode1 = insn_data[d->icode].operand[1].mode;
6186 mode2 = insn_data[d->icode].operand[2].mode;
6187
6188 /* When all three operands are of the same mode. */
6189 if (mode0 == mode1 && mode1 == mode2)
6190 {
6191 switch (mode0)
6192 {
6193 case V4SFmode:
6194 type = v4sf_ftype_v4sf_v4sf;
6195 break;
6196 case V4SImode:
6197 type = v4si_ftype_v4si_v4si;
6198 break;
6199 case V16QImode:
6200 type = v16qi_ftype_v16qi_v16qi;
6201 break;
6202 case V8HImode:
6203 type = v8hi_ftype_v8hi_v8hi;
6204 break;
6205 case V2SImode:
6206 type = v2si_ftype_v2si_v2si;
6207 break;
6208 case V2SFmode:
6209 type = v2sf_ftype_v2sf_v2sf;
6210 break;
6211 case SImode:
6212 type = int_ftype_int_int;
6213 break;
6214 default:
6215 abort ();
6216 }
6217 }
6218
6219 /* A few other combos we really don't want to do manually. */
6220
6221 /* vint, vfloat, vfloat. */
6222 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
6223 type = v4si_ftype_v4sf_v4sf;
6224
6225 /* vshort, vchar, vchar. */
6226 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
6227 type = v8hi_ftype_v16qi_v16qi;
6228
6229 /* vint, vshort, vshort. */
6230 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
6231 type = v4si_ftype_v8hi_v8hi;
6232
6233 /* vshort, vint, vint. */
6234 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
6235 type = v8hi_ftype_v4si_v4si;
6236
6237 /* vchar, vshort, vshort. */
6238 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
6239 type = v16qi_ftype_v8hi_v8hi;
6240
6241 /* vint, vchar, vint. */
6242 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
6243 type = v4si_ftype_v16qi_v4si;
6244
6245 /* vint, vchar, vchar. */
6246 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
6247 type = v4si_ftype_v16qi_v16qi;
6248
6249 /* vint, vshort, vint. */
6250 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
6251 type = v4si_ftype_v8hi_v4si;
6252
6253 /* vint, vint, 5 bit literal. */
6254 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
6255 type = v4si_ftype_v4si_char;
6256
6257 /* vshort, vshort, 5 bit literal. */
6258 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
6259 type = v8hi_ftype_v8hi_char;
6260
6261 /* vchar, vchar, 5 bit literal. */
6262 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
6263 type = v16qi_ftype_v16qi_char;
6264
6265 /* vfloat, vint, 5 bit literal. */
6266 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
6267 type = v4sf_ftype_v4si_char;
6268
6269 /* vint, vfloat, 5 bit literal. */
6270 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
6271 type = v4si_ftype_v4sf_char;
6272
6273 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
6274 type = v2si_ftype_int_int;
6275
6276 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
6277 type = v2si_ftype_v2si_char;
6278
6279 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
6280 type = v2si_ftype_int_char;
6281
6282 /* int, x, x. */
6283 else if (mode0 == SImode)
6284 {
6285 switch (mode1)
6286 {
6287 case V4SImode:
6288 type = int_ftype_v4si_v4si;
6289 break;
6290 case V4SFmode:
6291 type = int_ftype_v4sf_v4sf;
6292 break;
6293 case V16QImode:
6294 type = int_ftype_v16qi_v16qi;
6295 break;
6296 case V8HImode:
6297 type = int_ftype_v8hi_v8hi;
6298 break;
6299 default:
6300 abort ();
6301 }
6302 }
6303
6304 else
6305 abort ();
6306
6307 def_builtin (d->mask, d->name, type, d->code);
6308 }
6309
6310 /* Add the simple unary operators. */
6311 d = (struct builtin_description *) bdesc_1arg;
6312 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6313 {
6314 enum machine_mode mode0, mode1;
6315 tree type;
6316
6317 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6318 continue;
6319
6320 mode0 = insn_data[d->icode].operand[0].mode;
6321 mode1 = insn_data[d->icode].operand[1].mode;
6322
6323 if (mode0 == V4SImode && mode1 == QImode)
6324 type = v4si_ftype_char;
6325 else if (mode0 == V8HImode && mode1 == QImode)
6326 type = v8hi_ftype_char;
6327 else if (mode0 == V16QImode && mode1 == QImode)
6328 type = v16qi_ftype_char;
6329 else if (mode0 == V4SFmode && mode1 == V4SFmode)
6330 type = v4sf_ftype_v4sf;
6331 else if (mode0 == V8HImode && mode1 == V16QImode)
6332 type = v8hi_ftype_v16qi;
6333 else if (mode0 == V4SImode && mode1 == V8HImode)
6334 type = v4si_ftype_v8hi;
6335 else if (mode0 == V2SImode && mode1 == V2SImode)
6336 type = v2si_ftype_v2si;
6337 else if (mode0 == V2SFmode && mode1 == V2SFmode)
6338 type = v2sf_ftype_v2sf;
6339 else if (mode0 == V2SFmode && mode1 == V2SImode)
6340 type = v2sf_ftype_v2si;
6341 else if (mode0 == V2SImode && mode1 == V2SFmode)
6342 type = v2si_ftype_v2sf;
6343 else if (mode0 == V2SImode && mode1 == QImode)
6344 type = v2si_ftype_char;
6345 else
6346 abort ();
6347
6348 def_builtin (d->mask, d->name, type, d->code);
6349 }
6350 }
6351
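/* Editorial example (builtin name assumed, not taken from the tables
   above): a two-argument builtin whose insn pattern has all three
   operands in V4SFmode -- e.g. an AltiVec single-precision add --
   falls into the first case of the loop above and is registered with
   the v4sf_ftype_v4sf_v4sf type.  */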
6352 \f
6353 /* Expand a block move operation, and return 1 if successful. Return 0
6354 if we should let the compiler generate normal code.
6355
6356 operands[0] is the destination
6357 operands[1] is the source
6358 operands[2] is the length
6359 operands[3] is the alignment */
6360
6361 #define MAX_MOVE_REG 4
6362
6363 int
6364 expand_block_move (operands)
6365 rtx operands[];
6366 {
6367 rtx orig_dest = operands[0];
6368 rtx orig_src = operands[1];
6369 rtx bytes_rtx = operands[2];
6370 rtx align_rtx = operands[3];
6371 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
6372 int align;
6373 int bytes;
6374 int offset;
6375 int move_bytes;
6376
6377 /* If this is not a fixed size move, return 0 and let the caller fall back to memcpy. */
6378 if (! constp)
6379 return 0;
6380
6381 /* If this is not a fixed size alignment, abort. */
6382 if (GET_CODE (align_rtx) != CONST_INT)
6383 abort ();
6384 align = INTVAL (align_rtx);
6385
6386 /* Anything to move? */
6387 bytes = INTVAL (bytes_rtx);
6388 if (bytes <= 0)
6389 return 1;
6390
6391 /* store_one_arg depends on expand_block_move to handle at least the size of
6392 reg_parm_stack_space. */
6393 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
6394 return 0;
6395
6396 if (TARGET_STRING) /* string instructions are available */
6397 {
6398 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
6399 {
6400 union {
6401 rtx (*movstrsi) PARAMS ((rtx, rtx, rtx, rtx));
6402 rtx (*mov) PARAMS ((rtx, rtx));
6403 } gen_func;
6404 enum machine_mode mode = BLKmode;
6405 rtx src, dest;
6406
6407 if (bytes > 24 /* move up to 32 bytes at a time */
6408 && ! fixed_regs[5]
6409 && ! fixed_regs[6]
6410 && ! fixed_regs[7]
6411 && ! fixed_regs[8]
6412 && ! fixed_regs[9]
6413 && ! fixed_regs[10]
6414 && ! fixed_regs[11]
6415 && ! fixed_regs[12])
6416 {
6417 move_bytes = (bytes > 32) ? 32 : bytes;
6418 gen_func.movstrsi = gen_movstrsi_8reg;
6419 }
6420 else if (bytes > 16 /* move up to 24 bytes at a time */
6421 && ! fixed_regs[5]
6422 && ! fixed_regs[6]
6423 && ! fixed_regs[7]
6424 && ! fixed_regs[8]
6425 && ! fixed_regs[9]
6426 && ! fixed_regs[10])
6427 {
6428 move_bytes = (bytes > 24) ? 24 : bytes;
6429 gen_func.movstrsi = gen_movstrsi_6reg;
6430 }
6431 else if (bytes > 8 /* move up to 16 bytes at a time */
6432 && ! fixed_regs[5]
6433 && ! fixed_regs[6]
6434 && ! fixed_regs[7]
6435 && ! fixed_regs[8])
6436 {
6437 move_bytes = (bytes > 16) ? 16 : bytes;
6438 gen_func.movstrsi = gen_movstrsi_4reg;
6439 }
6440 else if (bytes >= 8 && TARGET_POWERPC64
6441 /* 64-bit loads and stores require word-aligned
6442 displacements. */
6443 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
6444 {
6445 move_bytes = 8;
6446 mode = DImode;
6447 gen_func.mov = gen_movdi;
6448 }
6449 else if (bytes > 4 && !TARGET_POWERPC64)
6450 { /* move up to 8 bytes at a time */
6451 move_bytes = (bytes > 8) ? 8 : bytes;
6452 gen_func.movstrsi = gen_movstrsi_2reg;
6453 }
6454 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
6455 { /* move 4 bytes */
6456 move_bytes = 4;
6457 mode = SImode;
6458 gen_func.mov = gen_movsi;
6459 }
6460 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
6461 { /* move 2 bytes */
6462 move_bytes = 2;
6463 mode = HImode;
6464 gen_func.mov = gen_movhi;
6465 }
6466 else if (bytes == 1) /* move 1 byte */
6467 {
6468 move_bytes = 1;
6469 mode = QImode;
6470 gen_func.mov = gen_movqi;
6471 }
6472 else
6473 { /* move up to 4 bytes at a time */
6474 move_bytes = (bytes > 4) ? 4 : bytes;
6475 gen_func.movstrsi = gen_movstrsi_1reg;
6476 }
6477
6478 src = adjust_address (orig_src, mode, offset);
6479 dest = adjust_address (orig_dest, mode, offset);
6480
6481 if (mode == BLKmode)
6482 {
6483 /* Move the address into scratch registers. The movstrsi
6484 patterns require zero offset. */
6485 if (!REG_P (XEXP (src, 0)))
6486 {
6487 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
6488 src = replace_equiv_address (src, src_reg);
6489 }
6490 set_mem_size (src, GEN_INT (move_bytes));
6491
6492 if (!REG_P (XEXP (dest, 0)))
6493 {
6494 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
6495 dest = replace_equiv_address (dest, dest_reg);
6496 }
6497 set_mem_size (dest, GEN_INT (move_bytes));
6498
6499 emit_insn ((*gen_func.movstrsi) (dest, src,
6500 GEN_INT (move_bytes & 31),
6501 align_rtx));
6502 }
6503 else
6504 {
6505 rtx tmp_reg = gen_reg_rtx (mode);
6506
6507 emit_insn ((*gen_func.mov) (tmp_reg, src));
6508 emit_insn ((*gen_func.mov) (dest, tmp_reg));
6509 }
6510 }
6511 }
6512
6513 else /* string instructions not available */
6514 {
6515 rtx stores[MAX_MOVE_REG];
6516 int num_reg = 0;
6517 int i;
6518
6519 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
6520 {
6521 rtx (*gen_mov_func) PARAMS ((rtx, rtx));
6522 enum machine_mode mode;
6523 rtx src, dest, tmp_reg;
6524
6525 /* Generate the appropriate load and store, saving the stores
6526 for later. */
6527 if (bytes >= 8 && TARGET_POWERPC64
6528 /* 64-bit loads and stores require word-aligned
6529 displacements. */
6530 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
6531 {
6532 move_bytes = 8;
6533 mode = DImode;
6534 gen_mov_func = gen_movdi;
6535 }
6536 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
6537 {
6538 move_bytes = 4;
6539 mode = SImode;
6540 gen_mov_func = gen_movsi;
6541 }
6542 else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
6543 {
6544 move_bytes = 2;
6545 mode = HImode;
6546 gen_mov_func = gen_movhi;
6547 }
6548 else
6549 {
6550 move_bytes = 1;
6551 mode = QImode;
6552 gen_mov_func = gen_movqi;
6553 }
6554
6555 src = adjust_address (orig_src, mode, offset);
6556 dest = adjust_address (orig_dest, mode, offset);
6557 tmp_reg = gen_reg_rtx (mode);
6558
6559 emit_insn ((*gen_mov_func) (tmp_reg, src));
6560 stores[num_reg++] = (*gen_mov_func) (dest, tmp_reg);
6561
6562 if (num_reg >= MAX_MOVE_REG)
6563 {
6564 for (i = 0; i < num_reg; i++)
6565 emit_insn (stores[i]);
6566 num_reg = 0;
6567 }
6568 }
6569
6570 for (i = 0; i < num_reg; i++)
6571 emit_insn (stores[i]);
6572 }
6573
6574 return 1;
6575 }
6576
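/* Worked example (editorial, not from the original source): with
   TARGET_STRING off on a 32-bit target, bytes == 11 and align == 4,
   the second loop above emits SImode load/store pairs at offsets 0
   and 4, an HImode pair at offset 8 and a QImode pair at offset 10;
   the four buffered stores fill STORES[] and are flushed once num_reg
   reaches MAX_MOVE_REG.  */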
6577 \f
6578 /* Return 1 if OP is a load multiple operation. It is known to be a
6579 PARALLEL and the first element will be tested. */
6580
6581 int
6582 load_multiple_operation (op, mode)
6583 rtx op;
6584 enum machine_mode mode ATTRIBUTE_UNUSED;
6585 {
6586 int count = XVECLEN (op, 0);
6587 unsigned int dest_regno;
6588 rtx src_addr;
6589 int i;
6590
6591 /* Perform a quick check so we don't blow up below. */
6592 if (count <= 1
6593 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6594 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6595 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
6596 return 0;
6597
6598 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6599 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
6600
6601 for (i = 1; i < count; i++)
6602 {
6603 rtx elt = XVECEXP (op, 0, i);
6604
6605 if (GET_CODE (elt) != SET
6606 || GET_CODE (SET_DEST (elt)) != REG
6607 || GET_MODE (SET_DEST (elt)) != SImode
6608 || REGNO (SET_DEST (elt)) != dest_regno + i
6609 || GET_CODE (SET_SRC (elt)) != MEM
6610 || GET_MODE (SET_SRC (elt)) != SImode
6611 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
6612 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
6613 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
6614 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
6615 return 0;
6616 }
6617
6618 return 1;
6619 }
6620
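/* Editorial sketch of an accepted PARALLEL (the register numbers are
   assumed for illustration):

     (parallel [(set (reg:SI 3) (mem:SI (reg:SI 9)))
                (set (reg:SI 4) (mem:SI (plus:SI (reg:SI 9)
                                                 (const_int 4))))
                (set (reg:SI 5) (mem:SI (plus:SI (reg:SI 9)
                                                 (const_int 8))))])

   i.e. consecutive SImode destination registers loaded from
   consecutive word offsets off one base address.  */
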
6621 /* Similar, but tests for store multiple. Here, the second vector element
6622 is a CLOBBER. It will be tested later. */
6623
6624 int
6625 store_multiple_operation (op, mode)
6626 rtx op;
6627 enum machine_mode mode ATTRIBUTE_UNUSED;
6628 {
6629 int count = XVECLEN (op, 0) - 1;
6630 unsigned int src_regno;
6631 rtx dest_addr;
6632 int i;
6633
6634 /* Perform a quick check so we don't blow up below. */
6635 if (count <= 1
6636 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6637 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
6638 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
6639 return 0;
6640
6641 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6642 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
6643
6644 for (i = 1; i < count; i++)
6645 {
6646 rtx elt = XVECEXP (op, 0, i + 1);
6647
6648 if (GET_CODE (elt) != SET
6649 || GET_CODE (SET_SRC (elt)) != REG
6650 || GET_MODE (SET_SRC (elt)) != SImode
6651 || REGNO (SET_SRC (elt)) != src_regno + i
6652 || GET_CODE (SET_DEST (elt)) != MEM
6653 || GET_MODE (SET_DEST (elt)) != SImode
6654 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
6655 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
6656 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
6657 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
6658 return 0;
6659 }
6660
6661 return 1;
6662 }
6663
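/* Editorial note: unlike the load form above, element 1 of a store
   multiple PARALLEL is a CLOBBER, which is why count excludes it and
   the loop indexes XVECEXP (op, 0, i + 1); the remaining elements
   mirror the load case with SET_SRC and SET_DEST exchanged.  */
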
6664 /* Return a string to perform a load_multiple operation.
6665 operands[0] is the vector.
6666 operands[1] is the source address.
6667 operands[2] is the first destination register. */
6668
6669 const char *
6670 rs6000_output_load_multiple (operands)
6671 rtx operands[3];
6672 {
6673 /* We have to handle the case where the pseudo used to contain the address
6674 is assigned to one of the output registers. */
6675 int i, j;
6676 int words = XVECLEN (operands[0], 0);
6677 rtx xop[10];
6678
6679 if (XVECLEN (operands[0], 0) == 1)
6680 return "{l|lwz} %2,0(%1)";
6681
6682 for (i = 0; i < words; i++)
6683 if (refers_to_regno_p (REGNO (operands[2]) + i,
6684 REGNO (operands[2]) + i + 1, operands[1], 0))
6685 {
6686 if (i == words-1)
6687 {
6688 xop[0] = GEN_INT (4 * (words-1));
6689 xop[1] = operands[1];
6690 xop[2] = operands[2];
6691 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
6692 return "";
6693 }
6694 else if (i == 0)
6695 {
6696 xop[0] = GEN_INT (4 * (words-1));
6697 xop[1] = operands[1];
6698 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
6699 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
6700 return "";
6701 }
6702 else
6703 {
6704 for (j = 0; j < words; j++)
6705 if (j != i)
6706 {
6707 xop[0] = GEN_INT (j * 4);
6708 xop[1] = operands[1];
6709 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
6710 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
6711 }
6712 xop[0] = GEN_INT (i * 4);
6713 xop[1] = operands[1];
6714 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
6715 return "";
6716 }
6717 }
6718
6719 return "{lsi|lswi} %2,%1,%N0";
6720 }
6721
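/* Worked example (editorial; register numbers assumed): for a 3-word
   load with destinations r4..r6 whose address pseudo landed in r6,
   the i == words-1 branch above emits

     lswi 4,6,8
     lwz  6,8(6)

   filling r4 and r5 with a string insn first, and only then
   overwriting the address register r6 with the final word.  */
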
6722 /* Return 1 for a parallel vrsave operation. */
6723
6724 int
6725 vrsave_operation (op, mode)
6726 rtx op;
6727 enum machine_mode mode ATTRIBUTE_UNUSED;
6728 {
6729 int count = XVECLEN (op, 0);
6730 unsigned int dest_regno, src_regno;
6731 int i;
6732
6733 if (count <= 1
6734 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6735 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6736 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
6737 return 0;
6738
6739 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6740 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6741
6742 if (dest_regno != VRSAVE_REGNO
6743 && src_regno != VRSAVE_REGNO)
6744 return 0;
6745
6746 for (i = 1; i < count; i++)
6747 {
6748 rtx elt = XVECEXP (op, 0, i);
6749
6750 if (GET_CODE (elt) != CLOBBER
6751 && GET_CODE (elt) != SET)
6752 return 0;
6753 }
6754
6755 return 1;
6756 }
6757
6758 /* Return 1 for a PARALLEL suitable for mtcrf. */
6759
6760 int
6761 mtcrf_operation (op, mode)
6762 rtx op;
6763 enum machine_mode mode ATTRIBUTE_UNUSED;
6764 {
6765 int count = XVECLEN (op, 0);
6766 int i;
6767 rtx src_reg;
6768
6769 /* Perform a quick check so we don't blow up below. */
6770 if (count < 1
6771 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6772 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
6773 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
6774 return 0;
6775 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
6776
6777 if (GET_CODE (src_reg) != REG
6778 || GET_MODE (src_reg) != SImode
6779 || ! INT_REGNO_P (REGNO (src_reg)))
6780 return 0;
6781
6782 for (i = 0; i < count; i++)
6783 {
6784 rtx exp = XVECEXP (op, 0, i);
6785 rtx unspec;
6786 int maskval;
6787
6788 if (GET_CODE (exp) != SET
6789 || GET_CODE (SET_DEST (exp)) != REG
6790 || GET_MODE (SET_DEST (exp)) != CCmode
6791 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
6792 return 0;
6793 unspec = SET_SRC (exp);
6794 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
6795
6796 if (GET_CODE (unspec) != UNSPEC
6797 || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
6798 || XVECLEN (unspec, 0) != 2
6799 || XVECEXP (unspec, 0, 0) != src_reg
6800 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
6801 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
6802 return 0;
6803 }
6804 return 1;
6805 }
6806
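/* Editorial worked example (assuming MAX_CR_REGNO is the hard register
   number of CR7): for a SET whose destination is CR field 2, maskval
   is 1 << 5 == 0x20, the same FXM bit (0x80 >> 2) that the mtcrf
   instruction uses to select field 2.  */
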
6807 /* Return 1 for a PARALLEL suitable for lmw. */
6808
6809 int
6810 lmw_operation (op, mode)
6811 rtx op;
6812 enum machine_mode mode ATTRIBUTE_UNUSED;
6813 {
6814 int count = XVECLEN (op, 0);
6815 unsigned int dest_regno;
6816 rtx src_addr;
6817 unsigned int base_regno;
6818 HOST_WIDE_INT offset;
6819 int i;
6820
6821 /* Perform a quick check so we don't blow up below. */
6822 if (count <= 1
6823 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6824 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6825 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
6826 return 0;
6827
6828 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6829 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
6830
6831 if (dest_regno > 31
6832 || count != 32 - (int) dest_regno)
6833 return 0;
6834
6835 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
6836 {
6837 offset = 0;
6838 base_regno = REGNO (src_addr);
6839 if (base_regno == 0)
6840 return 0;
6841 }
6842 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
6843 {
6844 offset = INTVAL (XEXP (src_addr, 1));
6845 base_regno = REGNO (XEXP (src_addr, 0));
6846 }
6847 else
6848 return 0;
6849
6850 for (i = 0; i < count; i++)
6851 {
6852 rtx elt = XVECEXP (op, 0, i);
6853 rtx newaddr;
6854 rtx addr_reg;
6855 HOST_WIDE_INT newoffset;
6856
6857 if (GET_CODE (elt) != SET
6858 || GET_CODE (SET_DEST (elt)) != REG
6859 || GET_MODE (SET_DEST (elt)) != SImode
6860 || REGNO (SET_DEST (elt)) != dest_regno + i
6861 || GET_CODE (SET_SRC (elt)) != MEM
6862 || GET_MODE (SET_SRC (elt)) != SImode)
6863 return 0;
6864 newaddr = XEXP (SET_SRC (elt), 0);
6865 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
6866 {
6867 newoffset = 0;
6868 addr_reg = newaddr;
6869 }
6870 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
6871 {
6872 addr_reg = XEXP (newaddr, 0);
6873 newoffset = INTVAL (XEXP (newaddr, 1));
6874 }
6875 else
6876 return 0;
6877 if (REGNO (addr_reg) != base_regno
6878 || newoffset != offset + 4 * i)
6879 return 0;
6880 }
6881
6882 return 1;
6883 }
6884
6885 /* Return 1 for a PARALLEL suitable for stmw. */
6886
6887 int
6888 stmw_operation (op, mode)
6889 rtx op;
6890 enum machine_mode mode ATTRIBUTE_UNUSED;
6891 {
6892 int count = XVECLEN (op, 0);
6893 unsigned int src_regno;
6894 rtx dest_addr;
6895 unsigned int base_regno;
6896 HOST_WIDE_INT offset;
6897 int i;
6898
6899 /* Perform a quick check so we don't blow up below. */
6900 if (count <= 1
6901 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6902 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
6903 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
6904 return 0;
6905
6906 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6907 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
6908
6909 if (src_regno > 31
6910 || count != 32 - (int) src_regno)
6911 return 0;
6912
6913 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
6914 {
6915 offset = 0;
6916 base_regno = REGNO (dest_addr);
6917 if (base_regno == 0)
6918 return 0;
6919 }
6920 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
6921 {
6922 offset = INTVAL (XEXP (dest_addr, 1));
6923 base_regno = REGNO (XEXP (dest_addr, 0));
6924 }
6925 else
6926 return 0;
6927
6928 for (i = 0; i < count; i++)
6929 {
6930 rtx elt = XVECEXP (op, 0, i);
6931 rtx newaddr;
6932 rtx addr_reg;
6933 HOST_WIDE_INT newoffset;
6934
6935 if (GET_CODE (elt) != SET
6936 || GET_CODE (SET_SRC (elt)) != REG
6937 || GET_MODE (SET_SRC (elt)) != SImode
6938 || REGNO (SET_SRC (elt)) != src_regno + i
6939 || GET_CODE (SET_DEST (elt)) != MEM
6940 || GET_MODE (SET_DEST (elt)) != SImode)
6941 return 0;
6942 newaddr = XEXP (SET_DEST (elt), 0);
6943 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
6944 {
6945 newoffset = 0;
6946 addr_reg = newaddr;
6947 }
6948 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
6949 {
6950 addr_reg = XEXP (newaddr, 0);
6951 newoffset = INTVAL (XEXP (newaddr, 1));
6952 }
6953 else
6954 return 0;
6955 if (REGNO (addr_reg) != base_regno
6956 || newoffset != offset + 4 * i)
6957 return 0;
6958 }
6959
6960 return 1;
6961 }
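
/* Editorial example: lmw and stmw always run through r31, hence the
   count == 32 - regno checks above.  An lmw starting at r28 must be a
   4-element PARALLEL loading r28..r31 from offsets OFFSET, OFFSET+4,
   OFFSET+8 and OFFSET+12 off a single base register.  */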
6962 \f
6963 /* A validation routine: say whether CODE, a condition code, and MODE
6964 match. The other alternatives either don't make sense or should
6965 never be generated. */
6966
6967 static void
6968 validate_condition_mode (code, mode)
6969 enum rtx_code code;
6970 enum machine_mode mode;
6971 {
6972 if (GET_RTX_CLASS (code) != '<'
6973 || GET_MODE_CLASS (mode) != MODE_CC)
6974 abort ();
6975
6976 /* These don't make sense. */
6977 if ((code == GT || code == LT || code == GE || code == LE)
6978 && mode == CCUNSmode)
6979 abort ();
6980
6981 if ((code == GTU || code == LTU || code == GEU || code == LEU)
6982 && mode != CCUNSmode)
6983 abort ();
6984
6985 if (mode != CCFPmode
6986 && (code == ORDERED || code == UNORDERED
6987 || code == UNEQ || code == LTGT
6988 || code == UNGT || code == UNLT
6989 || code == UNGE || code == UNLE))
6990 abort ();
6991
6992 /* These should never be generated except for
6993 flag_unsafe_math_optimizations and flag_finite_math_only. */
6994 if (mode == CCFPmode
6995 && ! flag_unsafe_math_optimizations
6996 && ! flag_finite_math_only
6997 && (code == LE || code == GE
6998 || code == UNEQ || code == LTGT
6999 || code == UNGT || code == UNLT))
7000 abort ();
7001
7002 /* These are invalid; the information is not there. */
7003 if (mode == CCEQmode
7004 && code != EQ && code != NE)
7005 abort ();
7006 }
7007
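/* Editorial examples of the rules above: (GTU, CCUNSmode) and
   (UNORDERED, CCFPmode) validate, (GTU, CCmode) aborts because
   unsigned comparisons must carry CCUNSmode, and (LT, CCEQmode)
   aborts because CCEQmode records only equality.  */
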
7008 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
7009 We only check the opcode against the mode of the CC value here. */
7010
7011 int
7012 branch_comparison_operator (op, mode)
7013 rtx op;
7014 enum machine_mode mode ATTRIBUTE_UNUSED;
7015 {
7016 enum rtx_code code = GET_CODE (op);
7017 enum machine_mode cc_mode;
7018
7019 if (GET_RTX_CLASS (code) != '<')
7020 return 0;
7021
7022 cc_mode = GET_MODE (XEXP (op, 0));
7023 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
7024 return 0;
7025
7026 validate_condition_mode (code, cc_mode);
7027
7028 return 1;
7029 }
7030
7031 /* Return 1 if OP is a comparison operation that is valid for a branch
7032 insn and which is true if the corresponding bit in the CC register
7033 is set. */
7034
7035 int
7036 branch_positive_comparison_operator (op, mode)
7037 rtx op;
7038 enum machine_mode mode;
7039 {
7040 enum rtx_code code;
7041
7042 if (! branch_comparison_operator (op, mode))
7043 return 0;
7044
7045 code = GET_CODE (op);
7046 return (code == EQ || code == LT || code == GT
7047 || (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
7048 || code == LTU || code == GTU
7049 || code == UNORDERED);
7050 }
7051
7052 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
7053 We check the opcode against the mode of the CC value and disallow
7054 NE comparisons for integers. */
7055
7056 int
7057 scc_comparison_operator (op, mode)
7058 rtx op;
7059 enum machine_mode mode;
7060 {
7061 enum rtx_code code = GET_CODE (op);
7062 enum machine_mode cc_mode;
7063
7064 if (GET_MODE (op) != mode && mode != VOIDmode)
7065 return 0;
7066
7067 if (GET_RTX_CLASS (code) != '<')
7068 return 0;
7069
7070 cc_mode = GET_MODE (XEXP (op, 0));
7071 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
7072 return 0;
7073
7074 validate_condition_mode (code, cc_mode);
7075
7076 if (code == NE && cc_mode != CCFPmode)
7077 return 0;
7078
7079 return 1;
7080 }
7081
7082 int
7083 trap_comparison_operator (op, mode)
7084 rtx op;
7085 enum machine_mode mode;
7086 {
7087 if (mode != VOIDmode && mode != GET_MODE (op))
7088 return 0;
7089 return GET_RTX_CLASS (GET_CODE (op)) == '<';
7090 }
7091
7092 int
7093 boolean_operator (op, mode)
7094 rtx op;
7095 enum machine_mode mode ATTRIBUTE_UNUSED;
7096 {
7097 enum rtx_code code = GET_CODE (op);
7098 return (code == AND || code == IOR || code == XOR);
7099 }
7100
7101 int
7102 boolean_or_operator (op, mode)
7103 rtx op;
7104 enum machine_mode mode ATTRIBUTE_UNUSED;
7105 {
7106 enum rtx_code code = GET_CODE (op);
7107 return (code == IOR || code == XOR);
7108 }
7109
7110 int
7111 min_max_operator (op, mode)
7112 rtx op;
7113 enum machine_mode mode ATTRIBUTE_UNUSED;
7114 {
7115 enum rtx_code code = GET_CODE (op);
7116 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
7117 }
7118 \f
7119 /* Return 1 if ANDOP is a mask with no set bits outside the mask
7120 required to convert the result of a rotate insn into a shift left
7121 insn of SHIFTOP bits. Both are known to be SImode CONST_INTs. */
7122
7123 int
7124 includes_lshift_p (shiftop, andop)
7125 rtx shiftop;
7126 rtx andop;
7127 {
7128 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7129
7130 shift_mask <<= INTVAL (shiftop);
7131
7132 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7133 }
7134
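/* Worked example (editorial): for SHIFTOP == 4 the shift mask is
   0xfffffff0, so ANDOP == 0x0000ffe0 is accepted (no set bits outside
   the mask) while ANDOP == 0x0000001f is rejected because bits 0-3
   would have been discarded by the shift.  */
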
7135 /* Similar, but for right shift. */
7136
7137 int
7138 includes_rshift_p (shiftop, andop)
7139 rtx shiftop;
7140 rtx andop;
7141 {
7142 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7143
7144 shift_mask >>= INTVAL (shiftop);
7145
7146 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7147 }
7148
7149 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
7150 to perform a left shift. It must have exactly SHIFTOP least
7151 significant 0's, then one or more 1's, then zero or more 0's. */
7152
7153 int
7154 includes_rldic_lshift_p (shiftop, andop)
7155 rtx shiftop;
7156 rtx andop;
7157 {
7158 if (GET_CODE (andop) == CONST_INT)
7159 {
7160 HOST_WIDE_INT c, lsb, shift_mask;
7161
7162 c = INTVAL (andop);
7163 if (c == 0 || c == ~0)
7164 return 0;
7165
7166 shift_mask = ~0;
7167 shift_mask <<= INTVAL (shiftop);
7168
7169 /* Find the least significant one bit. */
7170 lsb = c & -c;
7171
7172 /* It must coincide with the LSB of the shift mask. */
7173 if (-lsb != shift_mask)
7174 return 0;
7175
7176 /* Invert to look for the next transition (if any). */
7177 c = ~c;
7178
7179 /* Remove the low group of ones (originally low group of zeros). */
7180 c &= -lsb;
7181
7182 /* Again find the lsb, and check we have all 1's above. */
7183 lsb = c & -c;
7184 return c == -lsb;
7185 }
7186 else if (GET_CODE (andop) == CONST_DOUBLE
7187 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
7188 {
7189 HOST_WIDE_INT low, high, lsb;
7190 HOST_WIDE_INT shift_mask_low, shift_mask_high;
7191
7192 low = CONST_DOUBLE_LOW (andop);
7193 if (HOST_BITS_PER_WIDE_INT < 64)
7194 high = CONST_DOUBLE_HIGH (andop);
7195
7196 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
7197 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
7198 return 0;
7199
7200 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
7201 {
7202 shift_mask_high = ~0;
7203 if (INTVAL (shiftop) > 32)
7204 shift_mask_high <<= INTVAL (shiftop) - 32;
7205
7206 lsb = high & -high;
7207
7208 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
7209 return 0;
7210
7211 high = ~high;
7212 high &= -lsb;
7213
7214 lsb = high & -high;
7215 return high == -lsb;
7216 }
7217
7218 shift_mask_low = ~0;
7219 shift_mask_low <<= INTVAL (shiftop);
7220
7221 lsb = low & -low;
7222
7223 if (-lsb != shift_mask_low)
7224 return 0;
7225
7226 if (HOST_BITS_PER_WIDE_INT < 64)
7227 high = ~high;
7228 low = ~low;
7229 low &= -lsb;
7230
7231 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
7232 {
7233 lsb = high & -high;
7234 return high == -lsb;
7235 }
7236
7237 lsb = low & -low;
7238 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
7239 }
7240 else
7241 return 0;
7242 }
7243
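/* Worked example (editorial) for the CONST_INT path above: with
   SHIFTOP == 4 and ANDOP == 0x0ff0, lsb is 0x10 and -lsb equals the
   shift mask ~0 << 4; inverting c and stripping the low run of ones
   leaves ...f000, which satisfies c == -lsb, so the mask (four 0's,
   eight 1's, then 0's) is accepted for rldic.  */
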
7244 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
7245 to perform a left shift. It must have SHIFTOP or more least
7246 significant 0's, with the remainder of the word 1's. */
7247
7248 int
7249 includes_rldicr_lshift_p (shiftop, andop)
7250 rtx shiftop;
7251 rtx andop;
7252 {
7253 if (GET_CODE (andop) == CONST_INT)
7254 {
7255 HOST_WIDE_INT c, lsb, shift_mask;
7256
7257 shift_mask = ~0;
7258 shift_mask <<= INTVAL (shiftop);
7259 c = INTVAL (andop);
7260
7261 /* Find the least significant one bit. */
7262 lsb = c & -c;
7263
7264 /* It must be covered by the shift mask.
7265 This test also rejects c == 0. */
7266 if ((lsb & shift_mask) == 0)
7267 return 0;
7268
7269 /* Check we have all 1's above the transition, and reject all 1's. */
7270 return c == -lsb && lsb != 1;
7271 }
7272 else if (GET_CODE (andop) == CONST_DOUBLE
7273 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
7274 {
7275 HOST_WIDE_INT low, lsb, shift_mask_low;
7276
7277 low = CONST_DOUBLE_LOW (andop);
7278
7279 if (HOST_BITS_PER_WIDE_INT < 64)
7280 {
7281 HOST_WIDE_INT high, shift_mask_high;
7282
7283 high = CONST_DOUBLE_HIGH (andop);
7284
7285 if (low == 0)
7286 {
7287 shift_mask_high = ~0;
7288 if (INTVAL (shiftop) > 32)
7289 shift_mask_high <<= INTVAL (shiftop) - 32;
7290
7291 lsb = high & -high;
7292
7293 if ((lsb & shift_mask_high) == 0)
7294 return 0;
7295
7296 return high == -lsb;
7297 }
7298 if (high != ~0)
7299 return 0;
7300 }
7301
7302 shift_mask_low = ~0;
7303 shift_mask_low <<= INTVAL (shiftop);
7304
7305 lsb = low & -low;
7306
7307 if ((lsb & shift_mask_low) == 0)
7308 return 0;
7309
7310 return low == -lsb && lsb != 1;
7311 }
7312 else
7313 return 0;
7314 }
7315
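/* Editorial example: with SHIFTOP == 4, ANDOP == ~(HOST_WIDE_INT) 15
   (ones everywhere above the low four bits) is accepted -- its lsb
   0x10 lies inside the shift mask and everything above it is ones --
   whereas a mask with a hole, such as 0xff0f, is rejected.  */
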
7316 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
7317 for lfq and stfq insns.
7318
7319 Note reg1 and reg2 *must* be hard registers. To be sure, we
7320 abort if we are passed pseudo registers. */
7321
7322 int
7323 registers_ok_for_quad_peep (reg1, reg2)
7324 rtx reg1, reg2;
7325 {
7326 /* We might have been passed a SUBREG. */
7327 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
7328 return 0;
7329
7330 return (REGNO (reg1) == REGNO (reg2) - 1);
7331 }
7332
7333 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
7334 addr1 and addr2 must be in consecutive memory locations
7335 (addr2 == addr1 + 8). */
7336
7337 int
7338 addrs_ok_for_quad_peep (addr1, addr2)
7339 rtx addr1;
7340 rtx addr2;
7341 {
7342 unsigned int reg1;
7343 int offset1;
7344
7345 /* Extract an offset (if used) from the first addr. */
7346 if (GET_CODE (addr1) == PLUS)
7347 {
7348 /* If not a REG, return zero. */
7349 if (GET_CODE (XEXP (addr1, 0)) != REG)
7350 return 0;
7351 else
7352 {
7353 reg1 = REGNO (XEXP (addr1, 0));
7354 /* The offset must be constant! */
7355 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
7356 return 0;
7357 offset1 = INTVAL (XEXP (addr1, 1));
7358 }
7359 }
7360 else if (GET_CODE (addr1) != REG)
7361 return 0;
7362 else
7363 {
7364 reg1 = REGNO (addr1);
7365 /* This was a simple (mem (reg)) expression. Offset is 0. */
7366 offset1 = 0;
7367 }
7368
7369 /* Make sure the second address is a (mem (plus (reg) (const_int)))
7370 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
7371 register as addr1. */
7372 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
7373 return 1;
7374 if (GET_CODE (addr2) != PLUS)
7375 return 0;
7376
7377 if (GET_CODE (XEXP (addr2, 0)) != REG
7378 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
7379 return 0;
7380
7381 if (reg1 != REGNO (XEXP (addr2, 0)))
7382 return 0;
7383
7384 /* The offset for the second addr must be 8 more than the first addr. */
7385 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
7386 return 0;
7387
7388 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7389 instructions. */
7390 return 1;
7391 }
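
/* Editorial examples (register numbers assumed): addr1 == (reg 9)
   with addr2 == (plus (reg 9) (const_int 8)) passes, as does the
   pre-decrement pair addr1 == (plus (reg 9) (const_int -8)) with
   addr2 == (reg 9); any other base register or offset delta fails.  */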
7392 \f
7393 /* Return the register class of a scratch register needed to copy IN into
7394 or out of a register in CLASS in MODE. If it can be done directly,
7395 NO_REGS is returned. */
7396
7397 enum reg_class
7398 secondary_reload_class (class, mode, in)
7399 enum reg_class class;
7400 enum machine_mode mode ATTRIBUTE_UNUSED;
7401 rtx in;
7402 {
7403 int regno;
7404
7405 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
7406 #if TARGET_MACHO
7407 && MACHOPIC_INDIRECT
7408 #endif
7409 ))
7410 {
7411 /* We cannot copy a symbolic operand directly into anything
7412 other than BASE_REGS for TARGET_ELF. So indicate that a
7413 register from BASE_REGS is needed as an intermediate
7414 register.
7415
7416 On Darwin, pic addresses require a load from memory, which
7417 needs a base register. */
7418 if (class != BASE_REGS
7419 && (GET_CODE (in) == SYMBOL_REF
7420 || GET_CODE (in) == HIGH
7421 || GET_CODE (in) == LABEL_REF
7422 || GET_CODE (in) == CONST))
7423 return BASE_REGS;
7424 }
7425
7426 if (GET_CODE (in) == REG)
7427 {
7428 regno = REGNO (in);
7429 if (regno >= FIRST_PSEUDO_REGISTER)
7430 {
7431 regno = true_regnum (in);
7432 if (regno >= FIRST_PSEUDO_REGISTER)
7433 regno = -1;
7434 }
7435 }
7436 else if (GET_CODE (in) == SUBREG)
7437 {
7438 regno = true_regnum (in);
7439 if (regno >= FIRST_PSEUDO_REGISTER)
7440 regno = -1;
7441 }
7442 else
7443 regno = -1;
7444
7445 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
7446 into anything. */
7447 if (class == GENERAL_REGS || class == BASE_REGS
7448 || (regno >= 0 && INT_REGNO_P (regno)))
7449 return NO_REGS;
7450
7451 /* Constants, memory, and FP registers can go into FP registers. */
7452 if ((regno == -1 || FP_REGNO_P (regno))
7453 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
7454 return NO_REGS;
7455
7456 /* Memory, and AltiVec registers can go into AltiVec registers. */
7457 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
7458 && class == ALTIVEC_REGS)
7459 return NO_REGS;
7460
7461 /* We can copy among the CR registers. */
7462 if ((class == CR_REGS || class == CR0_REGS)
7463 && regno >= 0 && CR_REGNO_P (regno))
7464 return NO_REGS;
7465
7466 /* Otherwise, we need GENERAL_REGS. */
7467 return GENERAL_REGS;
7468 }
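
/* Editorial examples of the rules above: on ELF, copying a SYMBOL_REF
   into FLOAT_REGS reports BASE_REGS as the needed intermediate class;
   a GPR-to-GPR or constant-to-FPR copy reports NO_REGS (no scratch
   needed); and moving a CR value into ALTIVEC_REGS falls through to
   GENERAL_REGS.  */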
7469 \f
7470 /* Given a comparison operation, return the bit number in CCR to test. We
7471 know this is a valid comparison.
7472
7473 SCC_P is 1 if this is for an scc. That means that %D will have been
7474 used instead of %C, so the bits will be in different places.
7475
7476 Return -1 if OP isn't a valid comparison for some reason. */
7477
7478 int
7479 ccr_bit (op, scc_p)
7480 rtx op;
7481 int scc_p;
7482 {
7483 enum rtx_code code = GET_CODE (op);
7484 enum machine_mode cc_mode;
7485 int cc_regnum;
7486 int base_bit;
7487 rtx reg;
7488
7489 if (GET_RTX_CLASS (code) != '<')
7490 return -1;
7491
7492 reg = XEXP (op, 0);
7493
7494 if (GET_CODE (reg) != REG
7495 || ! CR_REGNO_P (REGNO (reg)))
7496 abort ();
7497
7498 cc_mode = GET_MODE (reg);
7499 cc_regnum = REGNO (reg);
7500 base_bit = 4 * (cc_regnum - CR0_REGNO);
7501
7502 validate_condition_mode (code, cc_mode);
7503
7504 switch (code)
7505 {
7506 case NE:
7507 if (TARGET_E500 && !TARGET_FPRS
7508 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
7509 return base_bit + 1;
7510 return scc_p ? base_bit + 3 : base_bit + 2;
7511 case EQ:
7512 if (TARGET_E500 && !TARGET_FPRS
7513 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
7514 return base_bit + 1;
7515 return base_bit + 2;
7516 case GT: case GTU: case UNLE:
7517 return base_bit + 1;
7518 case LT: case LTU: case UNGE:
7519 return base_bit;
7520 case ORDERED: case UNORDERED:
7521 return base_bit + 3;
7522
7523 case GE: case GEU:
7524 /* If scc, we will have done a cror to put the bit in the
7525 unordered position. So test that bit. For integer, this is ! LT
7526 unless this is an scc insn. */
7527 return scc_p ? base_bit + 3 : base_bit;
7528
7529 case LE: case LEU:
7530 return scc_p ? base_bit + 3 : base_bit + 1;
7531
7532 default:
7533 abort ();
7534 }
7535 }
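
/* Editorial worked example: for a GT comparison whose CC register is
   CR field 1, base_bit is 4 * 1 == 4 and ccr_bit returns 5, the GT
   bit of that field; an scc GE test instead returns base_bit + 3
   because %D will have cror'ed the result into the unordered bit
   position.  */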
7536 \f
7537 /* Return the GOT register. */
7538
7539 struct rtx_def *
7540 rs6000_got_register (value)
7541 rtx value ATTRIBUTE_UNUSED;
7542 {
7543 /* The second flow pass currently (June 1999) can't update
7544 regs_ever_live without disturbing other parts of the compiler, so
7545 update it here to make the prolog/epilogue code happy. */
7546 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
7547 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
7548
7549 current_function_uses_pic_offset_table = 1;
7550
7551 return pic_offset_table_rtx;
7552 }
7553 \f
7554 /* Function to init struct machine_function.
7555 This will be called, via a pointer variable,
7556 from push_function_context. */
7557
7558 static struct machine_function *
7559 rs6000_init_machine_status ()
7560 {
7561 return ggc_alloc_cleared (sizeof (machine_function));
7562 }
7563 \f
7564 /* These macros test for integers and extract the low-order bits. */
7565 #define INT_P(X) \
7566 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
7567 && GET_MODE (X) == VOIDmode)
7568
7569 #define INT_LOWPART(X) \
7570 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
7571
7572 int
7573 extract_MB (op)
7574 rtx op;
7575 {
7576 int i;
7577 unsigned long val = INT_LOWPART (op);
7578
7579 /* If the high bit is zero, the value is the first 1 bit we find
7580 from the left. */
7581 if ((val & 0x80000000) == 0)
7582 {
7583 if ((val & 0xffffffff) == 0)
7584 abort ();
7585
7586 i = 1;
7587 while (((val <<= 1) & 0x80000000) == 0)
7588 ++i;
7589 return i;
7590 }
7591
7592 /* If the high bit is set and the low bit is not, or the mask is all
7593 1's, the value is zero. */
7594 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
7595 return 0;
7596
7597 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7598 from the right. */
7599 i = 31;
7600 while (((val >>= 1) & 1) != 0)
7601 --i;
7602
7603 return i;
7604 }
7605
7606 int
7607 extract_ME (op)
7608 rtx op;
7609 {
7610 int i;
7611 unsigned long val = INT_LOWPART (op);
7612
7613 /* If the low bit is zero, the value is the first 1 bit we find from
7614 the right. */
7615 if ((val & 1) == 0)
7616 {
7617 if ((val & 0xffffffff) == 0)
7618 abort ();
7619
7620 i = 30;
7621 while (((val >>= 1) & 1) == 0)
7622 --i;
7623
7624 return i;
7625 }
7626
7627 /* If the low bit is set and the high bit is not, or the mask is all
7628 1's, the value is 31. */
7629 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
7630 return 31;
7631
7632 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7633 from the left. */
7634 i = 0;
7635 while (((val <<= 1) & 0x80000000) != 0)
7636 ++i;
7637
7638 return i;
7639 }
7640
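/* Editorial worked examples: the mask 0x00001ff8 yields MB == 19 and
   ME == 28 (rlwinm-style big-endian bit numbers of the outermost 1
   bits), while the wrap-around mask 0xf000000f yields MB == 28 and
   ME == 3, found by scanning for the 0 runs from the right and left
   respectively.  */
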
7641 /* Print an operand. Recognize special options, documented below. */
7642
7643 #if TARGET_ELF
7644 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
7645 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
7646 #else
7647 #define SMALL_DATA_RELOC "sda21"
7648 #define SMALL_DATA_REG 0
7649 #endif
7650
7651 void
7652 print_operand (file, x, code)
7653 FILE *file;
7654 rtx x;
7655 int code;
7656 {
7657 int i;
7658 HOST_WIDE_INT val;
7659 unsigned HOST_WIDE_INT uval;
7660
7661 switch (code)
7662 {
7663 case '.':
7664 /* Write out an instruction after the call which may be replaced
7665 with glue code by the loader. This depends on the AIX version. */
7666 asm_fprintf (file, RS6000_CALL_GLUE);
7667 return;
7668
7669 /* %a is output_address. */
7670
7671 case 'A':
7672 /* If X is a constant integer whose low-order 5 bits are zero,
7673 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
7674 in the AIX assembler where "sri" with a zero shift count
7675 writes a trash instruction. */
7676 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
7677 putc ('l', file);
7678 else
7679 putc ('r', file);
7680 return;
7681
7682 case 'b':
7683 /* If constant, low-order 16 bits of constant, unsigned.
7684 Otherwise, write normally. */
7685 if (INT_P (x))
7686 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
7687 else
7688 print_operand (file, x, 0);
7689 return;
7690
7691 case 'B':
7692 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
7693 for 64-bit mask direction. */
7694 putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
7695 return;
7696
7697 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
7698 output_operand. */
7699
7700 case 'D':
7701 /* There used to be a comment for 'C' reading "This is an
7702 optional cror needed for certain floating-point
7703 comparisons. Otherwise write nothing." */
7704
7705 /* Similar, except that this is for an scc, so we must be able to
7706 encode the test in a single bit that is one. We do the above
7707 for any LE, GE, GEU, or LEU and invert the bit for NE. */
7708 if (GET_CODE (x) == LE || GET_CODE (x) == GE
7709 || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
7710 {
7711 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7712
7713 fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
7714 base_bit + 2,
7715 base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
7716 }
7717
7718 else if (GET_CODE (x) == NE)
7719 {
7720 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7721
7722 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
7723 base_bit + 2, base_bit + 2);
7724 }
7725 else if (TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT
7726 && GET_CODE (x) == EQ
7727 && GET_MODE (XEXP (x, 0)) == CCFPmode)
7728 {
7729 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7730
7731 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 1,
7732 base_bit + 1, base_bit + 1);
7733 }
7734 return;
7735
7736 case 'E':
7737 /* X is a CR register. Print the number of the EQ bit of the CR. */
7738 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7739 output_operand_lossage ("invalid %%E value");
7740 else
7741 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
7742 return;
7743
7744 case 'f':
7745 /* X is a CR register. Print the shift count needed to move it
7746 to the high-order four bits. */
7747 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7748 output_operand_lossage ("invalid %%f value");
7749 else
7750 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
7751 return;
7752
7753 case 'F':
7754 /* Similar, but print the count for the rotate in the opposite
7755 direction. */
7756 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7757 output_operand_lossage ("invalid %%F value");
7758 else
7759 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
7760 return;
7761
7762 case 'G':
7763 /* X is a constant integer. If it is negative, print "m",
7764 otherwise print "z". This is to make an aze or ame insn. */
7765 if (GET_CODE (x) != CONST_INT)
7766 output_operand_lossage ("invalid %%G value");
7767 else if (INTVAL (x) >= 0)
7768 putc ('z', file);
7769 else
7770 putc ('m', file);
7771 return;
7772
7773 case 'h':
7774 /* If constant, output low-order five bits. Otherwise, write
7775 normally. */
7776 if (INT_P (x))
7777 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
7778 else
7779 print_operand (file, x, 0);
7780 return;
7781
7782 case 'H':
7783 /* If constant, output low-order six bits. Otherwise, write
7784 normally. */
7785 if (INT_P (x))
7786 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
7787 else
7788 print_operand (file, x, 0);
7789 return;
7790
7791 case 'I':
7792 /* Print `i' if this is a constant, else nothing. */
7793 if (INT_P (x))
7794 putc ('i', file);
7795 return;
7796
7797 case 'j':
7798 /* Write the bit number in CCR for jump. */
7799 i = ccr_bit (x, 0);
7800 if (i == -1)
7801 output_operand_lossage ("invalid %%j code");
7802 else
7803 fprintf (file, "%d", i);
7804 return;
7805
7806 case 'J':
7807 /* Similar, but add one for shift count in rlinm for scc and pass
7808 scc flag to `ccr_bit'. */
7809 i = ccr_bit (x, 1);
7810 if (i == -1)
7811 output_operand_lossage ("invalid %%J code");
7812 else
7813 /* If we want bit 31, write a shift count of zero, not 32. */
7814 fprintf (file, "%d", i == 31 ? 0 : i + 1);
7815 return;
7816
7817 case 'k':
7818 /* X must be a constant. Write the 1's complement of the
7819 constant. */
7820 if (! INT_P (x))
7821 output_operand_lossage ("invalid %%k value");
7822 else
7823 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
7824 return;
7825
7826 case 'K':
7827 /* X must be a symbolic constant on ELF. Write an
7828 expression suitable for an 'addi' that adds in the low 16
7829 bits of the MEM. */
7830 if (GET_CODE (x) != CONST)
7831 {
7832 print_operand_address (file, x);
7833 fputs ("@l", file);
7834 }
7835 else
7836 {
7837 if (GET_CODE (XEXP (x, 0)) != PLUS
7838 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
7839 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
7840 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
7841 output_operand_lossage ("invalid %%K value");
7842 print_operand_address (file, XEXP (XEXP (x, 0), 0));
7843 fputs ("@l", file);
7844 /* For GNU as, there must be a non-alphanumeric character
7845 between 'l' and the number. The '-' is added by
7846 print_operand() already. */
7847 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
7848 fputs ("+", file);
7849 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
7850 }
7851 return;
7852
7853 /* %l is output_asm_label. */
7854
7855 case 'L':
7856 /* Write second word of DImode or DFmode reference. Works on register
7857 or non-indexed memory only. */
7858 if (GET_CODE (x) == REG)
7859 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
7860 else if (GET_CODE (x) == MEM)
7861 {
7862 /* Handle possible auto-increment. Since it is pre-increment and
7863 we have already done it, we can just use an offset of word. */
7864 if (GET_CODE (XEXP (x, 0)) == PRE_INC
7865 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
7866 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
7867 UNITS_PER_WORD));
7868 else
7869 output_address (XEXP (adjust_address_nv (x, SImode,
7870 UNITS_PER_WORD),
7871 0));
7872
7873 if (small_data_operand (x, GET_MODE (x)))
7874 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
7875 reg_names[SMALL_DATA_REG]);
7876 }
7877 return;
7878
7879 case 'm':
7880 /* MB value for a mask operand. */
7881 if (! mask_operand (x, SImode))
7882 output_operand_lossage ("invalid %%m value");
7883
7884 fprintf (file, "%d", extract_MB (x));
7885 return;
7886
7887 case 'M':
7888 /* ME value for a mask operand. */
7889 if (! mask_operand (x, SImode))
7890 output_operand_lossage ("invalid %%M value");
7891
7892 fprintf (file, "%d", extract_ME (x));
7893 return;
7894
7895 /* %n outputs the negative of its operand. */
7896
7897 case 'N':
7898 /* Write the number of elements in the vector times 4. */
7899 if (GET_CODE (x) != PARALLEL)
7900 output_operand_lossage ("invalid %%N value");
7901 else
7902 fprintf (file, "%d", XVECLEN (x, 0) * 4);
7903 return;
7904
7905 case 'O':
7906 /* Similar, but subtract 1 first. */
7907 if (GET_CODE (x) != PARALLEL)
7908 output_operand_lossage ("invalid %%O value");
7909 else
7910 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
7911 return;
7912
7913 case 'p':
7914 /* X is a CONST_INT that is a power of two. Output the logarithm. */
7915 if (! INT_P (x)
7916 || INT_LOWPART (x) < 0
7917 || (i = exact_log2 (INT_LOWPART (x))) < 0)
7918 output_operand_lossage ("invalid %%p value");
7919 else
7920 fprintf (file, "%d", i);
7921 return;
7922
7923 case 'P':
7924 /* The operand must be an indirect memory reference. The result
7925 is the register number. */
7926 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
7927 || REGNO (XEXP (x, 0)) >= 32)
7928 output_operand_lossage ("invalid %%P value");
7929 else
7930 fprintf (file, "%d", REGNO (XEXP (x, 0)));
7931 return;
7932
7933 case 'q':
7934 /* This outputs the logical code corresponding to a boolean
7935 expression. The expression may have one or both operands
7936 negated (if one, only the first one). For condition register
7937 logical operations, it will also treat the negated
7938 CR codes as NOTs, but not handle NOTs of them. */
7939 {
7940 const char *const *t = 0;
7941 const char *s;
7942 enum rtx_code code = GET_CODE (x);
7943 static const char * const tbl[3][3] = {
7944 { "and", "andc", "nor" },
7945 { "or", "orc", "nand" },
7946 { "xor", "eqv", "xor" } };
7947
7948 if (code == AND)
7949 t = tbl[0];
7950 else if (code == IOR)
7951 t = tbl[1];
7952 else if (code == XOR)
7953 t = tbl[2];
7954 else
7955 output_operand_lossage ("invalid %%q value");
7956
7957 if (GET_CODE (XEXP (x, 0)) != NOT)
7958 s = t[0];
7959 else
7960 {
7961 if (GET_CODE (XEXP (x, 1)) == NOT)
7962 s = t[2];
7963 else
7964 s = t[1];
7965 }
7966
7967 fputs (s, file);
7968 }
7969 return;
7970
7971 case 'R':
7972 /* X is a CR register. Print the mask for `mtcrf'. */
7973 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7974 output_operand_lossage ("invalid %%R value");
7975 else
7976 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
7977 return;
7978
7979 case 's':
7980 /* Low 5 bits of 32 - value */
7981 if (! INT_P (x))
7982 output_operand_lossage ("invalid %%s value");
7983 else
7984 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
7985 return;
7986
7987 case 'S':
7988 /* PowerPC64 mask position. All 0's is excluded.
7989 CONST_INT 32-bit mask is considered sign-extended so any
7990 transition must occur within the CONST_INT, not on the boundary. */
7991 if (! mask64_operand (x, DImode))
7992 output_operand_lossage ("invalid %%S value");
7993
7994 uval = INT_LOWPART (x);
7995
7996 if (uval & 1) /* Clear Left */
7997 {
7998 #if HOST_BITS_PER_WIDE_INT > 64
7999 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
8000 #endif
8001 i = 64;
8002 }
8003 else /* Clear Right */
8004 {
8005 uval = ~uval;
8006 #if HOST_BITS_PER_WIDE_INT > 64
8007 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
8008 #endif
8009 i = 63;
8010 }
8011 while (uval != 0)
8012 --i, uval >>= 1;
8013 if (i < 0)
8014 abort ();
8015 fprintf (file, "%d", i);
8016 return;
8017
8018 case 't':
8019 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
8020 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
8021 abort ();
8022
8023 /* Bit 3 is OV bit. */
8024 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
8025
8026 /* If we want bit 31, write a shift count of zero, not 32. */
8027 fprintf (file, "%d", i == 31 ? 0 : i + 1);
8028 return;
8029
8030 case 'T':
8031 /* Print the symbolic name of a branch target register. */
8032 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
8033 && REGNO (x) != COUNT_REGISTER_REGNUM))
8034 output_operand_lossage ("invalid %%T value");
8035 else if (REGNO (x) == LINK_REGISTER_REGNUM)
8036 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
8037 else
8038 fputs ("ctr", file);
8039 return;
8040
8041 case 'u':
8042 /* High-order 16 bits of constant for use in unsigned operand. */
8043 if (! INT_P (x))
8044 output_operand_lossage ("invalid %%u value");
8045 else
8046 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
8047 (INT_LOWPART (x) >> 16) & 0xffff);
8048 return;
8049
8050 case 'v':
8051 /* High-order 16 bits of constant for use in signed operand. */
8052 if (! INT_P (x))
8053 output_operand_lossage ("invalid %%v value");
8054 else
8055 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
8056 (INT_LOWPART (x) >> 16) & 0xffff);
8057 return;
8058
8059 case 'U':
8060 /* Print `u' if this has an auto-increment or auto-decrement. */
8061 if (GET_CODE (x) == MEM
8062 && (GET_CODE (XEXP (x, 0)) == PRE_INC
8063 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
8064 putc ('u', file);
8065 return;
8066
8067 case 'V':
8068 /* Print the trap code for this operand. */
8069 switch (GET_CODE (x))
8070 {
8071 case EQ:
8072 fputs ("eq", file); /* 4 */
8073 break;
8074 case NE:
8075 fputs ("ne", file); /* 24 */
8076 break;
8077 case LT:
8078 fputs ("lt", file); /* 16 */
8079 break;
8080 case LE:
8081 fputs ("le", file); /* 20 */
8082 break;
8083 case GT:
8084 fputs ("gt", file); /* 8 */
8085 break;
8086 case GE:
8087 fputs ("ge", file); /* 12 */
8088 break;
8089 case LTU:
8090 fputs ("llt", file); /* 2 */
8091 break;
8092 case LEU:
8093 fputs ("lle", file); /* 6 */
8094 break;
8095 case GTU:
8096 fputs ("lgt", file); /* 1 */
8097 break;
8098 case GEU:
8099 fputs ("lge", file); /* 5 */
8100 break;
8101 default:
8102 abort ();
8103 }
8104 break;
8105
8106 case 'w':
8107 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
8108 normally. */
8109 if (INT_P (x))
8110 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
8111 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
8112 else
8113 print_operand (file, x, 0);
8114 return;
8115
8116 case 'W':
8117 /* MB value for a PowerPC64 rldic operand. */
8118 val = (GET_CODE (x) == CONST_INT
8119 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
8120
8121 if (val < 0)
8122 i = -1;
8123 else
8124 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
8125 if ((val <<= 1) < 0)
8126 break;
8127
8128 #if HOST_BITS_PER_WIDE_INT == 32
8129 if (GET_CODE (x) == CONST_INT && i >= 0)
8130 i += 32; /* zero-extend high-part was all 0's */
8131 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
8132 {
8133 val = CONST_DOUBLE_LOW (x);
8134
8135 if (val == 0)
8136 abort ();
8137 else if (val < 0)
8138 --i;
8139 else
8140 for ( ; i < 64; i++)
8141 if ((val <<= 1) < 0)
8142 break;
8143 }
8144 #endif
8145
8146 fprintf (file, "%d", i + 1);
8147 return;
8148
8149 case 'X':
8150 if (GET_CODE (x) == MEM
8151 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
8152 putc ('x', file);
8153 return;
8154
8155 case 'Y':
8156 /* Like 'L', for third word of TImode */
8157 if (GET_CODE (x) == REG)
8158 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
8159 else if (GET_CODE (x) == MEM)
8160 {
8161 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8162 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8163 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
8164 else
8165 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
8166 if (small_data_operand (x, GET_MODE (x)))
8167 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8168 reg_names[SMALL_DATA_REG]);
8169 }
8170 return;
8171
8172 case 'z':
8173 /* X is a SYMBOL_REF. Write out the name preceded by a
8174 period and without any trailing data in brackets. Used for function
8175 names. If we are configured for System V (or the embedded ABI) on
8176 the PowerPC, do not emit the period, since those systems do not use
8177 TOCs and the like. */
8178 if (GET_CODE (x) != SYMBOL_REF)
8179 abort ();
8180
8181 if (XSTR (x, 0)[0] != '.')
8182 {
8183 switch (DEFAULT_ABI)
8184 {
8185 default:
8186 abort ();
8187
8188 case ABI_AIX:
8189 putc ('.', file);
8190 break;
8191
8192 case ABI_V4:
8193 case ABI_DARWIN:
8194 break;
8195 }
8196 }
8197 #if TARGET_AIX
8198 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
8199 #else
8200 assemble_name (file, XSTR (x, 0));
8201 #endif
8202 return;
8203
8204 case 'Z':
8205 /* Like 'L', for last word of TImode. */
8206 if (GET_CODE (x) == REG)
8207 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
8208 else if (GET_CODE (x) == MEM)
8209 {
8210 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8211 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8212 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
8213 else
8214 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
8215 if (small_data_operand (x, GET_MODE (x)))
8216 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8217 reg_names[SMALL_DATA_REG]);
8218 }
8219 return;
8220
8221 /* Print AltiVec or SPE memory operand. */
8222 case 'y':
8223 {
8224 rtx tmp;
8225
8226 if (GET_CODE (x) != MEM)
8227 abort ();
8228
8229 tmp = XEXP (x, 0);
8230
8231 if (TARGET_E500)
8232 {
8233 /* Handle [reg]. */
8234 if (GET_CODE (tmp) == REG)
8235 {
8236 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
8237 break;
8238 }
8239 /* Handle [reg+UIMM]. */
8240 else if (GET_CODE (tmp) == PLUS
8241 && GET_CODE (XEXP (tmp, 1)) == CONST_INT)
8242 {
8243 int x;
8244
8245 if (GET_CODE (XEXP (tmp, 0)) != REG)
8246 abort ();
8247
8248 x = INTVAL (XEXP (tmp, 1));
8249 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
8250 break;
8251 }
8252
8253 /* Fall through. Must be [reg+reg]. */
8254 }
8255 if (GET_CODE (tmp) == REG)
8256 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
8257 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
8258 {
8259 if (REGNO (XEXP (tmp, 0)) == 0)
8260 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
8261 reg_names[ REGNO (XEXP (tmp, 0)) ]);
8262 else
8263 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
8264 reg_names[ REGNO (XEXP (tmp, 1)) ]);
8265 }
8266 else
8267 abort ();
8268 break;
8269 }
8270
8271 case 0:
8272 if (GET_CODE (x) == REG)
8273 fprintf (file, "%s", reg_names[REGNO (x)]);
8274 else if (GET_CODE (x) == MEM)
8275 {
8276 /* We need to handle PRE_INC and PRE_DEC here, since we need to
8277 know the width from the mode. */
8278 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
8279 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
8280 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8281 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
8282 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
8283 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8284 else
8285 output_address (XEXP (x, 0));
8286 }
8287 else
8288 output_addr_const (file, x);
8289 return;
8290
8291 default:
8292 output_operand_lossage ("invalid %%xn code");
8293 }
8294 }
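
/* Editorial usage sketch (a hypothetical template, not taken from the
   machine description): an output template such as

     "rlwinm %0,%1,%h2,%m3,%M3"

   would print the low five bits of operand 2 via 'h' and the MB/ME
   values of mask operand 3 via 'm' and 'M' above.  */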
8295 \f
8296 /* Print the address of an operand. */
8297
8298 void
8299 print_operand_address (file, x)
8300 FILE *file;
8301 rtx x;
8302 {
8303 if (GET_CODE (x) == REG)
8304 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
8305 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
8306 || GET_CODE (x) == LABEL_REF)
8307 {
8308 output_addr_const (file, x);
8309 if (small_data_operand (x, GET_MODE (x)))
8310 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8311 reg_names[SMALL_DATA_REG]);
8312 else if (TARGET_TOC)
8313 abort ();
8314 }
8315 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
8316 {
8317 if (REGNO (XEXP (x, 0)) == 0)
8318 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
8319 reg_names[ REGNO (XEXP (x, 0)) ]);
8320 else
8321 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
8322 reg_names[ REGNO (XEXP (x, 1)) ]);
8323 }
8324 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
8325 {
8326 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
8327 fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8328 }
8329 #if TARGET_ELF
8330 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
8331 && CONSTANT_P (XEXP (x, 1)))
8332 {
8333 output_addr_const (file, XEXP (x, 1));
8334 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8335 }
8336 #endif
8337 #if TARGET_MACHO
8338 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
8339 && CONSTANT_P (XEXP (x, 1)))
8340 {
8341 fprintf (file, "lo16(");
8342 output_addr_const (file, XEXP (x, 1));
8343 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8344 }
8345 #endif
8346 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
8347 {
8348 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
8349 {
8350 rtx contains_minus = XEXP (x, 1);
8351 rtx minus, symref;
8352 const char *name;
8353
8354 /* Find the (minus (sym) (toc)) buried in X, and temporarily
8355 turn it into (sym) for output_addr_const. */
8356 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
8357 contains_minus = XEXP (contains_minus, 0);
8358
8359 minus = XEXP (contains_minus, 0);
8360 symref = XEXP (minus, 0);
8361 XEXP (contains_minus, 0) = symref;
8362 if (TARGET_ELF)
8363 {
8364 char *newname;
8365
8366 name = XSTR (symref, 0);
8367 newname = alloca (strlen (name) + sizeof ("@toc"));
8368 strcpy (newname, name);
8369 strcat (newname, "@toc");
8370 XSTR (symref, 0) = newname;
8371 }
8372 output_addr_const (file, XEXP (x, 1));
8373 if (TARGET_ELF)
8374 XSTR (symref, 0) = name;
8375 XEXP (contains_minus, 0) = minus;
8376 }
8377 else
8378 output_addr_const (file, XEXP (x, 1));
8379
8380 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
8381 }
8382 else
8383 abort ();
8384 }
8385 \f
8386 /* Target hook for assembling integer objects. The PowerPC version has
8387 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
8388 is defined. It also needs to handle DI-mode objects on 64-bit
8389 targets. */
8390
8391 static bool
8392 rs6000_assemble_integer (x, size, aligned_p)
8393 rtx x;
8394 unsigned int size;
8395 int aligned_p;
8396 {
8397 #ifdef RELOCATABLE_NEEDS_FIXUP
8398 /* Special handling for SI values. */
8399 if (size == 4 && aligned_p)
8400 {
8401 extern int in_toc_section PARAMS ((void));
8402 static int recurse = 0;
8403
8404 /* For -mrelocatable, we mark all addresses that need to be fixed up
8405 in the .fixup section. */
8406 if (TARGET_RELOCATABLE
8407 && !in_toc_section ()
8408 && !in_text_section ()
8409 && !recurse
8410 && GET_CODE (x) != CONST_INT
8411 && GET_CODE (x) != CONST_DOUBLE
8412 && CONSTANT_P (x))
8413 {
8414 char buf[256];
8415
8416 recurse = 1;
8417 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
8418 fixuplabelno++;
8419 ASM_OUTPUT_LABEL (asm_out_file, buf);
8420 fprintf (asm_out_file, "\t.long\t(");
8421 output_addr_const (asm_out_file, x);
8422 fprintf (asm_out_file, ")@fixup\n");
8423 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
8424 ASM_OUTPUT_ALIGN (asm_out_file, 2);
8425 fprintf (asm_out_file, "\t.long\t");
8426 assemble_name (asm_out_file, buf);
8427 fprintf (asm_out_file, "\n\t.previous\n");
8428 recurse = 0;
8429 return true;
8430 }
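/* A hedged illustration (label spelling varies by target): with a
   hypothetical symbol FOO and fixuplabelno == 0, the code above
   emits approximately

   .LCP0:
   .long (FOO)@fixup
   .section ".fixup","aw"
   .align 2
   .long .LCP0
   .previous

   so the .fixup section accumulates the addresses of all the words
   the startup code must relocate at load time. */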
8431 /* Remove initial .'s to turn a -mcall-aixdesc function
8432 address into the address of the descriptor, not the function
8433 itself. */
8434 else if (GET_CODE (x) == SYMBOL_REF
8435 && XSTR (x, 0)[0] == '.'
8436 && DEFAULT_ABI == ABI_AIX)
8437 {
8438 const char *name = XSTR (x, 0);
8439 while (*name == '.')
8440 name++;
8441
8442 fprintf (asm_out_file, "\t.long\t%s\n", name);
8443 return true;
8444 }
8445 }
8446 #endif /* RELOCATABLE_NEEDS_FIXUP */
8447 return default_assemble_integer (x, size, aligned_p);
8448 }
8449
8450 #ifdef HAVE_GAS_HIDDEN
8451 /* Emit an assembler directive to set symbol visibility for DECL to
8452 VISIBILITY_TYPE. */
8453
8454 static void
8455 rs6000_assemble_visibility (decl, vis)
8456 tree decl;
8457 int vis;
8458 {
8459 /* Functions need to have their entry point symbol visibility set as
8460 well as their descriptor symbol visibility. */
8461 if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
8462 {
8463 static const char * const visibility_types[] = {
8464 NULL, "internal", "hidden", "protected"
8465 };
8466
8467 const char *name, *type;
8468
8469 name = ((* targetm.strip_name_encoding)
8470 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
8471 type = visibility_types[vis];
8472
8473 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
8474 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
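/* For example (FOO is hypothetical): a hidden function FOO gets both
   ".hidden FOO" and ".hidden .FOO" emitted, covering the descriptor
   symbol and the code entry-point symbol respectively. */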
8475 }
8476 else
8477 default_assemble_visibility (decl, vis);
8478 }
8479 #endif
8480 \f
8481 enum rtx_code
8482 rs6000_reverse_condition (mode, code)
8483 enum machine_mode mode;
8484 enum rtx_code code;
8485 {
8486 /* Reversal of FP compares needs care -- an ordered compare
8487 becomes an unordered compare and vice versa. */
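/* For example, the reverse of GE is UNLT rather than LT: GE is
   false on a NaN operand, so its reverse must be true there. */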
8488 if (mode == CCFPmode && !flag_unsafe_math_optimizations)
8489 return reverse_condition_maybe_unordered (code);
8490 else
8491 return reverse_condition (code);
8492 }
8493
8494 /* Generate a compare for CODE. Return a brand-new rtx that
8495 represents the result of the compare. */
8496
8497 static rtx
8498 rs6000_generate_compare (code)
8499 enum rtx_code code;
8500 {
8501 enum machine_mode comp_mode;
8502 rtx compare_result;
8503
8504 if (rs6000_compare_fp_p)
8505 comp_mode = CCFPmode;
8506 else if (code == GTU || code == LTU
8507 || code == GEU || code == LEU)
8508 comp_mode = CCUNSmode;
8509 else
8510 comp_mode = CCmode;
8511
8512 /* First, the compare. */
8513 compare_result = gen_reg_rtx (comp_mode);
8514
8515 /* SPE FP compare instructions on the GPRs. Yuck! */
8516 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
8517 && rs6000_compare_fp_p)
8518 {
8519 rtx cmp, or1, or2, or_result, compare_result2;
8520
8521 switch (code)
8522 {
8523 case EQ:
8524 case UNEQ:
8525 case NE:
8526 case LTGT:
8527 cmp = flag_unsafe_math_optimizations
8528 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
8529 rs6000_compare_op1)
8530 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
8531 rs6000_compare_op1);
8532 break;
8533 case GT:
8534 case GTU:
8535 case UNGT:
8536 case UNGE:
8537 case GE:
8538 case GEU:
8539 cmp = flag_unsafe_math_optimizations
8540 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
8541 rs6000_compare_op1)
8542 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
8543 rs6000_compare_op1);
8544 break;
8545 case LT:
8546 case LTU:
8547 case UNLT:
8548 case UNLE:
8549 case LE:
8550 case LEU:
8551 cmp = flag_unsafe_math_optimizations
8552 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
8553 rs6000_compare_op1)
8554 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
8555 rs6000_compare_op1);
8556 break;
8557 default:
8558 abort ();
8559 }
8560
8561 /* Synthesize LE and GE from LT/GT || EQ. */
8562 if (code == LE || code == GE || code == LEU || code == GEU)
8563 {
8564 /* Synthesize GE/LE from GT/LT || EQ. */
8565
8566 emit_insn (cmp);
8567
8568 switch (code)
8569 {
8570 case LE: code = LT; break;
8571 case GE: code = GT; break;
8572 case LEU: code = LT; break;
8573 case GEU: code = GT; break;
8574 default: abort ();
8575 }
8576
8577 or1 = gen_reg_rtx (SImode);
8578 or2 = gen_reg_rtx (SImode);
8579 or_result = gen_reg_rtx (CCEQmode);
8580 compare_result2 = gen_reg_rtx (CCFPmode);
8581
8582 /* Do the EQ. */
8583 cmp = flag_unsafe_math_optimizations
8584 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
8585 rs6000_compare_op1)
8586 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
8587 rs6000_compare_op1);
8588 emit_insn (cmp);
8589
8590 /* The MC8540 FP compare instructions set the CR bits
8591 differently than other PPC compare instructions. For
8592 that matter, there is no generic test instruction, but a
8593 testgt, testlt, and testeq. For a true condition, bit 2
8594 is set (x1xx) in the CR. Following the traditional CR
8595 values:
8596
8597 LT GT EQ OV
8598 bit3 bit2 bit1 bit0
8599
8600 ... bit 2 would be a GT CR alias, so later on we
8601 look in the GT bits for the branch instructions.
8602 However, we must be careful to emit correct RTL in
8603 the meantime, so optimizations don't get confused. */
8604
8605 or1 = gen_rtx (NE, SImode, compare_result, const0_rtx);
8606 or2 = gen_rtx (NE, SImode, compare_result2, const0_rtx);
8607
8608 /* OR them together. */
8609 cmp = gen_rtx_SET (VOIDmode, or_result,
8610 gen_rtx_COMPARE (CCEQmode,
8611 gen_rtx_IOR (SImode, or1, or2),
8612 const_true_rtx));
8613 compare_result = or_result;
8614 code = EQ;
8615 }
8616 else
8617 {
8618 /* We only care about 1 bit (x1xx), so map everything to NE to
8619 maintain rtl sanity. We'll get to the right bit (x1xx) at
8620 code output time. */
8621 if (code == NE || code == LTGT)
8622 /* Do the inverse here because we have no cmpne
8623 instruction. We use the cmpeq instruction and expect
8624 to get a 0 instead. */
8625 code = EQ;
8626 else
8627 code = NE;
8628 }
8629
8630 emit_insn (cmp);
8631 }
8632 else
8633 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
8634 gen_rtx_COMPARE (comp_mode,
8635 rs6000_compare_op0,
8636 rs6000_compare_op1)));
8637
8638 /* Some kinds of FP comparisons need an OR operation;
8639 under flag_unsafe_math_optimizations we don't bother. */
8640 if (rs6000_compare_fp_p
8641 && ! flag_unsafe_math_optimizations
8642 && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
8643 && (code == LE || code == GE
8644 || code == UNEQ || code == LTGT
8645 || code == UNGT || code == UNLT))
8646 {
8647 enum rtx_code or1, or2;
8648 rtx or1_rtx, or2_rtx, compare2_rtx;
8649 rtx or_result = gen_reg_rtx (CCEQmode);
8650
8651 switch (code)
8652 {
8653 case LE: or1 = LT; or2 = EQ; break;
8654 case GE: or1 = GT; or2 = EQ; break;
8655 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
8656 case LTGT: or1 = LT; or2 = GT; break;
8657 case UNGT: or1 = UNORDERED; or2 = GT; break;
8658 case UNLT: or1 = UNORDERED; or2 = LT; break;
8659 default: abort ();
8660 }
8661 validate_condition_mode (or1, comp_mode);
8662 validate_condition_mode (or2, comp_mode);
8663 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
8664 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
8665 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
8666 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
8667 const_true_rtx);
8668 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
8669
8670 compare_result = or_result;
8671 code = EQ;
8672 }
8673
8674 validate_condition_mode (code, GET_MODE (compare_result));
8675
8676 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
8677 }
8678
8679
8680 /* Emit the RTL for an sCOND pattern. */
8681
8682 void
8683 rs6000_emit_sCOND (code, result)
8684 enum rtx_code code;
8685 rtx result;
8686 {
8687 rtx condition_rtx;
8688 enum machine_mode op_mode;
8689
8690 condition_rtx = rs6000_generate_compare (code);
8691
8692 op_mode = GET_MODE (rs6000_compare_op0);
8693 if (op_mode == VOIDmode)
8694 op_mode = GET_MODE (rs6000_compare_op1);
8695
8696 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
8697 {
8698 PUT_MODE (condition_rtx, DImode);
8699 convert_move (result, condition_rtx, 0);
8700 }
8701 else
8702 {
8703 PUT_MODE (condition_rtx, SImode);
8704 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
8705 }
8706 }
8707
8708 /* Emit a branch of kind CODE to location LOC. */
8709
8710 void
8711 rs6000_emit_cbranch (code, loc)
8712 enum rtx_code code;
8713 rtx loc;
8714 {
8715 rtx condition_rtx, loc_ref;
8716
8717 condition_rtx = rs6000_generate_compare (code);
8718 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
8719 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
8720 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
8721 loc_ref, pc_rtx)));
8722 }
8723
8724 /* Return the string to output a conditional branch to LABEL, which is
8725 the operand number of the label, or -1 if the branch is really a
8726 conditional return.
8727
8728 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
8729 condition code register and its mode specifies what kind of
8730 comparison we made.
8731
8732 REVERSED is nonzero if we should reverse the sense of the comparison.
8733
8734 INSN is the insn. */
8735
8736 char *
8737 output_cbranch (op, label, reversed, insn)
8738 rtx op;
8739 const char * label;
8740 int reversed;
8741 rtx insn;
8742 {
8743 static char string[64];
8744 enum rtx_code code = GET_CODE (op);
8745 rtx cc_reg = XEXP (op, 0);
8746 enum machine_mode mode = GET_MODE (cc_reg);
8747 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
8748 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
8749 int really_reversed = reversed ^ need_longbranch;
8750 char *s = string;
8751 const char *ccode;
8752 const char *pred;
8753 rtx note;
8754
8755 validate_condition_mode (code, mode);
8756
8757 /* Work out which way this really branches. We could use
8758 reverse_condition_maybe_unordered here always but this
8759 makes the resulting assembler clearer. */
8760 if (really_reversed)
8761 {
8762 /* Reversal of FP compares needs care -- an ordered compare
8763 becomes an unordered compare and vice versa. */
8764 if (mode == CCFPmode)
8765 code = reverse_condition_maybe_unordered (code);
8766 else
8767 code = reverse_condition (code);
8768 }
8769
8770 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
8771 {
8772 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
8773 to the GT bit. */
8774 if (code == EQ)
8775 /* Opposite of GT. */
8776 code = UNLE;
8777 else if (code == NE)
8778 code = GT;
8779 else
8780 abort ();
8781 }
8782
8783 switch (code)
8784 {
8785 /* Not all of these are actually distinct opcodes, but
8786 we distinguish them for clarity of the resulting assembler. */
8787 case NE: case LTGT:
8788 ccode = "ne"; break;
8789 case EQ: case UNEQ:
8790 ccode = "eq"; break;
8791 case GE: case GEU:
8792 ccode = "ge"; break;
8793 case GT: case GTU: case UNGT:
8794 ccode = "gt"; break;
8795 case LE: case LEU:
8796 ccode = "le"; break;
8797 case LT: case LTU: case UNLT:
8798 ccode = "lt"; break;
8799 case UNORDERED: ccode = "un"; break;
8800 case ORDERED: ccode = "nu"; break;
8801 case UNGE: ccode = "nl"; break;
8802 case UNLE: ccode = "ng"; break;
8803 default:
8804 abort ();
8805 }
8806
8807 /* Maybe we have a guess as to how likely the branch is.
8808 The old mnemonics don't have a way to specify this information. */
8809 pred = "";
8810 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
8811 if (note != NULL_RTX)
8812 {
8813 /* PROB is the difference from 50%. */
8814 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
8815 bool always_hint = rs6000_cpu != PROCESSOR_POWER4;
8816
8817 /* Only hint for highly probable/improbable branches on newer
8818 cpus as static prediction overrides processor dynamic
8819 prediction. For older cpus we may as well always hint, but
8820 assume not taken for branches that are very close to 50% as a
8821 mispredicted taken branch is more expensive than a
8822 mispredicted not-taken branch. */
8823 if (always_hint
8824 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
8825 {
8826 if (abs (prob) > REG_BR_PROB_BASE / 20
8827 && ((prob > 0) ^ need_longbranch))
8828 pred = "+";
8829 else
8830 pred = "-";
8831 }
8832 }
8833
8834 if (label == NULL)
8835 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
8836 else
8837 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
8838
8839 /* We need to escape any '%' characters in the reg_names string.
8840 Assume they'd only be the first character... */
8841 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
8842 *s++ = '%';
8843 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
8844
8845 if (label != NULL)
8846 {
8847 /* If the branch distance was too far, we may have to use an
8848 unconditional branch to go the distance. */
8849 if (need_longbranch)
8850 s += sprintf (s, ",$+8\n\tb %s", label);
8851 else
8852 s += sprintf (s, ",%s", label);
8853 }
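/* Hedged examples of the resulting template (label name hypothetical,
   register naming varies by target): a likely EQ branch on cr0 comes
   out as "{beq|beq+} cr0,.L99"; if the target is out of range, the
   condition is reversed and the long form "{bne|bne-} cr0,$+8"
   followed by "b .L99" is emitted instead. */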
8854
8855 return string;
8856 }
8857
8858 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
8859 operands of the last comparison is nonzero/true, FALSE_COND if it
8860 is zero/false. Return 0 if the hardware has no such operation. */
8861
8862 int
8863 rs6000_emit_cmove (dest, op, true_cond, false_cond)
8864 rtx dest;
8865 rtx op;
8866 rtx true_cond;
8867 rtx false_cond;
8868 {
8869 enum rtx_code code = GET_CODE (op);
8870 rtx op0 = rs6000_compare_op0;
8871 rtx op1 = rs6000_compare_op1;
8872 REAL_VALUE_TYPE c1;
8873 enum machine_mode compare_mode = GET_MODE (op0);
8874 enum machine_mode result_mode = GET_MODE (dest);
8875 rtx temp;
8876
8877 /* These modes should always match. */
8878 if (GET_MODE (op1) != compare_mode
8879 /* In the isel case however, we can use a compare immediate, so
8880 op1 may be a small constant. */
8881 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
8882 return 0;
8883 if (GET_MODE (true_cond) != result_mode)
8884 return 0;
8885 if (GET_MODE (false_cond) != result_mode)
8886 return 0;
8887
8888 /* First, work out if the hardware can do this at all, or
8889 if it's too slow... */
8890 if (! rs6000_compare_fp_p)
8891 {
8892 if (TARGET_ISEL)
8893 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
8894 return 0;
8895 }
8896
8897 /* Eliminate half of the comparisons by switching operands; this
8898 makes the remaining code simpler. */
8899 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
8900 || code == LTGT || code == LT)
8901 {
8902 code = reverse_condition_maybe_unordered (code);
8903 temp = true_cond;
8904 true_cond = false_cond;
8905 false_cond = temp;
8906 }
8907
8908 /* UNEQ and LTGT take four instructions for a comparison with zero,
8909 so it'll probably be faster to use a branch here too. */
8910 if (code == UNEQ)
8911 return 0;
8912
8913 if (GET_CODE (op1) == CONST_DOUBLE)
8914 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
8915
8916 /* We're going to try to implement comparisons by performing
8917 a subtract, then comparing against zero. Unfortunately,
8918 Inf - Inf is NaN which is not zero, and so if we don't
8919 know that the operand is finite and the comparison
8920 would treat EQ different to UNORDERED, we can't do it. */
8921 if (! flag_unsafe_math_optimizations
8922 && code != GT && code != UNGE
8923 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
8924 /* Constructs of the form (a OP b ? a : b) are safe. */
8925 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
8926 || (! rtx_equal_p (op0, true_cond)
8927 && ! rtx_equal_p (op1, true_cond))))
8928 return 0;
8929 /* At this point we know we can use fsel. */
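/* fsel computes "dest = (a >= 0.0 ? b : c)" and treats a NaN operand
   as failing the test, which is why everything below is phrased as a
   GE-against-zero test on a negated and/or absolute-valued operand. */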
8930
8931 /* Reduce the comparison to a comparison against zero. */
8932 temp = gen_reg_rtx (compare_mode);
8933 emit_insn (gen_rtx_SET (VOIDmode, temp,
8934 gen_rtx_MINUS (compare_mode, op0, op1)));
8935 op0 = temp;
8936 op1 = CONST0_RTX (compare_mode);
8937
8938 /* If we don't care about NaNs we can reduce some of the comparisons
8939 down to faster ones. */
8940 if (flag_unsafe_math_optimizations)
8941 switch (code)
8942 {
8943 case GT:
8944 code = LE;
8945 temp = true_cond;
8946 true_cond = false_cond;
8947 false_cond = temp;
8948 break;
8949 case UNGE:
8950 code = GE;
8951 break;
8952 case UNEQ:
8953 code = EQ;
8954 break;
8955 default:
8956 break;
8957 }
8958
8959 /* Now, reduce everything down to a GE. */
8960 switch (code)
8961 {
8962 case GE:
8963 break;
8964
8965 case LE:
8966 temp = gen_reg_rtx (compare_mode);
8967 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8968 op0 = temp;
8969 break;
8970
8971 case ORDERED:
8972 temp = gen_reg_rtx (compare_mode);
8973 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
8974 op0 = temp;
8975 break;
8976
8977 case EQ:
8978 temp = gen_reg_rtx (compare_mode);
8979 emit_insn (gen_rtx_SET (VOIDmode, temp,
8980 gen_rtx_NEG (compare_mode,
8981 gen_rtx_ABS (compare_mode, op0))));
8982 op0 = temp;
8983 break;
8984
8985 case UNGE:
8986 temp = gen_reg_rtx (result_mode);
8987 emit_insn (gen_rtx_SET (VOIDmode, temp,
8988 gen_rtx_IF_THEN_ELSE (result_mode,
8989 gen_rtx_GE (VOIDmode,
8990 op0, op1),
8991 true_cond, false_cond)));
8992 false_cond = true_cond;
8993 true_cond = temp;
8994
8995 temp = gen_reg_rtx (compare_mode);
8996 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8997 op0 = temp;
8998 break;
8999
9000 case GT:
9001 temp = gen_reg_rtx (result_mode);
9002 emit_insn (gen_rtx_SET (VOIDmode, temp,
9003 gen_rtx_IF_THEN_ELSE (result_mode,
9004 gen_rtx_GE (VOIDmode,
9005 op0, op1),
9006 true_cond, false_cond)));
9007 true_cond = false_cond;
9008 false_cond = temp;
9009
9010 temp = gen_reg_rtx (compare_mode);
9011 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9012 op0 = temp;
9013 break;
9014
9015 default:
9016 abort ();
9017 }
9018
9019 emit_insn (gen_rtx_SET (VOIDmode, dest,
9020 gen_rtx_IF_THEN_ELSE (result_mode,
9021 gen_rtx_GE (VOIDmode,
9022 op0, op1),
9023 true_cond, false_cond)));
9024 return 1;
9025 }
9026
9027 /* Same as above, but for ints (isel). */
9028
9029 static int
9030 rs6000_emit_int_cmove (dest, op, true_cond, false_cond)
9031 rtx dest;
9032 rtx op;
9033 rtx true_cond;
9034 rtx false_cond;
9035 {
9036 rtx condition_rtx, cr;
9037
9038 /* All isel implementations thus far are 32-bit. */
9039 if (GET_MODE (rs6000_compare_op0) != SImode)
9040 return 0;
9041
9042 /* We still have to do the compare, because isel doesn't do a
9043 compare, it just looks at the CRx bits set by a previous compare
9044 instruction. */
9045 condition_rtx = rs6000_generate_compare (GET_CODE (op));
9046 cr = XEXP (condition_rtx, 0);
9047
9048 if (GET_MODE (cr) == CCmode)
9049 emit_insn (gen_isel_signed (dest, condition_rtx,
9050 true_cond, false_cond, cr));
9051 else
9052 emit_insn (gen_isel_unsigned (dest, condition_rtx,
9053 true_cond, false_cond, cr));
9054
9055 return 1;
9056 }
9057
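/* Output the isel instruction for OPERANDS. isel selects its first
   source operand when the tested CR bit is set; GE, LE, and NE (and
   their unsigned variants) correspond to a bit being clear, so they
   are handled by reversing the condition and swapping the two source
   operands. */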
9058 const char *
9059 output_isel (operands)
9060 rtx *operands;
9061 {
9062 enum rtx_code code;
9063
9064 code = GET_CODE (operands[1]);
9065 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
9066 {
9067 PUT_CODE (operands[1], reverse_condition (code));
9068 return "isel %0,%3,%2,%j1";
9069 }
9070 else
9071 return "isel %0,%2,%3,%j1";
9072 }
9073
9074 void
9075 rs6000_emit_minmax (dest, code, op0, op1)
9076 rtx dest;
9077 enum rtx_code code;
9078 rtx op0;
9079 rtx op1;
9080 {
9081 enum machine_mode mode = GET_MODE (op0);
9082 enum rtx_code c;
9083 rtx target;
9084
9085 if (code == SMAX || code == SMIN)
9086 c = GE;
9087 else
9088 c = GEU;
9089
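/* E.g. SMAX is "dest = (op0 >= op1 ? op0 : op1)" via a signed GE
   conditional move; SMIN uses the same test with the move operands
   swapped. */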
9090 if (code == SMAX || code == UMAX)
9091 target = emit_conditional_move (dest, c, op0, op1, mode,
9092 op0, op1, mode, 0);
9093 else
9094 target = emit_conditional_move (dest, c, op0, op1, mode,
9095 op1, op0, mode, 0);
9096 if (target == NULL_RTX)
9097 abort ();
9098 if (target != dest)
9099 emit_move_insn (dest, target);
9100 }
9101 \f
9102 /* This page contains routines that are used to determine what the
9103 function prologue and epilogue code will do and write them out. */
9104
9105 /* Return the first fixed-point register that is required to be
9106 saved. 32 if none. */
9107
9108 int
9109 first_reg_to_save ()
9110 {
9111 int first_reg;
9112
9113 /* Find lowest numbered live register. */
9114 for (first_reg = 13; first_reg <= 31; first_reg++)
9115 if (regs_ever_live[first_reg]
9116 && (! call_used_regs[first_reg]
9117 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
9118 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
9119 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
9120 break;
9121
9122 #if TARGET_MACHO
9123 if (flag_pic
9124 && current_function_uses_pic_offset_table
9125 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
9126 return RS6000_PIC_OFFSET_TABLE_REGNUM;
9127 #endif
9128
9129 return first_reg;
9130 }
9131
9132 /* Similar, for FP regs. */
9133
9134 int
9135 first_fp_reg_to_save ()
9136 {
9137 int first_reg;
9138
9139 /* Find lowest numbered live register. */
9140 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
9141 if (regs_ever_live[first_reg])
9142 break;
9143
9144 return first_reg;
9145 }
9146
9147 /* Similar, for AltiVec regs. */
9148
9149 static int
9150 first_altivec_reg_to_save ()
9151 {
9152 int i;
9153
9154 /* Stack frame remains as is unless we are in AltiVec ABI. */
9155 if (! TARGET_ALTIVEC_ABI)
9156 return LAST_ALTIVEC_REGNO + 1;
9157
9158 /* Find lowest numbered live register. */
9159 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
9160 if (regs_ever_live[i])
9161 break;
9162
9163 return i;
9164 }
9165
9166 /* Return a 32-bit mask of the AltiVec registers we need to set in
9167 VRSAVE. Bit n of the return value is 1 if Vn is live; bits are
9168 numbered from the MSB, so V0 corresponds to the MSB of the word. */
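/* For instance, assuming the usual ALTIVEC_REG_BIT definition of
   0x80000000 >> (REGNO - FIRST_ALTIVEC_REGNO): in a function where
   only V20 and V21 are live (and neither is an argument or return
   register), the computed mask is 0x00000c00. */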
9169
9170 static unsigned int
9171 compute_vrsave_mask ()
9172 {
9173 unsigned int i, mask = 0;
9174
9175 /* First, find out if we use _any_ altivec registers. */
9176 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
9177 if (regs_ever_live[i])
9178 mask |= ALTIVEC_REG_BIT (i);
9179
9180 if (mask == 0)
9181 return mask;
9182
9183 /* Next, remove the argument registers from the set. These must
9184 be in the VRSAVE mask set by the caller, so we don't need to add
9185 them in again. More importantly, the mask we compute here is
9186 used to generate CLOBBERs in the set_vrsave insn, and we do not
9187 wish the argument registers to die. */
9188 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
9189 mask &= ~ALTIVEC_REG_BIT (i);
9190
9191 /* Similarly, remove the return value from the set. */
9192 {
9193 bool yes = false;
9194 diddle_return_value (is_altivec_return_reg, &yes);
9195 if (yes)
9196 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
9197 }
9198
9199 return mask;
9200 }
9201
9202 static void
9203 is_altivec_return_reg (reg, xyes)
9204 rtx reg;
9205 void *xyes;
9206 {
9207 bool *yes = (bool *) xyes;
9208 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
9209 *yes = true;
9210 }
9211
9212 \f
9213 /* Calculate the stack information for the current function. This is
9214 complicated by having two separate calling sequences, the AIX calling
9215 sequence and the V.4 calling sequence.
9216
9217 AIX (and Darwin/Mac OS X) stack frames look like:
9218 32-bit 64-bit
9219 SP----> +---------------------------------------+
9220 | back chain to caller | 0 0
9221 +---------------------------------------+
9222 | saved CR | 4 8 (8-11)
9223 +---------------------------------------+
9224 | saved LR | 8 16
9225 +---------------------------------------+
9226 | reserved for compilers | 12 24
9227 +---------------------------------------+
9228 | reserved for binders | 16 32
9229 +---------------------------------------+
9230 | saved TOC pointer | 20 40
9231 +---------------------------------------+
9232 | Parameter save area (P) | 24 48
9233 +---------------------------------------+
9234 | Alloca space (A) | 24+P etc.
9235 +---------------------------------------+
9236 | Local variable space (L) | 24+P+A
9237 +---------------------------------------+
9238 | Float/int conversion temporary (X) | 24+P+A+L
9239 +---------------------------------------+
9240 | Save area for AltiVec registers (W) | 24+P+A+L+X
9241 +---------------------------------------+
9242 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
9243 +---------------------------------------+
9244 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
9245 +---------------------------------------+
9246 | Save area for GP registers (G) | 24+P+A+L+X+W+Y+Z
9247 +---------------------------------------+
9248 | Save area for FP registers (F) | 24+P+A+L+X+W+Y+Z+G
9249 +---------------------------------------+
9250 old SP->| back chain to caller's caller |
9251 +---------------------------------------+
9252
9253 The required alignment for AIX configurations is two words (i.e., 8
9254 or 16 bytes).
9255
9256
9257 V.4 stack frames look like:
9258
9259 SP----> +---------------------------------------+
9260 | back chain to caller | 0
9261 +---------------------------------------+
9262 | caller's saved LR | 4
9263 +---------------------------------------+
9264 | Parameter save area (P) | 8
9265 +---------------------------------------+
9266 | Alloca space (A) | 8+P
9267 +---------------------------------------+
9268 | Varargs save area (V) | 8+P+A
9269 +---------------------------------------+
9270 | Local variable space (L) | 8+P+A+V
9271 +---------------------------------------+
9272 | Float/int conversion temporary (X) | 8+P+A+V+L
9273 +---------------------------------------+
9274 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
9275 +---------------------------------------+
9276 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
9277 +---------------------------------------+
9278 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
9279 +---------------------------------------+
9280 | SPE: area for 64-bit GP registers |
9281 +---------------------------------------+
9282 | SPE alignment padding |
9283 +---------------------------------------+
9284 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
9285 +---------------------------------------+
9286 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
9287 +---------------------------------------+
9288 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
9289 +---------------------------------------+
9290 old SP->| back chain to caller's caller |
9291 +---------------------------------------+
9292
9293 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
9294 given. (But note below and in sysv4.h that we require only 8 and
9295 may round up the size of our stack frame anyway. The historical
9296 reason is early versions of powerpc-linux which didn't properly
9297 align the stack at program startup. A happy side-effect is that
9298 -mno-eabi libraries can be used with -meabi programs.)
9299
9300 The EABI configuration defaults to the V.4 layout. However,
9301 the stack alignment requirements may differ. If -mno-eabi is not
9302 given, the required stack alignment is 8 bytes; if -mno-eabi is
9303 given, the required alignment is 16 bytes. (But see V.4 comment
9304 above.) */
9305
9306 #ifndef ABI_STACK_BOUNDARY
9307 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
9308 #endif
9309
9310 rs6000_stack_t *
9311 rs6000_stack_info ()
9312 {
9313 static rs6000_stack_t info, zero_info;
9314 rs6000_stack_t *info_ptr = &info;
9315 int reg_size = TARGET_POWERPC64 ? 8 : 4;
9316 int ehrd_size;
9317 int total_raw_size;
9318
9319 /* Zero all fields portably. */
9320 info = zero_info;
9321
9322 if (TARGET_SPE)
9323 {
9324 /* Cache value so we don't rescan instruction chain over and over. */
9325 if (cfun->machine->insn_chain_scanned_p == 0)
9326 {
9327 cfun->machine->insn_chain_scanned_p = 1;
9328 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
9329 }
9330 }
9331
9332 /* Select which calling sequence. */
9333 info_ptr->abi = DEFAULT_ABI;
9334
9335 /* Calculate which registers need to be saved & save area size. */
9336 info_ptr->first_gp_reg_save = first_reg_to_save ();
9337 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
9338 even if it currently looks like we won't. */
9339 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
9340 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
9341 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
9342 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
9343 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
9344 else
9345 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
9346
9347 /* For the SPE, we have an additional upper 32 bits on each GPR.
9348 Ideally we should save the entire 64 bits only when the upper
9349 half is used in SIMD instructions. Since we only record which
9350 registers are live (not the size they are used in), this proves
9351 difficult because we'd have to traverse the instruction chain at
9352 the right time, taking reload into account. This is a real pain,
9353 so we opt to save all the GPRs in 64 bits if even one register
9354 is used in 64-bit mode. Otherwise, all the registers in the
9355 frame get saved in 32 bits.
9356
9357 So, when we save all GPRs (except the SP) in 64 bits, the
9358 traditional GP save area will be empty. */
9359 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9360 info_ptr->gp_size = 0;
9361
9362 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
9363 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
9364
9365 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
9366 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
9367 - info_ptr->first_altivec_reg_save);
9368
9369 /* Does this function call anything? */
9370 info_ptr->calls_p = (! current_function_is_leaf
9371 || cfun->machine->ra_needs_full_frame);
9372
9373 /* Determine if we need to save the link register. */
9374 if (rs6000_ra_ever_killed ()
9375 || (DEFAULT_ABI == ABI_AIX
9376 && current_function_profile
9377 && !TARGET_PROFILE_KERNEL)
9378 #ifdef TARGET_RELOCATABLE
9379 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
9380 #endif
9381 || (info_ptr->first_fp_reg_save != 64
9382 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
9383 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
9384 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
9385 || (DEFAULT_ABI == ABI_DARWIN
9386 && flag_pic
9387 && current_function_uses_pic_offset_table)
9388 || info_ptr->calls_p)
9389 {
9390 info_ptr->lr_save_p = 1;
9391 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
9392 }
9393
9394 /* Determine if we need to save the condition code registers. */
9395 if (regs_ever_live[CR2_REGNO]
9396 || regs_ever_live[CR3_REGNO]
9397 || regs_ever_live[CR4_REGNO])
9398 {
9399 info_ptr->cr_save_p = 1;
9400 if (DEFAULT_ABI == ABI_V4)
9401 info_ptr->cr_size = reg_size;
9402 }
9403
9404 /* If the current function calls __builtin_eh_return, then we need
9405 to allocate stack space for registers that will hold data for
9406 the exception handler. */
9407 if (current_function_calls_eh_return)
9408 {
9409 unsigned int i;
9410 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
9411 continue;
9412
9413 /* SPE saves EH registers in 64-bits. */
9414 ehrd_size = i * (TARGET_SPE_ABI
9415 && info_ptr->spe_64bit_regs_used != 0
9416 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
9417 }
9418 else
9419 ehrd_size = 0;
9420
9421 /* Determine various sizes. */
9422 info_ptr->reg_size = reg_size;
9423 info_ptr->fixed_size = RS6000_SAVE_AREA;
9424 info_ptr->varargs_size = RS6000_VARARGS_AREA;
9425 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
9426 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
9427 8);
9428
9429 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9430 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
9431 else
9432 info_ptr->spe_gp_size = 0;
9433
9434 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
9435 {
9436 info_ptr->vrsave_mask = compute_vrsave_mask ();
9437 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
9438 }
9439 else
9440 {
9441 info_ptr->vrsave_mask = 0;
9442 info_ptr->vrsave_size = 0;
9443 }
9444
9445 /* Calculate the offsets. */
9446 switch (DEFAULT_ABI)
9447 {
9448 case ABI_NONE:
9449 default:
9450 abort ();
9451
9452 case ABI_AIX:
9453 case ABI_DARWIN:
9454 info_ptr->fp_save_offset = - info_ptr->fp_size;
9455 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9456
9457 if (TARGET_ALTIVEC_ABI)
9458 {
9459 info_ptr->vrsave_save_offset
9460 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
9461
9462 /* Align stack so vector save area is on a quadword boundary. */
9463 if (info_ptr->altivec_size != 0)
9464 info_ptr->altivec_padding_size
9465 = 16 - (-info_ptr->vrsave_save_offset % 16);
9466 else
9467 info_ptr->altivec_padding_size = 0;
9468
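/* Worked example (illustrative numbers): if vrsave_save_offset were
   -164, then -vrsave_save_offset % 16 == 4 and the padding is 12,
   so altivec_save_offset lands on a 16-byte boundary. Note that an
   already-aligned offset yields 16 bytes of padding rather than 0. */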
9469 info_ptr->altivec_save_offset
9470 = info_ptr->vrsave_save_offset
9471 - info_ptr->altivec_padding_size
9472 - info_ptr->altivec_size;
9473
9474 /* Adjust for AltiVec case. */
9475 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
9476 }
9477 else
9478 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
9479 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
9480 info_ptr->lr_save_offset = 2*reg_size;
9481 break;
9482
9483 case ABI_V4:
9484 info_ptr->fp_save_offset = - info_ptr->fp_size;
9485 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9486 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
9487
9488 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9489 {
9490 /* Align stack so SPE GPR save area is aligned on a
9491 double-word boundary. */
9492 if (info_ptr->spe_gp_size != 0)
9493 info_ptr->spe_padding_size
9494 = 8 - (-info_ptr->cr_save_offset % 8);
9495 else
9496 info_ptr->spe_padding_size = 0;
9497
9498 info_ptr->spe_gp_save_offset
9499 = info_ptr->cr_save_offset
9500 - info_ptr->spe_padding_size
9501 - info_ptr->spe_gp_size;
9502
9503 /* Adjust for SPE case. */
9504 info_ptr->toc_save_offset
9505 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
9506 }
9507 else if (TARGET_ALTIVEC_ABI)
9508 {
9509 info_ptr->vrsave_save_offset
9510 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
9511
9512 /* Align stack so vector save area is on a quadword boundary. */
9513 if (info_ptr->altivec_size != 0)
9514 info_ptr->altivec_padding_size
9515 = 16 - (-info_ptr->vrsave_save_offset % 16);
9516 else
9517 info_ptr->altivec_padding_size = 0;
9518
9519 info_ptr->altivec_save_offset
9520 = info_ptr->vrsave_save_offset
9521 - info_ptr->altivec_padding_size
9522 - info_ptr->altivec_size;
9523
9524 /* Adjust for AltiVec case. */
9525 info_ptr->toc_save_offset
9526 = info_ptr->altivec_save_offset - info_ptr->toc_size;
9527 }
9528 else
9529 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
9530 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
9531 info_ptr->lr_save_offset = reg_size;
9532 break;
9533 }
9534
9535 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
9536 + info_ptr->gp_size
9537 + info_ptr->altivec_size
9538 + info_ptr->altivec_padding_size
9540 + info_ptr->spe_gp_size
9541 + info_ptr->spe_padding_size
9542 + ehrd_size
9543 + info_ptr->cr_size
9544 + info_ptr->lr_size
9545 + info_ptr->vrsave_size
9546 + info_ptr->toc_size,
9547 (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN)
9548 ? 16 : 8);
9549
9550 total_raw_size = (info_ptr->vars_size
9551 + info_ptr->parm_size
9552 + info_ptr->save_size
9553 + info_ptr->varargs_size
9554 + info_ptr->fixed_size);
9555
9556 info_ptr->total_size =
9557 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
9558
9559 /* Determine if we need to allocate any stack frame:
9560
9561 For AIX we need to push the stack if a frame pointer is needed
9562 (because the stack might be dynamically adjusted), if we are
9563 debugging, if we make calls, or if the sum of fp_save, gp_save,
9564 and local variables are more than the space needed to save all
9565 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
9566 + 18*8 = 288 (GPR13 reserved).
9567
9568 For V.4 we don't have the stack cushion that AIX uses, but assume
9569 that the debugger can handle stackless frames. */
9570
9571 if (info_ptr->calls_p)
9572 info_ptr->push_p = 1;
9573
9574 else if (DEFAULT_ABI == ABI_V4)
9575 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
9576
9577 else if (frame_pointer_needed)
9578 info_ptr->push_p = 1;
9579
9580 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
9581 info_ptr->push_p = 1;
9582
9583 else
9584 info_ptr->push_p
9585 = total_raw_size - info_ptr->fixed_size > (TARGET_32BIT ? 220 : 288);
9586
9587 /* Zero offsets if we're not saving those registers. */
9588 if (info_ptr->fp_size == 0)
9589 info_ptr->fp_save_offset = 0;
9590
9591 if (info_ptr->gp_size == 0)
9592 info_ptr->gp_save_offset = 0;
9593
9594 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
9595 info_ptr->altivec_save_offset = 0;
9596
9597 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
9598 info_ptr->vrsave_save_offset = 0;
9599
9600 if (! TARGET_SPE_ABI
9601 || info_ptr->spe_64bit_regs_used == 0
9602 || info_ptr->spe_gp_size == 0)
9603 info_ptr->spe_gp_save_offset = 0;
9604
9605 if (! info_ptr->lr_save_p)
9606 info_ptr->lr_save_offset = 0;
9607
9608 if (! info_ptr->cr_save_p)
9609 info_ptr->cr_save_offset = 0;
9610
9611 if (! info_ptr->toc_save_p)
9612 info_ptr->toc_save_offset = 0;
9613
9614 return info_ptr;
9615 }
9616
9617 /* Return true if the current function uses any GPRs in 64-bit SIMD
9618 mode. */
9619
9620 static bool
9621 spe_func_has_64bit_regs_p ()
9622 {
9623 rtx insns, insn;
9624
9625 /* Functions that save and restore all the call-saved registers will
9626 need to save/restore the registers in 64-bits. */
9627 if (current_function_calls_eh_return
9628 || current_function_calls_setjmp
9629 || current_function_has_nonlocal_goto)
9630 return true;
9631
9632 insns = get_insns ();
9633
9634 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
9635 {
9636 if (INSN_P (insn))
9637 {
9638 rtx i;
9639
9640 i = PATTERN (insn);
9641 if (GET_CODE (i) == SET
9642 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
9643 return true;
9644 }
9645 }
9646
9647 return false;
9648 }
9649
9650 void
9651 debug_stack_info (info)
9652 rs6000_stack_t *info;
9653 {
9654 const char *abi_string;
9655
9656 if (! info)
9657 info = rs6000_stack_info ();
9658
9659 fprintf (stderr, "\nStack information for function %s:\n",
9660 ((current_function_decl && DECL_NAME (current_function_decl))
9661 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
9662 : "<unknown>"));
9663
9664 switch (info->abi)
9665 {
9666 default: abi_string = "Unknown"; break;
9667 case ABI_NONE: abi_string = "NONE"; break;
9668 case ABI_AIX: abi_string = "AIX"; break;
9669 case ABI_DARWIN: abi_string = "Darwin"; break;
9670 case ABI_V4: abi_string = "V.4"; break;
9671 }
9672
9673 fprintf (stderr, "\tABI = %5s\n", abi_string);
9674
9675 if (TARGET_ALTIVEC_ABI)
9676 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
9677
9678 if (TARGET_SPE_ABI)
9679 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
9680
9681 if (info->first_gp_reg_save != 32)
9682 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
9683
9684 if (info->first_fp_reg_save != 64)
9685 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
9686
9687 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
9688 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
9689 info->first_altivec_reg_save);
9690
9691 if (info->lr_save_p)
9692 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
9693
9694 if (info->cr_save_p)
9695 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
9696
9697 if (info->toc_save_p)
9698 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
9699
9700 if (info->vrsave_mask)
9701 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
9702
9703 if (info->push_p)
9704 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
9705
9706 if (info->calls_p)
9707 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
9708
9709 if (info->gp_save_offset)
9710 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
9711
9712 if (info->fp_save_offset)
9713 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
9714
9715 if (info->altivec_save_offset)
9716 fprintf (stderr, "\taltivec_save_offset = %5d\n",
9717 info->altivec_save_offset);
9718
9719 if (info->spe_gp_save_offset)
9720 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
9721 info->spe_gp_save_offset);
9722
9723 if (info->vrsave_save_offset)
9724 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
9725 info->vrsave_save_offset);
9726
9727 if (info->lr_save_offset)
9728 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
9729
9730 if (info->cr_save_offset)
9731 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
9732
9733 if (info->toc_save_offset)
9734 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
9735
9736 if (info->varargs_save_offset)
9737 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
9738
9739 if (info->total_size)
9740 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
9741
9742 if (info->varargs_size)
9743 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
9744
9745 if (info->vars_size)
9746 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
9747
9748 if (info->parm_size)
9749 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
9750
9751 if (info->fixed_size)
9752 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
9753
9754 if (info->gp_size)
9755 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
9756
9757 if (info->spe_gp_size)
9758 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
9759
9760 if (info->fp_size)
9761 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
9762
9763 if (info->altivec_size)
9764 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
9765
9766 if (info->vrsave_size)
9767 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
9768
9769 if (info->altivec_padding_size)
9770 fprintf (stderr, "\taltivec_padding_size= %5d\n",
9771 info->altivec_padding_size);
9772
9773 if (info->spe_padding_size)
9774 fprintf (stderr, "\tspe_padding_size = %5d\n",
9775 info->spe_padding_size);
9776
9777 if (info->lr_size)
9778 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
9779
9780 if (info->cr_size)
9781 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
9782
9783 if (info->toc_size)
9784 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
9785
9786 if (info->save_size)
9787 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
9788
9789 if (info->reg_size != 4)
9790 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
9791
9792 fprintf (stderr, "\n");
9793 }
9794
9795 rtx
9796 rs6000_return_addr (count, frame)
9797 int count;
9798 rtx frame;
9799 {
9800 /* Currently we don't optimize very well between prolog and body
9801 code and for PIC code the code can be actually quite bad, so
9802 don't try to be too clever here. */
9803 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
9804 {
9805 cfun->machine->ra_needs_full_frame = 1;
9806
9807 return
9808 gen_rtx_MEM
9809 (Pmode,
9810 memory_address
9811 (Pmode,
9812 plus_constant (copy_to_reg
9813 (gen_rtx_MEM (Pmode,
9814 memory_address (Pmode, frame))),
9815 RETURN_ADDRESS_OFFSET)));
9816 }
9817
9818 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
9819 }
9820
9821 /* Say whether a function is a candidate for sibcall handling or not.
9822 We do not allow indirect calls to be optimized into sibling calls.
9823 Also, we can't do it if there are any vector parameters; there's
9824 nowhere to put the VRsave code so that it works; note that functions with
9825 vector parameters are required to have a prototype, so the argument
9826 type info must be available here. (The tail recursion case can work
9827 with vector parameters, but there's no way to distinguish here.) */
9828 static bool
9829 rs6000_function_ok_for_sibcall (decl, exp)
9830 tree decl;
9831 tree exp ATTRIBUTE_UNUSED;
9832 {
9833 tree type;
9834 if (decl)
9835 {
9836 if (TARGET_ALTIVEC_VRSAVE)
9837 {
9838 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
9839 type; type = TREE_CHAIN (type))
9840 {
9841 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
9842 return false;
9843 }
9844 }
9845 if (DEFAULT_ABI == ABI_DARWIN
9846 || (*targetm.binds_local_p) (decl))
9847 {
9848 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
9849
9850 if (!lookup_attribute ("longcall", attr_list)
9851 || lookup_attribute ("shortcall", attr_list))
9852 return true;
9853 }
9854 }
9855 return false;
9856 }
9857
9858 static int
9859 rs6000_ra_ever_killed ()
9860 {
9861 rtx top;
9862 rtx reg;
9863 rtx insn;
9864
9865 /* Irritatingly, there are two kinds of thunks -- those created with
9866 TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
9867 through the regular part of the compiler. This is a very hacky
9868 way to tell them apart. */
9869 if (current_function_is_thunk && !no_new_pseudos)
9870 return 0;
9871
9872 /* regs_ever_live has LR marked as used if any sibcalls are present,
9873 but this should not force saving and restoring in the
9874 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
9875 clobbers LR, so that is inappropriate. */
9876
9877 /* Also, the prologue can generate a store into LR that
9878 doesn't really count, like this:
9879
9880 move LR->R0
9881 bcl to set PIC register
9882 move LR->R31
9883 move R0->LR
9884
9885 When we're called from the epilogue, we need to avoid counting
9886 this as a store. */
9887
9888 push_topmost_sequence ();
9889 top = get_insns ();
9890 pop_topmost_sequence ();
9891 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
9892
9893 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
9894 {
9895 if (INSN_P (insn))
9896 {
9897 if (FIND_REG_INC_NOTE (insn, reg))
9898 return 1;
9899 else if (GET_CODE (insn) == CALL_INSN
9900 && !SIBLING_CALL_P (insn))
9901 return 1;
9902 else if (set_of (reg, insn) != NULL_RTX
9903 && !prologue_epilogue_contains (insn))
9904 return 1;
9905 }
9906 }
9907 return 0;
9908 }
9909 \f
9910 /* Add a REG_MAYBE_DEAD note to the insn. */
9911 static void
9912 rs6000_maybe_dead (insn)
9913 rtx insn;
9914 {
9915 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
9916 const0_rtx,
9917 REG_NOTES (insn));
9918 }
9919
9920 /* Emit instructions needed to load the TOC register.
9921 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9922 a constant pool; or for SVR4 -fpic. */
9923
9924 void
9925 rs6000_emit_load_toc_table (fromprolog)
9926 int fromprolog;
9927 {
9928 rtx dest, insn;
9929 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
9930
9931 if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
9932 {
9933 rtx temp = (fromprolog
9934 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
9935 : gen_reg_rtx (Pmode));
9936 insn = emit_insn (gen_load_toc_v4_pic_si (temp));
9937 if (fromprolog)
9938 rs6000_maybe_dead (insn);
9939 insn = emit_move_insn (dest, temp);
9940 if (fromprolog)
9941 rs6000_maybe_dead (insn);
9942 }
9943 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
9944 {
9945 char buf[30];
9946 rtx tempLR = (fromprolog
9947 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
9948 : gen_reg_rtx (Pmode));
9949 rtx temp0 = (fromprolog
9950 ? gen_rtx_REG (Pmode, 0)
9951 : gen_reg_rtx (Pmode));
9952 rtx symF;
9953
9954 /* Possibly create the TOC section. */
9955 if (! toc_initialized)
9956 {
9957 toc_section ();
9958 function_section (current_function_decl);
9959 }
9960
9961 if (fromprolog)
9962 {
9963 rtx symL;
9964
9965 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
9966 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9967
9968 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
9969 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9970
9971 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
9972 symF)));
9973 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
9974 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
9975 symL,
9976 symF)));
9977 }
9978 else
9979 {
9980 rtx tocsym;
9981 static int reload_toc_labelno = 0;
9982
9983 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
9984
9985 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
9986 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9987
9988 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
9989 emit_move_insn (dest, tempLR);
9990 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
9991 }
9992 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
9993 if (fromprolog)
9994 rs6000_maybe_dead (insn);
9995 }
9996 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
9997 {
9998 /* This is for AIX code running in non-PIC ELF32. */
9999 char buf[30];
10000 rtx realsym;
10001 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
10002 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10003
10004 insn = emit_insn (gen_elf_high (dest, realsym));
10005 if (fromprolog)
10006 rs6000_maybe_dead (insn);
10007 insn = emit_insn (gen_elf_low (dest, dest, realsym));
10008 if (fromprolog)
10009 rs6000_maybe_dead (insn);
10010 }
10011 else if (DEFAULT_ABI == ABI_AIX)
10012 {
10013 if (TARGET_32BIT)
10014 insn = emit_insn (gen_load_toc_aix_si (dest));
10015 else
10016 insn = emit_insn (gen_load_toc_aix_di (dest));
10017 if (fromprolog)
10018 rs6000_maybe_dead (insn);
10019 }
10020 else
10021 abort ();
10022 }
10023
10024 int
10025 get_TOC_alias_set ()
10026 {
10027 static int set = -1;
10028 if (set == -1)
10029 set = new_alias_set ();
10030 return set;
10031 }
10032
10033 /* This returns nonzero if the current function uses the TOC. This is
10034 determined by the presence of (unspec ... UNSPEC_TOC), which is
10035 generated by the various load_toc_* patterns. */
10036
10037 int
10038 uses_TOC ()
10039 {
10040 rtx insn;
10041
10042 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10043 if (INSN_P (insn))
10044 {
10045 rtx pat = PATTERN (insn);
10046 int i;
10047
10048 if (GET_CODE (pat) == PARALLEL)
10049 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
10050 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
10051 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == UNSPEC_TOC)
10052 return 1;
10053 }
10054 return 0;
10055 }
10056
10057 rtx
10058 create_TOC_reference (symbol)
10059 rtx symbol;
10060 {
10061 return gen_rtx_PLUS (Pmode,
10062 gen_rtx_REG (Pmode, TOC_REGISTER),
10063 gen_rtx_CONST (Pmode,
10064 gen_rtx_MINUS (Pmode, symbol,
10065 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
10066 }
10067
10068 #if TARGET_AIX
10069 /* __throw will restore its own return address to be the same as the
10070 return address of the function that the throw is being made to.
10071 This is unfortunate, because we want to check the original
10072 return address to see if we need to restore the TOC.
10073 So we have to squirrel it away here.
10074 This is used only in compiling __throw and __rethrow.
10075
10076 Most of this code should be removed by CSE. */
10077 static rtx insn_after_throw;
10078
10079 /* This does the saving... */
10080 void
10081 rs6000_aix_emit_builtin_unwind_init ()
10082 {
10083 rtx mem;
10084 rtx stack_top = gen_reg_rtx (Pmode);
10085 rtx opcode_addr = gen_reg_rtx (Pmode);
10086
10087 insn_after_throw = gen_reg_rtx (SImode);
10088
10089 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
10090 emit_move_insn (stack_top, mem);
10091
10092 mem = gen_rtx_MEM (Pmode,
10093 gen_rtx_PLUS (Pmode, stack_top,
10094 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
10095 emit_move_insn (opcode_addr, mem);
10096 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
10097 }
10098
10099 /* Emit insns to _restore_ the TOC register, at runtime (specifically
10100 in _eh.o). Only used on AIX.
10101
10102 The idea is that on AIX, function calls look like this:
10103 bl somefunction-trampoline
10104 lwz r2,20(sp)
10105
10106 and later,
10107 somefunction-trampoline:
10108 stw r2,20(sp)
10109 ... load function address in the count register ...
10110 bctr
10111 or like this, if the linker determines that this is not a cross-module call
10112 and so the TOC need not be restored:
10113 bl somefunction
10114 nop
10115 or like this, if the compiler could determine that this is not a
10116 cross-module call:
10117 bl somefunction
10118 Now, the tricky bit here is that register 2 is saved and restored
10119 by the _linker_, so we can't readily generate debugging information
10120 for it. So we need to go back up the call chain looking at the
10121 insns at return addresses to see which calls saved the TOC register
10122 and so see where it gets restored from.
10123
10124 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
10125 just before the actual epilogue.
10126
10127 On the bright side, this incurs no space or time overhead unless an
10128 exception is thrown, except for the extra code in libgcc.a.
10129
10130 The parameter STACKSIZE is a register containing (at runtime)
10131 the amount to be popped off the stack in addition to the stack frame
10132 of this routine (which will be __throw or __rethrow, and so is
10133 guaranteed to have a stack frame). */
10134
10135 void
10136 rs6000_emit_eh_toc_restore (stacksize)
10137 rtx stacksize;
10138 {
10139 rtx top_of_stack;
10140 rtx bottom_of_stack = gen_reg_rtx (Pmode);
10141 rtx tocompare = gen_reg_rtx (SImode);
10142 rtx opcode = gen_reg_rtx (SImode);
10143 rtx opcode_addr = gen_reg_rtx (Pmode);
10144 rtx mem;
10145 rtx loop_start = gen_label_rtx ();
10146 rtx no_toc_restore_needed = gen_label_rtx ();
10147 rtx loop_exit = gen_label_rtx ();
10148
10149 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
10150 set_mem_alias_set (mem, rs6000_sr_alias_set);
10151 emit_move_insn (bottom_of_stack, mem);
10152
10153 top_of_stack = expand_binop (Pmode, add_optab,
10154 bottom_of_stack, stacksize,
10155 NULL_RTX, 1, OPTAB_WIDEN);
10156
10157 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
10158 : 0xE8410028, SImode));
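/* These two opcodes are the instructions the loop below looks for at
   each return address: 0x80410014 is "lwz r2,20(r1)" and 0xE8410028
   is "ld r2,40(r1)", the TOC reloads that follow a cross-module call
   on 32-bit and 64-bit AIX respectively. */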
10159
10160 if (insn_after_throw == NULL_RTX)
10161 abort ();
10162 emit_move_insn (opcode, insn_after_throw);
10163
10164 emit_note (NULL, NOTE_INSN_LOOP_BEG);
10165 emit_label (loop_start);
10166
10167 do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
10168 SImode, NULL_RTX, NULL_RTX,
10169 no_toc_restore_needed);
10170
10171 mem = gen_rtx_MEM (Pmode,
10172 gen_rtx_PLUS (Pmode, bottom_of_stack,
10173 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
10174 emit_move_insn (gen_rtx_REG (Pmode, 2), mem);
10175
10176 emit_label (no_toc_restore_needed);
10177 do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
10178 Pmode, NULL_RTX, NULL_RTX,
10179 loop_exit);
10180
10181 mem = gen_rtx_MEM (Pmode, bottom_of_stack);
10182 set_mem_alias_set (mem, rs6000_sr_alias_set);
10183 emit_move_insn (bottom_of_stack, mem);
10184
10185 mem = gen_rtx_MEM (Pmode,
10186 gen_rtx_PLUS (Pmode, bottom_of_stack,
10187 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
10188 emit_move_insn (opcode_addr, mem);
10189 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
10190
10191 emit_note (NULL, NOTE_INSN_LOOP_CONT);
10192 emit_jump (loop_start);
10193 emit_note (NULL, NOTE_INSN_LOOP_END);
10194 emit_label (loop_exit);
10195 }
10196 #endif /* TARGET_AIX */
10197 \f
10198 /* This ties together stack memory (MEM with an alias set of
10199 rs6000_sr_alias_set) and the change to the stack pointer. */
10200
10201 static void
10202 rs6000_emit_stack_tie ()
10203 {
10204 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
10205
10206 set_mem_alias_set (mem, rs6000_sr_alias_set);
10207 emit_insn (gen_stack_tie (mem));
10208 }
10209
10210 /* Emit the correct code for allocating stack space, as insns.
10211 If COPY_R12, make sure a copy of the old frame is left in r12.
10212 The generated code may use hard register 0 as a temporary. */
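
/* For the common case -- TARGET_UPDATE and a frame of at most 32767
   bytes -- this amounts to a single store-with-update (a sketch,
   32-bit; 64-bit uses stdu):
       stwu r1,-SIZE(r1)
   which decrements the stack pointer and stores the back chain in one
   instruction.  Larger frames first load -SIZE into r0.  */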
10213
10214 static void
10215 rs6000_emit_allocate_stack (size, copy_r12)
10216 HOST_WIDE_INT size;
10217 int copy_r12;
10218 {
10219 rtx insn;
10220 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
10221 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
10222 rtx todec = GEN_INT (-size);
10223
10224 if (current_function_limit_stack)
10225 {
10226 if (REG_P (stack_limit_rtx)
10227 && REGNO (stack_limit_rtx) > 1
10228 && REGNO (stack_limit_rtx) <= 31)
10229 {
10230 emit_insn (TARGET_32BIT
10231 ? gen_addsi3 (tmp_reg,
10232 stack_limit_rtx,
10233 GEN_INT (size))
10234 : gen_adddi3 (tmp_reg,
10235 stack_limit_rtx,
10236 GEN_INT (size)));
10237
10238 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
10239 const0_rtx));
10240 }
10241 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
10242 && TARGET_32BIT
10243 && DEFAULT_ABI == ABI_V4)
10244 {
10245 rtx toload = gen_rtx_CONST (VOIDmode,
10246 gen_rtx_PLUS (Pmode,
10247 stack_limit_rtx,
10248 GEN_INT (size)));
10249
10250 emit_insn (gen_elf_high (tmp_reg, toload));
10251 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
10252 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
10253 const0_rtx));
10254 }
10255 else
10256 warning ("stack limit expression is not supported");
10257 }
10258
10259 if (copy_r12 || ! TARGET_UPDATE)
10260 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
10261
10262 if (TARGET_UPDATE)
10263 {
10264 if (size > 32767)
10265 {
10266 /* Need a note here so that try_split doesn't get confused. */
10267 if (get_last_insn() == NULL_RTX)
10268 emit_note (0, NOTE_INSN_DELETED);
10269 insn = emit_move_insn (tmp_reg, todec);
10270 try_split (PATTERN (insn), insn, 0);
10271 todec = tmp_reg;
10272 }
10273
10274 insn = emit_insn (TARGET_32BIT
10275 ? gen_movsi_update (stack_reg, stack_reg,
10276 todec, stack_reg)
10277 : gen_movdi_update (stack_reg, stack_reg,
10278 todec, stack_reg));
10279 }
10280 else
10281 {
10282 insn = emit_insn (TARGET_32BIT
10283 ? gen_addsi3 (stack_reg, stack_reg, todec)
10284 : gen_adddi3 (stack_reg, stack_reg, todec));
10285 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
10286 gen_rtx_REG (Pmode, 12));
10287 }
10288
10289 RTX_FRAME_RELATED_P (insn) = 1;
10290 REG_NOTES (insn) =
10291 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10292 gen_rtx_SET (VOIDmode, stack_reg,
10293 gen_rtx_PLUS (Pmode, stack_reg,
10294 GEN_INT (-size))),
10295 REG_NOTES (insn));
10296 }
10297
10298 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
10299 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
10300 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
10301 deduce these equivalences by itself so it wasn't necessary to hold
10302 its hand so much. */
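
/* A worked example (a sketch): an AltiVec save whose pattern is
       (set (mem:V4SI (plus (reg 12) (reg 0))) (reg:V4SI v20))
   called with REG = r12, VAL = SIZE, REG2 = r0 and RREG = (const_int OFF)
   gets, after the replacements and simplification below, the note
       (set (mem:V4SI (plus (reg 1) (const_int SIZE+OFF)))
            (reg:V4SI v20))
   which is a form the dwarf2 frame code can digest.  */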
10303
10304 static void
10305 rs6000_frame_related (insn, reg, val, reg2, rreg)
10306 rtx insn;
10307 rtx reg;
10308 HOST_WIDE_INT val;
10309 rtx reg2;
10310 rtx rreg;
10311 {
10312 rtx real, temp;
10313
10314 /* copy_rtx will not make unique copies of registers, so we need to
10315 ensure we don't have unwanted sharing here. */
10316 if (reg == reg2)
10317 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10318
10319 if (reg == rreg)
10320 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10321
10322 real = copy_rtx (PATTERN (insn));
10323
10324 if (reg2 != NULL_RTX)
10325 real = replace_rtx (real, reg2, rreg);
10326
10327 real = replace_rtx (real, reg,
10328 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
10329 STACK_POINTER_REGNUM),
10330 GEN_INT (val)));
10331
10332 /* We expect that 'real' is either a SET or a PARALLEL containing
10333 SETs (and possibly other stuff). In a PARALLEL, all the SETs
10334 are important so they all have to be marked RTX_FRAME_RELATED_P. */
10335
10336 if (GET_CODE (real) == SET)
10337 {
10338 rtx set = real;
10339
10340 temp = simplify_rtx (SET_SRC (set));
10341 if (temp)
10342 SET_SRC (set) = temp;
10343 temp = simplify_rtx (SET_DEST (set));
10344 if (temp)
10345 SET_DEST (set) = temp;
10346 if (GET_CODE (SET_DEST (set)) == MEM)
10347 {
10348 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10349 if (temp)
10350 XEXP (SET_DEST (set), 0) = temp;
10351 }
10352 }
10353 else if (GET_CODE (real) == PARALLEL)
10354 {
10355 int i;
10356 for (i = 0; i < XVECLEN (real, 0); i++)
10357 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
10358 {
10359 rtx set = XVECEXP (real, 0, i);
10360
10361 temp = simplify_rtx (SET_SRC (set));
10362 if (temp)
10363 SET_SRC (set) = temp;
10364 temp = simplify_rtx (SET_DEST (set));
10365 if (temp)
10366 SET_DEST (set) = temp;
10367 if (GET_CODE (SET_DEST (set)) == MEM)
10368 {
10369 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10370 if (temp)
10371 XEXP (SET_DEST (set), 0) = temp;
10372 }
10373 RTX_FRAME_RELATED_P (set) = 1;
10374 }
10375 }
10376 else
10377 abort ();
10378
10379 if (TARGET_SPE)
10380 real = spe_synthesize_frame_save (real);
10381
10382 RTX_FRAME_RELATED_P (insn) = 1;
10383 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10384 real,
10385 REG_NOTES (insn));
10386 }
10387
10388 /* Given an SPE frame note, return a PARALLEL of SETs with the
10389 original note, plus a synthetic register save. */
10390
10391 static rtx
10392 spe_synthesize_frame_save (real)
10393 rtx real;
10394 {
10395 rtx synth, offset, reg, real2;
10396
10397 if (GET_CODE (real) != SET
10398 || GET_MODE (SET_SRC (real)) != V2SImode)
10399 return real;
10400
10401 /* For the SPE, registers saved in 64 bits get a PARALLEL for their
10402 frame-related note. The parallel contains a set of the register
10403 being saved, and another set to a synthetic register (n+1200).
10404 This is so we can differentiate between 64-bit and 32-bit saves.
10405 Words cannot describe this nastiness. */
10406
10407 if (GET_CODE (SET_DEST (real)) != MEM
10408 || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
10409 || GET_CODE (SET_SRC (real)) != REG)
10410 abort ();
10411
10412 /* Transform:
10413 (set (mem (plus (reg x) (const y)))
10414 (reg z))
10415 into:
10416 (set (mem (plus (reg x) (const y+4)))
10417 (reg z+1200))
10418 */
10419
10420 real2 = copy_rtx (real);
10421 PUT_MODE (SET_DEST (real2), SImode);
10422 reg = SET_SRC (real2);
10423 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
10424 synth = copy_rtx (real2);
10425
10426 if (BYTES_BIG_ENDIAN)
10427 {
10428 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
10429 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
10430 }
10431
10432 reg = SET_SRC (synth);
10433
10434 synth = replace_rtx (synth, reg,
10435 gen_rtx_REG (SImode, REGNO (reg) + 1200));
10436
10437 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
10438 synth = replace_rtx (synth, offset,
10439 GEN_INT (INTVAL (offset)
10440 + (BYTES_BIG_ENDIAN ? 0 : 4)));
10441
10442 RTX_FRAME_RELATED_P (synth) = 1;
10443 RTX_FRAME_RELATED_P (real2) = 1;
10444 if (BYTES_BIG_ENDIAN)
10445 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
10446 else
10447 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
10448
10449 return real;
10450 }
10451
10452 /* Returns an insn that has a vrsave set operation with the
10453 appropriate CLOBBERs. */
10454
10455 static rtx
10456 generate_set_vrsave (reg, info, epiloguep)
10457 rtx reg;
10458 rs6000_stack_t *info;
10459 int epiloguep;
10460 {
10461 int nclobs, i;
10462 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
10463 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
10464
10465 clobs[0]
10466 = gen_rtx_SET (VOIDmode,
10467 vrsave,
10468 gen_rtx_UNSPEC_VOLATILE (SImode,
10469 gen_rtvec (2, reg, vrsave),
10470 30));
10471
10472 nclobs = 1;
10473
10474 /* We need to clobber the registers in the mask so the scheduler
10475 does not move sets to VRSAVE before sets of AltiVec registers.
10476
10477 However, if the function receives nonlocal gotos, reload will set
10478 all call saved registers live. We will end up with:
10479
10480 (set (reg 999) (mem))
10481 (parallel [ (set (reg vrsave) (unspec blah))
10482 (clobber (reg 999))])
10483
10484 The clobber will cause the store into reg 999 to be dead, and
10485 flow will attempt to delete an epilogue insn. In this case, we
10486 need an unspec use/set of the register. */
10487
10488 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
10489 if ((info->vrsave_mask & ALTIVEC_REG_BIT (i)) != 0)
10490 {
10491 if (!epiloguep || call_used_regs [i])
10492 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
10493 gen_rtx_REG (V4SImode, i));
10494 else
10495 {
10496 rtx reg = gen_rtx_REG (V4SImode, i);
10497
10498 clobs[nclobs++]
10499 = gen_rtx_SET (VOIDmode,
10500 reg,
10501 gen_rtx_UNSPEC (V4SImode,
10502 gen_rtvec (1, reg), 27));
10503 }
10504 }
10505
10506 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
10507
10508 for (i = 0; i < nclobs; ++i)
10509 XVECEXP (insn, 0, i) = clobs[i];
10510
10511 return insn;
10512 }
10513
10514 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
10515 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
10516
10517 static void
10518 emit_frame_save (frame_reg, frame_ptr, mode, regno, offset, total_size)
10519 rtx frame_reg;
10520 rtx frame_ptr;
10521 enum machine_mode mode;
10522 unsigned int regno;
10523 int offset;
10524 int total_size;
10525 {
10526 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
10527 rtx replacea, replaceb;
10528
10529 int_rtx = GEN_INT (offset);
10530
10531 /* Some cases need register-indexed addressing. */
10532 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
10533 || (TARGET_SPE_ABI
10534 && SPE_VECTOR_MODE (mode)
10535 && !SPE_CONST_OFFSET_OK (offset)))
10536 {
10537 /* Whoever calls us must make sure r11 is available in the
10538 flow path of instructions in the prologue. */
10539 offset_rtx = gen_rtx_REG (Pmode, 11);
10540 emit_move_insn (offset_rtx, int_rtx);
10541
10542 replacea = offset_rtx;
10543 replaceb = int_rtx;
10544 }
10545 else
10546 {
10547 offset_rtx = int_rtx;
10548 replacea = NULL_RTX;
10549 replaceb = NULL_RTX;
10550 }
10551
10552 reg = gen_rtx_REG (mode, regno);
10553 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
10554 mem = gen_rtx_MEM (mode, addr);
10555 set_mem_alias_set (mem, rs6000_sr_alias_set);
10556
10557 insn = emit_move_insn (mem, reg);
10558
10559 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
10560 }
10561
10562 /* Emit an offset memory reference suitable for a frame store, while
10563 converting to a valid addressing mode. */
10564
10565 static rtx
10566 gen_frame_mem_offset (mode, reg, offset)
10567 enum machine_mode mode;
10568 rtx reg;
10569 int offset;
10570 {
10571 rtx int_rtx, offset_rtx;
10572
10573 int_rtx = GEN_INT (offset);
10574
10575 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
10576 {
10577 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10578 emit_move_insn (offset_rtx, int_rtx);
10579 }
10580 else
10581 offset_rtx = int_rtx;
10582
10583 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
10584 }
10585
10586 /* Emit function prologue as insns. */
10587
10588 void
10589 rs6000_emit_prologue ()
10590 {
10591 rs6000_stack_t *info = rs6000_stack_info ();
10592 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
10593 int reg_size = TARGET_POWERPC64 ? 8 : 4;
10594 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
10595 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
10596 rtx frame_reg_rtx = sp_reg_rtx;
10597 rtx cr_save_rtx = NULL;
10598 rtx insn;
10599 int saving_FPRs_inline;
10600 int using_store_multiple;
10601 HOST_WIDE_INT sp_offset = 0;
10602
10603 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
10604 {
10605 reg_mode = V2SImode;
10606 reg_size = 8;
10607 }
10608
10609 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
10610 && (!TARGET_SPE_ABI
10611 || info->spe_64bit_regs_used == 0)
10612 && info->first_gp_reg_save < 31);
10613 saving_FPRs_inline = (info->first_fp_reg_save == 64
10614 || FP_SAVE_INLINE (info->first_fp_reg_save));
10615
10616 /* For V.4, update the stack before we do any saving, and set the back pointer. */
10617 if (info->push_p && DEFAULT_ABI == ABI_V4)
10618 {
10619 if (info->total_size < 32767)
10620 sp_offset = info->total_size;
10621 else
10622 frame_reg_rtx = frame_ptr_rtx;
10623 rs6000_emit_allocate_stack (info->total_size,
10624 (frame_reg_rtx != sp_reg_rtx
10625 && (info->cr_save_p
10626 || info->lr_save_p
10627 || info->first_fp_reg_save < 64
10628 || info->first_gp_reg_save < 32
10629 )));
10630 if (frame_reg_rtx != sp_reg_rtx)
10631 rs6000_emit_stack_tie ();
10632 }
10633
10634 /* Save AltiVec registers if needed. */
10635 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
10636 {
10637 int i;
10638
10639 /* There should be a non-inline version of this, for when we
10640 are saving lots of vector registers. */
10641 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
10642 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
10643 {
10644 rtx areg, savereg, mem;
10645 int offset;
10646
10647 offset = info->altivec_save_offset + sp_offset
10648 + 16 * (i - info->first_altivec_reg_save);
10649
10650 savereg = gen_rtx_REG (V4SImode, i);
10651
10652 areg = gen_rtx_REG (Pmode, 0);
10653 emit_move_insn (areg, GEN_INT (offset));
10654
10655 /* AltiVec addressing mode is [reg+reg]. */
10656 mem = gen_rtx_MEM (V4SImode,
10657 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
10658
10659 set_mem_alias_set (mem, rs6000_sr_alias_set);
10660
10661 insn = emit_move_insn (mem, savereg);
10662
10663 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10664 areg, GEN_INT (offset));
10665 }
10666 }
10667
10668 /* VRSAVE is a bit vector representing which AltiVec registers
10669 are used. The OS uses this to determine which vector
10670 registers to save on a context switch. We need to save
10671 VRSAVE on the stack frame, add whatever AltiVec registers we
10672 used in this function, and do the corresponding magic in the
10673 epilogue. */
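
/* For instance (a sketch, assuming the usual ALTIVEC_REG_BIT layout
   in which v0 gets the most significant bit): a function touching
   only v20 and v21 ORs 0x00000c00 into VRSAVE here.  */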
10674
10675 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
10676 {
10677 rtx reg, mem, vrsave;
10678 int offset;
10679
10680 /* Get VRSAVE onto a GPR. */
10681 reg = gen_rtx_REG (SImode, 12);
10682 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
10683 if (TARGET_MACHO)
10684 emit_insn (gen_get_vrsave_internal (reg));
10685 else
10686 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
10687
10688 /* Save VRSAVE. */
10689 offset = info->vrsave_save_offset + sp_offset;
10690 mem
10691 = gen_rtx_MEM (SImode,
10692 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
10693 set_mem_alias_set (mem, rs6000_sr_alias_set);
10694 insn = emit_move_insn (mem, reg);
10695
10696 /* Include the registers in the mask. */
10697 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
10698
10699 insn = emit_insn (generate_set_vrsave (reg, info, 0));
10700 }
10701
10702 /* If we use the link register, get it into r0. */
10703 if (info->lr_save_p)
10704 emit_move_insn (gen_rtx_REG (Pmode, 0),
10705 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10706
10707 /* If we need to save CR, put it into r12. */
10708 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
10709 {
10710 cr_save_rtx = gen_rtx_REG (SImode, 12);
10711 emit_insn (gen_movesi_from_cr (cr_save_rtx));
10712 }
10713
10714 /* Do any required saving of FPRs. If there are only one or two to
10715 save, do it ourselves; otherwise, call an out-of-line save function. */
10716 if (saving_FPRs_inline)
10717 {
10718 int i;
10719 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
10720 if ((regs_ever_live[info->first_fp_reg_save+i]
10721 && ! call_used_regs[info->first_fp_reg_save+i]))
10722 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
10723 info->first_fp_reg_save + i,
10724 info->fp_save_offset + sp_offset + 8 * i,
10725 info->total_size);
10726 }
10727 else if (info->first_fp_reg_save != 64)
10728 {
10729 int i;
10730 char rname[30];
10731 const char *alloc_rname;
10732 rtvec p;
10733 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
10734
10735 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
10736 gen_rtx_REG (Pmode,
10737 LINK_REGISTER_REGNUM));
10738 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
10739 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
10740 alloc_rname = ggc_strdup (rname);
10741 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
10742 gen_rtx_SYMBOL_REF (Pmode,
10743 alloc_rname));
10744 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
10745 {
10746 rtx addr, reg, mem;
10747 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
10748 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10749 GEN_INT (info->fp_save_offset
10750 + sp_offset + 8*i));
10751 mem = gen_rtx_MEM (DFmode, addr);
10752 set_mem_alias_set (mem, rs6000_sr_alias_set);
10753
10754 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
10755 }
10756 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
10757 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10758 NULL_RTX, NULL_RTX);
10759 }
10760
10761 /* Save GPRs. This is done as a PARALLEL if we are using
10762 the store-multiple instructions. */
10763 if (using_store_multiple)
10764 {
10765 rtvec p;
10766 int i;
10767 p = rtvec_alloc (32 - info->first_gp_reg_save);
10768 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10769 {
10770 rtx addr, reg, mem;
10771 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
10772 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10773 GEN_INT (info->gp_save_offset
10774 + sp_offset
10775 + reg_size * i));
10776 mem = gen_rtx_MEM (reg_mode, addr);
10777 set_mem_alias_set (mem, rs6000_sr_alias_set);
10778
10779 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
10780 }
10781 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
10782 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10783 NULL_RTX, NULL_RTX);
10784 }
10785 else
10786 {
10787 int i;
10788 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10789 if ((regs_ever_live[info->first_gp_reg_save+i]
10790 && ! call_used_regs[info->first_gp_reg_save+i])
10791 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
10792 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
10793 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
10794 {
10795 rtx addr, reg, mem;
10796 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
10797
10798 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
10799 {
10800 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
10801 rtx b;
10802
10803 if (!SPE_CONST_OFFSET_OK (offset))
10804 {
10805 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10806 emit_move_insn (b, GEN_INT (offset));
10807 }
10808 else
10809 b = GEN_INT (offset);
10810
10811 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
10812 mem = gen_rtx_MEM (V2SImode, addr);
10813 set_mem_alias_set (mem, rs6000_sr_alias_set);
10814 insn = emit_move_insn (mem, reg);
10815
10816 if (GET_CODE (b) == CONST_INT)
10817 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10818 NULL_RTX, NULL_RTX);
10819 else
10820 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10821 b, GEN_INT (offset));
10822 }
10823 else
10824 {
10825 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10826 GEN_INT (info->gp_save_offset
10827 + sp_offset
10828 + reg_size * i));
10829 mem = gen_rtx_MEM (reg_mode, addr);
10830 set_mem_alias_set (mem, rs6000_sr_alias_set);
10831
10832 insn = emit_move_insn (mem, reg);
10833 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10834 NULL_RTX, NULL_RTX);
10835 }
10836 }
10837 }
10838
10839 /* ??? There's no need to emit actual instructions here, but it's the
10840 easiest way to get the frame unwind information emitted. */
10841 if (current_function_calls_eh_return)
10842 {
10843 unsigned int i, regno;
10844
10845 for (i = 0; ; ++i)
10846 {
10847 regno = EH_RETURN_DATA_REGNO (i);
10848 if (regno == INVALID_REGNUM)
10849 break;
10850
10851 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
10852 info->ehrd_offset + sp_offset
10853 + reg_size * (int) i,
10854 info->total_size);
10855 }
10856 }
10857
10858 /* Save lr if we used it. */
10859 if (info->lr_save_p)
10860 {
10861 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10862 GEN_INT (info->lr_save_offset + sp_offset));
10863 rtx reg = gen_rtx_REG (Pmode, 0);
10864 rtx mem = gen_rtx_MEM (Pmode, addr);
10865 /* This should not be of rs6000_sr_alias_set, because of
10866 __builtin_return_address. */
10867
10868 insn = emit_move_insn (mem, reg);
10869 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10870 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10871 }
10872
10873 /* Save CR if we use any that must be preserved. */
10874 if (info->cr_save_p)
10875 {
10876 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10877 GEN_INT (info->cr_save_offset + sp_offset));
10878 rtx mem = gen_rtx_MEM (SImode, addr);
10879
10880 set_mem_alias_set (mem, rs6000_sr_alias_set);
10881
10882 /* If r12 was used to hold the original sp, copy cr into r0 now
10883 that it's free. */
10884 if (REGNO (frame_reg_rtx) == 12)
10885 {
10886 cr_save_rtx = gen_rtx_REG (SImode, 0);
10887 emit_insn (gen_movesi_from_cr (cr_save_rtx));
10888 }
10889 insn = emit_move_insn (mem, cr_save_rtx);
10890
10891 /* Now, there's no way that dwarf2out_frame_debug_expr is going
10892 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
10893 But that's OK. All we have to do is specify that _one_ condition
10894 code register is saved in this stack slot. The thrower's epilogue
10895 will then restore all the call-saved registers.
10896 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
10897 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10898 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
10899 }
10900
10901 /* Update stack and set back pointer unless this is V.4,
10902 for which it was done previously. */
10903 if (info->push_p && DEFAULT_ABI != ABI_V4)
10904 rs6000_emit_allocate_stack (info->total_size, FALSE);
10905
10906 /* Set frame pointer, if needed. */
10907 if (frame_pointer_needed)
10908 {
10909 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
10910 sp_reg_rtx);
10911 RTX_FRAME_RELATED_P (insn) = 1;
10912 }
10913
10914 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
10915 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
10916 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
10917 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
10918 {
10919 /* If emit_load_toc_table will use the link register, we need to save
10920 it. We use R11 for this purpose because emit_load_toc_table
10921 can use register 0. This allows us to use a plain 'blr' to return
10922 from the procedure more often. */
10923 int save_LR_around_toc_setup = (TARGET_ELF
10924 && DEFAULT_ABI != ABI_AIX
10925 && flag_pic
10926 && ! info->lr_save_p
10927 && EXIT_BLOCK_PTR->pred != NULL);
10928 if (save_LR_around_toc_setup)
10929 emit_move_insn (gen_rtx_REG (Pmode, 11),
10930 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10931
10932 rs6000_emit_load_toc_table (TRUE);
10933
10934 if (save_LR_around_toc_setup)
10935 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
10936 gen_rtx_REG (Pmode, 11));
10937 }
10938
10939 #if TARGET_MACHO
10940 if (DEFAULT_ABI == ABI_DARWIN
10941 && flag_pic && current_function_uses_pic_offset_table)
10942 {
10943 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
10944 const char *picbase = machopic_function_base_name ();
10945 rtx src = gen_rtx_SYMBOL_REF (Pmode, picbase);
10946
10947 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));
10948
10949 rs6000_maybe_dead (
10950 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
10951 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
10952 }
10953 #endif
10954 }
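
/* Putting the pieces above together: for a small -mabi=v4 function
   that saves only LR and r31, the emitted prologue is roughly (a
   sketch; the offsets depend on the computed frame layout):
       stwu r1,-16(r1)      -- allocate frame, store back chain
       mflr r0
       stw r31,12(r1)
       stw r0,20(r1)        -- LR slot in the caller's frame
       mr r31,r1            -- only if a frame pointer is needed
*/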
10955
10956 /* Write function prologue. */
10957
10958 static void
10959 rs6000_output_function_prologue (file, size)
10960 FILE *file;
10961 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
10962 {
10963 rs6000_stack_t *info = rs6000_stack_info ();
10964
10965 if (TARGET_DEBUG_STACK)
10966 debug_stack_info (info);
10967
10968 /* Write .extern for any function we will call to save and restore
10969 fp values. */
10970 if (info->first_fp_reg_save < 64
10971 && !FP_SAVE_INLINE (info->first_fp_reg_save))
10972 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
10973 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
10974 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
10975 RESTORE_FP_SUFFIX);
10976
10977 /* Write .extern for AIX common mode routines, if needed. */
10978 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
10979 {
10980 fputs ("\t.extern __mulh\n", file);
10981 fputs ("\t.extern __mull\n", file);
10982 fputs ("\t.extern __divss\n", file);
10983 fputs ("\t.extern __divus\n", file);
10984 fputs ("\t.extern __quoss\n", file);
10985 fputs ("\t.extern __quous\n", file);
10986 common_mode_defined = 1;
10987 }
10988
10989 if (! HAVE_prologue)
10990 {
10991 start_sequence ();
10992
10993 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
10994 the "toplevel" insn chain. */
10995 emit_note (0, NOTE_INSN_DELETED);
10996 rs6000_emit_prologue ();
10997 emit_note (0, NOTE_INSN_DELETED);
10998
10999 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11000 {
11001 rtx insn;
11002 unsigned addr = 0;
11003 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
11004 {
11005 INSN_ADDRESSES_NEW (insn, addr);
11006 addr += 4;
11007 }
11008 }
11009
11010 if (TARGET_DEBUG_STACK)
11011 debug_rtx_list (get_insns (), 100);
11012 final (get_insns (), file, FALSE, FALSE);
11013 end_sequence ();
11014 }
11015
11016 rs6000_pic_labelno++;
11017 }
11018
11019 /* Emit function epilogue as insns.
11020
11021 At present, dwarf2out_frame_debug_expr doesn't understand
11022 register restores, so we don't bother setting RTX_FRAME_RELATED_P
11023 anywhere in the epilogue. Most of the insns below would in any case
11024 need special notes to explain where r11 is in relation to the stack. */
11025
11026 void
11027 rs6000_emit_epilogue (sibcall)
11028 int sibcall;
11029 {
11030 rs6000_stack_t *info;
11031 int restoring_FPRs_inline;
11032 int using_load_multiple;
11033 int using_mfcr_multiple;
11034 int use_backchain_to_restore_sp;
11035 int sp_offset = 0;
11036 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
11037 rtx frame_reg_rtx = sp_reg_rtx;
11038 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
11039 int reg_size = TARGET_POWERPC64 ? 8 : 4;
11040 int i;
11041
11042 info = rs6000_stack_info ();
11043
11044 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11045 {
11046 reg_mode = V2SImode;
11047 reg_size = 8;
11048 }
11049
11050 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
11051 && (!TARGET_SPE_ABI
11052 || info->spe_64bit_regs_used == 0)
11053 && info->first_gp_reg_save < 31);
11054 restoring_FPRs_inline = (sibcall
11055 || current_function_calls_eh_return
11056 || info->first_fp_reg_save == 64
11057 || FP_SAVE_INLINE (info->first_fp_reg_save));
11058 use_backchain_to_restore_sp = (frame_pointer_needed
11059 || current_function_calls_alloca
11060 || info->total_size > 32767);
11061 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
11062 || rs6000_cpu == PROCESSOR_PPC603
11063 || rs6000_cpu == PROCESSOR_PPC750
11064 || optimize_size);
11065
11066 /* If we have a frame pointer, a call to alloca, or a large stack
11067 frame, restore the old stack pointer using the backchain. Otherwise,
11068 we know what size to update it with. */
11069 if (use_backchain_to_restore_sp)
11070 {
11071 /* Under V.4, don't reset the stack pointer until after we're done
11072 loading the saved registers. */
11073 if (DEFAULT_ABI == ABI_V4)
11074 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
11075
11076 emit_move_insn (frame_reg_rtx,
11077 gen_rtx_MEM (Pmode, sp_reg_rtx));
11078
11079 }
11080 else if (info->push_p)
11081 {
11082 if (DEFAULT_ABI == ABI_V4)
11083 sp_offset = info->total_size;
11084 else
11085 {
11086 emit_insn (TARGET_32BIT
11087 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
11088 GEN_INT (info->total_size))
11089 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
11090 GEN_INT (info->total_size)));
11091 }
11092 }
11093
11094 /* Restore AltiVec registers if needed. */
11095 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
11096 {
11097 int i;
11098
11099 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
11100 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
11101 {
11102 rtx addr, areg, mem;
11103
11104 areg = gen_rtx_REG (Pmode, 0);
11105 emit_move_insn
11106 (areg, GEN_INT (info->altivec_save_offset
11107 + sp_offset
11108 + 16 * (i - info->first_altivec_reg_save)));
11109
11110 /* AltiVec addressing mode is [reg+reg]. */
11111 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
11112 mem = gen_rtx_MEM (V4SImode, addr);
11113 set_mem_alias_set (mem, rs6000_sr_alias_set);
11114
11115 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
11116 }
11117 }
11118
11119 /* Restore VRSAVE if needed. */
11120 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
11121 {
11122 rtx addr, mem, reg;
11123
11124 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11125 GEN_INT (info->vrsave_save_offset + sp_offset));
11126 mem = gen_rtx_MEM (SImode, addr);
11127 set_mem_alias_set (mem, rs6000_sr_alias_set);
11128 reg = gen_rtx_REG (SImode, 12);
11129 emit_move_insn (reg, mem);
11130
11131 emit_insn (generate_set_vrsave (reg, info, 1));
11132 }
11133
11134 /* Get the old lr if we saved it. */
11135 if (info->lr_save_p)
11136 {
11137 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
11138 info->lr_save_offset + sp_offset);
11139
11140 set_mem_alias_set (mem, rs6000_sr_alias_set);
11141
11142 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
11143 }
11144
11145 /* Get the old cr if we saved it. */
11146 if (info->cr_save_p)
11147 {
11148 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11149 GEN_INT (info->cr_save_offset + sp_offset));
11150 rtx mem = gen_rtx_MEM (SImode, addr);
11151
11152 set_mem_alias_set (mem, rs6000_sr_alias_set);
11153
11154 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
11155 }
11156
11157 /* Set LR here to try to overlap restores below. */
11158 if (info->lr_save_p)
11159 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
11160 gen_rtx_REG (Pmode, 0));
11161
11162 /* Load exception handler data registers, if needed. */
11163 if (current_function_calls_eh_return)
11164 {
11165 unsigned int i, regno;
11166
11167 for (i = 0; ; ++i)
11168 {
11169 rtx mem;
11170
11171 regno = EH_RETURN_DATA_REGNO (i);
11172 if (regno == INVALID_REGNUM)
11173 break;
11174
11175 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
11176 info->ehrd_offset + sp_offset
11177 + reg_size * (int) i);
11178 set_mem_alias_set (mem, rs6000_sr_alias_set);
11179
11180 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
11181 }
11182 }
11183
11184 /* Restore GPRs. This is done as a PARALLEL if we are using
11185 the load-multiple instructions. */
11186 if (using_load_multiple)
11187 {
11188 rtvec p;
11189 p = rtvec_alloc (32 - info->first_gp_reg_save);
11190 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11191 {
11192 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11193 GEN_INT (info->gp_save_offset
11194 + sp_offset
11195 + reg_size * i));
11196 rtx mem = gen_rtx_MEM (reg_mode, addr);
11197
11198 set_mem_alias_set (mem, rs6000_sr_alias_set);
11199
11200 RTVEC_ELT (p, i) =
11201 gen_rtx_SET (VOIDmode,
11202 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
11203 mem);
11204 }
11205 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11206 }
11207 else
11208 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11209 if ((regs_ever_live[info->first_gp_reg_save+i]
11210 && ! call_used_regs[info->first_gp_reg_save+i])
11211 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
11212 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11213 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
11214 {
11215 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11216 GEN_INT (info->gp_save_offset
11217 + sp_offset
11218 + reg_size * i));
11219 rtx mem = gen_rtx_MEM (reg_mode, addr);
11220
11221 /* Restore 64-bit quantities for SPE. */
11222 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11223 {
11224 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
11225 rtx b;
11226
11227 if (!SPE_CONST_OFFSET_OK (offset))
11228 {
11229 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11230 emit_move_insn (b, GEN_INT (offset));
11231 }
11232 else
11233 b = GEN_INT (offset);
11234
11235 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
11236 mem = gen_rtx_MEM (V2SImode, addr);
11237 }
11238
11239 set_mem_alias_set (mem, rs6000_sr_alias_set);
11240
11241 emit_move_insn (gen_rtx_REG (reg_mode,
11242 info->first_gp_reg_save + i), mem);
11243 }
11244
11245 /* Restore fpr's if we need to do it without calling a function. */
11246 if (restoring_FPRs_inline)
11247 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11248 if ((regs_ever_live[info->first_fp_reg_save+i]
11249 && ! call_used_regs[info->first_fp_reg_save+i]))
11250 {
11251 rtx addr, mem;
11252 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11253 GEN_INT (info->fp_save_offset
11254 + sp_offset
11255 + 8 * i));
11256 mem = gen_rtx_MEM (DFmode, addr);
11257 set_mem_alias_set (mem, rs6000_sr_alias_set);
11258
11259 emit_move_insn (gen_rtx_REG (DFmode,
11260 info->first_fp_reg_save + i),
11261 mem);
11262 }
11263
11264 /* If we saved the CR, restore it here; restore just those fields that were used. */
11265 if (info->cr_save_p)
11266 {
11267 rtx r12_rtx = gen_rtx_REG (SImode, 12);
11268 int count = 0;
11269
11270 if (using_mfcr_multiple)
11271 {
11272 for (i = 0; i < 8; i++)
11273 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11274 count++;
11275 if (count == 0)
11276 abort ();
11277 }
11278
11279 if (using_mfcr_multiple && count > 1)
11280 {
11281 rtvec p;
11282 int ndx;
11283
11284 p = rtvec_alloc (count);
11285
11286 ndx = 0;
11287 for (i = 0; i < 8; i++)
11288 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11289 {
11290 rtvec r = rtvec_alloc (2);
11291 RTVEC_ELT (r, 0) = r12_rtx;
11292 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
11293 RTVEC_ELT (p, ndx) =
11294 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
11295 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
11296 ndx++;
11297 }
11298 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11299 if (ndx != count)
11300 abort ();
11301 }
11302 else
11303 for (i = 0; i < 8; i++)
11304 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11305 {
11306 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
11307 CR0_REGNO+i),
11308 r12_rtx));
11309 }
11310 }
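
/* The PARALLEL built above assembles to a single mtcrf whose mask has
   one bit per restored field; e.g. restoring cr2, cr3 and cr4 gives
   (a sketch):
       mtcrf 0x38,r12
   since the mask bit for field N is 1 << (7 - N).  */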
11311
11312 /* If this is V.4, unwind the stack pointer after all of the loads
11313 have been done. We need to emit a block here so that sched
11314 doesn't decide to move the sp change before the register restores
11315 (which may not have any obvious dependency on the stack). This
11316 doesn't hurt performance, because there is no scheduling that can
11317 be done after this point. */
11318 if (DEFAULT_ABI == ABI_V4)
11319 {
11320 if (frame_reg_rtx != sp_reg_rtx)
11321 rs6000_emit_stack_tie ();
11322
11323 if (use_backchain_to_restore_sp)
11324 {
11325 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
11326 }
11327 else if (sp_offset != 0)
11328 {
11329 emit_insn (TARGET_32BIT
11330 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
11331 GEN_INT (sp_offset))
11332 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
11333 GEN_INT (sp_offset)));
11334 }
11335 }
11336
11337 if (current_function_calls_eh_return)
11338 {
11339 rtx sa = EH_RETURN_STACKADJ_RTX;
11340 emit_insn (TARGET_32BIT
11341 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
11342 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
11343 }
11344
11345 if (!sibcall)
11346 {
11347 rtvec p;
11348 if (! restoring_FPRs_inline)
11349 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
11350 else
11351 p = rtvec_alloc (2);
11352
11353 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
11354 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
11355 gen_rtx_REG (Pmode,
11356 LINK_REGISTER_REGNUM));
11357
11358 /* If we have to restore more than two FP registers, branch to the
11359 restore function. It will return to our caller. */
11360 if (! restoring_FPRs_inline)
11361 {
11362 int i;
11363 char rname[30];
11364 const char *alloc_rname;
11365
11366 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
11367 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
11368 alloc_rname = ggc_strdup (rname);
11369 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
11370 gen_rtx_SYMBOL_REF (Pmode,
11371 alloc_rname));
11372
11373 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11374 {
11375 rtx addr, mem;
11376 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
11377 GEN_INT (info->fp_save_offset + 8*i));
11378 mem = gen_rtx_MEM (DFmode, addr);
11379 set_mem_alias_set (mem, rs6000_sr_alias_set);
11380
11381 RTVEC_ELT (p, i+3) =
11382 gen_rtx_SET (VOIDmode,
11383 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
11384 mem);
11385 }
11386 }
11387
11388 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
11389 }
11390 }
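
/* As with the prologue, for the small -mabi=v4 function sketched
   there the epilogue emitted above comes out roughly as:
       lwz r0,20(r1)        -- reload saved LR
       lwz r31,12(r1)
       mtlr r0
       addi r1,r1,16        -- pop the frame
       blr
   (illustrative offsets; the real ones come from rs6000_stack_info).  */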
11391
11392 /* Write function epilogue. */
11393
11394 static void
11395 rs6000_output_function_epilogue (file, size)
11396 FILE *file;
11397 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
11398 {
11399 rs6000_stack_t *info = rs6000_stack_info ();
11400
11401 if (! HAVE_epilogue)
11402 {
11403 rtx insn = get_last_insn ();
11404 /* If the last insn was a BARRIER, we don't have to write anything except
11405 the trace table. */
11406 if (GET_CODE (insn) == NOTE)
11407 insn = prev_nonnote_insn (insn);
11408 if (insn == 0 || GET_CODE (insn) != BARRIER)
11409 {
11410 /* This is slightly ugly, but at least we don't have two
11411 copies of the epilogue-emitting code. */
11412 start_sequence ();
11413
11414 /* A NOTE_INSN_DELETED is supposed to be at the start
11415 and end of the "toplevel" insn chain. */
11416 emit_note (0, NOTE_INSN_DELETED);
11417 rs6000_emit_epilogue (FALSE);
11418 emit_note (0, NOTE_INSN_DELETED);
11419
11420 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11421 {
11422 rtx insn;
11423 unsigned addr = 0;
11424 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
11425 {
11426 INSN_ADDRESSES_NEW (insn, addr);
11427 addr += 4;
11428 }
11429 }
11430
11431 if (TARGET_DEBUG_STACK)
11432 debug_rtx_list (get_insns (), 100);
11433 final (get_insns (), file, FALSE, FALSE);
11434 end_sequence ();
11435 }
11436 }
11437
11438 /* Output a traceback table here. See /usr/include/sys/debug.h for info
11439 on its format.
11440
11441 We don't output a traceback table if -finhibit-size-directive was
11442 used. The documentation for -finhibit-size-directive reads
11443 ``don't output a @code{.size} assembler directive, or anything
11444 else that would cause trouble if the function is split in the
11445 middle, and the two halves are placed at locations far apart in
11446 memory.'' The traceback table has this property, since it
11447 includes the offset from the start of the function to the
11448 traceback table itself.
11449
11450 System V.4 PowerPC (and the embedded ABI derived from it) uses a
11451 different traceback table. */
11452 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
11453 && rs6000_traceback != traceback_none)
11454 {
11455 const char *fname = NULL;
11456 const char *language_string = lang_hooks.name;
11457 int fixed_parms = 0, float_parms = 0, parm_info = 0;
11458 int i;
11459 int optional_tbtab;
11460
11461 if (rs6000_traceback == traceback_full)
11462 optional_tbtab = 1;
11463 else if (rs6000_traceback == traceback_part)
11464 optional_tbtab = 0;
11465 else
11466 optional_tbtab = !optimize_size && !TARGET_ELF;
11467
11468 if (optional_tbtab)
11469 {
11470 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
11471 while (*fname == '.') /* V.4 encodes . in the name */
11472 fname++;
11473
11474 /* Need label immediately before tbtab, so we can compute
11475 its offset from the function start. */
11476 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
11477 ASM_OUTPUT_LABEL (file, fname);
11478 }
11479
11480 /* The .tbtab pseudo-op can only be used for the first eight
11481 expressions, since it can't handle the possibly variable
11482 length fields that follow. However, if you omit the optional
11483 fields, the assembler outputs zeros for all optional fields
11484 anyway, giving each variable-length field its minimum length
11485 (as defined in sys/debug.h). Thus we cannot use the .tbtab
11486 pseudo-op at all. */
11487
11488 /* An all-zero word flags the start of the tbtab, for debuggers
11489 that have to find it by searching forward from the entry
11490 point or from the current pc. */
11491 fputs ("\t.long 0\n", file);
11492
11493 /* Tbtab format type. Use format type 0. */
11494 fputs ("\t.byte 0,", file);
11495
11496 /* Language type. Unfortunately, there doesn't seem to be any
11497 official way to get this info, so we use language_string. C
11498 is 0. C++ is 9. No number defined for Obj-C, so use the
11499 value for C for now. There is no official value for Java,
11500 although IBM appears to be using 13. There is no official value
11501 for Chill, so we've chosen 44 pseudo-randomly. */
11502 if (! strcmp (language_string, "GNU C")
11503 || ! strcmp (language_string, "GNU Objective-C"))
11504 i = 0;
11505 else if (! strcmp (language_string, "GNU F77"))
11506 i = 1;
11507 else if (! strcmp (language_string, "GNU Ada"))
11508 i = 3;
11509 else if (! strcmp (language_string, "GNU Pascal"))
11510 i = 2;
11511 else if (! strcmp (language_string, "GNU C++"))
11512 i = 9;
11513 else if (! strcmp (language_string, "GNU Java"))
11514 i = 13;
11515 else if (! strcmp (language_string, "GNU CHILL"))
11516 i = 44;
11517 else
11518 abort ();
11519 fprintf (file, "%d,", i);
11520
11521 /* 8 single bit fields: global linkage (not set for C extern linkage,
11522 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
11523 from start of procedure stored in tbtab, internal function, function
11524 has controlled storage, function has no toc, function uses fp,
11525 function logs/aborts fp operations. */
11526 /* Assume that fp operations are used if any fp reg must be saved. */
11527 fprintf (file, "%d,",
11528 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
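
/* E.g. (a worked example) with OPTIONAL_TBTAB set and at least one
   FPR saved, this prints (1 << 5) | (1 << 1) = 34: "offset stored in
   tbtab" plus "function uses fp", with all other bits clear.  */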
11529
11530 /* 6 bitfields: function is interrupt handler, name present in
11531 proc table, function calls alloca, on condition directives
11532 (controls stack walks, 3 bits), saves condition reg, saves
11533 link reg. */
11534 /* The `function calls alloca' bit seems to be set whenever reg 31 is
11535 set up as a frame pointer, even when there is no alloca call. */
11536 fprintf (file, "%d,",
11537 ((optional_tbtab << 6)
11538 | ((optional_tbtab & frame_pointer_needed) << 5)
11539 | (info->cr_save_p << 1)
11540 | (info->lr_save_p)));
11541
11542 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
11543 (6 bits). */
11544 fprintf (file, "%d,",
11545 (info->push_p << 7) | (64 - info->first_fp_reg_save));
11546
11547 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
11548 fprintf (file, "%d,", (32 - first_reg_to_save ()));
11549
11550 if (optional_tbtab)
11551 {
11552 /* Compute the parameter info from the function decl argument
11553 list. */
11554 tree decl;
11555 int next_parm_info_bit = 31;
11556
11557 for (decl = DECL_ARGUMENTS (current_function_decl);
11558 decl; decl = TREE_CHAIN (decl))
11559 {
11560 rtx parameter = DECL_INCOMING_RTL (decl);
11561 enum machine_mode mode = GET_MODE (parameter);
11562
11563 if (GET_CODE (parameter) == REG)
11564 {
11565 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
11566 {
11567 int bits;
11568
11569 float_parms++;
11570
11571 if (mode == SFmode)
11572 bits = 0x2;
11573 else if (mode == DFmode || mode == TFmode)
11574 bits = 0x3;
11575 else
11576 abort ();
11577
11578 /* If only one bit will fit, don't OR in this entry. */
11579 if (next_parm_info_bit > 0)
11580 parm_info |= (bits << (next_parm_info_bit - 1));
11581 next_parm_info_bit -= 2;
11582 }
11583 else
11584 {
11585 fixed_parms += ((GET_MODE_SIZE (mode)
11586 + (UNITS_PER_WORD - 1))
11587 / UNITS_PER_WORD);
11588 next_parm_info_bit -= 1;
11589 }
11590 }
11591 }
11592 }
11593
11594 /* Number of fixed point parameters. */
11595 /* This is actually the number of words of fixed-point parameters;
11596 thus an 8-byte struct counts as 2, and the maximum value is 8. */
11597 fprintf (file, "%d,", fixed_parms);
11598
11599 /* 2 bitfields: number of floating point parameters (7 bits), parameters
11600 all on stack. */
11601 /* This is actually the number of fp registers that hold parameters;
11602 and thus the maximum value is 13. */
11603 /* Set parameters on stack bit if parameters are not in their original
11604 registers, regardless of whether they are on the stack? Xlc
11605 seems to set the bit when not optimizing. */
11606 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
11607
11608 if (! optional_tbtab)
11609 return;
11610
11611 /* Optional fields follow. Some are variable length. */
11612
11613 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
11614 11 double float. */
11615 /* There is an entry for each parameter in a register, in the order that
11616 they occur in the parameter list. Any intervening arguments on the
11617 stack are ignored. If the list overflows a long (max possible length
11618 34 bits) then completely leave off all elements that don't fit. */
11619 /* Only emit this long if there was at least one parameter. */
11620 if (fixed_parms || float_parms)
11621 fprintf (file, "\t.long %d\n", parm_info);
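
/* Worked example (a sketch): for a function taking (int, double, float)
   the loop above leaves fixed_parms = 1, float_parms = 2 and
       parm_info = (0x3 << 29) | (0x2 << 27) = 0x70000000,
   read from bit 31 down as 0 (fixed), 11 (double), 10 (single).  */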
11622
11623 /* Offset from start of code to tb table. */
11624 fputs ("\t.long ", file);
11625 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
11626 #if TARGET_AIX
11627 RS6000_OUTPUT_BASENAME (file, fname);
11628 #else
11629 assemble_name (file, fname);
11630 #endif
11631 fputs ("-.", file);
11632 #if TARGET_AIX
11633 RS6000_OUTPUT_BASENAME (file, fname);
11634 #else
11635 assemble_name (file, fname);
11636 #endif
11637 putc ('\n', file);
11638
11639 /* Interrupt handler mask. */
11640 /* Omit this long, since we never set the interrupt handler bit
11641 above. */
11642
11643 /* Number of CTL (controlled storage) anchors. */
11644 /* Omit this long, since the has_ctl bit is never set above. */
11645
11646 /* Displacement into stack of each CTL anchor. */
11647 /* Omit this list of longs, because there are no CTL anchors. */
11648
11649 /* Length of function name. */
11650 if (*fname == '*')
11651 ++fname;
11652 fprintf (file, "\t.short %d\n", (int) strlen (fname));
11653
11654 /* Function name. */
11655 assemble_string (fname, strlen (fname));
11656
11657 /* Register for alloca automatic storage; this is always reg 31.
11658 Only emit this if the alloca bit was set above. */
11659 if (frame_pointer_needed)
11660 fputs ("\t.byte 31\n", file);
11661
11662 fputs ("\t.align 2\n", file);
11663 }
11664 }
11665 \f
11666 /* A C compound statement that outputs the assembler code for a thunk
11667 function, used to implement C++ virtual function calls with
11668 multiple inheritance. The thunk acts as a wrapper around a virtual
11669 function, adjusting the implicit object parameter before handing
11670 control off to the real function.
11671
11672 First, emit code to add the integer DELTA to the location that
11673 contains the incoming first argument. Assume that this argument
11674 contains a pointer, and is the one used to pass the `this' pointer
11675 in C++. This is the incoming argument *before* the function
11676 prologue, e.g. `%o0' on a sparc. The addition must preserve the
11677 values of all other incoming arguments.
11678
11679 After the addition, emit code to jump to FUNCTION, which is a
11680 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
11681 not touch the return address. Hence returning from FUNCTION will
11682 return to whoever called the current `thunk'.
11683
11684 The effect must be as if FUNCTION had been called directly with the
11685 adjusted first argument. This macro is responsible for emitting
11686 all of the code for a thunk function; output_function_prologue()
11687 and output_function_epilogue() are not invoked.
11688
11689 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
11690 been extracted from it.) It might possibly be useful on some
11691 targets, but probably not.
11692
11693 If you do not define this macro, the target-independent code in the
11694 C++ frontend will generate a less efficient heavyweight thunk that
11695 calls FUNCTION instead of jumping to it. The generic approach does
11696 not support varargs. */
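
/* For the common DELTA-only case the thunk emitted below boils down
   to two instructions (a sketch, 32-bit, with `this' arriving in r3):
       addi r3,r3,DELTA
       b    function
   A nonzero VCALL_OFFSET adds the two loads and the add through r12.  */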
11697
11698 static void
11699 rs6000_output_mi_thunk (file, thunk_fndecl, delta, vcall_offset, function)
11700 FILE *file;
11701 tree thunk_fndecl ATTRIBUTE_UNUSED;
11702 HOST_WIDE_INT delta;
11703 HOST_WIDE_INT vcall_offset;
11704 tree function;
11705 {
11706 rtx this, insn, funexp;
11707
11708 reload_completed = 1;
11709 no_new_pseudos = 1;
11710
11711 /* Mark the end of the (empty) prologue. */
11712 emit_note (NULL, NOTE_INSN_PROLOGUE_END);
11713
11714 /* Find the "this" pointer. If the function returns a structure,
11715 the structure return pointer is in r3. */
11716 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
11717 this = gen_rtx_REG (Pmode, 4);
11718 else
11719 this = gen_rtx_REG (Pmode, 3);
11720
11721 /* Apply the constant offset, if required. */
11722 if (delta)
11723 {
11724 rtx delta_rtx = GEN_INT (delta);
11725 emit_insn (TARGET_32BIT
11726 ? gen_addsi3 (this, this, delta_rtx)
11727 : gen_adddi3 (this, this, delta_rtx));
11728 }
11729
11730 /* Apply the offset from the vtable, if required. */
11731 if (vcall_offset)
11732 {
11733 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
11734 rtx tmp = gen_rtx_REG (Pmode, 12);
11735
11736 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
11737 emit_insn (TARGET_32BIT
11738 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
11739 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
11740 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
11741 emit_insn (TARGET_32BIT
11742 ? gen_addsi3 (this, this, tmp)
11743 : gen_adddi3 (this, this, tmp));
11744 }
11745
11746 /* Generate a tail call to the target function. */
11747 if (!TREE_USED (function))
11748 {
11749 assemble_external (function);
11750 TREE_USED (function) = 1;
11751 }
11752 funexp = XEXP (DECL_RTL (function), 0);
11753 SYMBOL_REF_FLAGS (funexp) &= ~SYMBOL_FLAG_LOCAL;
11754 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
11755
11756 #if TARGET_MACHO
11757 if (MACHOPIC_INDIRECT)
11758 funexp = machopic_indirect_call_target (funexp);
11759 #endif
11760
11761 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
11762 generate sibcall RTL explicitly to avoid constraint abort. */
11763 insn = emit_call_insn (
11764 gen_rtx_PARALLEL (VOIDmode,
11765 gen_rtvec (4,
11766 gen_rtx_CALL (VOIDmode,
11767 funexp, const0_rtx),
11768 gen_rtx_USE (VOIDmode, const0_rtx),
11769 gen_rtx_USE (VOIDmode,
11770 gen_rtx_REG (SImode,
11771 LINK_REGISTER_REGNUM)),
11772 gen_rtx_RETURN (VOIDmode))));
11773 SIBLING_CALL_P (insn) = 1;
11774 emit_barrier ();
11775
11776 /* Run just enough of rest_of_compilation to get the insns emitted.
11777 There's not really enough bulk here to make other passes such as
11778 instruction scheduling worth while. Note that use_thunk calls
11779 assemble_start_function and assemble_end_function. */
11780 insn = get_insns ();
11781 shorten_branches (insn);
11782 final_start_function (insn, file, 1);
11783 final (insn, file, 1, 0);
11784 final_end_function ();
11785
11786 reload_completed = 0;
11787 no_new_pseudos = 0;
11788 }
11789 \f
11790 /* A quick summary of the various types of 'constant-pool tables'
11791 under PowerPC:
11792
11793 Target Flags Name One table per
11794 AIX (none) AIX TOC object file
11795 AIX -mfull-toc AIX TOC object file
11796 AIX -mminimal-toc AIX minimal TOC translation unit
11797 SVR4/EABI (none) SVR4 SDATA object file
11798 SVR4/EABI -fpic SVR4 pic object file
11799 SVR4/EABI -fPIC SVR4 PIC translation unit
11800 SVR4/EABI -mrelocatable EABI TOC function
11801 SVR4/EABI -maix AIX TOC object file
11802 SVR4/EABI -maix -mminimal-toc
11803 AIX minimal TOC translation unit
11804
11805 Name Reg. Set by entries contains:
11806 made by addrs? fp? sum?
11807
11808 AIX TOC 2 crt0 as Y option option
11809 AIX minimal TOC 30 prolog gcc Y Y option
11810 SVR4 SDATA 13 crt0 gcc N Y N
11811 SVR4 pic 30 prolog ld Y not yet N
11812 SVR4 PIC 30 prolog gcc Y option option
11813 EABI TOC 30 prolog gcc Y option option
11814
11815 */
11816
11817 /* Hash functions for the hash table. */
11818
11819 static unsigned
11820 rs6000_hash_constant (k)
11821 rtx k;
11822 {
11823 enum rtx_code code = GET_CODE (k);
11824 enum machine_mode mode = GET_MODE (k);
11825 unsigned result = (code << 3) ^ mode;
11826 const char *format;
11827 int flen, fidx;
11828
11829 format = GET_RTX_FORMAT (code);
11830 flen = strlen (format);
11831 fidx = 0;
11832
11833 switch (code)
11834 {
11835 case LABEL_REF:
11836 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
11837
11838 case CONST_DOUBLE:
11839 if (mode != VOIDmode)
11840 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
11841 flen = 2;
11842 break;
11843
11844 case CODE_LABEL:
11845 fidx = 3;
11846 break;
11847
11848 default:
11849 break;
11850 }
11851
11852 for (; fidx < flen; fidx++)
11853 switch (format[fidx])
11854 {
11855 case 's':
11856 {
11857 unsigned i, len;
11858 const char *str = XSTR (k, fidx);
11859 len = strlen (str);
11860 result = result * 613 + len;
11861 for (i = 0; i < len; i++)
11862 result = result * 613 + (unsigned) str[i];
11863 break;
11864 }
11865 case 'u':
11866 case 'e':
11867 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
11868 break;
11869 case 'i':
11870 case 'n':
11871 result = result * 613 + (unsigned) XINT (k, fidx);
11872 break;
11873 case 'w':
11874 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
11875 result = result * 613 + (unsigned) XWINT (k, fidx);
11876 else
11877 {
11878 size_t i;
11879 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
11880 result = result * 613 + (unsigned) (XWINT (k, fidx)
11881 >> CHAR_BIT * i);
11882 }
11883 break;
11884 case '0':
11885 break;
11886 default:
11887 abort ();
11888 }
11889
11890 return result;
11891 }
11892
11893 static unsigned
11894 toc_hash_function (hash_entry)
11895 const void * hash_entry;
11896 {
11897 const struct toc_hash_struct *thc =
11898 (const struct toc_hash_struct *) hash_entry;
11899 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
11900 }
11901
11902 /* Compare H1 and H2 for equivalence. */
11903
11904 static int
11905 toc_hash_eq (h1, h2)
11906 const void * h1;
11907 const void * h2;
11908 {
11909 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
11910 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
11911
11912 if (((const struct toc_hash_struct *) h1)->key_mode
11913 != ((const struct toc_hash_struct *) h2)->key_mode)
11914 return 0;
11915
11916 return rtx_equal_p (r1, r2);
11917 }
11918
11919 /* These are the names given by the C++ front-end to vtables, and
11920 vtable-like objects. Ideally, this logic should not be here;
11921 instead, there should be some programmatic way of inquiring as
11922 to whether or not an object is a vtable. */
11923
11924 #define VTABLE_NAME_P(NAME) \
11925 (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0 \
11926 || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0 \
11927 || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0 \
11928 || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
11929
11930 void
11931 rs6000_output_symbol_ref (file, x)
11932 FILE *file;
11933 rtx x;
11934 {
11935 /* Currently C++ TOC references to vtables can be emitted before it
11936 is decided whether the vtable is public or private. If this is
11937 the case, then the linker will eventually complain that there is
11938 a reference to an unknown section. Thus, for vtables only, we
11939 make the TOC reference refer to the symbol rather than the
11940 section. */
11941 const char *name = XSTR (x, 0);
11942
11943 if (VTABLE_NAME_P (name))
11944 {
11945 RS6000_OUTPUT_BASENAME (file, name);
11946 }
11947 else
11948 assemble_name (file, name);
11949 }
11950
11951 /* Output a TOC entry. We derive the entry name from what is being
11952 written. */
11953
11954 void
11955 output_toc (file, x, labelno, mode)
11956 FILE *file;
11957 rtx x;
11958 int labelno;
11959 enum machine_mode mode;
11960 {
11961 char buf[256];
11962 const char *name = buf;
11963 const char *real_name;
11964 rtx base = x;
11965 int offset = 0;
11966
11967 if (TARGET_NO_TOC)
11968 abort ();
11969
11970 /* When the linker won't eliminate them, don't output duplicate
11971 TOC entries (this happens on AIX if there is any kind of TOC,
11972 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
11973 CODE_LABELs. */
11974 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
11975 {
11976 struct toc_hash_struct *h;
11977 void * * found;
11978
11979 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
11980 time because GGC is not initialised at that point. */
11981 if (toc_hash_table == NULL)
11982 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
11983 toc_hash_eq, NULL);
11984
11985 h = ggc_alloc (sizeof (*h));
11986 h->key = x;
11987 h->key_mode = mode;
11988 h->labelno = labelno;
11989
11990 found = htab_find_slot (toc_hash_table, h, 1);
11991 if (*found == NULL)
11992 *found = h;
11993 else /* This is indeed a duplicate; emit a ".set" directive
11994 making this label an alias for the earlier entry's label. */
11995 {
11996 fputs ("\t.set ", file);
11997 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
11998 fprintf (file, "%d,", labelno);
11999 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
12000 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
12001 found)->labelno));
12002 return;
12003 }
12004 }
12005
12006 /* If we're going to put a double constant in the TOC, make sure it's
12007 aligned properly when strict alignment is on. */
12008 if (GET_CODE (x) == CONST_DOUBLE
12009 && STRICT_ALIGNMENT
12010 && GET_MODE_BITSIZE (mode) >= 64
12011 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC))
12012 ASM_OUTPUT_ALIGN (file, 3);
12014
12015 (*targetm.asm_out.internal_label) (file, "LC", labelno);
12016
12017 /* Handle FP constants specially. Note that if we have a minimal
12018 TOC, things we put here aren't actually in the TOC, so we can allow
12019 FP constants. */
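/* For example, on 32-bit AIX without -mminimal-toc, the DFmode
   constant 1.0 (image 0x3ff00000_00000000) is emitted below as
   "\t.tc FD_3ff00000_0[TC],0x3ff00000,0x0". */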
12020 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
12021 {
12022 REAL_VALUE_TYPE rv;
12023 long k[4];
12024
12025 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
12026 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
12027
12028 if (TARGET_64BIT)
12029 {
12030 if (TARGET_MINIMAL_TOC)
12031 fputs (DOUBLE_INT_ASM_OP, file);
12032 else
12033 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
12034 k[0] & 0xffffffff, k[1] & 0xffffffff,
12035 k[2] & 0xffffffff, k[3] & 0xffffffff);
12036 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
12037 k[0] & 0xffffffff, k[1] & 0xffffffff,
12038 k[2] & 0xffffffff, k[3] & 0xffffffff);
12039 return;
12040 }
12041 else
12042 {
12043 if (TARGET_MINIMAL_TOC)
12044 fputs ("\t.long ", file);
12045 else
12046 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
12047 k[0] & 0xffffffff, k[1] & 0xffffffff,
12048 k[2] & 0xffffffff, k[3] & 0xffffffff);
12049 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
12050 k[0] & 0xffffffff, k[1] & 0xffffffff,
12051 k[2] & 0xffffffff, k[3] & 0xffffffff);
12052 return;
12053 }
12054 }
12055 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
12056 {
12057 REAL_VALUE_TYPE rv;
12058 long k[2];
12059
12060 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
12061 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
12062
12063 if (TARGET_64BIT)
12064 {
12065 if (TARGET_MINIMAL_TOC)
12066 fputs (DOUBLE_INT_ASM_OP, file);
12067 else
12068 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
12069 k[0] & 0xffffffff, k[1] & 0xffffffff);
12070 fprintf (file, "0x%lx%08lx\n",
12071 k[0] & 0xffffffff, k[1] & 0xffffffff);
12072 return;
12073 }
12074 else
12075 {
12076 if (TARGET_MINIMAL_TOC)
12077 fputs ("\t.long ", file);
12078 else
12079 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
12080 k[0] & 0xffffffff, k[1] & 0xffffffff);
12081 fprintf (file, "0x%lx,0x%lx\n",
12082 k[0] & 0xffffffff, k[1] & 0xffffffff);
12083 return;
12084 }
12085 }
12086 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
12087 {
12088 REAL_VALUE_TYPE rv;
12089 long l;
12090
12091 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
12092 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
12093
12094 if (TARGET_64BIT)
12095 {
12096 if (TARGET_MINIMAL_TOC)
12097 fputs (DOUBLE_INT_ASM_OP, file);
12098 else
12099 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
12100 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
12101 return;
12102 }
12103 else
12104 {
12105 if (TARGET_MINIMAL_TOC)
12106 fputs ("\t.long ", file);
12107 else
12108 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
12109 fprintf (file, "0x%lx\n", l & 0xffffffff);
12110 return;
12111 }
12112 }
12113 else if (GET_MODE (x) == VOIDmode
12114 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
12115 {
12116 unsigned HOST_WIDE_INT low;
12117 HOST_WIDE_INT high;
12118
12119 if (GET_CODE (x) == CONST_DOUBLE)
12120 {
12121 low = CONST_DOUBLE_LOW (x);
12122 high = CONST_DOUBLE_HIGH (x);
12123 }
12124 else
12125 #if HOST_BITS_PER_WIDE_INT == 32
12126 {
12127 low = INTVAL (x);
12128 high = (low & 0x80000000) ? ~0 : 0;
12129 }
12130 #else
12131 {
12132 low = INTVAL (x) & 0xffffffff;
12133 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
12134 }
12135 #endif
12136
12137 /* TOC entries are always Pmode-sized, and since this is a
12138 big-endian machine, smaller integer constants put in the TOC
12139 must be padded, i.e. shifted up into the most significant bits.
12140 (This is still a win over putting the constants in
12141 a separate constant pool, because then we'd have
12142 to have both a TOC entry _and_ the actual constant.)
12143 
12144 For a 32-bit target, CONST_INT values are loaded and shifted
12145 entirely within `low' and can be stored in one TOC entry. */
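/* E.g. on a 64-bit target an SImode 0x1234 becomes the 64-bit TOC
   entry 0x0000123400000000 after the shift below. */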
12146
12147 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
12148 abort ();	/* It would be easy to make this work, but it doesn't now. */
12149
12150 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
12151 {
12152 #if HOST_BITS_PER_WIDE_INT == 32
12153 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
12154 POINTER_SIZE, &low, &high, 0);
12155 #else
12156 low |= high << 32;
12157 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
12158 high = (HOST_WIDE_INT) low >> 32;
12159 low &= 0xffffffff;
12160 #endif
12161 }
12162
12163 if (TARGET_64BIT)
12164 {
12165 if (TARGET_MINIMAL_TOC)
12166 fputs (DOUBLE_INT_ASM_OP, file);
12167 else
12168 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
12169 (long) high & 0xffffffff, (long) low & 0xffffffff);
12170 fprintf (file, "0x%lx%08lx\n",
12171 (long) high & 0xffffffff, (long) low & 0xffffffff);
12172 return;
12173 }
12174 else
12175 {
12176 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
12177 {
12178 if (TARGET_MINIMAL_TOC)
12179 fputs ("\t.long ", file);
12180 else
12181 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
12182 (long) high & 0xffffffff, (long) low & 0xffffffff);
12183 fprintf (file, "0x%lx,0x%lx\n",
12184 (long) high & 0xffffffff, (long) low & 0xffffffff);
12185 }
12186 else
12187 {
12188 if (TARGET_MINIMAL_TOC)
12189 fputs ("\t.long ", file);
12190 else
12191 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
12192 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
12193 }
12194 return;
12195 }
12196 }
12197
12198 if (GET_CODE (x) == CONST)
12199 {
12200 if (GET_CODE (XEXP (x, 0)) != PLUS)
12201 abort ();
12202
12203 base = XEXP (XEXP (x, 0), 0);
12204 offset = INTVAL (XEXP (XEXP (x, 0), 1));
12205 }
12206
12207 if (GET_CODE (base) == SYMBOL_REF)
12208 name = XSTR (base, 0);
12209 else if (GET_CODE (base) == LABEL_REF)
12210 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
12211 else if (GET_CODE (base) == CODE_LABEL)
12212 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
12213 else
12214 abort ();
12215
12216 real_name = (*targetm.strip_name_encoding) (name);
12217 if (TARGET_MINIMAL_TOC)
12218 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
12219 else
12220 {
12221 fprintf (file, "\t.tc %s", real_name);
12222
12223 if (offset < 0)
12224 fprintf (file, ".N%d", - offset);
12225 else if (offset)
12226 fprintf (file, ".P%d", offset);
12227
12228 fputs ("[TC],", file);
12229 }
12230
12231 /* Currently C++ TOC references to vtables can be emitted before it
12232 is decided whether the vtable is public or private. If this is
12233 the case, the linker will eventually complain that there is a
12234 TOC reference to an unknown section. Thus, for vtables only,
12235 we emit the TOC reference against the symbol and not the
12236 section. */
12237 if (VTABLE_NAME_P (name))
12238 {
12239 RS6000_OUTPUT_BASENAME (file, name);
12240 if (offset < 0)
12241 fprintf (file, "%d", offset);
12242 else if (offset > 0)
12243 fprintf (file, "+%d", offset);
12244 }
12245 else
12246 output_addr_const (file, x);
12247 putc ('\n', file);
12248 }
12249 \f
12250 /* Output an assembler pseudo-op to write an ASCII string of N characters
12251 starting at P to FILE.
12252
12253 On the RS/6000, we have to do this using the .byte operation and
12254 write out special characters outside the quoted string.
12255 Also, the assembler is broken; very long strings are truncated,
12256 so we must artificially break them up early. */
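/* For example, the three characters "hi\n" are emitted as:
	.byte "hi"
	.byte 10
   */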
12257
12258 void
12259 output_ascii (file, p, n)
12260 FILE *file;
12261 const char *p;
12262 int n;
12263 {
12264 char c;
12265 int i, count_string;
12266 const char *for_string = "\t.byte \"";
12267 const char *for_decimal = "\t.byte ";
12268 const char *to_close = NULL;
12269
12270 count_string = 0;
12271 for (i = 0; i < n; i++)
12272 {
12273 c = *p++;
12274 if (c >= ' ' && c < 0177)
12275 {
12276 if (for_string)
12277 fputs (for_string, file);
12278 putc (c, file);
12279
12280 /* Write two quotes to get one. */
12281 if (c == '"')
12282 {
12283 putc (c, file);
12284 ++count_string;
12285 }
12286
12287 for_string = NULL;
12288 for_decimal = "\"\n\t.byte ";
12289 to_close = "\"\n";
12290 ++count_string;
12291
12292 if (count_string >= 512)
12293 {
12294 fputs (to_close, file);
12295
12296 for_string = "\t.byte \"";
12297 for_decimal = "\t.byte ";
12298 to_close = NULL;
12299 count_string = 0;
12300 }
12301 }
12302 else
12303 {
12304 if (for_decimal)
12305 fputs (for_decimal, file);
12306 fprintf (file, "%d", c);
12307
12308 for_string = "\n\t.byte \"";
12309 for_decimal = ", ";
12310 to_close = "\n";
12311 count_string = 0;
12312 }
12313 }
12314
12315 /* Now close the string if we have written one. Then end the line. */
12316 if (to_close)
12317 fputs (to_close, file);
12318 }
12319 \f
12320 /* Generate a unique section name for FILENAME for a section type
12321 represented by SECTION_DESC. Output goes into BUF.
12322
12323 SECTION_DESC can be any string, as long as it is different for each
12324 possible section type.
12325
12326 We name the section in the same manner as xlc. The name begins with an
12327 underscore followed by the filename (after stripping any leading directory
12328 names) with the last period replaced by the string SECTION_DESC. If
12329 FILENAME does not contain a period, SECTION_DESC is appended to the end of
12330 the name. */
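/* For example, FILENAME "dir/foo.c" with SECTION_DESC "data" (a
   made-up descriptor, for illustration) yields "_foodata". */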
12331
12332 void
12333 rs6000_gen_section_name (buf, filename, section_desc)
12334 char **buf;
12335 const char *filename;
12336 const char *section_desc;
12337 {
12338 const char *q, *after_last_slash, *last_period = 0;
12339 char *p;
12340 int len;
12341
12342 after_last_slash = filename;
12343 for (q = filename; *q; q++)
12344 {
12345 if (*q == '/')
12346 after_last_slash = q + 1;
12347 else if (*q == '.')
12348 last_period = q;
12349 }
12350
12351 len = strlen (after_last_slash) + strlen (section_desc) + 2;
12352 *buf = (char *) xmalloc (len);
12353
12354 p = *buf;
12355 *p++ = '_';
12356
12357 for (q = after_last_slash; *q; q++)
12358 {
12359 if (q == last_period)
12360 {
12361 strcpy (p, section_desc);
12362 p += strlen (section_desc);
12363 break;
12364 }
12365
12366 else if (ISALNUM (*q))
12367 *p++ = *q;
12368 }
12369
12370 if (last_period == 0)
12371 strcpy (p, section_desc);
12372 else
12373 *p = '\0';
12374 }
12375 \f
12376 /* Emit profile function. */
12377
12378 void
12379 output_profile_hook (labelno)
12380 int labelno ATTRIBUTE_UNUSED;
12381 {
12382 if (TARGET_PROFILE_KERNEL)
12383 return;
12384
12385 if (DEFAULT_ABI == ABI_AIX)
12386 {
12387 #ifdef NO_PROFILE_COUNTERS
12388 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
12389 #else
12390 char buf[30];
12391 const char *label_name;
12392 rtx fun;
12393
12394 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
12395 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
12396 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
12397
12398 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
12399 fun, Pmode);
12400 #endif
12401 }
12402 else if (DEFAULT_ABI == ABI_DARWIN)
12403 {
12404 const char *mcount_name = RS6000_MCOUNT;
12405 int caller_addr_regno = LINK_REGISTER_REGNUM;
12406
12407 /* Be conservative and always set this, at least for now. */
12408 current_function_uses_pic_offset_table = 1;
12409
12410 #if TARGET_MACHO
12411 /* For PIC code, set up a stub and collect the caller's address
12412 from r0, which is where the prologue puts it. */
12413 if (MACHOPIC_INDIRECT)
12414 {
12415 mcount_name = machopic_stub_name (mcount_name);
12416 if (current_function_uses_pic_offset_table)
12417 caller_addr_regno = 0;
12418 }
12419 #endif
12420 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
12421 0, VOIDmode, 1,
12422 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
12423 }
12424 }
12425
12426 /* Write function profiler code. */
12427
12428 void
12429 output_function_profiler (file, labelno)
12430 FILE *file;
12431 int labelno;
12432 {
12433 char buf[100];
12434 int save_lr = 8;
12435
12436 switch (DEFAULT_ABI)
12437 {
12438 default:
12439 abort ();
12440
12441 case ABI_V4:
12442 save_lr = 4;
12443 if (!TARGET_32BIT)
12444 {
12445 warning ("no profiling of 64-bit code for this ABI");
12446 return;
12447 }
12448 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
12449 fprintf (file, "\tmflr %s\n", reg_names[0]);
12450 if (flag_pic == 1)
12451 {
12452 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
12453 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
12454 reg_names[0], save_lr, reg_names[1]);
12455 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
12456 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
12457 assemble_name (file, buf);
12458 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
12459 }
12460 else if (flag_pic > 1)
12461 {
12462 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
12463 reg_names[0], save_lr, reg_names[1]);
12464 /* Now, we need to get the address of the label. */
12465 fputs ("\tbl 1f\n\t.long ", file);
12466 assemble_name (file, buf);
12467 fputs ("-.\n1:", file);
12468 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
12469 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
12470 reg_names[0], reg_names[11]);
12471 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
12472 reg_names[0], reg_names[0], reg_names[11]);
12473 }
12474 else
12475 {
12476 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
12477 assemble_name (file, buf);
12478 fputs ("@ha\n", file);
12479 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
12480 reg_names[0], save_lr, reg_names[1]);
12481 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
12482 assemble_name (file, buf);
12483 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
12484 }
12485
12486 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
12487 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
12488 break;
12489
12490 case ABI_AIX:
12491 case ABI_DARWIN:
12492 if (!TARGET_PROFILE_KERNEL)
12493 {
12494 /* Don't do anything, done in output_profile_hook (). */
12495 }
12496 else
12497 {
12498 if (TARGET_32BIT)
12499 abort ();
12500
12501 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
12502 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
12503
12504 if (current_function_needs_context)
12505 {
12506 asm_fprintf (file, "\tstd %s,24(%s)\n",
12507 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
12508 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
12509 asm_fprintf (file, "\tld %s,24(%s)\n",
12510 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
12511 }
12512 else
12513 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
12514 }
12515 break;
12516 }
12517 }
12518
12519 \f
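/* Tell the scheduler to use the DFA-based pipeline description for
   insn scheduling on this target. */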
12520 static int
12521 rs6000_use_dfa_pipeline_interface ()
12522 {
12523 return 1;
12524 }
12525
12526 /* Power4 load update and store update instructions are cracked into a
12527 load or store and an integer insn which are executed in the same cycle.
12528 Branches have their own dispatch slot which does not count against the
12529 GCC issue rate, but it changes the program flow so there are no other
12530 instructions to issue in this cycle. */
12531
12532 static int
12533 rs6000_variable_issue (stream, verbose, insn, more)
12534 FILE *stream ATTRIBUTE_UNUSED;
12535 int verbose ATTRIBUTE_UNUSED;
12536 rtx insn;
12537 int more;
12538 {
12539 if (GET_CODE (PATTERN (insn)) == USE
12540 || GET_CODE (PATTERN (insn)) == CLOBBER)
12541 return more;
12542
12543 if (rs6000_cpu == PROCESSOR_POWER4)
12544 {
12545 enum attr_type type = get_attr_type (insn);
12546 if (type == TYPE_LOAD_EXT_U || type == TYPE_LOAD_EXT_UX
12547 || type == TYPE_LOAD_UX || type == TYPE_STORE_UX)
12548 return 0;
12549 else if (type == TYPE_LOAD_U || type == TYPE_STORE_U
12550 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
12551 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
12552 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
12553 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
12554 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
12555 || type == TYPE_IDIV || type == TYPE_LDIV)
12556 return more > 2 ? more - 2 : 0;
12557 }
12558
12559 return more - 1;
12560 }
12561
12562 /* Adjust the cost of a scheduling dependency. Return the new cost of
12563 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
12564
12565 static int
12566 rs6000_adjust_cost (insn, link, dep_insn, cost)
12567 rtx insn;
12568 rtx link;
12569 rtx dep_insn ATTRIBUTE_UNUSED;
12570 int cost;
12571 {
12572 if (! recog_memoized (insn))
12573 return 0;
12574
12575 /* Anti- and output dependencies are free; only a true data
12576 dependency (REG_NOTE_KIND of 0) gets the adjustments below. */
12577 if (REG_NOTE_KIND (link) != 0)
12578 return 0;
12579 {
12580 /* Data dependency; DEP_INSN writes a register that INSN reads
12581 some cycles later. */
12582 switch (get_attr_type (insn))
12583 {
12584 case TYPE_JMPREG:
12585 /* Tell the first scheduling pass about the latency between
12586 a mtctr and bctr (and mtlr and br/blr). The first
12587 scheduling pass will not know about this latency since
12588 the mtctr instruction, which has the latency associated
12589 to it, will be generated by reload. */
12590 return TARGET_POWER ? 5 : 4;
12591 case TYPE_BRANCH:
12592 /* Leave some extra cycles between a compare and its
12593 dependent branch, to inhibit expensive mispredicts. */
12594 if ((rs6000_cpu_attr == CPU_PPC603
12595 || rs6000_cpu_attr == CPU_PPC604
12596 || rs6000_cpu_attr == CPU_PPC604E
12597 || rs6000_cpu_attr == CPU_PPC620
12598 || rs6000_cpu_attr == CPU_PPC630
12599 || rs6000_cpu_attr == CPU_PPC750
12600 || rs6000_cpu_attr == CPU_PPC7400
12601 || rs6000_cpu_attr == CPU_PPC7450
12602 || rs6000_cpu_attr == CPU_POWER4)
12603 && recog_memoized (dep_insn)
12604 && (INSN_CODE (dep_insn) >= 0)
12605 && (get_attr_type (dep_insn) == TYPE_CMP
12606 || get_attr_type (dep_insn) == TYPE_COMPARE
12607 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
12608 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
12609 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
12610 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
12611 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
12612 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
12613 return cost + 2;
12614 default:
12615 break;
12616 }
12617 /* Fall out to return default cost. */
12618 }
12619
12620 return cost;
12621 }
12622
12623 /* Adjust the scheduling priority of INSN and return the new
12624 priority. Reduce the priority to execute INSN earlier, increase
12625 it to execute INSN later. */
12628
12629 static int
12630 rs6000_adjust_priority (insn, priority)
12631 rtx insn ATTRIBUTE_UNUSED;
12632 int priority;
12633 {
12634 /* On machines (like the 750) which have asymmetric integer units,
12635 where one integer unit can do multiply and divides and the other
12636 can't, reduce the priority of multiply/divide so it is scheduled
12637 before other integer operations. */
12638
12639 #if 0
12640 if (! INSN_P (insn))
12641 return priority;
12642
12643 if (GET_CODE (PATTERN (insn)) == USE)
12644 return priority;
12645
12646 switch (rs6000_cpu_attr) {
12647 case CPU_PPC750:
12648 switch (get_attr_type (insn))
12649 {
12650 default:
12651 break;
12652
12653 case TYPE_IMUL:
12654 case TYPE_IDIV:
12655 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
12656 priority, priority);
12657 if (priority >= 0 && priority < 0x01000000)
12658 priority >>= 3;
12659 break;
12660 }
12661 }
12662 #endif
12663
12664 return priority;
12665 }
12666
12667 /* Return how many instructions the machine can issue per cycle. */
12668
12669 static int
12670 rs6000_issue_rate ()
12671 {
12672 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
12673 if (!reload_completed)
12674 return 1;
12675
12676 switch (rs6000_cpu_attr) {
12677 case CPU_RIOS1: /* ? */
12678 case CPU_RS64A:
12679 case CPU_PPC601: /* ? */
12680 case CPU_PPC7450:
12681 return 3;
12682 case CPU_PPC440:
12683 case CPU_PPC603:
12684 case CPU_PPC750:
12685 case CPU_PPC7400:
12686 case CPU_PPC8540:
12687 return 2;
12688 case CPU_RIOS2:
12689 case CPU_PPC604:
12690 case CPU_PPC604E:
12691 case CPU_PPC620:
12692 case CPU_PPC630:
12693 case CPU_POWER4:
12694 return 4;
12695 default:
12696 return 1;
12697 }
12698 }
12699
12700 /* Return how many instructions to look ahead for better insn
12701 scheduling. */
12702
12703 static int
12704 rs6000_use_sched_lookahead ()
12705 {
12706 if (rs6000_cpu_attr == CPU_PPC8540)
12707 return 4;
12708 return 0;
12709 }
12710
12711 \f
12712 /* Length in units (bytes) of the trampoline for entering a nested function. */
12713
12714 int
12715 rs6000_trampoline_size ()
12716 {
12717 int ret = 0;
12718
12719 switch (DEFAULT_ABI)
12720 {
12721 default:
12722 abort ();
12723
12724 case ABI_AIX:
12725 ret = (TARGET_32BIT) ? 12 : 24;
12726 break;
12727
12728 case ABI_DARWIN:
12729 case ABI_V4:
12730 ret = (TARGET_32BIT) ? 40 : 48;
12731 break;
12732 }
12733
12734 return ret;
12735 }
12736
12737 /* Emit RTL insns to initialize the variable parts of a trampoline.
12738 FNADDR is an RTX for the address of the function's pure code.
12739 CXT is an RTX for the static chain value for the function. */
12740
12741 void
12742 rs6000_initialize_trampoline (addr, fnaddr, cxt)
12743 rtx addr;
12744 rtx fnaddr;
12745 rtx cxt;
12746 {
12747 enum machine_mode pmode = Pmode;
12748 int regsize = (TARGET_32BIT) ? 4 : 8;
12749 rtx ctx_reg = force_reg (pmode, cxt);
12750
12751 switch (DEFAULT_ABI)
12752 {
12753 default:
12754 abort ();
12755
12756 /* Macros to shorten the code expansions below. */
12757 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
12758 #define MEM_PLUS(addr,offset) \
12759 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
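/* MEM_DEREF (X) is the Pmode word at address X; MEM_PLUS (X, O) is
   the Pmode word at address X + O. */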
12760
12761 /* Under AIX, just build the 3-word function descriptor. */
12762 case ABI_AIX:
12763 {
12764 rtx fn_reg = gen_reg_rtx (pmode);
12765 rtx toc_reg = gen_reg_rtx (pmode);
12766 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
12767 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
12768 emit_move_insn (MEM_DEREF (addr), fn_reg);
12769 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
12770 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
12771 }
12772 break;
12773
12774 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
12775 case ABI_DARWIN:
12776 case ABI_V4:
12777 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
12778 FALSE, VOIDmode, 4,
12779 addr, pmode,
12780 GEN_INT (rs6000_trampoline_size ()), SImode,
12781 fnaddr, pmode,
12782 ctx_reg, pmode);
12783 break;
12784 }
12785
12786 return;
12787 }
12788
12789 \f
12790 /* Table of valid machine attributes. */
12791
12792 const struct attribute_spec rs6000_attribute_table[] =
12793 {
12794 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
12795 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
12796 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
12797 { NULL, 0, 0, false, false, false, NULL }
12798 };
12799
12800 /* Handle a "longcall" or "shortcall" attribute; arguments as in
12801 struct attribute_spec.handler. */
12802
12803 static tree
12804 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
12805 tree *node;
12806 tree name;
12807 tree args ATTRIBUTE_UNUSED;
12808 int flags ATTRIBUTE_UNUSED;
12809 bool *no_add_attrs;
12810 {
12811 if (TREE_CODE (*node) != FUNCTION_TYPE
12812 && TREE_CODE (*node) != FIELD_DECL
12813 && TREE_CODE (*node) != TYPE_DECL)
12814 {
12815 warning ("`%s' attribute only applies to functions",
12816 IDENTIFIER_POINTER (name));
12817 *no_add_attrs = true;
12818 }
12819
12820 return NULL_TREE;
12821 }
12822
12823 /* Set longcall attributes on all functions declared when
12824 rs6000_default_long_calls is true. */
12825 static void
12826 rs6000_set_default_type_attributes (type)
12827 tree type;
12828 {
12829 if (rs6000_default_long_calls
12830 && (TREE_CODE (type) == FUNCTION_TYPE
12831 || TREE_CODE (type) == METHOD_TYPE))
12832 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
12833 NULL_TREE,
12834 TYPE_ATTRIBUTES (type));
12835 }
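/* For example, a user can request the indirect call sequence
   explicitly with
	void f (void) __attribute__ ((longcall));
   or suppress it again with "shortcall"; -mlongcall makes
   "longcall" the default for all calls. */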
12836
12837 /* Return a reference suitable for calling a function with the
12838 longcall attribute. */
12839
12840 struct rtx_def *
12841 rs6000_longcall_ref (call_ref)
12842 rtx call_ref;
12843 {
12844 const char *call_name;
12845 tree node;
12846
12847 if (GET_CODE (call_ref) != SYMBOL_REF)
12848 return call_ref;
12849
12850 /* System V adds '.' to the internal name, so skip any leading dots. */
12851 call_name = XSTR (call_ref, 0);
12852 if (*call_name == '.')
12853 {
12854 while (*call_name == '.')
12855 call_name++;
12856
12857 node = get_identifier (call_name);
12858 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
12859 }
12860
12861 return force_reg (Pmode, call_ref);
12862 }
12863
12864 \f
12865 #ifdef USING_ELFOS_H
12866
12867 /* Switch to the appropriate section for output of constant X in
12868 mode MODE. X can be assumed to be some kind of constant in RTL;
12869 the MODE argument is redundant except in the case of a
12870 `const_int' rtx. Anything not destined for the TOC goes to the
12871 default ELF section for the constant. */
12875
12876 static void
12877 rs6000_elf_select_rtx_section (mode, x, align)
12878 enum machine_mode mode;
12879 rtx x;
12880 unsigned HOST_WIDE_INT align;
12881 {
12882 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
12883 toc_section ();
12884 else
12885 default_elf_select_rtx_section (mode, x, align);
12886 }
12887
12888 /* Switch to the appropriate section for output of DECL, either a
12889 `VAR_DECL' node or a constant of some sort. RELOC indicates
12890 whether forming the initial value of DECL requires link-time
12891 relocations. */
12892
12893 static void
12894 rs6000_elf_select_section (decl, reloc, align)
12895 tree decl;
12896 int reloc;
12897 unsigned HOST_WIDE_INT align;
12898 {
12899 /* Pretend that we're always building for a shared library when
12900 ABI_AIX, because otherwise we end up with dynamic relocations
12901 in read-only sections. This happens for function pointers,
12902 references to vtables in typeinfo, and probably other cases. */
12903 default_elf_select_section_1 (decl, reloc, align,
12904 flag_pic || DEFAULT_ABI == ABI_AIX);
12905 }
12906
12907 /* Build up a unique section name, expressed as a STRING_CST node,
12908 and assign it to DECL_SECTION_NAME (DECL). RELOC indicates
12909 whether the initial value of DECL requires link-time relocations.
12910 Note - this can be called for uninitialized data items as well as
12911 initialized data and functions. */
12914
12915 static void
12916 rs6000_elf_unique_section (decl, reloc)
12917 tree decl;
12918 int reloc;
12919 {
12920 /* As above, pretend that we're always building for a shared library
12921 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
12922 default_unique_section_1 (decl, reloc,
12923 flag_pic || DEFAULT_ABI == ABI_AIX);
12924 }
12925 \f
12926 /* For a SYMBOL_REF, set generic flags and then perform some
12927 target-specific processing.
12928
12929 Set SYMBOL_FLAG_SMALL_V4 for an operand in small memory on V.4/eabi;
12930 this is different from the generic SYMBOL_FLAG_SMALL.
12931
12932 When the AIX ABI is requested on a non-AIX system, replace the
12933 function name with the real name (with a leading .) rather than the
12934 function descriptor name. This saves a lot of overriding code to
12935 read the prefixes. */
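/* E.g. a function "foo" is renamed to ".foo", matching the AIX
   convention that the descriptor keeps the plain name while the code
   entry point carries the leading dot. */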
12936
12937 static void
12938 rs6000_elf_encode_section_info (decl, rtl, first)
12939 tree decl;
12940 rtx rtl;
12941 int first;
12942 {
12943 default_encode_section_info (decl, rtl, first);
12944
12945 if (first
12946 && TREE_CODE (decl) == FUNCTION_DECL
12947 && !TARGET_AIX
12948 && DEFAULT_ABI == ABI_AIX)
12949 {
12950 rtx sym_ref = XEXP (rtl, 0);
12951 size_t len = strlen (XSTR (sym_ref, 0));
12952 char *str = alloca (len + 2);
12953 str[0] = '.';
12954 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
12955 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
12956 }
12957 else if (rs6000_sdata != SDATA_NONE
12958 && DEFAULT_ABI == ABI_V4
12959 && TREE_CODE (decl) == VAR_DECL)
12960 {
12961 rtx sym_ref = XEXP (rtl, 0);
12962 int size = int_size_in_bytes (TREE_TYPE (decl));
12963 tree section_name = DECL_SECTION_NAME (decl);
12964 const char *name = (char *)0;
12965
12966 if (section_name)
12967 {
12968 if (TREE_CODE (section_name) == STRING_CST)
12969 name = TREE_STRING_POINTER (section_name);
12970 else
12971 abort ();
12972 }
12973
12974 if (name
12975 ? (strcmp (name, ".sdata") == 0
12976 || strcmp (name, ".sdata2") == 0
12977 || strcmp (name, ".sbss") == 0
12978 || strcmp (name, ".sbss2") == 0
12979 || strcmp (name, ".PPC.EMB.sdata0") == 0
12980 || strcmp (name, ".PPC.EMB.sbss0") == 0)
12981 : (size > 0 && size <= g_switch_value))
12982 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_SMALL_V4;
12983 }
12984 }
12985
12986 static bool
12987 rs6000_elf_in_small_data_p (decl)
12988 tree decl;
12989 {
12990 if (rs6000_sdata == SDATA_NONE)
12991 return false;
12992
12993 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
12994 {
12995 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
12996 if (strcmp (section, ".sdata") == 0
12997 || strcmp (section, ".sdata2") == 0
12998 || strcmp (section, ".sbss") == 0)
12999 return true;
13000 }
13001 else
13002 {
13003 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
13004
13005 if (size > 0
13006 && size <= g_switch_value
13007 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
13008 return true;
13009 }
13010
13011 return false;
13012 }
13013
13014 #endif /* USING_ELFOS_H */
13015
13016 \f
13017 /* Return a REG that occurs in ADDR with coefficient 1.
13018 ADDR can be effectively incremented by incrementing REG.
13019
13020 r0 is special and we must not select it as an address
13021 register by this routine since our caller will try to
13022 increment the returned register via an "la" instruction. */
13023
13024 struct rtx_def *
13025 find_addr_reg (addr)
13026 rtx addr;
13027 {
13028 while (GET_CODE (addr) == PLUS)
13029 {
13030 if (GET_CODE (XEXP (addr, 0)) == REG
13031 && REGNO (XEXP (addr, 0)) != 0)
13032 addr = XEXP (addr, 0);
13033 else if (GET_CODE (XEXP (addr, 1)) == REG
13034 && REGNO (XEXP (addr, 1)) != 0)
13035 addr = XEXP (addr, 1);
13036 else if (CONSTANT_P (XEXP (addr, 0)))
13037 addr = XEXP (addr, 1);
13038 else if (CONSTANT_P (XEXP (addr, 1)))
13039 addr = XEXP (addr, 0);
13040 else
13041 abort ();
13042 }
13043 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
13044 return addr;
13045 abort ();
13046 }
13047
13048 void
13049 rs6000_fatal_bad_address (op)
13050 rtx op;
13051 {
13052 fatal_insn ("bad address", op);
13053 }
13054
13055 #if TARGET_MACHO
13056
13057 #if 0
13058 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
13059 reference and a constant. */
13060
13061 int
13062 symbolic_operand (op)
13063 rtx op;
13064 {
13065 switch (GET_CODE (op))
13066 {
13067 case SYMBOL_REF:
13068 case LABEL_REF:
13069 return 1;
13070 case CONST:
13071 op = XEXP (op, 0);
13072 return (GET_CODE (op) == SYMBOL_REF ||
13073 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
13074 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
13075 && GET_CODE (XEXP (op, 1)) == CONST_INT);
13076 default:
13077 return 0;
13078 }
13079 }
13080 #endif
13081
13082 #ifdef RS6000_LONG_BRANCH
13083
13084 static tree stub_list = 0;
13085
13086 /* ADD_COMPILER_STUB adds the compiler-generated stub for handling
13087 procedure calls to the linked list of stubs. */
13088
13089 void
13090 add_compiler_stub (label_name, function_name, line_number)
13091 tree label_name;
13092 tree function_name;
13093 int line_number;
13094 {
13095 tree stub = build_tree_list (function_name, label_name);
13096 TREE_TYPE (stub) = build_int_2 (line_number, 0);
13097 TREE_CHAIN (stub) = stub_list;
13098 stub_list = stub;
13099 }
13100
13101 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
13102 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
13103 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
13104
13105 /* OUTPUT_COMPILER_STUB outputs the compiler-generated stubs for
13106 handling procedure calls from the linked list, then empties
13107 the list. */
13108
13109 void
13110 output_compiler_stub ()
13111 {
13112 char tmp_buf[256];
13113 char label_buf[256];
13114 tree stub;
13115
13116 if (!flag_pic)
13117 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13118 {
13119 fprintf (asm_out_file,
13120 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
13121
13122 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13123 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
13124 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
13125 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13126
13127 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
13128 strcpy (label_buf,
13129 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
13130 else
13131 {
13132 label_buf[0] = '_';
13133 strcpy (label_buf+1,
13134 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
13135 }
13136
13137 strcpy (tmp_buf, "lis r12,hi16(");
13138 strcat (tmp_buf, label_buf);
13139 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
13140 strcat (tmp_buf, label_buf);
13141 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
13142 output_asm_insn (tmp_buf, 0);
13143
13144 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13145 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
13146 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
13147 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13148 }
13149
13150 stub_list = 0;
13151 }
13152
13153 /* NO_PREVIOUS_DEF returns nonzero if FUNCTION_NAME is not already
13154 in the linked list of stubs. */
13155
13156 int
13157 no_previous_def (function_name)
13158 tree function_name;
13159 {
13160 tree stub;
13161 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13162 if (function_name == STUB_FUNCTION_NAME (stub))
13163 return 0;
13164 return 1;
13165 }
13166
13167 /* GET_PREV_LABEL gets the label name from the previous definition of
13168 the function. */
13169
13170 tree
13171 get_prev_label (function_name)
13172 tree function_name;
13173 {
13174 tree stub;
13175 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13176 if (function_name == STUB_FUNCTION_NAME (stub))
13177 return STUB_LABEL_NAME (stub);
13178 return 0;
13179 }
13180
13181 /* Output the assembler code for a call. INSN is the call insn and
13182 CALL_DEST is the routine we are calling; OPERAND_NUMBER is the
13183 operand number of CALL_DEST in the output template. */
13185
13186 char *
13187 output_call (insn, call_dest, operand_number)
13188 rtx insn;
13189 rtx call_dest;
13190 int operand_number;
13191 {
13192 static char buf[256];
13193 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
13194 {
13195 tree labelname;
13196 tree funname = get_identifier (XSTR (call_dest, 0));
13197
13198 if (no_previous_def (funname))
13199 {
13200 int line_number = 0;
13201 rtx label_rtx = gen_label_rtx ();
13202 char *label_buf, temp_buf[256];
13203 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
13204 CODE_LABEL_NUMBER (label_rtx));
13205 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
13206 labelname = get_identifier (label_buf);
13207 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
13208 if (insn)
13209 line_number = NOTE_LINE_NUMBER (insn);
13210 add_compiler_stub (labelname, funname, line_number);
13211 }
13212 else
13213 labelname = get_prev_label (funname);
13214
13215 sprintf (buf, "jbsr %%z%d,%.246s",
13216 operand_number, IDENTIFIER_POINTER (labelname));
13217 return buf;
13218 }
13219 else
13220 {
13221 sprintf (buf, "bl %%z%d", operand_number);
13222 return buf;
13223 }
13224 }
13225
13226 #endif /* RS6000_LONG_BRANCH */
13227
13228 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
13229 do { \
13230 const char *const symbol_ = (SYMBOL); \
13231 char *buffer_ = (BUF); \
13232 if (symbol_[0] == '"') \
13233 { \
13234 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
13235 } \
13236 else if (name_needs_quotes(symbol_)) \
13237 { \
13238 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
13239 } \
13240 else \
13241 { \
13242 sprintf(buffer_, "L%d$%s", (N), symbol_); \
13243 } \
13244 } while (0)
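/* E.g. symbol "bar" with N 0 gives "L0$bar"; symbols that need
   quoting come out as "\"L0$bar\"". */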
13245
13246
13247 /* Generate PIC and indirect symbol stubs. */
13248
13249 void
13250 machopic_output_stub (file, symb, stub)
13251 FILE *file;
13252 const char *symb, *stub;
13253 {
13254 unsigned int length;
13255 char *symbol_name, *lazy_ptr_name;
13256 char *local_label_0;
13257 static int label = 0;
13258
13259 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
13260 symb = (*targetm.strip_name_encoding) (symb);
13261
13262 label += 1;
13263
13264 length = strlen (symb);
13265 symbol_name = alloca (length + 32);
13266 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
13267
13268 lazy_ptr_name = alloca (length + 32);
13269 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
13270
13271 local_label_0 = alloca (length + 32);
13272 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);
13273
13274 if (flag_pic == 2)
13275 machopic_picsymbol_stub1_section ();
13276 else
13277 machopic_symbol_stub1_section ();
13278 fprintf (file, "\t.align 2\n");
13279
13280 fprintf (file, "%s:\n", stub);
13281 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
13282
13283 if (flag_pic == 2)
13284 {
13285 fprintf (file, "\tmflr r0\n");
13286 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
13287 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
13288 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
13289 lazy_ptr_name, local_label_0);
13290 fprintf (file, "\tmtlr r0\n");
13291 fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
13292 lazy_ptr_name, local_label_0);
13293 fprintf (file, "\tmtctr r12\n");
13294 fprintf (file, "\tbctr\n");
13295 }
13296 else
13297 {
13298 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
13299 fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
13300 fprintf (file, "\tmtctr r12\n");
13301 fprintf (file, "\tbctr\n");
13302 }
13303
13304 machopic_lazy_symbol_ptr_section ();
13305 fprintf (file, "%s:\n", lazy_ptr_name);
13306 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
13307 fprintf (file, "\t.long dyld_stub_binding_helper\n");
13308 }
13309
13310 /* Legitimize PIC addresses. If the address is already
13311 position-independent, we return ORIG. Newly generated
13312 position-independent addresses go into a reg. This is REG if
13313 nonzero, otherwise we allocate register(s) as necessary. */
13314
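/* Nonzero iff the CONST_INT X fits in a signed 16-bit field. */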
13315 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
13316
13317 rtx
13318 rs6000_machopic_legitimize_pic_address (orig, mode, reg)
13319 rtx orig;
13320 enum machine_mode mode;
13321 rtx reg;
13322 {
13323 rtx base, offset;
13324
13325 if (reg == NULL && ! reload_in_progress && ! reload_completed)
13326 reg = gen_reg_rtx (Pmode);
13327
13328 if (GET_CODE (orig) == CONST)
13329 {
13330 if (GET_CODE (XEXP (orig, 0)) == PLUS
13331 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
13332 return orig;
13333
13334 if (GET_CODE (XEXP (orig, 0)) == PLUS)
13335 {
13336 base =
13337 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
13338 Pmode, reg);
13339 offset =
13340 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
13341 Pmode, reg);
13342 }
13343 else
13344 abort ();
13345
13346 if (GET_CODE (offset) == CONST_INT)
13347 {
13348 if (SMALL_INT (offset))
13349 return plus_constant (base, INTVAL (offset));
13350 else if (! reload_in_progress && ! reload_completed)
13351 offset = force_reg (Pmode, offset);
13352 else
13353 {
13354 rtx mem = force_const_mem (Pmode, orig);
13355 return machopic_legitimize_pic_address (mem, Pmode, reg);
13356 }
13357 }
13358 return gen_rtx (PLUS, Pmode, base, offset);
13359 }
13360
13361 /* Fall back on generic machopic code. */
13362 return machopic_legitimize_pic_address (orig, mode, reg);
13363 }
13364
13365 /* This is just a placeholder to make linking work without having to
13366 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
13367 ever needed for Darwin (not too likely!) this would have to get a
13368 real definition. */
13369
13370 void
13371 toc_section ()
13372 {
13373 }
13374
13375 #endif /* TARGET_MACHO */
13376
13377 #if TARGET_ELF
13378 static unsigned int
13379 rs6000_elf_section_type_flags (decl, name, reloc)
13380 tree decl;
13381 const char *name;
13382 int reloc;
13383 {
13384 unsigned int flags
13385 = default_section_type_flags_1 (decl, name, reloc,
13386 flag_pic || DEFAULT_ABI == ABI_AIX);
13387
13388 if (TARGET_RELOCATABLE)
13389 flags |= SECTION_WRITE;
13390
13391 return flags;
13392 }
13393
13394 /* Record an element in the table of global constructors. SYMBOL is
13395 a SYMBOL_REF of the function to be called; PRIORITY is a number
13396 between 0 and MAX_INIT_PRIORITY.
13397
13398 This differs from default_named_section_asm_out_constructor in
13399 that we have special handling for -mrelocatable. */
13400
13401 static void
13402 rs6000_elf_asm_out_constructor (symbol, priority)
13403 rtx symbol;
13404 int priority;
13405 {
13406 const char *section = ".ctors";
13407 char buf[16];
13408
13409 if (priority != DEFAULT_INIT_PRIORITY)
13410 {
13411 sprintf (buf, ".ctors.%.5u",
13412 /* Invert the numbering so the linker puts us in the proper
13413 order; constructors are run from right to left, and the
13414 linker sorts in increasing order. */
13415 MAX_INIT_PRIORITY - priority);
13416 section = buf;
13417 }
13418
13419 named_section_flags (section, SECTION_WRITE);
13420 assemble_align (POINTER_SIZE);
13421
13422 if (TARGET_RELOCATABLE)
13423 {
13424 fputs ("\t.long (", asm_out_file);
13425 output_addr_const (asm_out_file, symbol);
13426 fputs (")@fixup\n", asm_out_file);
13427 }
13428 else
13429 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13430 }
13431
13432 static void
13433 rs6000_elf_asm_out_destructor (symbol, priority)
13434 rtx symbol;
13435 int priority;
13436 {
13437 const char *section = ".dtors";
13438 char buf[16];
13439
13440 if (priority != DEFAULT_INIT_PRIORITY)
13441 {
13442 sprintf (buf, ".dtors.%.5u",
13443 /* Invert the numbering so the linker puts us in the proper
13444 order; constructors are run from right to left, and the
13445 linker sorts in increasing order. */
13446 MAX_INIT_PRIORITY - priority);
13447 section = buf;
13448 }
13449
13450 named_section_flags (section, SECTION_WRITE);
13451 assemble_align (POINTER_SIZE);
13452
13453 if (TARGET_RELOCATABLE)
13454 {
13455 fputs ("\t.long (", asm_out_file);
13456 output_addr_const (asm_out_file, symbol);
13457 fputs (")@fixup\n", asm_out_file);
13458 }
13459 else
13460 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13461 }
13462 #endif
13463
13464 #if TARGET_XCOFF
13465 static void
13466 rs6000_xcoff_asm_globalize_label (stream, name)
13467 FILE *stream;
13468 const char *name;
13469 {
13470 fputs (GLOBAL_ASM_OP, stream);
13471 RS6000_OUTPUT_BASENAME (stream, name);
13472 putc ('\n', stream);
13473 }
13474
13475 static void
13476 rs6000_xcoff_asm_named_section (name, flags)
13477 const char *name;
13478 unsigned int flags;
13479 {
13480 int smclass;
13481 static const char * const suffix[3] = { "PR", "RO", "RW" };
13482
13483 if (flags & SECTION_CODE)
13484 smclass = 0;
13485 else if (flags & SECTION_WRITE)
13486 smclass = 2;
13487 else
13488 smclass = 1;
13489
13490 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
13491 (flags & SECTION_CODE) ? "." : "",
13492 name, suffix[smclass], flags & SECTION_ENTSIZE);
13493 }
13494
13495 static void
13496 rs6000_xcoff_select_section (decl, reloc, align)
13497 tree decl;
13498 int reloc;
13499 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
13500 {
13501 if (decl_readonly_section_1 (decl, reloc, 1))
13502 {
13503 if (TREE_PUBLIC (decl))
13504 read_only_data_section ();
13505 else
13506 read_only_private_data_section ();
13507 }
13508 else
13509 {
13510 if (TREE_PUBLIC (decl))
13511 data_section ();
13512 else
13513 private_data_section ();
13514 }
13515 }
13516
13517 static void
13518 rs6000_xcoff_unique_section (decl, reloc)
13519 tree decl;
13520 int reloc ATTRIBUTE_UNUSED;
13521 {
13522 const char *name;
13523
13524 /* Use select_section for private and uninitialized data. */
13525 if (!TREE_PUBLIC (decl)
13526 || DECL_COMMON (decl)
13527 || DECL_INITIAL (decl) == NULL_TREE
13528 || DECL_INITIAL (decl) == error_mark_node
13529 || (flag_zero_initialized_in_bss
13530 && initializer_zerop (DECL_INITIAL (decl))))
13531 return;
13532
13533 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
13534 name = (*targetm.strip_name_encoding) (name);
13535 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
13536 }
13537
13538 /* Select section for constant in constant pool.
13539
13540 On RS/6000, all constants are in the private read-only data area.
13541 However, if this is being placed in the TOC it must be output as a
13542 toc entry. */
13543
13544 static void
13545 rs6000_xcoff_select_rtx_section (mode, x, align)
13546 enum machine_mode mode;
13547 rtx x;
13548 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
13549 {
13550 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
13551 toc_section ();
13552 else
13553 read_only_private_data_section ();
13554 }
13555
13556 /* Remove any trailing [DS] or the like from the symbol name, e.g. "foo[DS]" -> "foo". */
13557
13558 static const char *
13559 rs6000_xcoff_strip_name_encoding (name)
13560 const char *name;
13561 {
13562 size_t len;
13563 if (*name == '*')
13564 name++;
13565 len = strlen (name);
13566 if (name[len - 1] == ']')
13567 return ggc_alloc_string (name, len - 4);
13568 else
13569 return name;
13570 }
13571
13572 /* Section attributes. AIX is always PIC. */
13573
13574 static unsigned int
13575 rs6000_xcoff_section_type_flags (decl, name, reloc)
13576 tree decl;
13577 const char *name;
13578 int reloc;
13579 {
13580 unsigned int align;
13581 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
13582
13583 /* Align to at least UNIT size. */
13584 if (flags & SECTION_CODE)
13585 align = MIN_UNITS_PER_WORD;
13586 else
13587 /* Increase alignment of large objects if not already stricter. */
13588 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
13589 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
13590 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
13591
13592 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
13593 }
13594 #endif /* TARGET_XCOFF */
13595
13596 #if TARGET_MACHO
13597 /* Cross-module name binding. Darwin does not support overriding
13598 functions at dynamic-link time. */
13599
13600 static bool
13601 rs6000_binds_local_p (decl)
13602 tree decl;
13603 {
13604 return default_binds_local_p_1 (decl, 0);
13605 }
13606 #endif
13607
13608 /* Compute a (partial) cost for rtx X. Return true if the complete
13609 cost has been computed, and false if subexpressions should be
13610 scanned. In either case, *TOTAL contains the cost result. */
13611
13612 static bool
13613 rs6000_rtx_costs (x, code, outer_code, total)
13614 rtx x;
13615 int code, outer_code ATTRIBUTE_UNUSED;
13616 int *total;
13617 {
13618 switch (code)
13619 {
13620 /* On the RS/6000, if it is valid in the insn, it is free.
13621 So this always returns 0. */
13622 case CONST_INT:
13623 case CONST:
13624 case LABEL_REF:
13625 case SYMBOL_REF:
13626 case CONST_DOUBLE:
13627 case HIGH:
13628 *total = 0;
13629 return true;
13630
13631 case PLUS:
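/* An addend that fits in a single addi (signed 16 bits) or a
   single addis (low 16 bits zero) costs one insn; any other
   constant needs an addis/addi pair. */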
13632 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
13633 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
13634 + 0x8000) >= 0x10000)
13635 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
13636 ? COSTS_N_INSNS (2)
13637 : COSTS_N_INSNS (1));
13638 return true;
13639
13640 case AND:
13641 case IOR:
13642 case XOR:
13643 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
13644 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
13645 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
13646 ? COSTS_N_INSNS (2)
13647 : COSTS_N_INSNS (1));
13648 return true;
13649
13650 case MULT:
13651 if (optimize_size)
13652 {
13653 *total = COSTS_N_INSNS (2);
13654 return true;
13655 }
13656 switch (rs6000_cpu)
13657 {
13658 case PROCESSOR_RIOS1:
13659 case PROCESSOR_PPC405:
13660 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
13661 ? COSTS_N_INSNS (5)
13662 : (INTVAL (XEXP (x, 1)) >= -256
13663 && INTVAL (XEXP (x, 1)) <= 255)
13664 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
13665 return true;
13666
13667 case PROCESSOR_RS64A:
13668 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
13669 ? GET_MODE (XEXP (x, 1)) != DImode
13670 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
13671 : (INTVAL (XEXP (x, 1)) >= -256
13672 && INTVAL (XEXP (x, 1)) <= 255)
13673 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
13674 return true;
13675
13676 case PROCESSOR_RIOS2:
13677 case PROCESSOR_MPCCORE:
13678 case PROCESSOR_PPC604e:
13679 *total = COSTS_N_INSNS (2);
13680 return true;
13681
13682 case PROCESSOR_PPC601:
13683 *total = COSTS_N_INSNS (5);
13684 return true;
13685
13686 case PROCESSOR_PPC603:
13687 case PROCESSOR_PPC7400:
13688 case PROCESSOR_PPC750:
13689 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
13690 ? COSTS_N_INSNS (5)
13691 : (INTVAL (XEXP (x, 1)) >= -256
13692 && INTVAL (XEXP (x, 1)) <= 255)
13693 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
13694 return true;
13695
13696 case PROCESSOR_PPC7450:
13697 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
13698 ? COSTS_N_INSNS (4)
13699 : COSTS_N_INSNS (3));
13700 return true;
13701
13702 case PROCESSOR_PPC403:
13703 case PROCESSOR_PPC604:
13704 case PROCESSOR_PPC8540:
13705 *total = COSTS_N_INSNS (4);
13706 return true;
13707
13708 case PROCESSOR_PPC620:
13709 case PROCESSOR_PPC630:
13710 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
13711 ? GET_MODE (XEXP (x, 1)) != DImode
13712 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
13713 : (INTVAL (XEXP (x, 1)) >= -256
13714 && INTVAL (XEXP (x, 1)) <= 255)
13715 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
13716 return true;
13717
13718 case PROCESSOR_POWER4:
13719 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
13720 ? GET_MODE (XEXP (x, 1)) != DImode
13721 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
13722 : COSTS_N_INSNS (2));
13723 return true;
13724
13725 default:
13726 abort ();
13727 }
13728
13729 case DIV:
13730 case MOD:
13731 if (GET_CODE (XEXP (x, 1)) == CONST_INT
13732 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
13733 {
13734 *total = COSTS_N_INSNS (2);
13735 return true;
13736 }
13737 /* FALLTHRU */
13738
13739 case UDIV:
13740 case UMOD:
13741 switch (rs6000_cpu)
13742 {
13743 case PROCESSOR_RIOS1:
13744 *total = COSTS_N_INSNS (19);
13745 return true;
13746
13747 case PROCESSOR_RIOS2:
13748 *total = COSTS_N_INSNS (13);
13749 return true;
13750
13751 case PROCESSOR_RS64A:
13752 *total = (GET_MODE (XEXP (x, 1)) != DImode
13753 ? COSTS_N_INSNS (65)
13754 : COSTS_N_INSNS (67));
13755 return true;
13756
13757 case PROCESSOR_MPCCORE:
13758 *total = COSTS_N_INSNS (6);
13759 return true;
13760
13761 case PROCESSOR_PPC403:
13762 *total = COSTS_N_INSNS (33);
13763 return true;
13764
13765 case PROCESSOR_PPC405:
13766 *total = COSTS_N_INSNS (35);
13767 return true;
13768
13769 case PROCESSOR_PPC601:
13770 *total = COSTS_N_INSNS (36);
13771 return true;
13772
13773 case PROCESSOR_PPC603:
13774 *total = COSTS_N_INSNS (37);
13775 return true;
13776
13777 case PROCESSOR_PPC604:
13778 case PROCESSOR_PPC604e:
13779 *total = COSTS_N_INSNS (20);
13780 return true;
13781
13782 case PROCESSOR_PPC620:
13783 case PROCESSOR_PPC630:
13784 *total = (GET_MODE (XEXP (x, 1)) != DImode
13785 ? COSTS_N_INSNS (21)
13786 : COSTS_N_INSNS (37));
13787 return true;
13788
13789 case PROCESSOR_PPC750:
13790 case PROCESSOR_PPC8540:
13791 case PROCESSOR_PPC7400:
13792 *total = COSTS_N_INSNS (19);
13793 return true;
13794
13795 case PROCESSOR_PPC7450:
13796 *total = COSTS_N_INSNS (23);
13797 return true;
13798
13799 case PROCESSOR_POWER4:
13800 *total = (GET_MODE (XEXP (x, 1)) != DImode
13801 ? COSTS_N_INSNS (18)
13802 : COSTS_N_INSNS (34));
13803 return true;
13804
13805 default:
13806 abort ();
13807 }
13808
13809 case FFS:
13810 *total = COSTS_N_INSNS (4);
13811 return true;
13812
13813 case MEM:
13814 /* MEM should be slightly more expensive than (plus (reg) (const)). */
13815 *total = 5;
13816 return true;
13817
13818 default:
13819 return false;
13820 }
13821 }
13822
13823 /* Return the cost of moving data of mode MODE from a register of
13824 class FROM to one of class TO. */
13825
13826 int
13827 rs6000_register_move_cost (mode, from, to)
13828 enum machine_mode mode;
13829 enum reg_class from, to;
13830 {
13831 /* Moves from/to GENERAL_REGS. */
13832 if (reg_classes_intersect_p (to, GENERAL_REGS)
13833 || reg_classes_intersect_p (from, GENERAL_REGS))
13834 {
13835 if (! reg_classes_intersect_p (to, GENERAL_REGS))
13836 from = to;
13837
13838 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
13839 return (rs6000_memory_move_cost (mode, from, 0)
13840 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
13841
13842 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift... */
13843 else if (from == CR_REGS)
13844 return 4;
13845
13846 else
13847 /* A move will cost one instruction per GPR moved. */
13848 return 2 * HARD_REGNO_NREGS (0, mode);
13849 }
13850
13851 /* Moving between two similar registers is just one instruction. */
13852 else if (reg_classes_intersect_p (to, from))
13853 return mode == TFmode ? 4 : 2;
13854
13855 /* Everything else has to go through GENERAL_REGS. */
13856 else
13857 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
13858 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
13859 }
13860
13861 /* Return the cost of moving data of mode MODE between a register of
13862 class CLASS and memory. */
13863
13864 int
13865 rs6000_memory_move_cost (mode, class, in)
13866 enum machine_mode mode;
13867 enum reg_class class;
13868 int in ATTRIBUTE_UNUSED;
13869 {
13870 if (reg_classes_intersect_p (class, GENERAL_REGS))
13871 return 4 * HARD_REGNO_NREGS (0, mode);
13872 else if (reg_classes_intersect_p (class, FLOAT_REGS))
13873 return 4 * HARD_REGNO_NREGS (32, mode);
13874 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
13875 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
13876 else
13877 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
13878 }
13879
13880 /* Return true if TYPE is of type __ev64_opaque__. */
13881
13882 static bool
13883 is_ev64_opaque_type (type)
13884 tree type;
13885 {
13886 return (TARGET_SPE
13887 && (type == opaque_V2SI_type_node
13888 || type == opaque_V2SF_type_node
13889 || (TREE_CODE (type) == VECTOR_TYPE
13890 && TYPE_NAME (type)
13891 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
13892 && DECL_NAME (TYPE_NAME (type))
13893 && strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))),
13894 "__ev64_opaque__") == 0)));
13895 }
13896
13897 static rtx
13898 rs6000_dwarf_register_span (reg)
13899 rtx reg;
13900 {
13901 unsigned regno;
13902
13903 if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
13904 return NULL_RTX;
13905
13906 regno = REGNO (reg);
13907
13908 /* The duality of the SPE register size wreaks all kinds of havoc.
13909 This is a way of distinguishing r0 in 32-bits from r0 in
13910 64-bits. */
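/* The high half is reported as a phantom register; the offset of
   1200 merely gives it a DWARF number that cannot collide with any
   real hard register. */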
13911 return
13912 gen_rtx_PARALLEL (VOIDmode,
13913 BYTES_BIG_ENDIAN
13914 ? gen_rtvec (2,
13915 gen_rtx_REG (SImode, regno + 1200),
13916 gen_rtx_REG (SImode, regno))
13917 : gen_rtvec (2,
13918 gen_rtx_REG (SImode, regno),
13919 gen_rtx_REG (SImode, regno + 1200)));
13920 }
13921
13922 #include "gt-rs6000.h"