]> gcc.gnu.org Git - gcc.git/blob - gcc/config/rs6000/rs6000.c
fdf73ac57065f4d4259ffe1b249e2a1509739e20
[gcc.git] / gcc / config / rs6000 / rs6000.c
1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
5
6 This file is part of GNU CC.
7
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
12
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
34 #include "flags.h"
35 #include "recog.h"
36 #include "obstack.h"
37 #include "tree.h"
38 #include "expr.h"
39 #include "optabs.h"
40 #include "except.h"
41 #include "function.h"
42 #include "output.h"
43 #include "basic-block.h"
44 #include "integrate.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "hashtab.h"
48 #include "tm_p.h"
49 #include "target.h"
50 #include "target-def.h"
51 #include "langhooks.h"
52 #include "reload.h"
53
#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

/* NOTE: both arguments are evaluated more than once; do not pass
   expressions with side effects.  */
#define min(A,B) ((A) < (B) ? (A) : (B))
#define max(A,B) ((A) > (B) ? (A) : (B))

/* Target cpu type */

enum processor_type rs6000_cpu;

/* Sources of a cpu/tune selection, walked in order by
   rs6000_override_options; later entries override earlier ones, so
   -mtune= takes precedence over -mcpu= over --with-cpu=.  */
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch		name,			tune	arch */
  { (const char *)0,	"--with-cpu=",		1,	1 },
  { (const char *)0,	"-mcpu=",		1,	1 },
  { (const char *)0,	"-mtune=",		1,	0 },
};

/* Size of long double */
const char *rs6000_long_double_size_string;
int rs6000_long_double_type_size;

/* Whether -mabi=altivec has appeared */
int rs6000_altivec_abi;

/* Whether VRSAVE instructions should be generated.  */
int rs6000_altivec_vrsave;

/* String from -mvrsave= option.  */
const char *rs6000_altivec_vrsave_string;

/* Nonzero if we want SPE ABI extensions.  */
int rs6000_spe_abi;

/* Whether isel instructions should be generated.  */
int rs6000_isel;

/* Nonzero if we have FPRs.  */
int rs6000_fprs = 1;

/* String from -misel=.  */
const char *rs6000_isel_string;

/* Set to nonzero once AIX common-mode calls have been defined.  */
static GTY(()) int common_mode_defined;

/* Private copy of original value of flag_pic for ABI_AIX.  */
static int rs6000_flag_pic;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx rs6000_compare_op0, rs6000_compare_op1;
int rs6000_compare_fp_p;

/* Label number of label created for -mrelocatable, to call to so we can
   get the address of the GOT section */
int rs6000_pic_labelno;

#ifdef USING_ELFOS_H
/* Which abi to adhere to */
const char *rs6000_abi_name = RS6000_ABI_NAME;

/* Semantics of the small data area */
enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;

/* Which small data model to use */
const char *rs6000_sdata_name = (char *)0;

/* Counter for labels which are to be placed in .fixup.  */
int fixuplabelno = 0;
#endif

/* ABI enumeration available for subtarget to use.  */
enum rs6000_abi rs6000_current_abi;

/* ABI string from -mabi= option.  */
const char *rs6000_abi_string;

/* Debug flags */
const char *rs6000_debug_name;
int rs6000_debug_stack;		/* debug stack applications */
int rs6000_debug_arg;		/* debug argument handling */

/* String from -mtraceback=, decoded into the enum below.  */
const char *rs6000_traceback_name;
static enum {
  traceback_default = 0,
  traceback_none,
  traceback_part,
  traceback_full
} rs6000_traceback;

/* Flag to say the TOC is initialized */
int toc_initialized;
char toc_label_name[10];

/* Alias set for saves and restores from the rs6000 stack.  */
static int rs6000_sr_alias_set;

/* Call distance, overridden by -mlongcall and #pragma longcall(1).
   The only place that looks at this is rs6000_set_default_type_attributes;
   everywhere else should rely on the presence or absence of a longcall
   attribute on the function declaration.  */
int rs6000_default_long_calls;
const char *rs6000_longcall_switch;

/* Description of one builtin function: the insn it expands to, its
   user-visible name, and its enum identifier.  */
struct builtin_description
{
  /* mask is not const because we're going to alter it below.  This
     nonsense will go away when we rewrite the -march infrastructure
     to give us more target flag bits.  */
  unsigned int mask;
  const enum insn_code icode;
  const char *const name;
  const enum rs6000_builtins code;
};
169
170 static bool rs6000_function_ok_for_sibcall PARAMS ((tree, tree));
171 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
172 static void validate_condition_mode
173 PARAMS ((enum rtx_code, enum machine_mode));
174 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
175 static void rs6000_maybe_dead PARAMS ((rtx));
176 static void rs6000_emit_stack_tie PARAMS ((void));
177 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
178 static void emit_frame_save PARAMS ((rtx, rtx, enum machine_mode,
179 unsigned int, int, int));
180 static rtx gen_frame_mem_offset PARAMS ((enum machine_mode, rtx, int));
181 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
182 static unsigned rs6000_hash_constant PARAMS ((rtx));
183 static unsigned toc_hash_function PARAMS ((const void *));
184 static int toc_hash_eq PARAMS ((const void *, const void *));
185 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
186 static struct machine_function * rs6000_init_machine_status PARAMS ((void));
187 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
188 #ifdef HAVE_GAS_HIDDEN
189 static void rs6000_assemble_visibility PARAMS ((tree, int));
190 #endif
191 static int rs6000_ra_ever_killed PARAMS ((void));
192 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
193 const struct attribute_spec rs6000_attribute_table[];
194 static void rs6000_set_default_type_attributes PARAMS ((tree));
195 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
196 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
197 static void rs6000_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT,
198 HOST_WIDE_INT, tree));
199 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
200 HOST_WIDE_INT, HOST_WIDE_INT));
201 #if TARGET_ELF
202 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
203 int));
204 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
205 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
206 static void rs6000_elf_select_section PARAMS ((tree, int,
207 unsigned HOST_WIDE_INT));
208 static void rs6000_elf_unique_section PARAMS ((tree, int));
209 static void rs6000_elf_select_rtx_section PARAMS ((enum machine_mode, rtx,
210 unsigned HOST_WIDE_INT));
211 static void rs6000_elf_encode_section_info PARAMS ((tree, int))
212 ATTRIBUTE_UNUSED;
213 static const char *rs6000_elf_strip_name_encoding PARAMS ((const char *));
214 static bool rs6000_elf_in_small_data_p PARAMS ((tree));
215 #endif
216 #if TARGET_XCOFF
217 static void rs6000_xcoff_asm_globalize_label PARAMS ((FILE *, const char *));
218 static void rs6000_xcoff_asm_named_section PARAMS ((const char *, unsigned int));
219 static void rs6000_xcoff_select_section PARAMS ((tree, int,
220 unsigned HOST_WIDE_INT));
221 static void rs6000_xcoff_unique_section PARAMS ((tree, int));
222 static void rs6000_xcoff_select_rtx_section PARAMS ((enum machine_mode, rtx,
223 unsigned HOST_WIDE_INT));
224 static const char * rs6000_xcoff_strip_name_encoding PARAMS ((const char *));
225 static unsigned int rs6000_xcoff_section_type_flags PARAMS ((tree, const char *, int));
226 #endif
227 static void rs6000_xcoff_encode_section_info PARAMS ((tree, int))
228 ATTRIBUTE_UNUSED;
229 static bool rs6000_binds_local_p PARAMS ((tree));
230 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
231 static int rs6000_adjust_priority PARAMS ((rtx, int));
232 static int rs6000_issue_rate PARAMS ((void));
233
234 static void rs6000_init_builtins PARAMS ((void));
235 static rtx rs6000_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
236 static rtx rs6000_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
237 static rtx rs6000_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
238 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
239 static void altivec_init_builtins PARAMS ((void));
240 static void rs6000_common_init_builtins PARAMS ((void));
241
242 static void enable_mask_for_builtins PARAMS ((struct builtin_description *,
243 int, enum rs6000_builtins,
244 enum rs6000_builtins));
245 static void spe_init_builtins PARAMS ((void));
246 static rtx spe_expand_builtin PARAMS ((tree, rtx, bool *));
247 static rtx spe_expand_predicate_builtin PARAMS ((enum insn_code, tree, rtx));
248 static rtx spe_expand_evsel_builtin PARAMS ((enum insn_code, tree, rtx));
249 static int rs6000_emit_int_cmove PARAMS ((rtx, rtx, rtx, rtx));
250
251 static rtx altivec_expand_builtin PARAMS ((tree, rtx, bool *));
252 static rtx altivec_expand_ld_builtin PARAMS ((tree, rtx, bool *));
253 static rtx altivec_expand_st_builtin PARAMS ((tree, rtx, bool *));
254 static rtx altivec_expand_dst_builtin PARAMS ((tree, rtx, bool *));
255 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
256 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
257 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
258 static void rs6000_parse_abi_options PARAMS ((void));
259 static void rs6000_parse_vrsave_option PARAMS ((void));
260 static void rs6000_parse_isel_option PARAMS ((void));
261 static int first_altivec_reg_to_save PARAMS ((void));
262 static unsigned int compute_vrsave_mask PARAMS ((void));
263 static void is_altivec_return_reg PARAMS ((rtx, void *));
264 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
265 static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT));
266 static int easy_vector_constant PARAMS ((rtx));
267
/* Hash table stuff for keeping track of TOC entries.  */

/* One entry in the TOC hash table: a constant, the mode it was
   entered under, and the assembler label number assigned to it.  */
struct toc_hash_struct GTY(())
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  enum machine_mode key_mode;
  int labelno;
};

/* The hash table itself; GC-tracked so TOC rtxes stay live.  */
static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
280 \f
/* Default register names.  Order must match the hard register layout:
   32 GPRs, 32 FPRs, MQ/LR/CTR/AP, 8 CR fields, XER, 32 AltiVec
   registers, VRSAVE/VSCR, then the SPE accumulator registers.  */
char rs6000_reg_names[][8] =
{
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "mq", "lr", "ctr","ap",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9",  "10", "11", "12", "13", "14", "15",
      "16", "17", "18", "19", "20", "21", "22", "23",
      "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave", "vscr",
      /* SPE registers.  */
      "spe_acc", "spefscr"
};

#ifdef TARGET_REGNAMES
/* Alternate, symbolic register names (-mregnames); copied over
   rs6000_reg_names in rs6000_override_options when requested.  */
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
   /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6", "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr"
};
#endif
329 \f
/* Targets that never define MASK_STRICT_ALIGN get a harmless zero so
   the tables below can OR it in unconditionally.  */
#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif

/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
336 \f
/* Initialize the GCC target structure.  Each #undef/#define pair
   overrides one hook in TARGET_INITIALIZER with the rs6000 version.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes

#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP

/* Default unaligned ops are only provided for ELF.  Find the ops needed
   for non-ELF systems.  */
#ifndef OBJECT_FORMAT_ELF
#if TARGET_XCOFF
/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
   64-bit targets.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
#else
/* For Darwin.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#endif
#endif

/* This hook deals with fixups for relocatable code and DI-mode objects
   in 64-bit code.  */
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER rs6000_assemble_integer

#ifdef HAVE_GAS_HIDDEN
#undef TARGET_ASM_ASSEMBLE_VISIBILITY
#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
#endif

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS rs6000_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin

#undef TARGET_BINDS_LOCAL_P
#define TARGET_BINDS_LOCAL_P rs6000_binds_local_p

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk

/* This port can emit a thunk for any combination of offsets.  */
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall

/* The one and only target vector for this backend.  */
struct gcc_target targetm = TARGET_INITIALIZER;
408 \f
/* Override command line options.  Mostly we process the processor
   type and sometimes adjust other TARGET_ options.  */

void
rs6000_override_options (default_cpu)
     const char *default_cpu;
{
  size_t i, j;
  struct rs6000_cpu_select *ptr;

  /* Simplify the entries below by making a mask for any POWER
     variant and any PowerPC variant.  */

#define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
#define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
		       | MASK_PPC_GFXOPT | MASK_POWERPC64)
#define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)

  /* For each recognized -mcpu=/-mtune= name: its processor enum plus
     the target flags that must be forced on and off for it.  */
  static struct ptt
    {
      const char *const name;		/* Canonical processor name.  */
      const enum processor_type processor; /* Processor type enum value.  */
      const int target_enable;	/* Target flags to enable.  */
      const int target_disable;	/* Target flags to disable.  */
    } const processor_target_table[]
      = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_MASKS},
	 {"power", PROCESSOR_POWER,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"power2", PROCESSOR_POWER,
	    MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
	    POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"power3", PROCESSOR_PPC630,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"power4", PROCESSOR_POWER4,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"powerpc", PROCESSOR_POWERPC,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"powerpc64", PROCESSOR_POWERPC64,
	    MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS},
	 {"rios", PROCESSOR_RIOS1,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rios1", PROCESSOR_RIOS1,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rsc", PROCESSOR_PPC601,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rsc1", PROCESSOR_PPC601,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rios2", PROCESSOR_RIOS2,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
	    POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rs64a", PROCESSOR_RS64A,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS},
	 {"401", PROCESSOR_PPC403,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"403", PROCESSOR_PPC403,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"405", PROCESSOR_PPC405,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"405f", PROCESSOR_PPC405,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"505", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"601", PROCESSOR_PPC601,
	    MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"602", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"603", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"603e", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"ec603e", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"604", PROCESSOR_PPC604,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"604e", PROCESSOR_PPC604e,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"620", PROCESSOR_PPC620,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"630", PROCESSOR_PPC630,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"740", PROCESSOR_PPC750,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"750", PROCESSOR_PPC750,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"7400", PROCESSOR_PPC7400,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"7450", PROCESSOR_PPC7450,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"8540", PROCESSOR_PPC8540,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"801", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"821", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"823", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"860", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};

  const size_t ptt_size = ARRAY_SIZE (processor_target_table);

  /* Save current -mmultiple/-mno-multiple status.  */
  int multiple = TARGET_MULTIPLE;
  /* Save current -mstring/-mno-string status.  */
  int string = TARGET_STRING;

  /* Identify the processor type.  */
  rs6000_select[0].string = default_cpu;
  rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;

  /* Walk the selection sources in priority order; each matching entry
     may set the tune cpu and/or adjust arch target flags.  */
  for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
    {
      ptr = &rs6000_select[i];
      if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	{
	  for (j = 0; j < ptt_size; j++)
	    if (! strcmp (ptr->string, processor_target_table[j].name))
	      {
		if (ptr->set_tune_p)
		  rs6000_cpu = processor_target_table[j].processor;

		if (ptr->set_arch_p)
		  {
		    target_flags |= processor_target_table[j].target_enable;
		    target_flags &= ~processor_target_table[j].target_disable;
		  }
		break;
	      }

	  /* The inner loop ran off the end without a match.  */
	  if (j == ptt_size)
	    error ("bad value (%s) for %s switch", ptr->string, ptr->name);
	}
    }

  /* The 8540 has isel; turn it on unconditionally for that cpu.  */
  if (rs6000_cpu == PROCESSOR_PPC8540)
    rs6000_isel = 1;

  /* If we are optimizing big endian systems for space, use the load/store
     multiple and string instructions.  */
  if (BYTES_BIG_ENDIAN && optimize_size)
    target_flags |= MASK_MULTIPLE | MASK_STRING;

  /* If -mmultiple or -mno-multiple was explicitly used, don't
     override with the processor default */
  if ((target_flags_explicit & MASK_MULTIPLE) != 0)
    target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;

  /* If -mstring or -mno-string was explicitly used, don't override
     with the processor default.  */
  if ((target_flags_explicit & MASK_STRING) != 0)
    target_flags = (target_flags & ~MASK_STRING) | string;

  /* Don't allow -mmultiple or -mstring on little endian systems
     unless the cpu is a 750, because the hardware doesn't support the
     instructions used in little endian mode, and causes an alignment
     trap.  The 750 does not cause an alignment trap (except when the
     target is unaligned).  */

  if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
    {
      if (TARGET_MULTIPLE)
	{
	  target_flags &= ~MASK_MULTIPLE;
	  /* Only warn if the user asked for it explicitly.  */
	  if ((target_flags_explicit & MASK_MULTIPLE) != 0)
	    warning ("-mmultiple is not supported on little endian systems");
	}

      if (TARGET_STRING)
	{
	  target_flags &= ~MASK_STRING;
	  if ((target_flags_explicit & MASK_STRING) != 0)
	    warning ("-mstring is not supported on little endian systems");
	}
    }

  /* AIX code is always position-independent; stash the requested
     level and clear flag_pic so generic code does no PIC work.  */
  if (flag_pic != 0 && DEFAULT_ABI == ABI_AIX)
    {
      rs6000_flag_pic = flag_pic;
      flag_pic = 0;
    }

  /* For Darwin, always silently make -fpic and -fPIC identical.  */
  if (flag_pic == 1 && DEFAULT_ABI == ABI_DARWIN)
    flag_pic = 2;

  /* Set debug flags */
  if (rs6000_debug_name)
    {
      if (! strcmp (rs6000_debug_name, "all"))
	rs6000_debug_stack = rs6000_debug_arg = 1;
      else if (! strcmp (rs6000_debug_name, "stack"))
	rs6000_debug_stack = 1;
      else if (! strcmp (rs6000_debug_name, "arg"))
	rs6000_debug_arg = 1;
      else
	error ("unknown -mdebug-%s switch", rs6000_debug_name);
    }

  if (rs6000_traceback_name)
    {
      /* Prefix matches, so "partial" and "none" are accepted too.  */
      if (! strncmp (rs6000_traceback_name, "full", 4))
	rs6000_traceback = traceback_full;
      else if (! strncmp (rs6000_traceback_name, "part", 4))
	rs6000_traceback = traceback_part;
      else if (! strncmp (rs6000_traceback_name, "no", 2))
	rs6000_traceback = traceback_none;
      else
	error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
	       rs6000_traceback_name);
    }

  /* Set size of long double */
  rs6000_long_double_type_size = 64;
  if (rs6000_long_double_size_string)
    {
      char *tail;
      int size = strtol (rs6000_long_double_size_string, &tail, 10);
      /* Only exactly "64" or "128" are accepted.  */
      if (*tail != '\0' || (size != 64 && size != 128))
	error ("Unknown switch -mlong-double-%s",
	       rs6000_long_double_size_string);
      else
	rs6000_long_double_type_size = size;
    }

  /* Handle -mabi= options.  */
  rs6000_parse_abi_options ();

  /* Handle -mvrsave= option.  */
  rs6000_parse_vrsave_option ();

  /* Handle -misel= option.  */
  rs6000_parse_isel_option ();

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
  SUBSUBTARGET_OVERRIDE_OPTIONS;
#endif

  /* Handle -m(no-)longcall option.  This is a bit of a cheap hack,
     using TARGET_OPTIONS to handle a toggle switch, but we're out of
     bits in target_flags so TARGET_SWITCHES cannot be used.
     Assumption here is that rs6000_longcall_switch points into the
     text of the complete option, rather than being a copy, so we can
     scan back for the presence or absence of the no- modifier.  */
  if (rs6000_longcall_switch)
    {
      /* Scan back to the 'm' of "-m..." to recover the full option
	 text for the diagnostic.  */
      const char *base = rs6000_longcall_switch;
      while (base[-1] != 'm') base--;

      /* The option takes no argument, so any trailing text is bad.  */
      if (*rs6000_longcall_switch != '\0')
	error ("invalid option `%s'", base);
      /* "no-longcall" starts with 'n'; "longcall" does not.  */
      rs6000_default_long_calls = (base[0] != 'n');
    }

#ifdef TARGET_REGNAMES
  /* If the user desires alternate register names, copy in the
     alternate names now.  */
  if (TARGET_REGNAMES)
    memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
#endif

  /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
     If -maix-struct-return or -msvr4-struct-return was explicitly
     used, don't override with the ABI default.  */
  if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
    {
      if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
	target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
      else
	target_flags |= MASK_AIX_STRUCT_RET;
    }

  /* 128-bit long double on AIX/Darwin is the IBM double-double
     format, not IEEE quad.  */
  if (TARGET_LONG_DOUBLE_128
      && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
    real_format_for_mode[TFmode - QFmode] = &ibm_extended_format;

  /* Allocate an alias set for register saves & restores from stack.  */
  rs6000_sr_alias_set = new_alias_set ();

  if (TARGET_TOC)
    ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);

  /* We can only guarantee the availability of DI pseudo-ops when
     assembling for 64-bit targets.  */
  if (!TARGET_64BIT)
    {
      targetm.asm_out.aligned_op.di = NULL;
      targetm.asm_out.unaligned_op.di = NULL;
    }

  /* Arrange to save and restore machine status around nested functions.  */
  init_machine_status = rs6000_init_machine_status;
}
738
739 /* Handle -misel= option. */
740 static void
741 rs6000_parse_isel_option ()
742 {
743 if (rs6000_isel_string == 0)
744 return;
745 else if (! strcmp (rs6000_isel_string, "yes"))
746 rs6000_isel = 1;
747 else if (! strcmp (rs6000_isel_string, "no"))
748 rs6000_isel = 0;
749 else
750 error ("unknown -misel= option specified: '%s'",
751 rs6000_isel_string);
752 }
753
754 /* Handle -mvrsave= options. */
755 static void
756 rs6000_parse_vrsave_option ()
757 {
758 /* Generate VRSAVE instructions by default. */
759 if (rs6000_altivec_vrsave_string == 0
760 || ! strcmp (rs6000_altivec_vrsave_string, "yes"))
761 rs6000_altivec_vrsave = 1;
762 else if (! strcmp (rs6000_altivec_vrsave_string, "no"))
763 rs6000_altivec_vrsave = 0;
764 else
765 error ("unknown -mvrsave= option specified: '%s'",
766 rs6000_altivec_vrsave_string);
767 }
768
769 /* Handle -mabi= options. */
770 static void
771 rs6000_parse_abi_options ()
772 {
773 if (rs6000_abi_string == 0)
774 return;
775 else if (! strcmp (rs6000_abi_string, "altivec"))
776 rs6000_altivec_abi = 1;
777 else if (! strcmp (rs6000_abi_string, "no-altivec"))
778 rs6000_altivec_abi = 0;
779 else if (! strcmp (rs6000_abi_string, "spe"))
780 rs6000_spe_abi = 1;
781 else if (! strcmp (rs6000_abi_string, "no-spe"))
782 rs6000_spe_abi = 0;
783 else
784 error ("unknown ABI specified: '%s'", rs6000_abi_string);
785 }
786
/* Implement the OPTIMIZATION_OPTIONS hook.  This port makes no
   per-optimization-level adjustments, so the body is intentionally
   empty; the function exists only to satisfy the interface.  */
void
optimization_options (level, size)
     int level ATTRIBUTE_UNUSED;
     int size ATTRIBUTE_UNUSED;
{
}
793 \f
/* Do anything needed at the start of the asm file.  Under
   -fverbose-asm this emits a comment line listing the cpu/tune
   selections and (on ELF) the small-data settings in effect.  */

void
rs6000_file_start (file, default_cpu)
     FILE *file;
     const char *default_cpu;
{
  size_t i;
  char buffer[80];
  /* START points at the comment header until the first item is
     printed, then becomes "" so the header appears only once.  */
  const char *start = buffer;
  struct rs6000_cpu_select *ptr;

  if (flag_verbose_asm)
    {
      sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
      rs6000_select[0].string = default_cpu;

      for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
	{
	  ptr = &rs6000_select[i];
	  if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	    {
	      fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
	      start = "";
	    }
	}

#ifdef USING_ELFOS_H
      switch (rs6000_sdata)
	{
	case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
	case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
	case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
	case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
	}

      if (rs6000_sdata && g_switch_value)
	{
	  fprintf (file, "%s -G %d", start, g_switch_value);
	  start = "";
	}
#endif

      /* START is "" only if something was printed; terminate the
	 comment line in that case.  */
      if (*start == '\0')
	putc ('\n', file);
    }
}
841 \f
842 /* Return nonzero if this function is known to have a null epilogue. */
843
844 int
845 direct_return ()
846 {
847 if (reload_completed)
848 {
849 rs6000_stack_t *info = rs6000_stack_info ();
850
851 if (info->first_gp_reg_save == 32
852 && info->first_fp_reg_save == 64
853 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
854 && ! info->lr_save_p
855 && ! info->cr_save_p
856 && info->vrsave_mask == 0
857 && ! info->push_p)
858 return 1;
859 }
860
861 return 0;
862 }
863
/* Returns 1 always.  Predicate for patterns that accept any operand
   whatsoever; both arguments are ignored.  */

int
any_operand (op, mode)
     rtx op ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return 1;
}
873
874 /* Returns 1 if op is the count register. */
875 int
876 count_register_operand (op, mode)
877 rtx op;
878 enum machine_mode mode ATTRIBUTE_UNUSED;
879 {
880 if (GET_CODE (op) != REG)
881 return 0;
882
883 if (REGNO (op) == COUNT_REGISTER_REGNUM)
884 return 1;
885
886 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
887 return 1;
888
889 return 0;
890 }
891
892 /* Returns 1 if op is an altivec register. */
893 int
894 altivec_register_operand (op, mode)
895 rtx op;
896 enum machine_mode mode ATTRIBUTE_UNUSED;
897 {
898
899 return (register_operand (op, mode)
900 && (GET_CODE (op) != REG
901 || REGNO (op) > FIRST_PSEUDO_REGISTER
902 || ALTIVEC_REGNO_P (REGNO (op))));
903 }
904
905 int
906 xer_operand (op, mode)
907 rtx op;
908 enum machine_mode mode ATTRIBUTE_UNUSED;
909 {
910 if (GET_CODE (op) != REG)
911 return 0;
912
913 if (XER_REGNO_P (REGNO (op)))
914 return 1;
915
916 return 0;
917 }
918
919 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
920 by such constants completes more quickly. */
921
922 int
923 s8bit_cint_operand (op, mode)
924 rtx op;
925 enum machine_mode mode ATTRIBUTE_UNUSED;
926 {
927 return ( GET_CODE (op) == CONST_INT
928 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
929 }
930
931 /* Return 1 if OP is a constant that can fit in a D field. */
932
933 int
934 short_cint_operand (op, mode)
935 rtx op;
936 enum machine_mode mode ATTRIBUTE_UNUSED;
937 {
938 return (GET_CODE (op) == CONST_INT
939 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
940 }
941
942 /* Similar for an unsigned D field. */
943
944 int
945 u_short_cint_operand (op, mode)
946 rtx op;
947 enum machine_mode mode ATTRIBUTE_UNUSED;
948 {
949 return (GET_CODE (op) == CONST_INT
950 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
951 }
952
953 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
954
955 int
956 non_short_cint_operand (op, mode)
957 rtx op;
958 enum machine_mode mode ATTRIBUTE_UNUSED;
959 {
960 return (GET_CODE (op) == CONST_INT
961 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
962 }
963
964 /* Returns 1 if OP is a CONST_INT that is a positive value
965 and an exact power of 2. */
966
967 int
968 exact_log2_cint_operand (op, mode)
969 rtx op;
970 enum machine_mode mode ATTRIBUTE_UNUSED;
971 {
972 return (GET_CODE (op) == CONST_INT
973 && INTVAL (op) > 0
974 && exact_log2 (INTVAL (op)) >= 0);
975 }
976
977 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
978 ctr, or lr). */
979
980 int
981 gpc_reg_operand (op, mode)
982 rtx op;
983 enum machine_mode mode;
984 {
985 return (register_operand (op, mode)
986 && (GET_CODE (op) != REG
987 || (REGNO (op) >= ARG_POINTER_REGNUM
988 && !XER_REGNO_P (REGNO (op)))
989 || REGNO (op) < MQ_REGNO));
990 }
991
992 /* Returns 1 if OP is either a pseudo-register or a register denoting a
993 CR field. */
994
995 int
996 cc_reg_operand (op, mode)
997 rtx op;
998 enum machine_mode mode;
999 {
1000 return (register_operand (op, mode)
1001 && (GET_CODE (op) != REG
1002 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1003 || CR_REGNO_P (REGNO (op))));
1004 }
1005
1006 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1007 CR field that isn't CR0. */
1008
1009 int
1010 cc_reg_not_cr0_operand (op, mode)
1011 rtx op;
1012 enum machine_mode mode;
1013 {
1014 return (register_operand (op, mode)
1015 && (GET_CODE (op) != REG
1016 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1017 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1018 }
1019
1020 /* Returns 1 if OP is either a constant integer valid for a D-field or
1021 a non-special register. If a register, it must be in the proper
1022 mode unless MODE is VOIDmode. */
1023
1024 int
1025 reg_or_short_operand (op, mode)
1026 rtx op;
1027 enum machine_mode mode;
1028 {
1029 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1030 }
1031
1032 /* Similar, except check if the negation of the constant would be
1033 valid for a D-field. */
1034
1035 int
1036 reg_or_neg_short_operand (op, mode)
1037 rtx op;
1038 enum machine_mode mode;
1039 {
1040 if (GET_CODE (op) == CONST_INT)
1041 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1042
1043 return gpc_reg_operand (op, mode);
1044 }
1045
1046 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1047 a non-special register. If a register, it must be in the proper
1048 mode unless MODE is VOIDmode. */
1049
1050 int
1051 reg_or_aligned_short_operand (op, mode)
1052 rtx op;
1053 enum machine_mode mode;
1054 {
1055 if (gpc_reg_operand (op, mode))
1056 return 1;
1057 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1058 return 1;
1059
1060 return 0;
1061 }
1062
1063
1064 /* Return 1 if the operand is either a register or an integer whose
1065 high-order 16 bits are zero. */
1066
1067 int
1068 reg_or_u_short_operand (op, mode)
1069 rtx op;
1070 enum machine_mode mode;
1071 {
1072 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1073 }
1074
1075 /* Return 1 is the operand is either a non-special register or ANY
1076 constant integer. */
1077
1078 int
1079 reg_or_cint_operand (op, mode)
1080 rtx op;
1081 enum machine_mode mode;
1082 {
1083 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1084 }
1085
/* Return 1 if the operand is either a non-special register or ANY
   32-bit signed constant integer.  */

int
reg_or_arith_cint_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT != 32
	      /* On a wider host, require the value to fit in 32 signed
		 bits: biasing by 0x80000000 maps [-2^31, 2^31) onto
		 [0, 2^32), testable with one unsigned compare.  On a
		 32-bit host every CONST_INT trivially fits.  */
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
		  < (unsigned HOST_WIDE_INT) 0x100000000ll)
#endif
	      ));
}
1102
/* Return 1 if the operand is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit addition (i.e. splittable
   into an addis of the high part plus an addi of the low part).  */

int
reg_or_add_cint64_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT == 32
	      /* 0x7fff8000 is the first value whose rounded high part
		 overflows addis' signed 16-bit immediate.
		 NOTE(review): 0x7fff8000 is an unsigned constant here,
		 so this comparison is done unsigned and rejects all
		 negative INTVALs on a 32-bit host -- confirm that is
		 the intended behavior.  */
	      && INTVAL (op) < 0x7fff8000
#else
	      /* Bias by 0x80008000 so one unsigned compare checks that
		 both the addis and addi immediates fit.  */
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
1121
/* Return 1 if the operand is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit subtraction (implemented as
   addition of the negated constant).  */

int
reg_or_sub_cint64_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT == 32
	      /* Same bound as reg_or_add_cint64_operand applied to the
		 negated value.  NOTE(review): the comparison against
		 the unsigned literal 0x7fff8000 is unsigned, and
		 negating the most negative HOST_WIDE_INT overflows --
		 confirm the intended range on 32-bit hosts.  */
	      && (- INTVAL (op)) < 0x7fff8000
#else
	      && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
1140
/* Return 1 if the operand is either a non-special register or ANY
   32-bit unsigned constant integer.  */

int
reg_or_logical_cint_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	{
	  /* A mode wider than the host word must itself be wider than
	     32 bits for this predicate to make sense.  */
	  if (GET_MODE_BITSIZE (mode) <= 32)
	    abort ();

	  /* A negative value would sign-extend into the part of the
	     mode above the host word, making it exceed 32 bits.  */
	  if (INTVAL (op) < 0)
	    return 0;
	}

      /* Accept iff no bits above the low 32 are set within MODE.  */
      return ((INTVAL (op) & GET_MODE_MASK (mode)
	       & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* Integer CONST_DOUBLEs only arise for DImode wider than the
	 host word.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  || mode != DImode)
	abort ();

      return CONST_DOUBLE_HIGH (op) == 0;
    }
  else
    return gpc_reg_operand (op, mode);
}
1174
1175 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1176
1177 int
1178 got_operand (op, mode)
1179 rtx op;
1180 enum machine_mode mode ATTRIBUTE_UNUSED;
1181 {
1182 return (GET_CODE (op) == SYMBOL_REF
1183 || GET_CODE (op) == CONST
1184 || GET_CODE (op) == LABEL_REF);
1185 }
1186
1187 /* Return 1 if the operand is a simple references that can be loaded via
1188 the GOT (labels involving addition aren't allowed). */
1189
1190 int
1191 got_no_const_operand (op, mode)
1192 rtx op;
1193 enum machine_mode mode ATTRIBUTE_UNUSED;
1194 {
1195 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1196 }
1197
/* Return the number of instructions it takes to form the constant
   VALUE in an integer register.  */

static int
num_insns_constant_wide (value)
     HOST_WIDE_INT value;
{
  /* signed constant loadable with {cal|addi} */
  if (CONST_OK_FOR_LETTER_P (value, 'I'))
    return 1;

  /* constant loadable with {cau|addis} */
  else if (CONST_OK_FOR_LETTER_P (value, 'L'))
    return 1;

#if HOST_BITS_PER_WIDE_INT == 64
  else if (TARGET_POWERPC64)
    {
      /* Split VALUE into a sign-extended low 32-bit part and the rest.
	 NOTE(review): HIGH is shifted by 31, not 32, so it overlaps
	 LOW's sign bit -- presumably to compensate for the sign
	 extension the low-part load performs; confirm against the
	 DImode constant splitters.  */
      HOST_WIDE_INT low  = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
      HOST_WIDE_INT high = value >> 31;

      /* Fits in 32 signed bits: two insns (lis/ori style).  */
      if (high == 0 || high == -1)
	return 2;

      high >>= 1;

      if (low == 0)
	/* Build HIGH, then one shift into position.  */
	return num_insns_constant_wide (high) + 1;
      else
	/* Build HIGH, shift, then merge in LOW.  */
	return (num_insns_constant_wide (high)
		+ num_insns_constant_wide (low) + 1);
    }
#endif

  else
    /* Anything else needs an addis/addi (or lis/ori) pair.  */
    return 2;
}
1235
/* Return the number of instructions it takes to load constant OP of
   mode MODE into an integer register (or register pair on 32-bit
   targets).  Aborts if OP is not a CONST_INT or CONST_DOUBLE.  */

int
num_insns_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
#if HOST_BITS_PER_WIDE_INT == 64
      /* A wide value that is a valid 64-bit mask can be made with a
	 load-immediate plus one rotate-and-mask insn.  */
      if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
	  && mask64_operand (op, mode))
	return 2;
      else
#endif
	return num_insns_constant_wide (INTVAL (op));
    }

  else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
    {
      long l;
      REAL_VALUE_TYPE rv;

      /* A single-precision constant costs whatever its 32-bit target
	 image costs as an integer.  */
      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);
      return num_insns_constant_wide ((HOST_WIDE_INT) l);
    }

  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      HOST_WIDE_INT low;
      HOST_WIDE_INT high;
      long l[2];
      REAL_VALUE_TYPE rv;
      int endian = (WORDS_BIG_ENDIAN == 0);

      /* Obtain the two target words: directly for an integer
	 CONST_DOUBLE, else from the double-precision image, picking
	 word order by target endianness.  */
      if (mode == VOIDmode || mode == DImode)
	{
	  high = CONST_DOUBLE_HIGH (op);
	  low = CONST_DOUBLE_LOW (op);
	}
      else
	{
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
	  REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
	  high = l[endian];
	  low = l[1 - endian];
	}

      if (TARGET_32BIT)
	/* Two 32-bit registers: sum of the per-half costs.  */
	return (num_insns_constant_wide (low)
		+ num_insns_constant_wide (high));

      else
	{
	  /* One 64-bit register.  If HIGH is just the sign extension
	     of LOW, only LOW needs to be built.  */
	  if (high == 0 && low >= 0)
	    return num_insns_constant_wide (low);

	  else if (high == -1 && low < 0)
	    return num_insns_constant_wide (low);

	  else if (mask64_operand (op, mode))
	    /* Load-immediate plus rotate-and-mask.  */
	    return 2;

	  else if (low == 0)
	    /* Build HIGH and shift it into place.  */
	    return num_insns_constant_wide (high) + 1;

	  else
	    return (num_insns_constant_wide (high)
		    + num_insns_constant_wide (low) + 1);
	}
    }

  else
    abort ();
}
1310
/* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
   register with one instruction per word.  We only do this if we can
   safely read CONST_DOUBLE_{LOW,HIGH}.  */

int
easy_fp_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Only CONST_DOUBLEs of a float mode (or DImode) qualify at all.  */
  if (GET_CODE (op) != CONST_DOUBLE
      || GET_MODE (op) != mode
      || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
    return 0;

  /* Consider all constants with -msoft-float to be easy.  */
  if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
      && mode != DImode)
    return 1;

  /* If we are using V.4 style PIC, consider all constants to be hard.  */
  if (flag_pic && DEFAULT_ABI == ABI_V4)
    return 0;

#ifdef TARGET_RELOCATABLE
  /* Similarly if we are using -mrelocatable, consider all constants
     to be hard.  */
  if (TARGET_RELOCATABLE)
    return 0;
#endif

  if (mode == TFmode)
    {
      long k[4];
      REAL_VALUE_TYPE rv;

      /* Easy iff each of the four 32-bit target words is a
	 one-instruction constant.  */
      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
    }

  else if (mode == DFmode)
    {
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
    }

  else if (mode == SFmode)
    {
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      return num_insns_constant_wide (l) == 1;
    }

  else if (mode == DImode)
    /* NOTE(review): the GET_CODE test below is redundant -- it was
       already established at function entry.  */
    return ((TARGET_POWERPC64
	     && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
	    || (num_insns_constant (op, DImode) <= 2));

  else if (mode == SImode)
    return 1;
  else
    abort ();
}
1388
1389 /* Return 1 if the operand is a CONST_INT and can be put into a
1390 register with one instruction. */
1391
1392 static int
1393 easy_vector_constant (op)
1394 rtx op;
1395 {
1396 rtx elt;
1397 int units, i;
1398
1399 if (GET_CODE (op) != CONST_VECTOR)
1400 return 0;
1401
1402 units = CONST_VECTOR_NUNITS (op);
1403
1404 /* We can generate 0 easily. Look for that. */
1405 for (i = 0; i < units; ++i)
1406 {
1407 elt = CONST_VECTOR_ELT (op, i);
1408
1409 /* We could probably simplify this by just checking for equality
1410 with CONST0_RTX for the current mode, but let's be safe
1411 instead. */
1412
1413 switch (GET_CODE (elt))
1414 {
1415 case CONST_INT:
1416 if (INTVAL (elt) != 0)
1417 return 0;
1418 break;
1419 case CONST_DOUBLE:
1420 if (CONST_DOUBLE_LOW (elt) != 0 || CONST_DOUBLE_HIGH (elt) != 0)
1421 return 0;
1422 break;
1423 default:
1424 return 0;
1425 }
1426 }
1427
1428 /* We could probably generate a few other constants trivially, but
1429 gcc doesn't generate them yet. FIXME later. */
1430 return 1;
1431 }
1432
1433 /* Return 1 if the operand is the constant 0. This works for scalars
1434 as well as vectors. */
1435 int
1436 zero_constant (op, mode)
1437 rtx op;
1438 enum machine_mode mode;
1439 {
1440 return op == CONST0_RTX (mode);
1441 }
1442
1443 /* Return 1 if the operand is 0.0. */
1444 int
1445 zero_fp_constant (op, mode)
1446 rtx op;
1447 enum machine_mode mode;
1448 {
1449 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1450 }
1451
1452 /* Return 1 if the operand is in volatile memory. Note that during
1453 the RTL generation phase, memory_operand does not return TRUE for
1454 volatile memory references. So this function allows us to
1455 recognize volatile references where its safe. */
1456
1457 int
1458 volatile_mem_operand (op, mode)
1459 rtx op;
1460 enum machine_mode mode;
1461 {
1462 if (GET_CODE (op) != MEM)
1463 return 0;
1464
1465 if (!MEM_VOLATILE_P (op))
1466 return 0;
1467
1468 if (mode != GET_MODE (op))
1469 return 0;
1470
1471 if (reload_completed)
1472 return memory_operand (op, mode);
1473
1474 if (reload_in_progress)
1475 return strict_memory_address_p (mode, XEXP (op, 0));
1476
1477 return memory_address_p (mode, XEXP (op, 0));
1478 }
1479
1480 /* Return 1 if the operand is an offsettable memory operand. */
1481
1482 int
1483 offsettable_mem_operand (op, mode)
1484 rtx op;
1485 enum machine_mode mode;
1486 {
1487 return ((GET_CODE (op) == MEM)
1488 && offsettable_address_p (reload_completed || reload_in_progress,
1489 mode, XEXP (op, 0)));
1490 }
1491
1492 /* Return 1 if the operand is either an easy FP constant (see above) or
1493 memory. */
1494
1495 int
1496 mem_or_easy_const_operand (op, mode)
1497 rtx op;
1498 enum machine_mode mode;
1499 {
1500 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1501 }
1502
1503 /* Return 1 if the operand is either a non-special register or an item
1504 that can be used as the operand of a `mode' add insn. */
1505
1506 int
1507 add_operand (op, mode)
1508 rtx op;
1509 enum machine_mode mode;
1510 {
1511 if (GET_CODE (op) == CONST_INT)
1512 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1513 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1514
1515 return gpc_reg_operand (op, mode);
1516 }
1517
1518 /* Return 1 if OP is a constant but not a valid add_operand. */
1519
1520 int
1521 non_add_cint_operand (op, mode)
1522 rtx op;
1523 enum machine_mode mode ATTRIBUTE_UNUSED;
1524 {
1525 return (GET_CODE (op) == CONST_INT
1526 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1527 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1528 }
1529
/* Return 1 if the operand is a non-special register or a constant that
   can be used as the operand of an OR or XOR insn on the RS/6000,
   i.e. one whose set bits lie entirely in the low or entirely in the
   high 16 bits.  */

int
logical_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  HOST_WIDE_INT opl, oph;

  if (gpc_reg_operand (op, mode))
    return 1;

  if (GET_CODE (op) == CONST_INT)
    {
      opl = INTVAL (op) & GET_MODE_MASK (mode);

#if HOST_BITS_PER_WIDE_INT <= 32
      /* For a mode wider than the host word, a negative OPL implies
	 nonzero sign-extension bits above the host word, which
	 ori/oris cannot produce.  */
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
	return 0;
#endif
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* Integer CONST_DOUBLEs only arise for modes wider than the
	 host word.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	abort ();

      opl = CONST_DOUBLE_LOW (op);
      oph = CONST_DOUBLE_HIGH (op);
      if (oph != 0)
	return 0;
    }
  else
    return 0;

  /* Fits ori/xori (low 16 bits) or oris/xoris (high 16 bits).  */
  return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
	  || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
}
1568
1569 /* Return 1 if C is a constant that is not a logical operand (as
1570 above), but could be split into one. */
1571
1572 int
1573 non_logical_cint_operand (op, mode)
1574 rtx op;
1575 enum machine_mode mode;
1576 {
1577 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1578 && ! logical_operand (op, mode)
1579 && reg_or_logical_cint_operand (op, mode));
1580 }
1581
/* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
   RS/6000.  It is if there are no more than two 1->0 or 0->1 transitions.
   Reject all ones and all zeros, since these should have been optimized
   away and confuse the making of MB and ME.  */

int
mask_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Fail in 64-bit mode if the mask wraps around because the upper
     32-bits of the mask will all be 1s, contrary to GCC's internal view.
     (That case is handled by mask_operand_wrap instead.)  */
  if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
    return 0;

  /* We don't change the number of transitions by inverting,
     so make sure we start with the LS bit zero.  */
  if (c & 1)
    c = ~c;

  /* Reject all zeros or all ones.  */
  if (c == 0)
    return 0;

  /* Find the first transition.  */
  lsb = c & -c;

  /* Invert to look for a second transition.  */
  c = ~c;

  /* Erase first transition.  */
  c &= -lsb;

  /* Find the second transition (if any).  */
  lsb = c & -c;

  /* Match if all the bits above are 1's (or c is zero).  */
  return c == -lsb;
}
1628
/* Return 1 for the PowerPC64 rlwinm corner case: a 32-bit mask that
   wraps around (both bit 0 and bit 31 set), which mask_operand
   rejects.  */

int
mask_operand_wrap (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Only wrapping masks are handled here.  */
  if ((c & 0x80000001) != 0x80000001)
    return 0;

  /* Work on the complement, whose LS bit is now zero; reject all ones
     (complement all zeros).  */
  c = ~c;
  if (c == 0)
    return 0;

  /* Find the first transition.  */
  lsb = c & -c;
  /* Invert to look for a second transition.  */
  c = ~c;
  /* Erase the first transition.  */
  c &= -lsb;
  /* Find the second transition (if any).  */
  lsb = c & -c;
  /* Match if all the bits above are 1's, i.e. no third transition.  */
  return c == -lsb;
}
1656
/* Return 1 if the operand is a constant that is a PowerPC64 mask.
   It is if there are no more than one 1->0 or 0->1 transitions.
   Reject all zeros, since zero should have been optimized away and
   confuses the making of MB and ME.  */

int
mask64_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb;

      c = INTVAL (op);

      /* Reject all zeros.  */
      if (c == 0)
	return 0;

      /* We don't change the number of transitions by inverting,
	 so make sure we start with the LS bit zero.  */
      if (c & 1)
	c = ~c;

      /* Find the transition, and check that all bits above are 1's
	 (i.e. there is at most one transition).  */
      lsb = c & -c;
      return c == -lsb;
    }
  return 0;
}
1688
/* Like mask64_operand, but allow up to three transitions.  This
   predicate is used by insn patterns that generate two rldicl or
   rldicr machine insns.  */

int
mask64_2_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb;

      c = INTVAL (op);

      /* Disallow all zeros.  */
      if (c == 0)
	return 0;

      /* We don't change the number of transitions by inverting,
	 so make sure we start with the LS bit zero.  */
      if (c & 1)
	c = ~c;

      /* Find the first transition.  */
      lsb = c & -c;

      /* Invert to look for a second transition.  */
      c = ~c;

      /* Erase first transition.  */
      c &= -lsb;

      /* Find the second transition.  */
      lsb = c & -c;

      /* Invert to look for a third transition.  */
      c = ~c;

      /* Erase second transition.  */
      c &= -lsb;

      /* Find the third transition (if any).  */
      lsb = c & -c;

      /* Match if all the bits above are 1's (or c is zero), i.e. at
	 most three transitions in all.  */
      return c == -lsb;
    }
  return 0;
}
1739
/* Generates shifts and masks for a pair of rldicl or rldicr insns to
   implement ANDing by the mask IN (a CONST_INT with more than one
   transition -- see mask64_2_operand).  Fills OUT with four operands:
   OUT[0] = first rotate count, OUT[1] = first mask,
   OUT[2] = rotate count back, OUT[3] = second mask.
   Aborts on hosts with HOST_WIDE_INT narrower than 64 bits.  */
void
build_mask64_2_operands (in, out)
     rtx in;
     rtx *out;
{
#if HOST_BITS_PER_WIDE_INT >= 64
  unsigned HOST_WIDE_INT c, lsb, m1, m2;
  int shift;

  if (GET_CODE (in) != CONST_INT)
    abort ();

  c = INTVAL (in);
  if (c & 1)
    {
      /* Assume c initially something like 0x00fff000000fffff.  The idea
	 is to rotate the word so that the middle ^^^^^^ group of zeros
	 is at the MS end and can be cleared with an rldicl mask.  We then
	 rotate back and clear off the MS        ^^ group of zeros with a
	 second rldicl.  */
      c = ~c;			/*   c == 0xff000ffffff00000 */
      lsb = c & -c;		/* lsb == 0x0000000000100000 */
      m1 = -lsb;		/*  m1 == 0xfffffffffff00000 */
      c = ~c;			/*   c == 0x00fff000000fffff */
      c &= -lsb;		/*   c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/*   c == 0xff000fffffffffff */
      c &= -lsb;		/*   c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 <<= 64 - shift;	/*  m1 == 0xffffff0000000000 */
      m1 = ~m1;			/*  m1 == 0x000000ffffffffff */
      m2 = ~c;			/*  m2 == 0x00ffffffffffffff */
    }
  else
    {
      /* Assume c initially something like 0xff000f0000000000.  The idea
	 is to rotate the word so that the     ^^^  middle group of zeros
	 is at the LS end and can be cleared with an rldicr mask.  We then
	 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
	 a second rldicr.  */
      lsb = c & -c;		/* lsb == 0x0000010000000000 */
      m2 = -lsb;		/*  m2 == 0xffffff0000000000 */
      c = ~c;			/*   c == 0x00fff0ffffffffff */
      c &= -lsb;		/*   c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/*   c == 0xff000fffffffffff */
      c &= -lsb;		/*   c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 = ~c;			/*  m1 == 0x00ffffffffffffff */
      m1 >>= shift;		/*  m1 == 0x0000000000000fff */
      m1 = ~m1;			/*  m1 == 0xfffffffffffff000 */
    }

  /* Note that when we only have two 0->1 and 1->0 transitions, one of the
     masks will be all 1's.  We are guaranteed more than one transition.  */
  out[0] = GEN_INT (64 - shift);
  out[1] = GEN_INT (m1);
  out[2] = GEN_INT (shift);
  out[3] = GEN_INT (m2);
#else
  (void)in;
  (void)out;
  abort ();
#endif
}
1811
1812 /* Return 1 if the operand is either a non-special register or a constant
1813 that can be used as the operand of a PowerPC64 logical AND insn. */
1814
1815 int
1816 and64_operand (op, mode)
1817 rtx op;
1818 enum machine_mode mode;
1819 {
1820 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1821 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1822
1823 return (logical_operand (op, mode) || mask64_operand (op, mode));
1824 }
1825
1826 /* Like the above, but also match constants that can be implemented
1827 with two rldicl or rldicr insns. */
1828
1829 int
1830 and64_2_operand (op, mode)
1831 rtx op;
1832 enum machine_mode mode;
1833 {
1834 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1835 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
1836
1837 return logical_operand (op, mode) || mask64_2_operand (op, mode);
1838 }
1839
1840 /* Return 1 if the operand is either a non-special register or a
1841 constant that can be used as the operand of an RS/6000 logical AND insn. */
1842
1843 int
1844 and_operand (op, mode)
1845 rtx op;
1846 enum machine_mode mode;
1847 {
1848 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1849 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1850
1851 return (logical_operand (op, mode) || mask_operand (op, mode));
1852 }
1853
1854 /* Return 1 if the operand is a general register or memory operand. */
1855
1856 int
1857 reg_or_mem_operand (op, mode)
1858 rtx op;
1859 enum machine_mode mode;
1860 {
1861 return (gpc_reg_operand (op, mode)
1862 || memory_operand (op, mode)
1863 || volatile_mem_operand (op, mode));
1864 }
1865
/* Return 1 if the operand is a general register or memory operand without
   pre_inc or pre_dec which produces invalid form of PowerPC lwa
   instruction.  */

int
lwa_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  rtx inner = op;

  /* After reload, look through a SUBREG to the underlying object.  */
  if (reload_completed && GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  /* lwa is a DS-form instruction: it has no update forms, and any
     constant displacement must be a multiple of 4.  */
  return gpc_reg_operand (inner, mode)
    || (memory_operand (inner, mode)
	&& GET_CODE (XEXP (inner, 0)) != PRE_INC
	&& GET_CODE (XEXP (inner, 0)) != PRE_DEC
	&& (GET_CODE (XEXP (inner, 0)) != PLUS
	    || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
	    || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
}
1888
1889 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
1890
1891 int
1892 symbol_ref_operand (op, mode)
1893 rtx op;
1894 enum machine_mode mode;
1895 {
1896 if (mode != VOIDmode && GET_MODE (op) != mode)
1897 return 0;
1898
1899 return (GET_CODE (op) == SYMBOL_REF);
1900 }
1901
1902 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1903 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
1904
1905 int
1906 call_operand (op, mode)
1907 rtx op;
1908 enum machine_mode mode;
1909 {
1910 if (mode != VOIDmode && GET_MODE (op) != mode)
1911 return 0;
1912
1913 return (GET_CODE (op) == SYMBOL_REF
1914 || (GET_CODE (op) == REG
1915 && (REGNO (op) == LINK_REGISTER_REGNUM
1916 || REGNO (op) == COUNT_REGISTER_REGNUM
1917 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
1918 }
1919
1920 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1921 this file and the function is not weakly defined. */
1922
1923 int
1924 current_file_function_operand (op, mode)
1925 rtx op;
1926 enum machine_mode mode ATTRIBUTE_UNUSED;
1927 {
1928 return (GET_CODE (op) == SYMBOL_REF
1929 && (SYMBOL_REF_FLAG (op)
1930 || (op == XEXP (DECL_RTL (current_function_decl), 0)
1931 && ! DECL_WEAK (current_function_decl))));
1932 }
1933
/* Return 1 if this operand is a valid input for a move insn.  */

int
input_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Memory is always valid.  */
  if (memory_operand (op, mode))
    return 1;

  /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  /* For floating-point, easy constants are valid.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && CONSTANT_P (op)
      && easy_fp_constant (op, mode))
    return 1;

  /* Allow any integer constant.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && (GET_CODE (op) == CONST_INT
	  || GET_CODE (op) == CONST_DOUBLE))
    return 1;

  /* For floating-point or multi-word mode, the only remaining valid type
     is a register.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    return register_operand (op, mode);

  /* The only cases left are integral modes one word or smaller (we
     do not get called for MODE_CC values).  These can be in any
     register.  */
  if (register_operand (op, mode))
    return 1;

  /* A SYMBOL_REF referring to the TOC is valid.  */
  if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
    return 1;

  /* A constant pool expression (relative to the TOC) is valid */
  if (TOC_RELATIVE_EXPR_P (op))
    return 1;

  /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
     to be valid.  */
  if (DEFAULT_ABI == ABI_V4
      && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
      && small_data_operand (op, Pmode))
    return 1;

  /* Anything else is invalid as a move source.  */
  return 0;
}
1990
/* Return 1 for an operand in small memory on V.4/eabi, i.e. one that
   can be addressed relative to the small-data base register.  Always 0
   on non-ELF targets.  */

int
small_data_operand (op, mode)
     rtx op ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
#if TARGET_ELF
  rtx sym_ref;

  /* Only the sysv/eabi small-data models place symbols there.  */
  if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
    return 0;

  if (DEFAULT_ABI != ABI_V4)
    return 0;

  if (GET_CODE (op) == SYMBOL_REF)
    sym_ref = op;

  /* Otherwise only (const (plus symbol_ref const_int)) qualifies.  */
  else if (GET_CODE (op) != CONST
	   || GET_CODE (XEXP (op, 0)) != PLUS
	   || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
	   || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
    return 0;

  else
    {
      rtx sum = XEXP (op, 0);
      HOST_WIDE_INT summand;

      /* We have to be careful here, because it is the referenced address
	 that must be 32k from _SDA_BASE_, not just the symbol.  */
      summand = INTVAL (XEXP (sum, 1));
      if (summand < 0 || summand > g_switch_value)
	return 0;

      sym_ref = XEXP (sum, 0);
    }

  /* NOTE(review): small-data symbol names appear to be tagged with a
     leading '@' elsewhere (section-info encoding) -- confirm against
     this target's ENCODE_SECTION_INFO.  */
  if (*XSTR (sym_ref, 0) != '@')
    return 0;

  return 1;

#else
  return 0;
#endif
}
2039 \f
/* Helper for constant_pool_expr_p and toc_relative_expr_p.  Walk OP
   recursively, setting *HAVE_SYM when a constant-pool symbol whose
   constant must be placed in the TOC is seen and *HAVE_TOC when the
   TOC base label is seen.  Return 1 if OP consists only of such
   symbols, the TOC label, CONST_INTs, and PLUS/MINUS/CONST wrappers
   over them.  */
static int
constant_pool_expr_1 (op, have_sym, have_toc)
    rtx op;
    int *have_sym;
    int *have_toc;
{
  switch (GET_CODE(op))
    {
    case SYMBOL_REF:
      if (CONSTANT_POOL_ADDRESS_P (op))
	{
	  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
	    {
	      *have_sym = 1;
	      return 1;
	    }
	  else
	    return 0;
	}
      else if (! strcmp (XSTR (op, 0), toc_label_name))
	{
	  *have_toc = 1;
	  return 1;
	}
      else
	return 0;
    case PLUS:
    case MINUS:
      /* Both halves must qualify.  */
      return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
	      && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
    case CONST:
      return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
    case CONST_INT:
      return 1;
    default:
      return 0;
    }
}
2078
2079 int
2080 constant_pool_expr_p (op)
2081 rtx op;
2082 {
2083 int have_sym = 0;
2084 int have_toc = 0;
2085 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2086 }
2087
2088 int
2089 toc_relative_expr_p (op)
2090 rtx op;
2091 {
2092 int have_sym = 0;
2093 int have_toc = 0;
2094 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2095 }
2096
2097 /* Try machine-dependent ways of modifying an illegitimate address
2098 to be legitimate. If we find one, return the new, valid address.
2099 This is used from only one place: `memory_address' in explow.c.
2100
2101 OLDX is the address as it was before break_out_memory_refs was
2102 called. In some cases it is useful to look at this to decide what
2103 needs to be done.
2104
2105 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2106
2107 It is always safe for this function to do nothing. It exists to
2108 recognize opportunities to optimize the output.
2109
2110 On RS/6000, first check for the sum of a register with a constant
2111 integer that is out of range. If so, generate code to add the
2112 constant with the low-order 16 bits masked to the register and force
2113 this result into another register (this can be done with `cau').
2114 Then generate an address of REG+(CONST&0xffff), allowing for the
2115 possibility of bit 16 being a one.
2116
2117 Then check for the sum of a register and something not constant, try to
2118 load the other things into a register and return the sum. */
rtx
rs6000_legitimize_address (x, oldx, mode)
    rtx x;
    rtx oldx ATTRIBUTE_UNUSED;
    enum machine_mode mode;
{
  /* reg + constant that does not fit a signed 16-bit displacement:
     materialize the high part into a register and keep the sign-extended
     low 16 bits as the offset.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
    {
      HOST_WIDE_INT high_int, low_int;
      rtx sum;
      /* low_int is the offset sign-extended from 16 bits; high_int
	 absorbs the remainder, including the carry when bit 15 is set.  */
      low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
      high_int = INTVAL (XEXP (x, 1)) - low_int;
      sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
					 GEN_INT (high_int)), 0);
      return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
    }
  /* reg + non-constant: force the second term into a register so the
     result is a valid indexed (reg+reg) address.  Restricted to modes
     that permit indexed addressing (see rs6000_legitimate_address).  */
  else if (GET_CODE (x) == PLUS
	   && GET_CODE (XEXP (x, 0)) == REG
	   && GET_CODE (XEXP (x, 1)) != CONST_INT
	   && GET_MODE_NUNITS (mode) == 1
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	       || TARGET_POWERPC64
	       || (mode != DFmode && mode != TFmode))
	   && (TARGET_POWERPC64 || mode != DImode)
	   && mode != TImode)
    {
      return gen_rtx_PLUS (Pmode, XEXP (x, 0),
			   force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
    }
  else if (ALTIVEC_VECTOR_MODE (mode))
    {
      rtx reg;

      /* Make sure both operands are registers.  */
      if (GET_CODE (x) == PLUS)
	return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
			     force_reg (Pmode, XEXP (x, 1)));

      reg = force_reg (Pmode, x);
      return reg;
    }
  else if (SPE_VECTOR_MODE (mode))
    {
      /* We accept [reg + reg] and [reg + OFFSET].  */

      if (GET_CODE (x) == PLUS)
	{
	  rtx op1 = XEXP (x, 0);
	  rtx op2 = XEXP (x, 1);

	  op1 = force_reg (Pmode, op1);

	  /* Offsets that SPE_CONST_OFFSET_OK rejects must also go in a
	     register.  */
	  if (GET_CODE (op2) != REG
	      && (GET_CODE (op2) != CONST_INT
		  || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
	    op2 = force_reg (Pmode, op2);

	  return gen_rtx_PLUS (Pmode, op1, op2);
	}

      return force_reg (Pmode, x);
    }
  /* ELF without a TOC, non-PIC: address a symbolic constant with a
     HIGH/LO_SUM pair.  */
  else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && GET_MODE_NUNITS (mode) == 1
	   && (GET_MODE_BITSIZE (mode) <= 32
	       || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_elf_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  /* Likewise for Darwin without a TOC.  */
  else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
	   && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
	   && mode != DImode
	   && mode != TImode)
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_macho_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  /* Constant-pool entries that go in the TOC become TOC-relative
     references.  */
  else if (TARGET_TOC
	   && CONSTANT_POOL_EXPR_P (x)
	   && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
    {
      return create_TOC_reference (x);
    }
  else
    /* Nothing applies; tell the caller to fall back.  */
    return NULL_RTX;
}
2218
2219 /* The convention appears to be to define this wherever it is used.
2220 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2221 is now used here. */
2222 #ifndef REG_MODE_OK_FOR_BASE_P
2223 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2224 #endif
2225
2226 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2227 replace the input X, or the original X if no replacement is called for.
2228 The output parameter *WIN is 1 if the calling macro should goto WIN,
2229 0 if it should not.
2230
2231 For RS/6000, we wish to handle large displacements off a base
   register by splitting the addend across an addi/addis and the mem insn.
2233 This cuts number of extra insns needed from 3 to 1.
2234
2235 On Darwin, we use this to generate code for floating point constants.
2236 A movsf_low is generated so we wind up with 2 instructions rather than 3.
2237 The Darwin code is inside #if TARGET_MACHO because only then is
2238 machopic_function_base_name() defined. */
rtx
rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
    rtx x;
    enum machine_mode mode;
    int opnum;
    int type;
    int ind_levels ATTRIBUTE_UNUSED;
    int *win;
{
  /* We must recognize output that we have already generated ourselves.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      /* Reload the inner (reg + high) sum into a base register; the
	 outer constant stays as the displacement.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
                   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN && flag_pic
      && GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
      && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
    {
      /* Result of previous invocation of this function on Darwin
         floating point constant.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* Large displacement off a hard base register: split it into a high
     part reloaded into the base and a 16-bit low part left in the mem.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && !SPE_VECTOR_MODE (mode)
      && !ALTIVEC_VECTOR_MODE (mode))
    {
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      /* low is VAL sign-extended from 16 bits; high is the remainder,
	 sign-extended from 32 bits.  */
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT high
        = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
        {
          *win = 0;
          return x;
        }

      /* Reload the high part into a base reg; leave the low part
         in the mem directly.  */

      x = gen_rtx_PLUS (GET_MODE (x),
                        gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
                                      GEN_INT (high)),
                        GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
                   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#if TARGET_MACHO
  if (GET_CODE (x) == SYMBOL_REF
      && DEFAULT_ABI == ABI_DARWIN
      && !ALTIVEC_VECTOR_MODE (mode)
      && flag_pic)
    {
      /* Darwin load of floating point constant.  Build a
	 (LO_SUM (PLUS pic_base (HIGH offset)) offset) form, where
	 offset is the symbol minus the picbase symbol.  */
      rtx offset = gen_rtx (CONST, Pmode,
		    gen_rtx (MINUS, Pmode, x,
		      gen_rtx (SYMBOL_REF, Pmode,
			machopic_function_base_name ())));
      x = gen_rtx (LO_SUM, GET_MODE (x),
	    gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
	      gen_rtx (HIGH, Pmode, offset)), offset);
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* TOC-resident constant-pool entries are addressed TOC-relative.  */
  if (TARGET_TOC
      && CONSTANT_POOL_EXPR_P (x)
      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
    {
      (x) = create_TOC_reference (x);
      *win = 1;
      return x;
    }
  /* No transformation applied; let reload deal with X as-is.  */
  *win = 0;
  return x;
}
2349
2350 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2351 that is a valid memory address for an instruction.
2352 The MODE argument is the machine mode for the MEM expression
2353 that wants to use this address.
2354
   On the RS/6000, there are four valid addresses: a SYMBOL_REF that
2356 refers to a constant pool entry of an address (or the sum of it
2357 plus a constant), a short (16-bit signed) constant plus a register,
2358 the sum of two registers, or a register indirect, possibly with an
2359 auto-increment. For DFmode and DImode with a constant plus register,
2360 we must ensure that both words are addressable or PowerPC64 with offset
2361 word aligned.
2362
2363 For modes spanning multiple registers (DFmode in 32-bit GPRs,
2364 32-bit DImode, TImode), indexed addressing cannot be used because
2365 adjacent memory cells are accessed by adding word-sized offsets
2366 during assembly output. */
int
rs6000_legitimate_address (mode, x, reg_ok_strict)
    enum machine_mode mode;
    rtx x;
    int reg_ok_strict;
{
  /* Plain register indirect.  */
  if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
    return 1;
  /* Pre-increment / pre-decrement of a base register; not allowed for
     vector modes.  */
  if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
      && !ALTIVEC_VECTOR_MODE (mode)
      && !SPE_VECTOR_MODE (mode)
      && TARGET_UPDATE
      && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
    return 1;
  /* Small-data references.  */
  if (LEGITIMATE_SMALL_DATA_P (mode, x))
    return 1;
  /* Constant-pool (TOC) addresses.  */
  if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
    return 1;
  /* If not REG_OK_STRICT (before reload) let pass any stack offset.  */
  if (! reg_ok_strict
      && GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return 1;
  /* Base register plus 16-bit offset.  */
  if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
    return 1;
  /* Indexed (reg + reg), only for modes that live in a single register;
     multi-register modes are accessed with word offsets at assembly
     output time and so cannot use indexing.  */
  if (mode != TImode
      && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	  || TARGET_POWERPC64
	  || (mode != DFmode && mode != TFmode))
      && (TARGET_POWERPC64 || mode != DImode)
      && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
    return 1;
  /* HIGH / LO_SUM pairs.  */
  if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
    return 1;
  return 0;
}
2405 \f
2406 /* Try to output insns to set TARGET equal to the constant C if it can
2407 be done in less than N insns. Do all computations in MODE.
2408 Returns the place where the output has been placed if it can be
2409 done and the insns have been emitted. If it would take more than N
   insns, zero is returned and no insns are emitted.  */
2411
rtx
rs6000_emit_set_const (dest, mode, source, n)
     rtx dest, source;
     enum machine_mode mode;
     int n ATTRIBUTE_UNUSED;
{
  rtx result, insn, set;
  HOST_WIDE_INT c0, c1;

  if (mode == QImode || mode == HImode)
    {
      /* Narrow modes always fit in a single immediate move.  */
      if (dest == NULL)
        dest = gen_reg_rtx (mode);
      emit_insn (gen_rtx_SET (VOIDmode, dest, source));
      return dest;
    }
  else if (mode == SImode)
    {
      /* Load the high 16 bits, then OR in the low 16 bits.  */
      result = no_new_pseudos ? dest : gen_reg_rtx (SImode);

      emit_insn (gen_rtx_SET (VOIDmode, result,
			      GEN_INT (INTVAL (source)
                                       & (~ (HOST_WIDE_INT) 0xffff))));
      emit_insn (gen_rtx_SET (VOIDmode, dest,
			      gen_rtx_IOR (SImode, result,
					   GEN_INT (INTVAL (source)
						    & 0xffff))));
      result = dest;
    }
  else if (mode == DImode)
    {
      /* Split SOURCE into low (c0) and high (c1) halves and defer to
	 rs6000_emit_set_long_const.  */
      if (GET_CODE (source) == CONST_INT)
	{
	  c0 = INTVAL (source);
	  /* Sign-extend into the high half.  */
	  c1 = -(c0 < 0);
	}
      else if (GET_CODE (source) == CONST_DOUBLE)
	{
#if HOST_BITS_PER_WIDE_INT >= 64
	  /* A wide host int already holds the whole value; the high
	     half is just its sign.  */
	  c0 = CONST_DOUBLE_LOW (source);
	  c1 = -(c0 < 0);
#else
	  c0 = CONST_DOUBLE_LOW (source);
	  c1 = CONST_DOUBLE_HIGH (source);
#endif
	}
      else
	abort ();

      result = rs6000_emit_set_long_const (dest, c0, c1);
    }
  else
    abort ();

  /* Attach a REG_EQUAL note recording the full constant so later passes
     know what the emitted sequence computes.  */
  insn = get_last_insn ();
  set = single_set (insn);
  if (! CONSTANT_P (SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, source);

  return result;
}
2472
2473 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2474 fall back to a straight forward decomposition. We do this to avoid
2475 exponential run times encountered when looking for longer sequences
2476 with rs6000_emit_set_const. */
static rtx
rs6000_emit_set_long_const (dest, c1, c2)
     rtx dest;
     HOST_WIDE_INT c1, c2;
{
  if (!TARGET_POWERPC64)
    {
      /* 32-bit target: move each 32-bit half into its subword of DEST
	 independently.  */
      rtx operand1, operand2;

      operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
					DImode);
      operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
					DImode);
      emit_move_insn (operand1, GEN_INT (c1));
      emit_move_insn (operand2, GEN_INT (c2));
    }
  else
    {
      /* Decompose the constant into four 16-bit chunks, ud1 (lowest)
	 through ud4 (highest), and emit only the moves/ORs/shifts
	 needed for the chunks that aren't pure sign extension.  */
      HOST_WIDE_INT ud1, ud2, ud3, ud4;

      ud1 = c1 & 0xffff;
      ud2 = (c1 & 0xffff0000) >> 16;
#if HOST_BITS_PER_WIDE_INT >= 64
      /* On a 64-bit host the whole value is in c1; recover the high
	 half into c2.  */
      c2 = c1 >> 32;
#endif
      ud3 = c2 & 0xffff;
      ud4 = (c2 & 0xffff0000) >> 16;

      /* Value is a sign-extended 16-bit constant: one move.  */
      if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
	  || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
	{
	  if (ud1 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) -  0x8000)));
	  else
	    emit_move_insn (dest, GEN_INT (ud1));
	}

      /* Value is a sign-extended 32-bit constant: load the upper 16
	 bits, then OR in the low 16 if nonzero.  */
      else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
	       || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
	{
	  if (ud2 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud2 << 16));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      /* Value is a sign-extended 48-bit constant: build the upper 32
	 bits, shift left 16, then OR in ud1.  */
      else if ((ud4 == 0xffff && (ud3 & 0x8000))
	       || (ud4 == 0 && ! (ud3 & 0x8000)))
	{
	  if (ud3 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud3 << 16));

	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      /* Full 64-bit constant: build the upper 32 bits, shift left 32,
	 then OR in the lower chunks.  */
      else
	{
	  if (ud4 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud4 << 16));

	  if (ud3 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));

	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
					       GEN_INT (ud2 << 16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
    }
  return dest;
}
2561
2562 /* Emit a move from SOURCE to DEST in mode MODE. */
void
rs6000_emit_move (dest, source, mode)
     rtx dest;
     rtx source;
     enum machine_mode mode;
{
  rtx operands[2];
  operands[0] = dest;
  operands[1] = source;

  /* Sanity checks.  Check that we get CONST_DOUBLE only when we should.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      /* FIXME.  This should never happen.  */
      /* Since it seems that it does, do the safe thing and convert
	 to a CONST_INT.  */
      operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
    }
  /* A CONST_DOUBLE whose value would fit a CONST_INT indicates a bug
     upstream.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && ((CONST_DOUBLE_HIGH (operands[1]) == 0
	   && CONST_DOUBLE_LOW (operands[1]) >= 0)
	  || (CONST_DOUBLE_HIGH (operands[1]) == -1
	      && CONST_DOUBLE_LOW (operands[1]) < 0)))
    abort ();

  /* Check if GCC is setting up a block move that will end up using FP
     registers as temporaries.  We must make sure this is acceptable.  */
  if (GET_CODE (operands[0]) == MEM
      && GET_CODE (operands[1]) == MEM
      && mode == DImode
      && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
	  || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
      && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
					    ? 32 : MEM_ALIGN (operands[0])))
	    || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
					       ? 32
					       : MEM_ALIGN (operands[1]))))
      && ! MEM_VOLATILE_P (operands [0])
      && ! MEM_VOLATILE_P (operands [1]))
    {
      /* Split the DImode mem-to-mem copy into two SImode copies so no
	 FP temporary is needed.  */
      emit_move_insn (adjust_address (operands[0], SImode, 0),
		      adjust_address (operands[1], SImode, 0));
      emit_move_insn (adjust_address (operands[0], SImode, 4),
		      adjust_address (operands[1], SImode, 4));
      return;
    }

  if (!no_new_pseudos)
    {
      /* Load narrow memory sources with a zero-extending word-mode
	 load; the value then comes from the low part of the pseudo.  */
      if (GET_CODE (operands[1]) == MEM && optimize > 0
	  && (mode == QImode || mode == HImode || mode == SImode)
	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
	{
	  rtx reg = gen_reg_rtx (word_mode);

	  emit_insn (gen_rtx_SET (word_mode, reg,
				  gen_rtx_ZERO_EXTEND (word_mode,
						       operands[1])));
	  operands[1] = gen_lowpart (mode, reg);
	}
      /* Avoid mem-to-mem moves by forcing the source into a register
	 when the destination is not one.  */
      if (GET_CODE (operands[0]) != REG)
	operands[1] = force_reg (mode, operands[1]);
    }

  if (mode == SFmode && ! TARGET_POWERPC
      && TARGET_HARD_FLOAT && TARGET_FPRS
      && GET_CODE (operands[0]) == MEM)
    {
      int regnum;

      if (reload_in_progress || reload_completed)
	regnum = true_regnum (operands[1]);
      else if (GET_CODE (operands[1]) == REG)
	regnum = REGNO (operands[1]);
      else
	regnum = -1;

      /* If operands[1] is a register, on POWER it may have
	 double-precision data in it, so truncate it to single
	 precision.  */
      if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
	{
	  rtx newreg;
	  newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
	  emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
	  operands[1] = newreg;
	}
    }

  /* Handle the case where reload calls us with an invalid address;
     and the case of CONSTANT_P_RTX.  */
  if (!ALTIVEC_VECTOR_MODE (mode)
      && (! general_operand (operands[1], mode)
	  || ! nonimmediate_operand (operands[0], mode)
	  || GET_CODE (operands[1]) == CONSTANT_P_RTX))
    {
      emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
      return;
    }

  /* FIXME:  In the long term, this switch statement should go away
     and be replaced by a sequence of tests based on things like
     mode == Pmode.  */
  switch (mode)
    {
    case HImode:
    case QImode:
      /* Non-CONST_INT constants must come from the constant pool.  */
      if (CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != CONST_INT)
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case TFmode:
    case DFmode:
    case SFmode:
      /* FP constants that can't be synthesized cheaply go in memory.  */
      if (CONSTANT_P (operands[1])
	  && ! easy_fp_constant (operands[1], mode))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case V16QImode:
    case V8HImode:
    case V4SFmode:
    case V4SImode:
    case V4HImode:
    case V2SFmode:
    case V2SImode:
    case V1DImode:
      /* Likewise for non-easy vector constants.  */
      if (CONSTANT_P (operands[1])
	  && !easy_vector_constant (operands[1]))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case SImode:
    case DImode:
      /* Use default pattern for address of ELF small data */
      if (TARGET_ELF
	  && mode == Pmode
	  && DEFAULT_ABI == ABI_V4
	  && (GET_CODE (operands[1]) == SYMBOL_REF
	      || GET_CODE (operands[1]) == CONST)
	  && small_data_operand (operands[1], mode))
	{
	  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	  return;
	}

      /* V.4 small-model PIC: load GOT-addressable operands through
	 the GOT.  */
      if (DEFAULT_ABI == ABI_V4
	  && mode == Pmode && mode == SImode
	  && flag_pic == 1 && got_operand (operands[1], mode))
	{
	  emit_insn (gen_movsi_got (operands[0], operands[1]));
	  return;
	}

      /* No TOC, non-PIC: build symbolic constants with a high/low
	 instruction pair.  */
      if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
	  && TARGET_NO_TOC && ! flag_pic
	  && mode == Pmode
	  && CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != HIGH
	  && GET_CODE (operands[1]) != CONST_INT)
	{
	  rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));

	  /* If this is a function address on -mcall-aixdesc,
	     convert it to the address of the descriptor.  */
	  if (DEFAULT_ABI == ABI_AIX
	      && GET_CODE (operands[1]) == SYMBOL_REF
	      && XSTR (operands[1], 0)[0] == '.')
	    {
	      /* Strip the leading dots and copy the symbol's flags to
		 the new descriptor reference.  */
	      const char *name = XSTR (operands[1], 0);
	      rtx new_ref;
	      while (*name == '.')
		name++;
	      new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
	      CONSTANT_POOL_ADDRESS_P (new_ref)
		= CONSTANT_POOL_ADDRESS_P (operands[1]);
	      SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
	      SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
	      operands[1] = new_ref;
	    }

	  if (DEFAULT_ABI == ABI_DARWIN)
	    {
	      emit_insn (gen_macho_high (target, operands[1]));
	      emit_insn (gen_macho_low (operands[0], target, operands[1]));
	      return;
	    }

	  emit_insn (gen_elf_high (target, operands[1]));
	  emit_insn (gen_elf_low (operands[0], target, operands[1]));
	  return;
	}

      /* If this is a SYMBOL_REF that refers to a constant pool entry,
	 and we have put it in the TOC, we just need to make a TOC-relative
	 reference to it.  */
      if (TARGET_TOC
	  && GET_CODE (operands[1]) == SYMBOL_REF
	  && CONSTANT_POOL_EXPR_P (operands[1])
	  && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
					      get_pool_mode (operands[1])))
	{
	  operands[1] = create_TOC_reference (operands[1]);
	}
      /* Expensive constants (multi-insn CONST_INTs, non-easy FP values,
	 or anything headed for an FP register) go through the constant
	 pool instead.  */
      else if (mode == Pmode
	       && CONSTANT_P (operands[1])
	       && ((GET_CODE (operands[1]) != CONST_INT
		    && ! easy_fp_constant (operands[1], mode))
		   || (GET_CODE (operands[1]) == CONST_INT
		       && num_insns_constant (operands[1], mode) > 2)
		   || (GET_CODE (operands[0]) == REG
		       && FP_REGNO_P (REGNO (operands[0]))))
	       && GET_CODE (operands[1]) != HIGH
	       && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
	       && ! TOC_RELATIVE_EXPR_P (operands[1]))
	{
	  /* Emit a USE operation so that the constant isn't deleted if
	     expensive optimizations are turned on because nobody
	     references it.  This should only be done for operands that
	     contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
	     This should not be done for operands that contain LABEL_REFs.
	     For now, we just handle the obvious case.  */
	  if (GET_CODE (operands[1]) != LABEL_REF)
	    emit_insn (gen_rtx_USE (VOIDmode, operands[1]));

#if TARGET_MACHO
	  /* Darwin uses a special PIC legitimizer.  */
	  if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
	    {
	      operands[1] =
		rs6000_machopic_legitimize_pic_address (operands[1], mode,
							operands[0]);
	      if (operands[0] != operands[1])
		emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	      return;
	    }
#endif

	  /* If we are to limit the number of things we put in the TOC and
	     this is a symbol plus a constant we can add in one insn,
	     just put the symbol in the TOC and add the constant.  Don't do
	     this if reload is in progress.  */
	  if (GET_CODE (operands[1]) == CONST
	      && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
	      && GET_CODE (XEXP (operands[1], 0)) == PLUS
	      && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
	      && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
		  || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
	      && ! side_effects_p (operands[0]))
	    {
	      rtx sym =
		force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
	      rtx other = XEXP (XEXP (operands[1], 0), 1);

	      sym = force_reg (mode, sym);
	      if (mode == SImode)
		emit_insn (gen_addsi3 (operands[0], sym, other));
	      else
		emit_insn (gen_adddi3 (operands[0], sym, other));
	      return;
	    }

	  operands[1] = force_const_mem (mode, operands[1]);

	  /* If the pool entry landed in the TOC, reference it
	     TOC-relative and mark the MEM unchanging.  */
	  if (TARGET_TOC
	      && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
	      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
		get_pool_constant (XEXP (operands[1], 0)),
		get_pool_mode (XEXP (operands[1], 0))))
	    {
	      operands[1]
		= gen_rtx_MEM (mode,
			       create_TOC_reference (XEXP (operands[1], 0)));
	      set_mem_alias_set (operands[1], get_TOC_alias_set ());
	      RTX_UNCHANGING_P (operands[1]) = 1;
	    }
	}
      break;

    case TImode:
      /* TImode memory operands must be register-indirect; copy any other
	 address into a register first.  */
      if (GET_CODE (operands[0]) == MEM
	  && GET_CODE (XEXP (operands[0], 0)) != REG
	  && ! reload_in_progress)
	operands[0]
	  = replace_equiv_address (operands[0],
				   copy_addr_to_reg (XEXP (operands[0], 0)));

      if (GET_CODE (operands[1]) == MEM
	  && GET_CODE (XEXP (operands[1], 0)) != REG
	  && ! reload_in_progress)
	operands[1]
	  = replace_equiv_address (operands[1],
				   copy_addr_to_reg (XEXP (operands[1], 0)));
      break;

    default:
      abort ();
    }

  /* Above, we may have called force_const_mem which may have returned
     an invalid address.  If we can, fix this up; otherwise, reload will
     have to deal with it.  */
  if (GET_CODE (operands[1]) == MEM
      && ! memory_address_p (mode, XEXP (operands[1], 0))
      && ! reload_in_progress)
    operands[1] = adjust_address (operands[1], mode, 0);

  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
  return;
}
2877 \f
2878 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2879 for a call to a function whose data type is FNTYPE.
2880 For a library call, FNTYPE is 0.
2881
2882 For incoming args we set the number of arguments in the prototype large
2883 so we never return a PARALLEL. */
2884
void
init_cumulative_args (cum, fntype, libname, incoming)
     CUMULATIVE_ARGS *cum;
     tree fntype;
     rtx libname ATTRIBUTE_UNUSED;
     int incoming;
{
  static CUMULATIVE_ARGS zero_cumulative;

  /* Start from an all-zero state, then set the first free register of
     each argument class.  */
  *cum = zero_cumulative;
  cum->words = 0;
  cum->fregno = FP_ARG_MIN_REG;
  cum->vregno = ALTIVEC_ARG_MIN_REG;
  cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
  cum->call_cookie = CALL_NORMAL;
  cum->sysv_gregno = GP_ARG_MIN_REG;

  if (incoming)
    cum->nargs_prototype = 1000;		/* don't return a PARALLEL */

  else if (cum->prototype)
    /* Named args from the prototype (the list includes a trailing
       void_type node, hence the -1), plus one for the hidden return
       slot when the result is returned in memory.  */
    cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
			    + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
			       || RETURN_IN_MEMORY (TREE_TYPE (fntype))));

  else
    cum->nargs_prototype = 0;

  cum->orig_nargs = cum->nargs_prototype;

  /* Check for a longcall attribute.  */
  if (fntype
      && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
      && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
    cum->call_cookie = CALL_LONG;

  if (TARGET_DEBUG_ARG)
    {
      fprintf (stderr, "\ninit_cumulative_args:");
      if (fntype)
	{
	  tree ret_type = TREE_TYPE (fntype);
	  fprintf (stderr, " ret code = %s,",
		   tree_code_name[ (int)TREE_CODE (ret_type) ]);
	}

      if (cum->call_cookie & CALL_LONG)
	fprintf (stderr, " longcall,");

      fprintf (stderr, " proto = %d, nargs = %d\n",
	       cum->prototype, cum->nargs_prototype);
    }
}
2938 \f
2939 /* If defined, a C expression which determines whether, and in which
2940 direction, to pad out an argument with extra space. The value
2941 should be of type `enum direction': either `upward' to pad above
2942 the argument, `downward' to pad below, or `none' to inhibit
2943 padding.
2944
2945 For the AIX ABI structs are always stored left shifted in their
2946 argument slot. */
2947
2948 enum direction
2949 function_arg_padding (mode, type)
2950 enum machine_mode mode;
2951 tree type;
2952 {
2953 if (type != 0 && AGGREGATE_TYPE_P (type))
2954 return upward;
2955
2956 /* This is the default definition. */
2957 return (! BYTES_BIG_ENDIAN
2958 ? upward
2959 : ((mode == BLKmode
2960 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2961 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
2962 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
2963 ? downward : upward));
2964 }
2965
2966 /* If defined, a C expression that gives the alignment boundary, in bits,
2967 of an argument with the specified mode and type. If it is not defined,
2968 PARM_BOUNDARY is used for all arguments.
2969
2970 V.4 wants long longs to be double word aligned. */
2971
2972 int
2973 function_arg_boundary (mode, type)
2974 enum machine_mode mode;
2975 tree type ATTRIBUTE_UNUSED;
2976 {
2977 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
2978 return 64;
2979 else if (SPE_VECTOR_MODE (mode))
2980 return 64;
2981 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2982 return 128;
2983 else
2984 return PARM_BOUNDARY;
2985 }
2986 \f
2987 /* Update the data in CUM to advance over an argument
2988 of mode MODE and data type TYPE.
2989 (TYPE is null for libcalls where that information may not be available.) */
2990
void
function_arg_advance (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;
     tree type;
     int named;
{
  cum->nargs_prototype--;

  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      /* AltiVec args take a vector register while any remain; otherwise
	 they consume stack words.  */
      if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
	cum->vregno++;
      else
	cum->words += RS6000_ARG_SIZE (mode, type);
    }
  else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
	   && named && cum->sysv_gregno <= GP_ARG_MAX_REG)
    /* Named SPE vector args take the next GPR.  */
    cum->sysv_gregno++;
  else if (DEFAULT_ABI == ABI_V4)
    {
      if (TARGET_HARD_FLOAT && TARGET_FPRS
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    cum->fregno++;
	  else
	    {
	      /* Doubles on the stack are doubleword aligned.  */
	      if (mode == DFmode)
	        cum->words += cum->words & 1;
	      cum->words += RS6000_ARG_SIZE (mode, type);
	    }
	}
      else
	{
	  int n_words;
	  int gregno = cum->sysv_gregno;

	  /* Aggregates and IEEE quad get passed by reference.  */
	  if ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode)
	    n_words = 1;
	  else
	    n_words = RS6000_ARG_SIZE (mode, type);

	  /* Long long and SPE vectors are put in odd registers.  */
	  if (n_words == 2 && (gregno & 1) == 0)
	    gregno += 1;

	  /* Long long and SPE vectors are not split between registers
	     and stack.  */
	  if (gregno + n_words - 1 > GP_ARG_MAX_REG)
	    {
	      /* Long long is aligned on the stack.  */
	      if (n_words == 2)
		cum->words += cum->words & 1;
	      cum->words += n_words;
	    }

	  /* Note: continuing to accumulate gregno past when we've started
	     spilling to the stack indicates the fact that we've started
	     spilling to the stack to expand_builtin_saveregs.  */
	  cum->sysv_gregno = gregno + n_words;
	}

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
		   cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
	  fprintf (stderr, "mode = %4s, named = %d\n",
		   GET_MODE_NAME (mode), named);
	}
    }
  else
    {
      /* Non-V.4 ABIs: args occupy consecutive stack words, doubleword
	 aligning 64-bit-boundary args in 32-bit mode; FP args also
	 consume FP registers.  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
		   && function_arg_boundary (mode, type) == 64) ? 1 : 0;

      cum->words += align + RS6000_ARG_SIZE (mode, type);

      if (GET_MODE_CLASS (mode) == MODE_FLOAT
	  && TARGET_HARD_FLOAT && TARGET_FPRS)
	cum->fregno += (mode == TFmode ? 2 : 1);

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
		   cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
	  fprintf (stderr, "named = %d, align = %d\n", named, align);
	}
    }
}
3087 \f
3088 /* Determine where to put an argument to a function.
3089 Value is zero to push the argument on the stack,
3090 or a hard register in which to store the argument.
3091
3092 MODE is the argument's machine mode.
3093 TYPE is the data type of the argument (as a tree).
3094 This is null for libcalls where that information may
3095 not be available.
3096 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3097 the preceding args and about the function being called.
3098 NAMED is nonzero if this argument is a named parameter
3099 (otherwise it is an extra parameter matching an ellipsis).
3100
3101 On RS/6000 the first eight words of non-FP are normally in registers
3102 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3103 Under V.4, the first 8 FP args are in registers.
3104
3105 If this is floating-point and no prototype is specified, we use
3106 both an FP and integer register (or possibly FP reg and stack). Library
3107 functions (when TYPE is zero) always have the proper types for args,
3108 so we can pass the FP value just in one register. emit_library_function
3109 doesn't support PARALLEL anyway. */
3110
struct rtx_def *
function_arg (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;
     tree type;
     int named;
{
  enum rs6000_abi abi = DEFAULT_ABI;

  /* Return a marker to indicate whether CR1 needs to set or clear the
     bit that V.4 uses to say fp args were passed in registers.
     Assume that we don't need the marker for software floating point,
     or compiler generated library calls.  */
  if (mode == VOIDmode)
    {
      if (abi == ABI_V4
	  && cum->nargs_prototype < 0
	  && type && (cum->prototype || TARGET_NO_PROTOTYPE))
	{
	  /* For the SPE, we need to crxor CR6 always.  */
	  if (TARGET_SPE_ABI)
	    return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
	  else if (TARGET_HARD_FLOAT && TARGET_FPRS)
	    return GEN_INT (cum->call_cookie
			    | ((cum->fregno == FP_ARG_MIN_REG)
			       ? CALL_V4_SET_FP_ARGS
			       : CALL_V4_CLEAR_FP_ARGS));
	}

      return GEN_INT (cum->call_cookie);
    }

  /* Named AltiVec vector args go in a vector register while any remain;
     otherwise (unnamed, or registers exhausted) they go in memory.  */
  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
	return gen_rtx_REG (mode, cum->vregno);
      else
	return NULL;
    }
  /* Named SPE vector args go in a general register if one is left.  */
  else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode) && named)
    {
      if (cum->sysv_gregno <= GP_ARG_MAX_REG)
	return gen_rtx_REG (mode, cum->sysv_gregno);
      else
	return NULL;
    }
  else if (abi == ABI_V4)
    {
      /* V.4: single/double floats go in FP registers while any remain.  */
      if (TARGET_HARD_FLOAT && TARGET_FPRS
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    return gen_rtx_REG (mode, cum->fregno);
	  else
	    return NULL;
	}
      else
	{
	  int n_words;
	  int gregno = cum->sysv_gregno;

	  /* Aggregates and IEEE quad get passed by reference.  */
	  if ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode)
	    n_words = 1;
	  else
	    n_words = RS6000_ARG_SIZE (mode, type);

	  /* Long long and SPE vectors are put in odd registers.  */
	  if (n_words == 2 && (gregno & 1) == 0)
	    gregno += 1;

	  /* Long long and SPE vectors are not split between registers
	     and stack.  */
	  if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
	    {
	      /* SPE vectors in ... get split into 2 registers.  */
	      if (TARGET_SPE && TARGET_SPE_ABI
		  && SPE_VECTOR_MODE (mode) && !named)
		{
		  /* Describe the value as a PARALLEL of two SImode
		     register halves at byte offsets 0 and 4.  */
		  rtx r1, r2;
		  enum machine_mode m = SImode;

		  r1 = gen_rtx_REG (m, gregno);
		  r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
		  r2 = gen_rtx_REG (m, gregno + 1);
		  r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
		  return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
		}
	      return gen_rtx_REG (mode, gregno);
	    }
	  else
	    return NULL;
	}
    }
  else
    {
      /* AIX-style ABIs.  ALIGN is 1 when a doubleword-aligned argument
	 needs a pad word in 32-bit mode.  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
		   && function_arg_boundary (mode, type) == 64) ? 1 : 0;
      int align_words = cum->words + align;

      /* Variable-sized types always go in memory.  */
      if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	return NULL_RTX;

      if (USE_FP_FOR_ARG_P (*cum, mode, type))
	{
	  if (! type
	      || ((cum->nargs_prototype > 0)
	          /* IBM AIX extended its linkage convention definition always
		     to require FP args after register save area hole on the
		     stack.  */
		  && (DEFAULT_ABI != ABI_AIX
		      || ! TARGET_XL_CALL
		      || (align_words < GP_ARG_NUM_REG))))
	    return gen_rtx_REG (mode, cum->fregno);

	  /* Unprototyped FP argument: describe it as a PARALLEL so the
	     value is passed both in the FP register and in the GPRs (or
	     memory), whichever convention the callee expects.  */
	  return gen_rtx_PARALLEL (mode,
	    gen_rtvec (2,
		       gen_rtx_EXPR_LIST (VOIDmode,
				((align_words >= GP_ARG_NUM_REG)
				 ? NULL_RTX
				 : (align_words
				    + RS6000_ARG_SIZE (mode, type)
				    > GP_ARG_NUM_REG
				    /* If this is partially on the stack, then
				       we only include the portion actually
				       in registers here.  */
				    ? gen_rtx_REG (SImode,
					       GP_ARG_MIN_REG + align_words)
				    : gen_rtx_REG (mode,
					       GP_ARG_MIN_REG + align_words))),
				const0_rtx),
		       gen_rtx_EXPR_LIST (VOIDmode,
					  gen_rtx_REG (mode, cum->fregno),
					  const0_rtx)));
	}
      else if (align_words < GP_ARG_NUM_REG)
	return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
      else
	return NULL_RTX;
    }
}
3253 \f
3254 /* For an arg passed partly in registers and partly in memory,
3255 this is the number of registers used.
3256 For args passed entirely in registers or entirely in memory, zero. */
3257
3258 int
3259 function_arg_partial_nregs (cum, mode, type, named)
3260 CUMULATIVE_ARGS *cum;
3261 enum machine_mode mode;
3262 tree type;
3263 int named ATTRIBUTE_UNUSED;
3264 {
3265 if (DEFAULT_ABI == ABI_V4)
3266 return 0;
3267
3268 if (USE_FP_FOR_ARG_P (*cum, mode, type)
3269 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
3270 {
3271 if (cum->nargs_prototype >= 0)
3272 return 0;
3273 }
3274
3275 if (cum->words < GP_ARG_NUM_REG
3276 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
3277 {
3278 int ret = GP_ARG_NUM_REG - cum->words;
3279 if (ret && TARGET_DEBUG_ARG)
3280 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
3281
3282 return ret;
3283 }
3284
3285 return 0;
3286 }
3287 \f
3288 /* A C expression that indicates when an argument must be passed by
3289 reference. If nonzero for an argument, a copy of that argument is
3290 made in memory and a pointer to the argument is passed instead of
3291 the argument itself. The pointer is passed in whatever way is
3292 appropriate for passing a pointer to that type.
3293
3294 Under V.4, structures and unions are passed by reference. */
3295
3296 int
3297 function_arg_pass_by_reference (cum, mode, type, named)
3298 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
3299 enum machine_mode mode ATTRIBUTE_UNUSED;
3300 tree type;
3301 int named ATTRIBUTE_UNUSED;
3302 {
3303 if (DEFAULT_ABI == ABI_V4
3304 && ((type && AGGREGATE_TYPE_P (type))
3305 || mode == TFmode))
3306 {
3307 if (TARGET_DEBUG_ARG)
3308 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
3309
3310 return 1;
3311 }
3312
3313 return 0;
3314 }
3315 \f
3316 /* Perform any needed actions needed for a function that is receiving a
3317 variable number of arguments.
3318
3319 CUM is as above.
3320
3321 MODE and TYPE are the mode and type of the current parameter.
3322
3323 PRETEND_SIZE is a variable that should be set to the amount of stack
3324 that must be pushed by the prolog to pretend that our caller pushed
3325 it.
3326
3327 Normally, this macro will push all remaining incoming registers on the
3328 stack and set PRETEND_SIZE to the length of the registers pushed. */
3329
3330 void
3331 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
3332 CUMULATIVE_ARGS *cum;
3333 enum machine_mode mode;
3334 tree type;
3335 int *pretend_size ATTRIBUTE_UNUSED;
3336 int no_rtl;
3337
3338 {
3339 CUMULATIVE_ARGS next_cum;
3340 int reg_size = TARGET_32BIT ? 4 : 8;
3341 rtx save_area = NULL_RTX, mem;
3342 int first_reg_offset, set;
3343 tree fntype;
3344 int stdarg_p;
3345
3346 fntype = TREE_TYPE (current_function_decl);
3347 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
3348 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3349 != void_type_node));
3350
3351 /* For varargs, we do not want to skip the dummy va_dcl argument.
3352 For stdargs, we do want to skip the last named argument. */
3353 next_cum = *cum;
3354 if (stdarg_p)
3355 function_arg_advance (&next_cum, mode, type, 1);
3356
3357 if (DEFAULT_ABI == ABI_V4)
3358 {
3359 /* Indicate to allocate space on the stack for varargs save area. */
3360 cfun->machine->sysv_varargs_p = 1;
3361 if (! no_rtl)
3362 save_area = plus_constant (virtual_stack_vars_rtx,
3363 - RS6000_VARARGS_SIZE);
3364
3365 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
3366 }
3367 else
3368 {
3369 first_reg_offset = next_cum.words;
3370 save_area = virtual_incoming_args_rtx;
3371 cfun->machine->sysv_varargs_p = 0;
3372
3373 if (MUST_PASS_IN_STACK (mode, type))
3374 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
3375 }
3376
3377 set = get_varargs_alias_set ();
3378 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
3379 {
3380 mem = gen_rtx_MEM (BLKmode,
3381 plus_constant (save_area,
3382 first_reg_offset * reg_size)),
3383 set_mem_alias_set (mem, set);
3384 set_mem_align (mem, BITS_PER_WORD);
3385
3386 move_block_from_reg
3387 (GP_ARG_MIN_REG + first_reg_offset, mem,
3388 GP_ARG_NUM_REG - first_reg_offset,
3389 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
3390 }
3391
3392 /* Save FP registers if needed. */
3393 if (DEFAULT_ABI == ABI_V4
3394 && TARGET_HARD_FLOAT && TARGET_FPRS
3395 && ! no_rtl
3396 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
3397 {
3398 int fregno = next_cum.fregno;
3399 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
3400 rtx lab = gen_label_rtx ();
3401 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
3402
3403 emit_jump_insn (gen_rtx_SET (VOIDmode,
3404 pc_rtx,
3405 gen_rtx_IF_THEN_ELSE (VOIDmode,
3406 gen_rtx_NE (VOIDmode, cr1,
3407 const0_rtx),
3408 gen_rtx_LABEL_REF (VOIDmode, lab),
3409 pc_rtx)));
3410
3411 while (fregno <= FP_ARG_V4_MAX_REG)
3412 {
3413 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
3414 set_mem_alias_set (mem, set);
3415 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
3416 fregno++;
3417 off += 8;
3418 }
3419
3420 emit_label (lab);
3421 }
3422 }
3423
3424 /* Create the va_list data type. */
3425
3426 tree
3427 rs6000_build_va_list ()
3428 {
3429 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
3430
3431 /* For AIX, prefer 'char *' because that's what the system
3432 header files like. */
3433 if (DEFAULT_ABI != ABI_V4)
3434 return build_pointer_type (char_type_node);
3435
3436 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3437 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
3438
3439 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
3440 unsigned_char_type_node);
3441 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
3442 unsigned_char_type_node);
3443 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
3444 ptr_type_node);
3445 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
3446 ptr_type_node);
3447
3448 DECL_FIELD_CONTEXT (f_gpr) = record;
3449 DECL_FIELD_CONTEXT (f_fpr) = record;
3450 DECL_FIELD_CONTEXT (f_ovf) = record;
3451 DECL_FIELD_CONTEXT (f_sav) = record;
3452
3453 TREE_CHAIN (record) = type_decl;
3454 TYPE_NAME (record) = type_decl;
3455 TYPE_FIELDS (record) = f_gpr;
3456 TREE_CHAIN (f_gpr) = f_fpr;
3457 TREE_CHAIN (f_fpr) = f_ovf;
3458 TREE_CHAIN (f_ovf) = f_sav;
3459
3460 layout_type (record);
3461
3462 /* The correct type is an array type of one element. */
3463 return build_array_type (record, build_index_type (size_zero_node));
3464 }
3465
3466 /* Implement va_start. */
3467
3468 void
3469 rs6000_va_start (valist, nextarg)
3470 tree valist;
3471 rtx nextarg;
3472 {
3473 HOST_WIDE_INT words, n_gpr, n_fpr;
3474 tree f_gpr, f_fpr, f_ovf, f_sav;
3475 tree gpr, fpr, ovf, sav, t;
3476
3477 /* Only SVR4 needs something special. */
3478 if (DEFAULT_ABI != ABI_V4)
3479 {
3480 std_expand_builtin_va_start (valist, nextarg);
3481 return;
3482 }
3483
3484 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3485 f_fpr = TREE_CHAIN (f_gpr);
3486 f_ovf = TREE_CHAIN (f_fpr);
3487 f_sav = TREE_CHAIN (f_ovf);
3488
3489 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3490 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3491 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3492 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3493 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3494
3495 /* Count number of gp and fp argument registers used. */
3496 words = current_function_args_info.words;
3497 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
3498 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
3499
3500 if (TARGET_DEBUG_ARG)
3501 {
3502 fputs ("va_start: words = ", stderr);
3503 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
3504 fputs (", n_gpr = ", stderr);
3505 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
3506 fputs (", n_fpr = ", stderr);
3507 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
3508 putc ('\n', stderr);
3509 }
3510
3511 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
3512 TREE_SIDE_EFFECTS (t) = 1;
3513 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3514
3515 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
3516 TREE_SIDE_EFFECTS (t) = 1;
3517 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3518
3519 /* Find the overflow area. */
3520 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
3521 if (words != 0)
3522 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
3523 build_int_2 (words * UNITS_PER_WORD, 0));
3524 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3525 TREE_SIDE_EFFECTS (t) = 1;
3526 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3527
3528 /* Find the register save area. */
3529 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
3530 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
3531 build_int_2 (-RS6000_VARARGS_SIZE, -1));
3532 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
3533 TREE_SIDE_EFFECTS (t) = 1;
3534 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3535 }
3536
3537 /* Implement va_arg. */
3538
rtx
rs6000_va_arg (valist, type)
     tree valist, type;
{
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
  rtx lab_false, lab_over, addr_rtx, r;

  /* Only V.4 uses the split register-save-area/overflow va_list;
     everything else takes the generic expansion.  */
  if (DEFAULT_ABI != ABI_V4)
    return std_expand_builtin_va_arg (valist, type);

  /* Locate the four fields of the __va_list_tag record and build
     COMPONENT_REFs for each one.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* SIZE in bytes, RSIZE in words (rounded up).  */
  size = int_size_in_bytes (type);
  rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
    {
      /* Aggregates and long doubles are passed by reference, so what
	 actually lives in the argument area is a one-word pointer in
	 a GPR slot; the scale of 4 is the GPR slot size in bytes.  */
      indirect_p = 1;
      reg = gpr;
      n_reg = 1;
      sav_ofs = 0;
      sav_scale = 4;
      size = UNITS_PER_WORD;
      rsize = 1;
    }
  else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
    {
      /* FP args go in FP registers, if present.  The FPR save slots
	 start 8*4 bytes in (past the eight 4-byte GPR slots) and are
	 8 bytes each.  */
      indirect_p = 0;
      reg = fpr;
      n_reg = 1;
      sav_ofs = 8*4;
      sav_scale = 8;
    }
  else
    {
      /* Otherwise into GP registers, one register per word.  */
      indirect_p = 0;
      reg = gpr;
      n_reg = rsize;
      sav_ofs = 0;
      sav_scale = 4;
    }

  /* Pull the value out of the saved registers ...  */

  lab_false = gen_label_rtx ();
  lab_over = gen_label_rtx ();
  addr_rtx = gen_reg_rtx (Pmode);

  /* AltiVec vectors never go in registers.  */
  if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
    {
      /* NOTE(review): marking REG volatile presumably forces a reload
	 of the counter on each use — confirm against expand_expr.  */
      TREE_THIS_VOLATILE (reg) = 1;
      /* If the register counter is already past the last slot that can
	 hold this argument (8 slots total), take the overflow path.  */
      emit_cmp_and_jump_insns
	(expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
	 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
	 lab_false);

      /* Long long is aligned in the registers: round the counter up
	 to a multiple of N_REG before using it.  */
      if (n_reg > 1)
	{
	  u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
		     build_int_2 (n_reg - 1, 0));
	  u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
	  u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
	  TREE_SIDE_EFFECTS (u) = 1;
	  expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
	}

      /* addr = sav + sav_ofs + reg * sav_scale, with reg
	 post-incremented by n_reg.  */
      if (sav_ofs)
	t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
      else
	t = sav;

      u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
		 build_int_2 (n_reg, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      u = build1 (CONVERT_EXPR, integer_type_node, u);
      TREE_SIDE_EFFECTS (u) = 1;

      u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      t = build (PLUS_EXPR, ptr_type_node, t, u);
      TREE_SIDE_EFFECTS (t) = 1;

      r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
      if (r != addr_rtx)
	emit_move_insn (addr_rtx, r);

      emit_jump_insn (gen_jump (lab_over));
      emit_barrier ();
    }

  emit_label (lab_false);

  /* ... otherwise out of the overflow area.  */

  /* Make sure we don't find reg 7 for the next int arg.

     All AltiVec vectors go in the overflow area.  So in the AltiVec
     case we need to get the vectors from the overflow area, but
     remember where the GPRs and FPRs are.  */
  if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
		    || !TARGET_ALTIVEC))
    {
      /* Saturate the register counter at 8 (all slots used).  */
      t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  /* Care for on-stack alignment if needed: round OVF up to an 8-byte
     (or, for AltiVec vectors, 16-byte) boundary.  */
  if (rsize <= 1)
    t = ovf;
  else
    {
      int align;

      /* AltiVec vectors are 16 byte aligned.  */
      if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
	align = 15;
      else
	align = 7;

      t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
      t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
    }
  t = save_expr (t);

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);

  /* Advance OVF past this argument.  */
  t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_over);

  /* Pass-by-reference argument: ADDR_RTX currently holds the address
     of the pointer; load the real argument address through it.  */
  if (indirect_p)
    {
      r = gen_rtx_MEM (Pmode, addr_rtx);
      set_mem_alias_set (r, get_varargs_alias_set ());
      emit_move_insn (addr_rtx, r);
    }

  return addr_rtx;
}
3702
3703 /* Builtins. */
3704
/* Register the machine-specific builtin NAME, with function type TYPE
   and builtin code CODE, but only when the bits of MASK are present in
   target_flags.  */
#define def_builtin(MASK, NAME, TYPE, CODE)			\
do {								\
  if ((MASK) & target_flags)					\
    builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
		      NULL, NULL_TREE);				\
} while (0)
3711
3712 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3713
/* Each entry gives: the target_flags mask required for the builtin to
   exist, the insn code that implements it, its source-level name, and
   its rs6000 builtin enum value.  */
static const struct builtin_description bdesc_3arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
};
3740
3741 /* DST operations: void foo (void *, const int, const char). */
3742
/* Entry layout matches bdesc_3arg: required target_flags mask, insn
   code, builtin name, and rs6000 builtin enum value.  */
static const struct builtin_description bdesc_dst[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
};
3750
3751 /* Simple binary operations: VECc = foo (VECa, VECb). */
3752
3753 static struct builtin_description bdesc_2arg[] =
3754 {
3755 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3756 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3757 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3758 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3759 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3760 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3761 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3762 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3763 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3764 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3765 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3766 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3767 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3768 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3769 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3770 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3771 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3772 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3773 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3774 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3775 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3776 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3777 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3778 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3779 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3780 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3781 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3782 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3783 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3784 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3785 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3786 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3787 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3788 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3789 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3790 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3791 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3792 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3793 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3794 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3795 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3796 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3797 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3798 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3799 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3800 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3801 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3802 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3803 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3804 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3805 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3806 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3807 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3808 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3809 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3810 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3811 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3812 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3813 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3814 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3815 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3816 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3817 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3818 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3819 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3820 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3821 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3822 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3823 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3824 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3825 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3826 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3827 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3828 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3829 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3830 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3831 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3832 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3833 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3834 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3835 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3836 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3837 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3838 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3839 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3840 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3841 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3842 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3843 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3844 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3845 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3846 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3847 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3848 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3849 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3850 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3851 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3852 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3853 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3854 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3855 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3856 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3857 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3858 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3859 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3860 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3861 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3862 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3863 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3864 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3865 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3866 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3867 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3868
3869 /* Place holder, leave as first spe builtin. */
3870 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
3871 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
3872 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
3873 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
3874 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
3875 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
3876 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
3877 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
3878 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
3879 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
3880 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
3881 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
3882 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
3883 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
3884 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
3885 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
3886 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
3887 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
3888 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
3889 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
3890 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
3891 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
3892 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
3893 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
3894 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
3895 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
3896 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
3897 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
3898 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
3899 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
3900 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
3901 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
3902 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
3903 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
3904 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
3905 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
3906 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
3907 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
3908 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
3909 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
3910 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
3911 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
3912 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
3913 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
3914 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
3915 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
3916 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
3917 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
3918 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
3919 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
3920 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
3921 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
3922 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
3923 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
3924 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
3925 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
3926 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
3927 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
3928 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
3929 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
3930 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
3931 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
3932 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
3933 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
3934 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
3935 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
3936 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
3937 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
3938 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
3939 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
3940 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
3941 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
3942 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
3943 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
3944 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
3945 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
3946 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
3947 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
3948 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
3949 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
3950 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
3951 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
3952 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
3953 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
3954 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
3955 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
3956 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
3957 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
3958 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
3959 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
3960 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
3961 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
3962 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
3963 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
3964 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
3965 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
3966 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
3967 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
3968 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
3969 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
3970 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
3971 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
3972 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
3973 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
3974 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
3975 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
3976 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
3977 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
3978 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
3979
3980 /* SPE binary operations expecting a 5-bit unsigned literal. */
3981 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
3982
3983 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
3984 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
3985 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
3986 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
3987 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
3988 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
3989 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
3990 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
3991 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
3992 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
3993 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
3994 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
3995 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
3996 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
3997 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
3998 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
3999 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
4000 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
4001 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
4002 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
4003 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
4004 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
4005 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
4006 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
4007 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
4008 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
4009
4010 /* Place-holder. Leave as last binary SPE builtin. */
4011 { 0, CODE_FOR_spe_evxor, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
4012 };
4013
/* AltiVec predicates.

   Each entry describes one AltiVec predicate builtin
   (__builtin_altivec_vcmp*_p): the target flags required, the insn
   pattern used to expand it, the assembler opcode string passed to
   that pattern, the user-visible name, and the builtin's enum code.  */

struct builtin_description_predicates
{
  /* Target flag bits (e.g. MASK_ALTIVEC) that must be enabled.  */
  const unsigned int mask;
  /* Insn pattern used to expand the comparison.  */
  const enum insn_code icode;
  /* Assembler opcode string (e.g. "*vcmpeqfp.") handed to the
     pattern as a SYMBOL_REF.  */
  const char *opcode;
  /* User-visible builtin function name.  */
  const char *const name;
  /* Enum code identifying this builtin.  */
  const enum rs6000_builtins code;
};
4024
/* Table of AltiVec predicate builtins.  Each opcode string names the
   CR6-setting form of the compare ("." suffix) emitted by the
   corresponding altivec_predicate_* pattern.  */
static const struct builtin_description_predicates bdesc_altivec_preds[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
};
4041
/* SPE predicates.  Entry order matters: the marked first and last
   entries are place-holders that must stay in place — presumably the
   table is walked by enum range at initialization; confirm before
   reordering.  */
static struct builtin_description bdesc_spe_predicates[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
};
4059
/* SPE evsel predicates.  As with bdesc_spe_predicates above, keep the
   marked first and last entries in place — the table's order is
   significant.  */
static struct builtin_description bdesc_spe_evsel[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
};
4077
/* ABS* operations.  These expand via altivec_expand_abs_builtin,
   which supplies two scratch registers to each pattern.  */

static const struct builtin_description bdesc_abs[] =
{
  { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
  { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
};
4090
/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
   foo (VECa).  Expanded by rs6000_expand_unop_builtin; the vspltis*
   and evsplat* entries take a 5-bit signed literal rather than a
   vector operand.  */

static struct builtin_description bdesc_1arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
  { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },

  /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
     end with SPE_BUILTIN_EVSUBFUSIAAW — the enum range is used to
     identify them, so keep this section contiguous.  */
  { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
  { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
  { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
  { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
  { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
  { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
  { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
  { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
  { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
  { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
  { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
  { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
  { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
  { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
  { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
  { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
  { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
  { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
  { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
  { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
  { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
  { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
  { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
  { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
  { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
  { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
  { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
  { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
  { 0, CODE_FOR_spe_evsplatfi, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI },
  { 0, CODE_FOR_spe_evsplati, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI },

  /* Place-holder.  Leave as last unary SPE builtin.  */
  { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
};
4150
4151 static rtx
4152 rs6000_expand_unop_builtin (icode, arglist, target)
4153 enum insn_code icode;
4154 tree arglist;
4155 rtx target;
4156 {
4157 rtx pat;
4158 tree arg0 = TREE_VALUE (arglist);
4159 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4160 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4161 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4162
4163 if (icode == CODE_FOR_nothing)
4164 /* Builtin not supported on this processor. */
4165 return 0;
4166
4167 /* If we got invalid arguments bail out before generating bad rtl. */
4168 if (arg0 == error_mark_node)
4169 return const0_rtx;
4170
4171 if (icode == CODE_FOR_altivec_vspltisb
4172 || icode == CODE_FOR_altivec_vspltish
4173 || icode == CODE_FOR_altivec_vspltisw
4174 || icode == CODE_FOR_spe_evsplatfi
4175 || icode == CODE_FOR_spe_evsplati)
4176 {
4177 /* Only allow 5-bit *signed* literals. */
4178 if (GET_CODE (op0) != CONST_INT
4179 || INTVAL (op0) > 0x1f
4180 || INTVAL (op0) < -0x1f)
4181 {
4182 error ("argument 1 must be a 5-bit signed literal");
4183 return const0_rtx;
4184 }
4185 }
4186
4187 if (target == 0
4188 || GET_MODE (target) != tmode
4189 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4190 target = gen_reg_rtx (tmode);
4191
4192 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4193 op0 = copy_to_mode_reg (mode0, op0);
4194
4195 pat = GEN_FCN (icode) (target, op0);
4196 if (! pat)
4197 return 0;
4198 emit_insn (pat);
4199
4200 return target;
4201 }
4202
4203 static rtx
4204 altivec_expand_abs_builtin (icode, arglist, target)
4205 enum insn_code icode;
4206 tree arglist;
4207 rtx target;
4208 {
4209 rtx pat, scratch1, scratch2;
4210 tree arg0 = TREE_VALUE (arglist);
4211 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4212 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4213 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4214
4215 /* If we have invalid arguments, bail out before generating bad rtl. */
4216 if (arg0 == error_mark_node)
4217 return const0_rtx;
4218
4219 if (target == 0
4220 || GET_MODE (target) != tmode
4221 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4222 target = gen_reg_rtx (tmode);
4223
4224 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4225 op0 = copy_to_mode_reg (mode0, op0);
4226
4227 scratch1 = gen_reg_rtx (mode0);
4228 scratch2 = gen_reg_rtx (mode0);
4229
4230 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
4231 if (! pat)
4232 return 0;
4233 emit_insn (pat);
4234
4235 return target;
4236 }
4237
4238 static rtx
4239 rs6000_expand_binop_builtin (icode, arglist, target)
4240 enum insn_code icode;
4241 tree arglist;
4242 rtx target;
4243 {
4244 rtx pat;
4245 tree arg0 = TREE_VALUE (arglist);
4246 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4247 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4248 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4249 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4250 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4251 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4252
4253 if (icode == CODE_FOR_nothing)
4254 /* Builtin not supported on this processor. */
4255 return 0;
4256
4257 /* If we got invalid arguments bail out before generating bad rtl. */
4258 if (arg0 == error_mark_node || arg1 == error_mark_node)
4259 return const0_rtx;
4260
4261 if (icode == CODE_FOR_altivec_vcfux
4262 || icode == CODE_FOR_altivec_vcfsx
4263 || icode == CODE_FOR_altivec_vctsxs
4264 || icode == CODE_FOR_altivec_vctuxs
4265 || icode == CODE_FOR_altivec_vspltb
4266 || icode == CODE_FOR_altivec_vsplth
4267 || icode == CODE_FOR_altivec_vspltw
4268 || icode == CODE_FOR_spe_evaddiw
4269 || icode == CODE_FOR_spe_evldd
4270 || icode == CODE_FOR_spe_evldh
4271 || icode == CODE_FOR_spe_evldw
4272 || icode == CODE_FOR_spe_evlhhesplat
4273 || icode == CODE_FOR_spe_evlhhossplat
4274 || icode == CODE_FOR_spe_evlhhousplat
4275 || icode == CODE_FOR_spe_evlwhe
4276 || icode == CODE_FOR_spe_evlwhos
4277 || icode == CODE_FOR_spe_evlwhou
4278 || icode == CODE_FOR_spe_evlwhsplat
4279 || icode == CODE_FOR_spe_evlwwsplat
4280 || icode == CODE_FOR_spe_evrlwi
4281 || icode == CODE_FOR_spe_evslwi
4282 || icode == CODE_FOR_spe_evsrwis
4283 || icode == CODE_FOR_spe_evsrwiu)
4284 {
4285 /* Only allow 5-bit unsigned literals. */
4286 if (TREE_CODE (arg1) != INTEGER_CST
4287 || TREE_INT_CST_LOW (arg1) & ~0x1f)
4288 {
4289 error ("argument 2 must be a 5-bit unsigned literal");
4290 return const0_rtx;
4291 }
4292 }
4293
4294 if (target == 0
4295 || GET_MODE (target) != tmode
4296 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4297 target = gen_reg_rtx (tmode);
4298
4299 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4300 op0 = copy_to_mode_reg (mode0, op0);
4301 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4302 op1 = copy_to_mode_reg (mode1, op1);
4303
4304 pat = GEN_FCN (icode) (target, op0, op1);
4305 if (! pat)
4306 return 0;
4307 emit_insn (pat);
4308
4309 return target;
4310 }
4311
/* Expand an AltiVec predicate builtin.  ICODE is the comparison
   pattern to emit; OPCODE is the assembler opcode string for the
   comparison (passed to the pattern as a SYMBOL_REF); ARGLIST is
   (cr6_form, arg0, arg1), where cr6_form is a compile-time integer
   constant in 0..3 selecting which CR6 test extracts the result;
   TARGET is a suggested SImode rtx for the result, or 0.  Return the
   rtx holding the 0/1 result, 0 if the insn could not be generated,
   or const0_rtx after diagnosing invalid arguments.  */
static rtx
altivec_expand_predicate_builtin (icode, opcode, arglist, target)
     enum insn_code icode;
     const char *opcode;
     tree arglist;
     rtx target;
{
  rtx pat, scratch;
  tree cr6_form = TREE_VALUE (arglist);
  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  /* The result of a predicate is always a plain SImode 0/1 value.  */
  enum machine_mode tmode = SImode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int cr6_form_int;

  /* The CR6 selector must be a literal constant; it chooses code at
     compile time, not at run time.  */
  if (TREE_CODE (cr6_form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_altivec_predicate must be a constant");
      return const0_rtx;
    }
  else
    cr6_form_int = TREE_INT_CST_LOW (cr6_form);

  /* Both comparison operands are expected in the same vector mode.  */
  if (mode0 != mode1)
    abort ();

  /* If we have invalid arguments, bail out before generating bad rtl.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  /* The vector result of the comparison itself is discarded; only
     the CR6 bits it sets are examined below.  */
  scratch = gen_reg_rtx (mode0);

  pat = GEN_FCN (icode) (scratch, op0, op1,
			 gen_rtx (SYMBOL_REF, Pmode, opcode));
  if (! pat)
    return 0;
  emit_insn (pat);

  /* The vec_any* and vec_all* predicates use the same opcodes for two
     different operations, but the bits in CR6 will be different
     depending on what information we want.  So we have to play tricks
     with CR6 to get the right bits out.

     If you think this is disgusting, look at the specs for the
     AltiVec predicates.  */

  switch (cr6_form_int)
    {
    case 0:
      emit_insn (gen_cr6_test_for_zero (target));
      break;
    case 1:
      emit_insn (gen_cr6_test_for_zero_reverse (target));
      break;
    case 2:
      emit_insn (gen_cr6_test_for_lt (target));
      break;
    case 3:
      emit_insn (gen_cr6_test_for_lt_reverse (target));
      break;
    default:
      error ("argument 1 of __builtin_altivec_predicate is out of range");
      break;
    }

  return target;
}
4392
/* Expand an AltiVec/SPE store-vector builtin.  ARGLIST carries three
   arguments, but the insn pattern takes them in a different order:
   arg1 and arg2 become insn operands 0 and 1, and arg0 becomes insn
   operand 2 (see the GEN_FCN call below).  Stores produce no value,
   so this returns NULL_RTX, or const0_rtx on invalid input.  */
static rtx
altivec_expand_stv_builtin (icode, arglist)
     enum insn_code icode;
     tree arglist;
{
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
  rtx pat;
  enum machine_mode mode0 = insn_data[icode].operand[0].mode;
  enum machine_mode mode1 = insn_data[icode].operand[1].mode;
  enum machine_mode mode2 = insn_data[icode].operand[2].mode;

  /* Invalid arguments.  Bail before doing anything stoopid!  */
  if (arg0 == error_mark_node
      || arg1 == error_mark_node
      || arg2 == error_mark_node)
    return const0_rtx;

  /* Note the deliberate operand-index shuffle: op0 is validated
     against insn operand 2, op1 against operand 0 and op2 against
     operand 1, matching the (op1, op2, op0) order used below.  */
  if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
    op0 = copy_to_mode_reg (mode2, op0);
  if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
    op1 = copy_to_mode_reg (mode0, op1);
  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
    op2 = copy_to_mode_reg (mode1, op2);

  pat = GEN_FCN (icode) (op1, op2, op0);
  if (pat)
    emit_insn (pat);
  return NULL_RTX;
}
4427
/* Expand a three-operand builtin: TARGET = ICODE (arg0, arg1, arg2).
   Returns 0 when the builtin is unsupported on this processor,
   const0_rtx on invalid arguments, otherwise the rtx holding the
   result (TARGET, or a fresh pseudo if TARGET was unsuitable).  */
static rtx
rs6000_expand_ternop_builtin (icode, arglist, target)
     enum insn_code icode;
     tree arglist;
     rtx target;
{
  rtx pat;
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  enum machine_mode mode2 = insn_data[icode].operand[3].mode;

  if (icode == CODE_FOR_nothing)
    /* Builtin not supported on this processor.  */
    return 0;

  /* If we got invalid arguments bail out before generating bad rtl.  */
  if (arg0 == error_mark_node
      || arg1 == error_mark_node
      || arg2 == error_mark_node)
    return const0_rtx;

  /* The vsldoi shift amount (third argument, insn operand 3) is
     encoded in the instruction, so it must be a compile-time
     constant that fits in 4 bits.  */
  if (icode == CODE_FOR_altivec_vsldoi_4sf
      || icode == CODE_FOR_altivec_vsldoi_4si
      || icode == CODE_FOR_altivec_vsldoi_8hi
      || icode == CODE_FOR_altivec_vsldoi_16qi)
    {
      /* Only allow 4-bit unsigned literals.  */
      if (TREE_CODE (arg2) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg2) & ~0xf)
	{
	  error ("argument 3 must be a 4-bit unsigned literal");
	  return const0_rtx;
	}
    }

  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  /* Force each operand into a form the insn pattern accepts.  */
  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);
  if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
    op2 = copy_to_mode_reg (mode2, op2);

  pat = GEN_FCN (icode) (target, op0, op1, op2);
  if (! pat)
    return 0;
  emit_insn (pat);

  return target;
}
4489
/* Expand the lvx builtins (the LD_INTERNAL forms).  Sets *EXPANDEDP
   to true when FCODE is one of the four LD_INTERNAL codes handled
   here; otherwise sets it to false and returns NULL_RTX so the
   caller can try other expanders.  */
static rtx
altivec_expand_ld_builtin (exp, target, expandedp)
     tree exp;
     rtx target;
     bool *expandedp;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg0;
  enum machine_mode tmode, mode0;
  rtx pat, op0;
  enum insn_code icode;

  /* Pick the lvx insn pattern matching the element type.  */
  switch (fcode)
    {
    case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
      icode = CODE_FOR_altivec_lvx_16qi;
      break;
    case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
      icode = CODE_FOR_altivec_lvx_8hi;
      break;
    case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
      icode = CODE_FOR_altivec_lvx_4si;
      break;
    case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
      icode = CODE_FOR_altivec_lvx_4sf;
      break;
    default:
      *expandedp = false;
      return NULL_RTX;
    }

  *expandedp = true;

  arg0 = TREE_VALUE (arglist);
  op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  tmode = insn_data[icode].operand[0].mode;
  mode0 = insn_data[icode].operand[1].mode;

  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  /* The argument is an address; if it isn't already acceptable as
     the memory operand, force it into a register and wrap it in a
     MEM of the load's mode.  */
  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));

  pat = GEN_FCN (icode) (target, op0);
  if (! pat)
    return 0;
  emit_insn (pat);
  return target;
}
4545
4546 /* Expand the stvx builtins. */
4547 static rtx
4548 altivec_expand_st_builtin (exp, target, expandedp)
4549 tree exp;
4550 rtx target ATTRIBUTE_UNUSED;
4551 bool *expandedp;
4552 {
4553 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4554 tree arglist = TREE_OPERAND (exp, 1);
4555 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4556 tree arg0, arg1;
4557 enum machine_mode mode0, mode1;
4558 rtx pat, op0, op1;
4559 enum insn_code icode;
4560
4561 switch (fcode)
4562 {
4563 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
4564 icode = CODE_FOR_altivec_stvx_16qi;
4565 break;
4566 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
4567 icode = CODE_FOR_altivec_stvx_8hi;
4568 break;
4569 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
4570 icode = CODE_FOR_altivec_stvx_4si;
4571 break;
4572 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
4573 icode = CODE_FOR_altivec_stvx_4sf;
4574 break;
4575 default:
4576 *expandedp = false;
4577 return NULL_RTX;
4578 }
4579
4580 arg0 = TREE_VALUE (arglist);
4581 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4582 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4583 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4584 mode0 = insn_data[icode].operand[0].mode;
4585 mode1 = insn_data[icode].operand[1].mode;
4586
4587 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4588 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4589 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
4590 op1 = copy_to_mode_reg (mode1, op1);
4591
4592 pat = GEN_FCN (icode) (op0, op1);
4593 if (pat)
4594 emit_insn (pat);
4595
4596 *expandedp = true;
4597 return NULL_RTX;
4598 }
4599
/* Expand the dst builtins.  Scans bdesc_dst for FCODE; when found,
   emits the three-operand insn and sets *EXPANDEDP.  The third
   argument must be a 2-bit unsigned literal (it is encoded directly
   in the instruction).  */
static rtx
altivec_expand_dst_builtin (exp, target, expandedp)
     tree exp;
     rtx target ATTRIBUTE_UNUSED;
     bool *expandedp;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg0, arg1, arg2;
  enum machine_mode mode0, mode1, mode2;
  rtx pat, op0, op1, op2;
  struct builtin_description *d;
  size_t i;

  *expandedp = false;

  /* Handle DST variants.  */
  d = (struct builtin_description *) bdesc_dst;
  for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
    if (d->code == fcode)
      {
	arg0 = TREE_VALUE (arglist);
	arg1 = TREE_VALUE (TREE_CHAIN (arglist));
	arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
	op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
	op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
	op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
	mode0 = insn_data[d->icode].operand[0].mode;
	mode1 = insn_data[d->icode].operand[1].mode;
	mode2 = insn_data[d->icode].operand[2].mode;

	/* Invalid arguments, bail out before generating bad rtl.  */
	if (arg0 == error_mark_node
	    || arg1 == error_mark_node
	    || arg2 == error_mark_node)
	  return const0_rtx;

	if (TREE_CODE (arg2) != INTEGER_CST
	    || TREE_INT_CST_LOW (arg2) & ~0x3)
	  {
	    error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
	    return const0_rtx;
	  }

	if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
	  op0 = copy_to_mode_reg (mode0, op0);
	if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
	  op1 = copy_to_mode_reg (mode1, op1);

	/* op2 is the literal checked above, so it needs no predicate
	   test or register copy.  */
	pat = GEN_FCN (d->icode) (op0, op1, op2);
	if (pat != 0)
	  emit_insn (pat);

	*expandedp = true;
	return NULL_RTX;
      }

  return NULL_RTX;
}
4661
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.  Tries the
   load, store and dst subfamilies first, then the irregular cases,
   then the abs and predicate tables, then the LV* loads.  */
static rtx
altivec_expand_builtin (exp, target, expandedp)
     tree exp;
     rtx target;
     bool *expandedp;
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  enum insn_code icode;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0;
  rtx op0, pat;
  enum machine_mode tmode, mode0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);

  /* Each subfamily expander reports through *EXPANDEDP whether it
     recognized FCODE.  */
  target = altivec_expand_ld_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_st_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_dst_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  /* Assume success from here on; the final fall-through resets it.  */
  *expandedp = true;

  switch (fcode)
    {
    case ALTIVEC_BUILTIN_STVX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
    case ALTIVEC_BUILTIN_STVEBX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
    case ALTIVEC_BUILTIN_STVEHX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
    case ALTIVEC_BUILTIN_STVEWX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
    case ALTIVEC_BUILTIN_STVXL:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);

    case ALTIVEC_BUILTIN_MFVSCR:
      /* Move-from-VSCR: no arguments, just a result register.  */
      icode = CODE_FOR_altivec_mfvscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    case ALTIVEC_BUILTIN_MTVSCR:
      /* Move-to-VSCR: one argument, no result.  */
      icode = CODE_FOR_altivec_mtvscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSSALL:
      emit_insn (gen_altivec_dssall ());
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSS:
      icode = CODE_FOR_altivec_dss;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return const0_rtx;

      /* The stream tag is encoded in the instruction.  */
      if (TREE_CODE (arg0) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg0) & ~0x3)
	{
	  error ("argument to dss must be a 2-bit unsigned literal");
	  return const0_rtx;
	}

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      emit_insn (gen_altivec_dss (op0));
      return NULL_RTX;
    }

  /* Expand abs* operations.  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
    if (d->code == fcode)
      return altivec_expand_abs_builtin (d->icode, arglist, target);

  /* Expand the AltiVec predicates.  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
    if (dp->code == fcode)
      return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);

  /* LV* are funky.  We initialized them differently.  */
  switch (fcode)
    {
    case ALTIVEC_BUILTIN_LVSL:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVSR:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEBX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEHX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEWX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVXL:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx,
					  arglist, target);
    default:
      break;
    }

  /* Nothing matched: tell the caller we did not handle this code.  */
  *expandedp = false;
  return NULL_RTX;
}
4813
/* Binops that need to be initialized manually, but can be expanded
   automagically by rs6000_expand_binop_builtin.  Entry layout matches
   the other builtin_description tables in this file; the first field
   (the mask) is left zero here.  */
static struct builtin_description bdesc_2arg_spe[] =
{
  { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
  { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
  { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
  { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
  { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
  { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
  { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
  { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
  { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
  { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
  { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
  { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
  { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
  { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
  { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
  { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
  { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
  { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
  { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
  { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
  { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
  { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
};
4841
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.

   This expands the SPE builtins that are not simple unary and binary
   operations.  */
static rtx
spe_expand_builtin (exp, target, expandedp)
     tree exp;
     rtx target;
     bool *expandedp;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg1, arg0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  enum insn_code icode;
  enum machine_mode tmode, mode0;
  rtx pat, op0;
  struct builtin_description *d;
  size_t i;

  *expandedp = true;

  /* Syntax check for a 5-bit unsigned immediate.  The ev-store
     builtins listed below carry their offset as the third argument,
     which must be a constant small enough to encode.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDD:
    case SPE_BUILTIN_EVSTDH:
    case SPE_BUILTIN_EVSTDW:
    case SPE_BUILTIN_EVSTWHE:
    case SPE_BUILTIN_EVSTWHO:
    case SPE_BUILTIN_EVSTWWE:
    case SPE_BUILTIN_EVSTWWO:
      arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      if (TREE_CODE (arg1) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg1) & ~0x1f)
	{
	  error ("argument 2 must be a 5-bit unsigned literal");
	  return const0_rtx;
	}
      break;
    default:
      break;
    }

  /* The irregular SPE loads are still plain binops as far as
     expansion goes.  */
  d = (struct builtin_description *) bdesc_2arg_spe;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
    if (d->code == fcode)
      return rs6000_expand_binop_builtin (d->icode, arglist, target);

  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_predicate_builtin (d->icode, arglist, target);

  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_evsel_builtin (d->icode, arglist, target);

  /* The ev-stores share the AltiVec three-operand store expander.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDDX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
    case SPE_BUILTIN_EVSTDHX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
    case SPE_BUILTIN_EVSTDWX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
    case SPE_BUILTIN_EVSTWHEX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
    case SPE_BUILTIN_EVSTWHOX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
    case SPE_BUILTIN_EVSTWWEX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
    case SPE_BUILTIN_EVSTWWOX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
    case SPE_BUILTIN_EVSTDD:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
    case SPE_BUILTIN_EVSTDH:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
    case SPE_BUILTIN_EVSTDW:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
    case SPE_BUILTIN_EVSTWHE:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
    case SPE_BUILTIN_EVSTWHO:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
    case SPE_BUILTIN_EVSTWWE:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
    case SPE_BUILTIN_EVSTWWO:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
    case SPE_BUILTIN_MFSPEFSCR:
      /* Move-from-SPEFSCR: no arguments, just a result register.  */
      icode = CODE_FOR_spe_mfspefscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;
    case SPE_BUILTIN_MTSPEFSCR:
      /* Move-to-SPEFSCR: one argument, no result.  */
      icode = CODE_FOR_spe_mtspefscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      if (arg0 == error_mark_node)
	return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;
    default:
      break;
    }

  /* Not one of ours.  */
  *expandedp = false;
  return NULL_RTX;
}
4969
/* Expand an SPE predicate builtin: __builtin_spe_* (FORM, A, B).
   FORM is a compile-time constant selecting which CR bit of the
   compare result to materialize in TARGET (see the big comment
   below for the bit layout).  Returns the SImode result, or
   const0_rtx on invalid input.  */
static rtx
spe_expand_predicate_builtin (icode, arglist, target)
     enum insn_code icode;
     tree arglist;
     rtx target;
{
  rtx pat, scratch, tmp;
  tree form = TREE_VALUE (arglist);
  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int form_int;
  enum rtx_code code;

  if (TREE_CODE (form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_spe_predicate must be a constant");
      return const0_rtx;
    }
  else
    form_int = TREE_INT_CST_LOW (form);

  /* Both compare operands must share a mode.  */
  if (mode0 != mode1)
    abort ();

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  /* The predicate result is always a plain SImode 0/1 value.  */
  if (target == 0
      || GET_MODE (target) != SImode
      || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
    target = gen_reg_rtx (SImode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  /* Emit the compare into a CC scratch register.  */
  scratch = gen_reg_rtx (CCmode);

  pat = GEN_FCN (icode) (scratch, op0, op1);
  if (! pat)
    return const0_rtx;
  emit_insn (pat);

  /* There are 4 variants for each predicate: _any_, _all_, _upper_,
     _lower_.  We use one compare, but look in different bits of the
     CR for each variant.

     There are 2 elements in each SPE simd type (upper/lower).  The CR
     bits are set as follows:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     U     |   L    | (U | L) | (U & L)

     So, for an "all" relationship, BIT 3 would be set.
     For an "any" relationship, BIT 2 would be set.  Etc.

     Following traditional nomenclature, these bits map to:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     LT    | GT     | EQ      | OV

     Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
  */

  switch (form_int)
    {
      /* All variant.  OV bit.  */
    case 0:
      /* We need to get to the OV bit, which is the ORDERED bit.  We
	 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
	 that's ugly and will trigger a validate_condition_mode abort.
	 So let's just use another pattern.  */
      emit_insn (gen_move_from_CR_ov_bit (target, scratch));
      return target;
      /* Any variant.  EQ bit.  */
    case 1:
      code = EQ;
      break;
      /* Upper variant.  LT bit.  */
    case 2:
      code = LT;
      break;
      /* Lower variant.  GT bit.  */
    case 3:
      code = GT;
      break;
    default:
      error ("argument 1 of __builtin_spe_predicate is out of range");
      return const0_rtx;
    }

  /* Materialize the chosen CR bit as 0/1 by moving the comparison
     (code:SI scratch 0) into TARGET.  */
  tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
  emit_move_insn (target, tmp);

  return target;
}
5071
/* The evsel builtins look like this:

     e = __builtin_spe_evsel_OP (a, b, c, d);

   and work like this:

     e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
     e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
*/

/* Expand an evsel builtin.  ICODE is the compare pattern for OP;
   the select itself is emitted via gen_spe_evsel or (for V2SF)
   gen_spe_evsel_fs.  */
static rtx
spe_expand_evsel_builtin (icode, arglist, target)
     enum insn_code icode;
     tree arglist;
     rtx target;
{
  rtx pat, scratch;
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
  rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  if (mode0 != mode1)
    abort ();

  if (arg0 == error_mark_node || arg1 == error_mark_node
      || arg2 == error_mark_node || arg3 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != mode0
      || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
    target = gen_reg_rtx (mode0);

  /* Operand 1's predicate is reused for all four vector operands;
     safe because mode0 == mode1 was checked above.  */
  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode0, op1);
  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
    op2 = copy_to_mode_reg (mode0, op2);
  if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
    op3 = copy_to_mode_reg (mode0, op3);

  /* Generate the compare.  */
  scratch = gen_reg_rtx (CCmode);
  pat = GEN_FCN (icode) (scratch, op0, op1);
  if (! pat)
    return const0_rtx;
  emit_insn (pat);

  /* Select between op2 and op3 based on the compare result.  */
  if (mode0 == V2SImode)
    emit_insn (gen_spe_evsel (target, op2, op3, scratch));
  else
    emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));

  return target;
}
5135
5136 /* Expand an expression EXP that calls a built-in function,
5137 with result going to TARGET if that's convenient
5138 (and in mode MODE if that's convenient).
5139 SUBTARGET may be used as the target for computing one of EXP's operands.
5140 IGNORE is nonzero if the value is to be ignored. */
5141
5142 static rtx
5143 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
5144 tree exp;
5145 rtx target;
5146 rtx subtarget ATTRIBUTE_UNUSED;
5147 enum machine_mode mode ATTRIBUTE_UNUSED;
5148 int ignore ATTRIBUTE_UNUSED;
5149 {
5150 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5151 tree arglist = TREE_OPERAND (exp, 1);
5152 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5153 struct builtin_description *d;
5154 size_t i;
5155 rtx ret;
5156 bool success;
5157
5158 if (TARGET_ALTIVEC)
5159 {
5160 ret = altivec_expand_builtin (exp, target, &success);
5161
5162 if (success)
5163 return ret;
5164 }
5165 if (TARGET_SPE)
5166 {
5167 ret = spe_expand_builtin (exp, target, &success);
5168
5169 if (success)
5170 return ret;
5171 }
5172
5173 if (TARGET_ALTIVEC || TARGET_SPE)
5174 {
5175 /* Handle simple unary operations. */
5176 d = (struct builtin_description *) bdesc_1arg;
5177 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5178 if (d->code == fcode)
5179 return rs6000_expand_unop_builtin (d->icode, arglist, target);
5180
5181 /* Handle simple binary operations. */
5182 d = (struct builtin_description *) bdesc_2arg;
5183 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5184 if (d->code == fcode)
5185 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5186
5187 /* Handle simple ternary operations. */
5188 d = (struct builtin_description *) bdesc_3arg;
5189 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
5190 if (d->code == fcode)
5191 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
5192 }
5193
5194 abort ();
5195 return NULL_RTX;
5196 }
5197
/* Create the machine-specific builtin functions for whichever of
   SPE and AltiVec is enabled.  The simple unary/binary builtins
   shared by both sets are registered by rs6000_common_init_builtins.  */
static void
rs6000_init_builtins ()
{
  if (TARGET_SPE)
    spe_init_builtins ();
  if (TARGET_ALTIVEC)
    altivec_init_builtins ();
  if (TARGET_ALTIVEC || TARGET_SPE)
    rs6000_common_init_builtins ();
}
5208
5209 /* Search through a set of builtins and enable the mask bits.
5210 DESC is an array of builtins.
5211 SIZE is the totaly number of builtins.
5212 START is the builtin enum at which to start.
5213 END is the builtin enum at which to end. */
5214 static void
5215 enable_mask_for_builtins (desc, size, start, end)
5216 struct builtin_description *desc;
5217 int size;
5218 enum rs6000_builtins start, end;
5219 {
5220 int i;
5221
5222 for (i = 0; i < size; ++i)
5223 if (desc[i].code == start)
5224 break;
5225
5226 if (i == size)
5227 return;
5228
5229 for (; i < size; ++i)
5230 {
5231 /* Flip all the bits on. */
5232 desc[i].mask = target_flags;
5233 if (desc[i].code == end)
5234 break;
5235 }
5236 }
5237
5238 static void
5239 spe_init_builtins ()
5240 {
5241 tree endlink = void_list_node;
5242 tree puint_type_node = build_pointer_type (unsigned_type_node);
5243 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
5244 tree pv2si_type_node = build_pointer_type (V2SI_type_node);
5245 struct builtin_description *d;
5246 size_t i;
5247
5248 tree v2si_ftype_4_v2si
5249 = build_function_type
5250 (V2SI_type_node,
5251 tree_cons (NULL_TREE, V2SI_type_node,
5252 tree_cons (NULL_TREE, V2SI_type_node,
5253 tree_cons (NULL_TREE, V2SI_type_node,
5254 tree_cons (NULL_TREE, V2SI_type_node,
5255 endlink)))));
5256
5257 tree v2sf_ftype_4_v2sf
5258 = build_function_type
5259 (V2SF_type_node,
5260 tree_cons (NULL_TREE, V2SF_type_node,
5261 tree_cons (NULL_TREE, V2SF_type_node,
5262 tree_cons (NULL_TREE, V2SF_type_node,
5263 tree_cons (NULL_TREE, V2SF_type_node,
5264 endlink)))));
5265
5266 tree int_ftype_int_v2si_v2si
5267 = build_function_type
5268 (integer_type_node,
5269 tree_cons (NULL_TREE, integer_type_node,
5270 tree_cons (NULL_TREE, V2SI_type_node,
5271 tree_cons (NULL_TREE, V2SI_type_node,
5272 endlink))));
5273
5274 tree int_ftype_int_v2sf_v2sf
5275 = build_function_type
5276 (integer_type_node,
5277 tree_cons (NULL_TREE, integer_type_node,
5278 tree_cons (NULL_TREE, V2SF_type_node,
5279 tree_cons (NULL_TREE, V2SF_type_node,
5280 endlink))));
5281
5282 tree void_ftype_v2si_puint_int
5283 = build_function_type (void_type_node,
5284 tree_cons (NULL_TREE, V2SI_type_node,
5285 tree_cons (NULL_TREE, puint_type_node,
5286 tree_cons (NULL_TREE,
5287 integer_type_node,
5288 endlink))));
5289
5290 tree void_ftype_v2si_puint_char
5291 = build_function_type (void_type_node,
5292 tree_cons (NULL_TREE, V2SI_type_node,
5293 tree_cons (NULL_TREE, puint_type_node,
5294 tree_cons (NULL_TREE,
5295 char_type_node,
5296 endlink))));
5297
5298 tree void_ftype_v2si_pv2si_int
5299 = build_function_type (void_type_node,
5300 tree_cons (NULL_TREE, V2SI_type_node,
5301 tree_cons (NULL_TREE, pv2si_type_node,
5302 tree_cons (NULL_TREE,
5303 integer_type_node,
5304 endlink))));
5305
5306 tree void_ftype_v2si_pv2si_char
5307 = build_function_type (void_type_node,
5308 tree_cons (NULL_TREE, V2SI_type_node,
5309 tree_cons (NULL_TREE, pv2si_type_node,
5310 tree_cons (NULL_TREE,
5311 char_type_node,
5312 endlink))));
5313
5314 tree void_ftype_int
5315 = build_function_type (void_type_node,
5316 tree_cons (NULL_TREE, integer_type_node, endlink));
5317
5318 tree int_ftype_void
5319 = build_function_type (integer_type_node,
5320 tree_cons (NULL_TREE, void_type_node, endlink));
5321
5322 tree v2si_ftype_pv2si_int
5323 = build_function_type (V2SI_type_node,
5324 tree_cons (NULL_TREE, pv2si_type_node,
5325 tree_cons (NULL_TREE, integer_type_node,
5326 endlink)));
5327
5328 tree v2si_ftype_puint_int
5329 = build_function_type (V2SI_type_node,
5330 tree_cons (NULL_TREE, puint_type_node,
5331 tree_cons (NULL_TREE, integer_type_node,
5332 endlink)));
5333
5334 tree v2si_ftype_pushort_int
5335 = build_function_type (V2SI_type_node,
5336 tree_cons (NULL_TREE, pushort_type_node,
5337 tree_cons (NULL_TREE, integer_type_node,
5338 endlink)));
5339
5340 /* The initialization of the simple binary and unary builtins is
5341 done in rs6000_common_init_builtins, but we have to enable the
5342 mask bits here manually because we have run out of `target_flags'
5343 bits. We really need to redesign this mask business. */
5344
5345 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
5346 ARRAY_SIZE (bdesc_2arg),
5347 SPE_BUILTIN_EVADDW,
5348 SPE_BUILTIN_EVXOR);
5349 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
5350 ARRAY_SIZE (bdesc_1arg),
5351 SPE_BUILTIN_EVABS,
5352 SPE_BUILTIN_EVSUBFUSIAAW);
5353 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
5354 ARRAY_SIZE (bdesc_spe_predicates),
5355 SPE_BUILTIN_EVCMPEQ,
5356 SPE_BUILTIN_EVFSTSTLT);
5357 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
5358 ARRAY_SIZE (bdesc_spe_evsel),
5359 SPE_BUILTIN_EVSEL_CMPGTS,
5360 SPE_BUILTIN_EVSEL_FSTSTEQ);
5361
5362 /* Initialize irregular SPE builtins. */
5363
5364 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
5365 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
5366 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
5367 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
5368 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
5369 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
5370 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
5371 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
5372 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
5373 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
5374 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
5375 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
5376 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
5377 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
5378 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
5379 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
5380
5381 /* Loads. */
5382 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
5383 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
5384 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
5385 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
5386 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
5387 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
5388 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
5389 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
5390 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
5391 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
5392 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
5393 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
5394 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
5395 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
5396 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
5397 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
5398 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
5399 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
5400 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
5401 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
5402 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
5403 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
5404
5405 /* Predicates. */
5406 d = (struct builtin_description *) bdesc_spe_predicates;
5407 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
5408 {
5409 tree type;
5410
5411 switch (insn_data[d->icode].operand[1].mode)
5412 {
5413 case V2SImode:
5414 type = int_ftype_int_v2si_v2si;
5415 break;
5416 case V2SFmode:
5417 type = int_ftype_int_v2sf_v2sf;
5418 break;
5419 default:
5420 abort ();
5421 }
5422
5423 def_builtin (d->mask, d->name, type, d->code);
5424 }
5425
5426 /* Evsel predicates. */
5427 d = (struct builtin_description *) bdesc_spe_evsel;
5428 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
5429 {
5430 tree type;
5431
5432 switch (insn_data[d->icode].operand[1].mode)
5433 {
5434 case V2SImode:
5435 type = v2si_ftype_4_v2si;
5436 break;
5437 case V2SFmode:
5438 type = v2sf_ftype_4_v2sf;
5439 break;
5440 default:
5441 abort ();
5442 }
5443
5444 def_builtin (d->mask, d->name, type, d->code);
5445 }
5446 }
5447
/* Register the AltiVec-specific builtin functions: the internal
   load/store helpers, the VSCR and data-stream controls, the lvsl/lvsr
   and lv*x/stv*x memory builtins, the dst* prefetch variants, the
   vector comparison predicates, and the abs* operators.  The simple
   unary/binary/ternary operators shared with SPE are registered in
   rs6000_common_init_builtins instead.  */
static void
altivec_init_builtins ()
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  /* Pointer (and const-qualified pointer) types used in the builtin
     signatures below.  */
  tree pfloat_type_node = build_pointer_type (float_type_node);
  tree pint_type_node = build_pointer_type (integer_type_node);
  tree pshort_type_node = build_pointer_type (short_integer_type_node);
  tree pchar_type_node = build_pointer_type (char_type_node);

  tree pvoid_type_node = build_pointer_type (void_type_node);

  tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
  tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
  tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
  tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));

  tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));

  /* Function-type nodes, one per distinct builtin signature used
     below; named RETURN_ftype_ARGS.  */
  tree int_ftype_int_v4si_v4si
    = build_function_type_list (integer_type_node,
				integer_type_node, V4SI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_pcfloat
    = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
  tree void_ftype_pfloat_v4sf
    = build_function_type_list (void_type_node,
				pfloat_type_node, V4SF_type_node, NULL_TREE);
  tree v4si_ftype_pcint
    = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
  tree void_ftype_pint_v4si
    = build_function_type_list (void_type_node,
				pint_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_pcshort
    = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
  tree void_ftype_pshort_v8hi
    = build_function_type_list (void_type_node,
				pshort_type_node, V8HI_type_node, NULL_TREE);
  tree v16qi_ftype_pcchar
    = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
  tree void_ftype_pchar_v16qi
    = build_function_type_list (void_type_node,
				pchar_type_node, V16QI_type_node, NULL_TREE);
  tree void_ftype_v4si
    = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_void
    = build_function_type (V8HI_type_node, void_list_node);
  tree void_ftype_void
    = build_function_type (void_type_node, void_list_node);
  tree void_ftype_qi
    = build_function_type_list (void_type_node, char_type_node, NULL_TREE);

  tree v16qi_ftype_int_pcvoid
    = build_function_type_list (V16QI_type_node,
				integer_type_node, pcvoid_type_node, NULL_TREE);
  tree v8hi_ftype_int_pcvoid
    = build_function_type_list (V8HI_type_node,
				integer_type_node, pcvoid_type_node, NULL_TREE);
  tree v4si_ftype_int_pcvoid
    = build_function_type_list (V4SI_type_node,
				integer_type_node, pcvoid_type_node, NULL_TREE);

  tree void_ftype_v4si_int_pvoid
    = build_function_type_list (void_type_node,
				V4SI_type_node, integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree void_ftype_v16qi_int_pvoid
    = build_function_type_list (void_type_node,
				V16QI_type_node, integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree void_ftype_v8hi_int_pvoid
    = build_function_type_list (void_type_node,
				V8HI_type_node, integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree int_ftype_int_v8hi_v8hi
    = build_function_type_list (integer_type_node,
				integer_type_node, V8HI_type_node,
				V8HI_type_node, NULL_TREE);
  tree int_ftype_int_v16qi_v16qi
    = build_function_type_list (integer_type_node,
				integer_type_node, V16QI_type_node,
				V16QI_type_node, NULL_TREE);
  tree int_ftype_int_v4sf_v4sf
    = build_function_type_list (integer_type_node,
				integer_type_node, V4SF_type_node,
				V4SF_type_node, NULL_TREE);
  tree v4si_ftype_v4si
    = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi
    = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi
    = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf
    = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree void_ftype_pcvoid_int_char
    = build_function_type_list (void_type_node,
				pcvoid_type_node, integer_type_node,
				char_type_node, NULL_TREE);

  /* Irregular AltiVec builtins whose signatures cannot be derived
     from insn_data operand modes; each is registered by hand.  */
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
	       ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
	       ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
	       ALTIVEC_BUILTIN_LD_INTERNAL_4si);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
	       ALTIVEC_BUILTIN_ST_INTERNAL_4si);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
	       ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
	       ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
	       ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
	       ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEHX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVXL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);

  /* Add the DST variants.  All share one signature:
     (const void *addr, int stride, 2-bit literal stream id).  */
  d = (struct builtin_description *) bdesc_dst;
  for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
    def_builtin (d->mask, d->name, void_ftype_pcvoid_int_char, d->code);

  /* Initialize the predicates.  The vector element mode of the
     insn's first input operand selects the signature.  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
    {
      enum machine_mode mode1;
      tree type;

      mode1 = insn_data[dp->icode].operand[1].mode;

      switch (mode1)
	{
	case V4SImode:
	  type = int_ftype_int_v4si_v4si;
	  break;
	case V8HImode:
	  type = int_ftype_int_v8hi_v8hi;
	  break;
	case V16QImode:
	  type = int_ftype_int_v16qi_v16qi;
	  break;
	case V4SFmode:
	  type = int_ftype_int_v4sf_v4sf;
	  break;
	default:
	  /* A table entry with an unexpected operand mode is a
	     machine-description bug.  */
	  abort ();
	}

      def_builtin (dp->mask, dp->name, type, dp->code);
    }

  /* Initialize the abs* operators.  Here the insn's output operand
     mode selects the (unary, same-mode) signature.  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
    {
      enum machine_mode mode0;
      tree type;

      mode0 = insn_data[d->icode].operand[0].mode;

      switch (mode0)
	{
	case V4SImode:
	  type = v4si_ftype_v4si;
	  break;
	case V8HImode:
	  type = v8hi_ftype_v8hi;
	  break;
	case V16QImode:
	  type = v16qi_ftype_v16qi;
	  break;
	case V4SFmode:
	  type = v4sf_ftype_v4sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (d->mask, d->name, type, d->code);
    }
}
5646
/* Register the builtins common to AltiVec and SPE: the simple ternary
   (bdesc_3arg), binary (bdesc_2arg), and unary (bdesc_1arg)
   operators.  Each builtin's function type is chosen by matching the
   operand modes recorded in insn_data for its icode against a fixed
   set of known mode combinations; an unrecognized combination aborts,
   since it indicates an inconsistent machine description.  */
static void
rs6000_common_init_builtins ()
{
  struct builtin_description *d;
  size_t i;

  /* Function-type nodes, one per distinct builtin signature used
     below; named RETURN_ftype_ARGS.  "char" arguments correspond to
     QImode immediate (literal) operands.  */
  tree v4sf_ftype_v4sf_v4sf_v16qi
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node,
				V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v4si_v4si_v16qi
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, V4SI_type_node,
				V16QI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi_v16qi
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, V8HI_type_node,
				V16QI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_v16qi_v16qi
    = build_function_type_list (V16QI_type_node,
				V16QI_type_node, V16QI_type_node,
				V16QI_type_node, NULL_TREE);
  tree v4si_ftype_char
    = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
  tree v8hi_ftype_char
    = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
  tree v16qi_ftype_char
    = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
  tree v8hi_ftype_v16qi
    = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf
    = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);

  tree v2si_ftype_v2si_v2si
    = build_function_type_list (V2SI_type_node,
				V2SI_type_node, V2SI_type_node, NULL_TREE);

  tree v2sf_ftype_v2sf_v2sf
    = build_function_type_list (V2SF_type_node,
				V2SF_type_node, V2SF_type_node, NULL_TREE);

  tree v2si_ftype_int_int
    = build_function_type_list (V2SI_type_node,
				integer_type_node, integer_type_node,
				NULL_TREE);

  tree v2si_ftype_v2si
    = build_function_type_list (V2SI_type_node, V2SI_type_node, NULL_TREE);

  tree v2sf_ftype_v2sf
    = build_function_type_list (V2SF_type_node,
				V2SF_type_node, NULL_TREE);

  tree v2sf_ftype_v2si
    = build_function_type_list (V2SF_type_node,
				V2SI_type_node, NULL_TREE);

  tree v2si_ftype_v2sf
    = build_function_type_list (V2SI_type_node,
				V2SF_type_node, NULL_TREE);

  tree v2si_ftype_v2si_char
    = build_function_type_list (V2SI_type_node,
				V2SI_type_node, char_type_node, NULL_TREE);

  tree v2si_ftype_int_char
    = build_function_type_list (V2SI_type_node,
				integer_type_node, char_type_node, NULL_TREE);

  tree v2si_ftype_char
    = build_function_type_list (V2SI_type_node, char_type_node, NULL_TREE);

  tree int_ftype_int_int
    = build_function_type_list (integer_type_node,
				integer_type_node, integer_type_node,
				NULL_TREE);

  tree v4si_ftype_v4si_v4si
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_v4si_char
    = build_function_type_list (V4SF_type_node,
				V4SI_type_node, char_type_node, NULL_TREE);
  tree v4si_ftype_v4sf_char
    = build_function_type_list (V4SI_type_node,
				V4SF_type_node, char_type_node, NULL_TREE);
  tree v4si_ftype_v4si_char
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, char_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_char
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, char_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_char
    = build_function_type_list (V16QI_type_node,
				V16QI_type_node, char_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_v16qi_char
    = build_function_type_list (V16QI_type_node,
				V16QI_type_node, V16QI_type_node,
				char_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi_char
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, V8HI_type_node,
				char_type_node, NULL_TREE);
  tree v4si_ftype_v4si_v4si_char
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, V4SI_type_node,
				char_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf_char
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node,
				char_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf_v4si
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node,
				V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf_v4sf
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node,
				V4SF_type_node, NULL_TREE);
  tree v4si_ftype_v4si_v4si_v4si
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, V4SI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi_v8hi
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, V8HI_type_node,
				V8HI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi_v8hi_v4si
    = build_function_type_list (V4SI_type_node,
				V8HI_type_node, V8HI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v4si_ftype_v16qi_v16qi_v4si
    = build_function_type_list (V4SI_type_node,
				V16QI_type_node, V16QI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_v16qi
    = build_function_type_list (V16QI_type_node,
				V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v4sf_v4sf
    = build_function_type_list (V4SI_type_node,
				V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree v8hi_ftype_v16qi_v16qi
    = build_function_type_list (V8HI_type_node,
				V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi_v8hi
    = build_function_type_list (V4SI_type_node,
				V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v8hi_ftype_v4si_v4si
    = build_function_type_list (V8HI_type_node,
				V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v16qi_ftype_v8hi_v8hi
    = build_function_type_list (V16QI_type_node,
				V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v4si_ftype_v16qi_v4si
    = build_function_type_list (V4SI_type_node,
				V16QI_type_node, V4SI_type_node, NULL_TREE);
  tree v4si_ftype_v16qi_v16qi
    = build_function_type_list (V4SI_type_node,
				V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi_v4si
    = build_function_type_list (V4SI_type_node,
				V8HI_type_node, V4SI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi
    = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
  tree int_ftype_v4si_v4si
    = build_function_type_list (integer_type_node,
				V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree int_ftype_v4sf_v4sf
    = build_function_type_list (integer_type_node,
				V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree int_ftype_v16qi_v16qi
    = build_function_type_list (integer_type_node,
				V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree int_ftype_v8hi_v8hi
    = build_function_type_list (integer_type_node,
				V8HI_type_node, V8HI_type_node, NULL_TREE);

  /* Add the simple ternary operators.  Entries with a null name or
     CODE_FOR_nothing are placeholders for insns the current target
     does not provide and are skipped.  */
  d = (struct builtin_description *) bdesc_3arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
    {

      enum machine_mode mode0, mode1, mode2, mode3;
      tree type;

      if (d->name == 0 || d->icode == CODE_FOR_nothing)
	continue;

      /* Operand 0 is the output; operands 1-3 are the inputs.  */
      mode0 = insn_data[d->icode].operand[0].mode;
      mode1 = insn_data[d->icode].operand[1].mode;
      mode2 = insn_data[d->icode].operand[2].mode;
      mode3 = insn_data[d->icode].operand[3].mode;

      /* When all four are of the same mode.  */
      if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
	{
	  switch (mode0)
	    {
	    case V4SImode:
	      type = v4si_ftype_v4si_v4si_v4si;
	      break;
	    case V4SFmode:
	      type = v4sf_ftype_v4sf_v4sf_v4sf;
	      break;
	    case V8HImode:
	      type = v8hi_ftype_v8hi_v8hi_v8hi;
	      break;
	    case V16QImode:
	      type = v16qi_ftype_v16qi_v16qi_v16qi;
	      break;
	    default:
	      abort();
	    }
	}
      /* Same output/input element mode, with a V16QI permute-control
	 vector as the third input (vperm-style insns).  */
      else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
	{
	  switch (mode0)
	    {
	    case V4SImode:
	      type = v4si_ftype_v4si_v4si_v16qi;
	      break;
	    case V4SFmode:
	      type = v4sf_ftype_v4sf_v4sf_v16qi;
	      break;
	    case V8HImode:
	      type = v8hi_ftype_v8hi_v8hi_v16qi;
	      break;
	    case V16QImode:
	      type = v16qi_ftype_v16qi_v16qi_v16qi;
	      break;
	    default:
	      abort();
	    }
	}
      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
	       && mode3 == V4SImode)
	type = v4si_ftype_v16qi_v16qi_v4si;
      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
	       && mode3 == V4SImode)
	type = v4si_ftype_v8hi_v8hi_v4si;
      else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
	       && mode3 == V4SImode)
	type = v4sf_ftype_v4sf_v4sf_v4si;

      /* vchar, vchar, vchar, 4 bit literal.  */
      else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
	       && mode3 == QImode)
	type = v16qi_ftype_v16qi_v16qi_char;

      /* vshort, vshort, vshort, 4 bit literal.  */
      else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
	       && mode3 == QImode)
	type = v8hi_ftype_v8hi_v8hi_char;

      /* vint, vint, vint, 4 bit literal.  */
      else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
	       && mode3 == QImode)
	type = v4si_ftype_v4si_v4si_char;

      /* vfloat, vfloat, vfloat, 4 bit literal.  */
      else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
	       && mode3 == QImode)
	type = v4sf_ftype_v4sf_v4sf_char;

      else
	abort ();

      def_builtin (d->mask, d->name, type, d->code);
    }

  /* Add the simple binary operators.  */
  d = (struct builtin_description *) bdesc_2arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    {
      enum machine_mode mode0, mode1, mode2;
      tree type;

      if (d->name == 0 || d->icode == CODE_FOR_nothing)
	continue;

      mode0 = insn_data[d->icode].operand[0].mode;
      mode1 = insn_data[d->icode].operand[1].mode;
      mode2 = insn_data[d->icode].operand[2].mode;

      /* When all three operands are of the same mode.  */
      if (mode0 == mode1 && mode1 == mode2)
	{
	  switch (mode0)
	    {
	    case V4SFmode:
	      type = v4sf_ftype_v4sf_v4sf;
	      break;
	    case V4SImode:
	      type = v4si_ftype_v4si_v4si;
	      break;
	    case V16QImode:
	      type = v16qi_ftype_v16qi_v16qi;
	      break;
	    case V8HImode:
	      type = v8hi_ftype_v8hi_v8hi;
	      break;
	    case V2SImode:
	      type = v2si_ftype_v2si_v2si;
	      break;
	    case V2SFmode:
	      type = v2sf_ftype_v2sf_v2sf;
	      break;
	    case SImode:
	      type = int_ftype_int_int;
	      break;
	    default:
	      abort ();
	    }
	}

      /* A few other combos we really don't want to do manually.  */

      /* vint, vfloat, vfloat.  */
      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
	type = v4si_ftype_v4sf_v4sf;

      /* vshort, vchar, vchar.  */
      else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
	type = v8hi_ftype_v16qi_v16qi;

      /* vint, vshort, vshort.  */
      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
	type = v4si_ftype_v8hi_v8hi;

      /* vshort, vint, vint.  */
      else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
	type = v8hi_ftype_v4si_v4si;

      /* vchar, vshort, vshort.  */
      else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
	type = v16qi_ftype_v8hi_v8hi;

      /* vint, vchar, vint.  */
      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
	type = v4si_ftype_v16qi_v4si;

      /* vint, vchar, vchar.  */
      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
	type = v4si_ftype_v16qi_v16qi;

      /* vint, vshort, vint.  */
      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
	type = v4si_ftype_v8hi_v4si;

      /* vint, vint, 5 bit literal.  */
      else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
	type = v4si_ftype_v4si_char;

      /* vshort, vshort, 5 bit literal.  */
      else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
	type = v8hi_ftype_v8hi_char;

      /* vchar, vchar, 5 bit literal.  */
      else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
	type = v16qi_ftype_v16qi_char;

      /* vfloat, vint, 5 bit literal.  */
      else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
	type = v4sf_ftype_v4si_char;

      /* vint, vfloat, 5 bit literal.  */
      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
	type = v4si_ftype_v4sf_char;

      /* SPE combinations: two ints, int plus literal.  */
      else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
	type = v2si_ftype_int_int;

      else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
	type = v2si_ftype_v2si_char;

      else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
	type = v2si_ftype_int_char;

      /* int, x, x.  */
      else if (mode0 == SImode)
	{
	  switch (mode1)
	    {
	    case V4SImode:
	      type = int_ftype_v4si_v4si;
	      break;
	    case V4SFmode:
	      type = int_ftype_v4sf_v4sf;
	      break;
	    case V16QImode:
	      type = int_ftype_v16qi_v16qi;
	      break;
	    case V8HImode:
	      type = int_ftype_v8hi_v8hi;
	      break;
	    default:
	      abort ();
	    }
	}

      else
	abort ();

      def_builtin (d->mask, d->name, type, d->code);
    }

  /* Add the simple unary operators.  */
  d = (struct builtin_description *) bdesc_1arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
    {
      enum machine_mode mode0, mode1;
      tree type;

      if (d->name == 0 || d->icode == CODE_FOR_nothing)
	continue;

      mode0 = insn_data[d->icode].operand[0].mode;
      mode1 = insn_data[d->icode].operand[1].mode;

      if (mode0 == V4SImode && mode1 == QImode)
	type = v4si_ftype_char;
      else if (mode0 == V8HImode && mode1 == QImode)
	type = v8hi_ftype_char;
      else if (mode0 == V16QImode && mode1 == QImode)
	type = v16qi_ftype_char;
      else if (mode0 == V4SFmode && mode1 == V4SFmode)
	type = v4sf_ftype_v4sf;
      else if (mode0 == V8HImode && mode1 == V16QImode)
	type = v8hi_ftype_v16qi;
      else if (mode0 == V4SImode && mode1 == V8HImode)
	type = v4si_ftype_v8hi;
      else if (mode0 == V2SImode && mode1 == V2SImode)
	type = v2si_ftype_v2si;
      else if (mode0 == V2SFmode && mode1 == V2SFmode)
	type = v2sf_ftype_v2sf;
      else if (mode0 == V2SFmode && mode1 == V2SImode)
	type = v2sf_ftype_v2si;
      else if (mode0 == V2SImode && mode1 == V2SFmode)
	type = v2si_ftype_v2sf;
      else if (mode0 == V2SImode && mode1 == QImode)
	type = v2si_ftype_char;
      else
	abort ();

      def_builtin (d->mask, d->name, type, d->code);
    }
}
6100
6101 \f
6102 /* Expand a block move operation, and return 1 if successful. Return 0
6103 if we should let the compiler generate normal code.
6104
6105 operands[0] is the destination
6106 operands[1] is the source
6107 operands[2] is the length
6108 operands[3] is the alignment */
6109
6110 #define MAX_MOVE_REG 4
6111
6112 int
6113 expand_block_move (operands)
6114 rtx operands[];
6115 {
6116 rtx orig_dest = operands[0];
6117 rtx orig_src = operands[1];
6118 rtx bytes_rtx = operands[2];
6119 rtx align_rtx = operands[3];
6120 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
6121 int align;
6122 int bytes;
6123 int offset;
6124 int move_bytes;
6125
6126 /* If this is not a fixed size move, just call memcpy */
6127 if (! constp)
6128 return 0;
6129
6130 /* If this is not a fixed size alignment, abort */
6131 if (GET_CODE (align_rtx) != CONST_INT)
6132 abort ();
6133 align = INTVAL (align_rtx);
6134
6135 /* Anything to move? */
6136 bytes = INTVAL (bytes_rtx);
6137 if (bytes <= 0)
6138 return 1;
6139
6140 /* store_one_arg depends on expand_block_move to handle at least the size of
6141 reg_parm_stack_space. */
6142 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
6143 return 0;
6144
6145 if (TARGET_STRING) /* string instructions are available */
6146 {
6147 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
6148 {
6149 union {
6150 rtx (*movstrsi) PARAMS ((rtx, rtx, rtx, rtx));
6151 rtx (*mov) PARAMS ((rtx, rtx));
6152 } gen_func;
6153 enum machine_mode mode = BLKmode;
6154 rtx src, dest;
6155
6156 if (bytes > 24 /* move up to 32 bytes at a time */
6157 && ! fixed_regs[5]
6158 && ! fixed_regs[6]
6159 && ! fixed_regs[7]
6160 && ! fixed_regs[8]
6161 && ! fixed_regs[9]
6162 && ! fixed_regs[10]
6163 && ! fixed_regs[11]
6164 && ! fixed_regs[12])
6165 {
6166 move_bytes = (bytes > 32) ? 32 : bytes;
6167 gen_func.movstrsi = gen_movstrsi_8reg;
6168 }
6169 else if (bytes > 16 /* move up to 24 bytes at a time */
6170 && ! fixed_regs[5]
6171 && ! fixed_regs[6]
6172 && ! fixed_regs[7]
6173 && ! fixed_regs[8]
6174 && ! fixed_regs[9]
6175 && ! fixed_regs[10])
6176 {
6177 move_bytes = (bytes > 24) ? 24 : bytes;
6178 gen_func.movstrsi = gen_movstrsi_6reg;
6179 }
6180 else if (bytes > 8 /* move up to 16 bytes at a time */
6181 && ! fixed_regs[5]
6182 && ! fixed_regs[6]
6183 && ! fixed_regs[7]
6184 && ! fixed_regs[8])
6185 {
6186 move_bytes = (bytes > 16) ? 16 : bytes;
6187 gen_func.movstrsi = gen_movstrsi_4reg;
6188 }
6189 else if (bytes >= 8 && TARGET_POWERPC64
6190 /* 64-bit loads and stores require word-aligned
6191 displacements. */
6192 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
6193 {
6194 move_bytes = 8;
6195 mode = DImode;
6196 gen_func.mov = gen_movdi;
6197 }
6198 else if (bytes > 4 && !TARGET_POWERPC64)
6199 { /* move up to 8 bytes at a time */
6200 move_bytes = (bytes > 8) ? 8 : bytes;
6201 gen_func.movstrsi = gen_movstrsi_2reg;
6202 }
6203 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
6204 { /* move 4 bytes */
6205 move_bytes = 4;
6206 mode = SImode;
6207 gen_func.mov = gen_movsi;
6208 }
6209 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
6210 { /* move 2 bytes */
6211 move_bytes = 2;
6212 mode = HImode;
6213 gen_func.mov = gen_movhi;
6214 }
6215 else if (bytes == 1) /* move 1 byte */
6216 {
6217 move_bytes = 1;
6218 mode = QImode;
6219 gen_func.mov = gen_movqi;
6220 }
6221 else
6222 { /* move up to 4 bytes at a time */
6223 move_bytes = (bytes > 4) ? 4 : bytes;
6224 gen_func.movstrsi = gen_movstrsi_1reg;
6225 }
6226
6227 src = adjust_address (orig_src, mode, offset);
6228 dest = adjust_address (orig_dest, mode, offset);
6229
6230 if (mode == BLKmode)
6231 {
6232 /* Move the address into scratch registers. The movstrsi
6233 patterns require zero offset. */
6234 if (!REG_P (XEXP (src, 0)))
6235 {
6236 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
6237 src = replace_equiv_address (src, src_reg);
6238 }
6239 set_mem_size (src, GEN_INT (move_bytes));
6240
6241 if (!REG_P (XEXP (dest, 0)))
6242 {
6243 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
6244 dest = replace_equiv_address (dest, dest_reg);
6245 }
6246 set_mem_size (dest, GEN_INT (move_bytes));
6247
6248 emit_insn ((*gen_func.movstrsi) (dest, src,
6249 GEN_INT (move_bytes & 31),
6250 align_rtx));
6251 }
6252 else
6253 {
6254 rtx tmp_reg = gen_reg_rtx (mode);
6255
6256 emit_insn ((*gen_func.mov) (tmp_reg, src));
6257 emit_insn ((*gen_func.mov) (dest, tmp_reg));
6258 }
6259 }
6260 }
6261
6262 else /* string instructions not available */
6263 {
6264 rtx stores[MAX_MOVE_REG];
6265 int num_reg = 0;
6266 int i;
6267
6268 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
6269 {
6270 rtx (*gen_mov_func) PARAMS ((rtx, rtx));
6271 enum machine_mode mode;
6272 rtx src, dest, tmp_reg;
6273
6274 /* Generate the appropriate load and store, saving the stores
6275 for later. */
6276 if (bytes >= 8 && TARGET_POWERPC64
6277 /* 64-bit loads and stores require word-aligned
6278 displacements. */
6279 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
6280 {
6281 move_bytes = 8;
6282 mode = DImode;
6283 gen_mov_func = gen_movdi;
6284 }
6285 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
6286 {
6287 move_bytes = 4;
6288 mode = SImode;
6289 gen_mov_func = gen_movsi;
6290 }
6291 else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
6292 {
6293 move_bytes = 2;
6294 mode = HImode;
6295 gen_mov_func = gen_movhi;
6296 }
6297 else
6298 {
6299 move_bytes = 1;
6300 mode = QImode;
6301 gen_mov_func = gen_movqi;
6302 }
6303
6304 src = adjust_address (orig_src, mode, offset);
6305 dest = adjust_address (orig_dest, mode, offset);
6306 tmp_reg = gen_reg_rtx (mode);
6307
6308 emit_insn ((*gen_mov_func) (tmp_reg, src));
6309 stores[num_reg++] = (*gen_mov_func) (dest, tmp_reg);
6310
6311 if (num_reg >= MAX_MOVE_REG)
6312 {
6313 for (i = 0; i < num_reg; i++)
6314 emit_insn (stores[i]);
6315 num_reg = 0;
6316 }
6317 }
6318
6319 for (i = 0; i < num_reg; i++)
6320 emit_insn (stores[i]);
6321 }
6322
6323 return 1;
6324 }
6325
6326 \f
6327 /* Return 1 if OP is a load multiple operation. It is known to be a
6328 PARALLEL and the first section will be tested. */
6329
6330 int
6331 load_multiple_operation (op, mode)
6332 rtx op;
6333 enum machine_mode mode ATTRIBUTE_UNUSED;
6334 {
6335 int count = XVECLEN (op, 0);
6336 unsigned int dest_regno;
6337 rtx src_addr;
6338 int i;
6339
6340 /* Perform a quick check so we don't blow up below. */
6341 if (count <= 1
6342 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6343 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6344 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
6345 return 0;
6346
6347 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6348 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
6349
6350 for (i = 1; i < count; i++)
6351 {
6352 rtx elt = XVECEXP (op, 0, i);
6353
6354 if (GET_CODE (elt) != SET
6355 || GET_CODE (SET_DEST (elt)) != REG
6356 || GET_MODE (SET_DEST (elt)) != SImode
6357 || REGNO (SET_DEST (elt)) != dest_regno + i
6358 || GET_CODE (SET_SRC (elt)) != MEM
6359 || GET_MODE (SET_SRC (elt)) != SImode
6360 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
6361 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
6362 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
6363 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
6364 return 0;
6365 }
6366
6367 return 1;
6368 }
6369
6370 /* Similar, but tests for store multiple. Here, the second vector element
6371 is a CLOBBER. It will be tested later. */
6372
6373 int
6374 store_multiple_operation (op, mode)
6375 rtx op;
6376 enum machine_mode mode ATTRIBUTE_UNUSED;
6377 {
6378 int count = XVECLEN (op, 0) - 1;
6379 unsigned int src_regno;
6380 rtx dest_addr;
6381 int i;
6382
6383 /* Perform a quick check so we don't blow up below. */
6384 if (count <= 1
6385 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6386 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
6387 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
6388 return 0;
6389
6390 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6391 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
6392
6393 for (i = 1; i < count; i++)
6394 {
6395 rtx elt = XVECEXP (op, 0, i + 1);
6396
6397 if (GET_CODE (elt) != SET
6398 || GET_CODE (SET_SRC (elt)) != REG
6399 || GET_MODE (SET_SRC (elt)) != SImode
6400 || REGNO (SET_SRC (elt)) != src_regno + i
6401 || GET_CODE (SET_DEST (elt)) != MEM
6402 || GET_MODE (SET_DEST (elt)) != SImode
6403 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
6404 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
6405 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
6406 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
6407 return 0;
6408 }
6409
6410 return 1;
6411 }
6412
/* Return a string to perform a load_multiple operation.
   operands[0] is the vector.
   operands[1] is the source address.
   operands[2] is the first destination register.  */

const char *
rs6000_output_load_multiple (operands)
     rtx operands[3];
{
  /* We have to handle the case where the pseudo used to contain the address
     is assigned to one of the output registers.  */
  int i, j;
  int words = XVECLEN (operands[0], 0);
  rtx xop[10];

  /* A single word needs no lswi; emit a plain load.  */
  if (XVECLEN (operands[0], 0) == 1)
    return "{l|lwz} %2,0(%1)";

  /* Look for a destination register that overlaps the address register
     (operands[1]).  */
  for (i = 0; i < words; i++)
    if (refers_to_regno_p (REGNO (operands[2]) + i,
			   REGNO (operands[2]) + i + 1, operands[1], 0))
      {
	if (i == words-1)
	  {
	    /* Address register is the last destination: lswi the first
	       WORDS-1 words, then load the final word (clobbering the
	       address) with an ordinary load.  */
	    xop[0] = GEN_INT (4 * (words-1));
	    xop[1] = operands[1];
	    xop[2] = operands[2];
	    output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
	    return "";
	  }
	else if (i == 0)
	  {
	    /* Address register is the first destination: step the address
	       past word 0, lswi the remaining words into the following
	       registers, then load word 0 into the address register last.  */
	    xop[0] = GEN_INT (4 * (words-1));
	    xop[1] = operands[1];
	    xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
	    output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
	    return "";
	  }
	else
	  {
	    /* Address register is a middle destination: give up on lswi
	       and emit individual loads, doing the overlapping one last.  */
	    for (j = 0; j < words; j++)
	      if (j != i)
		{
		  xop[0] = GEN_INT (j * 4);
		  xop[1] = operands[1];
		  xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
		  output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
		}
	    xop[0] = GEN_INT (i * 4);
	    xop[1] = operands[1];
	    output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
	    return "";
	  }
      }

  /* No overlap: a single lswi does the whole transfer.  */
  return "{lsi|lswi} %2,%1,%N0";
}
6470
6471 /* Return 1 for a parallel vrsave operation. */
6472
6473 int
6474 vrsave_operation (op, mode)
6475 rtx op;
6476 enum machine_mode mode ATTRIBUTE_UNUSED;
6477 {
6478 int count = XVECLEN (op, 0);
6479 unsigned int dest_regno, src_regno;
6480 int i;
6481
6482 if (count <= 1
6483 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6484 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6485 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
6486 return 0;
6487
6488 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6489 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6490
6491 if (dest_regno != VRSAVE_REGNO
6492 && src_regno != VRSAVE_REGNO)
6493 return 0;
6494
6495 for (i = 1; i < count; i++)
6496 {
6497 rtx elt = XVECEXP (op, 0, i);
6498
6499 if (GET_CODE (elt) != CLOBBER
6500 && GET_CODE (elt) != SET)
6501 return 0;
6502 }
6503
6504 return 1;
6505 }
6506
/* Return 1 for an PARALLEL suitable for mtcrf.  */

int
mtcrf_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  int i;
  rtx src_reg;

  /* Perform a quick check so we don't blow up below.  */
  if (count < 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
      || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
    return 0;
  src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);

  /* The value being moved to the CR fields must come from a single
     SImode integer register.  */
  if (GET_CODE (src_reg) != REG
      || GET_MODE (src_reg) != SImode
      || ! INT_REGNO_P (REGNO (src_reg)))
    return 0;

  /* Every element must set one CCmode CR field from an UNSPEC of
     SRC_REG and a one-bit mask selecting that same field.  */
  for (i = 0; i < count; i++)
    {
      rtx exp = XVECEXP (op, 0, i);
      rtx unspec;
      int maskval;

      if (GET_CODE (exp) != SET
	  || GET_CODE (SET_DEST (exp)) != REG
	  || GET_MODE (SET_DEST (exp)) != CCmode
	  || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
	return 0;
      unspec = SET_SRC (exp);
      /* Single-bit field selector; lower CR numbers map to higher bits.  */
      maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));

      /* NOTE(review): 20 is presumably the unspec number used by the
	 mtcrf patterns in rs6000.md -- confirm against the .md file.  */
      if (GET_CODE (unspec) != UNSPEC
	  || XINT (unspec, 1) != 20
	  || XVECLEN (unspec, 0) != 2
	  || XVECEXP (unspec, 0, 0) != src_reg
	  || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
	  || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
	return 0;
    }
  return 1;
}
6555
/* Return 1 for an PARALLEL suitable for lmw.  */

int
lmw_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int dest_regno;
  rtx src_addr;
  unsigned int base_regno;
  HOST_WIDE_INT offset;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);

  /* lmw always loads through register 31, so the element count is
     fixed by the first destination register.  */
  if (dest_regno > 31
      || count != 32 - (int) dest_regno)
    return 0;

  /* Decompose the first address into (BASE_REGNO, OFFSET).  A base of
     register 0 is rejected (r0 as a base means literal zero).  */
  if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
    {
      offset = 0;
      base_regno = REGNO (src_addr);
      if (base_regno == 0)
	return 0;
    }
  else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
    {
      offset = INTVAL (XEXP (src_addr, 1));
      base_regno = REGNO (XEXP (src_addr, 0));
    }
  else
    return 0;

  /* Element I must load SImode register DEST_REGNO + I from the word
     at BASE_REGNO + OFFSET + 4 * I.  */
  for (i = 0; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);
      rtx newaddr;
      rtx addr_reg;
      HOST_WIDE_INT newoffset;

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || REGNO (SET_DEST (elt)) != dest_regno + i
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != SImode)
	return 0;
      newaddr = XEXP (SET_SRC (elt), 0);
      if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
	{
	  newoffset = 0;
	  addr_reg = newaddr;
	}
      else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
	{
	  addr_reg = XEXP (newaddr, 0);
	  newoffset = INTVAL (XEXP (newaddr, 1));
	}
      else
	return 0;
      if (REGNO (addr_reg) != base_regno
	  || newoffset != offset + 4 * i)
	return 0;
    }

  return 1;
}
6633
/* Return 1 for an PARALLEL suitable for stmw.  */

int
stmw_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int src_regno;
  rtx dest_addr;
  unsigned int base_regno;
  HOST_WIDE_INT offset;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);

  /* stmw always stores through register 31, so the element count is
     fixed by the first source register.  */
  if (src_regno > 31
      || count != 32 - (int) src_regno)
    return 0;

  /* Decompose the first address into (BASE_REGNO, OFFSET).  A base of
     register 0 is rejected (r0 as a base means literal zero).  */
  if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
    {
      offset = 0;
      base_regno = REGNO (dest_addr);
      if (base_regno == 0)
	return 0;
    }
  else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
    {
      offset = INTVAL (XEXP (dest_addr, 1));
      base_regno = REGNO (XEXP (dest_addr, 0));
    }
  else
    return 0;

  /* Element I must store SImode register SRC_REGNO + I to the word at
     BASE_REGNO + OFFSET + 4 * I.  */
  for (i = 0; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);
      rtx newaddr;
      rtx addr_reg;
      HOST_WIDE_INT newoffset;

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_SRC (elt)) != REG
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || REGNO (SET_SRC (elt)) != src_regno + i
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_MODE (SET_DEST (elt)) != SImode)
	return 0;
      newaddr = XEXP (SET_DEST (elt), 0);
      if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
	{
	  newoffset = 0;
	  addr_reg = newaddr;
	}
      else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
	{
	  addr_reg = XEXP (newaddr, 0);
	  newoffset = INTVAL (XEXP (newaddr, 1));
	}
      else
	return 0;
      if (REGNO (addr_reg) != base_regno
	  || newoffset != offset + 4 * i)
	return 0;
    }

  return 1;
}
6711 \f
6712 /* A validation routine: say whether CODE, a condition code, and MODE
6713 match. The other alternatives either don't make sense or should
6714 never be generated. */
6715
6716 static void
6717 validate_condition_mode (code, mode)
6718 enum rtx_code code;
6719 enum machine_mode mode;
6720 {
6721 if (GET_RTX_CLASS (code) != '<'
6722 || GET_MODE_CLASS (mode) != MODE_CC)
6723 abort ();
6724
6725 /* These don't make sense. */
6726 if ((code == GT || code == LT || code == GE || code == LE)
6727 && mode == CCUNSmode)
6728 abort ();
6729
6730 if ((code == GTU || code == LTU || code == GEU || code == LEU)
6731 && mode != CCUNSmode)
6732 abort ();
6733
6734 if (mode != CCFPmode
6735 && (code == ORDERED || code == UNORDERED
6736 || code == UNEQ || code == LTGT
6737 || code == UNGT || code == UNLT
6738 || code == UNGE || code == UNLE))
6739 abort ();
6740
6741 /* These should never be generated except for
6742 flag_unsafe_math_optimizations and flag_finite_math_only. */
6743 if (mode == CCFPmode
6744 && ! flag_unsafe_math_optimizations
6745 && ! flag_finite_math_only
6746 && (code == LE || code == GE
6747 || code == UNEQ || code == LTGT
6748 || code == UNGT || code == UNLT))
6749 abort ();
6750
6751 /* These are invalid; the information is not there. */
6752 if (mode == CCEQmode
6753 && code != EQ && code != NE)
6754 abort ();
6755 }
6756
6757 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
6758 We only check the opcode against the mode of the CC value here. */
6759
6760 int
6761 branch_comparison_operator (op, mode)
6762 rtx op;
6763 enum machine_mode mode ATTRIBUTE_UNUSED;
6764 {
6765 enum rtx_code code = GET_CODE (op);
6766 enum machine_mode cc_mode;
6767
6768 if (GET_RTX_CLASS (code) != '<')
6769 return 0;
6770
6771 cc_mode = GET_MODE (XEXP (op, 0));
6772 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
6773 return 0;
6774
6775 validate_condition_mode (code, cc_mode);
6776
6777 return 1;
6778 }
6779
6780 /* Return 1 if OP is a comparison operation that is valid for a branch
6781 insn and which is true if the corresponding bit in the CC register
6782 is set. */
6783
6784 int
6785 branch_positive_comparison_operator (op, mode)
6786 rtx op;
6787 enum machine_mode mode;
6788 {
6789 enum rtx_code code;
6790
6791 if (! branch_comparison_operator (op, mode))
6792 return 0;
6793
6794 code = GET_CODE (op);
6795 return (code == EQ || code == LT || code == GT
6796 || (TARGET_SPE && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
6797 || code == LTU || code == GTU
6798 || code == UNORDERED);
6799 }
6800
6801 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
6802 We check the opcode against the mode of the CC value and disallow EQ or
6803 NE comparisons for integers. */
6804
6805 int
6806 scc_comparison_operator (op, mode)
6807 rtx op;
6808 enum machine_mode mode;
6809 {
6810 enum rtx_code code = GET_CODE (op);
6811 enum machine_mode cc_mode;
6812
6813 if (GET_MODE (op) != mode && mode != VOIDmode)
6814 return 0;
6815
6816 if (GET_RTX_CLASS (code) != '<')
6817 return 0;
6818
6819 cc_mode = GET_MODE (XEXP (op, 0));
6820 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
6821 return 0;
6822
6823 validate_condition_mode (code, cc_mode);
6824
6825 if (code == NE && cc_mode != CCFPmode)
6826 return 0;
6827
6828 return 1;
6829 }
6830
6831 int
6832 trap_comparison_operator (op, mode)
6833 rtx op;
6834 enum machine_mode mode;
6835 {
6836 if (mode != VOIDmode && mode != GET_MODE (op))
6837 return 0;
6838 return GET_RTX_CLASS (GET_CODE (op)) == '<';
6839 }
6840
6841 int
6842 boolean_operator (op, mode)
6843 rtx op;
6844 enum machine_mode mode ATTRIBUTE_UNUSED;
6845 {
6846 enum rtx_code code = GET_CODE (op);
6847 return (code == AND || code == IOR || code == XOR);
6848 }
6849
6850 int
6851 boolean_or_operator (op, mode)
6852 rtx op;
6853 enum machine_mode mode ATTRIBUTE_UNUSED;
6854 {
6855 enum rtx_code code = GET_CODE (op);
6856 return (code == IOR || code == XOR);
6857 }
6858
6859 int
6860 min_max_operator (op, mode)
6861 rtx op;
6862 enum machine_mode mode ATTRIBUTE_UNUSED;
6863 {
6864 enum rtx_code code = GET_CODE (op);
6865 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
6866 }
6867 \f
6868 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
6869 mask required to convert the result of a rotate insn into a shift
6870 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
6871
6872 int
6873 includes_lshift_p (shiftop, andop)
6874 rtx shiftop;
6875 rtx andop;
6876 {
6877 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
6878
6879 shift_mask <<= INTVAL (shiftop);
6880
6881 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
6882 }
6883
6884 /* Similar, but for right shift. */
6885
6886 int
6887 includes_rshift_p (shiftop, andop)
6888 rtx shiftop;
6889 rtx andop;
6890 {
6891 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
6892
6893 shift_mask >>= INTVAL (shiftop);
6894
6895 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
6896 }
6897
/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
   to perform a left shift.  It must have exactly SHIFTOP least
   signifigant 0's, then one or more 1's, then zero or more 0's.  */

int
includes_rldic_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      c = INTVAL (andop);
      /* All-zeros and all-ones masks never qualify.  */
      if (c == 0 || c == ~0)
	return 0;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);

      /* Find the least signifigant one bit.  */
      lsb = c & -c;

      /* It must coincide with the LSB of the shift mask.  */
      if (-lsb != shift_mask)
	return 0;

      /* Invert to look for the next transition (if any).  */
      c = ~c;

      /* Remove the low group of ones (originally low group of zeros).  */
      c &= -lsb;

      /* Again find the lsb, and check we have all 1's above.  */
      lsb = c & -c;
      return c == -lsb;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* 64-bit mask on a host whose HOST_WIDE_INT may be only 32 bits:
	 the value arrives split into LOW and HIGH halves.  */
      HOST_WIDE_INT low, high, lsb;
      HOST_WIDE_INT shift_mask_low, shift_mask_high;

      low = CONST_DOUBLE_LOW (andop);
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = CONST_DOUBLE_HIGH (andop);

      /* Reject all-zeros and all-ones masks.  */
      if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
	  || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
	return 0;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  /* The mask lies entirely in the high half; run the same
	     transition checks there.  */
	  shift_mask_high = ~0;
	  if (INTVAL (shiftop) > 32)
	    shift_mask_high <<= INTVAL (shiftop) - 32;

	  lsb = high & -high;

	  if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
	    return 0;

	  high = ~high;
	  high &= -lsb;

	  lsb = high & -high;
	  return high == -lsb;
	}

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if (-lsb != shift_mask_low)
	return 0;

      /* The run of ones may extend into the high half; invert both
	 halves and check the remainder is a single run of ones.  */
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = ~high;
      low = ~low;
      low &= -lsb;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  lsb = high & -high;
	  return high == -lsb;
	}

      lsb = low & -low;
      return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
    }
  else
    return 0;
}
6992
/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
   to perform a left shift.  It must have SHIFTOP or more least
   signifigant 0's, with the remainder of the word 1's.  */

int
includes_rldicr_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);
      c = INTVAL (andop);

      /* Find the least signifigant one bit.  */
      lsb = c & -c;

      /* It must be covered by the shift mask.
	 This test also rejects c == 0.  */
      if ((lsb & shift_mask) == 0)
	return 0;

      /* Check we have all 1's above the transition, and reject all 1's.  */
      return c == -lsb && lsb != 1;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* 64-bit mask on a host whose HOST_WIDE_INT may be only 32 bits:
	 the value arrives split into LOW and HIGH halves.  */
      HOST_WIDE_INT low, lsb, shift_mask_low;

      low = CONST_DOUBLE_LOW (andop);

      if (HOST_BITS_PER_WIDE_INT < 64)
	{
	  HOST_WIDE_INT high, shift_mask_high;

	  high = CONST_DOUBLE_HIGH (andop);

	  if (low == 0)
	    {
	      /* The mask lies entirely in the high half; run the same
		 checks there.  */
	      shift_mask_high = ~0;
	      if (INTVAL (shiftop) > 32)
		shift_mask_high <<= INTVAL (shiftop) - 32;

	      lsb = high & -high;

	      if ((lsb & shift_mask_high) == 0)
		return 0;

	      return high == -lsb;
	    }
	  /* The ones run into the high half, which must then be all 1's.  */
	  if (high != ~0)
	    return 0;
	}

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if ((lsb & shift_mask_low) == 0)
	return 0;

      return low == -lsb && lsb != 1;
    }
  else
    return 0;
}
7064
7065 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
7066 for lfq and stfq insns.
7067
7068 Note reg1 and reg2 *must* be hard registers. To be sure we will
7069 abort if we are passed pseudo registers. */
7070
7071 int
7072 registers_ok_for_quad_peep (reg1, reg2)
7073 rtx reg1, reg2;
7074 {
7075 /* We might have been passed a SUBREG. */
7076 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
7077 return 0;
7078
7079 return (REGNO (reg1) == REGNO (reg2) - 1);
7080 }
7081
7082 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
7083 addr1 and addr2 must be in consecutive memory locations
7084 (addr2 == addr1 + 8). */
7085
7086 int
7087 addrs_ok_for_quad_peep (addr1, addr2)
7088 rtx addr1;
7089 rtx addr2;
7090 {
7091 unsigned int reg1;
7092 int offset1;
7093
7094 /* Extract an offset (if used) from the first addr. */
7095 if (GET_CODE (addr1) == PLUS)
7096 {
7097 /* If not a REG, return zero. */
7098 if (GET_CODE (XEXP (addr1, 0)) != REG)
7099 return 0;
7100 else
7101 {
7102 reg1 = REGNO (XEXP (addr1, 0));
7103 /* The offset must be constant! */
7104 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
7105 return 0;
7106 offset1 = INTVAL (XEXP (addr1, 1));
7107 }
7108 }
7109 else if (GET_CODE (addr1) != REG)
7110 return 0;
7111 else
7112 {
7113 reg1 = REGNO (addr1);
7114 /* This was a simple (mem (reg)) expression. Offset is 0. */
7115 offset1 = 0;
7116 }
7117
7118 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
7119 if (GET_CODE (addr2) != PLUS)
7120 return 0;
7121
7122 if (GET_CODE (XEXP (addr2, 0)) != REG
7123 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
7124 return 0;
7125
7126 if (reg1 != REGNO (XEXP (addr2, 0)))
7127 return 0;
7128
7129 /* The offset for the second addr must be 8 more than the first addr. */
7130 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
7131 return 0;
7132
7133 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7134 instructions. */
7135 return 1;
7136 }
7137 \f
/* Return the register class of a scratch register needed to copy IN into
   or out of a register in CLASS in MODE.  If it can be done directly,
   NO_REGS is returned.  */

enum reg_class
secondary_reload_class (class, mode, in)
     enum reg_class class;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx in;
{
  int regno;

  if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
    {
      /* We cannot copy a symbolic operand directly into anything
	 other than BASE_REGS for TARGET_ELF.  So indicate that a
	 register from BASE_REGS is needed as an intermediate
	 register.

	 On Darwin, pic addresses require a load from memory, which
	 needs a base register.  */
      if (class != BASE_REGS
	  && (GET_CODE (in) == SYMBOL_REF
	      || GET_CODE (in) == HIGH
	      || GET_CODE (in) == LABEL_REF
	      || GET_CODE (in) == CONST))
	return BASE_REGS;
    }

  /* Reduce IN to a hard register number, or -1 if it is not (known to
     be) a hard register.  */
  if (GET_CODE (in) == REG)
    {
      regno = REGNO (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  regno = true_regnum (in);
	  if (regno >= FIRST_PSEUDO_REGISTER)
	    regno = -1;
	}
    }
  else if (GET_CODE (in) == SUBREG)
    {
      regno = true_regnum (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = -1;
    }
  else
    regno = -1;

  /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
     into anything.  */
  if (class == GENERAL_REGS || class == BASE_REGS
      || (regno >= 0 && INT_REGNO_P (regno)))
    return NO_REGS;

  /* Constants, memory, and FP registers can go into FP registers.  */
  if ((regno == -1 || FP_REGNO_P (regno))
      && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
    return NO_REGS;

  /* Memory, and AltiVec registers can go into AltiVec registers.  */
  if ((regno == -1 || ALTIVEC_REGNO_P (regno))
      && class == ALTIVEC_REGS)
    return NO_REGS;

  /* We can copy among the CR registers.  */
  if ((class == CR_REGS || class == CR0_REGS)
      && regno >= 0 && CR_REGNO_P (regno))
    return NO_REGS;

  /* Otherwise, we need GENERAL_REGS.  */
  return GENERAL_REGS;
}
7210 \f
/* Given a comparison operation, return the bit number in CCR to test.  We
   know this is a valid comparison.

   SCC_P is 1 if this is for an scc.  That means that %D will have been
   used instead of %C, so the bits will be in different places.

   Return -1 if OP isn't a valid comparison for some reason.  */

int
ccr_bit (op, scc_p)
     rtx op;
     int scc_p;
{
  enum rtx_code code = GET_CODE (op);
  enum machine_mode cc_mode;
  int cc_regnum;
  int base_bit;
  rtx reg;

  if (GET_RTX_CLASS (code) != '<')
    return -1;

  reg = XEXP (op, 0);

  /* The comparison must be against a CR register.  */
  if (GET_CODE (reg) != REG
      || ! CR_REGNO_P (REGNO (reg)))
    abort ();

  cc_mode = GET_MODE (reg);
  cc_regnum = REGNO (reg);
  /* Each CR field occupies four consecutive CCR bits: LT at BASE_BIT,
     then GT, EQ, and SO/UN (per the mapping in the switch below).  */
  base_bit = 4 * (cc_regnum - CR0_REGNO);

  validate_condition_mode (code, cc_mode);

  switch (code)
    {
    case NE:
      /* For SPE hard-float CCFP comparisons, both EQ and NE test bit 1
	 of the field.  */
      if (TARGET_SPE && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
	return base_bit + 1;
      return scc_p ? base_bit + 3 : base_bit + 2;
    case EQ:
      if (TARGET_SPE && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
	return base_bit + 1;
      return base_bit + 2;
    case GT:  case GTU:  case UNLE:
      return base_bit + 1;
    case LT:  case LTU:  case UNGE:
      return base_bit;
    case ORDERED:  case UNORDERED:
      return base_bit + 3;

    case GE:  case GEU:
      /* If scc, we will have done a cror to put the bit in the
	 unordered position.  So test that bit.  For integer, this is ! LT
	 unless this is an scc insn.  */
      return scc_p ? base_bit + 3 : base_bit;

    case LE:  case LEU:
      return scc_p ? base_bit + 3 : base_bit + 1;

    default:
      abort ();
    }
}
7275 \f
/* Return the GOT register.  */

struct rtx_def *
rs6000_got_register (value)
     rtx value ATTRIBUTE_UNUSED;
{
  /* The second flow pass currently (June 1999) can't update
     regs_ever_live without disturbing other parts of the compiler, so
     update it here to make the prolog/epilogue code happy.  */
  if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
    regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;

  /* Record that this function uses the PIC offset table register.  */
  current_function_uses_pic_offset_table = 1;

  return pic_offset_table_rtx;
}
7292 \f
/* Function to init struct machine_function.
   This will be called, via a pointer variable,
   from push_function_context.  */

static struct machine_function *
rs6000_init_machine_status ()
{
  /* Allocate a zeroed machine_function in garbage-collected memory.  */
  return ggc_alloc_cleared (sizeof (machine_function));
}
7302 \f
7303 /* These macros test for integers and extract the low-order bits. */
7304 #define INT_P(X) \
7305 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
7306 && GET_MODE (X) == VOIDmode)
7307
7308 #define INT_LOWPART(X) \
7309 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
7310
7311 int
7312 extract_MB (op)
7313 rtx op;
7314 {
7315 int i;
7316 unsigned long val = INT_LOWPART (op);
7317
7318 /* If the high bit is zero, the value is the first 1 bit we find
7319 from the left. */
7320 if ((val & 0x80000000) == 0)
7321 {
7322 if ((val & 0xffffffff) == 0)
7323 abort ();
7324
7325 i = 1;
7326 while (((val <<= 1) & 0x80000000) == 0)
7327 ++i;
7328 return i;
7329 }
7330
7331 /* If the high bit is set and the low bit is not, or the mask is all
7332 1's, the value is zero. */
7333 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
7334 return 0;
7335
7336 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7337 from the right. */
7338 i = 31;
7339 while (((val >>= 1) & 1) != 0)
7340 --i;
7341
7342 return i;
7343 }
7344
7345 int
7346 extract_ME (op)
7347 rtx op;
7348 {
7349 int i;
7350 unsigned long val = INT_LOWPART (op);
7351
7352 /* If the low bit is zero, the value is the first 1 bit we find from
7353 the right. */
7354 if ((val & 1) == 0)
7355 {
7356 if ((val & 0xffffffff) == 0)
7357 abort ();
7358
7359 i = 30;
7360 while (((val >>= 1) & 1) == 0)
7361 --i;
7362
7363 return i;
7364 }
7365
7366 /* If the low bit is set and the high bit is not, or the mask is all
7367 1's, the value is 31. */
7368 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
7369 return 31;
7370
7371 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7372 from the left. */
7373 i = 0;
7374 while (((val <<= 1) & 0x80000000) != 0)
7375 ++i;
7376
7377 return i;
7378 }
7379
7380 /* Print an operand. Recognize special options, documented below. */
7381
#if TARGET_ELF
/* Relocation suffix and base register index used when printing a
   small-data reference: EABI uses the sda21 relocation with register
   0; other ELF configurations use sdarel with register 13.  */
#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
#else
#define SMALL_DATA_RELOC "sda21"
#define SMALL_DATA_REG 0
#endif
7389
7390 void
7391 print_operand (file, x, code)
7392 FILE *file;
7393 rtx x;
7394 int code;
7395 {
7396 int i;
7397 HOST_WIDE_INT val;
7398 unsigned HOST_WIDE_INT uval;
7399
7400 switch (code)
7401 {
7402 case '.':
7403 /* Write out an instruction after the call which may be replaced
7404 with glue code by the loader. This depends on the AIX version. */
7405 asm_fprintf (file, RS6000_CALL_GLUE);
7406 return;
7407
7408 /* %a is output_address. */
7409
7410 case 'A':
7411 /* If X is a constant integer whose low-order 5 bits are zero,
7412 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
7413 in the AIX assembler where "sri" with a zero shift count
7414 writes a trash instruction. */
7415 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
7416 putc ('l', file);
7417 else
7418 putc ('r', file);
7419 return;
7420
7421 case 'b':
7422 /* If constant, low-order 16 bits of constant, unsigned.
7423 Otherwise, write normally. */
7424 if (INT_P (x))
7425 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
7426 else
7427 print_operand (file, x, 0);
7428 return;
7429
7430 case 'B':
7431 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
7432 for 64-bit mask direction. */
7433 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
7434 return;
7435
7436 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
7437 output_operand. */
7438
7439 case 'D':
7440 /* There used to be a comment for 'C' reading "This is an
7441 optional cror needed for certain floating-point
7442 comparisons. Otherwise write nothing." */
7443
7444 /* Similar, except that this is for an scc, so we must be able to
7445 encode the test in a single bit that is one. We do the above
7446 for any LE, GE, GEU, or LEU and invert the bit for NE. */
7447 if (GET_CODE (x) == LE || GET_CODE (x) == GE
7448 || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
7449 {
7450 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7451
7452 fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
7453 base_bit + 2,
7454 base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
7455 }
7456
7457 else if (GET_CODE (x) == NE)
7458 {
7459 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7460
7461 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
7462 base_bit + 2, base_bit + 2);
7463 }
7464 else if (TARGET_SPE && TARGET_HARD_FLOAT
7465 && GET_CODE (x) == EQ
7466 && GET_MODE (XEXP (x, 0)) == CCFPmode)
7467 {
7468 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7469
7470 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 1,
7471 base_bit + 1, base_bit + 1);
7472 }
7473 return;
7474
7475 case 'E':
7476 /* X is a CR register. Print the number of the EQ bit of the CR */
7477 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7478 output_operand_lossage ("invalid %%E value");
7479 else
7480 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
7481 return;
7482
7483 case 'f':
7484 /* X is a CR register. Print the shift count needed to move it
7485 to the high-order four bits. */
7486 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7487 output_operand_lossage ("invalid %%f value");
7488 else
7489 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
7490 return;
7491
7492 case 'F':
7493 /* Similar, but print the count for the rotate in the opposite
7494 direction. */
7495 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7496 output_operand_lossage ("invalid %%F value");
7497 else
7498 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
7499 return;
7500
7501 case 'G':
7502 /* X is a constant integer. If it is negative, print "m",
7503 otherwise print "z". This is to make an aze or ame insn. */
7504 if (GET_CODE (x) != CONST_INT)
7505 output_operand_lossage ("invalid %%G value");
7506 else if (INTVAL (x) >= 0)
7507 putc ('z', file);
7508 else
7509 putc ('m', file);
7510 return;
7511
7512 case 'h':
7513 /* If constant, output low-order five bits. Otherwise, write
7514 normally. */
7515 if (INT_P (x))
7516 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
7517 else
7518 print_operand (file, x, 0);
7519 return;
7520
7521 case 'H':
7522 /* If constant, output low-order six bits. Otherwise, write
7523 normally. */
7524 if (INT_P (x))
7525 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
7526 else
7527 print_operand (file, x, 0);
7528 return;
7529
7530 case 'I':
7531 /* Print `i' if this is a constant, else nothing. */
7532 if (INT_P (x))
7533 putc ('i', file);
7534 return;
7535
7536 case 'j':
7537 /* Write the bit number in CCR for jump. */
7538 i = ccr_bit (x, 0);
7539 if (i == -1)
7540 output_operand_lossage ("invalid %%j code");
7541 else
7542 fprintf (file, "%d", i);
7543 return;
7544
7545 case 'J':
7546 /* Similar, but add one for shift count in rlinm for scc and pass
7547 scc flag to `ccr_bit'. */
7548 i = ccr_bit (x, 1);
7549 if (i == -1)
7550 output_operand_lossage ("invalid %%J code");
7551 else
7552 /* If we want bit 31, write a shift count of zero, not 32. */
7553 fprintf (file, "%d", i == 31 ? 0 : i + 1);
7554 return;
7555
7556 case 'k':
7557 /* X must be a constant. Write the 1's complement of the
7558 constant. */
7559 if (! INT_P (x))
7560 output_operand_lossage ("invalid %%k value");
7561 else
7562 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
7563 return;
7564
7565 case 'K':
7566 /* X must be a symbolic constant on ELF. Write an
7567 expression suitable for an 'addi' that adds in the low 16
7568 bits of the MEM. */
7569 if (GET_CODE (x) != CONST)
7570 {
7571 print_operand_address (file, x);
7572 fputs ("@l", file);
7573 }
7574 else
7575 {
7576 if (GET_CODE (XEXP (x, 0)) != PLUS
7577 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
7578 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
7579 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
7580 output_operand_lossage ("invalid %%K value");
7581 print_operand_address (file, XEXP (XEXP (x, 0), 0));
7582 fputs ("@l", file);
7583 /* For GNU as, there must be a non-alphanumeric character
7584 between 'l' and the number. The '-' is added by
7585 print_operand() already. */
7586 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
7587 fputs ("+", file);
7588 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
7589 }
7590 return;
7591
7592 /* %l is output_asm_label. */
7593
7594 case 'L':
7595 /* Write second word of DImode or DFmode reference. Works on register
7596 or non-indexed memory only. */
7597 if (GET_CODE (x) == REG)
7598 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
7599 else if (GET_CODE (x) == MEM)
7600 {
7601 /* Handle possible auto-increment. Since it is pre-increment and
7602 we have already done it, we can just use an offset of word. */
7603 if (GET_CODE (XEXP (x, 0)) == PRE_INC
7604 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
7605 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
7606 UNITS_PER_WORD));
7607 else
7608 output_address (XEXP (adjust_address_nv (x, SImode,
7609 UNITS_PER_WORD),
7610 0));
7611
7612 if (small_data_operand (x, GET_MODE (x)))
7613 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
7614 reg_names[SMALL_DATA_REG]);
7615 }
7616 return;
7617
7618 case 'm':
7619 /* MB value for a mask operand. */
7620 if (! mask_operand (x, SImode))
7621 output_operand_lossage ("invalid %%m value");
7622
7623 fprintf (file, "%d", extract_MB (x));
7624 return;
7625
7626 case 'M':
7627 /* ME value for a mask operand. */
7628 if (! mask_operand (x, SImode))
7629 output_operand_lossage ("invalid %%M value");
7630
7631 fprintf (file, "%d", extract_ME (x));
7632 return;
7633
7634 /* %n outputs the negative of its operand. */
7635
7636 case 'N':
7637 /* Write the number of elements in the vector times 4. */
7638 if (GET_CODE (x) != PARALLEL)
7639 output_operand_lossage ("invalid %%N value");
7640 else
7641 fprintf (file, "%d", XVECLEN (x, 0) * 4);
7642 return;
7643
7644 case 'O':
7645 /* Similar, but subtract 1 first. */
7646 if (GET_CODE (x) != PARALLEL)
7647 output_operand_lossage ("invalid %%O value");
7648 else
7649 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
7650 return;
7651
7652 case 'p':
7653 /* X is a CONST_INT that is a power of two. Output the logarithm. */
7654 if (! INT_P (x)
7655 || INT_LOWPART (x) < 0
7656 || (i = exact_log2 (INT_LOWPART (x))) < 0)
7657 output_operand_lossage ("invalid %%p value");
7658 else
7659 fprintf (file, "%d", i);
7660 return;
7661
7662 case 'P':
7663 /* The operand must be an indirect memory reference. The result
7664 is the register number. */
7665 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
7666 || REGNO (XEXP (x, 0)) >= 32)
7667 output_operand_lossage ("invalid %%P value");
7668 else
7669 fprintf (file, "%d", REGNO (XEXP (x, 0)));
7670 return;
7671
7672 case 'q':
7673 /* This outputs the logical code corresponding to a boolean
7674 expression. The expression may have one or both operands
7675 negated (if one, only the first one). For condition register
7676 logical operations, it will also treat the negated
7677 CR codes as NOTs, but not handle NOTs of them. */
7678 {
7679 const char *const *t = 0;
7680 const char *s;
7681 enum rtx_code code = GET_CODE (x);
7682 static const char * const tbl[3][3] = {
7683 { "and", "andc", "nor" },
7684 { "or", "orc", "nand" },
7685 { "xor", "eqv", "xor" } };
7686
7687 if (code == AND)
7688 t = tbl[0];
7689 else if (code == IOR)
7690 t = tbl[1];
7691 else if (code == XOR)
7692 t = tbl[2];
7693 else
7694 output_operand_lossage ("invalid %%q value");
7695
7696 if (GET_CODE (XEXP (x, 0)) != NOT)
7697 s = t[0];
7698 else
7699 {
7700 if (GET_CODE (XEXP (x, 1)) == NOT)
7701 s = t[2];
7702 else
7703 s = t[1];
7704 }
7705
7706 fputs (s, file);
7707 }
7708 return;
7709
7710 case 'R':
7711 /* X is a CR register. Print the mask for `mtcrf'. */
7712 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7713 output_operand_lossage ("invalid %%R value");
7714 else
7715 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
7716 return;
7717
7718 case 's':
7719 /* Low 5 bits of 32 - value */
7720 if (! INT_P (x))
7721 output_operand_lossage ("invalid %%s value");
7722 else
7723 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
7724 return;
7725
7726 case 'S':
7727 /* PowerPC64 mask position. All 0's is excluded.
7728 CONST_INT 32-bit mask is considered sign-extended so any
7729 transition must occur within the CONST_INT, not on the boundary. */
7730 if (! mask64_operand (x, DImode))
7731 output_operand_lossage ("invalid %%S value");
7732
7733 uval = INT_LOWPART (x);
7734
7735 if (uval & 1) /* Clear Left */
7736 {
7737 uval &= ((unsigned HOST_WIDE_INT) 1 << 63 << 1) - 1;
7738 i = 64;
7739 }
7740 else /* Clear Right */
7741 {
7742 uval = ~uval;
7743 uval &= ((unsigned HOST_WIDE_INT) 1 << 63 << 1) - 1;
7744 i = 63;
7745 }
7746 while (uval != 0)
7747 --i, uval >>= 1;
7748 if (i < 0)
7749 abort ();
7750 fprintf (file, "%d", i);
7751 return;
7752
7753 case 't':
7754 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
7755 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
7756 abort ();
7757
7758 /* Bit 3 is OV bit. */
7759 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
7760
7761 /* If we want bit 31, write a shift count of zero, not 32. */
7762 fprintf (file, "%d", i == 31 ? 0 : i + 1);
7763 return;
7764
7765 case 'T':
7766 /* Print the symbolic name of a branch target register. */
7767 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
7768 && REGNO (x) != COUNT_REGISTER_REGNUM))
7769 output_operand_lossage ("invalid %%T value");
7770 else if (REGNO (x) == LINK_REGISTER_REGNUM)
7771 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
7772 else
7773 fputs ("ctr", file);
7774 return;
7775
7776 case 'u':
7777 /* High-order 16 bits of constant for use in unsigned operand. */
7778 if (! INT_P (x))
7779 output_operand_lossage ("invalid %%u value");
7780 else
7781 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
7782 (INT_LOWPART (x) >> 16) & 0xffff);
7783 return;
7784
7785 case 'v':
7786 /* High-order 16 bits of constant for use in signed operand. */
7787 if (! INT_P (x))
7788 output_operand_lossage ("invalid %%v value");
7789 else
7790 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
7791 (INT_LOWPART (x) >> 16) & 0xffff);
7792 return;
7793
7794 case 'U':
7795 /* Print `u' if this has an auto-increment or auto-decrement. */
7796 if (GET_CODE (x) == MEM
7797 && (GET_CODE (XEXP (x, 0)) == PRE_INC
7798 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
7799 putc ('u', file);
7800 return;
7801
7802 case 'V':
7803 /* Print the trap code for this operand. */
7804 switch (GET_CODE (x))
7805 {
7806 case EQ:
7807 fputs ("eq", file); /* 4 */
7808 break;
7809 case NE:
7810 fputs ("ne", file); /* 24 */
7811 break;
7812 case LT:
7813 fputs ("lt", file); /* 16 */
7814 break;
7815 case LE:
7816 fputs ("le", file); /* 20 */
7817 break;
7818 case GT:
7819 fputs ("gt", file); /* 8 */
7820 break;
7821 case GE:
7822 fputs ("ge", file); /* 12 */
7823 break;
7824 case LTU:
7825 fputs ("llt", file); /* 2 */
7826 break;
7827 case LEU:
7828 fputs ("lle", file); /* 6 */
7829 break;
7830 case GTU:
7831 fputs ("lgt", file); /* 1 */
7832 break;
7833 case GEU:
7834 fputs ("lge", file); /* 5 */
7835 break;
7836 default:
7837 abort ();
7838 }
7839 break;
7840
7841 case 'w':
7842 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
7843 normally. */
7844 if (INT_P (x))
7845 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
7846 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
7847 else
7848 print_operand (file, x, 0);
7849 return;
7850
7851 case 'W':
7852 /* MB value for a PowerPC64 rldic operand. */
7853 val = (GET_CODE (x) == CONST_INT
7854 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
7855
7856 if (val < 0)
7857 i = -1;
7858 else
7859 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
7860 if ((val <<= 1) < 0)
7861 break;
7862
7863 #if HOST_BITS_PER_WIDE_INT == 32
7864 if (GET_CODE (x) == CONST_INT && i >= 0)
7865 i += 32; /* zero-extend high-part was all 0's */
7866 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
7867 {
7868 val = CONST_DOUBLE_LOW (x);
7869
7870 if (val == 0)
7871 abort ();
7872 else if (val < 0)
7873 --i;
7874 else
7875 for ( ; i < 64; i++)
7876 if ((val <<= 1) < 0)
7877 break;
7878 }
7879 #endif
7880
7881 fprintf (file, "%d", i + 1);
7882 return;
7883
7884 case 'X':
7885 if (GET_CODE (x) == MEM
7886 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
7887 putc ('x', file);
7888 return;
7889
7890 case 'Y':
7891 /* Like 'L', for third word of TImode */
7892 if (GET_CODE (x) == REG)
7893 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
7894 else if (GET_CODE (x) == MEM)
7895 {
7896 if (GET_CODE (XEXP (x, 0)) == PRE_INC
7897 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
7898 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
7899 else
7900 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
7901 if (small_data_operand (x, GET_MODE (x)))
7902 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
7903 reg_names[SMALL_DATA_REG]);
7904 }
7905 return;
7906
7907 case 'z':
7908 /* X is a SYMBOL_REF. Write out the name preceded by a
7909 period and without any trailing data in brackets. Used for function
7910 names. If we are configured for System V (or the embedded ABI) on
7911 the PowerPC, do not emit the period, since those systems do not use
7912 TOCs and the like. */
7913 if (GET_CODE (x) != SYMBOL_REF)
7914 abort ();
7915
7916 if (XSTR (x, 0)[0] != '.')
7917 {
7918 switch (DEFAULT_ABI)
7919 {
7920 default:
7921 abort ();
7922
7923 case ABI_AIX:
7924 putc ('.', file);
7925 break;
7926
7927 case ABI_V4:
7928 case ABI_AIX_NODESC:
7929 case ABI_DARWIN:
7930 break;
7931 }
7932 }
7933 #if TARGET_AIX
7934 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
7935 #else
7936 assemble_name (file, XSTR (x, 0));
7937 #endif
7938 return;
7939
7940 case 'Z':
7941 /* Like 'L', for last word of TImode. */
7942 if (GET_CODE (x) == REG)
7943 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
7944 else if (GET_CODE (x) == MEM)
7945 {
7946 if (GET_CODE (XEXP (x, 0)) == PRE_INC
7947 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
7948 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
7949 else
7950 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
7951 if (small_data_operand (x, GET_MODE (x)))
7952 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
7953 reg_names[SMALL_DATA_REG]);
7954 }
7955 return;
7956
7957 /* Print AltiVec or SPE memory operand. */
7958 case 'y':
7959 {
7960 rtx tmp;
7961
7962 if (GET_CODE (x) != MEM)
7963 abort ();
7964
7965 tmp = XEXP (x, 0);
7966
7967 if (TARGET_SPE)
7968 {
7969 /* Handle [reg]. */
7970 if (GET_CODE (tmp) == REG)
7971 {
7972 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
7973 break;
7974 }
7975 /* Handle [reg+UIMM]. */
7976 else if (GET_CODE (tmp) == PLUS &&
7977 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
7978 {
7979 int x;
7980
7981 if (GET_CODE (XEXP (tmp, 0)) != REG)
7982 abort ();
7983
7984 x = INTVAL (XEXP (tmp, 1));
7985 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
7986 break;
7987 }
7988
7989 /* Fall through. Must be [reg+reg]. */
7990 }
7991 if (GET_CODE (tmp) == REG)
7992 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
7993 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
7994 {
7995 if (REGNO (XEXP (tmp, 0)) == 0)
7996 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
7997 reg_names[ REGNO (XEXP (tmp, 0)) ]);
7998 else
7999 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
8000 reg_names[ REGNO (XEXP (tmp, 1)) ]);
8001 }
8002 else
8003 abort ();
8004 break;
8005 }
8006
8007 case 0:
8008 if (GET_CODE (x) == REG)
8009 fprintf (file, "%s", reg_names[REGNO (x)]);
8010 else if (GET_CODE (x) == MEM)
8011 {
8012 /* We need to handle PRE_INC and PRE_DEC here, since we need to
8013 know the width from the mode. */
8014 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
8015 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
8016 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8017 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
8018 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
8019 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8020 else
8021 output_address (XEXP (x, 0));
8022 }
8023 else
8024 output_addr_const (file, x);
8025 return;
8026
8027 default:
8028 output_operand_lossage ("invalid %%xn code");
8029 }
8030 }
8031 \f
8032 /* Print the address of an operand. */
8033
/* Print a memory address X in assembler syntax to FILE.  Handles plain
   registers, symbolic constants (with small-data relocation suffixes),
   reg+reg and reg+const forms, LO_SUM low-part references, and TOC
   constant-pool addresses.  Aborts on an address form it does not
   recognize.  */
void
print_operand_address (file, x)
     FILE *file;
     rtx x;
{
  if (GET_CODE (x) == REG)
    fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
  else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
	   || GET_CODE (x) == LABEL_REF)
    {
      output_addr_const (file, x);
      if (small_data_operand (x, GET_MODE (x)))
	fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		 reg_names[SMALL_DATA_REG]);
      /* NOTE(review): a bare symbolic address should not reach here
	 when addressing goes through the TOC — hence the abort.  */
      else if (TARGET_TOC)
	abort ();
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
    {
      /* Register 0 in the base slot reads specially in PowerPC
	 addressing, so print the other register first when the first
	 operand is r0 — TODO confirm against the reg+reg patterns.  */
      if (REGNO (XEXP (x, 0)) == 0)
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
		 reg_names[ REGNO (XEXP (x, 0)) ]);
      else
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
		 reg_names[ REGNO (XEXP (x, 1)) ]);
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
      fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#if TARGET_ELF
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
	   && CONSTANT_P (XEXP (x, 1)))
    {
      /* ELF spelling of a low-part reference: sym@l(reg).  */
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
#if TARGET_MACHO
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
	   && CONSTANT_P (XEXP (x, 1)))
    {
      /* Darwin spelling of the same: lo16(sym)(reg).  */
      fprintf (file, "lo16(");
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
  else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
    {
      if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
	{
	  rtx contains_minus = XEXP (x, 1);
	  rtx minus, symref;
	  const char *name;

	  /* Find the (minus (sym) (toc)) buried in X, and temporarily
	     turn it into (sym) for output_addr_const.  */
	  while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
	    contains_minus = XEXP (contains_minus, 0);

	  minus = XEXP (contains_minus, 0);
	  symref = XEXP (minus, 0);
	  XEXP (contains_minus, 0) = symref;
	  if (TARGET_ELF)
	    {
	      char *newname;

	      /* Splice "@toc" onto the symbol name for the duration of
		 the output_addr_const call; the original name string is
		 restored below.  */
	      name = XSTR (symref, 0);
	      newname = alloca (strlen (name) + sizeof ("@toc"));
	      strcpy (newname, name);
	      strcat (newname, "@toc");
	      XSTR (symref, 0) = newname;
	    }
	  output_addr_const (file, XEXP (x, 1));
	  if (TARGET_ELF)
	    XSTR (symref, 0) = name;
	  /* Undo the temporary (sym) substitution made above.  */
	  XEXP (contains_minus, 0) = minus;
	}
      else
	output_addr_const (file, XEXP (x, 1));

      fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
    }
  else
    abort ();
}
8121 \f
8122 /* Target hook for assembling integer objects. The PowerPC version has
8123 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
8124 is defined. It also needs to handle DI-mode objects on 64-bit
8125 targets. */
8126
/* Target hook: assemble the integer constant X of SIZE bytes.
   ALIGNED_P is nonzero when the value is known to be aligned.  Returns
   true when this function emitted the value itself, false to fall back
   to default_assemble_integer.  The special handling below only
   applies to aligned 4-byte values when RELOCATABLE_NEEDS_FIXUP is
   defined.  */
static bool
rs6000_assemble_integer (x, size, aligned_p)
     rtx x;
     unsigned int size;
     int aligned_p;
{
#ifdef RELOCATABLE_NEEDS_FIXUP
  /* Special handling for SI values.  */
  if (size == 4 && aligned_p)
    {
      extern int in_toc_section PARAMS ((void));
      /* Guards against this path re-entering itself while the fixup
	 label and address are being emitted below.  */
      static int recurse = 0;
      
      /* For -mrelocatable, we mark all addresses that need to be fixed up
	 in the .fixup section.  */
      if (TARGET_RELOCATABLE
	  && !in_toc_section ()
	  && !in_text_section ()
	  && !recurse
	  && GET_CODE (x) != CONST_INT
	  && GET_CODE (x) != CONST_DOUBLE
	  && CONSTANT_P (x))
	{
	  char buf[256];

	  recurse = 1;
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
	  fixuplabelno++;
	  ASM_OUTPUT_LABEL (asm_out_file, buf);
	  /* Emit the value with an @fixup annotation, then record the
	     label of this word in the .fixup section so the startup
	     code can relocate it.  */
	  fprintf (asm_out_file, "\t.long\t(");
	  output_addr_const (asm_out_file, x);
	  fprintf (asm_out_file, ")@fixup\n");
	  fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
	  ASM_OUTPUT_ALIGN (asm_out_file, 2);
	  fprintf (asm_out_file, "\t.long\t");
	  assemble_name (asm_out_file, buf);
	  fprintf (asm_out_file, "\n\t.previous\n");
	  recurse = 0;
	  return true;
	}
      /* Remove initial .'s to turn a -mcall-aixdesc function
	 address into the address of the descriptor, not the function
	 itself.  */
      else if (GET_CODE (x) == SYMBOL_REF
	       && XSTR (x, 0)[0] == '.'
	       && DEFAULT_ABI == ABI_AIX)
	{
	  const char *name = XSTR (x, 0);
	  while (*name == '.')
	    name++;

	  fprintf (asm_out_file, "\t.long\t%s\n", name);
	  return true;
	}
    }
#endif /* RELOCATABLE_NEEDS_FIXUP */
  return default_assemble_integer (x, size, aligned_p);
}
8185
8186 #ifdef HAVE_GAS_HIDDEN
8187 /* Emit an assembler directive to set symbol visibility for DECL to
8188 VISIBILITY_TYPE. */
8189
8190 static void
8191 rs6000_assemble_visibility (decl, vis)
8192 tree decl;
8193 int vis;
8194 {
8195 /* Functions need to have their entry point symbol visibility set as
8196 well as their descriptor symbol visibility. */
8197 if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
8198 {
8199 static const char * const visibility_types[] = {
8200 NULL, "internal", "hidden", "protected"
8201 };
8202
8203 const char *name, *type;
8204
8205 name = ((* targetm.strip_name_encoding)
8206 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
8207 type = visibility_types[vis];
8208
8209 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
8210 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
8211 }
8212 else
8213 default_assemble_visibility (decl, vis);
8214 }
8215 #endif
8216 \f
8217 enum rtx_code
8218 rs6000_reverse_condition (mode, code)
8219 enum machine_mode mode;
8220 enum rtx_code code;
8221 {
8222 /* Reversal of FP compares takes care -- an ordered compare
8223 becomes an unordered compare and vice versa. */
8224 if (mode == CCFPmode && !flag_unsafe_math_optimizations)
8225 return reverse_condition_maybe_unordered (code);
8226 else
8227 return reverse_condition (code);
8228 }
8229
8230 /* Generate a compare for CODE. Return a brand-new rtx that
8231 represents the result of the compare. */
8232
8233 static rtx
8234 rs6000_generate_compare (code)
8235 enum rtx_code code;
8236 {
8237 enum machine_mode comp_mode;
8238 rtx compare_result;
8239
8240 if (rs6000_compare_fp_p)
8241 comp_mode = CCFPmode;
8242 else if (code == GTU || code == LTU
8243 || code == GEU || code == LEU)
8244 comp_mode = CCUNSmode;
8245 else
8246 comp_mode = CCmode;
8247
8248 /* First, the compare. */
8249 compare_result = gen_reg_rtx (comp_mode);
8250
8251 /* SPE FP compare instructions on the GPRs. Yuck! */
8252 if ((TARGET_SPE && TARGET_HARD_FLOAT) && rs6000_compare_fp_p)
8253 {
8254 rtx cmp, or1, or2, or_result, compare_result2;
8255
8256 switch (code)
8257 {
8258 case EQ:
8259 case UNEQ:
8260 case NE:
8261 case LTGT:
8262 cmp = flag_unsafe_math_optimizations
8263 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
8264 rs6000_compare_op1)
8265 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
8266 rs6000_compare_op1);
8267 break;
8268 case GT:
8269 case GTU:
8270 case UNGT:
8271 case UNGE:
8272 case GE:
8273 case GEU:
8274 cmp = flag_unsafe_math_optimizations
8275 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
8276 rs6000_compare_op1)
8277 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
8278 rs6000_compare_op1);
8279 break;
8280 case LT:
8281 case LTU:
8282 case UNLT:
8283 case UNLE:
8284 case LE:
8285 case LEU:
8286 cmp = flag_unsafe_math_optimizations
8287 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
8288 rs6000_compare_op1)
8289 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
8290 rs6000_compare_op1);
8291 break;
8292 default:
8293 abort ();
8294 }
8295
8296 /* Synthesize LE and GE from LT/GT || EQ. */
8297 if (code == LE || code == GE || code == LEU || code == GEU)
8298 {
8299 /* Synthesize GE/LE frome GT/LT || EQ. */
8300
8301 emit_insn (cmp);
8302
8303 switch (code)
8304 {
8305 case LE: code = LT; break;
8306 case GE: code = GT; break;
8307 case LEU: code = LT; break;
8308 case GEU: code = GT; break;
8309 default: abort ();
8310 }
8311
8312 or1 = gen_reg_rtx (SImode);
8313 or2 = gen_reg_rtx (SImode);
8314 or_result = gen_reg_rtx (CCEQmode);
8315 compare_result2 = gen_reg_rtx (CCFPmode);
8316
8317 /* Do the EQ. */
8318 cmp = flag_unsafe_math_optimizations
8319 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
8320 rs6000_compare_op1)
8321 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
8322 rs6000_compare_op1);
8323 emit_insn (cmp);
8324
8325 /* The MC8540 FP compare instructions set the CR bits
8326 differently than other PPC compare instructions. For
8327 that matter, there is no generic test instruction, but a
8328 testgt, testlt, and testeq. For a true condition, bit 2
8329 is set (x1xx) in the CR. Following the traditional CR
8330 values:
8331
8332 LT GT EQ OV
8333 bit3 bit2 bit1 bit0
8334
8335 ... bit 2 would be a GT CR alias, so later on we
8336 look in the GT bits for the branch instructins.
8337 However, we must be careful to emit correct RTL in
8338 the meantime, so optimizations don't get confused. */
8339
8340 or1 = gen_rtx (NE, SImode, compare_result, const0_rtx);
8341 or2 = gen_rtx (NE, SImode, compare_result2, const0_rtx);
8342
8343 /* OR them together. */
8344 cmp = gen_rtx_SET (VOIDmode, or_result,
8345 gen_rtx_COMPARE (CCEQmode,
8346 gen_rtx_IOR (SImode, or1, or2),
8347 const_true_rtx));
8348 compare_result = or_result;
8349 code = EQ;
8350 }
8351 else
8352 {
8353 /* We only care about 1 bit (x1xx), so map everything to NE to
8354 maintain rtl sanity. We'll get to the right bit (x1xx) at
8355 code output time. */
8356 if (code == NE || code == LTGT)
8357 /* Do the inverse here because we have no cmpne
8358 instruction. We use the cmpeq instruction and expect
8359 to get a 0 instead. */
8360 code = EQ;
8361 else
8362 code = NE;
8363 }
8364
8365 emit_insn (cmp);
8366 }
8367 else
8368 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
8369 gen_rtx_COMPARE (comp_mode,
8370 rs6000_compare_op0,
8371 rs6000_compare_op1)));
8372
8373 /* Some kinds of FP comparisons need an OR operation;
8374 except for flag_unsafe_math_optimizations we don't bother. */
8375 if (rs6000_compare_fp_p
8376 && ! flag_unsafe_math_optimizations
8377 && ! (TARGET_HARD_FLOAT && TARGET_SPE)
8378 && (code == LE || code == GE
8379 || code == UNEQ || code == LTGT
8380 || code == UNGT || code == UNLT))
8381 {
8382 enum rtx_code or1, or2;
8383 rtx or1_rtx, or2_rtx, compare2_rtx;
8384 rtx or_result = gen_reg_rtx (CCEQmode);
8385
8386 switch (code)
8387 {
8388 case LE: or1 = LT; or2 = EQ; break;
8389 case GE: or1 = GT; or2 = EQ; break;
8390 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
8391 case LTGT: or1 = LT; or2 = GT; break;
8392 case UNGT: or1 = UNORDERED; or2 = GT; break;
8393 case UNLT: or1 = UNORDERED; or2 = LT; break;
8394 default: abort ();
8395 }
8396 validate_condition_mode (or1, comp_mode);
8397 validate_condition_mode (or2, comp_mode);
8398 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
8399 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
8400 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
8401 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
8402 const_true_rtx);
8403 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
8404
8405 compare_result = or_result;
8406 code = EQ;
8407 }
8408
8409 validate_condition_mode (code, GET_MODE (compare_result));
8410
8411 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
8412 }
8413
8414
8415 /* Emit the RTL for an sCOND pattern. */
8416
8417 void
8418 rs6000_emit_sCOND (code, result)
8419 enum rtx_code code;
8420 rtx result;
8421 {
8422 rtx condition_rtx;
8423 enum machine_mode op_mode;
8424
8425 condition_rtx = rs6000_generate_compare (code);
8426
8427 op_mode = GET_MODE (rs6000_compare_op0);
8428 if (op_mode == VOIDmode)
8429 op_mode = GET_MODE (rs6000_compare_op1);
8430
8431 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
8432 {
8433 PUT_MODE (condition_rtx, DImode);
8434 convert_move (result, condition_rtx, 0);
8435 }
8436 else
8437 {
8438 PUT_MODE (condition_rtx, SImode);
8439 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
8440 }
8441 }
8442
8443 /* Emit a branch of kind CODE to location LOC. */
8444
8445 void
8446 rs6000_emit_cbranch (code, loc)
8447 enum rtx_code code;
8448 rtx loc;
8449 {
8450 rtx condition_rtx, loc_ref;
8451
8452 condition_rtx = rs6000_generate_compare (code);
8453 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
8454 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
8455 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
8456 loc_ref, pc_rtx)));
8457 }
8458
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label, or -1 if the branch is really a
   conditional return.

   OP is the conditional expression.  XEXP (OP, 0) is assumed to be a
   condition code register and its mode specifies what kind of
   comparison we made.

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.

   The returned string lives in a static buffer and is overwritten by
   the next call.  */

char *
output_cbranch (op, label, reversed, insn)
     rtx op;
     const char * label;
     int reversed;
     rtx insn;
{
  static char string[64];
  enum rtx_code code = GET_CODE (op);
  rtx cc_reg = XEXP (op, 0);
  enum machine_mode mode = GET_MODE (cc_reg);
  int cc_regno = REGNO (cc_reg) - CR0_REGNO;
  /* A length attribute of 8 means the branch did not fit in a single
     4-byte instruction, so we must emit an inverted short branch
     around an unconditional long branch.  */
  int need_longbranch = label != NULL && get_attr_length (insn) == 8;
  /* The condition actually tested is inverted again when we branch
     around the long branch.  */
  int really_reversed = reversed ^ need_longbranch;
  char *s = string;
  const char *ccode;
  const char *pred;
  rtx note;

  validate_condition_mode (code, mode);

  /* Work out which way this really branches.  We could use
     reverse_condition_maybe_unordered here always but this
     makes the resulting assembler clearer.  */
  if (really_reversed)
    {
      /* Reversal of FP compares takes care -- an ordered compare
	 becomes an unordered compare and vice versa.  */
      if (mode == CCFPmode)
	code = reverse_condition_maybe_unordered (code);
      else
	code = reverse_condition (code);
    }

  if ((TARGET_SPE && TARGET_HARD_FLOAT) && mode == CCFPmode)
    {
      /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
	 to the GT bit.  */
      if (code == EQ)
	/* Opposite of GT.  */
	code = UNLE;
      else if (code == NE)
	code = GT;
      else
	abort ();
    }

  /* Map the RTL comparison code to the mnemonic condition suffix.  */
  switch (code)
    {
      /* Not all of these are actually distinct opcodes, but
	 we distinguish them for clarity of the resulting assembler.  */
    case NE: case LTGT:
      ccode = "ne"; break;
    case EQ: case UNEQ:
      ccode = "eq"; break;
    case GE: case GEU:
      ccode = "ge"; break;
    case GT: case GTU: case UNGT:
      ccode = "gt"; break;
    case LE: case LEU:
      ccode = "le"; break;
    case LT: case LTU: case UNLT:
      ccode = "lt"; break;
    case UNORDERED: ccode = "un"; break;
    case ORDERED: ccode = "nu"; break;
    case UNGE: ccode = "nl"; break;
    case UNLE: ccode = "ng"; break;
    default:
      abort ();
    }

  /* Maybe we have a guess as to how likely the branch is.
     The old mnemonics don't have a way to specify this information.  */
  pred = "";
  note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
  if (note != NULL_RTX)
    {
      /* PROB is the difference from 50%.  */
      int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
      bool always_hint = rs6000_cpu != PROCESSOR_POWER4;

      /* Only hint for highly probable/improbable branches on newer
	 cpus as static prediction overrides processor dynamic
	 prediction.  For older cpus we may as well always hint, but
	 assume not taken for branches that are very close to 50% as a
	 mispredicted taken branch is more expensive than a
	 mispredicted not-taken branch.  */
      if (always_hint
	  || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
	{
	  if (abs (prob) > REG_BR_PROB_BASE / 20
	      && ((prob > 0) ^ need_longbranch))
	    pred = "+";
	  else
	    pred = "-";
	  }
    }

  /* LABEL == NULL means a conditional return; the {old|new} mnemonic
     pairs are expanded by the assembler-dialect machinery.  */
  if (label == NULL)
    s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
  else
    s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);

  /* We need to escape any '%' characters in the reg_names string.
     Assume they'd only be the first character...  */
  if (reg_names[cc_regno + CR0_REGNO][0] == '%')
    *s++ = '%';
  s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);

  if (label != NULL)
    {
      /* If the branch distance was too far, we may have to use an
	 unconditional branch to go the distance.  */
      if (need_longbranch)
	s += sprintf (s, ",$+8\n\tb %s", label);
      else
	s += sprintf (s, ",%s", label);
    }

  return string;
}
8592
8593 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
8594 operands of the last comparison is nonzero/true, FALSE_COND if it
8595 is zero/false. Return 0 if the hardware has no such operation. */
8596
8597 int
8598 rs6000_emit_cmove (dest, op, true_cond, false_cond)
8599 rtx dest;
8600 rtx op;
8601 rtx true_cond;
8602 rtx false_cond;
8603 {
8604 enum rtx_code code = GET_CODE (op);
8605 rtx op0 = rs6000_compare_op0;
8606 rtx op1 = rs6000_compare_op1;
8607 REAL_VALUE_TYPE c1;
8608 enum machine_mode compare_mode = GET_MODE (op0);
8609 enum machine_mode result_mode = GET_MODE (dest);
8610 rtx temp;
8611
8612 /* These modes should always match. */
8613 if (GET_MODE (op1) != compare_mode
8614 /* In the isel case however, we can use a compare immediate, so
8615 op1 may be a small constant. */
8616 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
8617 return 0;
8618 if (GET_MODE (true_cond) != result_mode)
8619 return 0;
8620 if (GET_MODE (false_cond) != result_mode)
8621 return 0;
8622
8623 /* First, work out if the hardware can do this at all, or
8624 if it's too slow... */
8625 if (! rs6000_compare_fp_p)
8626 {
8627 if (TARGET_ISEL)
8628 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
8629 return 0;
8630 }
8631
8632 /* Eliminate half of the comparisons by switching operands, this
8633 makes the remaining code simpler. */
8634 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
8635 || code == LTGT || code == LT)
8636 {
8637 code = reverse_condition_maybe_unordered (code);
8638 temp = true_cond;
8639 true_cond = false_cond;
8640 false_cond = temp;
8641 }
8642
8643 /* UNEQ and LTGT take four instructions for a comparison with zero,
8644 it'll probably be faster to use a branch here too. */
8645 if (code == UNEQ)
8646 return 0;
8647
8648 if (GET_CODE (op1) == CONST_DOUBLE)
8649 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
8650
8651 /* We're going to try to implement comparions by performing
8652 a subtract, then comparing against zero. Unfortunately,
8653 Inf - Inf is NaN which is not zero, and so if we don't
8654 know that the operand is finite and the comparison
8655 would treat EQ different to UNORDERED, we can't do it. */
8656 if (! flag_unsafe_math_optimizations
8657 && code != GT && code != UNGE
8658 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
8659 /* Constructs of the form (a OP b ? a : b) are safe. */
8660 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
8661 || (! rtx_equal_p (op0, true_cond)
8662 && ! rtx_equal_p (op1, true_cond))))
8663 return 0;
8664 /* At this point we know we can use fsel. */
8665
8666 /* Reduce the comparison to a comparison against zero. */
8667 temp = gen_reg_rtx (compare_mode);
8668 emit_insn (gen_rtx_SET (VOIDmode, temp,
8669 gen_rtx_MINUS (compare_mode, op0, op1)));
8670 op0 = temp;
8671 op1 = CONST0_RTX (compare_mode);
8672
8673 /* If we don't care about NaNs we can reduce some of the comparisons
8674 down to faster ones. */
8675 if (flag_unsafe_math_optimizations)
8676 switch (code)
8677 {
8678 case GT:
8679 code = LE;
8680 temp = true_cond;
8681 true_cond = false_cond;
8682 false_cond = temp;
8683 break;
8684 case UNGE:
8685 code = GE;
8686 break;
8687 case UNEQ:
8688 code = EQ;
8689 break;
8690 default:
8691 break;
8692 }
8693
8694 /* Now, reduce everything down to a GE. */
8695 switch (code)
8696 {
8697 case GE:
8698 break;
8699
8700 case LE:
8701 temp = gen_reg_rtx (compare_mode);
8702 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8703 op0 = temp;
8704 break;
8705
8706 case ORDERED:
8707 temp = gen_reg_rtx (compare_mode);
8708 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
8709 op0 = temp;
8710 break;
8711
8712 case EQ:
8713 temp = gen_reg_rtx (compare_mode);
8714 emit_insn (gen_rtx_SET (VOIDmode, temp,
8715 gen_rtx_NEG (compare_mode,
8716 gen_rtx_ABS (compare_mode, op0))));
8717 op0 = temp;
8718 break;
8719
8720 case UNGE:
8721 temp = gen_reg_rtx (result_mode);
8722 emit_insn (gen_rtx_SET (VOIDmode, temp,
8723 gen_rtx_IF_THEN_ELSE (result_mode,
8724 gen_rtx_GE (VOIDmode,
8725 op0, op1),
8726 true_cond, false_cond)));
8727 false_cond = temp;
8728 true_cond = false_cond;
8729
8730 temp = gen_reg_rtx (compare_mode);
8731 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8732 op0 = temp;
8733 break;
8734
8735 case GT:
8736 temp = gen_reg_rtx (result_mode);
8737 emit_insn (gen_rtx_SET (VOIDmode, temp,
8738 gen_rtx_IF_THEN_ELSE (result_mode,
8739 gen_rtx_GE (VOIDmode,
8740 op0, op1),
8741 true_cond, false_cond)));
8742 true_cond = temp;
8743 false_cond = true_cond;
8744
8745 temp = gen_reg_rtx (compare_mode);
8746 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8747 op0 = temp;
8748 break;
8749
8750 default:
8751 abort ();
8752 }
8753
8754 emit_insn (gen_rtx_SET (VOIDmode, dest,
8755 gen_rtx_IF_THEN_ELSE (result_mode,
8756 gen_rtx_GE (VOIDmode,
8757 op0, op1),
8758 true_cond, false_cond)));
8759 return 1;
8760 }
8761
8762 /* Same as above, but for ints (isel). */
8763
8764 static int
8765 rs6000_emit_int_cmove (dest, op, true_cond, false_cond)
8766 rtx dest;
8767 rtx op;
8768 rtx true_cond;
8769 rtx false_cond;
8770 {
8771 rtx condition_rtx, cr;
8772
8773 /* All isel implementations thus far are 32-bits. */
8774 if (GET_MODE (rs6000_compare_op0) != SImode)
8775 return 0;
8776
8777 /* We still have to do the compare, because isel doesn't do a
8778 compare, it just looks at the CRx bits set by a previous compare
8779 instruction. */
8780 condition_rtx = rs6000_generate_compare (GET_CODE (op));
8781 cr = XEXP (condition_rtx, 0);
8782
8783 if (GET_MODE (cr) == CCmode)
8784 emit_insn (gen_isel_signed (dest, condition_rtx,
8785 true_cond, false_cond, cr));
8786 else
8787 emit_insn (gen_isel_unsigned (dest, condition_rtx,
8788 true_cond, false_cond, cr));
8789
8790 return 1;
8791 }
8792
8793 const char *
8794 output_isel (operands)
8795 rtx *operands;
8796 {
8797 enum rtx_code code;
8798
8799 code = GET_CODE (operands[1]);
8800 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
8801 {
8802 PUT_CODE (operands[1], reverse_condition (code));
8803 return "isel %0,%3,%2,%j1";
8804 }
8805 else
8806 return "isel %0,%2,%3,%j1";
8807 }
8808
8809 void
8810 rs6000_emit_minmax (dest, code, op0, op1)
8811 rtx dest;
8812 enum rtx_code code;
8813 rtx op0;
8814 rtx op1;
8815 {
8816 enum machine_mode mode = GET_MODE (op0);
8817 enum rtx_code c;
8818 rtx target;
8819
8820 if (code == SMAX || code == SMIN)
8821 c = GE;
8822 else
8823 c = GEU;
8824
8825 if (code == SMAX || code == UMAX)
8826 target = emit_conditional_move (dest, c, op0, op1, mode,
8827 op0, op1, mode, 0);
8828 else
8829 target = emit_conditional_move (dest, c, op0, op1, mode,
8830 op1, op0, mode, 0);
8831 if (target == NULL_RTX)
8832 abort ();
8833 if (target != dest)
8834 emit_move_insn (dest, target);
8835 }
8836 \f
8837 /* This page contains routines that are used to determine what the
8838 function prologue and epilogue code will do and write them out. */
8839
8840 /* Return the first fixed-point register that is required to be
8841 saved. 32 if none. */
8842
8843 int
8844 first_reg_to_save ()
8845 {
8846 int first_reg;
8847
8848 /* Find lowest numbered live register. */
8849 for (first_reg = 13; first_reg <= 31; first_reg++)
8850 if (regs_ever_live[first_reg]
8851 && (! call_used_regs[first_reg]
8852 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
8853 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8854 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
8855 break;
8856
8857 #if TARGET_MACHO
8858 if (flag_pic
8859 && current_function_uses_pic_offset_table
8860 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
8861 return RS6000_PIC_OFFSET_TABLE_REGNUM;
8862 #endif
8863
8864 return first_reg;
8865 }
8866
8867 /* Similar, for FP regs. */
8868
8869 int
8870 first_fp_reg_to_save ()
8871 {
8872 int first_reg;
8873
8874 /* Find lowest numbered live register. */
8875 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
8876 if (regs_ever_live[first_reg])
8877 break;
8878
8879 return first_reg;
8880 }
8881
8882 /* Similar, for AltiVec regs. */
8883
8884 static int
8885 first_altivec_reg_to_save ()
8886 {
8887 int i;
8888
8889 /* Stack frame remains as is unless we are in AltiVec ABI. */
8890 if (! TARGET_ALTIVEC_ABI)
8891 return LAST_ALTIVEC_REGNO + 1;
8892
8893 /* Find lowest numbered live register. */
8894 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
8895 if (regs_ever_live[i])
8896 break;
8897
8898 return i;
8899 }
8900
8901 /* Return a 32-bit mask of the AltiVec registers we need to set in
8902 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
8903 the 32-bit word is 0. */
8904
8905 static unsigned int
8906 compute_vrsave_mask ()
8907 {
8908 unsigned int i, mask = 0;
8909
8910 /* First, find out if we use _any_ altivec registers. */
8911 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8912 if (regs_ever_live[i])
8913 mask |= ALTIVEC_REG_BIT (i);
8914
8915 if (mask == 0)
8916 return mask;
8917
8918 /* Next, add all registers that are call-clobbered. We do this
8919 because post-reload register optimizers such as regrename_optimize
8920 may choose to use them. They never change the register class
8921 chosen by reload, so cannot create new uses of altivec registers
8922 if there were none before, so the early exit above is safe. */
8923 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
8924 altivec registers not saved in the mask, which might well make the
8925 adjustments below more effective in eliding the save/restore of
8926 VRSAVE in small functions. */
8927 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8928 if (call_used_regs[i])
8929 mask |= ALTIVEC_REG_BIT (i);
8930
8931 /* Next, remove the argument registers from the set. These must
8932 be in the VRSAVE mask set by the caller, so we don't need to add
8933 them in again. More importantly, the mask we compute here is
8934 used to generate CLOBBERs in the set_vrsave insn, and we do not
8935 wish the argument registers to die. */
8936 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
8937 mask &= ~ALTIVEC_REG_BIT (i);
8938
8939 /* Similarly, remove the return value from the set. */
8940 {
8941 bool yes = false;
8942 diddle_return_value (is_altivec_return_reg, &yes);
8943 if (yes)
8944 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
8945 }
8946
8947 return mask;
8948 }
8949
8950 static void
8951 is_altivec_return_reg (reg, xyes)
8952 rtx reg;
8953 void *xyes;
8954 {
8955 bool *yes = (bool *) xyes;
8956 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
8957 *yes = true;
8958 }
8959
8960 \f
8961 /* Calculate the stack information for the current function. This is
8962 complicated by having two separate calling sequences, the AIX calling
8963 sequence and the V.4 calling sequence.
8964
8965 AIX (and Darwin/Mac OS X) stack frames look like:
8966 32-bit 64-bit
8967 SP----> +---------------------------------------+
8968 | back chain to caller | 0 0
8969 +---------------------------------------+
8970 | saved CR | 4 8 (8-11)
8971 +---------------------------------------+
8972 | saved LR | 8 16
8973 +---------------------------------------+
8974 | reserved for compilers | 12 24
8975 +---------------------------------------+
8976 | reserved for binders | 16 32
8977 +---------------------------------------+
8978 | saved TOC pointer | 20 40
8979 +---------------------------------------+
8980 | Parameter save area (P) | 24 48
8981 +---------------------------------------+
8982 | Alloca space (A) | 24+P etc.
8983 +---------------------------------------+
8984 | Local variable space (L) | 24+P+A
8985 +---------------------------------------+
8986 | Float/int conversion temporary (X) | 24+P+A+L
8987 +---------------------------------------+
8988 | Save area for AltiVec registers (W) | 24+P+A+L+X
8989 +---------------------------------------+
8990 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
8991 +---------------------------------------+
8992 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
8993 +---------------------------------------+
8994 | Save area for GP registers (G) | 24+P+A+L+X+W+Y+Z
8995 +---------------------------------------+
8996 | Save area for FP registers (F) | 24+P+A+L+X+W+Y+Z+G
8997 +---------------------------------------+
8998 old SP->| back chain to caller's caller |
8999 +---------------------------------------+
9000
9001 The required alignment for AIX configurations is two words (i.e., 8
9002 or 16 bytes).
9003
9004
9005 V.4 stack frames look like:
9006
9007 SP----> +---------------------------------------+
9008 | back chain to caller | 0
9009 +---------------------------------------+
9010 | caller's saved LR | 4
9011 +---------------------------------------+
9012 | Parameter save area (P) | 8
9013 +---------------------------------------+
9014 | Alloca space (A) | 8+P
9015 +---------------------------------------+
9016 | Varargs save area (V) | 8+P+A
9017 +---------------------------------------+
9018 | Local variable space (L) | 8+P+A+V
9019 +---------------------------------------+
9020 | Float/int conversion temporary (X) | 8+P+A+V+L
9021 +---------------------------------------+
9022 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
9023 +---------------------------------------+
9024 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
9025 +---------------------------------------+
9026 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
9027 +---------------------------------------+
9028 | SPE: area for 64-bit GP registers |
9029 +---------------------------------------+
9030 | SPE alignment padding |
9031 +---------------------------------------+
9032 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
9033 +---------------------------------------+
9034 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
9035 +---------------------------------------+
9036 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
9037 +---------------------------------------+
9038 old SP->| back chain to caller's caller |
9039 +---------------------------------------+
9040
9041 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
9042 given. (But note below and in sysv4.h that we require only 8 and
9043 may round up the size of our stack frame anyways. The historical
9044 reason is early versions of powerpc-linux which didn't properly
9045 align the stack at program startup. A happy side-effect is that
9046 -mno-eabi libraries can be used with -meabi programs.)
9047
9048 The EABI configuration defaults to the V.4 layout, unless
9049 -mcall-aix is used, in which case the AIX layout is used. However,
9050 the stack alignment requirements may differ. If -mno-eabi is not
9051 given, the required stack alignment is 8 bytes; if -mno-eabi is
9052 given, the required alignment is 16 bytes. (But see V.4 comment
9053 above.) */
9054
9055 #ifndef ABI_STACK_BOUNDARY
9056 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
9057 #endif
9058
/* Compute and return the stack-frame layout for the current function,
   per the diagrams above.  The result points at a single static
   rs6000_stack_t that is recomputed (and overwritten) on every call.
   Offsets are negative displacements from the incoming stack pointer;
   size fields are in bytes.  */

rs6000_stack_t *
rs6000_stack_info ()
{
  static rs6000_stack_t info, zero_info;
  rs6000_stack_t *info_ptr = &info;
  int reg_size = TARGET_POWERPC64 ? 8 : 4;
  enum rs6000_abi abi;
  int ehrd_size;
  int total_raw_size;

  /* Zero all fields portably.  */
  info = zero_info;

  /* Select which calling sequence.  */
  info_ptr->abi = abi = DEFAULT_ABI;

  /* Calculate which registers need to be saved & save area size.  */
  info_ptr->first_gp_reg_save = first_reg_to_save ();
  /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
     even if it currently looks like we won't.  */
  if (((TARGET_TOC && TARGET_MINIMAL_TOC)
       || (flag_pic == 1 && abi == ABI_V4)
       || (flag_pic && abi == ABI_DARWIN))
      && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
    info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
  else
    info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);

  /* For the SPE, we have an additional upper 32-bits on each GPR.
     Ideally we should save the entire 64-bits only when the upper
     half is used in SIMD instructions.  Since we only record
     registers live (not the size they are used in), this proves
     difficult because we'd have to traverse the instruction chain at
     the right time, taking reload into account.  This is a real pain,
     so we opt to save the GPRs in 64-bits always.  Anyone overly
     concerned with frame size can fix this.  ;-).

     So... since we save all GPRs (except the SP) in 64-bits, the
     traditional GP save area will be empty.  */
  if (TARGET_SPE_ABI)
    info_ptr->gp_size = 0;

  info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
  info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);

  info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
  info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
				 - info_ptr->first_altivec_reg_save);

  /* Does this function call anything?  */
  info_ptr->calls_p = (! current_function_is_leaf
		       || cfun->machine->ra_needs_full_frame);

  /* Determine if we need to save the link register.  */
  if (rs6000_ra_ever_killed ()
      || (DEFAULT_ABI == ABI_AIX && current_function_profile)
#ifdef TARGET_RELOCATABLE
      || (TARGET_RELOCATABLE && (get_pool_size () != 0))
#endif
      || (info_ptr->first_fp_reg_save != 64
	  && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
      || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
      || (abi == ABI_V4 && current_function_calls_alloca)
      || (DEFAULT_ABI == ABI_DARWIN
	  && flag_pic
	  && current_function_uses_pic_offset_table)
      || info_ptr->calls_p)
    {
      info_ptr->lr_save_p = 1;
      regs_ever_live[LINK_REGISTER_REGNUM] = 1;
    }

  /* Determine if we need to save the condition code registers.
     Only CR2..CR4 are callee-saved.  */
  if (regs_ever_live[CR2_REGNO]
      || regs_ever_live[CR3_REGNO]
      || regs_ever_live[CR4_REGNO])
    {
      info_ptr->cr_save_p = 1;
      if (abi == ABI_V4)
	info_ptr->cr_size = reg_size;
    }

  /* If the current function calls __builtin_eh_return, then we need
     to allocate stack space for registers that will hold data for
     the exception handler.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i;
      /* Count the EH data registers; the loop body is empty.  */
      for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
	continue;

      /* SPE saves EH registers in 64-bits.  */
      ehrd_size = i * (TARGET_SPE_ABI ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
    }
  else
    ehrd_size = 0;

  /* Determine various sizes.  */
  info_ptr->reg_size = reg_size;
  info_ptr->fixed_size = RS6000_SAVE_AREA;
  info_ptr->varargs_size = RS6000_VARARGS_AREA;
  info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
  info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
				      8);

  if (TARGET_SPE_ABI)
    info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
  else
    info_ptr->spe_gp_size = 0;

  if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
    {
      info_ptr->vrsave_mask = compute_vrsave_mask ();
      info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
    }
  else
    {
      info_ptr->vrsave_mask = 0;
      info_ptr->vrsave_size = 0;
    }

  /* Calculate the offsets.  */
  switch (abi)
    {
    case ABI_NONE:
    default:
      abort ();

    case ABI_AIX:
    case ABI_AIX_NODESC:
    case ABI_DARWIN:
      info_ptr->fp_save_offset = - info_ptr->fp_size;
      info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;

      if (TARGET_ALTIVEC_ABI)
	{
	  info_ptr->vrsave_save_offset
	    = info_ptr->gp_save_offset - info_ptr->vrsave_size;

	  /* Align stack so vector save area is on a quadword boundary.  */
	  if (info_ptr->altivec_size != 0)
	    info_ptr->altivec_padding_size
	      = 16 - (-info_ptr->vrsave_save_offset % 16);
	  else
	    info_ptr->altivec_padding_size = 0;

	  info_ptr->altivec_save_offset
	    = info_ptr->vrsave_save_offset
	    - info_ptr->altivec_padding_size
	    - info_ptr->altivec_size;

	  /* Adjust for AltiVec case.  */
	  info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
	}
      else
	info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
      /* CR and LR live in the caller's frame header (see diagram).  */
      info_ptr->cr_save_offset = reg_size; /* first word when 64-bit.  */
      info_ptr->lr_save_offset = 2*reg_size;
      break;

    case ABI_V4:
      info_ptr->fp_save_offset = - info_ptr->fp_size;
      info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
      info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;

      if (TARGET_SPE_ABI)
	{
	  /* Align stack so SPE GPR save area is aligned on a
	     double-word boundary.  */
	  if (info_ptr->spe_gp_size != 0)
	    info_ptr->spe_padding_size
	      = 8 - (-info_ptr->cr_save_offset % 8);
	  else
	    info_ptr->spe_padding_size = 0;

	  info_ptr->spe_gp_save_offset
	    = info_ptr->cr_save_offset
	    - info_ptr->spe_padding_size
	    - info_ptr->spe_gp_size;

	  /* Adjust for SPE case.  */
	  info_ptr->toc_save_offset
	    = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
	}
      else if (TARGET_ALTIVEC_ABI)
	{
	  info_ptr->vrsave_save_offset
	    = info_ptr->cr_save_offset - info_ptr->vrsave_size;

	  /* Align stack so vector save area is on a quadword boundary.  */
	  if (info_ptr->altivec_size != 0)
	    info_ptr->altivec_padding_size
	      = 16 - (-info_ptr->vrsave_save_offset % 16);
	  else
	    info_ptr->altivec_padding_size = 0;

	  info_ptr->altivec_save_offset
	    = info_ptr->vrsave_save_offset
	    - info_ptr->altivec_padding_size
	    - info_ptr->altivec_size;

	  /* Adjust for AltiVec case.  */
	  info_ptr->toc_save_offset
	    = info_ptr->altivec_save_offset - info_ptr->toc_size;
	}
      else
	info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
      info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
      info_ptr->lr_save_offset = reg_size;
      break;
    }

  /* NOTE(review): vrsave_size is added twice in this sum, and
     (TARGET_ALTIVEC_ABI || ABI_DARWIN) is always true since ABI_DARWIN
     is a nonzero enum constant -- presumably `abi == ABI_DARWIN' was
     intended.  Both look like latent bugs (over-allocation only);
     confirm before changing.  */
  info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
				      + info_ptr->gp_size
				      + info_ptr->altivec_size
				      + info_ptr->altivec_padding_size
				      + info_ptr->vrsave_size
				      + info_ptr->spe_gp_size
				      + info_ptr->spe_padding_size
				      + ehrd_size
				      + info_ptr->cr_size
				      + info_ptr->lr_size
				      + info_ptr->vrsave_size
				      + info_ptr->toc_size,
				      (TARGET_ALTIVEC_ABI || ABI_DARWIN)
				      ? 16 : 8);

  total_raw_size = (info_ptr->vars_size
		    + info_ptr->parm_size
		    + info_ptr->save_size
		    + info_ptr->varargs_size
		    + info_ptr->fixed_size);

  info_ptr->total_size =
    RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);

  /* Determine if we need to allocate any stack frame:

     For AIX we need to push the stack if a frame pointer is needed
     (because the stack might be dynamically adjusted), if we are
     debugging, if we make calls, or if the sum of fp_save, gp_save,
     and local variables are more than the space needed to save all
     non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
     + 18*8 = 288 (GPR13 reserved).

     For V.4 we don't have the stack cushion that AIX uses, but assume
     that the debugger can handle stackless frames.  */

  if (info_ptr->calls_p)
    info_ptr->push_p = 1;

  else if (abi == ABI_V4)
    info_ptr->push_p = total_raw_size > info_ptr->fixed_size;

  else
    info_ptr->push_p = (frame_pointer_needed
			|| (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
			|| ((total_raw_size - info_ptr->fixed_size)
			    > (TARGET_32BIT ? 220 : 288)));

  /* Zero offsets if we're not saving those registers.  */
  if (info_ptr->fp_size == 0)
    info_ptr->fp_save_offset = 0;

  if (info_ptr->gp_size == 0)
    info_ptr->gp_save_offset = 0;

  if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
    info_ptr->altivec_save_offset = 0;

  if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
    info_ptr->vrsave_save_offset = 0;

  if (! TARGET_SPE_ABI || info_ptr->spe_gp_size == 0)
    info_ptr->spe_gp_save_offset = 0;

  if (! info_ptr->lr_save_p)
    info_ptr->lr_save_offset = 0;

  if (! info_ptr->cr_save_p)
    info_ptr->cr_save_offset = 0;

  if (! info_ptr->toc_save_p)
    info_ptr->toc_save_offset = 0;

  return info_ptr;
}
9346
/* Dump the frame layout INFO to stderr for debugging; if INFO is
   null, compute the current function's layout via rs6000_stack_info.
   Fields are only printed when nonzero (or non-default).  */

void
debug_stack_info (info)
     rs6000_stack_t *info;
{
  const char *abi_string;

  if (! info)
    info = rs6000_stack_info ();

  fprintf (stderr, "\nStack information for function %s:\n",
	   ((current_function_decl && DECL_NAME (current_function_decl))
	    ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
	    : "<unknown>"));

  switch (info->abi)
    {
    default: abi_string = "Unknown"; break;
    case ABI_NONE: abi_string = "NONE"; break;
    case ABI_AIX:
    case ABI_AIX_NODESC: abi_string = "AIX"; break;
    case ABI_DARWIN: abi_string = "Darwin"; break;
    case ABI_V4: abi_string = "V.4"; break;
    }

  fprintf (stderr, "\tABI = %5s\n", abi_string);

  if (TARGET_ALTIVEC_ABI)
    fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");

  if (TARGET_SPE_ABI)
    fprintf (stderr, "\tSPE ABI extensions enabled.\n");

  if (info->first_gp_reg_save != 32)
    fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);

  if (info->first_fp_reg_save != 64)
    fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);

  if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
    fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
	     info->first_altivec_reg_save);

  if (info->lr_save_p)
    fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);

  if (info->cr_save_p)
    fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);

  if (info->toc_save_p)
    fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);

  if (info->vrsave_mask)
    fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);

  if (info->push_p)
    fprintf (stderr, "\tpush_p = %5d\n", info->push_p);

  if (info->calls_p)
    fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);

  if (info->gp_save_offset)
    fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);

  if (info->fp_save_offset)
    fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);

  if (info->altivec_save_offset)
    fprintf (stderr, "\taltivec_save_offset = %5d\n",
	     info->altivec_save_offset);

  if (info->spe_gp_save_offset)
    fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
	     info->spe_gp_save_offset);

  if (info->vrsave_save_offset)
    fprintf (stderr, "\tvrsave_save_offset = %5d\n",
	     info->vrsave_save_offset);

  if (info->lr_save_offset)
    fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);

  if (info->cr_save_offset)
    fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);

  if (info->toc_save_offset)
    fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);

  if (info->varargs_save_offset)
    fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);

  if (info->total_size)
    fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);

  if (info->varargs_size)
    fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);

  if (info->vars_size)
    fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);

  if (info->parm_size)
    fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);

  if (info->fixed_size)
    fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);

  if (info->gp_size)
    fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);

  if (info->spe_gp_size)
    fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);

  if (info->fp_size)
    fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);

  if (info->altivec_size)
    fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);

  if (info->vrsave_size)
    fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);

  if (info->altivec_padding_size)
    fprintf (stderr, "\taltivec_padding_size= %5d\n",
	     info->altivec_padding_size);

  if (info->spe_padding_size)
    fprintf (stderr, "\tspe_padding_size = %5d\n",
	     info->spe_padding_size);

  if (info->lr_size)
    fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);

  if (info->cr_size)
    fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);

  if (info->toc_size)
    fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);

  if (info->save_size)
    fprintf (stderr, "\tsave_size = %5d\n", info->save_size);

  if (info->reg_size != 4)
    fprintf (stderr, "\treg_size = %5d\n", info->reg_size);

  fprintf (stderr, "\n");
}
9492
9493 rtx
9494 rs6000_return_addr (count, frame)
9495 int count;
9496 rtx frame;
9497 {
9498 /* Currently we don't optimize very well between prolog and body
9499 code and for PIC code the code can be actually quite bad, so
9500 don't try to be too clever here. */
9501 if (count != 0 || flag_pic != 0)
9502 {
9503 cfun->machine->ra_needs_full_frame = 1;
9504
9505 return
9506 gen_rtx_MEM
9507 (Pmode,
9508 memory_address
9509 (Pmode,
9510 plus_constant (copy_to_reg
9511 (gen_rtx_MEM (Pmode,
9512 memory_address (Pmode, frame))),
9513 RETURN_ADDRESS_OFFSET)));
9514 }
9515
9516 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
9517 }
9518
9519 /* Say whether a function is a candidate for sibcall handling or not.
9520 We do not allow indirect calls to be optimized into sibling calls.
9521 Also, we can't do it if there are any vector parameters; there's
9522 nowhere to put the VRsave code so it works; note that functions with
9523 vector parameters are required to have a prototype, so the argument
9524 type info must be available here. (The tail recursion case can work
9525 with vector parameters, but there's no way to distinguish here.) */
9526 static bool
9527 rs6000_function_ok_for_sibcall (decl, exp)
9528 tree decl;
9529 tree exp ATTRIBUTE_UNUSED;
9530 {
9531 tree type;
9532 if (decl)
9533 {
9534 if (TARGET_ALTIVEC_VRSAVE)
9535 {
9536 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
9537 type; type = TREE_CHAIN (type))
9538 {
9539 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
9540 return false;
9541 }
9542 }
9543 if (DEFAULT_ABI == ABI_DARWIN
9544 || (*targetm.binds_local_p) (decl))
9545 {
9546 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
9547
9548 if (!lookup_attribute ("longcall", attr_list)
9549 || lookup_attribute ("shortcall", attr_list))
9550 return true;
9551 }
9552 }
9553 return false;
9554 }
9555
9556 static int
9557 rs6000_ra_ever_killed ()
9558 {
9559 rtx top;
9560 rtx reg;
9561 rtx insn;
9562
9563 /* Irritatingly, there are two kinds of thunks -- those created with
9564 TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
9565 through the regular part of the compiler. This is a very hacky
9566 way to tell them apart. */
9567 if (current_function_is_thunk && !no_new_pseudos)
9568 return 0;
9569
9570 /* regs_ever_live has LR marked as used if any sibcalls are present,
9571 but this should not force saving and restoring in the
9572 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
9573 clobbers LR, so that is inappropriate. */
9574
9575 /* Also, the prologue can generate a store into LR that
9576 doesn't really count, like this:
9577
9578 move LR->R0
9579 bcl to set PIC register
9580 move LR->R31
9581 move R0->LR
9582
9583 When we're called from the epilogue, we need to avoid counting
9584 this as a store. */
9585
9586 push_topmost_sequence ();
9587 top = get_insns ();
9588 pop_topmost_sequence ();
9589 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
9590
9591 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
9592 {
9593 if (INSN_P (insn))
9594 {
9595 if (FIND_REG_INC_NOTE (insn, reg))
9596 return 1;
9597 else if (GET_CODE (insn) == CALL_INSN
9598 && !SIBLING_CALL_P (insn))
9599 return 1;
9600 else if (set_of (reg, insn) != NULL_RTX
9601 && !prologue_epilogue_contains (insn))
9602 return 1;
9603 }
9604 }
9605 return 0;
9606 }
9607 \f
9608 /* Add a REG_MAYBE_DEAD note to the insn. */
9609 static void
9610 rs6000_maybe_dead (insn)
9611 rtx insn;
9612 {
9613 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
9614 const0_rtx,
9615 REG_NOTES (insn));
9616 }
9617
/* Emit instructions needed to load the TOC register.
   This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
   a constant pool; or for SVR4 -fpic.

   FROMPROLOG is nonzero when called while emitting the prologue; in
   that case only fixed registers (LR, r0) are used as temporaries,
   otherwise fresh pseudos are allocated.  All emitted insns get a
   REG_MAYBE_DEAD note so flow may delete them if the TOC register
   turns out to be unused.  */

void
rs6000_emit_load_toc_table (fromprolog)
     int fromprolog;
{
  rtx dest;
  dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);

  /* SVR4 -fpic: one pattern computes the GOT pointer into TEMP,
     which is then copied to the PIC register.  */
  if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
    {
      rtx temp = (fromprolog
		  ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		  : gen_reg_rtx (Pmode));
      rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp)));
      rs6000_maybe_dead (emit_move_insn (dest, temp));
    }
  /* SVR4-ish -fPIC: derive the TOC base from the PC via internal
     labels, then add the loaded displacement to DEST below.  */
  else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
    {
      char buf[30];
      rtx tempLR = (fromprolog
		    ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		    : gen_reg_rtx (Pmode));
      rtx temp0 = (fromprolog
		   ? gen_rtx_REG (Pmode, 0)
		   : gen_reg_rtx (Pmode));
      rtx symF;

      /* possibly create the toc section */
      if (! toc_initialized)
	{
	  toc_section ();
	  function_section (current_function_decl);
	}

      if (fromprolog)
	{
	  rtx symL;

	  /* LCF labels the point whose address load_toc_v4_PIC_1
	     captures; LCL labels the TOC base.  Both use the same
	     per-function counter so they pair up.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
	  symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
							       symF)));
	  rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	  /* temp0 gets the displacement LCL - LCF.  */
	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
							       symL,
							       symF)));
	}
      else
	{
	  rtx tocsym;
	  /* Counter giving each mid-function TOC reload site a unique
	     LCG label; persists across calls.  */
	  static int reload_toc_labelno = 0;

	  tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR,
								symF,
								tocsym)));
	  rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	  /* The word at the captured address holds the distance to
	     the TOC base; load it for the final add.  */
	  rs6000_maybe_dead (emit_move_insn (temp0,
					     gen_rtx_MEM (Pmode, dest)));
	}
      /* dest = pc-derived value + displacement.  NOTE(review): this
	 uses gen_addsi3 unconditionally, so it assumes Pmode == SImode
	 on this path (32-bit ELF) -- confirm.  */
      rs6000_maybe_dead (emit_insn (gen_addsi3 (dest, temp0, dest)));
    }
  else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
    {
      /* This is for AIX code running in non-PIC ELF32.  The TOC base
	 is the link-time constant label LCTOC1, materialized with a
	 lis/addi (elf_high/elf_low) pair.  */
      char buf[30];
      rtx realsym;
      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
      realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

      rs6000_maybe_dead (emit_insn (gen_elf_high (dest, realsym)));
      rs6000_maybe_dead (emit_insn (gen_elf_low (dest, dest, realsym)));
    }
  else if (DEFAULT_ABI == ABI_AIX)
    {
      /* AIX ABI: one dedicated pattern per word size reloads the TOC
	 register.  */
      if (TARGET_32BIT)
	rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest)));
      else
	rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest)));
    }
  else
    abort ();
}
9712
/* Return the alias set shared by all TOC references, allocating it
   lazily on first use.  */
int
get_TOC_alias_set ()
{
  static int set = -1;

  if (set < 0)
    set = new_alias_set ();

  return set;
}
9721
/* This returns nonzero if the current function uses the TOC.  This is
   determined by the presence of (unspec ... 7), which is generated by
   the various load_toc_* patterns.  */
9725
9726 int
9727 uses_TOC ()
9728 {
9729 rtx insn;
9730
9731 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9732 if (INSN_P (insn))
9733 {
9734 rtx pat = PATTERN (insn);
9735 int i;
9736
9737 if (GET_CODE (pat) == PARALLEL)
9738 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
9739 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
9740 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
9741 return 1;
9742 }
9743 return 0;
9744 }
9745
9746 rtx
9747 create_TOC_reference (symbol)
9748 rtx symbol;
9749 {
9750 return gen_rtx_PLUS (Pmode,
9751 gen_rtx_REG (Pmode, TOC_REGISTER),
9752 gen_rtx_CONST (Pmode,
9753 gen_rtx_MINUS (Pmode, symbol,
9754 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
9755 }
9756
9757 #if TARGET_AIX
9758 /* __throw will restore its own return address to be the same as the
9759 return address of the function that the throw is being made to.
9760 This is unfortunate, because we want to check the original
9761 return address to see if we need to restore the TOC.
9762 So we have to squirrel it away here.
9763 This is used only in compiling __throw and __rethrow.
9764
9765 Most of this code should be removed by CSE. */
9766 static rtx insn_after_throw;
9767
9768 /* This does the saving... */
9769 void
9770 rs6000_aix_emit_builtin_unwind_init ()
9771 {
9772 rtx mem;
9773 rtx stack_top = gen_reg_rtx (Pmode);
9774 rtx opcode_addr = gen_reg_rtx (Pmode);
9775
9776 insn_after_throw = gen_reg_rtx (SImode);
9777
9778 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
9779 emit_move_insn (stack_top, mem);
9780
9781 mem = gen_rtx_MEM (Pmode,
9782 gen_rtx_PLUS (Pmode, stack_top,
9783 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9784 emit_move_insn (opcode_addr, mem);
9785 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
9786 }
9787
/* Emit insns to _restore_ the TOC register, at runtime (specifically
   in _eh.o).  Only used on AIX.

   The idea is that on AIX, function calls look like this:
	bl  somefunction-trampoline
	lwz r2,20(sp)

   and later,
	somefunction-trampoline:
	stw r2,20(sp)
	 ... load function address in the count register ...
	bctr
   or like this, if the linker determines that this is not a cross-module call
   and so the TOC need not be restored:
	bl  somefunction
	nop
   or like this, if the compiler could determine that this is not a
   cross-module call:
	bl  somefunction
   now, the tricky bit here is that register 2 is saved and restored
   by the _linker_, so we can't readily generate debugging information
   for it.  So we need to go back up the call chain looking at the
   insns at return addresses to see which calls saved the TOC register
   and so see where it gets restored from.

   Oh, and all this gets done in RTL inside the eh_epilogue pattern,
   just before the actual epilogue.

   On the bright side, this incurs no space or time overhead unless an
   exception is thrown, except for the extra code in libgcc.a.

   The parameter STACKSIZE is a register containing (at runtime)
   the amount to be popped off the stack in addition to the stack frame
   of this routine (which will be __throw or __rethrow, and so is
   guaranteed to have a stack frame).  */

void
rs6000_emit_eh_toc_restore (stacksize)
     rtx stacksize;
{
  rtx top_of_stack;
  rtx bottom_of_stack = gen_reg_rtx (Pmode);
  rtx tocompare = gen_reg_rtx (SImode);
  rtx opcode = gen_reg_rtx (SImode);
  rtx opcode_addr = gen_reg_rtx (Pmode);
  rtx mem;
  rtx loop_start = gen_label_rtx ();
  rtx no_toc_restore_needed = gen_label_rtx ();
  rtx loop_exit = gen_label_rtx ();

  /* bottom_of_stack = back-chain word of this routine's frame.  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  /* top_of_stack marks where the frame walk below must stop.  */
  top_of_stack = expand_binop (Pmode, add_optab,
			       bottom_of_stack, stacksize,
			       NULL_RTX, 1, OPTAB_WIDEN);

  /* The instruction that follows a call needing a TOC restore:
     "lwz r2,20(r1)" (32-bit) or "ld r2,40(r1)" (64-bit) -- matching
     the 5-word offset used for the reload below.  */
  emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
					   : 0xE8410028, SImode));

  /* rs6000_aix_emit_builtin_unwind_init must have been expanded
     first to seed the opcode at the original return address.  */
  if (insn_after_throw == NULL_RTX)
    abort ();
  emit_move_insn (opcode, insn_after_throw);

  emit_note (NULL, NOTE_INSN_LOOP_BEG);
  emit_label (loop_start);

  /* If the opcode at the return address is not a TOC reload, this
     frame's call kept r2 intact: skip the restore.  */
  do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
			   SImode, NULL_RTX, NULL_RTX,
			   no_toc_restore_needed);

  /* Reload r2 from the TOC save slot, five pointers into the frame
     (20 bytes on 32-bit, 40 on 64-bit, matching the opcodes above).  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (5 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (gen_rtx_REG (Pmode, 2), mem);

  emit_label (no_toc_restore_needed);

  /* Done once the walk has climbed to top_of_stack.  */
  do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
			   Pmode, NULL_RTX, NULL_RTX,
			   loop_exit);

  /* Follow the back chain to the next outer frame.  */
  mem = gen_rtx_MEM (Pmode, bottom_of_stack);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  /* Fetch the opcode at that frame's saved return address (two
     pointers up) for the next iteration's comparison.  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));

  emit_note (NULL, NOTE_INSN_LOOP_CONT);
  emit_jump (loop_start);
  emit_note (NULL, NOTE_INSN_LOOP_END);
  emit_label (loop_exit);
}
9885 #endif /* TARGET_AIX */
9886 \f
9887 /* This ties together stack memory (MEM with an alias set of
9888 rs6000_sr_alias_set) and the change to the stack pointer. */
9889
9890 static void
9891 rs6000_emit_stack_tie ()
9892 {
9893 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
9894
9895 set_mem_alias_set (mem, rs6000_sr_alias_set);
9896 emit_insn (gen_stack_tie (mem));
9897 }
9898
/* Emit the correct code for allocating stack space, as insns.
   If COPY_R12, make sure a copy of the old frame is left in r12.
   The generated code may use hard register 0 as a temporary.

   The allocating insn is marked RTX_FRAME_RELATED_P with a
   REG_FRAME_RELATED_EXPR note of the canonical sp = sp - SIZE form so
   the unwinder sees the net effect regardless of how the decrement
   was actually emitted.  */

static void
rs6000_emit_allocate_stack (size, copy_r12)
     HOST_WIDE_INT size;
     int copy_r12;
{
  rtx insn;
  rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx tmp_reg = gen_rtx_REG (Pmode, 0);
  rtx todec = GEN_INT (-size);

  /* -fstack-limit support: trap before the decrement if the new stack
     pointer would fall below the limit.  */
  if (current_function_limit_stack)
    {
      /* Limit in a register (r2..r31): tmp = limit + size, trap if
	 sp < tmp (i.e. sp - size < limit).  */
      if (REG_P (stack_limit_rtx)
	  && REGNO (stack_limit_rtx) > 1
	  && REGNO (stack_limit_rtx) <= 31)
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size))
		     : gen_adddi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size)))
;
	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      /* Limit as a symbol (32-bit SVR4 only): materialize symbol+size
	 with a lis/addi pair, then trap the same way.  */
      else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
	       && TARGET_32BIT
	       && DEFAULT_ABI == ABI_V4)
	{
	  rtx toload = gen_rtx_CONST (VOIDmode,
				      gen_rtx_PLUS (Pmode,
						    stack_limit_rtx,
						    GEN_INT (size)));

	  emit_insn (gen_elf_high (tmp_reg, toload));
	  emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else
	warning ("stack limit expression is not supported");
    }

  /* Without update forms, r12 is needed below to store the back
     chain, so copy the old sp there in that case too.  */
  if (copy_r12 || ! TARGET_UPDATE)
    emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);

  if (TARGET_UPDATE)
    {
      /* The update forms take a 16-bit displacement; larger sizes
	 must go through a register.  */
      if (size > 32767)
	{
	  /* Need a note here so that try_split doesn't get confused.  */
	  if (get_last_insn() == NULL_RTX)
	    emit_note (0, NOTE_INSN_DELETED)
;
	  insn = emit_move_insn (tmp_reg, todec);
	  try_split (PATTERN (insn), insn, 0);
	  todec = tmp_reg;
	}

      /* stwu/stdu: decrement sp and store the back chain atomically.  */
      insn = emit_insn (TARGET_32BIT
			? gen_movsi_update (stack_reg, stack_reg,
					    todec, stack_reg)
			: gen_movdi_update (stack_reg, stack_reg,
					    todec, stack_reg));
    }
  else
    {
      /* No update forms: decrement sp, then store the saved sp (in
	 r12, set above) as the back chain.  */
      insn = emit_insn (TARGET_32BIT
			? gen_addsi3 (stack_reg, stack_reg, todec)
			: gen_adddi3 (stack_reg, stack_reg, todec));
      emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
		      gen_rtx_REG (Pmode, 12));
    }

  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) =
    gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
		       gen_rtx_SET (VOIDmode, stack_reg,
				    gen_rtx_PLUS (Pmode, stack_reg,
						  GEN_INT (-size))),
		       REG_NOTES (insn));
}
9986
9987 /* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
9988 knows that:
9989
9990 (mem (plus (blah) (regXX)))
9991
9992 is really:
9993
9994 (mem (plus (blah) (const VALUE_OF_REGXX))). */
9995
9996 static void
9997 altivec_frame_fixup (insn, reg, val)
9998 rtx insn, reg;
9999 HOST_WIDE_INT val;
10000 {
10001 rtx real;
10002
10003 real = copy_rtx (PATTERN (insn));
10004
10005 real = replace_rtx (real, reg, GEN_INT (val));
10006
10007 RTX_FRAME_RELATED_P (insn) = 1;
10008 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10009 real,
10010 REG_NOTES (insn));
10011 }
10012
10013 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
10014 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
10015 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
10016 deduce these equivalences by itself so it wasn't necessary to hold
10017 its hand so much. */
10018
10019 static void
10020 rs6000_frame_related (insn, reg, val, reg2, rreg)
10021 rtx insn;
10022 rtx reg;
10023 HOST_WIDE_INT val;
10024 rtx reg2;
10025 rtx rreg;
10026 {
10027 rtx real, temp;
10028
10029 /* copy_rtx will not make unique copies of registers, so we need to
10030 ensure we don't have unwanted sharing here. */
10031 if (reg == reg2)
10032 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10033
10034 if (reg == rreg)
10035 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10036
10037 real = copy_rtx (PATTERN (insn));
10038
10039 if (reg2 != NULL_RTX)
10040 real = replace_rtx (real, reg2, rreg);
10041
10042 real = replace_rtx (real, reg,
10043 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
10044 STACK_POINTER_REGNUM),
10045 GEN_INT (val)));
10046
10047 /* We expect that 'real' is either a SET or a PARALLEL containing
10048 SETs (and possibly other stuff). In a PARALLEL, all the SETs
10049 are important so they all have to be marked RTX_FRAME_RELATED_P. */
10050
10051 if (GET_CODE (real) == SET)
10052 {
10053 rtx set = real;
10054
10055 temp = simplify_rtx (SET_SRC (set));
10056 if (temp)
10057 SET_SRC (set) = temp;
10058 temp = simplify_rtx (SET_DEST (set));
10059 if (temp)
10060 SET_DEST (set) = temp;
10061 if (GET_CODE (SET_DEST (set)) == MEM)
10062 {
10063 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10064 if (temp)
10065 XEXP (SET_DEST (set), 0) = temp;
10066 }
10067 }
10068 else if (GET_CODE (real) == PARALLEL)
10069 {
10070 int i;
10071 for (i = 0; i < XVECLEN (real, 0); i++)
10072 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
10073 {
10074 rtx set = XVECEXP (real, 0, i);
10075
10076 temp = simplify_rtx (SET_SRC (set));
10077 if (temp)
10078 SET_SRC (set) = temp;
10079 temp = simplify_rtx (SET_DEST (set));
10080 if (temp)
10081 SET_DEST (set) = temp;
10082 if (GET_CODE (SET_DEST (set)) == MEM)
10083 {
10084 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10085 if (temp)
10086 XEXP (SET_DEST (set), 0) = temp;
10087 }
10088 RTX_FRAME_RELATED_P (set) = 1;
10089 }
10090 }
10091 else
10092 abort ();
10093
10094 RTX_FRAME_RELATED_P (insn) = 1;
10095 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10096 real,
10097 REG_NOTES (insn));
10098 }
10099
10100 /* Returns an insn that has a vrsave set operation with the
10101 appropriate CLOBBERs. */
10102
10103 static rtx
10104 generate_set_vrsave (reg, info, epiloguep)
10105 rtx reg;
10106 rs6000_stack_t *info;
10107 int epiloguep;
10108 {
10109 int nclobs, i;
10110 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
10111 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
10112
10113 clobs[0]
10114 = gen_rtx_SET (VOIDmode,
10115 vrsave,
10116 gen_rtx_UNSPEC_VOLATILE (SImode,
10117 gen_rtvec (2, reg, vrsave),
10118 30));
10119
10120 nclobs = 1;
10121
10122 /* We need to clobber the registers in the mask so the scheduler
10123 does not move sets to VRSAVE before sets of AltiVec registers.
10124
10125 However, if the function receives nonlocal gotos, reload will set
10126 all call saved registers live. We will end up with:
10127
10128 (set (reg 999) (mem))
10129 (parallel [ (set (reg vrsave) (unspec blah))
10130 (clobber (reg 999))])
10131
10132 The clobber will cause the store into reg 999 to be dead, and
10133 flow will attempt to delete an epilogue insn. In this case, we
10134 need an unspec use/set of the register. */
10135
10136 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
10137 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
10138 {
10139 if (!epiloguep || call_used_regs [i])
10140 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
10141 gen_rtx_REG (V4SImode, i));
10142 else
10143 {
10144 rtx reg = gen_rtx_REG (V4SImode, i);
10145
10146 clobs[nclobs++]
10147 = gen_rtx_SET (VOIDmode,
10148 reg,
10149 gen_rtx_UNSPEC (V4SImode,
10150 gen_rtvec (1, reg), 27));
10151 }
10152 }
10153
10154 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
10155
10156 for (i = 0; i < nclobs; ++i)
10157 XVECEXP (insn, 0, i) = clobs[i];
10158
10159 return insn;
10160 }
10161
10162 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
10163 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
10164
10165 static void
10166 emit_frame_save (frame_reg, frame_ptr, mode, regno, offset, total_size)
10167 rtx frame_reg;
10168 rtx frame_ptr;
10169 enum machine_mode mode;
10170 unsigned int regno;
10171 int offset;
10172 int total_size;
10173 {
10174 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
10175 rtx replacea, replaceb;
10176
10177 int_rtx = GEN_INT (offset);
10178
10179 /* Some cases that need register indexed addressing. */
10180 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
10181 || (TARGET_SPE_ABI
10182 && SPE_VECTOR_MODE (mode)
10183 && !SPE_CONST_OFFSET_OK (offset)))
10184 {
10185 /* Whomever calls us must make sure r11 is available in the
10186 flow path of instructions in the prologue. */
10187 offset_rtx = gen_rtx_REG (Pmode, 11);
10188 emit_move_insn (offset_rtx, int_rtx);
10189
10190 replacea = offset_rtx;
10191 replaceb = int_rtx;
10192 }
10193 else
10194 {
10195 offset_rtx = int_rtx;
10196 replacea = NULL_RTX;
10197 replaceb = NULL_RTX;
10198 }
10199
10200 reg = gen_rtx_REG (mode, regno);
10201 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
10202 mem = gen_rtx_MEM (mode, addr);
10203 set_mem_alias_set (mem, rs6000_sr_alias_set);
10204
10205 insn = emit_move_insn (mem, reg);
10206
10207 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
10208 }
10209
10210 /* Emit an offset memory reference suitable for a frame store, while
10211 converting to a valid addressing mode. */
10212
10213 static rtx
10214 gen_frame_mem_offset (mode, reg, offset)
10215 enum machine_mode mode;
10216 rtx reg;
10217 int offset;
10218 {
10219 rtx int_rtx, offset_rtx;
10220
10221 int_rtx = GEN_INT (offset);
10222
10223 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
10224 {
10225 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10226 emit_move_insn (offset_rtx, int_rtx);
10227 }
10228 else
10229 offset_rtx = int_rtx;
10230
10231 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
10232 }
10233
10234 /* Emit function prologue as insns. */
10235
10236 void
10237 rs6000_emit_prologue ()
10238 {
10239 rs6000_stack_t *info = rs6000_stack_info ();
10240 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
10241 int reg_size = TARGET_POWERPC64 ? 8 : 4;
10242 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
10243 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
10244 rtx frame_reg_rtx = sp_reg_rtx;
10245 rtx cr_save_rtx = NULL;
10246 rtx insn;
10247 int saving_FPRs_inline;
10248 int using_store_multiple;
10249 HOST_WIDE_INT sp_offset = 0;
10250
10251 if (TARGET_SPE_ABI)
10252 {
10253 reg_mode = V2SImode;
10254 reg_size = 8;
10255 }
10256
10257 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
10258 && !TARGET_SPE_ABI
10259 && info->first_gp_reg_save < 31);
10260 saving_FPRs_inline = (info->first_fp_reg_save == 64
10261 || FP_SAVE_INLINE (info->first_fp_reg_save));
10262
10263 /* For V.4, update stack before we do any saving and set back pointer. */
10264 if (info->push_p && DEFAULT_ABI == ABI_V4)
10265 {
10266 if (info->total_size < 32767)
10267 sp_offset = info->total_size;
10268 else
10269 frame_reg_rtx = frame_ptr_rtx;
10270 rs6000_emit_allocate_stack (info->total_size,
10271 (frame_reg_rtx != sp_reg_rtx
10272 && (info->cr_save_p
10273 || info->lr_save_p
10274 || info->first_fp_reg_save < 64
10275 || info->first_gp_reg_save < 32
10276 )));
10277 if (frame_reg_rtx != sp_reg_rtx)
10278 rs6000_emit_stack_tie ();
10279 }
10280
10281 /* Save AltiVec registers if needed. */
10282 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
10283 {
10284 int i;
10285
10286 /* There should be a non inline version of this, for when we
10287 are saving lots of vector registers. */
10288 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
10289 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
10290 {
10291 rtx areg, savereg, mem;
10292 int offset;
10293
10294 offset = info->altivec_save_offset + sp_offset
10295 + 16 * (i - info->first_altivec_reg_save);
10296
10297 savereg = gen_rtx_REG (V4SImode, i);
10298
10299 areg = gen_rtx_REG (Pmode, 0);
10300 emit_move_insn (areg, GEN_INT (offset));
10301
10302 /* AltiVec addressing mode is [reg+reg]. */
10303 mem = gen_rtx_MEM (V4SImode,
10304 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
10305
10306 set_mem_alias_set (mem, rs6000_sr_alias_set);
10307
10308 insn = emit_move_insn (mem, savereg);
10309
10310 altivec_frame_fixup (insn, areg, offset);
10311 }
10312 }
10313
10314 /* VRSAVE is a bit vector representing which AltiVec registers
10315 are used. The OS uses this to determine which vector
10316 registers to save on a context switch. We need to save
10317 VRSAVE on the stack frame, add whatever AltiVec registers we
10318 used in this function, and do the corresponding magic in the
10319 epilogue. */
10320
10321 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
10322 {
10323 rtx reg, mem, vrsave;
10324 int offset;
10325
10326 /* Get VRSAVE onto a GPR. */
10327 reg = gen_rtx_REG (SImode, 12);
10328 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
10329 if (TARGET_MACHO)
10330 emit_insn (gen_get_vrsave_internal (reg));
10331 else
10332 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
10333
10334 /* Save VRSAVE. */
10335 offset = info->vrsave_save_offset + sp_offset;
10336 mem
10337 = gen_rtx_MEM (SImode,
10338 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
10339 set_mem_alias_set (mem, rs6000_sr_alias_set);
10340 insn = emit_move_insn (mem, reg);
10341
10342 /* Include the registers in the mask. */
10343 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
10344
10345 insn = emit_insn (generate_set_vrsave (reg, info, 0));
10346 }
10347
10348 /* If we use the link register, get it into r0. */
10349 if (info->lr_save_p)
10350 emit_move_insn (gen_rtx_REG (Pmode, 0),
10351 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10352
10353 /* If we need to save CR, put it into r12. */
10354 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
10355 {
10356 cr_save_rtx = gen_rtx_REG (SImode, 12);
10357 emit_insn (gen_movesi_from_cr (cr_save_rtx));
10358 }
10359
10360 /* Do any required saving of fpr's. If only one or two to save, do
10361 it ourselves. Otherwise, call function. */
10362 if (saving_FPRs_inline)
10363 {
10364 int i;
10365 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
10366 if ((regs_ever_live[info->first_fp_reg_save+i]
10367 && ! call_used_regs[info->first_fp_reg_save+i]))
10368 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
10369 info->first_fp_reg_save + i,
10370 info->fp_save_offset + sp_offset + 8 * i,
10371 info->total_size);
10372 }
10373 else if (info->first_fp_reg_save != 64)
10374 {
10375 int i;
10376 char rname[30];
10377 const char *alloc_rname;
10378 rtvec p;
10379 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
10380
10381 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
10382 gen_rtx_REG (Pmode,
10383 LINK_REGISTER_REGNUM));
10384 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
10385 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
10386 alloc_rname = ggc_strdup (rname);
10387 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
10388 gen_rtx_SYMBOL_REF (Pmode,
10389 alloc_rname));
10390 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
10391 {
10392 rtx addr, reg, mem;
10393 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
10394 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10395 GEN_INT (info->fp_save_offset
10396 + sp_offset + 8*i));
10397 mem = gen_rtx_MEM (DFmode, addr);
10398 set_mem_alias_set (mem, rs6000_sr_alias_set);
10399
10400 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
10401 }
10402 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
10403 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10404 NULL_RTX, NULL_RTX);
10405 }
10406
10407 /* Save GPRs. This is done as a PARALLEL if we are using
10408 the store-multiple instructions. */
10409 if (using_store_multiple)
10410 {
10411 rtvec p;
10412 int i;
10413 p = rtvec_alloc (32 - info->first_gp_reg_save);
10414 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10415 {
10416 rtx addr, reg, mem;
10417 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
10418 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10419 GEN_INT (info->gp_save_offset
10420 + sp_offset
10421 + reg_size * i));
10422 mem = gen_rtx_MEM (reg_mode, addr);
10423 set_mem_alias_set (mem, rs6000_sr_alias_set);
10424
10425 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
10426 }
10427 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
10428 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10429 NULL_RTX, NULL_RTX);
10430 }
10431 else
10432 {
10433 int i;
10434 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10435 if ((regs_ever_live[info->first_gp_reg_save+i]
10436 && ! call_used_regs[info->first_gp_reg_save+i])
10437 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
10438 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
10439 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
10440 {
10441 rtx addr, reg, mem;
10442 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
10443
10444 if (TARGET_SPE_ABI)
10445 {
10446 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
10447 rtx b;
10448
10449 if (!SPE_CONST_OFFSET_OK (offset))
10450 {
10451 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10452 emit_move_insn (b, GEN_INT (offset));
10453 }
10454 else
10455 b = GEN_INT (offset);
10456
10457 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
10458 mem = gen_rtx_MEM (V2SImode, addr);
10459 set_mem_alias_set (mem, rs6000_sr_alias_set);
10460 insn = emit_move_insn (mem, reg);
10461
10462 if (GET_CODE (b) == CONST_INT)
10463 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10464 NULL_RTX, NULL_RTX);
10465 else
10466 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10467 b, GEN_INT (offset));
10468 }
10469 else
10470 {
10471 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10472 GEN_INT (info->gp_save_offset
10473 + sp_offset
10474 + reg_size * i));
10475 mem = gen_rtx_MEM (reg_mode, addr);
10476 set_mem_alias_set (mem, rs6000_sr_alias_set);
10477
10478 insn = emit_move_insn (mem, reg);
10479 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10480 NULL_RTX, NULL_RTX);
10481 }
10482 }
10483 }
10484
10485 /* ??? There's no need to emit actual instructions here, but it's the
10486 easiest way to get the frame unwind information emitted. */
10487 if (current_function_calls_eh_return)
10488 {
10489 unsigned int i, regno;
10490
10491 for (i = 0; ; ++i)
10492 {
10493 regno = EH_RETURN_DATA_REGNO (i);
10494 if (regno == INVALID_REGNUM)
10495 break;
10496
10497 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
10498 info->ehrd_offset + sp_offset
10499 + reg_size * (int) i,
10500 info->total_size);
10501 }
10502 }
10503
10504 /* Save lr if we used it. */
10505 if (info->lr_save_p)
10506 {
10507 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10508 GEN_INT (info->lr_save_offset + sp_offset));
10509 rtx reg = gen_rtx_REG (Pmode, 0);
10510 rtx mem = gen_rtx_MEM (Pmode, addr);
10511 /* This should not be of rs6000_sr_alias_set, because of
10512 __builtin_return_address. */
10513
10514 insn = emit_move_insn (mem, reg);
10515 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10516 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10517 }
10518
10519 /* Save CR if we use any that must be preserved. */
10520 if (info->cr_save_p)
10521 {
10522 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10523 GEN_INT (info->cr_save_offset + sp_offset));
10524 rtx mem = gen_rtx_MEM (SImode, addr);
10525
10526 set_mem_alias_set (mem, rs6000_sr_alias_set);
10527
10528 /* If r12 was used to hold the original sp, copy cr into r0 now
10529 that it's free. */
10530 if (REGNO (frame_reg_rtx) == 12)
10531 {
10532 cr_save_rtx = gen_rtx_REG (SImode, 0);
10533 emit_insn (gen_movesi_from_cr (cr_save_rtx));
10534 }
10535 insn = emit_move_insn (mem, cr_save_rtx);
10536
10537 /* Now, there's no way that dwarf2out_frame_debug_expr is going
10538 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
10539 OK. All we have to do is specify that _one_ condition code
10540 register is saved in this stack slot. The thrower's epilogue
10541 will then restore all the call-saved registers.
10542 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
10543 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10544 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
10545 }
10546
10547 /* Update stack and set back pointer unless this is V.4,
10548 for which it was done previously. */
10549 if (info->push_p && DEFAULT_ABI != ABI_V4)
10550 rs6000_emit_allocate_stack (info->total_size, FALSE);
10551
10552 /* Set frame pointer, if needed. */
10553 if (frame_pointer_needed)
10554 {
10555 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
10556 sp_reg_rtx);
10557 RTX_FRAME_RELATED_P (insn) = 1;
10558 }
10559
10560 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
10561 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
10562 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
10563 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
10564 {
10565 /* If emit_load_toc_table will use the link register, we need to save
10566 it. We use R11 for this purpose because emit_load_toc_table
10567 can use register 0. This allows us to use a plain 'blr' to return
10568 from the procedure more often. */
10569 int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
10570 && ! info->lr_save_p
10571 && EXIT_BLOCK_PTR->pred != NULL);
10572 if (save_LR_around_toc_setup)
10573 emit_move_insn (gen_rtx_REG (Pmode, 11),
10574 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10575
10576 rs6000_emit_load_toc_table (TRUE);
10577
10578 if (save_LR_around_toc_setup)
10579 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
10580 gen_rtx_REG (Pmode, 11));
10581 }
10582
10583 #if TARGET_MACHO
10584 if (DEFAULT_ABI == ABI_DARWIN
10585 && flag_pic && current_function_uses_pic_offset_table)
10586 {
10587 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
10588 char *picbase = machopic_function_base_name ();
10589 rtx src = gen_rtx_SYMBOL_REF (Pmode, ggc_alloc_string (picbase, -1));
10590
10591 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));
10592
10593 rs6000_maybe_dead (
10594 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
10595 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
10596 }
10597 #endif
10598 }
10599
10600 /* Write function prologue. */
10601
static void
rs6000_output_function_prologue (file, size)
     FILE *file;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  rs6000_stack_t *info = rs6000_stack_info ();

  if (TARGET_DEBUG_STACK)
    debug_stack_info (info);

  /* Write .extern for any function we will call to save and restore
     fp values.  */
  if (info->first_fp_reg_save < 64
      && !FP_SAVE_INLINE (info->first_fp_reg_save))
    fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
	     SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
	     RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
	     RESTORE_FP_SUFFIX);

  /* Write .extern for AIX common mode routines, if needed.  The
     common_mode_defined flag ensures these are emitted at most once
     per output file.  */
  if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
    {
      fputs ("\t.extern __mulh\n", file);
      fputs ("\t.extern __mull\n", file);
      fputs ("\t.extern __divss\n", file);
      fputs ("\t.extern __divus\n", file);
      fputs ("\t.extern __quoss\n", file);
      fputs ("\t.extern __quous\n", file);
      common_mode_defined = 1;
    }

  /* If there is no "prologue" insn pattern, emit the prologue RTL
     here and run final () over it to produce the assembly directly.  */
  if (! HAVE_prologue)
    {
      start_sequence ();

      /* A NOTE_INSN_DELETED is supposed to be at the start and end of
	 the "toplevel" insn chain.  */
      emit_note (0, NOTE_INSN_DELETED);
      rs6000_emit_prologue ();
      emit_note (0, NOTE_INSN_DELETED);

      /* Expand INSN_ADDRESSES so final() doesn't crash.  The values
	 need only exist; 4 bytes per insn is a placeholder (every
	 PowerPC insn is presumably 4 bytes — not relied upon here).  */
      {
	rtx insn;
	unsigned addr = 0;
	for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	  {
	    INSN_ADDRESSES_NEW (insn, addr);
	    addr += 4;
	  }
      }

      if (TARGET_DEBUG_STACK)
	debug_rtx_list (get_insns (), 100);
      final (get_insns (), file, FALSE, FALSE);
      end_sequence ();
    }

  /* Advance the PIC label counter for the next function.  */
  rs6000_pic_labelno++;
}
10662
10663 /* Emit function epilogue as insns.
10664
10665 At present, dwarf2out_frame_debug_expr doesn't understand
10666 register restores, so we don't bother setting RTX_FRAME_RELATED_P
10667 anywhere in the epilogue. Most of the insns below would in any case
10668 need special notes to explain where r11 is in relation to the stack. */
10669
void
rs6000_emit_epilogue (sibcall)
     int sibcall;
{
  rs6000_stack_t *info;
  int restoring_FPRs_inline;
  int using_load_multiple;
  int using_mfcr_multiple;
  int use_backchain_to_restore_sp;
  int sp_offset = 0;		/* Offset of saved regs from the current sp.  */
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
  rtx frame_reg_rtx = sp_reg_rtx;
  enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
  int reg_size = TARGET_POWERPC64 ? 8 : 4;
  int i;

  /* SPE saves GPRs as 64-bit V2SImode quantities.  */
  if (TARGET_SPE_ABI)
    {
      reg_mode = V2SImode;
      reg_size = 8;
    }

  info = rs6000_stack_info ();
  using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			 && !TARGET_SPE_ABI
			 && info->first_gp_reg_save < 31);
  /* FPRs are restored out of line (via a library routine) only for a
     normal return that saves several of them; sibcalls and EH returns
     always restore inline.  */
  restoring_FPRs_inline = (sibcall
			   || current_function_calls_eh_return
			   || info->first_fp_reg_save == 64
			   || FP_SAVE_INLINE (info->first_fp_reg_save));
  use_backchain_to_restore_sp = (frame_pointer_needed
				 || current_function_calls_alloca
				 || info->total_size > 32767);
  /* On these processors (or when optimizing for size) it is better to
     restore several CR fields with a single mtcrf.  */
  using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
			 || rs6000_cpu == PROCESSOR_PPC603
			 || rs6000_cpu == PROCESSOR_PPC750
			 || optimize_size);

  /* If we have a frame pointer, a call to alloca, or a large stack
     frame, restore the old stack pointer using the backchain.  Otherwise,
     we know what size to update it with.  */
  if (use_backchain_to_restore_sp)
    {
      /* Under V.4, don't reset the stack pointer until after we're done
	 loading the saved registers.  */
      if (DEFAULT_ABI == ABI_V4)
	frame_reg_rtx = gen_rtx_REG (Pmode, 11);

      /* The backchain word at 0(r1) holds the caller's sp.  */
      emit_move_insn (frame_reg_rtx,
		      gen_rtx_MEM (Pmode, sp_reg_rtx));

    }
  else if (info->push_p)
    {
      if (DEFAULT_ABI == ABI_V4)
	sp_offset = info->total_size;
      else
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size)));
	}
    }

  /* Restore AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx addr, areg, mem;

	    /* r0 holds the offset; it is dead here, so usable as scratch.  */
	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn
	      (areg, GEN_INT (info->altivec_save_offset
			      + sp_offset
			      + 16 * (i - info->first_altivec_reg_save)));

	    /* AltiVec addressing mode is [reg+reg].  */
	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
	    mem = gen_rtx_MEM (V4SImode, addr);
	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
	  }
    }

  /* Restore VRSAVE if needed.  */
  if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
    {
      rtx addr, mem, reg;

      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			   GEN_INT (info->vrsave_save_offset + sp_offset));
      mem = gen_rtx_MEM (SImode, addr);
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      reg = gen_rtx_REG (SImode, 12);
      emit_move_insn (reg, mem);

      emit_insn (generate_set_vrsave (reg, info, 1));
    }

  /* Get the old lr if we saved it.  Loaded into r0 first so the
     mtlr below can overlap with subsequent restores.  */
  if (info->lr_save_p)
    {
      rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
				      info->lr_save_offset + sp_offset);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
    }

  /* Get the old cr if we saved it.  Loaded into r12; moved back into
     the CR fields further below.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      emit_move_insn (gen_rtx_REG (SImode, 12), mem);
    }

  /* Set LR here to try to overlap restores below.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
		    gen_rtx_REG (Pmode, 0));

  /* Load exception handler data registers, if needed.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      for (i = 0; ; ++i)
	{
	  rtx mem;

	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
				      info->ehrd_offset + sp_offset
				      + reg_size * (int) i);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
	}
    }

  /* Restore GPRs.  This is done as a PARALLEL if we are using
     the load-multiple instructions.  */
  if (using_load_multiple)
    {
      rtvec p;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) =
	    gen_rtx_SET (VOIDmode,
			 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
			 mem);
	}
      emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
  else
    /* Otherwise restore one register at a time, but only those that
       were actually saved (live call-saved regs, plus the PIC register
       where the ABI requires it).  */
    for (i = 0; i < 32 - info->first_gp_reg_save; i++)
      if ((regs_ever_live[info->first_gp_reg_save+i]
	   && ! call_used_regs[info->first_gp_reg_save+i])
	  || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
	      && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
		  || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  /* Restore 64-bit quantities for SPE.  */
	  if (TARGET_SPE_ABI)
	    {
	      int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
	      rtx b;

	      /* SPE loads only accept a limited constant displacement;
		 materialize large offsets in the fixed scratch reg.  */
	      if (!SPE_CONST_OFFSET_OK (offset))
		{
		  b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
		  emit_move_insn (b, GEN_INT (offset));
		}
	      else
		b = GEN_INT (offset);

	      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
	      mem = gen_rtx_MEM (V2SImode, addr);
	    }

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode,
				       info->first_gp_reg_save + i), mem);
	}

  /* Restore fpr's if we need to do it without calling a function.  */
  if (restoring_FPRs_inline)
    for (i = 0; i < 64 - info->first_fp_reg_save; i++)
      if ((regs_ever_live[info->first_fp_reg_save+i]
	   && ! call_used_regs[info->first_fp_reg_save+i]))
	{
	  rtx addr, mem;
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset
					+ 8 * i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (DFmode,
				       info->first_fp_reg_save + i),
			  mem);
	}

  /* If we saved cr, restore it here.  Just those that were used.  */
  if (info->cr_save_p)
    {
      rtx r12_rtx = gen_rtx_REG (SImode, 12);
      int count = 0;

      if (using_mfcr_multiple)
	{
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      count++;
	  /* cr_save_p implies at least one CR field was saved.  */
	  if (count == 0)
	    abort ();
	}

      if (using_mfcr_multiple && count > 1)
	{
	  rtvec p;
	  int ndx;

	  p = rtvec_alloc (count);

	  ndx = 0;
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      {
		rtvec r = rtvec_alloc (2);
		RTVEC_ELT (r, 0) = r12_rtx;
		RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
		/* NOTE(review): 20 is presumably the unspec number the md
		   file uses for mtcrf — verify against rs6000.md.  */
		RTVEC_ELT (p, ndx) =
		  gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
			       gen_rtx_UNSPEC (CCmode, r, 20));
		ndx++;
	      }
	  emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
	  if (ndx != count)
	    abort ();
	}
      else
	for (i = 0; i < 8; i++)
	  if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	    {
	      emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
							   CR0_REGNO+i),
					      r12_rtx));
	    }
    }

  /* If this is V.4, unwind the stack pointer after all of the loads
     have been done.  We need to emit a block here so that sched
     doesn't decide to move the sp change before the register restores
     (which may not have any obvious dependency on the stack).  This
     doesn't hurt performance, because there is no scheduling that can
     be done after this point.  */
  if (DEFAULT_ABI == ABI_V4)
    {
      if (frame_reg_rtx != sp_reg_rtx)
	  rs6000_emit_stack_tie ();

      if (use_backchain_to_restore_sp)
	{
	  emit_move_insn (sp_reg_rtx, frame_reg_rtx);
	}
      else if (sp_offset != 0)
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset)));
	}
    }

  /* For an EH return, add the handler's stack adjustment.  */
  if (current_function_calls_eh_return)
    {
      rtx sa = EH_RETURN_STACKADJ_RTX;
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
		 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
    }

  /* Emit the return itself (a sibcall's caller emits its own jump).  */
  if (!sibcall)
    {
      rtvec p;
      if (! restoring_FPRs_inline)
	p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
      else
	p = rtvec_alloc (2);

      RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_REG (Pmode,
						   LINK_REGISTER_REGNUM));

      /* If we have to restore more than two FP registers, branch to the
	 restore function.  It will return to our caller.  */
      if (! restoring_FPRs_inline)
	{
	  int i;
	  char rname[30];
	  const char *alloc_rname;

	  sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
		   info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
	  alloc_rname = ggc_strdup (rname);
	  RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
					  gen_rtx_SYMBOL_REF (Pmode,
							      alloc_rname));

	  /* Describe the FP loads the out-of-line routine performs, so
	     the RTL correctly reflects the registers being set.  */
	  for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	    {
	      rtx addr, mem;
	      addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
				   GEN_INT (info->fp_save_offset + 8*i));
	      mem = gen_rtx_MEM (DFmode, addr);
	      set_mem_alias_set (mem, rs6000_sr_alias_set);

	      RTVEC_ELT (p, i+3) =
		gen_rtx_SET (VOIDmode,
			     gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
			     mem);
	    }
	}

      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
}
11033
11034 /* Write function epilogue. */
11035
static void
rs6000_output_function_epilogue (file, size)
     FILE *file;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  rs6000_stack_t *info = rs6000_stack_info ();

  /* If the md file provides no "epilogue" pattern, emit the epilogue
     RTL here and run final () over it.  */
  if (! HAVE_epilogue)
    {
      rtx insn = get_last_insn ();
      /* If the last insn was a BARRIER, we don't have to write anything except
	 the trace table.  */
      if (GET_CODE (insn) == NOTE)
	insn = prev_nonnote_insn (insn);
      if (insn == 0 ||  GET_CODE (insn) != BARRIER)
	{
	  /* This is slightly ugly, but at least we don't have two
	     copies of the epilogue-emitting code.  */
	  start_sequence ();

	  /* A NOTE_INSN_DELETED is supposed to be at the start
	     and end of the "toplevel" insn chain.  */
	  emit_note (0, NOTE_INSN_DELETED);
	  rs6000_emit_epilogue (FALSE);
	  emit_note (0, NOTE_INSN_DELETED);

	  /* Expand INSN_ADDRESSES so final() doesn't crash.  */
	  {
	    rtx insn;
	    unsigned addr = 0;
	    for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	      {
		INSN_ADDRESSES_NEW (insn, addr);
		addr += 4;
	      }
	  }

	  if (TARGET_DEBUG_STACK)
	    debug_rtx_list (get_insns (), 100);
	  final (get_insns (), file, FALSE, FALSE);
	  end_sequence ();
	}
    }

  /* Output a traceback table here.  See /usr/include/sys/debug.h for info
     on its format.

     We don't output a traceback table if -finhibit-size-directive was
     used.  The documentation for -finhibit-size-directive reads
     ``don't output a @code{.size} assembler directive, or anything
     else that would cause trouble if the function is split in the
     middle, and the two halves are placed at locations far apart in
     memory.''  The traceback table has this property, since it
     includes the offset from the start of the function to the
     traceback table itself.

     System V.4 PowerPC's (and the embedded ABI derived from it) use a
     different traceback table.  */
  if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
      && rs6000_traceback != traceback_none)
    {
      const char *fname = NULL;
      const char *language_string = lang_hooks.name;
      int fixed_parms = 0, float_parms = 0, parm_info = 0;
      int i;
      int optional_tbtab;

      /* Decide whether to emit the optional (variable-length) part of
	 the table.  */
      if (rs6000_traceback == traceback_full)
	optional_tbtab = 1;
      else if (rs6000_traceback == traceback_part)
	optional_tbtab = 0;
      else
	optional_tbtab = !optimize_size && !TARGET_ELF;

      if (optional_tbtab)
	{
	  fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
	  while (*fname == '.')	/* V.4 encodes . in the name */
	    fname++;

	  /* Need label immediately before tbtab, so we can compute
	     its offset from the function start.  */
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
	  ASM_OUTPUT_LABEL (file, fname);
	}

      /* The .tbtab pseudo-op can only be used for the first eight
	 expressions, since it can't handle the possibly variable
	 length fields that follow.  However, if you omit the optional
	 fields, the assembler outputs zeros for all optional fields
	 anyways, giving each variable length field its minimum length
	 (as defined in sys/debug.h).  Thus we can not use the .tbtab
	 pseudo-op at all.  */

      /* An all-zero word flags the start of the tbtab, for debuggers
	 that have to find it by searching forward from the entry
	 point or from the current pc.  */
      fputs ("\t.long 0\n", file);

      /* Tbtab format type.  Use format type 0.  */
      fputs ("\t.byte 0,", file);

      /* Language type.  Unfortunately, there doesn't seem to be any
	 official way to get this info, so we use language_string.  C
	 is 0.  C++ is 9.  No number defined for Obj-C, so use the
	 value for C for now.  There is no official value for Java,
	 although IBM appears to be using 13.  There is no official value
	 for Chill, so we've chosen 44 pseudo-randomly.  */
      if (! strcmp (language_string, "GNU C")
	  || ! strcmp (language_string, "GNU Objective-C"))
	i = 0;
      else if (! strcmp (language_string, "GNU F77"))
	i = 1;
      else if (! strcmp (language_string, "GNU Ada"))
	i = 3;
      else if (! strcmp (language_string, "GNU Pascal"))
	i = 2;
      else if (! strcmp (language_string, "GNU C++"))
	i = 9;
      else if (! strcmp (language_string, "GNU Java"))
	i = 13;
      else if (! strcmp (language_string, "GNU CHILL"))
	i = 44;
      else
	abort ();
      fprintf (file, "%d,", i);

      /* 8 single bit fields: global linkage (not set for C extern linkage,
	 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
	 from start of procedure stored in tbtab, internal function, function
	 has controlled storage, function has no toc, function uses fp,
	 function logs/aborts fp operations.  */
      /* Assume that fp operations are used if any fp reg must be saved.  */
      fprintf (file, "%d,",
	       (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));

      /* 6 bitfields: function is interrupt handler, name present in
	 proc table, function calls alloca, on condition directives
	 (controls stack walks, 3 bits), saves condition reg, saves
	 link reg.  */
      /* The `function calls alloca' bit seems to be set whenever reg 31 is
	 set up as a frame pointer, even when there is no alloca call.  */
      fprintf (file, "%d,",
	       ((optional_tbtab << 6)
		| ((optional_tbtab & frame_pointer_needed) << 5)
		| (info->cr_save_p << 1)
		| (info->lr_save_p)));

      /* 3 bitfields: saves backchain, fixup code, number of fpr saved
	 (6 bits).  */
      fprintf (file, "%d,",
	       (info->push_p << 7) | (64 - info->first_fp_reg_save));

      /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits).  */
      fprintf (file, "%d,", (32 - first_reg_to_save ()));

      if (optional_tbtab)
	{
	  /* Compute the parameter info from the function decl argument
	     list.  */
	  tree decl;
	  int next_parm_info_bit = 31;

	  for (decl = DECL_ARGUMENTS (current_function_decl);
	       decl; decl = TREE_CHAIN (decl))
	    {
	      rtx parameter = DECL_INCOMING_RTL (decl);
	      enum machine_mode mode = GET_MODE (parameter);

	      /* Only parameters passed in registers are described;
		 stack parameters are skipped.  */
	      if (GET_CODE (parameter) == REG)
		{
		  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
		    {
		      int bits;

		      float_parms++;

		      if (mode == SFmode)
			bits = 0x2;
		      else if (mode == DFmode || mode == TFmode)
			bits = 0x3;
		      else
			abort ();

		      /* If only one bit will fit, don't or in this entry.  */
		      if (next_parm_info_bit > 0)
			parm_info |= (bits << (next_parm_info_bit - 1));
		      next_parm_info_bit -= 2;
		    }
		  else
		    {
		      fixed_parms += ((GET_MODE_SIZE (mode)
				       + (UNITS_PER_WORD - 1))
				      / UNITS_PER_WORD);
		      next_parm_info_bit -= 1;
		    }
		}
	    }
	}

      /* Number of fixed point parameters.  */
      /* This is actually the number of words of fixed point parameters; thus
	 an 8 byte struct counts as 2; and thus the maximum value is 8.  */
      fprintf (file, "%d,", fixed_parms);

      /* 2 bitfields: number of floating point parameters (7 bits), parameters
	 all on stack.  */
      /* This is actually the number of fp registers that hold parameters;
	 and thus the maximum value is 13.  */
      /* Set parameters on stack bit if parameters are not in their original
	 registers, regardless of whether they are on the stack?  Xlc
	 seems to set the bit when not optimizing.  */
      fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));

      if (! optional_tbtab)
	return;

      /* Optional fields follow.  Some are variable length.  */

      /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
	 11 double float.  */
      /* There is an entry for each parameter in a register, in the order that
	 they occur in the parameter list.  Any intervening arguments on the
	 stack are ignored.  If the list overflows a long (max possible length
	 34 bits) then completely leave off all elements that don't fit.  */
      /* Only emit this long if there was at least one parameter.  */
      if (fixed_parms || float_parms)
	fprintf (file, "\t.long %d\n", parm_info);

      /* Offset from start of code to tb table.  */
      fputs ("\t.long ", file);
      ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      fputs ("-.", file);
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      putc ('\n', file);

      /* Interrupt handler mask.  */
      /* Omit this long, since we never set the interrupt handler bit
	 above.  */

      /* Number of CTL (controlled storage) anchors.  */
      /* Omit this long, since the has_ctl bit is never set above.  */

      /* Displacement into stack of each CTL anchor.  */
      /* Omit this list of longs, because there are no CTL anchors.  */

      /* Length of function name.  */
      if (*fname == '*')
	++fname;
      fprintf (file, "\t.short %d\n", (int) strlen (fname));

      /* Function name.  */
      assemble_string (fname, strlen (fname));

      /* Register for alloca automatic storage; this is always reg 31.
	 Only emit this if the alloca bit was set above.  */
      if (frame_pointer_needed)
	fputs ("\t.byte 31\n", file);

      fputs ("\t.align 2\n", file);
    }
}
11307 \f
11308 /* A C compound statement that outputs the assembler code for a thunk
11309 function, used to implement C++ virtual function calls with
11310 multiple inheritance. The thunk acts as a wrapper around a virtual
11311 function, adjusting the implicit object parameter before handing
11312 control off to the real function.
11313
11314 First, emit code to add the integer DELTA to the location that
11315 contains the incoming first argument. Assume that this argument
11316 contains a pointer, and is the one used to pass the `this' pointer
11317 in C++. This is the incoming argument *before* the function
11318 prologue, e.g. `%o0' on a sparc. The addition must preserve the
11319 values of all other incoming arguments.
11320
11321 After the addition, emit code to jump to FUNCTION, which is a
11322 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
11323 not touch the return address. Hence returning from FUNCTION will
11324 return to whoever called the current `thunk'.
11325
11326 The effect must be as if FUNCTION had been called directly with the
11327 adjusted first argument. This macro is responsible for emitting
11328 all of the code for a thunk function; output_function_prologue()
11329 and output_function_epilogue() are not invoked.
11330
11331 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
11332 been extracted from it.) It might possibly be useful on some
11333 targets, but probably not.
11334
11335 If you do not define this macro, the target-independent code in the
11336 C++ frontend will generate a less efficient heavyweight thunk that
11337 calls FUNCTION instead of jumping to it. The generic approach does
11338 not support varargs. */
11339
static void
rs6000_output_mi_thunk (file, thunk_fndecl, delta, vcall_offset, function)
     FILE *file;
     tree thunk_fndecl ATTRIBUTE_UNUSED;
     HOST_WIDE_INT delta;
     HOST_WIDE_INT vcall_offset;
     tree function;
{
  rtx this, insn, funexp;

  /* Pretend we are in a post-reload pass: all registers below are
     hard registers, and no new pseudos may be created.  Reset at the
     end of the function.  */
  reload_completed = 1;
  no_new_pseudos = 1;

  /* Mark the end of the (empty) prologue.  */
  emit_note (NULL, NOTE_INSN_PROLOGUE_END);

  /* Find the "this" pointer.  If the function returns a structure,
     the structure return pointer is in r3.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
    this = gen_rtx_REG (Pmode, 4);
  else
    this = gen_rtx_REG (Pmode, 3);

  /* Apply the constant offset, if required.  */
  if (delta)
    {
      rtx delta_rtx = GEN_INT (delta);
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (this, this, delta_rtx)
		 : gen_adddi3 (this, this, delta_rtx));
    }

  /* Apply the offset from the vtable, if required.  That is:
     tmp = *this; tmp = *(tmp + vcall_offset); this += tmp.
     r12 is used as the scratch register.  */
  if (vcall_offset)
    {
      rtx vcall_offset_rtx = GEN_INT (vcall_offset);
      rtx tmp = gen_rtx_REG (Pmode, 12);

      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
		 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (this, this, tmp)
		 : gen_adddi3 (this, this, tmp));
    }

  /* Generate a tail call to the target function.  */
  if (!TREE_USED (function))
    {
      assemble_external (function);
      TREE_USED (function) = 1;
    }
  funexp = XEXP (DECL_RTL (function), 0);

  /* NOTE(review): SYMBOL_REF_FLAG appears to mark targets callable
     with a direct branch — set only for same-file functions that are
     not "longcall" (or are explicitly "shortcall"); confirm against
     current_file_function_operand.  */
  SYMBOL_REF_FLAG (funexp) = 0;
  if (current_file_function_operand (funexp, VOIDmode)
      && (! lookup_attribute ("longcall",
			      TYPE_ATTRIBUTES (TREE_TYPE (function)))
	  || lookup_attribute ("shortcall",
			       TYPE_ATTRIBUTES (TREE_TYPE (function)))))
    SYMBOL_REF_FLAG (funexp) = 1;

  funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);

#if TARGET_MACHO
  if (flag_pic)
    funexp = machopic_indirect_call_target (funexp);
#endif

  /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
     generate sibcall RTL explicitly to avoid constraint abort.  */
  insn = emit_call_insn (
    gen_rtx_PARALLEL (VOIDmode,
      gen_rtvec (4,
		 gen_rtx_CALL (VOIDmode,
			       funexp, const0_rtx),
		 gen_rtx_USE (VOIDmode, const0_rtx),
		 gen_rtx_USE (VOIDmode,
			      gen_rtx_REG (SImode,
					   LINK_REGISTER_REGNUM)),
		 gen_rtx_RETURN (VOIDmode))));
  SIBLING_CALL_P (insn) = 1;
  emit_barrier ();

  /* Run just enough of rest_of_compilation to get the insns emitted.
     There's not really enough bulk here to make other passes such as
     instruction scheduling worth while.  Note that use_thunk calls
     assemble_start_function and assemble_end_function.  */
  insn = get_insns ();
  shorten_branches (insn);
  final_start_function (insn, file, 1);
  final (insn, file, 1, 0);
  final_end_function ();

  /* Restore normal compilation state.  */
  reload_completed = 0;
  no_new_pseudos = 0;
}
11439 \f
11440 /* A quick summary of the various types of 'constant-pool tables'
11441 under PowerPC:
11442
11443 Target Flags Name One table per
11444 AIX (none) AIX TOC object file
11445 AIX -mfull-toc AIX TOC object file
11446 AIX -mminimal-toc AIX minimal TOC translation unit
11447 SVR4/EABI (none) SVR4 SDATA object file
11448 SVR4/EABI -fpic SVR4 pic object file
11449 SVR4/EABI -fPIC SVR4 PIC translation unit
11450 SVR4/EABI -mrelocatable EABI TOC function
11451 SVR4/EABI -maix AIX TOC object file
11452 SVR4/EABI -maix -mminimal-toc
11453 AIX minimal TOC translation unit
11454
11455 Name Reg. Set by entries contains:
11456 made by addrs? fp? sum?
11457
11458 AIX TOC 2 crt0 as Y option option
11459 AIX minimal TOC 30 prolog gcc Y Y option
11460 SVR4 SDATA 13 crt0 gcc N Y N
11461 SVR4 pic 30 prolog ld Y not yet N
11462 SVR4 PIC 30 prolog gcc Y option option
11463 EABI TOC 30 prolog gcc Y option option
11464
11465 */
11466
/* Hash functions for the hash table.  */

/* Compute a hash value for constant K, recursing into any
   sub-expressions.  Used (via toc_hash_function) to key the table of
   already-emitted TOC entries.  */

static unsigned
rs6000_hash_constant (k)
     rtx k;
{
  enum rtx_code code = GET_CODE (k);
  enum machine_mode mode = GET_MODE (k);
  /* Seed the hash with the rtx code and machine mode.  */
  unsigned result = (code << 3) ^ mode;
  const char *format;
  int flen, fidx;

  format = GET_RTX_FORMAT (code);
  flen = strlen (format);
  fidx = 0;

  switch (code)
    {
    case LABEL_REF:
      /* Hash a label reference by the UID of the CODE_LABEL it names.  */
      return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));

    case CONST_DOUBLE:
      if (mode != VOIDmode)
	/* Floating-point constant: hash the REAL_VALUE_TYPE payload.  */
	return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
      /* A VOIDmode CONST_DOUBLE is a wide integer; hash only its two
	 word operands in the generic loop below.  */
      flen = 2;
      break;

    case CODE_LABEL:
      /* Skip the first three operands — presumably insn-chain
	 bookkeeping rather than identity (see rtl.def) — and hash the
	 remaining fields.  */
      fidx = 3;
      break;

    default:
      break;
    }

  /* Generic case: fold each operand into the hash according to its
     rtx format letter.  */
  for (; fidx < flen; fidx++)
    switch (format[fidx])
      {
      case 's':
	{
	  unsigned i, len;
	  const char *str = XSTR (k, fidx);
	  len = strlen (str);
	  result = result * 613 + len;
	  for (i = 0; i < len; i++)
	    result = result * 613 + (unsigned) str[i];
	  break;
	}
      case 'u':
      case 'e':
	/* Sub-expression: recurse.  */
	result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
	break;
      case 'i':
      case 'n':
	result = result * 613 + (unsigned) XINT (k, fidx);
	break;
      case 'w':
	if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
	  result = result * 613 + (unsigned) XWINT (k, fidx);
	else
	  {
	    /* Fold a HOST_WIDE_INT in one host word per iteration.
	       NOTE(review): the shift advances by CHAR_BIT per
	       iteration rather than a full word's worth of bits, so
	       successive chunks overlap; this affects hash quality
	       only, not correctness.  */
	    size_t i;
	    for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
	      result = result * 613 + (unsigned) (XWINT (k, fidx)
						  >> CHAR_BIT * i);
	  }
	break;
      default:
	abort ();
      }

  return result;
}
11540
11541 static unsigned
11542 toc_hash_function (hash_entry)
11543 const void * hash_entry;
11544 {
11545 const struct toc_hash_struct *thc =
11546 (const struct toc_hash_struct *) hash_entry;
11547 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
11548 }
11549
11550 /* Compare H1 and H2 for equivalence. */
11551
11552 static int
11553 toc_hash_eq (h1, h2)
11554 const void * h1;
11555 const void * h2;
11556 {
11557 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
11558 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
11559
11560 if (((const struct toc_hash_struct *) h1)->key_mode
11561 != ((const struct toc_hash_struct *) h2)->key_mode)
11562 return 0;
11563
11564 return rtx_equal_p (r1, r2);
11565 }
11566
/* These are the names given by the C++ front-end to vtables, and
   vtable-like objects.  Ideally, this logic should not be here;
   instead, there should be some programmatic way of inquiring as
   to whether or not an object is a vtable.

   NAME is a NUL-terminated symbol name.  The macro formerly referenced
   the caller's local variable `name' instead of its own parameter,
   which silently required every call site to have a variable spelled
   exactly `name'; it now uses the parameter, parenthesized.  */

#define VTABLE_NAME_P(NAME)					\
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0		\
   || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0		\
   || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0		\
   || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
11577
11578 void
11579 rs6000_output_symbol_ref (file, x)
11580 FILE *file;
11581 rtx x;
11582 {
11583 /* Currently C++ toc references to vtables can be emitted before it
11584 is decided whether the vtable is public or private. If this is
11585 the case, then the linker will eventually complain that there is
11586 a reference to an unknown section. Thus, for vtables only,
11587 we emit the TOC reference to reference the symbol and not the
11588 section. */
11589 const char *name = XSTR (x, 0);
11590
11591 if (VTABLE_NAME_P (name))
11592 {
11593 RS6000_OUTPUT_BASENAME (file, name);
11594 }
11595 else
11596 assemble_name (file, name);
11597 }
11598
/* Output a TOC entry.  We derive the entry name from what is being
   written.

   FILE is the assembly output stream, X the constant (or symbol) being
   placed in the TOC, LABELNO the number of the LC.. label that will
   address the entry, and MODE the mode X will be accessed in.  */

void
output_toc (file, x, labelno, mode)
     FILE *file;
     rtx x;
     int labelno;
     enum machine_mode mode;
{
  char buf[256];
  const char *name = buf;
  const char *real_name;
  rtx base = x;
  int offset = 0;

  if (TARGET_NO_TOC)
    abort ();

  /* When the linker won't eliminate them, don't output duplicate
     TOC entries (this happens on AIX if there is any kind of TOC,
     and on SVR4 under -fPIC or -mrelocatable).  Don't do this for
     CODE_LABELs.  */
  if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
    {
      struct toc_hash_struct *h;
      void * * found;

      /* Create toc_hash_table.  This can't be done at OVERRIDE_OPTIONS
	 time because GGC is not initialised at that point.  */
      if (toc_hash_table == NULL)
	toc_hash_table = htab_create_ggc (1021, toc_hash_function,
					  toc_hash_eq, NULL);

      h = ggc_alloc (sizeof (*h));
      h->key = x;
      h->key_mode = mode;
      h->labelno = labelno;

      found = htab_find_slot (toc_hash_table, h, 1);
      if (*found == NULL)
	*found = h;
      else  /* This is indeed a duplicate.
	       Set this label equal to that label.  */
	{
	  fputs ("\t.set ", file);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d,", labelno);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
				   found)->labelno));
	  return;
	}
    }

  /* If we're going to put a double constant in the TOC, make sure it's
     aligned properly when strict alignment is on.  */
  if (GET_CODE (x) == CONST_DOUBLE
      && STRICT_ALIGNMENT
      && GET_MODE_BITSIZE (mode) >= 64
      && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
    ASM_OUTPUT_ALIGN (file, 3);
  }

  (*targetm.asm_out.internal_label) (file, "LC", labelno);

  /* Handle FP constants specially.  Note that if we have a minimal
     TOC, things we put here aren't actually in the TOC, so we can allow
     FP constants.  */
  if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
    {
      /* 128-bit long double: four 32-bit target words in k[].  */
      REAL_VALUE_TYPE rv;
      long k[4];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff,
		     k[2] & 0xffffffff, k[3] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff,
		   k[2] & 0xffffffff, k[3] & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff,
		     k[2] & 0xffffffff, k[3] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff,
		   k[2] & 0xffffffff, k[3] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      /* 64-bit double: two 32-bit target words in k[].  */
      REAL_VALUE_TYPE rv;
      long k[2];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      /* 32-bit float: a single target word in l.  */
      REAL_VALUE_TYPE rv;
      long l;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  /* Pad to a full doubleword with trailing zeros.  */
	  fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx\n", l & 0xffffffff);
	  return;
	}
    }
  else if (GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;

      /* Split the integer constant into low/high 32-bit halves,
	 sign-extending a CONST_INT into HIGH.  */
      if (GET_CODE (x) == CONST_DOUBLE)
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}
      else
#if HOST_BITS_PER_WIDE_INT == 32
	{
	  low = INTVAL (x);
	  high = (low & 0x80000000) ? ~0 : 0;
	}
#else
	{
	  low = INTVAL (x) & 0xffffffff;
	  high = (HOST_WIDE_INT) INTVAL (x) >> 32;
	}
#endif

      /* TOC entries are always Pmode-sized, but since this
	 is a bigendian machine then if we're putting smaller
	 integer constants in the TOC we have to pad them.
	 (This is still a win over putting the constants in
	 a separate constant pool, because then we'd have
	 to have both a TOC entry _and_ the actual constant.)

	 For a 32-bit target, CONST_INT values are loaded and shifted
	 entirely within `low' and can be stored in one TOC entry.  */

      if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
	abort ();/* It would be easy to make this work, but it doesn't now.  */

      if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
	{
	  /* Left-justify the value within the Pmode-sized entry.  */
#if HOST_BITS_PER_WIDE_INT == 32
	  lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
			 POINTER_SIZE, &low, &high, 0);
#else
	  low |= high << 32;
	  low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
	  high = (HOST_WIDE_INT) low >> 32;
	  low &= 0xffffffff;
#endif
	}

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc ID_%lx_%lx[TC],",
		     (long) high & 0xffffffff, (long) low & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   (long) high & 0xffffffff, (long) low & 0xffffffff);
	  return;
	}
      else
	{
	  if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
	    {
	      /* Constant wider than a pointer: emit both halves.  */
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc ID_%lx_%lx[TC],",
			 (long) high & 0xffffffff, (long) low & 0xffffffff);
	      fprintf (file, "0x%lx,0x%lx\n",
		       (long) high & 0xffffffff, (long) low & 0xffffffff);
	    }
	  else
	    {
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
	      fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
	    }
	  return;
	}
    }

  /* Not a recognized constant: X must be a (possibly offset) symbol
     or label reference.  */
  if (GET_CODE (x) == CONST)
    {
      if (GET_CODE (XEXP (x, 0)) != PLUS)
	abort ();

      base = XEXP (XEXP (x, 0), 0);
      offset = INTVAL (XEXP (XEXP (x, 0), 1));
    }

  if (GET_CODE (base) == SYMBOL_REF)
    name = XSTR (base, 0);
  else if (GET_CODE (base) == LABEL_REF)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
  else if (GET_CODE (base) == CODE_LABEL)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
  else
    abort ();

  real_name = (*targetm.strip_name_encoding) (name);
  if (TARGET_MINIMAL_TOC)
    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
  else
    {
      fprintf (file, "\t.tc %s", real_name);

      /* Encode a nonzero offset into the entry name: .N for negative,
	 .P for positive.  */
      if (offset < 0)
	fprintf (file, ".N%d", - offset);
      else if (offset)
	fprintf (file, ".P%d", offset);

      fputs ("[TC],", file);
    }

  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a TOC reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  if (VTABLE_NAME_P (name))
    {
      RS6000_OUTPUT_BASENAME (file, name);
      if (offset < 0)
	fprintf (file, "%d", offset);
      else if (offset > 0)
	fprintf (file, "+%d", offset);
    }
  else
    output_addr_const (file, x);
  putc ('\n', file);
}
11897 \f
/* Output an assembler pseudo-op to write an ASCII string of N characters
   starting at P to FILE.

   On the RS/6000, we have to do this using the .byte operation and
   write out special characters outside the quoted string.
   Also, the assembler is broken; very long strings are truncated,
   so we must artificially break them up early.  */

void
output_ascii (file, p, n)
     FILE *file;
     const char *p;
     int n;
{
  /* Prefix emitted before the next printable character, if we are not
     already inside a quoted string (NULL while a quote is open).  */
  const char *open_quote = "\t.byte \"";
  /* Prefix emitted before the next decimal .byte operand.  */
  const char *separator = "\t.byte ";
  /* Text that closes whatever construct is currently open.  */
  const char *terminator = NULL;
  int quoted_count = 0;
  int i;

  for (i = 0; i < n; i++)
    {
      char c = *p++;

      if (c >= ' ' && c < 0177)
	{
	  /* Printable character: emit it inside a quoted string.  */
	  if (open_quote)
	    fputs (open_quote, file);
	  putc (c, file);

	  /* Write two quotes to get one.  */
	  if (c == '"')
	    {
	      putc (c, file);
	      ++quoted_count;
	    }

	  open_quote = NULL;
	  separator = "\"\n\t.byte ";
	  terminator = "\"\n";
	  ++quoted_count;

	  /* The native assembler truncates overly long strings, so
	     close the quote and start a fresh one well before that.  */
	  if (quoted_count >= 512)
	    {
	      fputs (terminator, file);

	      open_quote = "\t.byte \"";
	      separator = "\t.byte ";
	      terminator = NULL;
	      quoted_count = 0;
	    }
	}
      else
	{
	  /* Unprintable character: emit it as a decimal operand.  */
	  if (separator)
	    fputs (separator, file);
	  fprintf (file, "%d", c);

	  open_quote = "\n\t.byte \"";
	  separator = ", ";
	  terminator = "\n";
	  quoted_count = 0;
	}
    }

  /* Now close the string if we have written one.  Then end the line.  */
  if (terminator)
    fputs (terminator, file);
}
11967 \f
/* Generate a unique section name for FILENAME for a section type
   represented by SECTION_DESC.  Output goes into BUF.

   SECTION_DESC can be any string, as long as it is different for each
   possible section type.

   We name the section in the same manner as xlc.  The name begins with an
   underscore followed by the filename (after stripping any leading directory
   names) with the last period replaced by the string SECTION_DESC.  If
   FILENAME does not contain a period, SECTION_DESC is appended to the end of
   the name.

   NOTE(review): the "last period" is located in the whole of FILENAME,
   so a period inside a directory component suppresses the replacement
   in the base name; this preserves the historical behavior.  */

void
rs6000_gen_section_name (buf, filename, section_desc)
     char **buf;
     const char *filename;
     const char *section_desc;
{
  const char *scan;
  const char *basename = filename;
  const char *dot = 0;
  char *out;
  int bufsize;

  /* Find the start of the base name and the last period.  */
  for (scan = filename; *scan; scan++)
    {
      if (*scan == '/')
	basename = scan + 1;
      else if (*scan == '.')
	dot = scan;
    }

  /* Room for '_', the filtered base name, SECTION_DESC and a NUL.  */
  bufsize = strlen (basename) + strlen (section_desc) + 2;
  *buf = (char *) xmalloc (bufsize);

  out = *buf;
  *out++ = '_';

  /* Copy alphanumeric characters of the base name, substituting
     SECTION_DESC for the last period.  */
  for (scan = basename; *scan; scan++)
    {
      if (scan == dot)
	{
	  strcpy (out, section_desc);
	  out += strlen (section_desc);
	}
      else if (ISALNUM (*scan))
	*out++ = *scan;
    }

  if (dot == 0)
    strcpy (out, section_desc);
  else
    *out = '\0';
}
12022 \f
/* Emit profile function.  Emits the RTL call to mcount for ABIs that
   handle profiling at RTL-generation time (AIX and Darwin); the other
   ABIs emit textual asm in output_function_profiler below.  */

void
output_profile_hook (labelno)
     int labelno ATTRIBUTE_UNUSED;
{
  if (DEFAULT_ABI == ABI_AIX)
    {
#ifdef NO_PROFILE_COUNTERS
      /* No per-function counter word: call mcount with no arguments.  */
      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
#else
      char buf[30];
      const char *label_name;
      rtx fun;

      /* Pass the address of this function's LP.. counter label to
	 mcount.  */
      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
      fun = gen_rtx_SYMBOL_REF (Pmode, label_name);

      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
                         fun, Pmode);
#endif
    }
  else if (DEFAULT_ABI == ABI_DARWIN)
    {
      const char *mcount_name = RS6000_MCOUNT;
      int caller_addr_regno = LINK_REGISTER_REGNUM;

      /* Be conservative and always set this, at least for now.  */
      current_function_uses_pic_offset_table = 1;

#if TARGET_MACHO
      /* For PIC code, set up a stub and collect the caller's address
	 from r0, which is where the prologue puts it.  */
      if (flag_pic)
	{
	  mcount_name = machopic_stub_name (mcount_name);
	  if (current_function_uses_pic_offset_table)
	    caller_addr_regno = 0;
	}
#endif
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
			 0, VOIDmode, 1,
			 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
    }
}
12069
/* Write function profiler code.  Emits the textual asm that stores LR
   and calls mcount for the V.4 and AIX_NODESC ABIs; AIX and Darwin do
   this in output_profile_hook instead.  LABELNO numbers this
   function's LP.. counter label.  */

void
output_function_profiler (file, labelno)
  FILE *file;
  int labelno;
{
  char buf[100];
  /* Stack offset at which the caller's LR is saved; see the stw below.  */
  int save_lr = 8;

  ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_V4:
      save_lr = 4;
      /* Fall through.  */

    case ABI_AIX_NODESC:
      if (!TARGET_32BIT)
	{
	  warning ("no profiling of 64-bit code for this ABI");
	  return;
	}
      fprintf (file, "\tmflr %s\n", reg_names[0]);
      if (flag_pic == 1)
	{
	  /* -fpic: load the counter label's address through the GOT.  */
	  fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
	  asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@got(%s)\n", reg_names[12]);
	}
      else if (flag_pic > 1)
	{
	  /* -fPIC: compute the label's address PC-relatively via an
	     inline word holding its displacement.  */
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  /* Now, we need to get the address of the label.  */
	  fputs ("\tbl 1f\n\t.long ", file);
	  assemble_name (file, buf);
	  fputs ("-.\n1:", file);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
	  asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
		       reg_names[0], reg_names[11]);
	  asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
		       reg_names[0], reg_names[0], reg_names[11]);
	}
      else
	{
	  /* Non-PIC: materialize the label's absolute address with a
	     high/low pair.  */
	  asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
	  assemble_name (file, buf);
	  fputs ("@ha\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@l(%s)\n", reg_names[12]);
	}

      if (current_function_needs_context && DEFAULT_ABI == ABI_AIX_NODESC)
	{
	  /* Preserve the static chain register across the mcount call.  */
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[STATIC_CHAIN_REGNUM],
		       12, reg_names[1]);
	  fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	  asm_fprintf (file, "\t{l|lwz} %s,%d(%s)\n",
		       reg_names[STATIC_CHAIN_REGNUM],
		       12, reg_names[1]);
	}
      else
	/* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH.  */
	fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
      break;

    case ABI_AIX:
    case ABI_DARWIN:
      /* Don't do anything, done in output_profile_hook ().  */
      break;
    }
}
12154
12155 /* Adjust the cost of a scheduling dependency. Return the new cost of
12156 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
12157
12158 static int
12159 rs6000_adjust_cost (insn, link, dep_insn, cost)
12160 rtx insn;
12161 rtx link;
12162 rtx dep_insn ATTRIBUTE_UNUSED;
12163 int cost;
12164 {
12165 if (! recog_memoized (insn))
12166 return 0;
12167
12168 if (REG_NOTE_KIND (link) != 0)
12169 return 0;
12170
12171 if (REG_NOTE_KIND (link) == 0)
12172 {
12173 /* Data dependency; DEP_INSN writes a register that INSN reads
12174 some cycles later. */
12175 switch (get_attr_type (insn))
12176 {
12177 case TYPE_JMPREG:
12178 /* Tell the first scheduling pass about the latency between
12179 a mtctr and bctr (and mtlr and br/blr). The first
12180 scheduling pass will not know about this latency since
12181 the mtctr instruction, which has the latency associated
12182 to it, will be generated by reload. */
12183 return TARGET_POWER ? 5 : 4;
12184 case TYPE_BRANCH:
12185 /* Leave some extra cycles between a compare and its
12186 dependent branch, to inhibit expensive mispredicts. */
12187 if ((rs6000_cpu_attr == CPU_PPC603
12188 || rs6000_cpu_attr == CPU_PPC604
12189 || rs6000_cpu_attr == CPU_PPC604E
12190 || rs6000_cpu_attr == CPU_PPC620
12191 || rs6000_cpu_attr == CPU_PPC630
12192 || rs6000_cpu_attr == CPU_PPC750
12193 || rs6000_cpu_attr == CPU_PPC7400
12194 || rs6000_cpu_attr == CPU_PPC7450
12195 || rs6000_cpu_attr == CPU_POWER4)
12196 && recog_memoized (dep_insn)
12197 && (INSN_CODE (dep_insn) >= 0)
12198 && (get_attr_type (dep_insn) == TYPE_COMPARE
12199 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
12200 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
12201 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL))
12202 return cost + 2;
12203 default:
12204 break;
12205 }
12206 /* Fall out to return default cost. */
12207 }
12208
12209 return cost;
12210 }
12211
/* A C statement (sans semicolon) to update the integer scheduling
   priority INSN_PRIORITY (INSN).  Reduce the priority to execute the
   INSN earlier, increase the priority to execute INSN later.  Do not
   define this macro if you do not need to adjust the scheduling
   priorities of insns.  */

static int
rs6000_adjust_priority (insn, priority)
     rtx insn ATTRIBUTE_UNUSED;
     int priority;
{
  /* On machines (like the 750) which have asymmetric integer units,
     where one integer unit can do multiply and divides and the other
     can't, reduce the priority of multiply/divide so it is scheduled
     before other integer operations.  */

  /* The experiment below (including its stderr debugging output) is
     disabled under #if 0 and retained for reference; this hook
     currently returns PRIORITY unchanged.  */
#if 0
  if (! INSN_P (insn))
    return priority;

  if (GET_CODE (PATTERN (insn)) == USE)
    return priority;

  switch (rs6000_cpu_attr) {
  case CPU_PPC750:
    switch (get_attr_type (insn))
      {
      default:
	break;

      case TYPE_IMUL:
      case TYPE_IDIV:
	fprintf (stderr, "priority was %#x (%d) before adjustment\n",
		 priority, priority);
	if (priority >= 0 && priority < 0x01000000)
	  priority >>= 3;
	break;
      }
  }
#endif

  return priority;
}
12255
12256 /* Return how many instructions the machine can issue per cycle. */
12257
12258 static int
12259 rs6000_issue_rate ()
12260 {
12261 switch (rs6000_cpu_attr) {
12262 case CPU_RIOS1: /* ? */
12263 case CPU_RS64A:
12264 case CPU_PPC601: /* ? */
12265 case CPU_PPC7450:
12266 return 3;
12267 case CPU_PPC603:
12268 case CPU_PPC750:
12269 case CPU_PPC7400:
12270 return 2;
12271 case CPU_RIOS2:
12272 case CPU_PPC604:
12273 case CPU_PPC604E:
12274 case CPU_PPC620:
12275 case CPU_PPC630:
12276 case CPU_POWER4:
12277 return 4;
12278 default:
12279 return 1;
12280 }
12281 }
12282
12283 \f
12284 /* Length in units of the trampoline for entering a nested function. */
12285
12286 int
12287 rs6000_trampoline_size ()
12288 {
12289 int ret = 0;
12290
12291 switch (DEFAULT_ABI)
12292 {
12293 default:
12294 abort ();
12295
12296 case ABI_AIX:
12297 ret = (TARGET_32BIT) ? 12 : 24;
12298 break;
12299
12300 case ABI_DARWIN:
12301 case ABI_V4:
12302 case ABI_AIX_NODESC:
12303 ret = (TARGET_32BIT) ? 40 : 48;
12304 break;
12305 }
12306
12307 return ret;
12308 }
12309
/* Emit RTL insns to initialize the variable parts of a trampoline.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.
   ADDR is an RTX for the address of the trampoline itself.  */

void
rs6000_initialize_trampoline (addr, fnaddr, cxt)
     rtx addr;
     rtx fnaddr;
     rtx cxt;
{
  enum machine_mode pmode = Pmode;
  /* Bytes per pointer-sized word.  */
  int regsize = (TARGET_32BIT) ? 4 : 8;
  rtx ctx_reg = force_reg (pmode, cxt);

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

/* Macros to shorten the code expansions below.  */
#define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
#define MEM_PLUS(addr,offset) \
  gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))

    /* Under AIX, just build the 3 word function descriptor */
    case ABI_AIX:
      {
	rtx fn_reg = gen_reg_rtx (pmode);
	rtx toc_reg = gen_reg_rtx (pmode);
	/* Copy the code address and TOC pointer out of FNADDR's
	   descriptor, then store them plus the static chain into the
	   trampoline's descriptor at ADDR.  */
	emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
	emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
	emit_move_insn (MEM_DEREF (addr), fn_reg);
	emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
	emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
      }
      break;

    /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
    case ABI_DARWIN:
    case ABI_V4:
    case ABI_AIX_NODESC:
      emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
			 FALSE, VOIDmode, 4,
			 addr, pmode,
			 GEN_INT (rs6000_trampoline_size ()), SImode,
			 fnaddr, pmode,
			 ctx_reg, pmode);
      break;
    }

  return;
}
12362
12363 \f
/* Table of valid machine attributes.  Both attributes share a single
   handler, which only validates that the attribute is applied to a
   function type.  */

const struct attribute_spec rs6000_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "longcall",  0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  /* Sentinel terminating the table.  */
  { NULL,        0, 0, false, false, false, NULL }
};
12373
12374 /* Handle a "longcall" or "shortcall" attribute; arguments as in
12375 struct attribute_spec.handler. */
12376
12377 static tree
12378 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
12379 tree *node;
12380 tree name;
12381 tree args ATTRIBUTE_UNUSED;
12382 int flags ATTRIBUTE_UNUSED;
12383 bool *no_add_attrs;
12384 {
12385 if (TREE_CODE (*node) != FUNCTION_TYPE
12386 && TREE_CODE (*node) != FIELD_DECL
12387 && TREE_CODE (*node) != TYPE_DECL)
12388 {
12389 warning ("`%s' attribute only applies to functions",
12390 IDENTIFIER_POINTER (name));
12391 *no_add_attrs = true;
12392 }
12393
12394 return NULL_TREE;
12395 }
12396
12397 /* Set longcall attributes on all functions declared when
12398 rs6000_default_long_calls is true. */
12399 static void
12400 rs6000_set_default_type_attributes (type)
12401 tree type;
12402 {
12403 if (rs6000_default_long_calls
12404 && (TREE_CODE (type) == FUNCTION_TYPE
12405 || TREE_CODE (type) == METHOD_TYPE))
12406 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
12407 NULL_TREE,
12408 TYPE_ATTRIBUTES (type));
12409 }
12410
12411 /* Return a reference suitable for calling a function with the
12412 longcall attribute. */
12413
12414 struct rtx_def *
12415 rs6000_longcall_ref (call_ref)
12416 rtx call_ref;
12417 {
12418 const char *call_name;
12419 tree node;
12420
12421 if (GET_CODE (call_ref) != SYMBOL_REF)
12422 return call_ref;
12423
12424 /* System V adds '.' to the internal name, so skip them. */
12425 call_name = XSTR (call_ref, 0);
12426 if (*call_name == '.')
12427 {
12428 while (*call_name == '.')
12429 call_name++;
12430
12431 node = get_identifier (call_name);
12432 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
12433 }
12434
12435 return force_reg (Pmode, call_ref);
12436 }
12437
12438 \f
12439 #ifdef USING_ELFOS_H
12440
12441 /* A C statement or statements to switch to the appropriate section
12442 for output of RTX in mode MODE. You can assume that RTX is some
12443 kind of constant in RTL. The argument MODE is redundant except in
12444 the case of a `const_int' rtx. Select the section by calling
12445 `text_section' or one of the alternatives for other sections.
12446
12447 Do not define this macro if you put all constants in the read-only
12448 data section. */
12449
12450 static void
12451 rs6000_elf_select_rtx_section (mode, x, align)
12452 enum machine_mode mode;
12453 rtx x;
12454 unsigned HOST_WIDE_INT align;
12455 {
12456 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
12457 toc_section ();
12458 else
12459 default_elf_select_rtx_section (mode, x, align);
12460 }
12461
12462 /* A C statement or statements to switch to the appropriate
12463 section for output of DECL. DECL is either a `VAR_DECL' node
12464 or a constant of some sort. RELOC indicates whether forming
12465 the initial value of DECL requires link-time relocations. */
12466
12467 static void
12468 rs6000_elf_select_section (decl, reloc, align)
12469 tree decl;
12470 int reloc;
12471 unsigned HOST_WIDE_INT align;
12472 {
12473 default_elf_select_section_1 (decl, reloc, align,
12474 flag_pic || DEFAULT_ABI == ABI_AIX);
12475 }
12476
12477 /* A C statement to build up a unique section name, expressed as a
12478 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
12479 RELOC indicates whether the initial value of EXP requires
12480 link-time relocations. If you do not define this macro, GCC will use
12481 the symbol name prefixed by `.' as the section name. Note - this
12482 macro can now be called for uninitialized data items as well as
12483 initialized data and functions. */
12484
12485 static void
12486 rs6000_elf_unique_section (decl, reloc)
12487 tree decl;
12488 int reloc;
12489 {
12490 default_unique_section_1 (decl, reloc,
12491 flag_pic || DEFAULT_ABI == ABI_AIX);
12492 }
12493
12494 \f
12495 /* If we are referencing a function that is static or is known to be
12496 in this file, make the SYMBOL_REF special. We can use this to indicate
12497 that we can branch to this function without emitting a no-op after the
12498 call. For real AIX calling sequences, we also replace the
12499 function name with the real name (1 or 2 leading .'s), rather than
12500 the function descriptor name. This saves a lot of overriding code
12501 to read the prefixes. */
12502
12503 static void
12504 rs6000_elf_encode_section_info (decl, first)
12505 tree decl;
12506 int first;
12507 {
12508 if (!first)
12509 return;
12510
12511 if (TREE_CODE (decl) == FUNCTION_DECL)
12512 {
12513 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
12514 if ((*targetm.binds_local_p) (decl))
12515 SYMBOL_REF_FLAG (sym_ref) = 1;
12516
12517 if (DEFAULT_ABI == ABI_AIX)
12518 {
12519 size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
12520 size_t len2 = strlen (XSTR (sym_ref, 0));
12521 char *str = alloca (len1 + len2 + 1);
12522 str[0] = '.';
12523 str[1] = '.';
12524 memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);
12525
12526 XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
12527 }
12528 }
12529 else if (rs6000_sdata != SDATA_NONE
12530 && DEFAULT_ABI == ABI_V4
12531 && TREE_CODE (decl) == VAR_DECL)
12532 {
12533 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
12534 int size = int_size_in_bytes (TREE_TYPE (decl));
12535 tree section_name = DECL_SECTION_NAME (decl);
12536 const char *name = (char *)0;
12537 int len = 0;
12538
12539 if ((*targetm.binds_local_p) (decl))
12540 SYMBOL_REF_FLAG (sym_ref) = 1;
12541
12542 if (section_name)
12543 {
12544 if (TREE_CODE (section_name) == STRING_CST)
12545 {
12546 name = TREE_STRING_POINTER (section_name);
12547 len = TREE_STRING_LENGTH (section_name);
12548 }
12549 else
12550 abort ();
12551 }
12552
12553 if ((size > 0 && size <= g_switch_value)
12554 || (name
12555 && ((len == sizeof (".sdata") - 1
12556 && strcmp (name, ".sdata") == 0)
12557 || (len == sizeof (".sdata2") - 1
12558 && strcmp (name, ".sdata2") == 0)
12559 || (len == sizeof (".sbss") - 1
12560 && strcmp (name, ".sbss") == 0)
12561 || (len == sizeof (".sbss2") - 1
12562 && strcmp (name, ".sbss2") == 0)
12563 || (len == sizeof (".PPC.EMB.sdata0") - 1
12564 && strcmp (name, ".PPC.EMB.sdata0") == 0)
12565 || (len == sizeof (".PPC.EMB.sbss0") - 1
12566 && strcmp (name, ".PPC.EMB.sbss0") == 0))))
12567 {
12568 size_t len = strlen (XSTR (sym_ref, 0));
12569 char *str = alloca (len + 2);
12570
12571 str[0] = '@';
12572 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
12573 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
12574 }
12575 }
12576 }
12577
/* Return STR with any leading '*' or '@' encoding characters
   removed; these were added by encode_section_info and are not part
   of the real symbol name.  */

static const char *
rs6000_elf_strip_name_encoding (str)
     const char *str;
{
  const char *p;

  for (p = str; *p == '@' || *p == '*'; p++)
    ;
  return p;
}
12586
12587 static bool
12588 rs6000_elf_in_small_data_p (decl)
12589 tree decl;
12590 {
12591 if (rs6000_sdata == SDATA_NONE)
12592 return false;
12593
12594 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
12595 {
12596 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
12597 if (strcmp (section, ".sdata") == 0
12598 || strcmp (section, ".sdata2") == 0
12599 || strcmp (section, ".sbss") == 0)
12600 return true;
12601 }
12602 else
12603 {
12604 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
12605
12606 if (size > 0
12607 && size <= g_switch_value
12608 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
12609 return true;
12610 }
12611
12612 return false;
12613 }
12614
12615 #endif /* USING_ELFOS_H */
12616
12617 \f
12618 /* Return a REG that occurs in ADDR with coefficient 1.
12619 ADDR can be effectively incremented by incrementing REG.
12620
12621 r0 is special and we must not select it as an address
12622 register by this routine since our caller will try to
12623 increment the returned register via an "la" instruction. */
12624
12625 struct rtx_def *
12626 find_addr_reg (addr)
12627 rtx addr;
12628 {
12629 while (GET_CODE (addr) == PLUS)
12630 {
12631 if (GET_CODE (XEXP (addr, 0)) == REG
12632 && REGNO (XEXP (addr, 0)) != 0)
12633 addr = XEXP (addr, 0);
12634 else if (GET_CODE (XEXP (addr, 1)) == REG
12635 && REGNO (XEXP (addr, 1)) != 0)
12636 addr = XEXP (addr, 1);
12637 else if (CONSTANT_P (XEXP (addr, 0)))
12638 addr = XEXP (addr, 1);
12639 else if (CONSTANT_P (XEXP (addr, 1)))
12640 addr = XEXP (addr, 0);
12641 else
12642 abort ();
12643 }
12644 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
12645 return addr;
12646 abort ();
12647 }
12648
/* Diagnose an address we cannot handle by passing OP to
   fatal_insn with a "bad address" message.  */

void
rs6000_fatal_bad_address (op)
     rtx op;
{
  fatal_insn ("bad address", op);
}
12655
12656 #if TARGET_MACHO
12657
#if 0
/* Returns 1 if OP is either a symbol reference or a sum of a symbol
   reference and a constant.  (Currently compiled out.)  */

int
symbolic_operand (op)
     rtx op;
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;
    case CONST:
      op = XEXP (op, 0);
      /* Accept (const (symbol_ref)) or (const (plus (symbol_ref or
	 label_ref) (const_int))).  '&&' binds tighter than '||', so
	 the grouping is the intended one despite the missing parens.
	 NOTE(review): if OP is neither a SYMBOL_REF nor a PLUS here,
	 XEXP (op, 0) reads an arbitrary operand -- presumably
	 unreachable for well-formed CONSTs; verify before re-enabling
	 this code.  */
      return (GET_CODE (op) == SYMBOL_REF ||
	      (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
	       || GET_CODE (XEXP (op, 0)) == LABEL_REF)
	      && GET_CODE (XEXP (op, 1)) == CONST_INT);
    default:
      return 0;
    }
}
#endif
12682
12683 #ifdef RS6000_LONG_BRANCH
12684
12685 static tree stub_list = 0;
12686
12687 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
12688 procedure calls to the linked list. */
12689
12690 void
12691 add_compiler_stub (label_name, function_name, line_number)
12692 tree label_name;
12693 tree function_name;
12694 int line_number;
12695 {
12696 tree stub = build_tree_list (function_name, label_name);
12697 TREE_TYPE (stub) = build_int_2 (line_number, 0);
12698 TREE_CHAIN (stub) = stub_list;
12699 stub_list = stub;
12700 }
12701
/* Accessors for the TREE_LIST nodes chained on stub_list: the stub's
   label (TREE_VALUE), the target function (TREE_PURPOSE), and the
   source line stashed in the node's type by add_compiler_stub.  */
#define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
#define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
#define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
12705
/* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
   handling procedure calls from the linked list and initializes the
   linked list.  Stubs are only emitted for non-PIC code; each one
   loads the target's address into r12 and branches through CTR.
   NOTE(review): tmp_buf/label_buf are fixed 256-byte buffers filled
   with strcpy/strcat -- a very long symbol name would overflow them;
   confirm symbol lengths are bounded upstream.  */

void
output_compiler_stub ()
{
  char tmp_buf[256];
  char label_buf[256];
  tree stub;

  if (!flag_pic)
    for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
      {
	/* Emit the stub's own label.  */
	fprintf (asm_out_file,
		 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));

#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
	if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	  fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */

	/* A leading '*' means the name is already assembler-ready;
	   otherwise prepend '_' to form the assembler name.  */
	if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
	  strcpy (label_buf,
		  IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
	else
	  {
	    label_buf[0] = '_';
	    strcpy (label_buf+1,
		    IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
	  }

	/* lis/ori the full 32-bit target address into r12, move it to
	   CTR, and branch.  */
	strcpy (tmp_buf, "lis r12,hi16(");
	strcat (tmp_buf, label_buf);
	strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
	strcat (tmp_buf, label_buf);
	strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
	output_asm_insn (tmp_buf, 0);

#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
	if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	  fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
      }

  /* Reset the list so stubs are not emitted twice.  */
  stub_list = 0;
}
12753
12754 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
12755 already there or not. */
12756
12757 int
12758 no_previous_def (function_name)
12759 tree function_name;
12760 {
12761 tree stub;
12762 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
12763 if (function_name == STUB_FUNCTION_NAME (stub))
12764 return 0;
12765 return 1;
12766 }
12767
12768 /* GET_PREV_LABEL gets the label name from the previous definition of
12769 the function. */
12770
12771 tree
12772 get_prev_label (function_name)
12773 tree function_name;
12774 {
12775 tree stub;
12776 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
12777 if (function_name == STUB_FUNCTION_NAME (stub))
12778 return STUB_LABEL_NAME (stub);
12779 return 0;
12780 }
12781
/* INSN is either a function call or a millicode call.  It may have an
   unconditional jump in its delay slot.

   CALL_DEST is the routine we are calling.

   Returns the assembler template for the call: under -mlong-branch,
   non-PIC calls to a SYMBOL_REF go through a compiler stub ("jbsr"),
   creating the stub on first use; everything else is a plain "bl".
   The result lives in a static buffer and is only valid until the
   next call.  */

char *
output_call (insn, call_dest, operand_number)
     rtx insn;
     rtx call_dest;
     int operand_number;
{
  static char buf[256];
  if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
    {
      tree labelname;
      tree funname = get_identifier (XSTR (call_dest, 0));

      if (no_previous_def (funname))
	{
	  int line_number = 0;
	  rtx label_rtx = gen_label_rtx ();
	  char *label_buf, temp_buf[256];
	  ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
				       CODE_LABEL_NUMBER (label_rtx));
	  /* Drop the '*' "already assembled" marker if present.  */
	  label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
	  labelname = get_identifier (label_buf);
	  /* Walk backwards to the nearest NOTE to recover a source
	     line number for the stub's debug info; 0 if none.  */
	  for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
	  if (insn)
	    line_number = NOTE_LINE_NUMBER (insn);
	  add_compiler_stub (labelname, funname, line_number);
	}
      else
	labelname = get_prev_label (funname);

      /* "%.246s" bounds the label text so BUF cannot overflow.  */
      sprintf (buf, "jbsr %%z%d,%.246s",
	       operand_number, IDENTIFIER_POINTER (labelname));
      return buf;
    }
  else
    {
      sprintf (buf, "bl %%z%d", operand_number);
      return buf;
    }
}
12826
12827 #endif /* RS6000_LONG_BRANCH */
12828
/* Build a local label of the form "LN$SYMBOL" in BUF, keeping an
   existing leading quote or adding quotes when the symbol needs them.
   LENGTH is accepted for symmetry with the other GEN_* macros but is
   not used here; callers size BUF as LENGTH + 32, which leaves room
   for the "LN$" decoration.  do/while (0) makes the multi-statement
   macro behave as a single statement.  */
#define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N)		\
  do {								\
    const char *const symbol_ = (SYMBOL);			\
    char *buffer_ = (BUF);					\
    if (symbol_[0] == '"')					\
      {								\
	sprintf(buffer_, "\"L%d$%s", (N), symbol_+1);		\
      }								\
    else if (name_needs_quotes(symbol_))			\
      {								\
	sprintf(buffer_, "\"L%d$%s\"", (N), symbol_);		\
      }								\
    else							\
      {								\
	sprintf(buffer_, "L%d$%s", (N), symbol_);		\
      }								\
  } while (0)
12846
12847
/* Generate PIC and indirect symbol stubs.  For SYMB, emit a stub
   labelled STUB that loads the symbol's lazy pointer into r12 and
   branches through CTR, followed by the lazy-pointer data entry
   which initially points at dyld_stub_binding_helper.  */

void
machopic_output_stub (file, symb, stub)
     FILE *file;
     const char *symb, *stub;
{
  unsigned int length;
  char *symbol_name, *lazy_ptr_name;
  char *local_label_0;
  static int label = 0;

  /* Lose our funky encoding stuff so it doesn't contaminate the stub.  */
  symb = (*targetm.strip_name_encoding) (symb);

  /* NOTE(review): `label' is incremented per stub but its value is
     never used -- the local label below is always generated with 0.
     Presumably a leftover; confirm before relying on uniqueness.  */
  label += 1;

  /* +32 leaves room for the decorations the GEN_* macros add.  */
  length = strlen (symb);
  symbol_name = alloca (length + 32);
  GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);

  lazy_ptr_name = alloca (length + 32);
  GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);

  local_label_0 = alloca (length + 32);
  GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);

  if (flag_pic == 2)
    machopic_picsymbol_stub1_section ();
  else
    machopic_symbol_stub1_section ();
  fprintf (file, "\t.align 2\n");

  fprintf (file, "%s:\n", stub);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

  if (flag_pic == 2)
    {
      /* PIC stub: take the PC via bcl, form the lazy pointer's
	 address relative to it in r11, load the pointer into r12
	 (lwzu also leaves the slot address in r11), and branch.  */
      fprintf (file, "\tmflr r0\n");
      fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
      fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
      fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtlr r0\n");
      fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\tbctr\n");
    }
  else
    /* NOTE(review): this writes the message into the assembly output
       rather than issuing a diagnostic; presumably the non-PIC case
       is never requested -- verify.  */
    fprintf (file, "non-pure not supported\n");

  /* Emit the lazy pointer itself.  */
  machopic_lazy_symbol_ptr_section ();
  fprintf (file, "%s:\n", lazy_ptr_name);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
  fprintf (file, "\t.long dyld_stub_binding_helper\n");
}
12905
/* Legitimize PIC addresses.  If the address is already
   position-independent, we return ORIG.  Newly generated
   position-independent addresses go into a reg.  This is REG if non
   zero, otherwise we allocate register(s) as necessary.  */

/* Nonzero iff X (a CONST_INT) fits in a signed 16-bit immediate.  */
#define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)

rtx
rs6000_machopic_legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  rtx base, offset;

  /* Only allocate a scratch while creating new pseudos is legal.  */
  if (reg == NULL && ! reload_in_progress && ! reload_completed)
    reg = gen_reg_rtx (Pmode);

  if (GET_CODE (orig) == CONST)
    {
      /* (const (plus pic_offset_table_rtx ...)) is already PIC.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      /* Legitimize both halves of the sum recursively.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  base =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
						    Pmode, reg);
	  offset =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
						    Pmode, reg);
	}
      else
	abort ();

      if (GET_CODE (offset) == CONST_INT)
	{
	  if (SMALL_INT (offset))
	    return plus_constant (base, INTVAL (offset));
	  else if (! reload_in_progress && ! reload_completed)
	    offset = force_reg (Pmode, offset);
	  else
	    {
	      /* During reload we cannot make new pseudos, so push the
		 whole constant to memory instead.  */
 	      rtx mem = force_const_mem (Pmode, orig);
	      return machopic_legitimize_pic_address (mem, Pmode, reg);
	    }
	}
      return gen_rtx (PLUS, Pmode, base, offset);
    }

  /* Fall back on generic machopic code.  */
  return machopic_legitimize_pic_address (orig, mode, reg);
}
12960
/* This is just a placeholder to make linking work without having to
   add this to the generic Darwin EXTRA_SECTIONS.  If -mcall-aix is
   ever needed for Darwin (not too likely!) this would have to get a
   real definition.  */

void
toc_section ()
{
  /* Intentionally empty -- see the comment above.  */
}
12970
12971 #endif /* TARGET_MACHO */
12972
12973 #if TARGET_ELF
12974 static unsigned int
12975 rs6000_elf_section_type_flags (decl, name, reloc)
12976 tree decl;
12977 const char *name;
12978 int reloc;
12979 {
12980 unsigned int flags
12981 = default_section_type_flags_1 (decl, name, reloc,
12982 flag_pic || DEFAULT_ABI == ABI_AIX);
12983
12984 if (TARGET_RELOCATABLE)
12985 flags |= SECTION_WRITE;
12986
12987 return flags;
12988 }
12989
12990 /* Record an element in the table of global constructors. SYMBOL is
12991 a SYMBOL_REF of the function to be called; PRIORITY is a number
12992 between 0 and MAX_INIT_PRIORITY.
12993
12994 This differs from default_named_section_asm_out_constructor in
12995 that we have special handling for -mrelocatable. */
12996
12997 static void
12998 rs6000_elf_asm_out_constructor (symbol, priority)
12999 rtx symbol;
13000 int priority;
13001 {
13002 const char *section = ".ctors";
13003 char buf[16];
13004
13005 if (priority != DEFAULT_INIT_PRIORITY)
13006 {
13007 sprintf (buf, ".ctors.%.5u",
13008 /* Invert the numbering so the linker puts us in the proper
13009 order; constructors are run from right to left, and the
13010 linker sorts in increasing order. */
13011 MAX_INIT_PRIORITY - priority);
13012 section = buf;
13013 }
13014
13015 named_section_flags (section, SECTION_WRITE);
13016 assemble_align (POINTER_SIZE);
13017
13018 if (TARGET_RELOCATABLE)
13019 {
13020 fputs ("\t.long (", asm_out_file);
13021 output_addr_const (asm_out_file, symbol);
13022 fputs (")@fixup\n", asm_out_file);
13023 }
13024 else
13025 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13026 }
13027
13028 static void
13029 rs6000_elf_asm_out_destructor (symbol, priority)
13030 rtx symbol;
13031 int priority;
13032 {
13033 const char *section = ".dtors";
13034 char buf[16];
13035
13036 if (priority != DEFAULT_INIT_PRIORITY)
13037 {
13038 sprintf (buf, ".dtors.%.5u",
13039 /* Invert the numbering so the linker puts us in the proper
13040 order; constructors are run from right to left, and the
13041 linker sorts in increasing order. */
13042 MAX_INIT_PRIORITY - priority);
13043 section = buf;
13044 }
13045
13046 named_section_flags (section, SECTION_WRITE);
13047 assemble_align (POINTER_SIZE);
13048
13049 if (TARGET_RELOCATABLE)
13050 {
13051 fputs ("\t.long (", asm_out_file);
13052 output_addr_const (asm_out_file, symbol);
13053 fputs (")@fixup\n", asm_out_file);
13054 }
13055 else
13056 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13057 }
13058 #endif
13059
13060 #if TARGET_XCOFF
13061 static void
13062 rs6000_xcoff_asm_globalize_label (stream, name)
13063 FILE *stream;
13064 const char *name;
13065 {
13066 fputs (GLOBAL_ASM_OP, stream);
13067 RS6000_OUTPUT_BASENAME (stream, name);
13068 putc ('\n', stream);
13069 }
13070
13071 static void
13072 rs6000_xcoff_asm_named_section (name, flags)
13073 const char *name;
13074 unsigned int flags;
13075 {
13076 int smclass;
13077 static const char * const suffix[3] = { "PR", "RO", "RW" };
13078
13079 if (flags & SECTION_CODE)
13080 smclass = 0;
13081 else if (flags & SECTION_WRITE)
13082 smclass = 2;
13083 else
13084 smclass = 1;
13085
13086 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
13087 (flags & SECTION_CODE) ? "." : "",
13088 name, suffix[smclass], flags & SECTION_ENTSIZE);
13089 }
13090
13091 static void
13092 rs6000_xcoff_select_section (decl, reloc, align)
13093 tree decl;
13094 int reloc;
13095 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
13096 {
13097 if (decl_readonly_section_1 (decl, reloc, 1))
13098 {
13099 if (TREE_PUBLIC (decl))
13100 read_only_data_section ();
13101 else
13102 read_only_private_data_section ();
13103 }
13104 else
13105 {
13106 if (TREE_PUBLIC (decl))
13107 data_section ();
13108 else
13109 private_data_section ();
13110 }
13111 }
13112
13113 static void
13114 rs6000_xcoff_unique_section (decl, reloc)
13115 tree decl;
13116 int reloc ATTRIBUTE_UNUSED;
13117 {
13118 const char *name;
13119
13120 /* Use select_section for private and uninitialized data. */
13121 if (!TREE_PUBLIC (decl)
13122 || DECL_COMMON (decl)
13123 || DECL_INITIAL (decl) == NULL_TREE
13124 || DECL_INITIAL (decl) == error_mark_node
13125 || (flag_zero_initialized_in_bss
13126 && initializer_zerop (DECL_INITIAL (decl))))
13127 return;
13128
13129 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
13130 name = (*targetm.strip_name_encoding) (name);
13131 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
13132 }
13133
13134 /* Select section for constant in constant pool.
13135
13136 On RS/6000, all constants are in the private read-only data area.
13137 However, if this is being placed in the TOC it must be output as a
13138 toc entry. */
13139
13140 static void
13141 rs6000_xcoff_select_rtx_section (mode, x, align)
13142 enum machine_mode mode;
13143 rtx x;
13144 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
13145 {
13146 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
13147 toc_section ();
13148 else
13149 read_only_private_data_section ();
13150 }
13151
/* Remove any trailing [DS] or the like from the symbol name, along
   with a leading '*'.  A stripped copy is GC-allocated; otherwise
   NAME itself is returned.  */

static const char *
rs6000_xcoff_strip_name_encoding (name)
     const char *name;
{
  size_t len;

  if (*name == '*')
    name++;
  len = strlen (name);
  /* The "[XX]" mapping-class suffix is four characters.  Guard short
     names: the old code read name[len - 1] even for an empty string
     and computed the size_t expression len - 4, which wraps around
     for len < 4.  */
  if (len >= 4 && name[len - 1] == ']')
    return ggc_alloc_string (name, len - 4);
  return name;
}
13167
/* Section attributes.  AIX is always PIC.  */

static unsigned int
rs6000_xcoff_section_type_flags (decl, name, reloc)
     tree decl;
     const char *name;
     int reloc;
{
  unsigned int align;
  unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);

  /* Align to at least UNIT size.  */
  if (flags & SECTION_CODE)
    align = MIN_UNITS_PER_WORD;
  else
    /* Increase alignment of large objects if not already stricter.
       NOTE(review): DECL is dereferenced here without a null check;
       presumably this path is only reached with a real decl --
       confirm callers.  */
    align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
		 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
		 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);

  /* The log2 of the alignment rides in the SECTION_ENTSIZE bits,
     which rs6000_xcoff_asm_named_section prints after the csect.  */
  return flags | (exact_log2 (align) & SECTION_ENTSIZE);
}
13190
13191 #endif /* TARGET_XCOFF */
13192
13193 /* Note that this is also used for PPC64 Linux. */
13194
13195 static void
13196 rs6000_xcoff_encode_section_info (decl, first)
13197 tree decl;
13198 int first ATTRIBUTE_UNUSED;
13199 {
13200 if (TREE_CODE (decl) == FUNCTION_DECL
13201 && (*targetm.binds_local_p) (decl))
13202 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
13203 }
13204
/* Cross-module name binding.  For AIX and PPC64 Linux, which always are
   PIC, use private copy of flag_pic.  rs6000_flag_pic preserves the
   original -fpic/-fPIC setting for that decision.  */

static bool
rs6000_binds_local_p (decl)
     tree decl;
{
  return default_binds_local_p_1 (decl, flag_pic || rs6000_flag_pic);
}
13214
13215 /* A C expression returning the cost of moving data from a register of class
13216 CLASS1 to one of CLASS2. */
13217
13218 int
13219 rs6000_register_move_cost (mode, from, to)
13220 enum machine_mode mode;
13221 enum reg_class from, to;
13222 {
13223 /* Moves from/to GENERAL_REGS. */
13224 if (reg_classes_intersect_p (to, GENERAL_REGS)
13225 || reg_classes_intersect_p (from, GENERAL_REGS))
13226 {
13227 if (! reg_classes_intersect_p (to, GENERAL_REGS))
13228 from = to;
13229
13230 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
13231 return (rs6000_memory_move_cost (mode, from, 0)
13232 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
13233
13234 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift...*/
13235 else if (from == CR_REGS)
13236 return 4;
13237
13238 else
13239 /* A move will cost one instruction per GPR moved. */
13240 return 2 * HARD_REGNO_NREGS (0, mode);
13241 }
13242
13243 /* Moving between two similar registers is just one instruction. */
13244 else if (reg_classes_intersect_p (to, from))
13245 return mode == TFmode ? 4 : 2;
13246
13247 /* Everything else has to go through GENERAL_REGS. */
13248 else
13249 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
13250 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
13251 }
13252
13253 /* A C expressions returning the cost of moving data of MODE from a register to
13254 or from memory. */
13255
13256 int
13257 rs6000_memory_move_cost (mode, class, in)
13258 enum machine_mode mode;
13259 enum reg_class class;
13260 int in ATTRIBUTE_UNUSED;
13261 {
13262 if (reg_classes_intersect_p (class, GENERAL_REGS))
13263 return 4 * HARD_REGNO_NREGS (0, mode);
13264 else if (reg_classes_intersect_p (class, FLOAT_REGS))
13265 return 4 * HARD_REGNO_NREGS (32, mode);
13266 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
13267 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
13268 else
13269 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
13270 }
13271
13272 #include "gt-rs6000.h"
This page took 0.653909 seconds and 4 git commands to generate.