/* gcc/config/rs6000/rs6000.c (from gcc.git).  */
1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
5
6 This file is part of GNU CC.
7
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
12
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "rtl.h"
26 #include "regs.h"
27 #include "hard-reg-set.h"
28 #include "real.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "flags.h"
33 #include "recog.h"
34 #include "obstack.h"
35 #include "tree.h"
36 #include "expr.h"
37 #include "optabs.h"
38 #include "except.h"
39 #include "function.h"
40 #include "output.h"
41 #include "basic-block.h"
42 #include "integrate.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "hashtab.h"
46 #include "tm_p.h"
47 #include "target.h"
48 #include "target-def.h"
49 #include "langhooks.h"
50 #include "reload.h"
51
52 #ifndef TARGET_NO_PROTOTYPE
53 #define TARGET_NO_PROTOTYPE 0
54 #endif
55
56 #define min(A,B) ((A) < (B) ? (A) : (B))
57 #define max(A,B) ((A) > (B) ? (A) : (B))
58
59 /* Target cpu type */
60
/* Processor type selected for scheduling, set from the table below.  */
enum processor_type rs6000_cpu;

/* CPU-selection switches scanned by rs6000_override_options.  Entry 0
   is the configure-time default; the -mcpu= and -mtune= entries that
   follow override it.  set_tune_p/set_arch_p say whether the switch
   affects scheduling (tune), the instruction set (arch), or both.  */
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch name,	tune	arch */
  { (const char *)0,	"--with-cpu=",	1,	1 },
  { (const char *)0,	"-mcpu=",	1,	1 },
  { (const char *)0,	"-mtune=",	1,	0 },
};
69
/* Size of long double: the raw string from -mlong-double-, and the
   parsed type size in bits (64 or 128).  */
const char *rs6000_long_double_size_string;
int rs6000_long_double_type_size;

/* Whether -mabi=altivec has appeared.  */
int rs6000_altivec_abi;

/* Whether VRSAVE instructions should be generated.  */
int rs6000_altivec_vrsave;

/* String from -mvrsave= option.  */
const char *rs6000_altivec_vrsave_string;

/* Nonzero if we want SPE ABI extensions (-mabi=spe).  */
int rs6000_spe_abi;

/* Whether isel instructions should be generated.  */
int rs6000_isel;

/* Nonzero if we have FPRs.  */
int rs6000_fprs = 1;

/* String from -misel=.  */
const char *rs6000_isel_string;

/* Set to non-zero once AIX common-mode calls have been defined.  */
static int common_mode_defined;

/* Private copy of original value of flag_pic for ABI_AIX; flag_pic
   itself is cleared for that ABI in rs6000_override_options.  */
static int rs6000_flag_pic;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx rs6000_compare_op0, rs6000_compare_op1;
int rs6000_compare_fp_p;

/* Label number of label created for -mrelocatable, to call to so we can
   get the address of the GOT section.  */
int rs6000_pic_labelno;

#ifdef USING_ELFOS_H
/* Which abi to adhere to.  */
const char *rs6000_abi_name = RS6000_ABI_NAME;

/* Semantics of the small data area.  */
enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;

/* Which small data model to use (raw -msdata= string).  */
const char *rs6000_sdata_name = (char *)0;

/* Counter for labels which are to be placed in .fixup.  */
int fixuplabelno = 0;
#endif

/* ABI enumeration available for subtarget to use.  */
enum rs6000_abi rs6000_current_abi;

/* ABI string from -mabi= option.  */
const char *rs6000_abi_string;

/* Debug flags, parsed from -mdebug= in rs6000_override_options.  */
const char *rs6000_debug_name;
int rs6000_debug_stack;		/* debug stack applications */
int rs6000_debug_arg;		/* debug argument handling */

/* -mtraceback= argument string and its parsed value.  */
const char *rs6000_traceback_name;
static enum {
  traceback_default = 0,
  traceback_none,
  traceback_part,
  traceback_full
} rs6000_traceback;

/* Flag to say the TOC is initialized, and the buffer holding its
   assembler label name.  */
int toc_initialized;
char toc_label_name[10];

/* Alias set for saves and restores from the rs6000 stack.  */
static int rs6000_sr_alias_set;

/* Call distance, overridden by -mlongcall and #pragma longcall(1).
   The only place that looks at this is rs6000_set_default_type_attributes;
   everywhere else should rely on the presence or absence of a longcall
   attribute on the function declaration.  */
int rs6000_default_long_calls;
const char *rs6000_longcall_switch;
156
/* Description of one target builtin: the target-flag mask it is
   gated on, the insn used to expand it, its user-visible name, and
   its rs6000_builtins code.  */
struct builtin_description
{
  /* mask is not const because we're going to alter it below.  This
     nonsense will go away when we rewrite the -march infrastructure
     to give us more target flag bits.  */
  unsigned int mask;

  /* Insn used to expand the builtin.  */
  const enum insn_code icode;

  /* User-visible builtin name.  */
  const char *const name;

  /* Enum value identifying this builtin.  */
  const enum rs6000_builtins code;
};
167
168 static void rs6000_add_gc_roots PARAMS ((void));
169 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
170 static void validate_condition_mode
171 PARAMS ((enum rtx_code, enum machine_mode));
172 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
173 static void rs6000_maybe_dead PARAMS ((rtx));
174 static void rs6000_emit_stack_tie PARAMS ((void));
175 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
176 static void emit_frame_save PARAMS ((rtx, rtx, enum machine_mode,
177 unsigned int, int, int));
178 static rtx gen_frame_mem_offset PARAMS ((enum machine_mode, rtx, int));
179 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
180 static unsigned rs6000_hash_constant PARAMS ((rtx));
181 static unsigned toc_hash_function PARAMS ((const void *));
182 static int toc_hash_eq PARAMS ((const void *, const void *));
183 static int toc_hash_mark_entry PARAMS ((void **, void *));
184 static void toc_hash_mark_table PARAMS ((void *));
185 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
186 static struct machine_function * rs6000_init_machine_status PARAMS ((void));
187 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
188 #ifdef HAVE_GAS_HIDDEN
189 static void rs6000_assemble_visibility PARAMS ((tree, const char *));
190 #endif
191 static int rs6000_ra_ever_killed PARAMS ((void));
192 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
193 const struct attribute_spec rs6000_attribute_table[];
194 static void rs6000_set_default_type_attributes PARAMS ((tree));
195 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
196 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
197 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
198 HOST_WIDE_INT, HOST_WIDE_INT));
199 #if TARGET_ELF
200 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
201 int));
202 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
203 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
204 static void rs6000_elf_select_section PARAMS ((tree, int,
205 unsigned HOST_WIDE_INT));
206 static void rs6000_elf_unique_section PARAMS ((tree, int));
207 static void rs6000_elf_select_rtx_section PARAMS ((enum machine_mode, rtx,
208 unsigned HOST_WIDE_INT));
209 static void rs6000_elf_encode_section_info PARAMS ((tree, int))
210 ATTRIBUTE_UNUSED;
211 static const char *rs6000_elf_strip_name_encoding PARAMS ((const char *));
212 static bool rs6000_elf_in_small_data_p PARAMS ((tree));
213 #endif
214 #if TARGET_XCOFF
215 static void rs6000_xcoff_asm_globalize_label PARAMS ((FILE *, const char *));
216 static void rs6000_xcoff_asm_named_section PARAMS ((const char *, unsigned int));
217 static void rs6000_xcoff_select_section PARAMS ((tree, int,
218 unsigned HOST_WIDE_INT));
219 static void rs6000_xcoff_unique_section PARAMS ((tree, int));
220 static void rs6000_xcoff_select_rtx_section PARAMS ((enum machine_mode, rtx,
221 unsigned HOST_WIDE_INT));
222 static const char * rs6000_xcoff_strip_name_encoding PARAMS ((const char *));
223 static unsigned int rs6000_xcoff_section_type_flags PARAMS ((tree, const char *, int));
224 #endif
225 static void rs6000_xcoff_encode_section_info PARAMS ((tree, int))
226 ATTRIBUTE_UNUSED;
227 static bool rs6000_binds_local_p PARAMS ((tree));
228 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
229 static int rs6000_adjust_priority PARAMS ((rtx, int));
230 static int rs6000_issue_rate PARAMS ((void));
231
232 static void rs6000_init_builtins PARAMS ((void));
233 static rtx rs6000_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
234 static rtx rs6000_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
235 static rtx rs6000_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
236 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
237 static void altivec_init_builtins PARAMS ((void));
238 static void rs6000_common_init_builtins PARAMS ((void));
239
240 static void enable_mask_for_builtins PARAMS ((struct builtin_description *,
241 int, enum rs6000_builtins,
242 enum rs6000_builtins));
243 static void spe_init_builtins PARAMS ((void));
244 static rtx spe_expand_builtin PARAMS ((tree, rtx, bool *));
245 static rtx spe_expand_predicate_builtin PARAMS ((enum insn_code, tree, rtx));
246 static rtx spe_expand_evsel_builtin PARAMS ((enum insn_code, tree, rtx));
247 static int rs6000_emit_int_cmove PARAMS ((rtx, rtx, rtx, rtx));
248
249 static rtx altivec_expand_builtin PARAMS ((tree, rtx, bool *));
250 static rtx altivec_expand_ld_builtin PARAMS ((tree, rtx, bool *));
251 static rtx altivec_expand_st_builtin PARAMS ((tree, rtx, bool *));
252 static rtx altivec_expand_dst_builtin PARAMS ((tree, rtx, bool *));
253 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
254 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
255 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
256 static void rs6000_parse_abi_options PARAMS ((void));
257 static void rs6000_parse_vrsave_option PARAMS ((void));
258 static void rs6000_parse_isel_option PARAMS ((void));
259 static int first_altivec_reg_to_save PARAMS ((void));
260 static unsigned int compute_vrsave_mask PARAMS ((void));
261 static void is_altivec_return_reg PARAMS ((rtx, void *));
262 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
263 static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT));
264 static int easy_vector_constant PARAMS ((rtx));
265 \f
/* Default register names.  The order must match the hard register
   numbering; compare the labelled alt_reg_names table below:
   32 GPRs, 32 FPRs, mq/lr/ctr/ap, 8 CR fields, xer, 32 AltiVec
   registers, and vrsave.  */
char rs6000_reg_names[][8] =
{
  /* GPRs.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
  /* FPRs.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "mq", "lr", "ctr","ap",
  /* CR fields.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
    "xer",
  /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
  "vrsave"
};
287
#ifdef TARGET_REGNAMES
/* Alternate (symbolic) register names, copied over rs6000_reg_names
   in rs6000_override_options when -mregnames is given.  Must stay in
   the same order and have the same element count as that table.  */
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
  /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6", "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave"
};
#endif
310 \f
311 #ifndef MASK_STRICT_ALIGN
312 #define MASK_STRICT_ALIGN 0
313 #endif
314 \f
315 /* Initialize the GCC target structure. */
316 #undef TARGET_ATTRIBUTE_TABLE
317 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
318 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
319 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
320
321 #undef TARGET_ASM_ALIGNED_DI_OP
322 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
323
324 /* Default unaligned ops are only provided for ELF. Find the ops needed
325 for non-ELF systems. */
326 #ifndef OBJECT_FORMAT_ELF
327 #if TARGET_XCOFF
328 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
329 64-bit targets. */
330 #undef TARGET_ASM_UNALIGNED_HI_OP
331 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
332 #undef TARGET_ASM_UNALIGNED_SI_OP
333 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
334 #undef TARGET_ASM_UNALIGNED_DI_OP
335 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
336 #else
337 /* For Darwin. */
338 #undef TARGET_ASM_UNALIGNED_HI_OP
339 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
340 #undef TARGET_ASM_UNALIGNED_SI_OP
341 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
342 #endif
343 #endif
344
345 /* This hook deals with fixups for relocatable code and DI-mode objects
346 in 64-bit code. */
347 #undef TARGET_ASM_INTEGER
348 #define TARGET_ASM_INTEGER rs6000_assemble_integer
349
350 #ifdef HAVE_GAS_HIDDEN
351 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
352 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
353 #endif
354
355 #undef TARGET_ASM_FUNCTION_PROLOGUE
356 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
357 #undef TARGET_ASM_FUNCTION_EPILOGUE
358 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
359
360 #undef TARGET_SCHED_ISSUE_RATE
361 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
362 #undef TARGET_SCHED_ADJUST_COST
363 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
364 #undef TARGET_SCHED_ADJUST_PRIORITY
365 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
366
367 #undef TARGET_INIT_BUILTINS
368 #define TARGET_INIT_BUILTINS rs6000_init_builtins
369
370 #undef TARGET_EXPAND_BUILTIN
371 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
372
373 #undef TARGET_BINDS_LOCAL_P
374 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
375
/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))

/* The target hook vtable, built from the TARGET_* macros defined above.  */
struct gcc_target targetm = TARGET_INITIALIZER;
380 \f
381 /* Override command line options. Mostly we process the processor
382 type and sometimes adjust other TARGET_ options. */
383
void
rs6000_override_options (default_cpu)
     const char *default_cpu;
{
  size_t i, j;
  struct rs6000_cpu_select *ptr;

  /* Simplify the entries below by making a mask for any POWER
     variant and any PowerPC variant.  */

#define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
#define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
		       | MASK_PPC_GFXOPT | MASK_POWERPC64)
#define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)

  /* Table mapping each recognized -mcpu=/-mtune= name to its scheduling
     enum and the target flag bits the choice switches on and off.  */
  static struct ptt
    {
      const char *const name;		/* Canonical processor name.  */
      const enum processor_type processor; /* Processor type enum value.  */
      const int target_enable;	/* Target flags to enable.  */
      const int target_disable;	/* Target flags to disable.  */
    } const processor_target_table[]
      = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_MASKS},
	 {"power", PROCESSOR_POWER,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"power2", PROCESSOR_POWER,
	    MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
	    POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"power3", PROCESSOR_PPC630,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"power4", PROCESSOR_POWER4,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"powerpc", PROCESSOR_POWERPC,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"powerpc64", PROCESSOR_POWERPC64,
	    MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS},
	 {"rios", PROCESSOR_RIOS1,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rios1", PROCESSOR_RIOS1,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rsc", PROCESSOR_PPC601,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rsc1", PROCESSOR_PPC601,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rios2", PROCESSOR_RIOS2,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
	    POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rs64a", PROCESSOR_RS64A,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS},
	 {"401", PROCESSOR_PPC403,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"403", PROCESSOR_PPC403,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"405", PROCESSOR_PPC405,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"505", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"601", PROCESSOR_PPC601,
	    MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"602", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"603", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"603e", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"ec603e", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"604", PROCESSOR_PPC604,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"604e", PROCESSOR_PPC604e,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"620", PROCESSOR_PPC620,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"630", PROCESSOR_PPC630,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"740", PROCESSOR_PPC750,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"750", PROCESSOR_PPC750,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"7400", PROCESSOR_PPC7400,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"7450", PROCESSOR_PPC7450,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"8540", PROCESSOR_PPC8540,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"801", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"821", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"823", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"860", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};

  const size_t ptt_size = ARRAY_SIZE (processor_target_table);

  /* Save current -mmultiple/-mno-multiple status.  */
  int multiple = TARGET_MULTIPLE;
  /* Save current -mstring/-mno-string status.  */
  int string = TARGET_STRING;

  /* Identify the processor type.  */
  rs6000_select[0].string = default_cpu;
  rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;

  /* Walk the cpu-selection table: later entries (-mcpu=, -mtune=)
     override the configure-time default in entry 0.  */
  for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
    {
      ptr = &rs6000_select[i];
      if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	{
	  for (j = 0; j < ptt_size; j++)
	    if (! strcmp (ptr->string, processor_target_table[j].name))
	      {
		if (ptr->set_tune_p)
		  rs6000_cpu = processor_target_table[j].processor;

		if (ptr->set_arch_p)
		  {
		    target_flags |= processor_target_table[j].target_enable;
		    target_flags &= ~processor_target_table[j].target_disable;
		  }
		break;
	      }

	  /* Fell off the end of the table: name not recognized.  */
	  if (j == ptt_size)
	    error ("bad value (%s) for %s switch", ptr->string, ptr->name);
	}
    }

  /* Selecting the 8540 enables isel generation by default; an explicit
     -misel= still wins, since it is parsed later on.  */
  if (rs6000_cpu == PROCESSOR_PPC8540)
    rs6000_isel = 1;

  /* If we are optimizing big endian systems for space, use the store
     multiple instructions.  */
  if (BYTES_BIG_ENDIAN && optimize_size)
    target_flags |= MASK_MULTIPLE;

  /* If -mmultiple or -mno-multiple was explicitly used, don't
     override with the processor default.  */
  if (TARGET_MULTIPLE_SET)
    target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;

  /* If -mstring or -mno-string was explicitly used, don't override
     with the processor default.  */
  if (TARGET_STRING_SET)
    target_flags = (target_flags & ~MASK_STRING) | string;

  /* Don't allow -mmultiple or -mstring on little endian systems
     unless the cpu is a 750, because the hardware doesn't support the
     instructions used in little endian mode, and causes an alignment
     trap.  The 750 does not cause an alignment trap (except when the
     target is unaligned).  */

  if (! BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
    {
      if (TARGET_MULTIPLE)
	{
	  target_flags &= ~MASK_MULTIPLE;
	  if (TARGET_MULTIPLE_SET)
	    warning ("-mmultiple is not supported on little endian systems");
	}

      if (TARGET_STRING)
	{
	  target_flags &= ~MASK_STRING;
	  if (TARGET_STRING_SET)
	    warning ("-mstring is not supported on little endian systems");
	}
    }

  /* For ABI_AIX, stash the requested PIC level in rs6000_flag_pic and
     turn flag_pic off for the rest of the compiler.  */
  if (flag_pic != 0 && DEFAULT_ABI == ABI_AIX)
    {
      rs6000_flag_pic = flag_pic;
      flag_pic = 0;
    }

#ifdef XCOFF_DEBUGGING_INFO
  if (flag_function_sections && (write_symbols != NO_DEBUG)
      && DEFAULT_ABI == ABI_AIX)
    {
      warning ("-ffunction-sections disabled on AIX when debugging");
      flag_function_sections = 0;
    }

  if (flag_data_sections && (DEFAULT_ABI == ABI_AIX))
    {
      warning ("-fdata-sections not supported on AIX");
      flag_data_sections = 0;
    }
#endif

  /* For Darwin, always silently make -fpic and -fPIC identical.  */
  if (flag_pic == 1 && DEFAULT_ABI == ABI_DARWIN)
    flag_pic = 2;

  /* Set debug flags from the -mdebug= argument.  */
  if (rs6000_debug_name)
    {
      if (! strcmp (rs6000_debug_name, "all"))
	rs6000_debug_stack = rs6000_debug_arg = 1;
      else if (! strcmp (rs6000_debug_name, "stack"))
	rs6000_debug_stack = 1;
      else if (! strcmp (rs6000_debug_name, "arg"))
	rs6000_debug_arg = 1;
      else
	error ("unknown -mdebug-%s switch", rs6000_debug_name);
    }

  /* Parse -mtraceback=.  Only a prefix match is required, so "part"
     also accepts "partial" and "no" accepts "none".  */
  if (rs6000_traceback_name)
    {
      if (! strncmp (rs6000_traceback_name, "full", 4))
	rs6000_traceback = traceback_full;
      else if (! strncmp (rs6000_traceback_name, "part", 4))
	rs6000_traceback = traceback_part;
      else if (! strncmp (rs6000_traceback_name, "no", 2))
	rs6000_traceback = traceback_none;
      else
	error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
	       rs6000_traceback_name);
    }

  /* Set size of long double: 64 bits by default, 64 or 128 via
     -mlong-double-.  */
  rs6000_long_double_type_size = 64;
  if (rs6000_long_double_size_string)
    {
      char *tail;
      int size = strtol (rs6000_long_double_size_string, &tail, 10);
      if (*tail != '\0' || (size != 64 && size != 128))
	error ("Unknown switch -mlong-double-%s",
	       rs6000_long_double_size_string);
      else
	rs6000_long_double_type_size = size;
    }

  /* Handle -mabi= options.  */
  rs6000_parse_abi_options ();

  /* Handle -mvrsave= option.  */
  rs6000_parse_vrsave_option ();

  /* Handle -misel= option.  */
  rs6000_parse_isel_option ();

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
  SUBSUBTARGET_OVERRIDE_OPTIONS;
#endif

  /* Handle -m(no-)longcall option.  This is a bit of a cheap hack,
     using TARGET_OPTIONS to handle a toggle switch, but we're out of
     bits in target_flags so TARGET_SWITCHES cannot be used.
     Assumption here is that rs6000_longcall_switch points into the
     text of the complete option, rather than being a copy, so we can
     scan back for the presence or absence of the no- modifier.  */
  if (rs6000_longcall_switch)
    {
      const char *base = rs6000_longcall_switch;
      while (base[-1] != 'm') base--;

      /* Any text remaining after the recognized switch name makes the
	 option malformed.  */
      if (*rs6000_longcall_switch != '\0')
	error ("invalid option `%s'", base);
      rs6000_default_long_calls = (base[0] != 'n');
    }

#ifdef TARGET_REGNAMES
  /* If the user desires alternate register names, copy in the
     alternate names now.  */
  if (TARGET_REGNAMES)
    memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
#endif

  /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
     If -maix-struct-return or -msvr4-struct-return was explicitly
     used, don't override with the ABI default.  */
  if (!(target_flags & MASK_AIX_STRUCT_RET_SET))
    {
      if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
	target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
      else
	target_flags |= MASK_AIX_STRUCT_RET;
    }

  /* Register global variables with the garbage collector.  */
  rs6000_add_gc_roots ();

  /* Allocate an alias set for register saves & restores from stack.  */
  rs6000_sr_alias_set = new_alias_set ();

  if (TARGET_TOC)
    ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);

  /* We can only guarantee the availability of DI pseudo-ops when
     assembling for 64-bit targets.  */
  if (!TARGET_64BIT)
    {
      targetm.asm_out.aligned_op.di = NULL;
      targetm.asm_out.unaligned_op.di = NULL;
    }

  /* Arrange to save and restore machine status around nested functions.  */
  init_machine_status = rs6000_init_machine_status;
}
721
722 /* Handle -misel= option. */
723 static void
724 rs6000_parse_isel_option ()
725 {
726 if (rs6000_isel_string == 0)
727 return;
728 else if (! strcmp (rs6000_isel_string, "yes"))
729 rs6000_isel = 1;
730 else if (! strcmp (rs6000_isel_string, "no"))
731 rs6000_isel = 0;
732 else
733 error ("unknown -misel= option specified: '%s'",
734 rs6000_isel_string);
735 }
736
737 /* Handle -mvrsave= options. */
738 static void
739 rs6000_parse_vrsave_option ()
740 {
741 /* Generate VRSAVE instructions by default. */
742 if (rs6000_altivec_vrsave_string == 0
743 || ! strcmp (rs6000_altivec_vrsave_string, "yes"))
744 rs6000_altivec_vrsave = 1;
745 else if (! strcmp (rs6000_altivec_vrsave_string, "no"))
746 rs6000_altivec_vrsave = 0;
747 else
748 error ("unknown -mvrsave= option specified: '%s'",
749 rs6000_altivec_vrsave_string);
750 }
751
752 /* Handle -mabi= options. */
753 static void
754 rs6000_parse_abi_options ()
755 {
756 if (rs6000_abi_string == 0)
757 return;
758 else if (! strcmp (rs6000_abi_string, "altivec"))
759 rs6000_altivec_abi = 1;
760 else if (! strcmp (rs6000_abi_string, "no-altivec"))
761 rs6000_altivec_abi = 0;
762 else if (! strcmp (rs6000_abi_string, "spe"))
763 rs6000_spe_abi = 1;
764 else if (! strcmp (rs6000_abi_string, "no-spe"))
765 rs6000_spe_abi = 0;
766 else
767 error ("unknown ABI specified: '%s'", rs6000_abi_string);
768 }
769
/* Called once per compilation to adjust defaults for the optimization
   LEVEL and -Os SIZE settings; this port has nothing to adjust.  */
void
optimization_options (level, size)
     int level ATTRIBUTE_UNUSED;
     int size ATTRIBUTE_UNUSED;
{
}
776 \f
777 /* Do anything needed at the start of the asm file. */
778
void
rs6000_file_start (file, default_cpu)
     FILE *file;
     const char *default_cpu;
{
  size_t i;
  char buffer[80];
  const char *start = buffer;
  struct rs6000_cpu_select *ptr;

  if (flag_verbose_asm)
    {
      /* START points at the comment header until the first item is
	 printed; it is then set to "" so later items share the line
	 without repeating the header.  */
      sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
      rs6000_select[0].string = default_cpu;

      for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
	{
	  ptr = &rs6000_select[i];
	  if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	    {
	      fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
	      start = "";
	    }
	}

#ifdef USING_ELFOS_H
      switch (rs6000_sdata)
	{
	case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
	case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
	case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
	case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
	}

      if (rs6000_sdata && g_switch_value)
	{
	  fprintf (file, "%s -G %d", start, g_switch_value);
	  start = "";
	}
#endif

      /* Terminate the options line, but only if something was printed
	 (START is cleared exactly when an item is emitted).  */
      if (*start == '\0')
	putc ('\n', file);
    }
}
824 \f
825 /* Return non-zero if this function is known to have a null epilogue. */
826
int
direct_return ()
{
  /* The frame layout is only final after reload.  */
  if (reload_completed)
    {
      rs6000_stack_t *info = rs6000_stack_info ();

      /* The epilogue is null when nothing had to be saved in the
	 prologue: no GPRs (first saved would be below 32), no FPRs
	 (below 64), no AltiVec registers, no LR or CR save, no VRSAVE
	 bits, and no stack push.  */
      if (info->first_gp_reg_save == 32
	  && info->first_fp_reg_save == 64
	  && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
	  && ! info->lr_save_p
	  && ! info->cr_save_p
	  && info->vrsave_mask == 0
	  && ! info->push_p)
	return 1;
    }

  /* Before reload, be conservative and assume an epilogue is needed.  */
  return 0;
}
846
847 /* Returns 1 always. */
848
int
any_operand (op, mode)
     rtx op ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  /* Accept any rtx in any mode.  */
  return 1;
}
856
857 /* Returns 1 if op is the count register. */
858 int
859 count_register_operand (op, mode)
860 rtx op;
861 enum machine_mode mode ATTRIBUTE_UNUSED;
862 {
863 if (GET_CODE (op) != REG)
864 return 0;
865
866 if (REGNO (op) == COUNT_REGISTER_REGNUM)
867 return 1;
868
869 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
870 return 1;
871
872 return 0;
873 }
874
875 /* Returns 1 if op is an altivec register. */
876 int
877 altivec_register_operand (op, mode)
878 rtx op;
879 enum machine_mode mode ATTRIBUTE_UNUSED;
880 {
881
882 return (register_operand (op, mode)
883 && (GET_CODE (op) != REG
884 || REGNO (op) > FIRST_PSEUDO_REGISTER
885 || ALTIVEC_REGNO_P (REGNO (op))));
886 }
887
888 int
889 xer_operand (op, mode)
890 rtx op;
891 enum machine_mode mode ATTRIBUTE_UNUSED;
892 {
893 if (GET_CODE (op) != REG)
894 return 0;
895
896 if (XER_REGNO_P (REGNO (op)))
897 return 1;
898
899 return 0;
900 }
901
902 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
903 by such constants completes more quickly. */
904
905 int
906 s8bit_cint_operand (op, mode)
907 rtx op;
908 enum machine_mode mode ATTRIBUTE_UNUSED;
909 {
910 return ( GET_CODE (op) == CONST_INT
911 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
912 }
913
914 /* Return 1 if OP is a constant that can fit in a D field. */
915
916 int
917 short_cint_operand (op, mode)
918 rtx op;
919 enum machine_mode mode ATTRIBUTE_UNUSED;
920 {
921 return (GET_CODE (op) == CONST_INT
922 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
923 }
924
925 /* Similar for an unsigned D field. */
926
927 int
928 u_short_cint_operand (op, mode)
929 rtx op;
930 enum machine_mode mode ATTRIBUTE_UNUSED;
931 {
932 return (GET_CODE (op) == CONST_INT
933 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
934 }
935
936 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
937
938 int
939 non_short_cint_operand (op, mode)
940 rtx op;
941 enum machine_mode mode ATTRIBUTE_UNUSED;
942 {
943 return (GET_CODE (op) == CONST_INT
944 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
945 }
946
947 /* Returns 1 if OP is a CONST_INT that is a positive value
948 and an exact power of 2. */
949
950 int
951 exact_log2_cint_operand (op, mode)
952 rtx op;
953 enum machine_mode mode ATTRIBUTE_UNUSED;
954 {
955 return (GET_CODE (op) == CONST_INT
956 && INTVAL (op) > 0
957 && exact_log2 (INTVAL (op)) >= 0);
958 }
959
/* Return 1 if OP is a register that is not special, i.e. not MQ,
   CTR, LR, the CR fields or XER.  Pseudo-registers are accepted,
   since they may still be allocated to a general register.  */

int
gpc_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Accept: any non-REG that register_operand allows (e.g. a SUBREG);
     hard registers numbered below MQ_REGNO (presumably the GPRs and
     FPRs -- confirm against the register numbering in rs6000.h); and
     everything from ARG_POINTER_REGNUM up (pseudos included) except
     the XER group.  */
  return (register_operand (op, mode)
	  && (GET_CODE (op) != REG
	      || (REGNO (op) >= ARG_POINTER_REGNUM
		  && !XER_REGNO_P (REGNO (op)))
	      || REGNO (op) < MQ_REGNO));
}
974
975 /* Returns 1 if OP is either a pseudo-register or a register denoting a
976 CR field. */
977
978 int
979 cc_reg_operand (op, mode)
980 rtx op;
981 enum machine_mode mode;
982 {
983 return (register_operand (op, mode)
984 && (GET_CODE (op) != REG
985 || REGNO (op) >= FIRST_PSEUDO_REGISTER
986 || CR_REGNO_P (REGNO (op))));
987 }
988
989 /* Returns 1 if OP is either a pseudo-register or a register denoting a
990 CR field that isn't CR0. */
991
992 int
993 cc_reg_not_cr0_operand (op, mode)
994 rtx op;
995 enum machine_mode mode;
996 {
997 return (register_operand (op, mode)
998 && (GET_CODE (op) != REG
999 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1000 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1001 }
1002
1003 /* Returns 1 if OP is either a constant integer valid for a D-field or
1004 a non-special register. If a register, it must be in the proper
1005 mode unless MODE is VOIDmode. */
1006
1007 int
1008 reg_or_short_operand (op, mode)
1009 rtx op;
1010 enum machine_mode mode;
1011 {
1012 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1013 }
1014
1015 /* Similar, except check if the negation of the constant would be
1016 valid for a D-field. */
1017
1018 int
1019 reg_or_neg_short_operand (op, mode)
1020 rtx op;
1021 enum machine_mode mode;
1022 {
1023 if (GET_CODE (op) == CONST_INT)
1024 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1025
1026 return gpc_reg_operand (op, mode);
1027 }
1028
1029 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1030 a non-special register. If a register, it must be in the proper
1031 mode unless MODE is VOIDmode. */
1032
1033 int
1034 reg_or_aligned_short_operand (op, mode)
1035 rtx op;
1036 enum machine_mode mode;
1037 {
1038 if (gpc_reg_operand (op, mode))
1039 return 1;
1040 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1041 return 1;
1042
1043 return 0;
1044 }
1045
1046
1047 /* Return 1 if the operand is either a register or an integer whose
1048 high-order 16 bits are zero. */
1049
1050 int
1051 reg_or_u_short_operand (op, mode)
1052 rtx op;
1053 enum machine_mode mode;
1054 {
1055 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1056 }
1057
1058 /* Return 1 is the operand is either a non-special register or ANY
1059 constant integer. */
1060
1061 int
1062 reg_or_cint_operand (op, mode)
1063 rtx op;
1064 enum machine_mode mode;
1065 {
1066 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1067 }
1068
/* Return 1 if OP is either a non-special register or ANY
   32-bit signed constant integer, i.e. a value in [-2^31, 2^31).  */

int
reg_or_arith_cint_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT != 32
	      /* Bias by 2^31 so the whole signed 32-bit range becomes
		 one unsigned comparison.  On a 32-bit host every
		 CONST_INT fits by construction, so no test is done.  */
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
		  < (unsigned HOST_WIDE_INT) 0x100000000ll)
#endif
	      ));
}
1085
/* Return 1 if OP is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit addition (i.e. one an
   addis/addi pair can add without the high part overflowing when
   the low 16 bits are sign-extended).  */

int
reg_or_add_cint64_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT == 32
	      /* Values from 0x7fff8000 up would round the high part
		 out of signed range.  */
	      && INTVAL (op) < 0x7fff8000
#else
	      /* Bias by 0x80008000: both the low (addi) and the high
		 (addis) 16-bit immediates must stay in signed range.  */
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
1104
/* Return 1 if OP is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit subtraction, i.e. whose
   negation is valid for 64-bit addition (see
   reg_or_add_cint64_operand above).  */

int
reg_or_sub_cint64_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* NOTE(review): negating INTVAL overflows if OP is the most
     negative HOST_WIDE_INT; presumably such values never reach this
     predicate -- confirm against the insn patterns that use it.  */
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT == 32
	      && (- INTVAL (op)) < 0x7fff8000
#else
	      && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
1123
/* Return 1 if OP is either a non-special register or ANY 32-bit
   unsigned constant integer (i.e. one that zero-extends to MODE).  */

int
reg_or_logical_cint_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	{
	  /* Only a multiword mode on a narrow host can get here;
	     anything 32 bits or less would not exceed the host word.  */
	  if (GET_MODE_BITSIZE (mode) <= 32)
	    abort ();

	  /* A negative CONST_INT sign-extends above the host word,
	     so it cannot represent an unsigned 32-bit value.  */
	  if (INTVAL (op) < 0)
	    return 0;
	}

      /* Accept only if no bits above the low 32 survive MODE's mask.  */
      return ((INTVAL (op) & GET_MODE_MASK (mode)
	       & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* Integer CONST_DOUBLEs should only arise for DImode values
	 wider than the host word.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  || mode != DImode)
	abort ();

      return CONST_DOUBLE_HIGH (op) == 0;
    }
  else
    return gpc_reg_operand (op, mode);
}
1157
1158 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1159
1160 int
1161 got_operand (op, mode)
1162 rtx op;
1163 enum machine_mode mode ATTRIBUTE_UNUSED;
1164 {
1165 return (GET_CODE (op) == SYMBOL_REF
1166 || GET_CODE (op) == CONST
1167 || GET_CODE (op) == LABEL_REF);
1168 }
1169
1170 /* Return 1 if the operand is a simple references that can be loaded via
1171 the GOT (labels involving addition aren't allowed). */
1172
1173 int
1174 got_no_const_operand (op, mode)
1175 rtx op;
1176 enum machine_mode mode ATTRIBUTE_UNUSED;
1177 {
1178 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1179 }
1180
/* Return the number of instructions it takes to form the constant
   VALUE in an integer register.  */

static int
num_insns_constant_wide (value)
     HOST_WIDE_INT value;
{
  /* signed constant loadable with {cal|addi} */
  if (CONST_OK_FOR_LETTER_P (value, 'I'))
    return 1;

  /* constant loadable with {cau|addis} */
  else if (CONST_OK_FOR_LETTER_P (value, 'L'))
    return 1;

#if HOST_BITS_PER_WIDE_INT == 64
  else if (TARGET_POWERPC64)
    {
      /* Sign-extended low 32 bits of VALUE.  */
      HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
      /* VALUE with the low 31 bits shifted out.  */
      HOST_WIDE_INT high = value >> 31;

      /* Fits in 32 bits: addis/ori style pair.  */
      if (high == 0 || high == -1)
	return 2;

      high >>= 1;

      if (low == 0)
	/* High part plus one shift.  */
	return num_insns_constant_wide (high) + 1;
      else
	/* High part, shift, then the low part.  */
	return (num_insns_constant_wide (high)
		+ num_insns_constant_wide (low) + 1);
    }
#endif

  /* Everything else takes a two-insn sequence.  */
  else
    return 2;
}
1218
/* Return the number of instructions needed to load constant OP of
   mode MODE into an integer register.  OP is a CONST_INT or
   CONST_DOUBLE; anything else aborts.  */

int
num_insns_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
#if HOST_BITS_PER_WIDE_INT == 64
      /* A 64-bit value that is a rotate-and-mask constant can be
	 built with li/rldic-style two-insn sequences.  */
      if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
	  && mask64_operand (op, mode))
	return 2;
      else
#endif
	return num_insns_constant_wide (INTVAL (op));
    }

  else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
    {
      /* Cost the 32-bit target image of the float.  */
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);
      return num_insns_constant_wide ((HOST_WIDE_INT) l);
    }

  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      HOST_WIDE_INT low;
      HOST_WIDE_INT high;
      long l[2];
      REAL_VALUE_TYPE rv;
      int endian = (WORDS_BIG_ENDIAN == 0);

      if (mode == VOIDmode || mode == DImode)
	{
	  /* Integer CONST_DOUBLE: the two host words are the value.  */
	  high = CONST_DOUBLE_HIGH (op);
	  low = CONST_DOUBLE_LOW (op);
	}
      else
	{
	  /* Floating CONST_DOUBLE: cost its target bit image.  */
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
	  REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
	  high = l[endian];
	  low = l[1 - endian];
	}

      if (TARGET_32BIT)
	/* Two registers, each loaded separately.  */
	return (num_insns_constant_wide (low)
		+ num_insns_constant_wide (high));

      else
	{
	  /* 64-bit register: cheapest form that covers the value.  */
	  if (high == 0 && low >= 0)
	    return num_insns_constant_wide (low);

	  else if (high == -1 && low < 0)
	    return num_insns_constant_wide (low);

	  else if (mask64_operand (op, mode))
	    return 2;

	  else if (low == 0)
	    return num_insns_constant_wide (high) + 1;

	  else
	    return (num_insns_constant_wide (high)
		    + num_insns_constant_wide (low) + 1);
	}
    }

  else
    abort ();
}
1293
/* Return 1 if OP is a CONST_DOUBLE of mode MODE that can be put into
   a register with one instruction per word.  We only do this if we
   can safely read CONST_DOUBLE_{LOW,HIGH}.  */

int
easy_fp_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != CONST_DOUBLE
      || GET_MODE (op) != mode
      || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
    return 0;

  /* Consider all constants with -msoft-float to be easy.  */
  if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
      && mode != DImode)
    return 1;

  /* If we are using V.4 style PIC, consider all constants to be hard.  */
  if (flag_pic && DEFAULT_ABI == ABI_V4)
    return 0;

#ifdef TARGET_RELOCATABLE
  /* Similarly if we are using -mrelocatable, consider all constants
     to be hard.  */
  if (TARGET_RELOCATABLE)
    return 0;
#endif

  if (mode == DFmode)
    {
      /* Easy iff each 32-bit half of the target image loads in one
	 instruction.  */
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
    }

  else if (mode == SFmode)
    {
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      return num_insns_constant_wide (l) == 1;
    }

  else if (mode == DImode)
    /* Easy if the low word is zero (high word alone), or the whole
       value costs at most two insns.  */
    return ((TARGET_POWERPC64
	     && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
	    || (num_insns_constant (op, DImode) <= 2));

  else if (mode == SImode)
    return 1;
  else
    abort ();
}
1357
1358 /* Return 1 if the operand is a CONST_INT and can be put into a
1359 register with one instruction. */
1360
1361 static int
1362 easy_vector_constant (op)
1363 rtx op;
1364 {
1365 rtx elt;
1366 int units, i;
1367
1368 if (GET_CODE (op) != CONST_VECTOR)
1369 return 0;
1370
1371 units = CONST_VECTOR_NUNITS (op);
1372
1373 /* We can generate 0 easily. Look for that. */
1374 for (i = 0; i < units; ++i)
1375 {
1376 elt = CONST_VECTOR_ELT (op, i);
1377
1378 /* We could probably simplify this by just checking for equality
1379 with CONST0_RTX for the current mode, but let's be safe
1380 instead. */
1381
1382 switch (GET_CODE (elt))
1383 {
1384 case CONST_INT:
1385 if (INTVAL (elt) != 0)
1386 return 0;
1387 break;
1388 case CONST_DOUBLE:
1389 if (CONST_DOUBLE_LOW (elt) != 0 || CONST_DOUBLE_HIGH (elt) != 0)
1390 return 0;
1391 break;
1392 default:
1393 return 0;
1394 }
1395 }
1396
1397 /* We could probably generate a few other constants trivially, but
1398 gcc doesn't generate them yet. FIXME later. */
1399 return 1;
1400 }
1401
1402 /* Return 1 if the operand is the constant 0. This works for scalars
1403 as well as vectors. */
1404 int
1405 zero_constant (op, mode)
1406 rtx op;
1407 enum machine_mode mode;
1408 {
1409 return op == CONST0_RTX (mode);
1410 }
1411
1412 /* Return 1 if the operand is 0.0. */
1413 int
1414 zero_fp_constant (op, mode)
1415 rtx op;
1416 enum machine_mode mode;
1417 {
1418 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1419 }
1420
1421 /* Return 1 if the operand is in volatile memory. Note that during
1422 the RTL generation phase, memory_operand does not return TRUE for
1423 volatile memory references. So this function allows us to
1424 recognize volatile references where its safe. */
1425
1426 int
1427 volatile_mem_operand (op, mode)
1428 rtx op;
1429 enum machine_mode mode;
1430 {
1431 if (GET_CODE (op) != MEM)
1432 return 0;
1433
1434 if (!MEM_VOLATILE_P (op))
1435 return 0;
1436
1437 if (mode != GET_MODE (op))
1438 return 0;
1439
1440 if (reload_completed)
1441 return memory_operand (op, mode);
1442
1443 if (reload_in_progress)
1444 return strict_memory_address_p (mode, XEXP (op, 0));
1445
1446 return memory_address_p (mode, XEXP (op, 0));
1447 }
1448
1449 /* Return 1 if the operand is an offsettable memory operand. */
1450
1451 int
1452 offsettable_mem_operand (op, mode)
1453 rtx op;
1454 enum machine_mode mode;
1455 {
1456 return ((GET_CODE (op) == MEM)
1457 && offsettable_address_p (reload_completed || reload_in_progress,
1458 mode, XEXP (op, 0)));
1459 }
1460
1461 /* Return 1 if the operand is either an easy FP constant (see above) or
1462 memory. */
1463
1464 int
1465 mem_or_easy_const_operand (op, mode)
1466 rtx op;
1467 enum machine_mode mode;
1468 {
1469 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1470 }
1471
1472 /* Return 1 if the operand is either a non-special register or an item
1473 that can be used as the operand of a `mode' add insn. */
1474
1475 int
1476 add_operand (op, mode)
1477 rtx op;
1478 enum machine_mode mode;
1479 {
1480 if (GET_CODE (op) == CONST_INT)
1481 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1482 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1483
1484 return gpc_reg_operand (op, mode);
1485 }
1486
1487 /* Return 1 if OP is a constant but not a valid add_operand. */
1488
1489 int
1490 non_add_cint_operand (op, mode)
1491 rtx op;
1492 enum machine_mode mode ATTRIBUTE_UNUSED;
1493 {
1494 return (GET_CODE (op) == CONST_INT
1495 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1496 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1497 }
1498
/* Return 1 if OP is a non-special register or a constant that can be
   the second operand of an OR or XOR insn on the RS/6000: only the
   low 16 bits set (ori/xori) or only bits 16-31 set (oris/xoris).  */

int
logical_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  HOST_WIDE_INT opl, oph;

  if (gpc_reg_operand (op, mode))
    return 1;

  if (GET_CODE (op) == CONST_INT)
    {
      opl = INTVAL (op) & GET_MODE_MASK (mode);

#if HOST_BITS_PER_WIDE_INT <= 32
      /* On a narrow host a negative value in a wider mode would have
	 set bits above the host word; reject it.  */
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
	return 0;
#endif
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* Integer CONST_DOUBLEs arise only for modes wider than the
	 host word.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	abort ();

      opl = CONST_DOUBLE_LOW (op);
      oph = CONST_DOUBLE_HIGH (op);
      if (oph != 0)
	return 0;
    }
  else
    return 0;

  /* Accept if all set bits fall in the low halfword, or all in the
     upper halfword of the low 32 bits.  */
  return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
	  || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
}
1537
1538 /* Return 1 if C is a constant that is not a logical operand (as
1539 above), but could be split into one. */
1540
1541 int
1542 non_logical_cint_operand (op, mode)
1543 rtx op;
1544 enum machine_mode mode;
1545 {
1546 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1547 && ! logical_operand (op, mode)
1548 && reg_or_logical_cint_operand (op, mode));
1549 }
1550
/* Return 1 if OP is a constant that can be encoded in a 32-bit mask
   on the RS/6000, i.e. has no more than two 1->0 or 0->1 bit
   transitions (a contiguous run of ones, as used by rlwinm's MB/ME
   fields).  Reject all ones and all zeros, since these should have
   been optimized away and confuse the making of MB and ME.  */

int
mask_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Fail in 64-bit mode if the mask wraps around because the upper
     32-bits of the mask will all be 1s, contrary to GCC's internal view.  */
  if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
    return 0;

  /* We don't change the number of transitions by inverting,
     so make sure we start with the LS bit zero.  */
  if (c & 1)
    c = ~c;

  /* Reject all zeros or all ones.  */
  if (c == 0)
    return 0;

  /* Find the first transition.  */
  lsb = c & -c;

  /* Invert to look for a second transition.  */
  c = ~c;

  /* Erase first transition.  */
  c &= -lsb;

  /* Find the second transition (if any).  */
  lsb = c & -c;

  /* Match if all the bits above are 1's (or c is zero).  */
  return c == -lsb;
}
1597
/* Return 1 for the PowerPC64 rlwinm corner case: a mask whose run of
   ones wraps around from bit 0 through bit 31 (both the MS and LS
   bits set), which mask_operand above deliberately rejects.  */

int
mask_operand_wrap (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Only the wrapped case is handled here: bits 0x80000000 and 1
     must both be set.  */
  if ((c & 0x80000001) != 0x80000001)
    return 0;

  /* Invert; the complement of a wrapped mask starts with its LS bit
     clear, so the same transition-counting trick as in mask_operand
     applies.  Reject the complement of zero (OP was all ones).  */
  c = ~c;
  if (c == 0)
    return 0;

  /* Find and erase the first transition, then check that at most one
     more remains -- see mask_operand for the step-by-step version.  */
  lsb = c & -c;
  c = ~c;
  c &= -lsb;
  lsb = c & -c;
  return c == -lsb;
}
1625
/* Return 1 if OP is a constant that is a valid PowerPC64 mask
   (rldicl/rldicr style): no more than one 1->0 or 0->1 transition,
   i.e. a run of ones anchored at one end of the doubleword.
   Reject all zeros, since zero should have been optimized away and
   confuses the making of MB and ME.  */

int
mask64_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb;

      c = INTVAL (op);

      /* Reject all zeros.  */
      if (c == 0)
	return 0;

      /* We don't change the number of transitions by inverting,
	 so make sure we start with the LS bit zero.  */
      if (c & 1)
	c = ~c;

      /* Find the transition, and check that all bits above are 1's.  */
      lsb = c & -c;
      return c == -lsb;
    }
  return 0;
}
1657
/* Like mask64_operand, but allow up to three transitions.  This
   predicate is used by insn patterns that generate two rldicl or
   rldicr machine insns.  */

int
mask64_2_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb;

      c = INTVAL (op);

      /* Disallow all zeros.  */
      if (c == 0)
	return 0;

      /* We don't change the number of transitions by inverting,
	 so make sure we start with the LS bit zero.  */
      if (c & 1)
	c = ~c;

      /* Find the first transition.  */
      lsb = c & -c;

      /* Invert to look for a second transition.  */
      c = ~c;

      /* Erase first transition.  */
      c &= -lsb;

      /* Find the second transition.  */
      lsb = c & -c;

      /* Invert to look for a third transition.  */
      c = ~c;

      /* Erase second transition.  */
      c &= -lsb;

      /* Find the third transition (if any).  */
      lsb = c & -c;

      /* Match if all the bits above are 1's (or c is zero).  */
      return c == -lsb;
    }
  return 0;
}
1708
/* Generates shifts and masks for a pair of rldicl or rldicr insns to
   implement ANDing by the mask IN (a CONST_INT accepted by
   mask64_2_operand).  The four results are stored in OUT:
   OUT[0] = 64 - shift, OUT[1] = first mask, OUT[2] = shift,
   OUT[3] = second mask (presumably consumed in that operand order by
   the and64_2 splitters in rs6000.md -- confirm there).  Requires a
   64-bit-capable host; aborts otherwise.  */
void
build_mask64_2_operands (in, out)
     rtx in;
     rtx *out;
{
#if HOST_BITS_PER_WIDE_INT >= 64
  unsigned HOST_WIDE_INT c, lsb, m1, m2;
  int shift;

  if (GET_CODE (in) != CONST_INT)
    abort ();

  c = INTVAL (in);
  if (c & 1)
    {
      /* Assume c initially something like 0x00fff000000fffff.  The idea
	 is to rotate the word so that the middle ^^^^^^ group of zeros
	 is at the MS end and can be cleared with an rldicl mask.  We then
	 rotate back and clear off the MS        ^^ group of zeros with a
	 second rldicl.  */
      c = ~c;			/* c == 0xff000ffffff00000 */
      lsb = c & -c;		/* lsb == 0x0000000000100000 */
      m1 = -lsb;		/* m1 == 0xfffffffffff00000 */
      c = ~c;			/* c == 0x00fff000000fffff */
      c &= -lsb;		/* c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/* c == 0xff000fffffffffff */
      c &= -lsb;		/* c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 <<= 64 - shift;	/* m1 == 0xffffff0000000000 */
      m1 = ~m1;			/* m1 == 0x000000ffffffffff */
      m2 = ~c;			/* m2 == 0x00ffffffffffffff */
    }
  else
    {
      /* Assume c initially something like 0xff000f0000000000.  The idea
	 is to rotate the word so that the     ^^^  middle group of zeros
	 is at the LS end and can be cleared with an rldicr mask.  We then
	 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
	 a second rldicr.  */
      lsb = c & -c;		/* lsb == 0x0000010000000000 */
      m2 = -lsb;		/* m2 == 0xffffff0000000000 */
      c = ~c;			/* c == 0x00fff0ffffffffff */
      c &= -lsb;		/* c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/* c == 0xff000fffffffffff */
      c &= -lsb;		/* c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 = ~c;			/* m1 == 0x00ffffffffffffff */
      m1 >>= shift;		/* m1 == 0x0000000000000fff */
      m1 = ~m1;			/* m1 == 0xfffffffffffff000 */
    }

  /* Note that when we only have two 0->1 and 1->0 transitions, one of the
     masks will be all 1's.  We are guaranteed more than one transition.  */
  out[0] = GEN_INT (64 - shift);
  out[1] = GEN_INT (m1);
  out[2] = GEN_INT (shift);
  out[3] = GEN_INT (m2);
#else
  abort ();
#endif
}
1778
1779 /* Return 1 if the operand is either a non-special register or a constant
1780 that can be used as the operand of a PowerPC64 logical AND insn. */
1781
1782 int
1783 and64_operand (op, mode)
1784 rtx op;
1785 enum machine_mode mode;
1786 {
1787 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1788 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1789
1790 return (logical_operand (op, mode) || mask64_operand (op, mode));
1791 }
1792
1793 /* Like the above, but also match constants that can be implemented
1794 with two rldicl or rldicr insns. */
1795
1796 int
1797 and64_2_operand (op, mode)
1798 rtx op;
1799 enum machine_mode mode;
1800 {
1801 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1802 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
1803
1804 return logical_operand (op, mode) || mask64_2_operand (op, mode);
1805 }
1806
1807 /* Return 1 if the operand is either a non-special register or a
1808 constant that can be used as the operand of an RS/6000 logical AND insn. */
1809
1810 int
1811 and_operand (op, mode)
1812 rtx op;
1813 enum machine_mode mode;
1814 {
1815 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1816 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1817
1818 return (logical_operand (op, mode) || mask_operand (op, mode));
1819 }
1820
1821 /* Return 1 if the operand is a general register or memory operand. */
1822
1823 int
1824 reg_or_mem_operand (op, mode)
1825 rtx op;
1826 enum machine_mode mode;
1827 {
1828 return (gpc_reg_operand (op, mode)
1829 || memory_operand (op, mode)
1830 || volatile_mem_operand (op, mode));
1831 }
1832
/* Return 1 if OP is a general register or a memory operand without
   pre_inc or pre_dec, which would produce an invalid form of the
   PowerPC lwa instruction.  lwa is a DS-form instruction whose
   displacement must be a multiple of 4.  */

int
lwa_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  rtx inner = op;

  /* After reload, look through a SUBREG to the underlying object.  */
  if (reload_completed && GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);
    
  /* Accept memory unless its address pre-increments/decrements, or
     is reg+constant with a displacement that is not 0 mod 4.  */
  return gpc_reg_operand (inner, mode)
    || (memory_operand (inner, mode)
	&& GET_CODE (XEXP (inner, 0)) != PRE_INC
	&& GET_CODE (XEXP (inner, 0)) != PRE_DEC
	&& (GET_CODE (XEXP (inner, 0)) != PLUS
	    || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
	    || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
}
1855
1856 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
1857
1858 int
1859 symbol_ref_operand (op, mode)
1860 rtx op;
1861 enum machine_mode mode;
1862 {
1863 if (mode != VOIDmode && GET_MODE (op) != mode)
1864 return 0;
1865
1866 return (GET_CODE (op) == SYMBOL_REF);
1867 }
1868
1869 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1870 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
1871
1872 int
1873 call_operand (op, mode)
1874 rtx op;
1875 enum machine_mode mode;
1876 {
1877 if (mode != VOIDmode && GET_MODE (op) != mode)
1878 return 0;
1879
1880 return (GET_CODE (op) == SYMBOL_REF
1881 || (GET_CODE (op) == REG
1882 && (REGNO (op) == LINK_REGISTER_REGNUM
1883 || REGNO (op) == COUNT_REGISTER_REGNUM
1884 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
1885 }
1886
1887 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1888 this file and the function is not weakly defined. */
1889
1890 int
1891 current_file_function_operand (op, mode)
1892 rtx op;
1893 enum machine_mode mode ATTRIBUTE_UNUSED;
1894 {
1895 return (GET_CODE (op) == SYMBOL_REF
1896 && (SYMBOL_REF_FLAG (op)
1897 || (op == XEXP (DECL_RTL (current_function_decl), 0)
1898 && ! DECL_WEAK (current_function_decl))));
1899 }
1900
/* Return 1 if OP (of mode MODE) is a valid input for a move insn.  */

int
input_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Memory is always valid.  */
  if (memory_operand (op, mode))
    return 1;

  /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  /* For floating-point, easy constants are valid.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && CONSTANT_P (op)
      && easy_fp_constant (op, mode))
    return 1;

  /* Allow any integer constant.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && (GET_CODE (op) == CONST_INT
	  || GET_CODE (op) == CONST_DOUBLE))
    return 1;

  /* For floating-point or multi-word mode, the only remaining valid type
     is a register.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    return register_operand (op, mode);

  /* The only cases left are integral modes one word or smaller (we
     do not get called for MODE_CC values).  These can be in any
     register.  */
  if (register_operand (op, mode))
    return 1;

  /* A SYMBOL_REF referring to the TOC is valid.  */
  if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
    return 1;

  /* A constant pool expression (relative to the TOC) is valid */
  if (TOC_RELATIVE_EXPR_P (op))
    return 1;

  /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
     to be valid.  */
  if (DEFAULT_ABI == ABI_V4
      && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
      && small_data_operand (op, Pmode))
    return 1;

  return 0;
}
1957
/* Return 1 if OP refers to an object in the small data area on
   V.4/eabi (only possible for ELF targets with -msdata=sysv/eabi
   under the V.4 ABI).  OP is either a SYMBOL_REF or a CONST of the
   form (symbol + offset).  */

int
small_data_operand (op, mode)
     rtx op ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
#if TARGET_ELF
  rtx sym_ref;

  if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
    return 0;

  if (DEFAULT_ABI != ABI_V4)
    return 0;

  if (GET_CODE (op) == SYMBOL_REF)
    sym_ref = op;

  else if (GET_CODE (op) != CONST
	   || GET_CODE (XEXP (op, 0)) != PLUS
	   || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
	   || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
    return 0;

  else
    {
      rtx sum = XEXP (op, 0);
      HOST_WIDE_INT summand;

      /* We have to be careful here, because it is the referenced address
        that must be 32k from _SDA_BASE_, not just the symbol.  */
      summand = INTVAL (XEXP (sum, 1));
      if (summand < 0 || summand > g_switch_value)
	return 0;

      sym_ref = XEXP (sum, 0);
    }

  /* A leading '@' marks a small-data symbol -- presumably written by
     this port's section-info encoding; confirm in rs6000's
     ENCODE_SECTION_INFO.  */
  if (*XSTR (sym_ref, 0) != '@')
    return 0;

  return 1;

#else
  return 0;
#endif
}
2006 \f
/* Recursive worker for constant_pool_expr_p and toc_relative_expr_p.
   Walk OP; set *HAVE_SYM if a special constant-pool SYMBOL_REF is
   seen and *HAVE_TOC if the TOC label is seen.  Return 1 if every
   leaf of OP is acceptable (a qualifying symbol, the TOC label, or a
   CONST_INT).  Each switch case returns, so there is no fallthrough.  */
static int
constant_pool_expr_1 (op, have_sym, have_toc)
    rtx op;
    int *have_sym;
    int *have_toc;
{
  switch (GET_CODE(op))
    {
    case SYMBOL_REF:
      if (CONSTANT_POOL_ADDRESS_P (op))
	{
	  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
	    {
	      *have_sym = 1;
	      return 1;
	    }
	  else
	    return 0;
	}
      else if (! strcmp (XSTR (op, 0), toc_label_name))
	{
	  *have_toc = 1;
	  return 1;
	}
      else
	return 0;
    case PLUS:
    case MINUS:
      /* Both halves must qualify.  */
      return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
	      && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
    case CONST:
      return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
    case CONST_INT:
      return 1;
    default:
      return 0;
    }
}
2045
2046 int
2047 constant_pool_expr_p (op)
2048 rtx op;
2049 {
2050 int have_sym = 0;
2051 int have_toc = 0;
2052 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2053 }
2054
2055 int
2056 toc_relative_expr_p (op)
2057 rtx op;
2058 {
2059 int have_sym = 0;
2060 int have_toc = 0;
2061 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2062 }
2063
/* Try machine-dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new, valid address.
   This is used from only one place: `memory_address' in explow.c.

   OLDX is the address as it was before break_out_memory_refs was
   called.  In some cases it is useful to look at this to decide what
   needs to be done.

   MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.

   It is always safe for this function to do nothing.  It exists to
   recognize opportunities to optimize the output.

   On RS/6000, first check for the sum of a register with a constant
   integer that is out of range.  If so, generate code to add the
   constant with the low-order 16 bits masked to the register and force
   this result into another register (this can be done with `cau').
   Then generate an address of REG+(CONST&0xffff), allowing for the
   possibility of bit 16 being a one.

   Then check for the sum of a register and something not constant, try to
   load the other things into a register and return the sum.

   Returns NULL_RTX when no transformation applies.  */
rtx
rs6000_legitimize_address (x, oldx, mode)
     rtx x;
     rtx oldx ATTRIBUTE_UNUSED;
     enum machine_mode mode;
{
  /* reg + out-of-range constant: split the constant into a high part
     that is added into a new register, and a sign-extended low 16 bits
     left in the address as a displacement.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
    {
      HOST_WIDE_INT high_int, low_int;
      rtx sum;
      /* low_int is the constant's low 16 bits sign-extended; high_int
	 absorbs the remainder including the carry out of bit 15.  */
      low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
      high_int = INTVAL (XEXP (x, 1)) - low_int;
      sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
					 GEN_INT (high_int)), 0);
      return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
    }
  /* reg + non-constant: force the addend into a register so the result
     is an indexed (reg+reg) address.  The mode tests exclude the
     multi-register cases that cannot use indexed addressing.  */
  else if (GET_CODE (x) == PLUS
	   && GET_CODE (XEXP (x, 0)) == REG
	   && GET_CODE (XEXP (x, 1)) != CONST_INT
	   && GET_MODE_NUNITS (mode) == 1
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	       || TARGET_POWERPC64
	       || mode != DFmode)
	   && (TARGET_POWERPC64 || mode != DImode)
	   && mode != TImode)
    {
      return gen_rtx_PLUS (Pmode, XEXP (x, 0),
			   force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
    }
  else if (ALTIVEC_VECTOR_MODE (mode))
    {
      rtx reg;

      /* Make sure both operands are registers.  */
      if (GET_CODE (x) == PLUS)
	return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
			     force_reg (Pmode, XEXP (x, 1)));

      reg = force_reg (Pmode, x);
      return reg;
    }
  else if (SPE_VECTOR_MODE (mode))
    {
      /* We accept [reg + reg] and [reg + OFFSET].  */

      if (GET_CODE (x) == PLUS)
	{
	  rtx op1 = XEXP (x, 0);
	  rtx op2 = XEXP (x, 1);

	  op1 = force_reg (Pmode, op1);

	  /* A constant addend is kept only if it is in the range the
	     SPE load/store offset field accepts; otherwise it goes
	     into a register too.  */
	  if (GET_CODE (op2) != REG
	      && (GET_CODE (op2) != CONST_INT
		  || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
	    op2 = force_reg (Pmode, op2);

	  return gen_rtx_PLUS (Pmode, op1, op2);
	}

      return force_reg (Pmode, x);
    }
  /* 32-bit ELF without a TOC, non-PIC: materialize a symbolic constant
     address with a HIGH/LO_SUM pair.  */
  else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && GET_MODE_NUNITS (mode) == 1
	   && (GET_MODE_BITSIZE (mode) <= 32
	       || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_elf_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  /* Same idea for Darwin (Mach-O) without a TOC.  */
  else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
	   && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
	   && mode != DImode
	   && mode != TImode)
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_macho_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  /* A constant-pool expression whose entry lives in the TOC becomes a
     TOC-relative reference.  */
  else if (TARGET_TOC
	   && CONSTANT_POOL_EXPR_P (x)
	   && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
    {
      return create_TOC_reference (x);
    }
  else
    /* Nothing applied; caller falls back to generic legitimization.  */
    return NULL_RTX;
}
2185
2186 /* The convention appears to be to define this wherever it is used.
2187 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2188 is now used here. */
2189 #ifndef REG_MODE_OK_FOR_BASE_P
2190 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2191 #endif
2192
/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS.  Returns a value to
   replace the input X, or the original X if no replacement is called for.
   The output parameter *WIN is 1 if the calling macro should goto WIN,
   0 if it should not.

   For RS/6000, we wish to handle large displacements off a base
   register by splitting the addend across an addiu/addis and the mem insn.
   This cuts number of extra insns needed from 3 to 1.

   On Darwin, we use this to generate code for floating point constants.
   A movsf_low is generated so we wind up with 2 instructions rather than 3.
   The Darwin code is inside #if TARGET_MACHO because only then is
   machopic_function_base_name() defined.  */
rtx
rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
    rtx x;
    enum machine_mode mode;
    int opnum;
    int type;	/* really an enum reload_type; int avoids a header dependency */
    int ind_levels ATTRIBUTE_UNUSED;
    int *win;
{
  /* We must recognize output that we have already generated ourselves.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      /* (reg + high) + low: reload the inner sum into a base register
	 and keep the low displacement in the address.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN && flag_pic
      && GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
      && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
    {
      /* Result of previous invocation of this function on Darwin
	 floating point constant.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* reg + large constant: split the displacement so the low 16 bits
     stay in the mem insn and the high part is reloaded with the base.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && !SPE_VECTOR_MODE (mode)
      && !ALTIVEC_VECTOR_MODE (mode))
    {
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      /* Low 16 bits, sign-extended.  */
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      /* Remaining part, sign-extended to 32 bits.  */
      HOST_WIDE_INT high
	= (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
	{
	  *win = 0;
	  return x;
	}

      /* Reload the high part into a base reg; leave the low part
	 in the mem directly.  */

      x = gen_rtx_PLUS (GET_MODE (x),
			gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
				      GEN_INT (high)),
			GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#if TARGET_MACHO
  if (GET_CODE (x) == SYMBOL_REF
      && DEFAULT_ABI == ABI_DARWIN
      && !ALTIVEC_VECTOR_MODE (mode)
      && flag_pic)
    {
      /* Darwin load of floating point constant.  Build
	 (symbol - picbase) and wrap it as pic_base + HIGH/LO_SUM;
	 the PLUS part is then reloaded into a base register.  */
      rtx offset = gen_rtx (CONST, Pmode,
		    gen_rtx (MINUS, Pmode, x,
		      gen_rtx (SYMBOL_REF, Pmode,
			machopic_function_base_name ())));
      x = gen_rtx (LO_SUM, GET_MODE (x),
	    gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
	      gen_rtx (HIGH, Pmode, offset)), offset);
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* Constant-pool expression whose entry lives in the TOC: rewrite it
     as a TOC-relative reference; no reload is pushed.  */
  if (TARGET_TOC
      && CONSTANT_POOL_EXPR_P (x)
      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
    {
      (x) = create_TOC_reference (x);
      *win = 1;
      return x;
    }
  *win = 0;
  return x;
}
2316
2317 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2318 that is a valid memory address for an instruction.
2319 The MODE argument is the machine mode for the MEM expression
2320 that wants to use this address.
2321
2322 On the RS/6000, there are four valid address: a SYMBOL_REF that
2323 refers to a constant pool entry of an address (or the sum of it
2324 plus a constant), a short (16-bit signed) constant plus a register,
2325 the sum of two registers, or a register indirect, possibly with an
2326 auto-increment. For DFmode and DImode with an constant plus register,
2327 we must ensure that both words are addressable or PowerPC64 with offset
2328 word aligned.
2329
2330 For modes spanning multiple registers (DFmode in 32-bit GPRs,
2331 32-bit DImode, TImode), indexed addressing cannot be used because
2332 adjacent memory cells are accessed by adding word-sized offsets
2333 during assembly output. */
2334 int
2335 rs6000_legitimate_address (mode, x, reg_ok_strict)
2336 enum machine_mode mode;
2337 rtx x;
2338 int reg_ok_strict;
2339 {
2340 if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
2341 return 1;
2342 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2343 && !ALTIVEC_VECTOR_MODE (mode)
2344 && !SPE_VECTOR_MODE (mode)
2345 && TARGET_UPDATE
2346 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
2347 return 1;
2348 if (LEGITIMATE_SMALL_DATA_P (mode, x))
2349 return 1;
2350 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
2351 return 1;
2352 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2353 if (! reg_ok_strict
2354 && GET_CODE (x) == PLUS
2355 && GET_CODE (XEXP (x, 0)) == REG
2356 && XEXP (x, 0) == virtual_stack_vars_rtx
2357 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2358 return 1;
2359 if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
2360 return 1;
2361 if (mode != TImode
2362 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2363 || TARGET_POWERPC64
2364 || mode != DFmode)
2365 && (TARGET_POWERPC64 || mode != DImode)
2366 && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
2367 return 1;
2368 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
2369 return 1;
2370 return 0;
2371 }
2372 \f
2373 /* Try to output insns to set TARGET equal to the constant C if it can
2374 be done in less than N insns. Do all computations in MODE.
2375 Returns the place where the output has been placed if it can be
2376 done and the insns have been emitted. If it would take more than N
2377 insns, zero is returned and no insns and emitted. */
2378
2379 rtx
2380 rs6000_emit_set_const (dest, mode, source, n)
2381 rtx dest, source;
2382 enum machine_mode mode;
2383 int n ATTRIBUTE_UNUSED;
2384 {
2385 rtx result, insn, set;
2386 HOST_WIDE_INT c0, c1;
2387
2388 if (mode == QImode || mode == HImode)
2389 {
2390 if (dest == NULL)
2391 dest = gen_reg_rtx (mode);
2392 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
2393 return dest;
2394 }
2395 else if (mode == SImode)
2396 {
2397 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
2398
2399 emit_insn (gen_rtx_SET (VOIDmode, result,
2400 GEN_INT (INTVAL (source)
2401 & (~ (HOST_WIDE_INT) 0xffff))));
2402 emit_insn (gen_rtx_SET (VOIDmode, dest,
2403 gen_rtx_IOR (SImode, result,
2404 GEN_INT (INTVAL (source) & 0xffff))));
2405 result = dest;
2406 }
2407 else if (mode == DImode)
2408 {
2409 if (GET_CODE (source) == CONST_INT)
2410 {
2411 c0 = INTVAL (source);
2412 c1 = -(c0 < 0);
2413 }
2414 else if (GET_CODE (source) == CONST_DOUBLE)
2415 {
2416 #if HOST_BITS_PER_WIDE_INT >= 64
2417 c0 = CONST_DOUBLE_LOW (source);
2418 c1 = -(c0 < 0);
2419 #else
2420 c0 = CONST_DOUBLE_LOW (source);
2421 c1 = CONST_DOUBLE_HIGH (source);
2422 #endif
2423 }
2424 else
2425 abort ();
2426
2427 result = rs6000_emit_set_long_const (dest, c0, c1);
2428 }
2429 else
2430 abort ();
2431
2432 insn = get_last_insn ();
2433 set = single_set (insn);
2434 if (! CONSTANT_P (SET_SRC (set)))
2435 set_unique_reg_note (insn, REG_EQUAL, source);
2436
2437 return result;
2438 }
2439
/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
   fall back to a straight forward decomposition.  We do this to avoid
   exponential run times encountered when looking for longer sequences
   with rs6000_emit_set_const.

   C1 is the low word of the constant, C2 the high word (ignored when
   HOST_WIDE_INT is 64 bits wide and TARGET_POWERPC64, where C1 holds
   the whole value).  Returns DEST.  */
static rtx
rs6000_emit_set_long_const (dest, c1, c2)
     rtx dest;
     HOST_WIDE_INT c1, c2;
{
  /* Without 64-bit registers, a DImode constant is simply two
     independent SImode moves into the subwords of DEST.  */
  if (!TARGET_POWERPC64)
    {
      rtx operand1, operand2;

      operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
					DImode);
      operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
					DImode);
      emit_move_insn (operand1, GEN_INT (c1));
      emit_move_insn (operand2, GEN_INT (c2));
    }
  else
    {
      HOST_WIDE_INT ud1, ud2, ud3, ud4;

      /* Split the 64-bit value into four 16-bit chunks, ud1 lowest.  */
      ud1 = c1 & 0xffff;
      ud2 = (c1 & 0xffff0000) >> 16;
#if HOST_BITS_PER_WIDE_INT >= 64
      c2 = c1 >> 32;
#endif
      ud3 = c2 & 0xffff;
      ud4 = (c2 & 0xffff0000) >> 16;

      /* Value is a sign-extended 16-bit quantity: single li.  */
      if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
	  || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
	{
	  if (ud1 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) -  0x8000)));
	  else
	    emit_move_insn (dest, GEN_INT (ud1));
	}

      /* Value is a sign-extended 32-bit quantity: lis + optional ori.
	 The XOR/subtract dance sign-extends the 32-bit value into a
	 HOST_WIDE_INT without overflow.  */
      else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
	       || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
	{
	  if (ud2 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud2 << 16));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      /* Value is a sign-extended 48-bit quantity: build the top 32 bits
	 (of the 48), shift left 16, then OR in the bottom chunk.  */
      else if ((ud4 == 0xffff && (ud3 & 0x8000))
	       || (ud4 == 0 && ! (ud3 & 0x8000)))
	{
	  if (ud3 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud3 << 16));

	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      /* General case: build the high 32 bits, shift left 32, then OR
	 in the low two chunks.  */
      else
	{
	  if (ud4 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud4 << 16));

	  if (ud3 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));

	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
					       GEN_INT (ud2 << 16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
    }
  return dest;
}
2528
/* Emit a move from SOURCE to DEST in mode MODE.  Legitimizes constant
   and memory operands as needed (forcing hard constants to the
   constant pool / TOC, splitting slow unaligned DImode block moves,
   etc.) before emitting the final SET.  */
void
rs6000_emit_move (dest, source, mode)
     rtx dest;
     rtx source;
     enum machine_mode mode;
{
  rtx operands[2];
  operands[0] = dest;
  operands[1] = source;

  /* Sanity checks.  Check that we get CONST_DOUBLE only when we should.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      /* FIXME.  This should never happen.  */
      /* Since it seems that it does, do the safe thing and convert
	 to a CONST_INT.  */
      operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
    }
  /* A non-float CONST_DOUBLE whose value is merely a sign-extended
     low word should have been canonicalized to a CONST_INT.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && ((CONST_DOUBLE_HIGH (operands[1]) == 0
	   && CONST_DOUBLE_LOW (operands[1]) >= 0)
	  || (CONST_DOUBLE_HIGH (operands[1]) == -1
	      && CONST_DOUBLE_LOW (operands[1]) < 0)))
    abort ();

  /* Check if GCC is setting up a block move that will end up using FP
     registers as temporaries.  We must make sure this is acceptable.  */
  if (GET_CODE (operands[0]) == MEM
      && GET_CODE (operands[1]) == MEM
      && mode == DImode
      && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
	  || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
      && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
					    ? 32 : MEM_ALIGN (operands[0])))
	    || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
					       ? 32
					       : MEM_ALIGN (operands[1]))))
      && ! MEM_VOLATILE_P (operands [0])
      && ! MEM_VOLATILE_P (operands [1]))
    {
      /* Do the copy as two SImode moves instead, avoiding the slow
	 unaligned DImode (FP-register) access.  */
      emit_move_insn (adjust_address (operands[0], SImode, 0),
		      adjust_address (operands[1], SImode, 0));
      emit_move_insn (adjust_address (operands[0], SImode, 4),
		      adjust_address (operands[1], SImode, 4));
      return;
    }

  /* When storing, force the source into a register first (while we
     may still create pseudos).  */
  if (! no_new_pseudos && GET_CODE (operands[0]) != REG)
    operands[1] = force_reg (mode, operands[1]);

  if (mode == SFmode && ! TARGET_POWERPC
      && TARGET_HARD_FLOAT && TARGET_FPRS
      && GET_CODE (operands[0]) == MEM)
    {
      int regnum;

      if (reload_in_progress || reload_completed)
	regnum = true_regnum (operands[1]);
      else if (GET_CODE (operands[1]) == REG)
	regnum = REGNO (operands[1]);
      else
	regnum = -1;

      /* If operands[1] is a register, on POWER it may have
	 double-precision data in it, so truncate it to single
	 precision.  */
      if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
	{
	  rtx newreg;
	  newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
	  emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
	  operands[1] = newreg;
	}
    }

  /* Handle the case where reload calls us with an invalid address;
     and the case of CONSTANT_P_RTX.  */
  if (!ALTIVEC_VECTOR_MODE (mode)
      && (! general_operand (operands[1], mode)
	  || ! nonimmediate_operand (operands[0], mode)
	  || GET_CODE (operands[1]) == CONSTANT_P_RTX))
    {
      emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
      return;
    }

  /* FIXME:  In the long term, this switch statement should go away
     and be replaced by a sequence of tests based on things like
     mode == Pmode.  */
  switch (mode)
    {
    case HImode:
    case QImode:
      /* Non-CONST_INT constants must come from memory.  */
      if (CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != CONST_INT)
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case TFmode:
    case DFmode:
    case SFmode:
      /* FP constants that cannot be synthesized cheaply go to the
	 constant pool.  */
      if (CONSTANT_P (operands[1])
	  && ! easy_fp_constant (operands[1], mode))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case V16QImode:
    case V8HImode:
    case V4SFmode:
    case V4SImode:
    case V4HImode:
    case V2SFmode:
    case V2SImode:
      /* Likewise for vector constants.  */
      if (CONSTANT_P (operands[1])
	  && !easy_vector_constant (operands[1]))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case SImode:
    case DImode:
      /* Use default pattern for address of ELF small data */
      if (TARGET_ELF
	  && mode == Pmode
	  && DEFAULT_ABI == ABI_V4
	  && (GET_CODE (operands[1]) == SYMBOL_REF
	      || GET_CODE (operands[1]) == CONST)
	  && small_data_operand (operands[1], mode))
	{
	  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	  return;
	}

      /* V.4 small-PIC: load the address through the GOT.  */
      if (DEFAULT_ABI == ABI_V4
	  && mode == Pmode && mode == SImode
	  && flag_pic == 1 && got_operand (operands[1], mode))
	{
	  emit_insn (gen_movsi_got (operands[0], operands[1]));
	  return;
	}

      /* No TOC, non-PIC (ELF or Darwin): build the constant address
	 from a HIGH/LOW pair.  */
      if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
	  && TARGET_NO_TOC && ! flag_pic
	  && mode == Pmode
	  && CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != HIGH
	  && GET_CODE (operands[1]) != CONST_INT)
	{
	  rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));

	  /* If this is a function address on -mcall-aixdesc,
	     convert it to the address of the descriptor.  */
	  if (DEFAULT_ABI == ABI_AIX
	      && GET_CODE (operands[1]) == SYMBOL_REF
	      && XSTR (operands[1], 0)[0] == '.')
	    {
	      const char *name = XSTR (operands[1], 0);
	      rtx new_ref;
	      /* Strip the leading dots to get the descriptor name.  */
	      while (*name == '.')
		name++;
	      new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
	      CONSTANT_POOL_ADDRESS_P (new_ref)
		= CONSTANT_POOL_ADDRESS_P (operands[1]);
	      SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
	      SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
	      operands[1] = new_ref;
	    }

	  if (DEFAULT_ABI == ABI_DARWIN)
	    {
	      emit_insn (gen_macho_high (target, operands[1]));
	      emit_insn (gen_macho_low (operands[0], target, operands[1]));
	      return;
	    }

	  emit_insn (gen_elf_high (target, operands[1]));
	  emit_insn (gen_elf_low (operands[0], target, operands[1]));
	  return;
	}

      /* If this is a SYMBOL_REF that refers to a constant pool entry,
	 and we have put it in the TOC, we just need to make a TOC-relative
	 reference to it.  */
      if (TARGET_TOC
	  && GET_CODE (operands[1]) == SYMBOL_REF
	  && CONSTANT_POOL_EXPR_P (operands[1])
	  && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
					      get_pool_mode (operands[1])))
	{
	  operands[1] = create_TOC_reference (operands[1]);
	}
      /* Other expensive constants (more than 2 insns, or destined for
	 an FP register) are forced into the constant pool.  */
      else if (mode == Pmode
	       && CONSTANT_P (operands[1])
	       && ((GET_CODE (operands[1]) != CONST_INT
		    && ! easy_fp_constant (operands[1], mode))
		   || (GET_CODE (operands[1]) == CONST_INT
		       && num_insns_constant (operands[1], mode) > 2)
		   || (GET_CODE (operands[0]) == REG
		       && FP_REGNO_P (REGNO (operands[0]))))
	       && GET_CODE (operands[1]) != HIGH
	       && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
	       && ! TOC_RELATIVE_EXPR_P (operands[1]))
	{
	  /* Emit a USE operation so that the constant isn't deleted if
	     expensive optimizations are turned on because nobody
	     references it.  This should only be done for operands that
	     contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
	     This should not be done for operands that contain LABEL_REFs.
	     For now, we just handle the obvious case.  */
	  if (GET_CODE (operands[1]) != LABEL_REF)
	    emit_insn (gen_rtx_USE (VOIDmode, operands[1]));

#if TARGET_MACHO
	  /* Darwin uses a special PIC legitimizer.  */
	  if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
	    {
	      operands[1] =
		rs6000_machopic_legitimize_pic_address (operands[1], mode,
							operands[0]);
	      if (operands[0] != operands[1])
		emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	      return;
	    }
#endif

	  /* If we are to limit the number of things we put in the TOC and
	     this is a symbol plus a constant we can add in one insn,
	     just put the symbol in the TOC and add the constant.  Don't do
	     this if reload is in progress.  */
	  if (GET_CODE (operands[1]) == CONST
	      && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
	      && GET_CODE (XEXP (operands[1], 0)) == PLUS
	      && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
	      && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
		  || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
	      && ! side_effects_p (operands[0]))
	    {
	      rtx sym =
		force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
	      rtx other = XEXP (XEXP (operands[1], 0), 1);

	      sym = force_reg (mode, sym);
	      if (mode == SImode)
		emit_insn (gen_addsi3 (operands[0], sym, other));
	      else
		emit_insn (gen_adddi3 (operands[0], sym, other));
	      return;
	    }

	  operands[1] = force_const_mem (mode, operands[1]);

	  /* If the pool entry landed in the TOC, reference it via a
	     TOC-relative unchanging MEM.  */
	  if (TARGET_TOC
	      && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
	      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
		get_pool_constant (XEXP (operands[1], 0)),
		get_pool_mode (XEXP (operands[1], 0))))
	    {
	      operands[1]
		= gen_rtx_MEM (mode,
			       create_TOC_reference (XEXP (operands[1], 0)));
	      set_mem_alias_set (operands[1], get_TOC_alias_set ());
	      RTX_UNCHANGING_P (operands[1]) = 1;
	    }
	}
      break;

    case TImode:
      /* Force non-register TImode memory addresses into a register
	 (register indirect).  */
      if (GET_CODE (operands[0]) == MEM
	  && GET_CODE (XEXP (operands[0], 0)) != REG
	  && ! reload_in_progress)
	operands[0]
	  = replace_equiv_address (operands[0],
				   copy_addr_to_reg (XEXP (operands[0], 0)));

      if (GET_CODE (operands[1]) == MEM
	  && GET_CODE (XEXP (operands[1], 0)) != REG
	  && ! reload_in_progress)
	operands[1]
	  = replace_equiv_address (operands[1],
				   copy_addr_to_reg (XEXP (operands[1], 0)));
      break;

    default:
      abort ();
    }

  /* Above, we may have called force_const_mem which may have returned
     an invalid address.  If we can, fix this up; otherwise, reload will
     have to deal with it.  */
  if (GET_CODE (operands[1]) == MEM
      && ! memory_address_p (mode, XEXP (operands[1], 0))
      && ! reload_in_progress)
    operands[1] = adjust_address (operands[1], mode, 0);

  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
  return;
}
2829 \f
2830 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2831 for a call to a function whose data type is FNTYPE.
2832 For a library call, FNTYPE is 0.
2833
2834 For incoming args we set the number of arguments in the prototype large
2835 so we never return a PARALLEL. */
2836
2837 void
2838 init_cumulative_args (cum, fntype, libname, incoming)
2839 CUMULATIVE_ARGS *cum;
2840 tree fntype;
2841 rtx libname ATTRIBUTE_UNUSED;
2842 int incoming;
2843 {
2844 static CUMULATIVE_ARGS zero_cumulative;
2845
2846 *cum = zero_cumulative;
2847 cum->words = 0;
2848 cum->fregno = FP_ARG_MIN_REG;
2849 cum->vregno = ALTIVEC_ARG_MIN_REG;
2850 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
2851 cum->call_cookie = CALL_NORMAL;
2852 cum->sysv_gregno = GP_ARG_MIN_REG;
2853
2854 if (incoming)
2855 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
2856
2857 else if (cum->prototype)
2858 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
2859 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
2860 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
2861
2862 else
2863 cum->nargs_prototype = 0;
2864
2865 cum->orig_nargs = cum->nargs_prototype;
2866
2867 /* Check for a longcall attribute. */
2868 if (fntype
2869 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
2870 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
2871 cum->call_cookie = CALL_LONG;
2872
2873 if (TARGET_DEBUG_ARG)
2874 {
2875 fprintf (stderr, "\ninit_cumulative_args:");
2876 if (fntype)
2877 {
2878 tree ret_type = TREE_TYPE (fntype);
2879 fprintf (stderr, " ret code = %s,",
2880 tree_code_name[ (int)TREE_CODE (ret_type) ]);
2881 }
2882
2883 if (cum->call_cookie & CALL_LONG)
2884 fprintf (stderr, " longcall,");
2885
2886 fprintf (stderr, " proto = %d, nargs = %d\n",
2887 cum->prototype, cum->nargs_prototype);
2888 }
2889 }
2890 \f
2891 /* If defined, a C expression which determines whether, and in which
2892 direction, to pad out an argument with extra space. The value
2893 should be of type `enum direction': either `upward' to pad above
2894 the argument, `downward' to pad below, or `none' to inhibit
2895 padding.
2896
2897 For the AIX ABI structs are always stored left shifted in their
2898 argument slot. */
2899
2900 enum direction
2901 function_arg_padding (mode, type)
2902 enum machine_mode mode;
2903 tree type;
2904 {
2905 if (type != 0 && AGGREGATE_TYPE_P (type))
2906 return upward;
2907
2908 /* This is the default definition. */
2909 return (! BYTES_BIG_ENDIAN
2910 ? upward
2911 : ((mode == BLKmode
2912 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2913 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
2914 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
2915 ? downward : upward));
2916 }
2917
2918 /* If defined, a C expression that gives the alignment boundary, in bits,
2919 of an argument with the specified mode and type. If it is not defined,
2920 PARM_BOUNDARY is used for all arguments.
2921
2922 V.4 wants long longs to be double word aligned. */
2923
2924 int
2925 function_arg_boundary (mode, type)
2926 enum machine_mode mode;
2927 tree type ATTRIBUTE_UNUSED;
2928 {
2929 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
2930 return 64;
2931 else if (SPE_VECTOR_MODE (mode))
2932 return 64;
2933 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2934 return 128;
2935 else
2936 return PARM_BOUNDARY;
2937 }
2938 \f
/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)
   NAMED is nonzero for a named (non-vararg) parameter.  */

void
function_arg_advance (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;
     tree type;
     int named;
{
  cum->nargs_prototype--;

  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      /* AltiVec args consume vector registers while any remain (and
	 the arg is within the prototype); otherwise stack words.  */
      if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
	cum->vregno++;
      else
	cum->words += RS6000_ARG_SIZE (mode, type);
    }
  else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
	   && named && cum->sysv_gregno <= GP_ARG_MAX_REG)
    /* Named SPE vector args take a single GPR slot.  */
    cum->sysv_gregno++;
  else if (DEFAULT_ABI == ABI_V4)
    {
      if (TARGET_HARD_FLOAT && TARGET_FPRS
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    cum->fregno++;
	  else
	    {
	      /* FP regs exhausted; doubles are doubleword aligned on
		 the stack, so pad to an even word first.  */
	      if (mode == DFmode)
		cum->words += cum->words & 1;
	      cum->words += RS6000_ARG_SIZE (mode, type);
	    }
	}
      else
	{
	  int n_words;
	  int gregno = cum->sysv_gregno;

	  /* Aggregates and IEEE quad get passed by reference.  */
	  if ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode)
	    n_words = 1;
	  else
	    n_words = RS6000_ARG_SIZE (mode, type);

	  /* Long long and SPE vectors are put in odd registers.  */
	  if (n_words == 2 && (gregno & 1) == 0)
	    gregno += 1;

	  /* Long long and SPE vectors are not split between registers
	     and stack.  */
	  if (gregno + n_words - 1 > GP_ARG_MAX_REG)
	    {
	      /* Long long is aligned on the stack.  */
	      if (n_words == 2)
		cum->words += cum->words & 1;
	      cum->words += n_words;
	    }

	  /* Note: continuing to accumulate gregno past when we've started
	     spilling to the stack indicates the fact that we've started
	     spilling to the stack to expand_builtin_saveregs.  */
	  cum->sysv_gregno = gregno + n_words;
	}

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
		   cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
	  fprintf (stderr, "mode = %4s, named = %d\n",
		   GET_MODE_NAME (mode), named);
	}
    }
  else
    {
      /* AIX/Darwin: everything is counted in words; on 32-bit, a
	 doubleword-aligned arg at an odd word offset consumes one
	 extra pad word.  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
		   && function_arg_boundary (mode, type) == 64) ? 1 : 0;

      cum->words += align + RS6000_ARG_SIZE (mode, type);

      if (GET_MODE_CLASS (mode) == MODE_FLOAT
	  && TARGET_HARD_FLOAT && TARGET_FPRS)
	cum->fregno++;

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
		   cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
	  fprintf (stderr, "named = %d, align = %d\n", named, align);
	}
    }
}
3039 \f
/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).

   On RS/6000 the first eight words of non-FP are normally in registers
   and the rest are pushed.  Under AIX, the first 13 FP args are in registers.
   Under V.4, the first 8 FP args are in registers.

   If this is floating-point and no prototype is specified, we use
   both an FP and integer register (or possibly FP reg and stack).  Library
   functions (when TYPE is zero) always have the proper types for args,
   so we can pass the FP value just in one register.  emit_library_function
   doesn't support PARALLEL anyway.  */

struct rtx_def *
function_arg (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;
     tree type;
     int named;
{
  enum rs6000_abi abi = DEFAULT_ABI;

  /* Return a marker to indicate whether CR1 needs to set or clear the
     bit that V.4 uses to say fp args were passed in registers.
     Assume that we don't need the marker for software floating point,
     or compiler generated library calls.  */
  if (mode == VOIDmode)
    {
      if (abi == ABI_V4
	  && cum->nargs_prototype < 0
	  && type && (cum->prototype || TARGET_NO_PROTOTYPE))
	{
	  /* For the SPE, we need to crxor CR6 always.  */
	  if (TARGET_SPE_ABI)
	    return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
	  else if (TARGET_HARD_FLOAT && TARGET_FPRS)
	    /* fregno still at FP_ARG_MIN_REG means no FP args were
	       taken, so CR1 must say "FP args in registers" only when
	       at least one FPR was consumed.  NOTE(review): the sense
	       here looks inverted relative to the comparison — confirm
	       against the V.4 ABI CR1 convention.  */
	    return GEN_INT (cum->call_cookie
			    | ((cum->fregno == FP_ARG_MIN_REG)
			       ? CALL_V4_SET_FP_ARGS
			       : CALL_V4_CLEAR_FP_ARGS));
	}

      return GEN_INT (cum->call_cookie);
    }

  /* AltiVec vector arguments: named ones go in vector registers while
     any remain; otherwise (unnamed, or VRs exhausted) they go in memory.  */
  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
	return gen_rtx_REG (mode, cum->vregno);
      else
	return NULL;
    }
  /* Named SPE vector arguments go in the V.4 GPR sequence.  */
  else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode) && named)
    {
      if (cum->sysv_gregno <= GP_ARG_MAX_REG)
	return gen_rtx_REG (mode, cum->sysv_gregno);
      else
	return NULL;
    }
  else if (abi == ABI_V4)
    {
      /* Under V.4, SF/DF args use the dedicated FP argument registers.  */
      if (TARGET_HARD_FLOAT && TARGET_FPRS
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    return gen_rtx_REG (mode, cum->fregno);
	  else
	    return NULL;
	}
      else
	{
	  int n_words;
	  int gregno = cum->sysv_gregno;

	  /* Aggregates and IEEE quad get passed by reference.  */
	  if ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode)
	    n_words = 1;
	  else
	    n_words = RS6000_ARG_SIZE (mode, type);

	  /* Long long and SPE vectors are put in odd registers.  */
	  if (n_words == 2 && (gregno & 1) == 0)
	    gregno += 1;

	  /* Long long and SPE vectors are not split between registers
	     and stack.  */
	  if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
	    {
	      /* SPE vectors in ... get split into 2 registers.  */
	      if (TARGET_SPE && TARGET_SPE_ABI
		  && SPE_VECTOR_MODE (mode) && !named)
		{
		  rtx r1, r2;
		  enum machine_mode m = GET_MODE_INNER (mode);

		  /* Each half of the vector lands in consecutive GPRs;
		     the second half sits 4 bytes into the argument.  */
		  r1 = gen_rtx_REG (m, gregno);
		  r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
		  r2 = gen_rtx_REG (m, gregno + 1);
		  r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
		  return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
		}
	      return gen_rtx_REG (mode, gregno);
	    }
	  else
	    return NULL;
	}
    }
  else
    {
      /* AIX (and Darwin-style) ABIs: skip one word when the argument
	 requires doubleword alignment and we are at an odd word.  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
		   && function_arg_boundary (mode, type) == 64) ? 1 : 0;
      int align_words = cum->words + align;

      /* Variable-sized types always go in memory.  */
      if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	return NULL_RTX;

      if (USE_FP_FOR_ARG_P (*cum, mode, type))
	{
	  if (! type
	      || ((cum->nargs_prototype > 0)
	          /* IBM AIX extended its linkage convention definition always
		     to require FP args after register save area hole on the
		     stack.  */
	          && (DEFAULT_ABI != ABI_AIX
		      || ! TARGET_XL_CALL
		      || (align_words < GP_ARG_NUM_REG))))
	    return gen_rtx_REG (mode, cum->fregno);

	  /* Unprototyped (or XL-call) FP argument: describe it in both
	     the FP register and the GPR/stack slot, so either convention
	     works at the call site.  */
          return gen_rtx_PARALLEL (mode,
	    gen_rtvec (2,
		       gen_rtx_EXPR_LIST (VOIDmode,
				((align_words >= GP_ARG_NUM_REG)
				 ? NULL_RTX
				 : (align_words
				    + RS6000_ARG_SIZE (mode, type)
				    > GP_ARG_NUM_REG
				    /* If this is partially on the stack, then
				       we only include the portion actually
				       in registers here.  */
				    ? gen_rtx_REG (SImode,
					       GP_ARG_MIN_REG + align_words)
				    : gen_rtx_REG (mode,
					       GP_ARG_MIN_REG + align_words))),
				const0_rtx),
		       gen_rtx_EXPR_LIST (VOIDmode,
					  gen_rtx_REG (mode, cum->fregno),
					  const0_rtx)));
	}
      else if (align_words < GP_ARG_NUM_REG)
	return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
      else
	return NULL_RTX;
    }
}
3205 \f
3206 /* For an arg passed partly in registers and partly in memory,
3207 this is the number of registers used.
3208 For args passed entirely in registers or entirely in memory, zero. */
3209
3210 int
3211 function_arg_partial_nregs (cum, mode, type, named)
3212 CUMULATIVE_ARGS *cum;
3213 enum machine_mode mode;
3214 tree type;
3215 int named ATTRIBUTE_UNUSED;
3216 {
3217 if (DEFAULT_ABI == ABI_V4)
3218 return 0;
3219
3220 if (USE_FP_FOR_ARG_P (*cum, mode, type)
3221 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
3222 {
3223 if (cum->nargs_prototype >= 0)
3224 return 0;
3225 }
3226
3227 if (cum->words < GP_ARG_NUM_REG
3228 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
3229 {
3230 int ret = GP_ARG_NUM_REG - cum->words;
3231 if (ret && TARGET_DEBUG_ARG)
3232 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
3233
3234 return ret;
3235 }
3236
3237 return 0;
3238 }
3239 \f
3240 /* A C expression that indicates when an argument must be passed by
3241 reference. If nonzero for an argument, a copy of that argument is
3242 made in memory and a pointer to the argument is passed instead of
3243 the argument itself. The pointer is passed in whatever way is
3244 appropriate for passing a pointer to that type.
3245
3246 Under V.4, structures and unions are passed by reference. */
3247
3248 int
3249 function_arg_pass_by_reference (cum, mode, type, named)
3250 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
3251 enum machine_mode mode ATTRIBUTE_UNUSED;
3252 tree type;
3253 int named ATTRIBUTE_UNUSED;
3254 {
3255 if (DEFAULT_ABI == ABI_V4
3256 && ((type && AGGREGATE_TYPE_P (type))
3257 || mode == TFmode))
3258 {
3259 if (TARGET_DEBUG_ARG)
3260 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
3261
3262 return 1;
3263 }
3264
3265 return 0;
3266 }
3267 \f
3268 /* Perform any needed actions needed for a function that is receiving a
3269 variable number of arguments.
3270
3271 CUM is as above.
3272
3273 MODE and TYPE are the mode and type of the current parameter.
3274
3275 PRETEND_SIZE is a variable that should be set to the amount of stack
3276 that must be pushed by the prolog to pretend that our caller pushed
3277 it.
3278
3279 Normally, this macro will push all remaining incoming registers on the
3280 stack and set PRETEND_SIZE to the length of the registers pushed. */
3281
3282 void
3283 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
3284 CUMULATIVE_ARGS *cum;
3285 enum machine_mode mode;
3286 tree type;
3287 int *pretend_size;
3288 int no_rtl;
3289
3290 {
3291 CUMULATIVE_ARGS next_cum;
3292 int reg_size = TARGET_32BIT ? 4 : 8;
3293 rtx save_area = NULL_RTX, mem;
3294 int first_reg_offset, set;
3295 tree fntype;
3296 int stdarg_p;
3297
3298 fntype = TREE_TYPE (current_function_decl);
3299 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
3300 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3301 != void_type_node));
3302
3303 /* For varargs, we do not want to skip the dummy va_dcl argument.
3304 For stdargs, we do want to skip the last named argument. */
3305 next_cum = *cum;
3306 if (stdarg_p)
3307 function_arg_advance (&next_cum, mode, type, 1);
3308
3309 if (DEFAULT_ABI == ABI_V4)
3310 {
3311 /* Indicate to allocate space on the stack for varargs save area. */
3312 cfun->machine->sysv_varargs_p = 1;
3313 if (! no_rtl)
3314 save_area = plus_constant (virtual_stack_vars_rtx,
3315 - RS6000_VARARGS_SIZE);
3316
3317 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
3318 }
3319 else
3320 {
3321 first_reg_offset = next_cum.words;
3322 save_area = virtual_incoming_args_rtx;
3323 cfun->machine->sysv_varargs_p = 0;
3324
3325 if (MUST_PASS_IN_STACK (mode, type))
3326 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
3327 }
3328
3329 set = get_varargs_alias_set ();
3330 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
3331 {
3332 mem = gen_rtx_MEM (BLKmode,
3333 plus_constant (save_area,
3334 first_reg_offset * reg_size)),
3335 set_mem_alias_set (mem, set);
3336 set_mem_align (mem, BITS_PER_WORD);
3337
3338 move_block_from_reg
3339 (GP_ARG_MIN_REG + first_reg_offset, mem,
3340 GP_ARG_NUM_REG - first_reg_offset,
3341 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
3342
3343 /* ??? Does ABI_V4 need this at all? */
3344 *pretend_size = (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD;
3345 }
3346
3347 /* Save FP registers if needed. */
3348 if (DEFAULT_ABI == ABI_V4
3349 && TARGET_HARD_FLOAT && TARGET_FPRS
3350 && ! no_rtl
3351 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
3352 {
3353 int fregno = next_cum.fregno;
3354 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
3355 rtx lab = gen_label_rtx ();
3356 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
3357
3358 emit_jump_insn (gen_rtx_SET (VOIDmode,
3359 pc_rtx,
3360 gen_rtx_IF_THEN_ELSE (VOIDmode,
3361 gen_rtx_NE (VOIDmode, cr1,
3362 const0_rtx),
3363 gen_rtx_LABEL_REF (VOIDmode, lab),
3364 pc_rtx)));
3365
3366 while (fregno <= FP_ARG_V4_MAX_REG)
3367 {
3368 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
3369 set_mem_alias_set (mem, set);
3370 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
3371 fregno++;
3372 off += 8;
3373 }
3374
3375 emit_label (lab);
3376 }
3377 }
3378
3379 /* Create the va_list data type. */
3380
3381 tree
3382 rs6000_build_va_list ()
3383 {
3384 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
3385
3386 /* For AIX, prefer 'char *' because that's what the system
3387 header files like. */
3388 if (DEFAULT_ABI != ABI_V4)
3389 return build_pointer_type (char_type_node);
3390
3391 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3392 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
3393
3394 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
3395 unsigned_char_type_node);
3396 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
3397 unsigned_char_type_node);
3398 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
3399 ptr_type_node);
3400 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
3401 ptr_type_node);
3402
3403 DECL_FIELD_CONTEXT (f_gpr) = record;
3404 DECL_FIELD_CONTEXT (f_fpr) = record;
3405 DECL_FIELD_CONTEXT (f_ovf) = record;
3406 DECL_FIELD_CONTEXT (f_sav) = record;
3407
3408 TREE_CHAIN (record) = type_decl;
3409 TYPE_NAME (record) = type_decl;
3410 TYPE_FIELDS (record) = f_gpr;
3411 TREE_CHAIN (f_gpr) = f_fpr;
3412 TREE_CHAIN (f_fpr) = f_ovf;
3413 TREE_CHAIN (f_ovf) = f_sav;
3414
3415 layout_type (record);
3416
3417 /* The correct type is an array type of one element. */
3418 return build_array_type (record, build_index_type (size_zero_node));
3419 }
3420
/* Implement va_start.

   VALIST is the va_list expression being initialized; NEXTARG is the
   rtx for the first anonymous argument (used only by the generic
   expander on non-V.4 ABIs).  For V.4 this fills in the four fields of
   the record built by rs6000_build_va_list from the incoming-argument
   bookkeeping in current_function_args_info.  */

void
rs6000_va_start (valist, nextarg)
     tree valist;
     rtx nextarg;
{
  HOST_WIDE_INT words, n_gpr, n_fpr;
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;

  /* Only SVR4 needs something special.  */
  if (DEFAULT_ABI != ABI_V4)
    {
      std_expand_builtin_va_start (valist, nextarg);
      return;
    }

  /* Walk the field chain in the order rs6000_build_va_list created it.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  /* va_list is a one-element array, so strip one level of indirection
     and build COMPONENT_REFs for each field.  */
  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Count number of gp and fp argument registers used.  */
  words = current_function_args_info.words;
  n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
  n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;

  if (TARGET_DEBUG_ARG)
    {
      fputs ("va_start: words = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
      fputs (", n_gpr = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
      fputs (", n_fpr = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
      putc ('\n', stderr);
    }

  /* Store the register counts into the gpr/fpr fields.  */
  t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the overflow area.  */
  t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
  if (words != 0)
    t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
	       build_int_2 (words * UNITS_PER_WORD, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the register save area.  */
  t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
  t = build (PLUS_EXPR, TREE_TYPE (sav), t,
	     build_int_2 (-RS6000_VARARGS_SIZE, -1));
  t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
3491
/* Implement va_arg.

   Expand code that fetches the next argument of type TYPE from the
   va_list VALIST and returns an rtx holding the address of that
   argument.  Non-V.4 ABIs use the generic expander; for V.4 we emit a
   runtime test: take the value from the register save area while
   registers remain, otherwise from the overflow (stack) area.  */

rtx
rs6000_va_arg (valist, type)
     tree valist, type;
{
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
  rtx lab_false, lab_over, addr_rtx, r;

  if (DEFAULT_ABI != ABI_V4)
    return std_expand_builtin_va_arg (valist, type);

  /* Field chain order matches rs6000_build_va_list.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* size is the argument size in bytes, rsize in words.  */
  size = int_size_in_bytes (type);
  rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
    {
      /* Aggregates and long doubles are passed by reference.  */
      indirect_p = 1;
      reg = gpr;
      n_reg = 1;
      sav_ofs = 0;
      sav_scale = 4;
      size = UNITS_PER_WORD;
      rsize = 1;
    }
  else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
    {
      /* FP args go in FP registers, if present.  */
      indirect_p = 0;
      reg = fpr;
      n_reg = 1;
      /* FP save slots start after the 8 GPR save words (8*4 bytes),
	 8 bytes per FPR.  */
      sav_ofs = 8*4;
      sav_scale = 8;
    }
  else
    {
      /* Otherwise into GP registers.  */
      indirect_p = 0;
      reg = gpr;
      n_reg = rsize;
      sav_ofs = 0;
      sav_scale = 4;
    }

  /* Pull the value out of the saved registers ...  */

  lab_false = gen_label_rtx ();
  lab_over = gen_label_rtx ();
  addr_rtx = gen_reg_rtx (Pmode);

  /* AltiVec vectors never go in registers.  */
  if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
    {
      TREE_THIS_VOLATILE (reg) = 1;
      /* Branch to the overflow path when not enough registers remain
	 (the gpr/fpr counters run 0..8).  */
      emit_cmp_and_jump_insns
	(expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
	 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
	 lab_false);

      /* Long long is aligned in the registers.  */
      if (n_reg > 1)
	{
	  u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
		     build_int_2 (n_reg - 1, 0));
	  u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
	  u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
	  TREE_SIDE_EFFECTS (u) = 1;
	  expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
	}

      if (sav_ofs)
	t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
      else
	t = sav;

      /* addr = sav + sav_ofs + (reg counter, post-incremented by
	 n_reg) * sav_scale.  */
      u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
		 build_int_2 (n_reg, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      u = build1 (CONVERT_EXPR, integer_type_node, u);
      TREE_SIDE_EFFECTS (u) = 1;

      u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      t = build (PLUS_EXPR, ptr_type_node, t, u);
      TREE_SIDE_EFFECTS (t) = 1;

      r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
      if (r != addr_rtx)
	emit_move_insn (addr_rtx, r);

      emit_jump_insn (gen_jump (lab_over));
      emit_barrier ();
    }

  emit_label (lab_false);

  /* ... otherwise out of the overflow area.  */

  /* Make sure we don't find reg 7 for the next int arg.

     All AltiVec vectors go in the overflow area.  So in the AltiVec
     case we need to get the vectors from the overflow area, but
     remember where the GPRs and FPRs are.  */
  if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
		    || !TARGET_ALTIVEC))
    {
      t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  /* Care for on-stack alignment if needed.  */
  if (rsize <= 1)
    t = ovf;
  else
    {
      int align;

      /* AltiVec vectors are 16 byte aligned.  */
      if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
	align = 15;
      else
	align = 7;

      /* Round ovf up to the next multiple of (align + 1).  */
      t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
      t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
    }
  t = save_expr (t);

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);

  /* Advance ovf past the argument just consumed.  */
  t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_over);

  /* By-reference argument: what we computed is the address of a
     pointer; load through it to get the argument's real address.  */
  if (indirect_p)
    {
      r = gen_rtx_MEM (Pmode, addr_rtx);
      set_mem_alias_set (r, get_varargs_alias_set ());
      emit_move_insn (addr_rtx, r);
    }

  return addr_rtx;
}
3657
/* Builtins.  */

/* Register the machine-specific builtin NAME, with function type TYPE
   and builtin code CODE, but only when the feature bits in MASK are
   enabled in target_flags; otherwise the builtin is not created.  */
#define def_builtin(MASK, NAME, TYPE, CODE)			\
do {								\
  if ((MASK) & target_flags)					\
    builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
		      NULL, NULL_TREE);				\
} while (0)
3666
/* Simple ternary operations: VECd = foo (VECa, VECb, VECc).
   Each entry gives the target_flags mask gating the builtin, the insn
   code that implements it, its user-visible name, and its builtin
   enumerator.  */

static const struct builtin_description bdesc_3arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
};
3695
/* DST operations: void foo (void *, const int, const char).
   AltiVec data-stream touch builtins; same entry layout as
   bdesc_3arg above.  */

static const struct builtin_description bdesc_dst[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
};
3705
3706 /* Simple binary operations: VECc = foo (VECa, VECb). */
3707
3708 static struct builtin_description bdesc_2arg[] =
3709 {
3710 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3711 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3712 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3713 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3714 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3715 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3716 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3717 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3718 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3719 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3720 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3721 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3722 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3723 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3724 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3725 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3726 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3727 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3728 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3729 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3730 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3731 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3732 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3733 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3734 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3735 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3736 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3737 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3738 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3739 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3740 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3741 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3742 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3743 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3744 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3745 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3746 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3747 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3748 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3749 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3750 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3751 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3752 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3753 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3754 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3755 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3756 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3757 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3758 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3759 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3760 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3761 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3762 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3763 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3764 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3765 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3766 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3767 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3768 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3769 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3770 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3771 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3772 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3773 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3774 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3775 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3776 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3777 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3778 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3779 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3780 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3781 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3782 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3783 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3784 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3785 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3786 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3787 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3788 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3789 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3790 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3791 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3792 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3793 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3794 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3795 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3796 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3797 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3798 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3799 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3800 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3801 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3802 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3803 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3804 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3805 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3806 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3807 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3808 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3809 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3810 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3811 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3812 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3813 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3814 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3815 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3816 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3817 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3818 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3819 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3820 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3821 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3822 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3823
3824 /* Place holder, leave as first spe builtin. */
3825 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
3826 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
3827 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
3828 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
3829 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
3830 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
3831 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
3832 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
3833 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
3834 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
3835 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
3836 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
3837 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
3838 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
3839 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
3840 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
3841 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
3842 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
3843 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
3844 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
3845 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
3846 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
3847 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
3848 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
3849 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
3850 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
3851 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
3852 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
3853 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
3854 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
3855 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
3856 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
3857 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
3858 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
3859 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
3860 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
3861 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
3862 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
3863 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
3864 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
3865 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
3866 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
3867 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
3868 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
3869 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
3870 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
3871 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
3872 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
3873 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
3874 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
3875 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
3876 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
3877 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
3878 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
3879 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
3880 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
3881 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
3882 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
3883 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
3884 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
3885 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
3886 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
3887 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
3888 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
3889 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
3890 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
3891 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
3892 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
3893 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
3894 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
3895 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
3896 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
3897 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
3898 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
3899 { 0, CODE_FOR_spe_evmwlsmf, "__builtin_spe_evmwlsmf", SPE_BUILTIN_EVMWLSMF },
3900 { 0, CODE_FOR_spe_evmwlsmfa, "__builtin_spe_evmwlsmfa", SPE_BUILTIN_EVMWLSMFA },
3901 { 0, CODE_FOR_spe_evmwlsmfaaw, "__builtin_spe_evmwlsmfaaw", SPE_BUILTIN_EVMWLSMFAAW },
3902 { 0, CODE_FOR_spe_evmwlsmfanw, "__builtin_spe_evmwlsmfanw", SPE_BUILTIN_EVMWLSMFANW },
3903 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
3904 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
3905 { 0, CODE_FOR_spe_evmwlssf, "__builtin_spe_evmwlssf", SPE_BUILTIN_EVMWLSSF },
3906 { 0, CODE_FOR_spe_evmwlssfa, "__builtin_spe_evmwlssfa", SPE_BUILTIN_EVMWLSSFA },
3907 { 0, CODE_FOR_spe_evmwlssfaaw, "__builtin_spe_evmwlssfaaw", SPE_BUILTIN_EVMWLSSFAAW },
3908 { 0, CODE_FOR_spe_evmwlssfanw, "__builtin_spe_evmwlssfanw", SPE_BUILTIN_EVMWLSSFANW },
3909 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
3910 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
3911 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
3912 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
3913 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
3914 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
3915 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
3916 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
3917 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
3918 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
3919 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
3920 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
3921 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
3922 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
3923 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
3924 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
3925 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
3926 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
3927 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
3928 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
3929 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
3930 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
3931 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
3932 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
3933 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
3934 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
3935 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
3936 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
3937 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
3938 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
3939 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
3940 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
3941 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
3942
3943 /* SPE binary operations expecting a 5-bit unsigned literal. */
3944 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
3945
3946 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
3947 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
3948 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
3949 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
3950 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
3951 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
3952 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
3953 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
3954 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
3955 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
3956 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
3957 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
3958 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
3959 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
3960 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
3961 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
3962 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
3963 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
3964 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
3965 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
3966 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
3967 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
3968 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
3969 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
3970 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
3971 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
3972
3973 /* Place-holder. Leave as last binary SPE builtin. */
3974 { 0, CODE_FOR_spe_evxor, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
3975 };
3976
3977 /* AltiVec predicates. */
3978
/* Describes one AltiVec predicate builtin.  Unlike plain builtins,
   a predicate needs both an insn pattern (to do the comparison) and
   an assembler mnemonic string, which the expander hands to the
   pattern as a SYMBOL_REF operand.  */
struct builtin_description_predicates
{
  /* Target flag bits (e.g. MASK_ALTIVEC) required for this builtin.  */
  const unsigned int mask;
  /* Insn pattern that performs the underlying vector comparison.  */
  const enum insn_code icode;
  /* Assembler mnemonic, e.g. "*vcmpbfp.", emitted by the pattern.  */
  const char *opcode;
  /* User-visible builtin function name.  */
  const char *const name;
  /* rs6000 builtin function code.  */
  const enum rs6000_builtins code;
};
3987
/* Table of the AltiVec vec_any*/vec_all* predicate builtins.  Each
   entry maps a builtin name to the predicate pattern for its vector
   mode plus the dot-form compare mnemonic that sets CR6.  */
static const struct builtin_description_predicates bdesc_altivec_preds[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
};
4004
/* SPE predicates.  NOTE(review): the place-holder comments indicate
   that code elsewhere iterates from the first to the last entry, so
   the order of this table is significant — do not reorder.  */
static struct builtin_description bdesc_spe_predicates[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
};
4022
/* SPE evsel predicates: each entry pairs an SPE compare pattern with
   a __builtin_spe_evsel_* name.  NOTE(review): as with the predicate
   table above, the place-holder comments indicate the order of this
   table is significant — do not reorder.  */
static struct builtin_description bdesc_spe_evsel[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
};
4040
/* ABS* operations.  The CODE_FOR_abs* entries are the plain absolute
   value patterns; the abss_* entries are the saturating variants.  */

static const struct builtin_description bdesc_abs[] =
{
  { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
  { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
};
4053
/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
   foo (VECa).  Expanded by rs6000_expand_unop_builtin; the vspltis*
   and evsplat* entries take a 5-bit signed literal instead of a
   vector operand.  */

static struct builtin_description bdesc_1arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
  { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },

  /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
     end with SPE_BUILTIN_EVSUBFUSIAAW.  */
  { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
  { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
  { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
  { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
  { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
  { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
  { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
  { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
  { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
  { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
  { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
  { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
  { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
  { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
  { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
  { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
  { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
  { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
  { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
  { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
  { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
  { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
  { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
  { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
  { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
  { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
  { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
  { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
  { 0, CODE_FOR_spe_evsplatfi, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI },
  { 0, CODE_FOR_spe_evsplati, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI },

  /* Place-holder.  Leave as last unary SPE builtin.  */
  { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
};
4113
4114 static rtx
4115 rs6000_expand_unop_builtin (icode, arglist, target)
4116 enum insn_code icode;
4117 tree arglist;
4118 rtx target;
4119 {
4120 rtx pat;
4121 tree arg0 = TREE_VALUE (arglist);
4122 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4123 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4124 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4125
4126 if (icode == CODE_FOR_nothing)
4127 /* Builtin not supported on this processor. */
4128 return 0;
4129
4130 /* If we got invalid arguments bail out before generating bad rtl. */
4131 if (arg0 == error_mark_node)
4132 return const0_rtx;
4133
4134 if (icode == CODE_FOR_altivec_vspltisb
4135 || icode == CODE_FOR_altivec_vspltish
4136 || icode == CODE_FOR_altivec_vspltisw
4137 || icode == CODE_FOR_spe_evsplatfi
4138 || icode == CODE_FOR_spe_evsplati)
4139 {
4140 /* Only allow 5-bit *signed* literals. */
4141 if (GET_CODE (op0) != CONST_INT
4142 || INTVAL (op0) > 0x1f
4143 || INTVAL (op0) < -0x1f)
4144 {
4145 error ("argument 1 must be a 5-bit signed literal");
4146 return const0_rtx;
4147 }
4148 }
4149
4150 if (target == 0
4151 || GET_MODE (target) != tmode
4152 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4153 target = gen_reg_rtx (tmode);
4154
4155 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4156 op0 = copy_to_mode_reg (mode0, op0);
4157
4158 pat = GEN_FCN (icode) (target, op0);
4159 if (! pat)
4160 return 0;
4161 emit_insn (pat);
4162
4163 return target;
4164 }
4165
4166 static rtx
4167 altivec_expand_abs_builtin (icode, arglist, target)
4168 enum insn_code icode;
4169 tree arglist;
4170 rtx target;
4171 {
4172 rtx pat, scratch1, scratch2;
4173 tree arg0 = TREE_VALUE (arglist);
4174 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4175 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4176 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4177
4178 /* If we have invalid arguments, bail out before generating bad rtl. */
4179 if (arg0 == error_mark_node)
4180 return const0_rtx;
4181
4182 if (target == 0
4183 || GET_MODE (target) != tmode
4184 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4185 target = gen_reg_rtx (tmode);
4186
4187 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4188 op0 = copy_to_mode_reg (mode0, op0);
4189
4190 scratch1 = gen_reg_rtx (mode0);
4191 scratch2 = gen_reg_rtx (mode0);
4192
4193 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
4194 if (! pat)
4195 return 0;
4196 emit_insn (pat);
4197
4198 return target;
4199 }
4200
4201 static rtx
4202 rs6000_expand_binop_builtin (icode, arglist, target)
4203 enum insn_code icode;
4204 tree arglist;
4205 rtx target;
4206 {
4207 rtx pat;
4208 tree arg0 = TREE_VALUE (arglist);
4209 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4210 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4211 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4212 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4213 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4214 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4215
4216 if (icode == CODE_FOR_nothing)
4217 /* Builtin not supported on this processor. */
4218 return 0;
4219
4220 /* If we got invalid arguments bail out before generating bad rtl. */
4221 if (arg0 == error_mark_node || arg1 == error_mark_node)
4222 return const0_rtx;
4223
4224 if (icode == CODE_FOR_altivec_vcfux
4225 || icode == CODE_FOR_altivec_vcfsx
4226 || icode == CODE_FOR_altivec_vctsxs
4227 || icode == CODE_FOR_altivec_vctuxs
4228 || icode == CODE_FOR_altivec_vspltb
4229 || icode == CODE_FOR_altivec_vsplth
4230 || icode == CODE_FOR_altivec_vspltw
4231 || icode == CODE_FOR_spe_evaddiw
4232 || icode == CODE_FOR_spe_evldd
4233 || icode == CODE_FOR_spe_evldh
4234 || icode == CODE_FOR_spe_evldw
4235 || icode == CODE_FOR_spe_evlhhesplat
4236 || icode == CODE_FOR_spe_evlhhossplat
4237 || icode == CODE_FOR_spe_evlhhousplat
4238 || icode == CODE_FOR_spe_evlwhe
4239 || icode == CODE_FOR_spe_evlwhos
4240 || icode == CODE_FOR_spe_evlwhou
4241 || icode == CODE_FOR_spe_evlwhsplat
4242 || icode == CODE_FOR_spe_evlwwsplat
4243 || icode == CODE_FOR_spe_evrlwi
4244 || icode == CODE_FOR_spe_evslwi
4245 || icode == CODE_FOR_spe_evsrwis
4246 || icode == CODE_FOR_spe_evsrwiu)
4247 {
4248 /* Only allow 5-bit unsigned literals. */
4249 if (TREE_CODE (arg1) != INTEGER_CST
4250 || TREE_INT_CST_LOW (arg1) & ~0x1f)
4251 {
4252 error ("argument 2 must be a 5-bit unsigned literal");
4253 return const0_rtx;
4254 }
4255 }
4256
4257 if (target == 0
4258 || GET_MODE (target) != tmode
4259 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4260 target = gen_reg_rtx (tmode);
4261
4262 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4263 op0 = copy_to_mode_reg (mode0, op0);
4264 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4265 op1 = copy_to_mode_reg (mode1, op1);
4266
4267 pat = GEN_FCN (icode) (target, op0, op1);
4268 if (! pat)
4269 return 0;
4270 emit_insn (pat);
4271
4272 return target;
4273 }
4274
/* Expand an AltiVec predicate builtin.  ICODE is the comparison
   pattern, OPCODE the assembler mnemonic passed through to it, and
   ARGLIST holds (cr6-selector, vecA, vecB).  Returns an SImode rtx
   holding the 0/1 predicate result, const0_rtx after a diagnosed
   argument error, or 0 if expansion failed.  */
static rtx
altivec_expand_predicate_builtin (icode, opcode, arglist, target)
     enum insn_code icode;
     const char *opcode;
     tree arglist;
     rtx target;
{
  rtx pat, scratch;
  tree cr6_form = TREE_VALUE (arglist);
  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  enum machine_mode tmode = SImode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int cr6_form_int;

  /* The first argument selects which CR6 bit/sense to test (see the
     switch below), so it must be a compile-time constant.  */
  if (TREE_CODE (cr6_form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_altivec_predicate must be a constant");
      return const0_rtx;
    }
  else
    cr6_form_int = TREE_INT_CST_LOW (cr6_form);

  /* Both vector operands of the comparison must share a mode.  */
  if (mode0 != mode1)
    abort ();

  /* If we have invalid arguments, bail out before generating bad rtl.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  /* Reuse TARGET only when its mode and predicate fit the pattern.  */
  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  /* The vector compare result lands in a scratch register; the value
     we return is derived from CR6.  OPCODE rides along as a
     SYMBOL_REF operand so the pattern can emit the right mnemonic.  */
  scratch = gen_reg_rtx (mode0);

  pat = GEN_FCN (icode) (scratch, op0, op1,
			 gen_rtx (SYMBOL_REF, Pmode, opcode));
  if (! pat)
    return 0;
  emit_insn (pat);

  /* The vec_any* and vec_all* predicates use the same opcodes for two
     different operations, but the bits in CR6 will be different
     depending on what information we want.  So we have to play tricks
     with CR6 to get the right bits out.

     If you think this is disgusting, look at the specs for the
     AltiVec predicates.  */

  switch (cr6_form_int)
    {
    case 0:
      emit_insn (gen_cr6_test_for_zero (target));
      break;
    case 1:
      emit_insn (gen_cr6_test_for_zero_reverse (target));
      break;
    case 2:
      emit_insn (gen_cr6_test_for_lt (target));
      break;
    case 3:
      emit_insn (gen_cr6_test_for_lt_reverse (target));
      break;
    default:
      error ("argument 1 of __builtin_altivec_predicate is out of range");
      break;
    }

  return target;
}
4355
4356 static rtx
4357 altivec_expand_stv_builtin (icode, arglist)
4358 enum insn_code icode;
4359 tree arglist;
4360 {
4361 tree arg0 = TREE_VALUE (arglist);
4362 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4363 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4364 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4365 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4366 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4367 rtx pat;
4368 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
4369 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
4370 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
4371
4372 /* Invalid arguments. Bail before doing anything stoopid! */
4373 if (arg0 == error_mark_node
4374 || arg1 == error_mark_node
4375 || arg2 == error_mark_node)
4376 return const0_rtx;
4377
4378 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
4379 op0 = copy_to_mode_reg (mode2, op0);
4380 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
4381 op1 = copy_to_mode_reg (mode0, op1);
4382 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
4383 op2 = copy_to_mode_reg (mode1, op2);
4384
4385 pat = GEN_FCN (icode) (op1, op2, op0);
4386 if (pat)
4387 emit_insn (pat);
4388 return NULL_RTX;
4389 }
4390
4391 static rtx
4392 rs6000_expand_ternop_builtin (icode, arglist, target)
4393 enum insn_code icode;
4394 tree arglist;
4395 rtx target;
4396 {
4397 rtx pat;
4398 tree arg0 = TREE_VALUE (arglist);
4399 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4400 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4401 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4402 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4403 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4404 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4405 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4406 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4407 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
4408
4409 if (icode == CODE_FOR_nothing)
4410 /* Builtin not supported on this processor. */
4411 return 0;
4412
4413 /* If we got invalid arguments bail out before generating bad rtl. */
4414 if (arg0 == error_mark_node
4415 || arg1 == error_mark_node
4416 || arg2 == error_mark_node)
4417 return const0_rtx;
4418
4419 if (icode == CODE_FOR_altivec_vsldoi_4sf
4420 || icode == CODE_FOR_altivec_vsldoi_4si
4421 || icode == CODE_FOR_altivec_vsldoi_8hi
4422 || icode == CODE_FOR_altivec_vsldoi_16qi)
4423 {
4424 /* Only allow 4-bit unsigned literals. */
4425 if (TREE_CODE (arg2) != INTEGER_CST
4426 || TREE_INT_CST_LOW (arg2) & ~0xf)
4427 {
4428 error ("argument 3 must be a 4-bit unsigned literal");
4429 return const0_rtx;
4430 }
4431 }
4432
4433 if (target == 0
4434 || GET_MODE (target) != tmode
4435 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4436 target = gen_reg_rtx (tmode);
4437
4438 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4439 op0 = copy_to_mode_reg (mode0, op0);
4440 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4441 op1 = copy_to_mode_reg (mode1, op1);
4442 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
4443 op2 = copy_to_mode_reg (mode2, op2);
4444
4445 pat = GEN_FCN (icode) (target, op0, op1, op2);
4446 if (! pat)
4447 return 0;
4448 emit_insn (pat);
4449
4450 return target;
4451 }
4452
4453 /* Expand the lvx builtins. */
4454 static rtx
4455 altivec_expand_ld_builtin (exp, target, expandedp)
4456 tree exp;
4457 rtx target;
4458 bool *expandedp;
4459 {
4460 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4461 tree arglist = TREE_OPERAND (exp, 1);
4462 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4463 tree arg0;
4464 enum machine_mode tmode, mode0;
4465 rtx pat, op0;
4466 enum insn_code icode;
4467
4468 switch (fcode)
4469 {
4470 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
4471 icode = CODE_FOR_altivec_lvx_16qi;
4472 break;
4473 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
4474 icode = CODE_FOR_altivec_lvx_8hi;
4475 break;
4476 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
4477 icode = CODE_FOR_altivec_lvx_4si;
4478 break;
4479 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
4480 icode = CODE_FOR_altivec_lvx_4sf;
4481 break;
4482 default:
4483 *expandedp = false;
4484 return NULL_RTX;
4485 }
4486
4487 *expandedp = true;
4488
4489 arg0 = TREE_VALUE (arglist);
4490 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4491 tmode = insn_data[icode].operand[0].mode;
4492 mode0 = insn_data[icode].operand[1].mode;
4493
4494 if (target == 0
4495 || GET_MODE (target) != tmode
4496 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4497 target = gen_reg_rtx (tmode);
4498
4499 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4500 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4501
4502 pat = GEN_FCN (icode) (target, op0);
4503 if (! pat)
4504 return 0;
4505 emit_insn (pat);
4506 return target;
4507 }
4508
4509 /* Expand the stvx builtins. */
4510 static rtx
4511 altivec_expand_st_builtin (exp, target, expandedp)
4512 tree exp;
4513 rtx target ATTRIBUTE_UNUSED;
4514 bool *expandedp;
4515 {
4516 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4517 tree arglist = TREE_OPERAND (exp, 1);
4518 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4519 tree arg0, arg1;
4520 enum machine_mode mode0, mode1;
4521 rtx pat, op0, op1;
4522 enum insn_code icode;
4523
4524 switch (fcode)
4525 {
4526 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
4527 icode = CODE_FOR_altivec_stvx_16qi;
4528 break;
4529 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
4530 icode = CODE_FOR_altivec_stvx_8hi;
4531 break;
4532 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
4533 icode = CODE_FOR_altivec_stvx_4si;
4534 break;
4535 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
4536 icode = CODE_FOR_altivec_stvx_4sf;
4537 break;
4538 default:
4539 *expandedp = false;
4540 return NULL_RTX;
4541 }
4542
4543 arg0 = TREE_VALUE (arglist);
4544 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4545 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4546 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4547 mode0 = insn_data[icode].operand[0].mode;
4548 mode1 = insn_data[icode].operand[1].mode;
4549
4550 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4551 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4552 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
4553 op1 = copy_to_mode_reg (mode1, op1);
4554
4555 pat = GEN_FCN (icode) (op0, op1);
4556 if (pat)
4557 emit_insn (pat);
4558
4559 *expandedp = true;
4560 return NULL_RTX;
4561 }
4562
4563 /* Expand the dst builtins. */
4564 static rtx
4565 altivec_expand_dst_builtin (exp, target, expandedp)
4566 tree exp;
4567 rtx target ATTRIBUTE_UNUSED;
4568 bool *expandedp;
4569 {
4570 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4571 tree arglist = TREE_OPERAND (exp, 1);
4572 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4573 tree arg0, arg1, arg2;
4574 enum machine_mode mode0, mode1, mode2;
4575 rtx pat, op0, op1, op2;
4576 struct builtin_description *d;
4577 size_t i;
4578
4579 *expandedp = false;
4580
4581 /* Handle DST variants. */
4582 d = (struct builtin_description *) bdesc_dst;
4583 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
4584 if (d->code == fcode)
4585 {
4586 arg0 = TREE_VALUE (arglist);
4587 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4588 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4589 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4590 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4591 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4592 mode0 = insn_data[d->icode].operand[0].mode;
4593 mode1 = insn_data[d->icode].operand[1].mode;
4594 mode2 = insn_data[d->icode].operand[2].mode;
4595
4596 /* Invalid arguments, bail out before generating bad rtl. */
4597 if (arg0 == error_mark_node
4598 || arg1 == error_mark_node
4599 || arg2 == error_mark_node)
4600 return const0_rtx;
4601
4602 if (TREE_CODE (arg2) != INTEGER_CST
4603 || TREE_INT_CST_LOW (arg2) & ~0x3)
4604 {
4605 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
4606 return const0_rtx;
4607 }
4608
4609 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
4610 op0 = copy_to_mode_reg (mode0, op0);
4611 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
4612 op1 = copy_to_mode_reg (mode1, op1);
4613
4614 pat = GEN_FCN (d->icode) (op0, op1, op2);
4615 if (pat != 0)
4616 emit_insn (pat);
4617
4618 *expandedp = true;
4619 return NULL_RTX;
4620 }
4621
4622 return NULL_RTX;
4623 }
4624
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.

   Dispatches first to the ld/st/dst helpers, then handles the
   irregular AltiVec builtins inline, and finally walks the abs,
   predicate and lv* tables.  */
static rtx
altivec_expand_builtin (exp, target, expandedp)
     tree exp;
     rtx target;
     bool *expandedp;
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  enum insn_code icode;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0;
  rtx op0, pat;
  enum machine_mode tmode, mode0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);

  /* Each helper sets *EXPANDEDP when it recognizes FCODE.  */
  target = altivec_expand_ld_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_st_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_dst_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  /* Assume success from here on; undone at the bottom if FCODE is
     not ours after all.  */
  *expandedp = true;

  switch (fcode)
    {
    case ALTIVEC_BUILTIN_STVX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
    case ALTIVEC_BUILTIN_STVEBX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
    case ALTIVEC_BUILTIN_STVEHX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
    case ALTIVEC_BUILTIN_STVEWX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
    case ALTIVEC_BUILTIN_STVXL:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);

    case ALTIVEC_BUILTIN_MFVSCR:
      /* Move from the vector status and control register.  */
      icode = CODE_FOR_altivec_mfvscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    case ALTIVEC_BUILTIN_MTVSCR:
      /* Move to the vector status and control register.  */
      icode = CODE_FOR_altivec_mtvscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSSALL:
      /* Stop all data streams.  */
      emit_insn (gen_altivec_dssall ());
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSS:
      /* Stop one data stream; the stream id must be a literal since
	 it is encoded in the insn.  */
      icode = CODE_FOR_altivec_dss;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (arg0) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg0) & ~0x3)
	{
	  error ("argument to dss must be a 2-bit unsigned literal");
	  return const0_rtx;
	}

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      emit_insn (gen_altivec_dss (op0));
      return NULL_RTX;
    }

  /* Expand abs* operations.  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
    if (d->code == fcode)
      return altivec_expand_abs_builtin (d->icode, arglist, target);

  /* Expand the AltiVec predicates.  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
    if (dp->code == fcode)
      return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);

  /* LV* are funky.  We initialized them differently.  */
  switch (fcode)
    {
    case ALTIVEC_BUILTIN_LVSL:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVSR:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEBX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEHX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEWX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVXL:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx,
					  arglist, target);
    default:
      break;
    }

  /* FCODE was not an AltiVec builtin after all.  */
  *expandedp = false;
  return NULL_RTX;
}
4776
/* Binops that need to be initialized manually, but can be expanded
   automagically by rs6000_expand_binop_builtin.
   Entries are { mask, icode, name, builtin code }.  The mask field is
   left 0 here; NOTE(review): spe_expand_builtin matches these by code
   alone, so the mask appears unused for this table -- confirm.  */
static struct builtin_description bdesc_2arg_spe[] =
{
  { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
  { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
  { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
  { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
  { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
  { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
  { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
  { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
  { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
  { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
  { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
  { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
  { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
  { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
  { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
  { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
  { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
  { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
  { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
  { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
  { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
  { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
};
4804
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.

   This expands the SPE builtins that are not simple unary and binary
   operations.  */
static rtx
spe_expand_builtin (exp, target, expandedp)
     tree exp;
     rtx target;
     bool *expandedp;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg1, arg0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  enum insn_code icode;
  enum machine_mode tmode, mode0;
  rtx pat, op0;
  struct builtin_description *d;
  size_t i;

  *expandedp = true;

  /* Syntax check for a 5-bit unsigned immediate.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDD:
    case SPE_BUILTIN_EVSTDH:
    case SPE_BUILTIN_EVSTDW:
    case SPE_BUILTIN_EVSTWHE:
    case SPE_BUILTIN_EVSTWHO:
    case SPE_BUILTIN_EVSTWWE:
    case SPE_BUILTIN_EVSTWWO:
      /* NOTE(review): this fetches the THIRD call argument (the offset
	 literal) although the diagnostic below says "argument 2" --
	 confirm the intended numbering.  */
      arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      if (TREE_CODE (arg1) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg1) & ~0x1f)
	{
	  error ("argument 2 must be a 5-bit unsigned literal");
	  return const0_rtx;
	}
      break;
    default:
      break;
    }

  /* Table-driven expanders: irregular binops, predicates, evsel.  */
  d = (struct builtin_description *) bdesc_2arg_spe;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
    if (d->code == fcode)
      return rs6000_expand_binop_builtin (d->icode, arglist, target);

  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_predicate_builtin (d->icode, arglist, target);

  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_evsel_builtin (d->icode, arglist, target);

  switch (fcode)
    {
    /* The stores share the three-operand expansion with the AltiVec
       stv builtins.  */
    case SPE_BUILTIN_EVSTDDX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
    case SPE_BUILTIN_EVSTDHX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
    case SPE_BUILTIN_EVSTDWX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
    case SPE_BUILTIN_EVSTWHEX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
    case SPE_BUILTIN_EVSTWHOX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
    case SPE_BUILTIN_EVSTWWEX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
    case SPE_BUILTIN_EVSTWWOX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
    case SPE_BUILTIN_EVSTDD:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
    case SPE_BUILTIN_EVSTDH:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
    case SPE_BUILTIN_EVSTDW:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
    case SPE_BUILTIN_EVSTWHE:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
    case SPE_BUILTIN_EVSTWHO:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
    case SPE_BUILTIN_EVSTWWE:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
    case SPE_BUILTIN_EVSTWWO:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
    case SPE_BUILTIN_MFSPEFSCR:
      /* Read the SPE status and control register.  */
      icode = CODE_FOR_spe_mfspefscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;
    case SPE_BUILTIN_MTSPEFSCR:
      /* Write the SPE status and control register.  */
      icode = CODE_FOR_spe_mtspefscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      if (arg0 == error_mark_node)
	return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;
    default:
      break;
    }

  /* FCODE was not one of the irregular SPE builtins.  */
  *expandedp = false;
  return NULL_RTX;
}
4932
/* Expand an SPE predicate builtin.  ICODE is the comparison insn,
   ARGLIST holds (form, a, b) where FORM selects which CR bit is
   tested, and TARGET receives the SImode 0/1 result.  */
static rtx
spe_expand_predicate_builtin (icode, arglist, target)
     enum insn_code icode;
     tree arglist;
     rtx target;
{
  rtx pat, scratch, tmp;
  tree form = TREE_VALUE (arglist);
  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int form_int;
  enum rtx_code code;

  /* The form selector must be a compile-time constant so we can pick
     the CR bit at expansion time.  */
  if (TREE_CODE (form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_spe_predicate must be a constant");
      return const0_rtx;
    }
  else
    form_int = TREE_INT_CST_LOW (form);

  /* Both compare operands are expected to share one mode.  */
  if (mode0 != mode1)
    abort ();

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != SImode
      || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
    target = gen_reg_rtx (SImode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  scratch = gen_reg_rtx (CCmode);

  pat = GEN_FCN (icode) (scratch, op0, op1);
  if (! pat)
    return const0_rtx;
  emit_insn (pat);

  /* There are 4 variants for each predicate: _any_, _all_, _upper_,
     _lower_.  We use one compare, but look in different bits of the
     CR for each variant.

     There are 2 elements in each SPE simd type (upper/lower).  The CR
     bits are set as follows:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     U     |   L    | (U | L) | (U & L)

     So, for an "all" relationship, BIT 3 would be set.
     For an "any" relationship, BIT 2 would be set.  Etc.

     Following traditional nomenclature, these bits map to:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     LT    | GT     | EQ      | OV

     Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
  */

  switch (form_int)
    {
      /* All variant.  OV bit.  */
    case 0:
      /* We need to get to the OV bit, which is the ORDERED bit.  We
	 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
	 that's ugly and will trigger a validate_condition_mode abort.
	 So let's just use another pattern.  */
      emit_insn (gen_move_from_CR_ov_bit (target, scratch));
      return target;
      /* Any variant.  EQ bit.  */
    case 1:
      code = EQ;
      break;
      /* Upper variant.  LT bit.  */
    case 2:
      code = LT;
      break;
      /* Lower variant.  GT bit.  */
    case 3:
      code = GT;
      break;
    default:
      error ("argument 1 of __builtin_spe_predicate is out of range");
      return const0_rtx;
    }

  /* Extract the chosen CR bit as a 0/1 SImode value.  */
  tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
  emit_move_insn (target, tmp);

  return target;
}
5034
5035 /* The evsel builtins look like this:
5036
5037 e = __builtin_spe_evsel_OP (a, b, c, d);
5038
5039 and work like this:
5040
5041 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
5042 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
5043 */
5044
5045 static rtx
5046 spe_expand_evsel_builtin (icode, arglist, target)
5047 enum insn_code icode;
5048 tree arglist;
5049 rtx target;
5050 {
5051 rtx pat, scratch;
5052 tree arg0 = TREE_VALUE (arglist);
5053 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5054 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5055 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
5056 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5057 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5058 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5059 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
5060 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5061 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5062
5063 if (mode0 != mode1)
5064 abort ();
5065
5066 if (arg0 == error_mark_node || arg1 == error_mark_node
5067 || arg2 == error_mark_node || arg3 == error_mark_node)
5068 return const0_rtx;
5069
5070 if (target == 0
5071 || GET_MODE (target) != mode0
5072 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
5073 target = gen_reg_rtx (mode0);
5074
5075 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5076 op0 = copy_to_mode_reg (mode0, op0);
5077 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5078 op1 = copy_to_mode_reg (mode0, op1);
5079 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5080 op2 = copy_to_mode_reg (mode0, op2);
5081 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
5082 op3 = copy_to_mode_reg (mode0, op3);
5083
5084 /* Generate the compare. */
5085 scratch = gen_reg_rtx (CCmode);
5086 pat = GEN_FCN (icode) (scratch, op0, op1);
5087 if (! pat)
5088 return const0_rtx;
5089 emit_insn (pat);
5090
5091 if (mode0 == V2SImode)
5092 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
5093 else
5094 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
5095
5096 return target;
5097 }
5098
5099 /* Expand an expression EXP that calls a built-in function,
5100 with result going to TARGET if that's convenient
5101 (and in mode MODE if that's convenient).
5102 SUBTARGET may be used as the target for computing one of EXP's operands.
5103 IGNORE is nonzero if the value is to be ignored. */
5104
5105 static rtx
5106 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
5107 tree exp;
5108 rtx target;
5109 rtx subtarget ATTRIBUTE_UNUSED;
5110 enum machine_mode mode ATTRIBUTE_UNUSED;
5111 int ignore ATTRIBUTE_UNUSED;
5112 {
5113 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5114 tree arglist = TREE_OPERAND (exp, 1);
5115 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5116 struct builtin_description *d;
5117 size_t i;
5118 rtx ret;
5119 bool success;
5120
5121 if (TARGET_ALTIVEC)
5122 {
5123 ret = altivec_expand_builtin (exp, target, &success);
5124
5125 if (success)
5126 return ret;
5127 }
5128 if (TARGET_SPE)
5129 {
5130 ret = spe_expand_builtin (exp, target, &success);
5131
5132 if (success)
5133 return ret;
5134 }
5135
5136 if (TARGET_ALTIVEC || TARGET_SPE)
5137 {
5138 /* Handle simple unary operations. */
5139 d = (struct builtin_description *) bdesc_1arg;
5140 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5141 if (d->code == fcode)
5142 return rs6000_expand_unop_builtin (d->icode, arglist, target);
5143
5144 /* Handle simple binary operations. */
5145 d = (struct builtin_description *) bdesc_2arg;
5146 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5147 if (d->code == fcode)
5148 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5149
5150 /* Handle simple ternary operations. */
5151 d = (struct builtin_description *) bdesc_3arg;
5152 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
5153 if (d->code == fcode)
5154 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
5155 }
5156
5157 abort ();
5158 return NULL_RTX;
5159 }
5160
/* Register the rs6000 builtin functions for the enabled vector ISAs.
   NOTE(review): the common (shared unop/binop/ternop) initialization
   runs last -- presumably it depends on types/state set up by the
   SPE/AltiVec passes, so keep this order.  */
static void
rs6000_init_builtins ()
{
  if (TARGET_SPE)
    spe_init_builtins ();
  if (TARGET_ALTIVEC)
    altivec_init_builtins ();
  if (TARGET_ALTIVEC || TARGET_SPE)
    rs6000_common_init_builtins ();
}
5171
5172 /* Search through a set of builtins and enable the mask bits.
5173 DESC is an array of builtins.
5174 SIZE is the totaly number of builtins.
5175 START is the builtin enum at which to start.
5176 END is the builtin enum at which to end. */
5177 static void
5178 enable_mask_for_builtins (desc, size, start, end)
5179 struct builtin_description *desc;
5180 int size;
5181 enum rs6000_builtins start, end;
5182 {
5183 int i;
5184
5185 for (i = 0; i < size; ++i)
5186 if (desc[i].code == start)
5187 break;
5188
5189 if (i == size)
5190 return;
5191
5192 for (; i < size; ++i)
5193 {
5194 /* Flip all the bits on. */
5195 desc[i].mask = target_flags;
5196 if (desc[i].code == end)
5197 break;
5198 }
5199 }
5200
5201 static void
5202 spe_init_builtins ()
5203 {
5204 tree endlink = void_list_node;
5205 tree puint_type_node = build_pointer_type (unsigned_type_node);
5206 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
5207 tree pv2si_type_node = build_pointer_type (V2SI_type_node);
5208 struct builtin_description *d;
5209 size_t i;
5210
5211 tree v2si_ftype_4_v2si
5212 = build_function_type
5213 (V2SI_type_node,
5214 tree_cons (NULL_TREE, V2SI_type_node,
5215 tree_cons (NULL_TREE, V2SI_type_node,
5216 tree_cons (NULL_TREE, V2SI_type_node,
5217 tree_cons (NULL_TREE, V2SI_type_node,
5218 endlink)))));
5219
5220 tree v2sf_ftype_4_v2sf
5221 = build_function_type
5222 (V2SF_type_node,
5223 tree_cons (NULL_TREE, V2SF_type_node,
5224 tree_cons (NULL_TREE, V2SF_type_node,
5225 tree_cons (NULL_TREE, V2SF_type_node,
5226 tree_cons (NULL_TREE, V2SF_type_node,
5227 endlink)))));
5228
5229 tree int_ftype_int_v2si_v2si
5230 = build_function_type
5231 (integer_type_node,
5232 tree_cons (NULL_TREE, integer_type_node,
5233 tree_cons (NULL_TREE, V2SI_type_node,
5234 tree_cons (NULL_TREE, V2SI_type_node,
5235 endlink))));
5236
5237 tree int_ftype_int_v2sf_v2sf
5238 = build_function_type
5239 (integer_type_node,
5240 tree_cons (NULL_TREE, integer_type_node,
5241 tree_cons (NULL_TREE, V2SF_type_node,
5242 tree_cons (NULL_TREE, V2SF_type_node,
5243 endlink))));
5244
5245 tree void_ftype_v2si_puint_int
5246 = build_function_type (void_type_node,
5247 tree_cons (NULL_TREE, V2SI_type_node,
5248 tree_cons (NULL_TREE, puint_type_node,
5249 tree_cons (NULL_TREE,
5250 integer_type_node,
5251 endlink))));
5252
5253 tree void_ftype_v2si_puint_char
5254 = build_function_type (void_type_node,
5255 tree_cons (NULL_TREE, V2SI_type_node,
5256 tree_cons (NULL_TREE, puint_type_node,
5257 tree_cons (NULL_TREE,
5258 char_type_node,
5259 endlink))));
5260
5261 tree void_ftype_v2si_pv2si_int
5262 = build_function_type (void_type_node,
5263 tree_cons (NULL_TREE, V2SI_type_node,
5264 tree_cons (NULL_TREE, pv2si_type_node,
5265 tree_cons (NULL_TREE,
5266 integer_type_node,
5267 endlink))));
5268
5269 tree void_ftype_v2si_pv2si_char
5270 = build_function_type (void_type_node,
5271 tree_cons (NULL_TREE, V2SI_type_node,
5272 tree_cons (NULL_TREE, pv2si_type_node,
5273 tree_cons (NULL_TREE,
5274 char_type_node,
5275 endlink))));
5276
5277 tree void_ftype_int
5278 = build_function_type (void_type_node,
5279 tree_cons (NULL_TREE, integer_type_node, endlink));
5280
5281 tree int_ftype_void
5282 = build_function_type (integer_type_node,
5283 tree_cons (NULL_TREE, void_type_node, endlink));
5284
5285 tree v2si_ftype_pv2si_int
5286 = build_function_type (V2SI_type_node,
5287 tree_cons (NULL_TREE, pv2si_type_node,
5288 tree_cons (NULL_TREE, integer_type_node,
5289 endlink)));
5290
5291 tree v2si_ftype_puint_int
5292 = build_function_type (V2SI_type_node,
5293 tree_cons (NULL_TREE, puint_type_node,
5294 tree_cons (NULL_TREE, integer_type_node,
5295 endlink)));
5296
5297 tree v2si_ftype_pushort_int
5298 = build_function_type (V2SI_type_node,
5299 tree_cons (NULL_TREE, pushort_type_node,
5300 tree_cons (NULL_TREE, integer_type_node,
5301 endlink)));
5302
5303 /* The initialization of the simple binary and unary builtins is
5304 done in rs6000_common_init_builtins, but we have to enable the
5305 mask bits here manually because we have run out of `target_flags'
5306 bits. We really need to redesign this mask business. */
5307
5308 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
5309 ARRAY_SIZE (bdesc_2arg),
5310 SPE_BUILTIN_EVADDW,
5311 SPE_BUILTIN_EVXOR);
5312 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
5313 ARRAY_SIZE (bdesc_1arg),
5314 SPE_BUILTIN_EVABS,
5315 SPE_BUILTIN_EVSUBFUSIAAW);
5316 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
5317 ARRAY_SIZE (bdesc_spe_predicates),
5318 SPE_BUILTIN_EVCMPEQ,
5319 SPE_BUILTIN_EVFSTSTLT);
5320 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
5321 ARRAY_SIZE (bdesc_spe_evsel),
5322 SPE_BUILTIN_EVSEL_CMPGTS,
5323 SPE_BUILTIN_EVSEL_FSTSTEQ);
5324
5325 /* Initialize irregular SPE builtins. */
5326
5327 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
5328 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
5329 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
5330 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
5331 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
5332 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
5333 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
5334 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
5335 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
5336 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
5337 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
5338 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
5339 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
5340 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
5341 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
5342 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
5343
5344 /* Loads. */
5345 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
5346 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
5347 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
5348 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
5349 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
5350 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
5351 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
5352 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
5353 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
5354 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
5355 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
5356 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
5357 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
5358 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
5359 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
5360 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
5361 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
5362 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
5363 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
5364 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
5365 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
5366 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
5367
5368 /* Predicates. */
5369 d = (struct builtin_description *) bdesc_spe_predicates;
5370 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
5371 {
5372 tree type;
5373
5374 switch (insn_data[d->icode].operand[1].mode)
5375 {
5376 case V2SImode:
5377 type = int_ftype_int_v2si_v2si;
5378 break;
5379 case V2SFmode:
5380 type = int_ftype_int_v2sf_v2sf;
5381 break;
5382 default:
5383 abort ();
5384 }
5385
5386 def_builtin (d->mask, d->name, type, d->code);
5387 }
5388
5389 /* Evsel predicates. */
5390 d = (struct builtin_description *) bdesc_spe_evsel;
5391 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
5392 {
5393 tree type;
5394
5395 switch (insn_data[d->icode].operand[1].mode)
5396 {
5397 case V2SImode:
5398 type = v2si_ftype_4_v2si;
5399 break;
5400 case V2SFmode:
5401 type = v2sf_ftype_4_v2sf;
5402 break;
5403 default:
5404 abort ();
5405 }
5406
5407 def_builtin (d->mask, d->name, type, d->code);
5408 }
5409 }
5410
5411 static void
5412 altivec_init_builtins ()
5413 {
5414 struct builtin_description *d;
5415 struct builtin_description_predicates *dp;
5416 size_t i;
5417 tree pfloat_type_node = build_pointer_type (float_type_node);
5418 tree pint_type_node = build_pointer_type (integer_type_node);
5419 tree pshort_type_node = build_pointer_type (short_integer_type_node);
5420 tree pchar_type_node = build_pointer_type (char_type_node);
5421
5422 tree pvoid_type_node = build_pointer_type (void_type_node);
5423
5424 tree int_ftype_int_v4si_v4si
5425 = build_function_type_list (integer_type_node,
5426 integer_type_node, V4SI_type_node,
5427 V4SI_type_node, NULL_TREE);
5428 tree v4sf_ftype_pfloat
5429 = build_function_type_list (V4SF_type_node, pfloat_type_node, NULL_TREE);
5430 tree void_ftype_pfloat_v4sf
5431 = build_function_type_list (void_type_node,
5432 pfloat_type_node, V4SF_type_node, NULL_TREE);
5433 tree v4si_ftype_pint
5434 = build_function_type_list (V4SI_type_node, pint_type_node, NULL_TREE); tree void_ftype_pint_v4si
5435 = build_function_type_list (void_type_node,
5436 pint_type_node, V4SI_type_node, NULL_TREE);
5437 tree v8hi_ftype_pshort
5438 = build_function_type_list (V8HI_type_node, pshort_type_node, NULL_TREE);
5439 tree void_ftype_pshort_v8hi
5440 = build_function_type_list (void_type_node,
5441 pshort_type_node, V8HI_type_node, NULL_TREE);
5442 tree v16qi_ftype_pchar
5443 = build_function_type_list (V16QI_type_node, pchar_type_node, NULL_TREE);
5444 tree void_ftype_pchar_v16qi
5445 = build_function_type_list (void_type_node,
5446 pchar_type_node, V16QI_type_node, NULL_TREE);
5447 tree void_ftype_v4si
5448 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
5449 tree v8hi_ftype_void
5450 = build_function_type (V8HI_type_node, void_list_node);
5451 tree void_ftype_void
5452 = build_function_type (void_type_node, void_list_node);
5453 tree void_ftype_qi
5454 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
5455 tree v16qi_ftype_int_pvoid
5456 = build_function_type_list (V16QI_type_node,
5457 integer_type_node, pvoid_type_node, NULL_TREE);
5458 tree v8hi_ftype_int_pvoid
5459 = build_function_type_list (V8HI_type_node,
5460 integer_type_node, pvoid_type_node, NULL_TREE);
5461 tree v4si_ftype_int_pvoid
5462 = build_function_type_list (V4SI_type_node,
5463 integer_type_node, pvoid_type_node, NULL_TREE);
5464 tree void_ftype_v4si_int_pvoid
5465 = build_function_type_list (void_type_node,
5466 V4SI_type_node, integer_type_node,
5467 pvoid_type_node, NULL_TREE);
5468 tree void_ftype_v16qi_int_pvoid
5469 = build_function_type_list (void_type_node,
5470 V16QI_type_node, integer_type_node,
5471 pvoid_type_node, NULL_TREE);
5472 tree void_ftype_v8hi_int_pvoid
5473 = build_function_type_list (void_type_node,
5474 V8HI_type_node, integer_type_node,
5475 pvoid_type_node, NULL_TREE);
5476 tree int_ftype_int_v8hi_v8hi
5477 = build_function_type_list (integer_type_node,
5478 integer_type_node, V8HI_type_node,
5479 V8HI_type_node, NULL_TREE);
5480 tree int_ftype_int_v16qi_v16qi
5481 = build_function_type_list (integer_type_node,
5482 integer_type_node, V16QI_type_node,
5483 V16QI_type_node, NULL_TREE);
5484 tree int_ftype_int_v4sf_v4sf
5485 = build_function_type_list (integer_type_node,
5486 integer_type_node, V4SF_type_node,
5487 V4SF_type_node, NULL_TREE);
5488 tree v4si_ftype_v4si
5489 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
5490 tree v8hi_ftype_v8hi
5491 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
5492 tree v16qi_ftype_v16qi
5493 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
5494 tree v4sf_ftype_v4sf
5495 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
5496 tree void_ftype_pvoid_int_char
5497 = build_function_type_list (void_type_node,
5498 pvoid_type_node, integer_type_node,
5499 char_type_node, NULL_TREE);
5500
5501 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat, ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
5502 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf, ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
5503 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint, ALTIVEC_BUILTIN_LD_INTERNAL_4si);
5504 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si, ALTIVEC_BUILTIN_ST_INTERNAL_4si);
5505 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort, ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
5506 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi, ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
5507 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar, ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
5508 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi, ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
5509 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
5510 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
5511 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
5512 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
5513 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSL);
5514 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSR);
5515 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEBX);
5516 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEHX);
5517 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEWX);
5518 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVXL);
5519 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVX);
5520 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
5521 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
5522 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
5523 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
5524 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
5525
5526 /* Add the DST variants. */
5527 d = (struct builtin_description *) bdesc_dst;
5528 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
5529 def_builtin (d->mask, d->name, void_ftype_pvoid_int_char, d->code);
5530
5531 /* Initialize the predicates. */
5532 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
5533 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
5534 {
5535 enum machine_mode mode1;
5536 tree type;
5537
5538 mode1 = insn_data[dp->icode].operand[1].mode;
5539
5540 switch (mode1)
5541 {
5542 case V4SImode:
5543 type = int_ftype_int_v4si_v4si;
5544 break;
5545 case V8HImode:
5546 type = int_ftype_int_v8hi_v8hi;
5547 break;
5548 case V16QImode:
5549 type = int_ftype_int_v16qi_v16qi;
5550 break;
5551 case V4SFmode:
5552 type = int_ftype_int_v4sf_v4sf;
5553 break;
5554 default:
5555 abort ();
5556 }
5557
5558 def_builtin (dp->mask, dp->name, type, dp->code);
5559 }
5560
5561 /* Initialize the abs* operators. */
5562 d = (struct builtin_description *) bdesc_abs;
5563 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
5564 {
5565 enum machine_mode mode0;
5566 tree type;
5567
5568 mode0 = insn_data[d->icode].operand[0].mode;
5569
5570 switch (mode0)
5571 {
5572 case V4SImode:
5573 type = v4si_ftype_v4si;
5574 break;
5575 case V8HImode:
5576 type = v8hi_ftype_v8hi;
5577 break;
5578 case V16QImode:
5579 type = v16qi_ftype_v16qi;
5580 break;
5581 case V4SFmode:
5582 type = v4sf_ftype_v4sf;
5583 break;
5584 default:
5585 abort ();
5586 }
5587
5588 def_builtin (d->mask, d->name, type, d->code);
5589 }
5590 }
5591
5592 static void
5593 rs6000_common_init_builtins ()
5594 {
5595 struct builtin_description *d;
5596 size_t i;
5597
5598 tree v4sf_ftype_v4sf_v4sf_v16qi
5599 = build_function_type_list (V4SF_type_node,
5600 V4SF_type_node, V4SF_type_node,
5601 V16QI_type_node, NULL_TREE);
5602 tree v4si_ftype_v4si_v4si_v16qi
5603 = build_function_type_list (V4SI_type_node,
5604 V4SI_type_node, V4SI_type_node,
5605 V16QI_type_node, NULL_TREE);
5606 tree v8hi_ftype_v8hi_v8hi_v16qi
5607 = build_function_type_list (V8HI_type_node,
5608 V8HI_type_node, V8HI_type_node,
5609 V16QI_type_node, NULL_TREE);
5610 tree v16qi_ftype_v16qi_v16qi_v16qi
5611 = build_function_type_list (V16QI_type_node,
5612 V16QI_type_node, V16QI_type_node,
5613 V16QI_type_node, NULL_TREE);
5614 tree v4si_ftype_char
5615 = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
5616 tree v8hi_ftype_char
5617 = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
5618 tree v16qi_ftype_char
5619 = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
5620 tree v8hi_ftype_v16qi
5621 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
5622 tree v4sf_ftype_v4sf
5623 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
5624
5625 tree v2si_ftype_v2si_v2si
5626 = build_function_type_list (V2SI_type_node,
5627 V2SI_type_node, V2SI_type_node, NULL_TREE);
5628
5629 tree v2sf_ftype_v2sf_v2sf
5630 = build_function_type_list (V2SF_type_node,
5631 V2SF_type_node, V2SF_type_node, NULL_TREE);
5632
5633 tree v2si_ftype_int_int
5634 = build_function_type_list (V2SI_type_node,
5635 integer_type_node, integer_type_node,
5636 NULL_TREE);
5637
5638 tree v2si_ftype_v2si
5639 = build_function_type_list (V2SI_type_node, V2SI_type_node, NULL_TREE);
5640
5641 tree v2sf_ftype_v2sf
5642 = build_function_type_list (V2SF_type_node,
5643 V2SF_type_node, NULL_TREE);
5644
5645 tree v2sf_ftype_v2si
5646 = build_function_type_list (V2SF_type_node,
5647 V2SI_type_node, NULL_TREE);
5648
5649 tree v2si_ftype_v2sf
5650 = build_function_type_list (V2SI_type_node,
5651 V2SF_type_node, NULL_TREE);
5652
5653 tree v2si_ftype_v2si_char
5654 = build_function_type_list (V2SI_type_node,
5655 V2SI_type_node, char_type_node, NULL_TREE);
5656
5657 tree v2si_ftype_int_char
5658 = build_function_type_list (V2SI_type_node,
5659 integer_type_node, char_type_node, NULL_TREE);
5660
5661 tree v2si_ftype_char
5662 = build_function_type_list (V2SI_type_node, char_type_node, NULL_TREE);
5663
5664 tree int_ftype_int_int
5665 = build_function_type_list (integer_type_node,
5666 integer_type_node, integer_type_node,
5667 NULL_TREE);
5668
5669 tree v4si_ftype_v4si_v4si
5670 = build_function_type_list (V4SI_type_node,
5671 V4SI_type_node, V4SI_type_node, NULL_TREE);
5672 tree v4sf_ftype_v4si_char
5673 = build_function_type_list (V4SF_type_node,
5674 V4SI_type_node, char_type_node, NULL_TREE);
5675 tree v4si_ftype_v4sf_char
5676 = build_function_type_list (V4SI_type_node,
5677 V4SF_type_node, char_type_node, NULL_TREE);
5678 tree v4si_ftype_v4si_char
5679 = build_function_type_list (V4SI_type_node,
5680 V4SI_type_node, char_type_node, NULL_TREE);
5681 tree v8hi_ftype_v8hi_char
5682 = build_function_type_list (V8HI_type_node,
5683 V8HI_type_node, char_type_node, NULL_TREE);
5684 tree v16qi_ftype_v16qi_char
5685 = build_function_type_list (V16QI_type_node,
5686 V16QI_type_node, char_type_node, NULL_TREE);
5687 tree v16qi_ftype_v16qi_v16qi_char
5688 = build_function_type_list (V16QI_type_node,
5689 V16QI_type_node, V16QI_type_node,
5690 char_type_node, NULL_TREE);
5691 tree v8hi_ftype_v8hi_v8hi_char
5692 = build_function_type_list (V8HI_type_node,
5693 V8HI_type_node, V8HI_type_node,
5694 char_type_node, NULL_TREE);
5695 tree v4si_ftype_v4si_v4si_char
5696 = build_function_type_list (V4SI_type_node,
5697 V4SI_type_node, V4SI_type_node,
5698 char_type_node, NULL_TREE);
5699 tree v4sf_ftype_v4sf_v4sf_char
5700 = build_function_type_list (V4SF_type_node,
5701 V4SF_type_node, V4SF_type_node,
5702 char_type_node, NULL_TREE);
5703 tree v4sf_ftype_v4sf_v4sf
5704 = build_function_type_list (V4SF_type_node,
5705 V4SF_type_node, V4SF_type_node, NULL_TREE);
5706 tree v4sf_ftype_v4sf_v4sf_v4si
5707 = build_function_type_list (V4SF_type_node,
5708 V4SF_type_node, V4SF_type_node,
5709 V4SI_type_node, NULL_TREE);
5710 tree v4sf_ftype_v4sf_v4sf_v4sf
5711 = build_function_type_list (V4SF_type_node,
5712 V4SF_type_node, V4SF_type_node,
5713 V4SF_type_node, NULL_TREE);
5714 tree v4si_ftype_v4si_v4si_v4si
5715 = build_function_type_list (V4SI_type_node,
5716 V4SI_type_node, V4SI_type_node,
5717 V4SI_type_node, NULL_TREE);
5718 tree v8hi_ftype_v8hi_v8hi
5719 = build_function_type_list (V8HI_type_node,
5720 V8HI_type_node, V8HI_type_node, NULL_TREE);
5721 tree v8hi_ftype_v8hi_v8hi_v8hi
5722 = build_function_type_list (V8HI_type_node,
5723 V8HI_type_node, V8HI_type_node,
5724 V8HI_type_node, NULL_TREE);
5725 tree v4si_ftype_v8hi_v8hi_v4si
5726 = build_function_type_list (V4SI_type_node,
5727 V8HI_type_node, V8HI_type_node,
5728 V4SI_type_node, NULL_TREE);
5729 tree v4si_ftype_v16qi_v16qi_v4si
5730 = build_function_type_list (V4SI_type_node,
5731 V16QI_type_node, V16QI_type_node,
5732 V4SI_type_node, NULL_TREE);
5733 tree v16qi_ftype_v16qi_v16qi
5734 = build_function_type_list (V16QI_type_node,
5735 V16QI_type_node, V16QI_type_node, NULL_TREE);
5736 tree v4si_ftype_v4sf_v4sf
5737 = build_function_type_list (V4SI_type_node,
5738 V4SF_type_node, V4SF_type_node, NULL_TREE);
5739 tree v8hi_ftype_v16qi_v16qi
5740 = build_function_type_list (V8HI_type_node,
5741 V16QI_type_node, V16QI_type_node, NULL_TREE);
5742 tree v4si_ftype_v8hi_v8hi
5743 = build_function_type_list (V4SI_type_node,
5744 V8HI_type_node, V8HI_type_node, NULL_TREE);
5745 tree v8hi_ftype_v4si_v4si
5746 = build_function_type_list (V8HI_type_node,
5747 V4SI_type_node, V4SI_type_node, NULL_TREE);
5748 tree v16qi_ftype_v8hi_v8hi
5749 = build_function_type_list (V16QI_type_node,
5750 V8HI_type_node, V8HI_type_node, NULL_TREE);
5751 tree v4si_ftype_v16qi_v4si
5752 = build_function_type_list (V4SI_type_node,
5753 V16QI_type_node, V4SI_type_node, NULL_TREE);
5754 tree v4si_ftype_v16qi_v16qi
5755 = build_function_type_list (V4SI_type_node,
5756 V16QI_type_node, V16QI_type_node, NULL_TREE);
5757 tree v4si_ftype_v8hi_v4si
5758 = build_function_type_list (V4SI_type_node,
5759 V8HI_type_node, V4SI_type_node, NULL_TREE);
5760 tree v4si_ftype_v8hi
5761 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
5762 tree int_ftype_v4si_v4si
5763 = build_function_type_list (integer_type_node,
5764 V4SI_type_node, V4SI_type_node, NULL_TREE);
5765 tree int_ftype_v4sf_v4sf
5766 = build_function_type_list (integer_type_node,
5767 V4SF_type_node, V4SF_type_node, NULL_TREE);
5768 tree int_ftype_v16qi_v16qi
5769 = build_function_type_list (integer_type_node,
5770 V16QI_type_node, V16QI_type_node, NULL_TREE);
5771 tree int_ftype_v8hi_v8hi
5772 = build_function_type_list (integer_type_node,
5773 V8HI_type_node, V8HI_type_node, NULL_TREE);
5774
5775 /* Add the simple ternary operators. */
5776 d = (struct builtin_description *) bdesc_3arg;
5777 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
5778 {
5779
5780 enum machine_mode mode0, mode1, mode2, mode3;
5781 tree type;
5782
5783 if (d->name == 0 || d->icode == CODE_FOR_nothing)
5784 continue;
5785
5786 mode0 = insn_data[d->icode].operand[0].mode;
5787 mode1 = insn_data[d->icode].operand[1].mode;
5788 mode2 = insn_data[d->icode].operand[2].mode;
5789 mode3 = insn_data[d->icode].operand[3].mode;
5790
5791 /* When all four are of the same mode. */
5792 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
5793 {
5794 switch (mode0)
5795 {
5796 case V4SImode:
5797 type = v4si_ftype_v4si_v4si_v4si;
5798 break;
5799 case V4SFmode:
5800 type = v4sf_ftype_v4sf_v4sf_v4sf;
5801 break;
5802 case V8HImode:
5803 type = v8hi_ftype_v8hi_v8hi_v8hi;
5804 break;
5805 case V16QImode:
5806 type = v16qi_ftype_v16qi_v16qi_v16qi;
5807 break;
5808 default:
5809 abort();
5810 }
5811 }
5812 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
5813 {
5814 switch (mode0)
5815 {
5816 case V4SImode:
5817 type = v4si_ftype_v4si_v4si_v16qi;
5818 break;
5819 case V4SFmode:
5820 type = v4sf_ftype_v4sf_v4sf_v16qi;
5821 break;
5822 case V8HImode:
5823 type = v8hi_ftype_v8hi_v8hi_v16qi;
5824 break;
5825 case V16QImode:
5826 type = v16qi_ftype_v16qi_v16qi_v16qi;
5827 break;
5828 default:
5829 abort();
5830 }
5831 }
5832 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
5833 && mode3 == V4SImode)
5834 type = v4si_ftype_v16qi_v16qi_v4si;
5835 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
5836 && mode3 == V4SImode)
5837 type = v4si_ftype_v8hi_v8hi_v4si;
5838 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
5839 && mode3 == V4SImode)
5840 type = v4sf_ftype_v4sf_v4sf_v4si;
5841
5842 /* vchar, vchar, vchar, 4 bit literal. */
5843 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
5844 && mode3 == QImode)
5845 type = v16qi_ftype_v16qi_v16qi_char;
5846
5847 /* vshort, vshort, vshort, 4 bit literal. */
5848 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
5849 && mode3 == QImode)
5850 type = v8hi_ftype_v8hi_v8hi_char;
5851
5852 /* vint, vint, vint, 4 bit literal. */
5853 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
5854 && mode3 == QImode)
5855 type = v4si_ftype_v4si_v4si_char;
5856
5857 /* vfloat, vfloat, vfloat, 4 bit literal. */
5858 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
5859 && mode3 == QImode)
5860 type = v4sf_ftype_v4sf_v4sf_char;
5861
5862 else
5863 abort ();
5864
5865 def_builtin (d->mask, d->name, type, d->code);
5866 }
5867
5868 /* Add the simple binary operators. */
5869 d = (struct builtin_description *) bdesc_2arg;
5870 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5871 {
5872 enum machine_mode mode0, mode1, mode2;
5873 tree type;
5874
5875 if (d->name == 0 || d->icode == CODE_FOR_nothing)
5876 continue;
5877
5878 mode0 = insn_data[d->icode].operand[0].mode;
5879 mode1 = insn_data[d->icode].operand[1].mode;
5880 mode2 = insn_data[d->icode].operand[2].mode;
5881
5882 /* When all three operands are of the same mode. */
5883 if (mode0 == mode1 && mode1 == mode2)
5884 {
5885 switch (mode0)
5886 {
5887 case V4SFmode:
5888 type = v4sf_ftype_v4sf_v4sf;
5889 break;
5890 case V4SImode:
5891 type = v4si_ftype_v4si_v4si;
5892 break;
5893 case V16QImode:
5894 type = v16qi_ftype_v16qi_v16qi;
5895 break;
5896 case V8HImode:
5897 type = v8hi_ftype_v8hi_v8hi;
5898 break;
5899 case V2SImode:
5900 type = v2si_ftype_v2si_v2si;
5901 break;
5902 case V2SFmode:
5903 type = v2sf_ftype_v2sf_v2sf;
5904 break;
5905 case SImode:
5906 type = int_ftype_int_int;
5907 break;
5908 default:
5909 abort ();
5910 }
5911 }
5912
5913 /* A few other combos we really don't want to do manually. */
5914
5915 /* vint, vfloat, vfloat. */
5916 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
5917 type = v4si_ftype_v4sf_v4sf;
5918
5919 /* vshort, vchar, vchar. */
5920 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
5921 type = v8hi_ftype_v16qi_v16qi;
5922
5923 /* vint, vshort, vshort. */
5924 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
5925 type = v4si_ftype_v8hi_v8hi;
5926
5927 /* vshort, vint, vint. */
5928 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
5929 type = v8hi_ftype_v4si_v4si;
5930
5931 /* vchar, vshort, vshort. */
5932 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
5933 type = v16qi_ftype_v8hi_v8hi;
5934
5935 /* vint, vchar, vint. */
5936 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
5937 type = v4si_ftype_v16qi_v4si;
5938
5939 /* vint, vchar, vchar. */
5940 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
5941 type = v4si_ftype_v16qi_v16qi;
5942
5943 /* vint, vshort, vint. */
5944 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
5945 type = v4si_ftype_v8hi_v4si;
5946
5947 /* vint, vint, 5 bit literal. */
5948 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
5949 type = v4si_ftype_v4si_char;
5950
5951 /* vshort, vshort, 5 bit literal. */
5952 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
5953 type = v8hi_ftype_v8hi_char;
5954
5955 /* vchar, vchar, 5 bit literal. */
5956 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
5957 type = v16qi_ftype_v16qi_char;
5958
5959 /* vfloat, vint, 5 bit literal. */
5960 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
5961 type = v4sf_ftype_v4si_char;
5962
5963 /* vint, vfloat, 5 bit literal. */
5964 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
5965 type = v4si_ftype_v4sf_char;
5966
5967 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
5968 type = v2si_ftype_int_int;
5969
5970 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
5971 type = v2si_ftype_v2si_char;
5972
5973 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
5974 type = v2si_ftype_int_char;
5975
5976 /* int, x, x. */
5977 else if (mode0 == SImode)
5978 {
5979 switch (mode1)
5980 {
5981 case V4SImode:
5982 type = int_ftype_v4si_v4si;
5983 break;
5984 case V4SFmode:
5985 type = int_ftype_v4sf_v4sf;
5986 break;
5987 case V16QImode:
5988 type = int_ftype_v16qi_v16qi;
5989 break;
5990 case V8HImode:
5991 type = int_ftype_v8hi_v8hi;
5992 break;
5993 default:
5994 abort ();
5995 }
5996 }
5997
5998 else
5999 abort ();
6000
6001 def_builtin (d->mask, d->name, type, d->code);
6002 }
6003
6004 /* Add the simple unary operators. */
6005 d = (struct builtin_description *) bdesc_1arg;
6006 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6007 {
6008 enum machine_mode mode0, mode1;
6009 tree type;
6010
6011 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6012 continue;
6013
6014 mode0 = insn_data[d->icode].operand[0].mode;
6015 mode1 = insn_data[d->icode].operand[1].mode;
6016
6017 if (mode0 == V4SImode && mode1 == QImode)
6018 type = v4si_ftype_char;
6019 else if (mode0 == V8HImode && mode1 == QImode)
6020 type = v8hi_ftype_char;
6021 else if (mode0 == V16QImode && mode1 == QImode)
6022 type = v16qi_ftype_char;
6023 else if (mode0 == V4SFmode && mode1 == V4SFmode)
6024 type = v4sf_ftype_v4sf;
6025 else if (mode0 == V8HImode && mode1 == V16QImode)
6026 type = v8hi_ftype_v16qi;
6027 else if (mode0 == V4SImode && mode1 == V8HImode)
6028 type = v4si_ftype_v8hi;
6029 else if (mode0 == V2SImode && mode1 == V2SImode)
6030 type = v2si_ftype_v2si;
6031 else if (mode0 == V2SFmode && mode1 == V2SFmode)
6032 type = v2sf_ftype_v2sf;
6033 else if (mode0 == V2SFmode && mode1 == V2SImode)
6034 type = v2sf_ftype_v2si;
6035 else if (mode0 == V2SImode && mode1 == V2SFmode)
6036 type = v2si_ftype_v2sf;
6037 else if (mode0 == V2SImode && mode1 == QImode)
6038 type = v2si_ftype_char;
6039 else
6040 abort ();
6041
6042 def_builtin (d->mask, d->name, type, d->code);
6043 }
6044 }
6045
6046 \f
6047 /* Expand a block move operation, and return 1 if successful. Return 0
6048 if we should let the compiler generate normal code.
6049
6050 operands[0] is the destination
6051 operands[1] is the source
6052 operands[2] is the length
6053 operands[3] is the alignment */
6054
6055 #define MAX_MOVE_REG 4
6056
/* Expand a fixed-size block move, returning 1 on success and 0 when the
   compiler should fall back to a memcpy call.  With TARGET_STRING the
   PowerPC load/store-string instructions are used for up to 32 bytes per
   iteration; otherwise the block is decomposed into the widest aligned
   scalar loads/stores, with stores batched to give the scheduler some
   freedom.  operands[]: 0 = dest MEM, 1 = source MEM, 2 = byte count
   (must be CONST_INT to succeed), 3 = alignment in bytes (CONST_INT).  */

int
expand_block_move (operands)
     rtx operands[];
{
  rtx orig_dest = operands[0];
  rtx orig_src = operands[1];
  rtx bytes_rtx = operands[2];
  rtx align_rtx = operands[3];
  int constp = (GET_CODE (bytes_rtx) == CONST_INT);
  int align;
  int bytes;
  int offset;
  int move_bytes;

  /* If this is not a fixed size move, just call memcpy */
  if (! constp)
    return 0;

  /* If this is not a fixed size alignment, abort */
  if (GET_CODE (align_rtx) != CONST_INT)
    abort ();
  align = INTVAL (align_rtx);

  /* Anything to move? */
  bytes = INTVAL (bytes_rtx);
  if (bytes <= 0)
    return 1;

  /* store_one_arg depends on expand_block_move to handle at least the size of
     reg_parm_stack_space.  */
  if (bytes > (TARGET_POWERPC64 ? 64 : 32))
    return 0;

  if (TARGET_STRING)	/* string instructions are available */
    {
      /* Each iteration picks the widest move the remaining size, the
	 alignment, and register availability permit.  */
      for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
	{
	  /* Either a string-move expander (BLKmode) or a plain scalar
	     move expander is selected below; the union keeps one slot.  */
	  union {
	    rtx (*movstrsi) PARAMS ((rtx, rtx, rtx, rtx));
	    rtx (*mov) PARAMS ((rtx, rtx));
	  } gen_func;
	  enum machine_mode mode = BLKmode;
	  rtx src, dest;

	  /* The wider movstrsi patterns clobber a run of call-clobbered
	     registers starting at r5, so they can only be used when none
	     of those registers is fixed.  */
	  if (bytes > 24		/* move up to 32 bytes at a time */
	      && ! fixed_regs[5]
	      && ! fixed_regs[6]
	      && ! fixed_regs[7]
	      && ! fixed_regs[8]
	      && ! fixed_regs[9]
	      && ! fixed_regs[10]
	      && ! fixed_regs[11]
	      && ! fixed_regs[12])
	    {
	      move_bytes = (bytes > 32) ? 32 : bytes;
	      gen_func.movstrsi = gen_movstrsi_8reg;
	    }
	  else if (bytes > 16	/* move up to 24 bytes at a time */
		   && ! fixed_regs[5]
		   && ! fixed_regs[6]
		   && ! fixed_regs[7]
		   && ! fixed_regs[8]
		   && ! fixed_regs[9]
		   && ! fixed_regs[10])
	    {
	      move_bytes = (bytes > 24) ? 24 : bytes;
	      gen_func.movstrsi = gen_movstrsi_6reg;
	    }
	  else if (bytes > 8	/* move up to 16 bytes at a time */
		   && ! fixed_regs[5]
		   && ! fixed_regs[6]
		   && ! fixed_regs[7]
		   && ! fixed_regs[8])
	    {
	      move_bytes = (bytes > 16) ? 16 : bytes;
	      gen_func.movstrsi = gen_movstrsi_4reg;
	    }
	  else if (bytes >= 8 && TARGET_POWERPC64
		   /* 64-bit loads and stores require word-aligned
		      displacements.  */
		   && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
	    {
	      move_bytes = 8;
	      mode = DImode;
	      gen_func.mov = gen_movdi;
	    }
	  else if (bytes > 4 && !TARGET_POWERPC64)
	    {			/* move up to 8 bytes at a time */
	      move_bytes = (bytes > 8) ? 8 : bytes;
	      gen_func.movstrsi = gen_movstrsi_2reg;
	    }
	  else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
	    {			/* move 4 bytes */
	      move_bytes = 4;
	      mode = SImode;
	      gen_func.mov = gen_movsi;
	    }
	  else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
	    {			/* move 2 bytes */
	      move_bytes = 2;
	      mode = HImode;
	      gen_func.mov = gen_movhi;
	    }
	  else if (bytes == 1)	/* move 1 byte */
	    {
	      move_bytes = 1;
	      mode = QImode;
	      gen_func.mov = gen_movqi;
	    }
	  else
	    {			/* move up to 4 bytes at a time */
	      move_bytes = (bytes > 4) ? 4 : bytes;
	      gen_func.movstrsi = gen_movstrsi_1reg;
	    }

	  src = adjust_address (orig_src, mode, offset);
	  dest = adjust_address (orig_dest, mode, offset);

	  if (mode == BLKmode)
	    {
	      /* Move the address into scratch registers.  The movstrsi
		 patterns require zero offset.  */
	      if (!REG_P (XEXP (src, 0)))
		{
		  rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
		  src = replace_equiv_address (src, src_reg);
		}
	      set_mem_size (src, GEN_INT (move_bytes));

	      if (!REG_P (XEXP (dest, 0)))
		{
		  rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
		  dest = replace_equiv_address (dest, dest_reg);
		}
	      set_mem_size (dest, GEN_INT (move_bytes));

	      emit_insn ((*gen_func.movstrsi) (dest, src,
					       GEN_INT (move_bytes & 31),
					       align_rtx));
	    }
	  else
	    {
	      /* Scalar move: load into a fresh pseudo, then store.  */
	      rtx tmp_reg = gen_reg_rtx (mode);

	      emit_insn ((*gen_func.mov) (tmp_reg, src));
	      emit_insn ((*gen_func.mov) (dest, tmp_reg));
	    }
	}
    }

  else			/* string instructions not available */
    {
      /* Buffer up to MAX_MOVE_REG store insns so that the loads of a
	 batch are all emitted before its stores.  */
      rtx stores[MAX_MOVE_REG];
      int num_reg = 0;
      int i;

      for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
	{
	  rtx (*gen_mov_func) PARAMS ((rtx, rtx));
	  enum machine_mode mode;
	  rtx src, dest, tmp_reg;

	  /* Generate the appropriate load and store, saving the stores
	     for later.  */
	  if (bytes >= 8 && TARGET_POWERPC64
	      /* 64-bit loads and stores require word-aligned
		 displacements.  */
	      && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
	    {
	      move_bytes = 8;
	      mode = DImode;
	      gen_mov_func = gen_movdi;
	    }
	  else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
	    {
	      move_bytes = 4;
	      mode = SImode;
	      gen_mov_func = gen_movsi;
	    }
	  else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
	    {
	      move_bytes = 2;
	      mode = HImode;
	      gen_mov_func = gen_movhi;
	    }
	  else
	    {
	      move_bytes = 1;
	      mode = QImode;
	      gen_mov_func = gen_movqi;
	    }

	  src = adjust_address (orig_src, mode, offset);
	  dest = adjust_address (orig_dest, mode, offset);
	  tmp_reg = gen_reg_rtx (mode);

	  emit_insn ((*gen_mov_func) (tmp_reg, src));
	  stores[num_reg++] = (*gen_mov_func) (dest, tmp_reg);

	  /* Flush the batch once it is full.  */
	  if (num_reg >= MAX_MOVE_REG)
	    {
	      for (i = 0; i < num_reg; i++)
		emit_insn (stores[i]);
	      num_reg = 0;
	    }
	}

      /* Flush any stores left in the final, partial batch.  */
      for (i = 0; i < num_reg; i++)
	emit_insn (stores[i]);
    }

  return 1;
}
6270
6271 \f
6272 /* Return 1 if OP is a load multiple operation. It is known to be a
6273 PARALLEL and the first section will be tested. */
6274
6275 int
6276 load_multiple_operation (op, mode)
6277 rtx op;
6278 enum machine_mode mode ATTRIBUTE_UNUSED;
6279 {
6280 int count = XVECLEN (op, 0);
6281 unsigned int dest_regno;
6282 rtx src_addr;
6283 int i;
6284
6285 /* Perform a quick check so we don't blow up below. */
6286 if (count <= 1
6287 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6288 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6289 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
6290 return 0;
6291
6292 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6293 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
6294
6295 for (i = 1; i < count; i++)
6296 {
6297 rtx elt = XVECEXP (op, 0, i);
6298
6299 if (GET_CODE (elt) != SET
6300 || GET_CODE (SET_DEST (elt)) != REG
6301 || GET_MODE (SET_DEST (elt)) != SImode
6302 || REGNO (SET_DEST (elt)) != dest_regno + i
6303 || GET_CODE (SET_SRC (elt)) != MEM
6304 || GET_MODE (SET_SRC (elt)) != SImode
6305 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
6306 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
6307 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
6308 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
6309 return 0;
6310 }
6311
6312 return 1;
6313 }
6314
6315 /* Similar, but tests for store multiple. Here, the second vector element
6316 is a CLOBBER. It will be tested later. */
6317
6318 int
6319 store_multiple_operation (op, mode)
6320 rtx op;
6321 enum machine_mode mode ATTRIBUTE_UNUSED;
6322 {
6323 int count = XVECLEN (op, 0) - 1;
6324 unsigned int src_regno;
6325 rtx dest_addr;
6326 int i;
6327
6328 /* Perform a quick check so we don't blow up below. */
6329 if (count <= 1
6330 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6331 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
6332 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
6333 return 0;
6334
6335 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6336 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
6337
6338 for (i = 1; i < count; i++)
6339 {
6340 rtx elt = XVECEXP (op, 0, i + 1);
6341
6342 if (GET_CODE (elt) != SET
6343 || GET_CODE (SET_SRC (elt)) != REG
6344 || GET_MODE (SET_SRC (elt)) != SImode
6345 || REGNO (SET_SRC (elt)) != src_regno + i
6346 || GET_CODE (SET_DEST (elt)) != MEM
6347 || GET_MODE (SET_DEST (elt)) != SImode
6348 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
6349 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
6350 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
6351 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
6352 return 0;
6353 }
6354
6355 return 1;
6356 }
6357
6358 /* Return 1 for a parallel vrsave operation. */
6359
6360 int
6361 vrsave_operation (op, mode)
6362 rtx op;
6363 enum machine_mode mode ATTRIBUTE_UNUSED;
6364 {
6365 int count = XVECLEN (op, 0);
6366 unsigned int dest_regno, src_regno;
6367 int i;
6368
6369 if (count <= 1
6370 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6371 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6372 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
6373 return 0;
6374
6375 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6376 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6377
6378 if (dest_regno != VRSAVE_REGNO
6379 && src_regno != VRSAVE_REGNO)
6380 return 0;
6381
6382 for (i = 1; i < count; i++)
6383 {
6384 rtx elt = XVECEXP (op, 0, i);
6385
6386 if (GET_CODE (elt) != CLOBBER
6387 && GET_CODE (elt) != SET)
6388 return 0;
6389 }
6390
6391 return 1;
6392 }
6393
6394 /* Return 1 for an PARALLEL suitable for mtcrf. */
6395
int
mtcrf_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  int i;
  rtx src_reg;

  /* Perform a quick check so we don't blow up below.  */
  if (count < 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
      || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
    return 0;
  src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);

  /* The value being moved into the CR fields must be a SImode GPR.  */
  if (GET_CODE (src_reg) != REG
      || GET_MODE (src_reg) != SImode
      || ! INT_REGNO_P (REGNO (src_reg)))
    return 0;

  /* Every element must be (set (reg:CC crN) (unspec [SRC_REG MASK] 20))
     where MASK is the single mtcrf field-select bit for that CR field.  */
  for (i = 0; i < count; i++)
    {
      rtx exp = XVECEXP (op, 0, i);
      rtx unspec;
      int maskval;

      if (GET_CODE (exp) != SET
	  || GET_CODE (SET_DEST (exp)) != REG
	  || GET_MODE (SET_DEST (exp)) != CCmode
	  || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
	return 0;
      unspec = SET_SRC (exp);
      /* CR0 corresponds to the most significant field-select bit.  */
      maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));

      /* NOTE(review): 20 is the unspec number the movesi_to_cr patterns
	 use -- confirm against rs6000.md if it changes.  */
      if (GET_CODE (unspec) != UNSPEC
	  || XINT (unspec, 1) != 20
	  || XVECLEN (unspec, 0) != 2
	  || XVECEXP (unspec, 0, 0) != src_reg
	  || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
	  || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
	return 0;
    }
  return 1;
}
6442
6443 /* Return 1 for an PARALLEL suitable for lmw. */
6444
int
lmw_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int dest_regno;
  rtx src_addr;
  unsigned int base_regno;
  HOST_WIDE_INT offset;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);

  /* lmw always loads up through register 31, so the vector length must
     match the starting register exactly.  */
  if (dest_regno > 31
      || count != 32 - (int) dest_regno)
    return 0;

  /* Decompose the first address into base register + constant offset.
     A base of register 0 is rejected -- presumably because r0 as a
     base operand reads as literal zero; confirm against the PowerPC
     ISA.  */
  if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
    {
      offset = 0;
      base_regno = REGNO (src_addr);
      if (base_regno == 0)
	return 0;
    }
  else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
    {
      offset = INTVAL (XEXP (src_addr, 1));
      base_regno = REGNO (XEXP (src_addr, 0));
    }
  else
    return 0;

  /* Element I must be (set (reg:SI dest+I)
     (mem:SI (plus base (offset + 4*I)))).  */
  for (i = 0; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);
      rtx newaddr;
      rtx addr_reg;
      HOST_WIDE_INT newoffset;

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || REGNO (SET_DEST (elt)) != dest_regno + i
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != SImode)
	return 0;
      newaddr = XEXP (SET_SRC (elt), 0);
      /* Decompose this element's address the same way as the first.  */
      if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
	{
	  newoffset = 0;
	  addr_reg = newaddr;
	}
      else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
	{
	  addr_reg = XEXP (newaddr, 0);
	  newoffset = INTVAL (XEXP (newaddr, 1));
	}
      else
	return 0;
      if (REGNO (addr_reg) != base_regno
	  || newoffset != offset + 4 * i)
	return 0;
    }

  return 1;
}
6520
6521 /* Return 1 for an PARALLEL suitable for stmw. */
6522
int
stmw_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int src_regno;
  rtx dest_addr;
  unsigned int base_regno;
  HOST_WIDE_INT offset;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);

  /* stmw always stores up through register 31, so the vector length
     must match the starting register exactly.  */
  if (src_regno > 31
      || count != 32 - (int) src_regno)
    return 0;

  /* Decompose the first address into base register + constant offset.
     A base of register 0 is rejected -- presumably because r0 as a
     base operand reads as literal zero; confirm against the PowerPC
     ISA.  */
  if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
    {
      offset = 0;
      base_regno = REGNO (dest_addr);
      if (base_regno == 0)
	return 0;
    }
  else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
    {
      offset = INTVAL (XEXP (dest_addr, 1));
      base_regno = REGNO (XEXP (dest_addr, 0));
    }
  else
    return 0;

  /* Element I must be (set (mem:SI (plus base (offset + 4*I)))
     (reg:SI src+I)).  */
  for (i = 0; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);
      rtx newaddr;
      rtx addr_reg;
      HOST_WIDE_INT newoffset;

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_SRC (elt)) != REG
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || REGNO (SET_SRC (elt)) != src_regno + i
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_MODE (SET_DEST (elt)) != SImode)
	return 0;
      newaddr = XEXP (SET_DEST (elt), 0);
      /* Decompose this element's address the same way as the first.  */
      if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
	{
	  newoffset = 0;
	  addr_reg = newaddr;
	}
      else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
	{
	  addr_reg = XEXP (newaddr, 0);
	  newoffset = INTVAL (XEXP (newaddr, 1));
	}
      else
	return 0;
      if (REGNO (addr_reg) != base_regno
	  || newoffset != offset + 4 * i)
	return 0;
    }

  return 1;
}
6598 \f
6599 /* A validation routine: say whether CODE, a condition code, and MODE
6600 match. The other alternatives either don't make sense or should
6601 never be generated. */
6602
static void
validate_condition_mode (code, mode)
     enum rtx_code code;
     enum machine_mode mode;
{
  /* CODE must be a comparison and MODE a condition-code mode.  */
  if (GET_RTX_CLASS (code) != '<'
      || GET_MODE_CLASS (mode) != MODE_CC)
    abort ();

  /* These don't make sense: signed comparisons on an unsigned CC mode,
     or unsigned comparisons on anything else.  */
  if ((code == GT || code == LT || code == GE || code == LE)
      && mode == CCUNSmode)
    abort ();

  if ((code == GTU || code == LTU || code == GEU || code == LEU)
      && mode != CCUNSmode)
    abort ();

  /* Unordered-aware comparison codes only exist for floating point.  */
  if (mode != CCFPmode
      && (code == ORDERED || code == UNORDERED
	  || code == UNEQ || code == LTGT
	  || code == UNGT || code == UNLT
	  || code == UNGE || code == UNLE))
    abort ();

  /* These should never be generated except for
     flag_unsafe_math_optimizations and flag_finite_math_only.  */
  if (mode == CCFPmode
      && ! flag_unsafe_math_optimizations
      && ! flag_finite_math_only
      && (code == LE || code == GE
	  || code == UNEQ || code == LTGT
	  || code == UNGT || code == UNLT))
    abort ();

  /* These are invalid; the information is not there.  */
  if (mode == CCEQmode
      && code != EQ && code != NE)
    abort ();
}
6643
6644 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
6645 We only check the opcode against the mode of the CC value here. */
6646
6647 int
6648 branch_comparison_operator (op, mode)
6649 rtx op;
6650 enum machine_mode mode ATTRIBUTE_UNUSED;
6651 {
6652 enum rtx_code code = GET_CODE (op);
6653 enum machine_mode cc_mode;
6654
6655 if (GET_RTX_CLASS (code) != '<')
6656 return 0;
6657
6658 cc_mode = GET_MODE (XEXP (op, 0));
6659 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
6660 return 0;
6661
6662 validate_condition_mode (code, cc_mode);
6663
6664 return 1;
6665 }
6666
6667 /* Return 1 if OP is a comparison operation that is valid for a branch
6668 insn and which is true if the corresponding bit in the CC register
6669 is set. */
6670
6671 int
6672 branch_positive_comparison_operator (op, mode)
6673 rtx op;
6674 enum machine_mode mode;
6675 {
6676 enum rtx_code code;
6677
6678 if (! branch_comparison_operator (op, mode))
6679 return 0;
6680
6681 code = GET_CODE (op);
6682 return (code == EQ || code == LT || code == GT
6683 || (TARGET_SPE && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
6684 || code == LTU || code == GTU
6685 || code == UNORDERED);
6686 }
6687
6688 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
6689 We check the opcode against the mode of the CC value and disallow EQ or
6690 NE comparisons for integers. */
6691
6692 int
6693 scc_comparison_operator (op, mode)
6694 rtx op;
6695 enum machine_mode mode;
6696 {
6697 enum rtx_code code = GET_CODE (op);
6698 enum machine_mode cc_mode;
6699
6700 if (GET_MODE (op) != mode && mode != VOIDmode)
6701 return 0;
6702
6703 if (GET_RTX_CLASS (code) != '<')
6704 return 0;
6705
6706 cc_mode = GET_MODE (XEXP (op, 0));
6707 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
6708 return 0;
6709
6710 validate_condition_mode (code, cc_mode);
6711
6712 if (code == NE && cc_mode != CCFPmode)
6713 return 0;
6714
6715 return 1;
6716 }
6717
6718 int
6719 trap_comparison_operator (op, mode)
6720 rtx op;
6721 enum machine_mode mode;
6722 {
6723 if (mode != VOIDmode && mode != GET_MODE (op))
6724 return 0;
6725 return GET_RTX_CLASS (GET_CODE (op)) == '<';
6726 }
6727
6728 int
6729 boolean_operator (op, mode)
6730 rtx op;
6731 enum machine_mode mode ATTRIBUTE_UNUSED;
6732 {
6733 enum rtx_code code = GET_CODE (op);
6734 return (code == AND || code == IOR || code == XOR);
6735 }
6736
6737 int
6738 boolean_or_operator (op, mode)
6739 rtx op;
6740 enum machine_mode mode ATTRIBUTE_UNUSED;
6741 {
6742 enum rtx_code code = GET_CODE (op);
6743 return (code == IOR || code == XOR);
6744 }
6745
6746 int
6747 min_max_operator (op, mode)
6748 rtx op;
6749 enum machine_mode mode ATTRIBUTE_UNUSED;
6750 {
6751 enum rtx_code code = GET_CODE (op);
6752 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
6753 }
6754 \f
6755 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
6756 mask required to convert the result of a rotate insn into a shift
6757 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
6758
6759 int
6760 includes_lshift_p (shiftop, andop)
6761 rtx shiftop;
6762 rtx andop;
6763 {
6764 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
6765
6766 shift_mask <<= INTVAL (shiftop);
6767
6768 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
6769 }
6770
6771 /* Similar, but for right shift. */
6772
6773 int
6774 includes_rshift_p (shiftop, andop)
6775 rtx shiftop;
6776 rtx andop;
6777 {
6778 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
6779
6780 shift_mask >>= INTVAL (shiftop);
6781
6782 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
6783 }
6784
6785 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
6786 to perform a left shift. It must have exactly SHIFTOP least
6787 signifigant 0's, then one or more 1's, then zero or more 0's. */
6788
int
includes_rldic_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      c = INTVAL (andop);
      /* All-zero and all-one masks cannot match the required
	 0...01...10...0 shape.  */
      if (c == 0 || c == ~0)
	return 0;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);

      /* Find the least signifigant one bit.  */
      lsb = c & -c;

      /* It must coincide with the LSB of the shift mask.  */
      if (-lsb != shift_mask)
	return 0;

      /* Invert to look for the next transition (if any).  */
      c = ~c;

      /* Remove the low group of ones (originally low group of zeros).  */
      c &= -lsb;

      /* Again find the lsb, and check we have all 1's above.  */
      lsb = c & -c;
      return c == -lsb;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* Same test, but for a 64-bit mask that may be split across two
	 HOST_WIDE_INT halves when the host word is narrower than 64
	 bits.  */
      HOST_WIDE_INT low, high, lsb;
      HOST_WIDE_INT shift_mask_low, shift_mask_high;

      low = CONST_DOUBLE_LOW (andop);
      /* HIGH is read only under the same < 64 guard below, so it is
	 never used uninitialized on 64-bit hosts.  */
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = CONST_DOUBLE_HIGH (andop);

      if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
	  || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
	return 0;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  /* The mask lives entirely in the high half; redo the
	     CONST_INT-style test there with the shift reduced by 32.  */
	  shift_mask_high = ~0;
	  if (INTVAL (shiftop) > 32)
	    shift_mask_high <<= INTVAL (shiftop) - 32;

	  lsb = high & -high;

	  if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
	    return 0;

	  high = ~high;
	  high &= -lsb;

	  lsb = high & -high;
	  return high == -lsb;
	}

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if (-lsb != shift_mask_low)
	return 0;

      /* Invert both halves and strip the low run of ones, then verify
	 that what remains is a single run reaching the top.  */
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = ~high;
      low = ~low;
      low &= -lsb;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  lsb = high & -high;
	  return high == -lsb;
	}

      lsb = low & -low;
      return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
    }
  else
    return 0;
}
6879
6880 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
6881 to perform a left shift. It must have SHIFTOP or more least
6882 signifigant 0's, with the remainder of the word 1's. */
6883
int
includes_rldicr_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);
      c = INTVAL (andop);

      /* Find the least signifigant one bit.  */
      lsb = c & -c;

      /* It must be covered by the shift mask.
	 This test also rejects c == 0.  */
      if ((lsb & shift_mask) == 0)
	return 0;

      /* Check we have all 1's above the transition, and reject all 1's.  */
      return c == -lsb && lsb != 1;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* Same test for a 64-bit mask that may be split across two
	 HOST_WIDE_INT halves when the host word is only 32 bits.  */
      HOST_WIDE_INT low, lsb, shift_mask_low;

      low = CONST_DOUBLE_LOW (andop);

      if (HOST_BITS_PER_WIDE_INT < 64)
	{
	  HOST_WIDE_INT high, shift_mask_high;

	  high = CONST_DOUBLE_HIGH (andop);

	  if (low == 0)
	    {
	      /* The mask lies entirely in the upper word; redo the
		 test there with the shift reduced by 32.  */
	      shift_mask_high = ~0;
	      if (INTVAL (shiftop) > 32)
		shift_mask_high <<= INTVAL (shiftop) - 32;

	      lsb = high & -high;

	      if ((lsb & shift_mask_high) == 0)
		return 0;

	      return high == -lsb;
	    }
	  /* When ones extend into the low word, the entire high word
	     must already be ones.  */
	  if (high != ~0)
	    return 0;
	}

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if ((lsb & shift_mask_low) == 0)
	return 0;

      return low == -lsb && lsb != 1;
    }
  else
    return 0;
}
6951
6952 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
6953 for lfq and stfq insns.
6954
6955 Note reg1 and reg2 *must* be hard registers. To be sure we will
6956 abort if we are passed pseudo registers. */
6957
6958 int
6959 registers_ok_for_quad_peep (reg1, reg2)
6960 rtx reg1, reg2;
6961 {
6962 /* We might have been passed a SUBREG. */
6963 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
6964 return 0;
6965
6966 return (REGNO (reg1) == REGNO (reg2) - 1);
6967 }
6968
6969 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
6970 addr1 and addr2 must be in consecutive memory locations
6971 (addr2 == addr1 + 8). */
6972
6973 int
6974 addrs_ok_for_quad_peep (addr1, addr2)
6975 rtx addr1;
6976 rtx addr2;
6977 {
6978 unsigned int reg1;
6979 int offset1;
6980
6981 /* Extract an offset (if used) from the first addr. */
6982 if (GET_CODE (addr1) == PLUS)
6983 {
6984 /* If not a REG, return zero. */
6985 if (GET_CODE (XEXP (addr1, 0)) != REG)
6986 return 0;
6987 else
6988 {
6989 reg1 = REGNO (XEXP (addr1, 0));
6990 /* The offset must be constant! */
6991 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
6992 return 0;
6993 offset1 = INTVAL (XEXP (addr1, 1));
6994 }
6995 }
6996 else if (GET_CODE (addr1) != REG)
6997 return 0;
6998 else
6999 {
7000 reg1 = REGNO (addr1);
7001 /* This was a simple (mem (reg)) expression. Offset is 0. */
7002 offset1 = 0;
7003 }
7004
7005 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
7006 if (GET_CODE (addr2) != PLUS)
7007 return 0;
7008
7009 if (GET_CODE (XEXP (addr2, 0)) != REG
7010 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
7011 return 0;
7012
7013 if (reg1 != REGNO (XEXP (addr2, 0)))
7014 return 0;
7015
7016 /* The offset for the second addr must be 8 more than the first addr. */
7017 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
7018 return 0;
7019
7020 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7021 instructions. */
7022 return 1;
7023 }
7024 \f
7025 /* Return the register class of a scratch register needed to copy IN into
7026 or out of a register in CLASS in MODE. If it can be done directly,
7027 NO_REGS is returned. */
7028
enum reg_class
secondary_reload_class (class, mode, in)
     enum reg_class class;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx in;
{
  int regno;

  if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
    {
      /* We cannot copy a symbolic operand directly into anything
	 other than BASE_REGS for TARGET_ELF.  So indicate that a
	 register from BASE_REGS is needed as an intermediate
	 register.

	 On Darwin, pic addresses require a load from memory, which
	 needs a base register.  */
      if (class != BASE_REGS
	  && (GET_CODE (in) == SYMBOL_REF
	      || GET_CODE (in) == HIGH
	      || GET_CODE (in) == LABEL_REF
	      || GET_CODE (in) == CONST))
	return BASE_REGS;
    }

  /* Reduce IN to a hard register number, or to -1 when it is not (or
     is not known to be) a hard register.  */
  if (GET_CODE (in) == REG)
    {
      regno = REGNO (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  regno = true_regnum (in);
	  if (regno >= FIRST_PSEUDO_REGISTER)
	    regno = -1;
	}
    }
  else if (GET_CODE (in) == SUBREG)
    {
      regno = true_regnum (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = -1;
    }
  else
    regno = -1;

  /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
     into anything.  */
  if (class == GENERAL_REGS || class == BASE_REGS
      || (regno >= 0 && INT_REGNO_P (regno)))
    return NO_REGS;

  /* Constants, memory, and FP registers can go into FP registers.
     (regno == -1 covers constants and memory here.)  */
  if ((regno == -1 || FP_REGNO_P (regno))
      && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
    return NO_REGS;

  /* Memory, and AltiVec registers can go into AltiVec registers.  */
  if ((regno == -1 || ALTIVEC_REGNO_P (regno))
      && class == ALTIVEC_REGS)
    return NO_REGS;

  /* We can copy among the CR registers.  */
  if ((class == CR_REGS || class == CR0_REGS)
      && regno >= 0 && CR_REGNO_P (regno))
    return NO_REGS;

  /* Otherwise, we need GENERAL_REGS.  */
  return GENERAL_REGS;
}
7097 \f
7098 /* Given a comparison operation, return the bit number in CCR to test. We
7099 know this is a valid comparison.
7100
7101 SCC_P is 1 if this is for an scc. That means that %D will have been
7102 used instead of %C, so the bits will be in different places.
7103
7104 Return -1 if OP isn't a valid comparison for some reason. */
7105
int
ccr_bit (op, scc_p)
     rtx op;
     int scc_p;
{
  enum rtx_code code = GET_CODE (op);
  enum machine_mode cc_mode;
  int cc_regnum;
  int base_bit;
  rtx reg;

  if (GET_RTX_CLASS (code) != '<')
    return -1;

  reg = XEXP (op, 0);

  /* The comparison must be against a CR register.  */
  if (GET_CODE (reg) != REG
      || ! CR_REGNO_P (REGNO (reg)))
    abort ();

  cc_mode = GET_MODE (reg);
  cc_regnum = REGNO (reg);
  /* Each CR field occupies four consecutive CCR bits; BASE_BIT is the
     first bit of this field.  The cases below map LT to base_bit,
     GT to base_bit + 1, EQ to base_bit + 2 and unordered to
     base_bit + 3.  */
  base_bit = 4 * (cc_regnum - CR0_REGNO);

  validate_condition_mode (code, cc_mode);

  switch (code)
    {
    case NE:
      /* NOTE(review): SPE FP compares appear to use the single
	 base_bit + 1 position for both EQ and NE -- see the EQ case.  */
      if (TARGET_SPE && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
	return base_bit + 1;
      return scc_p ? base_bit + 3 : base_bit + 2;
    case EQ:
      if (TARGET_SPE && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
	return base_bit + 1;
      return base_bit + 2;
    case GT:  case GTU:  case UNLE:
      return base_bit + 1;
    case LT:  case LTU:  case UNGE:
      return base_bit;
    case ORDERED:  case UNORDERED:
      return base_bit + 3;

    case GE:  case GEU:
      /* If scc, we will have done a cror to put the bit in the
	 unordered position.  So test that bit.  For integer, this is ! LT
	 unless this is an scc insn.  */
      return scc_p ? base_bit + 3 : base_bit;

    case LE:  case LEU:
      return scc_p ? base_bit + 3 : base_bit + 1;

    default:
      abort ();
    }
}
7162 \f
7163 /* Return the GOT register. */
7164
struct rtx_def *
rs6000_got_register (value)
     rtx value ATTRIBUTE_UNUSED;
{
  /* The second flow pass currently (June 1999) can't update
     regs_ever_live without disturbing other parts of the compiler, so
     update it here to make the prolog/epilogue code happy.  */
  if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
    regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;

  /* Record that this function uses the PIC offset table.  */
  current_function_uses_pic_offset_table = 1;

  return pic_offset_table_rtx;
}
7179 \f
7180 /* Function to init struct machine_function.
7181 This will be called, via a pointer variable,
7182 from push_function_context. */
7183
static struct machine_function *
rs6000_init_machine_status ()
{
  /* ggc_alloc_cleared hands back zero-filled GC-managed storage, so
     every field of the new machine_function starts out 0/NULL.  */
  return ggc_alloc_cleared (sizeof (machine_function));
}
7189 \f
7190 /* These macros test for integers and extract the low-order bits. */
7191 #define INT_P(X) \
7192 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
7193 && GET_MODE (X) == VOIDmode)
7194
7195 #define INT_LOWPART(X) \
7196 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
7197
7198 int
7199 extract_MB (op)
7200 rtx op;
7201 {
7202 int i;
7203 unsigned long val = INT_LOWPART (op);
7204
7205 /* If the high bit is zero, the value is the first 1 bit we find
7206 from the left. */
7207 if ((val & 0x80000000) == 0)
7208 {
7209 if ((val & 0xffffffff) == 0)
7210 abort ();
7211
7212 i = 1;
7213 while (((val <<= 1) & 0x80000000) == 0)
7214 ++i;
7215 return i;
7216 }
7217
7218 /* If the high bit is set and the low bit is not, or the mask is all
7219 1's, the value is zero. */
7220 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
7221 return 0;
7222
7223 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7224 from the right. */
7225 i = 31;
7226 while (((val >>= 1) & 1) != 0)
7227 --i;
7228
7229 return i;
7230 }
7231
7232 int
7233 extract_ME (op)
7234 rtx op;
7235 {
7236 int i;
7237 unsigned long val = INT_LOWPART (op);
7238
7239 /* If the low bit is zero, the value is the first 1 bit we find from
7240 the right. */
7241 if ((val & 1) == 0)
7242 {
7243 if ((val & 0xffffffff) == 0)
7244 abort ();
7245
7246 i = 30;
7247 while (((val >>= 1) & 1) == 0)
7248 --i;
7249
7250 return i;
7251 }
7252
7253 /* If the low bit is set and the high bit is not, or the mask is all
7254 1's, the value is 31. */
7255 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
7256 return 31;
7257
7258 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7259 from the left. */
7260 i = 0;
7261 while (((val <<= 1) & 0x80000000) != 0)
7262 ++i;
7263
7264 return i;
7265 }
7266
7267 /* Print an operand. Recognize special options, documented below. */
7268
7269 #if TARGET_ELF
7270 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
7271 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
7272 #else
7273 #define SMALL_DATA_RELOC "sda21"
7274 #define SMALL_DATA_REG 0
7275 #endif
7276
7277 void
7278 print_operand (file, x, code)
7279 FILE *file;
7280 rtx x;
7281 int code;
7282 {
7283 int i;
7284 HOST_WIDE_INT val;
7285 unsigned HOST_WIDE_INT uval;
7286
7287 switch (code)
7288 {
7289 case '.':
7290 /* Write out an instruction after the call which may be replaced
7291 with glue code by the loader. This depends on the AIX version. */
7292 asm_fprintf (file, RS6000_CALL_GLUE);
7293 return;
7294
7295 /* %a is output_address. */
7296
7297 case 'A':
7298 /* If X is a constant integer whose low-order 5 bits are zero,
7299 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
7300 in the AIX assembler where "sri" with a zero shift count
7301 writes a trash instruction. */
7302 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
7303 putc ('l', file);
7304 else
7305 putc ('r', file);
7306 return;
7307
7308 case 'b':
7309 /* If constant, low-order 16 bits of constant, unsigned.
7310 Otherwise, write normally. */
7311 if (INT_P (x))
7312 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
7313 else
7314 print_operand (file, x, 0);
7315 return;
7316
7317 case 'B':
7318 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
7319 for 64-bit mask direction. */
7320 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
7321 return;
7322
7323 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
7324 output_operand. */
7325
7326 case 'D':
7327 /* There used to be a comment for 'C' reading "This is an
7328 optional cror needed for certain floating-point
7329 comparisons. Otherwise write nothing." */
7330
7331 /* Similar, except that this is for an scc, so we must be able to
7332 encode the test in a single bit that is one. We do the above
7333 for any LE, GE, GEU, or LEU and invert the bit for NE. */
7334 if (GET_CODE (x) == LE || GET_CODE (x) == GE
7335 || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
7336 {
7337 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7338
7339 fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
7340 base_bit + 2,
7341 base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
7342 }
7343
7344 else if (GET_CODE (x) == NE)
7345 {
7346 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7347
7348 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
7349 base_bit + 2, base_bit + 2);
7350 }
7351 else if (TARGET_SPE && TARGET_HARD_FLOAT
7352 && GET_CODE (x) == EQ
7353 && GET_MODE (XEXP (x, 0)) == CCFPmode)
7354 {
7355 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7356
7357 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 1,
7358 base_bit + 1, base_bit + 1);
7359 }
7360 return;
7361
7362 case 'E':
7363 /* X is a CR register. Print the number of the EQ bit of the CR */
7364 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7365 output_operand_lossage ("invalid %%E value");
7366 else
7367 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
7368 return;
7369
7370 case 'f':
7371 /* X is a CR register. Print the shift count needed to move it
7372 to the high-order four bits. */
7373 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7374 output_operand_lossage ("invalid %%f value");
7375 else
7376 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
7377 return;
7378
7379 case 'F':
7380 /* Similar, but print the count for the rotate in the opposite
7381 direction. */
7382 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7383 output_operand_lossage ("invalid %%F value");
7384 else
7385 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
7386 return;
7387
7388 case 'G':
7389 /* X is a constant integer. If it is negative, print "m",
7390 otherwise print "z". This is to make an aze or ame insn. */
7391 if (GET_CODE (x) != CONST_INT)
7392 output_operand_lossage ("invalid %%G value");
7393 else if (INTVAL (x) >= 0)
7394 putc ('z', file);
7395 else
7396 putc ('m', file);
7397 return;
7398
7399 case 'h':
7400 /* If constant, output low-order five bits. Otherwise, write
7401 normally. */
7402 if (INT_P (x))
7403 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
7404 else
7405 print_operand (file, x, 0);
7406 return;
7407
7408 case 'H':
7409 /* If constant, output low-order six bits. Otherwise, write
7410 normally. */
7411 if (INT_P (x))
7412 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
7413 else
7414 print_operand (file, x, 0);
7415 return;
7416
7417 case 'I':
7418 /* Print `i' if this is a constant, else nothing. */
7419 if (INT_P (x))
7420 putc ('i', file);
7421 return;
7422
7423 case 'j':
7424 /* Write the bit number in CCR for jump. */
7425 i = ccr_bit (x, 0);
7426 if (i == -1)
7427 output_operand_lossage ("invalid %%j code");
7428 else
7429 fprintf (file, "%d", i);
7430 return;
7431
7432 case 'J':
7433 /* Similar, but add one for shift count in rlinm for scc and pass
7434 scc flag to `ccr_bit'. */
7435 i = ccr_bit (x, 1);
7436 if (i == -1)
7437 output_operand_lossage ("invalid %%J code");
7438 else
7439 /* If we want bit 31, write a shift count of zero, not 32. */
7440 fprintf (file, "%d", i == 31 ? 0 : i + 1);
7441 return;
7442
7443 case 'k':
7444 /* X must be a constant. Write the 1's complement of the
7445 constant. */
7446 if (! INT_P (x))
7447 output_operand_lossage ("invalid %%k value");
7448 else
7449 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
7450 return;
7451
7452 case 'K':
7453 /* X must be a symbolic constant on ELF. Write an
7454 expression suitable for an 'addi' that adds in the low 16
7455 bits of the MEM. */
7456 if (GET_CODE (x) != CONST)
7457 {
7458 print_operand_address (file, x);
7459 fputs ("@l", file);
7460 }
7461 else
7462 {
7463 if (GET_CODE (XEXP (x, 0)) != PLUS
7464 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
7465 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
7466 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
7467 output_operand_lossage ("invalid %%K value");
7468 print_operand_address (file, XEXP (XEXP (x, 0), 0));
7469 fputs ("@l", file);
7470 /* For GNU as, there must be a non-alphanumeric character
7471 between 'l' and the number. The '-' is added by
7472 print_operand() already. */
7473 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
7474 fputs ("+", file);
7475 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
7476 }
7477 return;
7478
7479 /* %l is output_asm_label. */
7480
7481 case 'L':
7482 /* Write second word of DImode or DFmode reference. Works on register
7483 or non-indexed memory only. */
7484 if (GET_CODE (x) == REG)
7485 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
7486 else if (GET_CODE (x) == MEM)
7487 {
7488 /* Handle possible auto-increment. Since it is pre-increment and
7489 we have already done it, we can just use an offset of word. */
7490 if (GET_CODE (XEXP (x, 0)) == PRE_INC
7491 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
7492 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
7493 UNITS_PER_WORD));
7494 else
7495 output_address (XEXP (adjust_address_nv (x, SImode,
7496 UNITS_PER_WORD),
7497 0));
7498
7499 if (small_data_operand (x, GET_MODE (x)))
7500 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
7501 reg_names[SMALL_DATA_REG]);
7502 }
7503 return;
7504
7505 case 'm':
7506 /* MB value for a mask operand. */
7507 if (! mask_operand (x, SImode))
7508 output_operand_lossage ("invalid %%m value");
7509
7510 fprintf (file, "%d", extract_MB (x));
7511 return;
7512
7513 case 'M':
7514 /* ME value for a mask operand. */
7515 if (! mask_operand (x, SImode))
7516 output_operand_lossage ("invalid %%M value");
7517
7518 fprintf (file, "%d", extract_ME (x));
7519 return;
7520
7521 /* %n outputs the negative of its operand. */
7522
7523 case 'N':
7524 /* Write the number of elements in the vector times 4. */
7525 if (GET_CODE (x) != PARALLEL)
7526 output_operand_lossage ("invalid %%N value");
7527 else
7528 fprintf (file, "%d", XVECLEN (x, 0) * 4);
7529 return;
7530
7531 case 'O':
7532 /* Similar, but subtract 1 first. */
7533 if (GET_CODE (x) != PARALLEL)
7534 output_operand_lossage ("invalid %%O value");
7535 else
7536 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
7537 return;
7538
7539 case 'p':
7540 /* X is a CONST_INT that is a power of two. Output the logarithm. */
7541 if (! INT_P (x)
7542 || INT_LOWPART (x) < 0
7543 || (i = exact_log2 (INT_LOWPART (x))) < 0)
7544 output_operand_lossage ("invalid %%p value");
7545 else
7546 fprintf (file, "%d", i);
7547 return;
7548
7549 case 'P':
7550 /* The operand must be an indirect memory reference. The result
7551 is the register number. */
7552 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
7553 || REGNO (XEXP (x, 0)) >= 32)
7554 output_operand_lossage ("invalid %%P value");
7555 else
7556 fprintf (file, "%d", REGNO (XEXP (x, 0)));
7557 return;
7558
7559 case 'q':
7560 /* This outputs the logical code corresponding to a boolean
7561 expression. The expression may have one or both operands
7562 negated (if one, only the first one). For condition register
7563 logical operations, it will also treat the negated
7564 CR codes as NOTs, but not handle NOTs of them. */
7565 {
7566 const char *const *t = 0;
7567 const char *s;
7568 enum rtx_code code = GET_CODE (x);
7569 static const char * const tbl[3][3] = {
7570 { "and", "andc", "nor" },
7571 { "or", "orc", "nand" },
7572 { "xor", "eqv", "xor" } };
7573
7574 if (code == AND)
7575 t = tbl[0];
7576 else if (code == IOR)
7577 t = tbl[1];
7578 else if (code == XOR)
7579 t = tbl[2];
7580 else
7581 output_operand_lossage ("invalid %%q value");
7582
7583 if (GET_CODE (XEXP (x, 0)) != NOT)
7584 s = t[0];
7585 else
7586 {
7587 if (GET_CODE (XEXP (x, 1)) == NOT)
7588 s = t[2];
7589 else
7590 s = t[1];
7591 }
7592
7593 fputs (s, file);
7594 }
7595 return;
7596
7597 case 'R':
7598 /* X is a CR register. Print the mask for `mtcrf'. */
7599 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7600 output_operand_lossage ("invalid %%R value");
7601 else
7602 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
7603 return;
7604
7605 case 's':
7606 /* Low 5 bits of 32 - value */
7607 if (! INT_P (x))
7608 output_operand_lossage ("invalid %%s value");
7609 else
7610 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
7611 return;
7612
7613 case 'S':
7614 /* PowerPC64 mask position. All 0's is excluded.
7615 CONST_INT 32-bit mask is considered sign-extended so any
7616 transition must occur within the CONST_INT, not on the boundary. */
7617 if (! mask64_operand (x, DImode))
7618 output_operand_lossage ("invalid %%S value");
7619
7620 uval = INT_LOWPART (x);
7621
7622 if (uval & 1) /* Clear Left */
7623 {
7624 uval &= ((unsigned HOST_WIDE_INT) 1 << 63 << 1) - 1;
7625 i = 64;
7626 }
7627 else /* Clear Right */
7628 {
7629 uval = ~uval;
7630 uval &= ((unsigned HOST_WIDE_INT) 1 << 63 << 1) - 1;
7631 i = 63;
7632 }
7633 while (uval != 0)
7634 --i, uval >>= 1;
7635 if (i < 0)
7636 abort ();
7637 fprintf (file, "%d", i);
7638 return;
7639
7640 case 't':
7641 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
7642 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
7643 abort ();
7644
7645 /* Bit 3 is OV bit. */
7646 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
7647
7648 /* If we want bit 31, write a shift count of zero, not 32. */
7649 fprintf (file, "%d", i == 31 ? 0 : i + 1);
7650 return;
7651
7652 case 'T':
7653 /* Print the symbolic name of a branch target register. */
7654 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
7655 && REGNO (x) != COUNT_REGISTER_REGNUM))
7656 output_operand_lossage ("invalid %%T value");
7657 else if (REGNO (x) == LINK_REGISTER_REGNUM)
7658 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
7659 else
7660 fputs ("ctr", file);
7661 return;
7662
7663 case 'u':
7664 /* High-order 16 bits of constant for use in unsigned operand. */
7665 if (! INT_P (x))
7666 output_operand_lossage ("invalid %%u value");
7667 else
7668 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
7669 (INT_LOWPART (x) >> 16) & 0xffff);
7670 return;
7671
7672 case 'v':
7673 /* High-order 16 bits of constant for use in signed operand. */
7674 if (! INT_P (x))
7675 output_operand_lossage ("invalid %%v value");
7676 else
7677 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
7678 (INT_LOWPART (x) >> 16) & 0xffff);
7679 return;
7680
7681 case 'U':
7682 /* Print `u' if this has an auto-increment or auto-decrement. */
7683 if (GET_CODE (x) == MEM
7684 && (GET_CODE (XEXP (x, 0)) == PRE_INC
7685 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
7686 putc ('u', file);
7687 return;
7688
7689 case 'V':
7690 /* Print the trap code for this operand. */
7691 switch (GET_CODE (x))
7692 {
7693 case EQ:
7694 fputs ("eq", file); /* 4 */
7695 break;
7696 case NE:
7697 fputs ("ne", file); /* 24 */
7698 break;
7699 case LT:
7700 fputs ("lt", file); /* 16 */
7701 break;
7702 case LE:
7703 fputs ("le", file); /* 20 */
7704 break;
7705 case GT:
7706 fputs ("gt", file); /* 8 */
7707 break;
7708 case GE:
7709 fputs ("ge", file); /* 12 */
7710 break;
7711 case LTU:
7712 fputs ("llt", file); /* 2 */
7713 break;
7714 case LEU:
7715 fputs ("lle", file); /* 6 */
7716 break;
7717 case GTU:
7718 fputs ("lgt", file); /* 1 */
7719 break;
7720 case GEU:
7721 fputs ("lge", file); /* 5 */
7722 break;
7723 default:
7724 abort ();
7725 }
7726 break;
7727
7728 case 'w':
7729 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
7730 normally. */
7731 if (INT_P (x))
7732 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
7733 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
7734 else
7735 print_operand (file, x, 0);
7736 return;
7737
7738 case 'W':
7739 /* MB value for a PowerPC64 rldic operand. */
7740 val = (GET_CODE (x) == CONST_INT
7741 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
7742
7743 if (val < 0)
7744 i = -1;
7745 else
7746 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
7747 if ((val <<= 1) < 0)
7748 break;
7749
7750 #if HOST_BITS_PER_WIDE_INT == 32
7751 if (GET_CODE (x) == CONST_INT && i >= 0)
7752 i += 32; /* zero-extend high-part was all 0's */
7753 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
7754 {
7755 val = CONST_DOUBLE_LOW (x);
7756
7757 if (val == 0)
7758 abort ();
7759 else if (val < 0)
7760 --i;
7761 else
7762 for ( ; i < 64; i++)
7763 if ((val <<= 1) < 0)
7764 break;
7765 }
7766 #endif
7767
7768 fprintf (file, "%d", i + 1);
7769 return;
7770
7771 case 'X':
7772 if (GET_CODE (x) == MEM
7773 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
7774 putc ('x', file);
7775 return;
7776
7777 case 'Y':
7778 /* Like 'L', for third word of TImode */
7779 if (GET_CODE (x) == REG)
7780 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
7781 else if (GET_CODE (x) == MEM)
7782 {
7783 if (GET_CODE (XEXP (x, 0)) == PRE_INC
7784 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
7785 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
7786 else
7787 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
7788 if (small_data_operand (x, GET_MODE (x)))
7789 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
7790 reg_names[SMALL_DATA_REG]);
7791 }
7792 return;
7793
7794 case 'z':
7795 /* X is a SYMBOL_REF. Write out the name preceded by a
7796 period and without any trailing data in brackets. Used for function
7797 names. If we are configured for System V (or the embedded ABI) on
7798 the PowerPC, do not emit the period, since those systems do not use
7799 TOCs and the like. */
7800 if (GET_CODE (x) != SYMBOL_REF)
7801 abort ();
7802
7803 if (XSTR (x, 0)[0] != '.')
7804 {
7805 switch (DEFAULT_ABI)
7806 {
7807 default:
7808 abort ();
7809
7810 case ABI_AIX:
7811 putc ('.', file);
7812 break;
7813
7814 case ABI_V4:
7815 case ABI_AIX_NODESC:
7816 case ABI_DARWIN:
7817 break;
7818 }
7819 }
7820 #if TARGET_AIX
7821 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
7822 #else
7823 assemble_name (file, XSTR (x, 0));
7824 #endif
7825 return;
7826
7827 case 'Z':
7828 /* Like 'L', for last word of TImode. */
7829 if (GET_CODE (x) == REG)
7830 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
7831 else if (GET_CODE (x) == MEM)
7832 {
7833 if (GET_CODE (XEXP (x, 0)) == PRE_INC
7834 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
7835 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
7836 else
7837 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
7838 if (small_data_operand (x, GET_MODE (x)))
7839 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
7840 reg_names[SMALL_DATA_REG]);
7841 }
7842 return;
7843
7844 /* Print AltiVec or SPE memory operand. */
7845 case 'y':
7846 {
7847 rtx tmp;
7848
7849 if (GET_CODE (x) != MEM)
7850 abort ();
7851
7852 tmp = XEXP (x, 0);
7853
7854 if (TARGET_SPE)
7855 {
7856 /* Handle [reg]. */
7857 if (GET_CODE (tmp) == REG)
7858 {
7859 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
7860 break;
7861 }
7862 /* Handle [reg+UIMM]. */
7863 else if (GET_CODE (tmp) == PLUS &&
7864 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
7865 {
7866 int x;
7867
7868 if (GET_CODE (XEXP (tmp, 0)) != REG)
7869 abort ();
7870
7871 x = INTVAL (XEXP (tmp, 1));
7872 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
7873 break;
7874 }
7875
7876 /* Fall through. Must be [reg+reg]. */
7877 }
7878 if (GET_CODE (tmp) == REG)
7879 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
7880 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
7881 {
7882 if (REGNO (XEXP (tmp, 0)) == 0)
7883 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
7884 reg_names[ REGNO (XEXP (tmp, 0)) ]);
7885 else
7886 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
7887 reg_names[ REGNO (XEXP (tmp, 1)) ]);
7888 }
7889 else
7890 abort ();
7891 break;
7892 }
7893
7894 case 0:
7895 if (GET_CODE (x) == REG)
7896 fprintf (file, "%s", reg_names[REGNO (x)]);
7897 else if (GET_CODE (x) == MEM)
7898 {
7899 /* We need to handle PRE_INC and PRE_DEC here, since we need to
7900 know the width from the mode. */
7901 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
7902 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
7903 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
7904 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
7905 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
7906 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
7907 else
7908 output_address (XEXP (x, 0));
7909 }
7910 else
7911 output_addr_const (file, x);
7912 return;
7913
7914 default:
7915 output_operand_lossage ("invalid %%xn code");
7916 }
7917 }
7918 \f
/* Print the address of an operand.  X is a memory address in one of
   the forms the backend accepts; write its assembler spelling to FILE.
   Aborts on any address shape not handled below.  */

void
print_operand_address (file, x)
     FILE *file;
     rtx x;
{
  if (GET_CODE (x) == REG)
    /* Plain register indirect: explicit zero displacement.  */
    fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
  else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
	   || GET_CODE (x) == LABEL_REF)
    {
      output_addr_const (file, x);
      if (small_data_operand (x, GET_MODE (x)))
	/* Small-data symbols are addressed relative to the small-data
	   base register via the small-data relocation.  */
	fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		 reg_names[SMALL_DATA_REG]);
      else if (TARGET_TOC)
	/* A bare symbolic address should not reach here when a TOC is
	   in use.  */
	abort ();
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
    {
      /* Indexed form "rA,rB".  When the first register is r0, print it
	 second -- presumably because r0 in the base slot is read as a
	 literal zero by the hardware (same swap as in print_operand's
	 'y' case); TODO confirm against the ISA.  */
      if (REGNO (XEXP (x, 0)) == 0)
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
		 reg_names[ REGNO (XEXP (x, 0)) ]);
      else
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
		 reg_names[ REGNO (XEXP (x, 1)) ]);
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      /* Displacement form "D(rA)".  */
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
      fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#if TARGET_ELF
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
	   && CONSTANT_P (XEXP (x, 1)))
    {
      /* ELF low-half of a constant: "@l" relocation suffix.  */
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
#if TARGET_MACHO
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
	   && CONSTANT_P (XEXP (x, 1)))
    {
      /* Darwin spells the low-half relocation "lo16(...)" instead.  */
      fprintf (file, "lo16(");
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
  else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
    {
      if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
	{
	  rtx contains_minus = XEXP (x, 1);
	  rtx minus, symref;
	  const char *name;

	  /* Find the (minus (sym) (toc)) buried in X, and temporarily
	     turn it into (sym) for output_addr_const.  */
	  while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
	    contains_minus = XEXP (contains_minus, 0);

	  minus = XEXP (contains_minus, 0);
	  symref = XEXP (minus, 0);
	  XEXP (contains_minus, 0) = symref;
	  if (TARGET_ELF)
	    {
	      char *newname;

	      /* On ELF, additionally give the symbol an "@toc" suffix
		 for the duration of the output_addr_const call.  The
		 replacement name is alloca'd, so the original must be
		 (and is) restored before this frame is left.  */
	      name = XSTR (symref, 0);
	      newname = alloca (strlen (name) + sizeof ("@toc"));
	      strcpy (newname, name);
	      strcat (newname, "@toc");
	      XSTR (symref, 0) = newname;
	    }
	  output_addr_const (file, XEXP (x, 1));
	  if (TARGET_ELF)
	    XSTR (symref, 0) = name;
	  /* Undo the temporary surgery on the shared RTL.  */
	  XEXP (contains_minus, 0) = minus;
	}
      else
	output_addr_const (file, XEXP (x, 1));

      fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
    }
  else
    abort ();
}
8008 \f
/* Target hook for assembling integer objects.  The powerpc version has
   to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
   is defined.  It also needs to handle DI-mode objects on 64-bit
   targets.

   X is the value to emit, SIZE its size in bytes, ALIGNED_P non-zero
   when the location is known to be aligned.  Returns true if this
   function emitted the value itself; otherwise falls through to
   default_assemble_integer.  */

static bool
rs6000_assemble_integer (x, size, aligned_p)
     rtx x;
     unsigned int size;
     int aligned_p;
{
#ifdef RELOCATABLE_NEEDS_FIXUP
  /* Special handling for SI values.  */
  if (size == 4 && aligned_p)
    {
      extern int in_toc_section PARAMS ((void));
      /* Recursion guard: set while we are emitting a fixup entry so a
	 nested invocation falls through to the default path.  */
      static int recurse = 0;

      /* For -mrelocatable, we mark all addresses that need to be fixed up
	 in the .fixup section.  */
      if (TARGET_RELOCATABLE
	  && !in_toc_section ()
	  && !in_text_section ()
	  && !recurse
	  && GET_CODE (x) != CONST_INT
	  && GET_CODE (x) != CONST_DOUBLE
	  && CONSTANT_P (x))
	{
	  char buf[256];

	  recurse = 1;
	  /* Emit a local label for the word, the word itself tagged
	     @fixup, then record the label's address in the .fixup
	     section so the startup code can relocate it.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
	  fixuplabelno++;
	  ASM_OUTPUT_LABEL (asm_out_file, buf);
	  fprintf (asm_out_file, "\t.long\t(");
	  output_addr_const (asm_out_file, x);
	  fprintf (asm_out_file, ")@fixup\n");
	  fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
	  ASM_OUTPUT_ALIGN (asm_out_file, 2);
	  fprintf (asm_out_file, "\t.long\t");
	  assemble_name (asm_out_file, buf);
	  fprintf (asm_out_file, "\n\t.previous\n");
	  recurse = 0;
	  return true;
	}
      /* Remove initial .'s to turn a -mcall-aixdesc function
	 address into the address of the descriptor, not the function
	 itself.  */
      else if (GET_CODE (x) == SYMBOL_REF
	       && XSTR (x, 0)[0] == '.'
	       && DEFAULT_ABI == ABI_AIX)
	{
	  const char *name = XSTR (x, 0);
	  while (*name == '.')
	    name++;

	  fprintf (asm_out_file, "\t.long\t%s\n", name);
	  return true;
	}
    }
#endif /* RELOCATABLE_NEEDS_FIXUP */
  return default_assemble_integer (x, size, aligned_p);
}
8072
#ifdef HAVE_GAS_HIDDEN
/* Emit an assembler directive to set symbol visibility for DECL to
   VISIBILITY_TYPE (e.g. "hidden").  */

static void
rs6000_assemble_visibility (decl, visibility_type)
     tree decl;
     const char *visibility_type;
{
  /* Let the generic code emit the directive for DECL's own symbol.  */
  default_assemble_visibility (decl, visibility_type);

  /* Functions need to have their entry point symbol visibility set as
     well as their descriptor symbol visibility.  */
  if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
    {
      const char *name;

      name = ((* targetm.strip_name_encoding)
	      (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));

      /* The entry point symbol is the descriptor name with a leading
	 '.', hence the second directive on ".NAME".  */
      fprintf (asm_out_file, "\t.%s\t.%s\n", visibility_type, name);
    }
}
#endif
8097 \f
8098 enum rtx_code
8099 rs6000_reverse_condition (mode, code)
8100 enum machine_mode mode;
8101 enum rtx_code code;
8102 {
8103 /* Reversal of FP compares takes care -- an ordered compare
8104 becomes an unordered compare and vice versa. */
8105 if (mode == CCFPmode)
8106 return reverse_condition_maybe_unordered (code);
8107 else
8108 return reverse_condition (code);
8109 }
8110
/* Generate a compare for CODE.  Return a brand-new rtx that
   represents the result of the compare.  The operands compared are
   the globals rs6000_compare_op0 and rs6000_compare_op1;
   rs6000_compare_fp_p says whether this is a floating-point compare.
   Emits the compare insn(s) and returns (CODE cc-reg 0).  */

static rtx
rs6000_generate_compare (code)
     enum rtx_code code;
{
  enum machine_mode comp_mode;
  rtx compare_result;

  /* Pick the condition-register mode: CCFP for floating point,
     CCUNS for unsigned integer compares, plain CC otherwise.  */
  if (rs6000_compare_fp_p)
    comp_mode = CCFPmode;
  else if (code == GTU || code == LTU
	   || code == GEU || code == LEU)
    comp_mode = CCUNSmode;
  else
    comp_mode = CCmode;

  /* First, the compare.  */
  compare_result = gen_reg_rtx (comp_mode);

  /* SPE FP compare instructions on the GPRs.  Yuck!  */
  if ((TARGET_SPE && TARGET_HARD_FLOAT) && rs6000_compare_fp_p)
    {
      rtx cmp, or1, or2, or_result, compare_result2;

      /* SPE only provides eq/gt/lt tests, so map CODE onto one of
	 those three; the remaining fixups happen below.  */
      switch (code)
	{
	case EQ:
	case UNEQ:
	case NE:
	case LTGT:
	  cmp = flag_unsafe_math_optimizations
	    ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1);
	  break;
	case GT:
	case GTU:
	case UNGT:
	case UNGE:
	case GE:
	case GEU:
	  cmp = flag_unsafe_math_optimizations
	    ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1);
	  break;
	case LT:
	case LTU:
	case UNLT:
	case UNLE:
	case LE:
	case LEU:
	  cmp = flag_unsafe_math_optimizations
	    ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1);
	  break;
	default:
	  abort ();
	}

      /* Synthesize LE and GE from LT/GT || EQ.  */
      if (code == LE || code == GE || code == LEU || code == GEU)
	{
	  /* Synthesize GE/LE from GT/LT || EQ.  */

	  emit_insn (cmp);

	  switch (code)
	    {
	    case LE: code = LT; break;
	    case GE: code = GT; break;
	    case LEU: code = LT; break;
	    case GEU: code = GT; break;
	    default: abort ();
	    }

	  /* NOTE(review): the two SImode pseudos assigned to or1/or2
	     here are never used -- both variables are overwritten with
	     NE rtxes below before any use.  Presumably leftover;
	     confirm before removing since gen_reg_rtx has the side
	     effect of allocating pseudo numbers.  */
	  or1 = gen_reg_rtx (SImode);
	  or2 = gen_reg_rtx (SImode);
	  or_result = gen_reg_rtx (CCEQmode);
	  compare_result2 = gen_reg_rtx (CCFPmode);

	  /* Do the EQ.  */
	  cmp = flag_unsafe_math_optimizations
	    ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
			       rs6000_compare_op1);
	  emit_insn (cmp);

	  /* The MC8540 FP compare instructions set the CR bits
	     differently than other PPC compare instructions.  For
	     that matter, there is no generic test instruction, but a
	     testgt, testlt, and testeq.  For a true condition, bit 2
	     is set (x1xx) in the CR.  Following the traditional CR
	     values:

	     LT    GT    EQ   OV
	     bit3  bit2  bit1 bit0

	     ... bit 2 would be a GT CR alias, so later on we
	     look in the GT bits for the branch instructions.
	     However, we must be careful to emit correct RTL in
	     the meantime, so optimizations don't get confused.  */

	  or1 = gen_rtx (NE, SImode, compare_result, const0_rtx);
	  or2 = gen_rtx (NE, SImode, compare_result2, const0_rtx);

	  /* OR them together.  */
	  cmp = gen_rtx_SET (VOIDmode, or_result,
			     gen_rtx_COMPARE (CCEQmode,
					      gen_rtx_IOR (SImode, or1, or2),
					      const_true_rtx));
	  compare_result = or_result;
	  code = EQ;
	}
      else
	{
	  /* We only care about 1 bit (x1xx), so map everything to NE to
	     maintain rtl sanity.  We'll get to the right bit (x1xx) at
	     code output time.  */
	  if (code == NE || code == LTGT)
	    /* Do the inverse here because we have no cmpne
	       instruction.  We use the cmpeq instruction and expect
	       to get a 0 instead.  */
	    code = EQ;
	  else
	    code = NE;
	}

      emit_insn (cmp);
    }
  else
    /* Ordinary case: a single RTL compare into the CC pseudo.  */
    emit_insn (gen_rtx_SET (VOIDmode, compare_result,
			    gen_rtx_COMPARE (comp_mode,
					     rs6000_compare_op0,
					     rs6000_compare_op1)));

  /* Some kinds of FP comparisons need an OR operation;
     except for flag_unsafe_math_optimizations we don't bother.  */
  if (rs6000_compare_fp_p
      && ! flag_unsafe_math_optimizations
      && ! (TARGET_HARD_FLOAT && TARGET_SPE)
      && (code == LE || code == GE
	  || code == UNEQ || code == LTGT
	  || code == UNGT || code == UNLT))
    {
      enum rtx_code or1, or2;
      rtx or1_rtx, or2_rtx, compare2_rtx;
      rtx or_result = gen_reg_rtx (CCEQmode);

      /* Decompose CODE into two conditions whose disjunction is
	 equivalent, then OR the two CR bits into a CCEQ result.  */
      switch (code)
	{
	case LE: or1 = LT; or2 = EQ; break;
	case GE: or1 = GT; or2 = EQ; break;
	case UNEQ: or1 = UNORDERED; or2 = EQ; break;
	case LTGT: or1 = LT; or2 = GT; break;
	case UNGT: or1 = UNORDERED; or2 = GT; break;
	case UNLT: or1 = UNORDERED; or2 = LT; break;
	default: abort ();
	}
      validate_condition_mode (or1, comp_mode);
      validate_condition_mode (or2, comp_mode);
      or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
      or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
      compare2_rtx = gen_rtx_COMPARE (CCEQmode,
				      gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
				      const_true_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));

      compare_result = or_result;
      code = EQ;
    }

  validate_condition_mode (code, GET_MODE (compare_result));

  return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
}
8294
8295
8296 /* Emit the RTL for an sCOND pattern. */
8297
8298 void
8299 rs6000_emit_sCOND (code, result)
8300 enum rtx_code code;
8301 rtx result;
8302 {
8303 rtx condition_rtx;
8304 enum machine_mode op_mode;
8305
8306 condition_rtx = rs6000_generate_compare (code);
8307
8308 op_mode = GET_MODE (rs6000_compare_op0);
8309 if (op_mode == VOIDmode)
8310 op_mode = GET_MODE (rs6000_compare_op1);
8311
8312 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
8313 {
8314 PUT_MODE (condition_rtx, DImode);
8315 convert_move (result, condition_rtx, 0);
8316 }
8317 else
8318 {
8319 PUT_MODE (condition_rtx, SImode);
8320 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
8321 }
8322 }
8323
8324 /* Emit a branch of kind CODE to location LOC. */
8325
8326 void
8327 rs6000_emit_cbranch (code, loc)
8328 enum rtx_code code;
8329 rtx loc;
8330 {
8331 rtx condition_rtx, loc_ref;
8332
8333 condition_rtx = rs6000_generate_compare (code);
8334 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
8335 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
8336 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
8337 loc_ref, pc_rtx)));
8338 }
8339
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label, or -1 if the branch is really a
   conditional return.

   OP is the conditional expression.  XEXP (OP, 0) is assumed to be a
   condition code register and its mode specifies what kind of
   comparison we made.

   REVERSED is non-zero if we should reverse the sense of the comparison.

   INSN is the insn.

   The returned pointer refers to a static buffer, so it is only valid
   until the next call.  */

char *
output_cbranch (op, label, reversed, insn)
     rtx op;
     const char * label;
     int reversed;
     rtx insn;
{
  /* NOTE(review): 64 bytes assumes LABEL and the register name stay
     short -- verify against the longest possible operands.  */
  static char string[64];
  enum rtx_code code = GET_CODE (op);
  rtx cc_reg = XEXP (op, 0);
  enum machine_mode mode = GET_MODE (cc_reg);
  int cc_regno = REGNO (cc_reg) - CR0_REGNO;
  /* Length 8 means the target is out of conditional-branch range, so
     we branch around an unconditional branch instead.  */
  int need_longbranch = label != NULL && get_attr_length (insn) == 8;
  int really_reversed = reversed ^ need_longbranch;
  char *s = string;
  const char *ccode;
  const char *pred;
  rtx note;

  validate_condition_mode (code, mode);

  /* Work out which way this really branches.  We could use
     reverse_condition_maybe_unordered here always but this
     makes the resulting assembler clearer.  */
  if (really_reversed)
    code = rs6000_reverse_condition (mode, code);

  if ((TARGET_SPE && TARGET_HARD_FLOAT) && mode == CCFPmode)
    {
      /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
	 to the GT bit.  */
      if (code == EQ)
	/* Opposite of GT.  */
	code = UNLE;
      else if (code == NE)
	code = GT;
      else
	abort ();
    }

  /* Translate CODE into the assembler condition mnemonic.  */
  switch (code)
    {
      /* Not all of these are actually distinct opcodes, but
	 we distinguish them for clarity of the resulting assembler.  */
    case NE: case LTGT:
      ccode = "ne"; break;
    case EQ: case UNEQ:
      ccode = "eq"; break;
    case GE: case GEU:
      ccode = "ge"; break;
    case GT: case GTU: case UNGT:
      ccode = "gt"; break;
    case LE: case LEU:
      ccode = "le"; break;
    case LT: case LTU: case UNLT:
      ccode = "lt"; break;
    case UNORDERED: ccode = "un"; break;
    case ORDERED: ccode = "nu"; break;
    case UNGE: ccode = "nl"; break;
    case UNLE: ccode = "ng"; break;
    default:
      abort ();
    }

  /* Maybe we have a guess as to how likely the branch is.
     The old mnemonics don't have a way to specify this information.  */
  pred = "";
  note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
  if (note != NULL_RTX)
    {
      /* PROB is the difference from 50%.  */
      int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
      bool always_hint = rs6000_cpu != PROCESSOR_POWER4;

      /* Only hint for highly probable/improbable branches on newer
	 cpus as static prediction overrides processor dynamic
	 prediction.  For older cpus we may as well always hint, but
	 assume not taken for branches that are very close to 50% as a
	 mispredicted taken branch is more expensive than a
	 mispredicted not-taken branch.  */
      if (always_hint
	  || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
	{
	  /* The hint sense is flipped for a long branch, since the
	     conditional branch then jumps AROUND the real target.  */
	  if (abs (prob) > REG_BR_PROB_BASE / 20
	      && ((prob > 0) ^ need_longbranch))
	    pred = "+";
	  else
	    pred = "-";
	}
    }

  /* Emit the opcode in "{old|new}" mnemonic form; a NULL label means a
     conditional return through the link register.  */
  if (label == NULL)
    s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
  else
    s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);

  /* We need to escape any '%' characters in the reg_names string.
     Assume they'd only be the first character...  */
  if (reg_names[cc_regno + CR0_REGNO][0] == '%')
    *s++ = '%';
  s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);

  if (label != NULL)
    {
      /* If the branch distance was too far, we may have to use an
	 unconditional branch to go the distance.  */
      if (need_longbranch)
	s += sprintf (s, ",$+8\n\tb %s", label);
      else
	s += sprintf (s, ",%s", label);
    }

  return string;
}
8466
8467 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
8468 operands of the last comparison is nonzero/true, FALSE_COND if it
8469 is zero/false. Return 0 if the hardware has no such operation. */
8470
8471 int
8472 rs6000_emit_cmove (dest, op, true_cond, false_cond)
8473 rtx dest;
8474 rtx op;
8475 rtx true_cond;
8476 rtx false_cond;
8477 {
8478 enum rtx_code code = GET_CODE (op);
8479 rtx op0 = rs6000_compare_op0;
8480 rtx op1 = rs6000_compare_op1;
8481 REAL_VALUE_TYPE c1;
8482 enum machine_mode compare_mode = GET_MODE (op0);
8483 enum machine_mode result_mode = GET_MODE (dest);
8484 rtx temp;
8485
8486 /* These modes should always match. */
8487 if (GET_MODE (op1) != compare_mode
8488 /* In the isel case however, we can use a compare immediate, so
8489 op1 may be a small constant. */
8490 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
8491 return 0;
8492 if (GET_MODE (true_cond) != result_mode)
8493 return 0;
8494 if (GET_MODE (false_cond) != result_mode)
8495 return 0;
8496
8497 /* First, work out if the hardware can do this at all, or
8498 if it's too slow... */
8499 if (! rs6000_compare_fp_p)
8500 {
8501 if (TARGET_ISEL)
8502 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
8503 return 0;
8504 }
8505
8506 /* Eliminate half of the comparisons by switching operands, this
8507 makes the remaining code simpler. */
8508 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
8509 || code == LTGT || code == LT)
8510 {
8511 code = reverse_condition_maybe_unordered (code);
8512 temp = true_cond;
8513 true_cond = false_cond;
8514 false_cond = temp;
8515 }
8516
8517 /* UNEQ and LTGT take four instructions for a comparison with zero,
8518 it'll probably be faster to use a branch here too. */
8519 if (code == UNEQ)
8520 return 0;
8521
8522 if (GET_CODE (op1) == CONST_DOUBLE)
8523 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
8524
8525 /* We're going to try to implement comparions by performing
8526 a subtract, then comparing against zero. Unfortunately,
8527 Inf - Inf is NaN which is not zero, and so if we don't
8528 know that the operand is finite and the comparison
8529 would treat EQ different to UNORDERED, we can't do it. */
8530 if (! flag_unsafe_math_optimizations
8531 && code != GT && code != UNGE
8532 && (GET_CODE (op1) != CONST_DOUBLE || target_isinf (c1))
8533 /* Constructs of the form (a OP b ? a : b) are safe. */
8534 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
8535 || (! rtx_equal_p (op0, true_cond)
8536 && ! rtx_equal_p (op1, true_cond))))
8537 return 0;
8538 /* At this point we know we can use fsel. */
8539
8540 /* Reduce the comparison to a comparison against zero. */
8541 temp = gen_reg_rtx (compare_mode);
8542 emit_insn (gen_rtx_SET (VOIDmode, temp,
8543 gen_rtx_MINUS (compare_mode, op0, op1)));
8544 op0 = temp;
8545 op1 = CONST0_RTX (compare_mode);
8546
8547 /* If we don't care about NaNs we can reduce some of the comparisons
8548 down to faster ones. */
8549 if (flag_unsafe_math_optimizations)
8550 switch (code)
8551 {
8552 case GT:
8553 code = LE;
8554 temp = true_cond;
8555 true_cond = false_cond;
8556 false_cond = temp;
8557 break;
8558 case UNGE:
8559 code = GE;
8560 break;
8561 case UNEQ:
8562 code = EQ;
8563 break;
8564 default:
8565 break;
8566 }
8567
8568 /* Now, reduce everything down to a GE. */
8569 switch (code)
8570 {
8571 case GE:
8572 break;
8573
8574 case LE:
8575 temp = gen_reg_rtx (compare_mode);
8576 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8577 op0 = temp;
8578 break;
8579
8580 case ORDERED:
8581 temp = gen_reg_rtx (compare_mode);
8582 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
8583 op0 = temp;
8584 break;
8585
8586 case EQ:
8587 temp = gen_reg_rtx (compare_mode);
8588 emit_insn (gen_rtx_SET (VOIDmode, temp,
8589 gen_rtx_NEG (compare_mode,
8590 gen_rtx_ABS (compare_mode, op0))));
8591 op0 = temp;
8592 break;
8593
8594 case UNGE:
8595 temp = gen_reg_rtx (result_mode);
8596 emit_insn (gen_rtx_SET (VOIDmode, temp,
8597 gen_rtx_IF_THEN_ELSE (result_mode,
8598 gen_rtx_GE (VOIDmode,
8599 op0, op1),
8600 true_cond, false_cond)));
8601 false_cond = temp;
8602 true_cond = false_cond;
8603
8604 temp = gen_reg_rtx (compare_mode);
8605 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8606 op0 = temp;
8607 break;
8608
8609 case GT:
8610 temp = gen_reg_rtx (result_mode);
8611 emit_insn (gen_rtx_SET (VOIDmode, temp,
8612 gen_rtx_IF_THEN_ELSE (result_mode,
8613 gen_rtx_GE (VOIDmode,
8614 op0, op1),
8615 true_cond, false_cond)));
8616 true_cond = temp;
8617 false_cond = true_cond;
8618
8619 temp = gen_reg_rtx (compare_mode);
8620 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8621 op0 = temp;
8622 break;
8623
8624 default:
8625 abort ();
8626 }
8627
8628 emit_insn (gen_rtx_SET (VOIDmode, dest,
8629 gen_rtx_IF_THEN_ELSE (result_mode,
8630 gen_rtx_GE (VOIDmode,
8631 op0, op1),
8632 true_cond, false_cond)));
8633 return 1;
8634 }
8635
8636 /* Same as above, but for ints (isel). */
8637
8638 static int
8639 rs6000_emit_int_cmove (dest, op, true_cond, false_cond)
8640 rtx dest;
8641 rtx op;
8642 rtx true_cond;
8643 rtx false_cond;
8644 {
8645 rtx condition_rtx, cr;
8646
8647 /* All isel implementations thus far are 32-bits. */
8648 if (GET_MODE (rs6000_compare_op0) != SImode)
8649 return 0;
8650
8651 /* We still have to do the compare, because isel doesn't do a
8652 compare, it just looks at the CRx bits set by a previous compare
8653 instruction. */
8654 condition_rtx = rs6000_generate_compare (GET_CODE (op));
8655 cr = XEXP (condition_rtx, 0);
8656
8657 if (GET_MODE (cr) == CCmode)
8658 emit_insn (gen_isel_signed (dest, condition_rtx,
8659 true_cond, false_cond, cr));
8660 else
8661 emit_insn (gen_isel_unsigned (dest, condition_rtx,
8662 true_cond, false_cond, cr));
8663
8664 return 1;
8665 }
8666
8667 const char *
8668 output_isel (operands)
8669 rtx *operands;
8670 {
8671 enum rtx_code code;
8672
8673 code = GET_CODE (operands[1]);
8674 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
8675 {
8676 PUT_CODE (operands[1], reverse_condition (code));
8677 return "isel %0,%3,%2,%j1";
8678 }
8679 else
8680 return "isel %0,%2,%3,%j1";
8681 }
8682
8683 void
8684 rs6000_emit_minmax (dest, code, op0, op1)
8685 rtx dest;
8686 enum rtx_code code;
8687 rtx op0;
8688 rtx op1;
8689 {
8690 enum machine_mode mode = GET_MODE (op0);
8691 enum rtx_code c;
8692 rtx target;
8693
8694 if (code == SMAX || code == SMIN)
8695 c = GE;
8696 else
8697 c = GEU;
8698
8699 if (code == SMAX || code == UMAX)
8700 target = emit_conditional_move (dest, c, op0, op1, mode,
8701 op0, op1, mode, 0);
8702 else
8703 target = emit_conditional_move (dest, c, op0, op1, mode,
8704 op1, op0, mode, 0);
8705 if (target == NULL_RTX)
8706 abort ();
8707 if (target != dest)
8708 emit_move_insn (dest, target);
8709 }
8710 \f
8711 /* This page contains routines that are used to determine what the
8712 function prologue and epilogue code will do and write them out. */
8713
8714 /* Return the first fixed-point register that is required to be
8715 saved. 32 if none. */
8716
8717 int
8718 first_reg_to_save ()
8719 {
8720 int first_reg;
8721
8722 /* Find lowest numbered live register. */
8723 for (first_reg = 13; first_reg <= 31; first_reg++)
8724 if (regs_ever_live[first_reg]
8725 && (! call_used_regs[first_reg]
8726 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
8727 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8728 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
8729 break;
8730
8731 #if TARGET_MACHO
8732 if (flag_pic
8733 && current_function_uses_pic_offset_table
8734 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
8735 return RS6000_PIC_OFFSET_TABLE_REGNUM;
8736 #endif
8737
8738 return first_reg;
8739 }
8740
8741 /* Similar, for FP regs. */
8742
8743 int
8744 first_fp_reg_to_save ()
8745 {
8746 int first_reg;
8747
8748 /* Find lowest numbered live register. */
8749 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
8750 if (regs_ever_live[first_reg])
8751 break;
8752
8753 return first_reg;
8754 }
8755
8756 /* Similar, for AltiVec regs. */
8757
8758 static int
8759 first_altivec_reg_to_save ()
8760 {
8761 int i;
8762
8763 /* Stack frame remains as is unless we are in AltiVec ABI. */
8764 if (! TARGET_ALTIVEC_ABI)
8765 return LAST_ALTIVEC_REGNO + 1;
8766
8767 /* Find lowest numbered live register. */
8768 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
8769 if (regs_ever_live[i])
8770 break;
8771
8772 return i;
8773 }
8774
8775 /* Return a 32-bit mask of the AltiVec registers we need to set in
8776 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
8777 the 32-bit word is 0. */
8778
8779 static unsigned int
8780 compute_vrsave_mask ()
8781 {
8782 unsigned int i, mask = 0;
8783
8784 /* First, find out if we use _any_ altivec registers. */
8785 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8786 if (regs_ever_live[i])
8787 mask |= ALTIVEC_REG_BIT (i);
8788
8789 if (mask == 0)
8790 return mask;
8791
8792 /* Next, add all registers that are call-clobbered. We do this
8793 because post-reload register optimizers such as regrename_optimize
8794 may choose to use them. They never change the register class
8795 chosen by reload, so cannot create new uses of altivec registers
8796 if there were none before, so the early exit above is safe. */
8797 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
8798 altivec registers not saved in the mask, which might well make the
8799 adjustments below more effective in eliding the save/restore of
8800 VRSAVE in small functions. */
8801 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8802 if (call_used_regs[i])
8803 mask |= ALTIVEC_REG_BIT (i);
8804
8805 /* Next, remove the argument registers from the set. These must
8806 be in the VRSAVE mask set by the caller, so we don't need to add
8807 them in again. More importantly, the mask we compute here is
8808 used to generate CLOBBERs in the set_vrsave insn, and we do not
8809 wish the argument registers to die. */
8810 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
8811 mask &= ~ALTIVEC_REG_BIT (i);
8812
8813 /* Similarly, remove the return value from the set. */
8814 {
8815 bool yes = false;
8816 diddle_return_value (is_altivec_return_reg, &yes);
8817 if (yes)
8818 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
8819 }
8820
8821 return mask;
8822 }
8823
8824 static void
8825 is_altivec_return_reg (reg, xyes)
8826 rtx reg;
8827 void *xyes;
8828 {
8829 bool *yes = (bool *) xyes;
8830 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
8831 *yes = true;
8832 }
8833
8834 \f
8835 /* Calculate the stack information for the current function. This is
8836 complicated by having two separate calling sequences, the AIX calling
8837 sequence and the V.4 calling sequence.
8838
8839 AIX (and Darwin/Mac OS X) stack frames look like:
8840 32-bit 64-bit
8841 SP----> +---------------------------------------+
8842 | back chain to caller | 0 0
8843 +---------------------------------------+
8844 | saved CR | 4 8 (8-11)
8845 +---------------------------------------+
8846 | saved LR | 8 16
8847 +---------------------------------------+
8848 | reserved for compilers | 12 24
8849 +---------------------------------------+
8850 | reserved for binders | 16 32
8851 +---------------------------------------+
8852 | saved TOC pointer | 20 40
8853 +---------------------------------------+
8854 | Parameter save area (P) | 24 48
8855 +---------------------------------------+
8856 | Alloca space (A) | 24+P etc.
8857 +---------------------------------------+
8858 | Local variable space (L) | 24+P+A
8859 +---------------------------------------+
8860 | Float/int conversion temporary (X) | 24+P+A+L
8861 +---------------------------------------+
8862 | Save area for AltiVec registers (W) | 24+P+A+L+X
8863 +---------------------------------------+
8864 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
8865 +---------------------------------------+
8866 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
8867 +---------------------------------------+
	| Save area for GP registers (G)	| 24+P+A+L+X+W+Y+Z
8869 +---------------------------------------+
	| Save area for FP registers (F)	| 24+P+A+L+X+W+Y+Z+G
8871 +---------------------------------------+
8872 old SP->| back chain to caller's caller |
8873 +---------------------------------------+
8874
8875 The required alignment for AIX configurations is two words (i.e., 8
8876 or 16 bytes).
8877
8878
8879 V.4 stack frames look like:
8880
8881 SP----> +---------------------------------------+
8882 | back chain to caller | 0
8883 +---------------------------------------+
8884 | caller's saved LR | 4
8885 +---------------------------------------+
8886 | Parameter save area (P) | 8
8887 +---------------------------------------+
8888 | Alloca space (A) | 8+P
8889 +---------------------------------------+
8890 | Varargs save area (V) | 8+P+A
8891 +---------------------------------------+
8892 | Local variable space (L) | 8+P+A+V
8893 +---------------------------------------+
8894 | Float/int conversion temporary (X) | 8+P+A+V+L
8895 +---------------------------------------+
8896 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
8897 +---------------------------------------+
8898 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
8899 +---------------------------------------+
8900 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
8901 +---------------------------------------+
8902 | SPE: area for 64-bit GP registers |
8903 +---------------------------------------+
8904 | SPE alignment padding |
8905 +---------------------------------------+
8906 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
8907 +---------------------------------------+
8908 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
8909 +---------------------------------------+
8910 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
8911 +---------------------------------------+
8912 old SP->| back chain to caller's caller |
8913 +---------------------------------------+
8914
8915 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
8916 given. (But note below and in sysv4.h that we require only 8 and
8917 may round up the size of our stack frame anyways. The historical
8918 reason is early versions of powerpc-linux which didn't properly
8919 align the stack at program startup. A happy side-effect is that
8920 -mno-eabi libraries can be used with -meabi programs.)
8921
8922 The EABI configuration defaults to the V.4 layout, unless
8923 -mcall-aix is used, in which case the AIX layout is used. However,
8924 the stack alignment requirements may differ. If -mno-eabi is not
8925 given, the required stack alignment is 8 bytes; if -mno-eabi is
8926 given, the required alignment is 16 bytes. (But see V.4 comment
8927 above.) */
8928
8929 #ifndef ABI_STACK_BOUNDARY
8930 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
8931 #endif
8932
8933 rs6000_stack_t *
8934 rs6000_stack_info ()
8935 {
8936 static rs6000_stack_t info, zero_info;
8937 rs6000_stack_t *info_ptr = &info;
8938 int reg_size = TARGET_POWERPC64 ? 8 : 4;
8939 enum rs6000_abi abi;
8940 int ehrd_size;
8941 int total_raw_size;
8942
8943 /* Zero all fields portably. */
8944 info = zero_info;
8945
8946 /* Select which calling sequence. */
8947 info_ptr->abi = abi = DEFAULT_ABI;
8948
8949 /* Calculate which registers need to be saved & save area size. */
8950 info_ptr->first_gp_reg_save = first_reg_to_save ();
8951 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
8952 even if it currently looks like we won't. */
8953 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
8954 || (flag_pic == 1 && abi == ABI_V4)
8955 || (flag_pic && abi == ABI_DARWIN))
8956 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
8957 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
8958 else
8959 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
8960
8961 /* For the SPE, we have an additional upper 32-bits on each GPR.
8962 Ideally we should save the entire 64-bits only when the upper
8963 half is used in SIMD instructions. Since we only record
8964 registers live (not the size they are used in), this proves
8965 difficult because we'd have to traverse the instruction chain at
8966 the right time, taking reload into account. This is a real pain,
8967 so we opt to save the GPRs in 64-bits always. Anyone overly
8968 concerned with frame size can fix this. ;-).
8969
8970 So... since we save all GPRs (except the SP) in 64-bits, the
8971 traditional GP save area will be empty. */
8972 if (TARGET_SPE_ABI)
8973 info_ptr->gp_size = 0;
8974
8975 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
8976 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
8977
8978 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
8979 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
8980 - info_ptr->first_altivec_reg_save);
8981
8982 /* Does this function call anything? */
8983 info_ptr->calls_p = (! current_function_is_leaf
8984 || cfun->machine->ra_needs_full_frame);
8985
8986 /* Determine if we need to save the link register. */
8987 if (rs6000_ra_ever_killed ()
8988 || (DEFAULT_ABI == ABI_AIX && current_function_profile)
8989 #ifdef TARGET_RELOCATABLE
8990 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
8991 #endif
8992 || (info_ptr->first_fp_reg_save != 64
8993 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
8994 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
8995 || (abi == ABI_V4 && current_function_calls_alloca)
8996 || (DEFAULT_ABI == ABI_DARWIN
8997 && flag_pic
8998 && current_function_uses_pic_offset_table)
8999 || info_ptr->calls_p)
9000 {
9001 info_ptr->lr_save_p = 1;
9002 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
9003 }
9004
9005 /* Determine if we need to save the condition code registers. */
9006 if (regs_ever_live[CR2_REGNO]
9007 || regs_ever_live[CR3_REGNO]
9008 || regs_ever_live[CR4_REGNO])
9009 {
9010 info_ptr->cr_save_p = 1;
9011 if (abi == ABI_V4)
9012 info_ptr->cr_size = reg_size;
9013 }
9014
9015 /* If the current function calls __builtin_eh_return, then we need
9016 to allocate stack space for registers that will hold data for
9017 the exception handler. */
9018 if (current_function_calls_eh_return)
9019 {
9020 unsigned int i;
9021 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
9022 continue;
9023
9024 /* SPE saves EH registers in 64-bits. */
9025 ehrd_size = i * (TARGET_SPE_ABI ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
9026 }
9027 else
9028 ehrd_size = 0;
9029
9030 /* Determine various sizes. */
9031 info_ptr->reg_size = reg_size;
9032 info_ptr->fixed_size = RS6000_SAVE_AREA;
9033 info_ptr->varargs_size = RS6000_VARARGS_AREA;
9034 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
9035 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
9036 8);
9037
9038 if (TARGET_SPE_ABI)
9039 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
9040 else
9041 info_ptr->spe_gp_size = 0;
9042
9043 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
9044 {
9045 info_ptr->vrsave_mask = compute_vrsave_mask ();
9046 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
9047 }
9048 else
9049 {
9050 info_ptr->vrsave_mask = 0;
9051 info_ptr->vrsave_size = 0;
9052 }
9053
9054 /* Calculate the offsets. */
9055 switch (abi)
9056 {
9057 case ABI_NONE:
9058 default:
9059 abort ();
9060
9061 case ABI_AIX:
9062 case ABI_AIX_NODESC:
9063 case ABI_DARWIN:
9064 info_ptr->fp_save_offset = - info_ptr->fp_size;
9065 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9066
9067 if (TARGET_ALTIVEC_ABI)
9068 {
9069 info_ptr->vrsave_save_offset
9070 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
9071
9072 /* Align stack so vector save area is on a quadword boundary. */
9073 if (info_ptr->altivec_size != 0)
9074 info_ptr->altivec_padding_size
9075 = 16 - (-info_ptr->vrsave_save_offset % 16);
9076 else
9077 info_ptr->altivec_padding_size = 0;
9078
9079 info_ptr->altivec_save_offset
9080 = info_ptr->vrsave_save_offset
9081 - info_ptr->altivec_padding_size
9082 - info_ptr->altivec_size;
9083
9084 /* Adjust for AltiVec case. */
9085 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
9086 }
9087 else
9088 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
9089 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
9090 info_ptr->lr_save_offset = 2*reg_size;
9091 break;
9092
9093 case ABI_V4:
9094 info_ptr->fp_save_offset = - info_ptr->fp_size;
9095 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9096 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
9097
9098 if (TARGET_SPE_ABI)
9099 {
9100 /* Align stack so SPE GPR save area is aligned on a
9101 double-word boundary. */
9102 if (info_ptr->spe_gp_size != 0)
9103 info_ptr->spe_padding_size
9104 = 8 - (-info_ptr->cr_save_offset % 8);
9105 else
9106 info_ptr->spe_padding_size = 0;
9107
9108 info_ptr->spe_gp_save_offset
9109 = info_ptr->cr_save_offset
9110 - info_ptr->spe_padding_size
9111 - info_ptr->spe_gp_size;
9112
9113 /* Adjust for SPE case. */
9114 info_ptr->toc_save_offset
9115 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
9116 }
9117 else if (TARGET_ALTIVEC_ABI)
9118 {
9119 info_ptr->vrsave_save_offset
9120 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
9121
9122 /* Align stack so vector save area is on a quadword boundary. */
9123 if (info_ptr->altivec_size != 0)
9124 info_ptr->altivec_padding_size
9125 = 16 - (-info_ptr->vrsave_save_offset % 16);
9126 else
9127 info_ptr->altivec_padding_size = 0;
9128
9129 info_ptr->altivec_save_offset
9130 = info_ptr->vrsave_save_offset
9131 - info_ptr->altivec_padding_size
9132 - info_ptr->altivec_size;
9133
9134 /* Adjust for AltiVec case. */
9135 info_ptr->toc_save_offset
9136 = info_ptr->altivec_save_offset - info_ptr->toc_size;
9137 }
9138 else
9139 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
9140 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
9141 info_ptr->lr_save_offset = reg_size;
9142 break;
9143 }
9144
9145 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
9146 + info_ptr->gp_size
9147 + info_ptr->altivec_size
9148 + info_ptr->altivec_padding_size
9149 + info_ptr->vrsave_size
9150 + info_ptr->spe_gp_size
9151 + info_ptr->spe_padding_size
9152 + ehrd_size
9153 + info_ptr->cr_size
9154 + info_ptr->lr_size
9155 + info_ptr->vrsave_size
9156 + info_ptr->toc_size,
9157 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
9158 ? 16 : 8);
9159
9160 total_raw_size = (info_ptr->vars_size
9161 + info_ptr->parm_size
9162 + info_ptr->save_size
9163 + info_ptr->varargs_size
9164 + info_ptr->fixed_size);
9165
9166 info_ptr->total_size =
9167 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
9168
9169 /* Determine if we need to allocate any stack frame:
9170
9171 For AIX we need to push the stack if a frame pointer is needed
9172 (because the stack might be dynamically adjusted), if we are
9173 debugging, if we make calls, or if the sum of fp_save, gp_save,
9174 and local variables are more than the space needed to save all
9175 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
9176 + 18*8 = 288 (GPR13 reserved).
9177
9178 For V.4 we don't have the stack cushion that AIX uses, but assume
9179 that the debugger can handle stackless frames. */
9180
9181 if (info_ptr->calls_p)
9182 info_ptr->push_p = 1;
9183
9184 else if (abi == ABI_V4)
9185 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
9186
9187 else
9188 info_ptr->push_p = (frame_pointer_needed
9189 || (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
9190 || ((total_raw_size - info_ptr->fixed_size)
9191 > (TARGET_32BIT ? 220 : 288)));
9192
9193 /* Zero offsets if we're not saving those registers. */
9194 if (info_ptr->fp_size == 0)
9195 info_ptr->fp_save_offset = 0;
9196
9197 if (info_ptr->gp_size == 0)
9198 info_ptr->gp_save_offset = 0;
9199
9200 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
9201 info_ptr->altivec_save_offset = 0;
9202
9203 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
9204 info_ptr->vrsave_save_offset = 0;
9205
9206 if (! TARGET_SPE_ABI || info_ptr->spe_gp_size == 0)
9207 info_ptr->spe_gp_save_offset = 0;
9208
9209 if (! info_ptr->lr_save_p)
9210 info_ptr->lr_save_offset = 0;
9211
9212 if (! info_ptr->cr_save_p)
9213 info_ptr->cr_save_offset = 0;
9214
9215 if (! info_ptr->toc_save_p)
9216 info_ptr->toc_save_offset = 0;
9217
9218 return info_ptr;
9219 }
9220
/* Print the computed frame layout INFO for the current function to
   stderr, for debugging the prologue/epilogue code.  If INFO is null,
   it is computed via rs6000_stack_info () first.  Fields are mostly
   printed only when they differ from their default value.  */
void
debug_stack_info (info)
     rs6000_stack_t *info;
{
  const char *abi_string;

  /* Fall back to the current function's layout.  */
  if (! info)
    info = rs6000_stack_info ();

  fprintf (stderr, "\nStack information for function %s:\n",
	   ((current_function_decl && DECL_NAME (current_function_decl))
	    ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
	    : "<unknown>"));

  switch (info->abi)
    {
    default: abi_string = "Unknown"; break;
    case ABI_NONE: abi_string = "NONE"; break;
    case ABI_AIX:
    case ABI_AIX_NODESC: abi_string = "AIX"; break;
    case ABI_DARWIN: abi_string = "Darwin"; break;
    case ABI_V4: abi_string = "V.4"; break;
    }

  fprintf (stderr, "\tABI = %5s\n", abi_string);

  if (TARGET_ALTIVEC_ABI)
    fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");

  if (TARGET_SPE_ABI)
    fprintf (stderr, "\tSPE ABI extensions enabled.\n");

  /* 32 / 64 are the "nothing to save" sentinels for the GP/FP
     first-saved-register fields.  */
  if (info->first_gp_reg_save != 32)
    fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);

  if (info->first_fp_reg_save != 64)
    fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);

  if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
    fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
	     info->first_altivec_reg_save);

  if (info->lr_save_p)
    fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);

  if (info->cr_save_p)
    fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);

  if (info->toc_save_p)
    fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);

  if (info->vrsave_mask)
    fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);

  if (info->push_p)
    fprintf (stderr, "\tpush_p = %5d\n", info->push_p);

  if (info->calls_p)
    fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);

  /* Frame offsets (negative from the incoming stack pointer).  */
  if (info->gp_save_offset)
    fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);

  if (info->fp_save_offset)
    fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);

  if (info->altivec_save_offset)
    fprintf (stderr, "\taltivec_save_offset = %5d\n",
	     info->altivec_save_offset);

  if (info->spe_gp_save_offset)
    fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
	     info->spe_gp_save_offset);

  if (info->vrsave_save_offset)
    fprintf (stderr, "\tvrsave_save_offset = %5d\n",
	     info->vrsave_save_offset);

  if (info->lr_save_offset)
    fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);

  if (info->cr_save_offset)
    fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);

  if (info->toc_save_offset)
    fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);

  if (info->varargs_save_offset)
    fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);

  /* Component sizes of the frame.  */
  if (info->total_size)
    fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);

  if (info->varargs_size)
    fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);

  if (info->vars_size)
    fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);

  if (info->parm_size)
    fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);

  if (info->fixed_size)
    fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);

  if (info->gp_size)
    fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);

  if (info->spe_gp_size)
    fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);

  if (info->fp_size)
    fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);

  if (info->altivec_size)
    fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);

  if (info->vrsave_size)
    fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);

  if (info->altivec_padding_size)
    fprintf (stderr, "\taltivec_padding_size= %5d\n",
	     info->altivec_padding_size);

  if (info->spe_padding_size)
    fprintf (stderr, "\tspe_padding_size = %5d\n",
	     info->spe_padding_size);

  if (info->lr_size)
    fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);

  if (info->cr_size)
    fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);

  if (info->toc_size)
    fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);

  if (info->save_size)
    fprintf (stderr, "\tsave_size = %5d\n", info->save_size);

  if (info->reg_size != 4)
    fprintf (stderr, "\treg_size = %5d\n", info->reg_size);

  fprintf (stderr, "\n");
}
9366
9367 rtx
9368 rs6000_return_addr (count, frame)
9369 int count;
9370 rtx frame;
9371 {
9372 /* Currently we don't optimize very well between prolog and body
9373 code and for PIC code the code can be actually quite bad, so
9374 don't try to be too clever here. */
9375 if (count != 0 || flag_pic != 0)
9376 {
9377 cfun->machine->ra_needs_full_frame = 1;
9378
9379 return
9380 gen_rtx_MEM
9381 (Pmode,
9382 memory_address
9383 (Pmode,
9384 plus_constant (copy_to_reg
9385 (gen_rtx_MEM (Pmode,
9386 memory_address (Pmode, frame))),
9387 RETURN_ADDRESS_OFFSET)));
9388 }
9389
9390 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
9391 }
9392
9393 /* Say whether a function is a candidate for sibcall handling or not.
9394 We do not allow indirect calls to be optimized into sibling calls.
9395 Also, we can't do it if there are any vector parameters; there's
9396 nowhere to put the VRsave code so it works; note that functions with
9397 vector parameters are required to have a prototype, so the argument
9398 type info must be available here. (The tail recursion case can work
9399 with vector parameters, but there's no way to distinguish here.) */
9400 int
9401 function_ok_for_sibcall (fndecl)
9402 tree fndecl;
9403 {
9404 tree type;
9405 if (fndecl)
9406 {
9407 if (TARGET_ALTIVEC_VRSAVE)
9408 {
9409 for (type = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
9410 type; type = TREE_CHAIN (type))
9411 {
9412 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
9413 return 0;
9414 }
9415 }
9416 if (DEFAULT_ABI == ABI_DARWIN
9417 || (*targetm.binds_local_p) (fndecl))
9418 {
9419 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (fndecl));
9420
9421 if (!lookup_attribute ("longcall", attr_list)
9422 || lookup_attribute ("shortcall", attr_list))
9423 return 1;
9424 }
9425 }
9426 return 0;
9427 }
9428
9429 /* function rewritten to handle sibcalls */
9430 static int
9431 rs6000_ra_ever_killed ()
9432 {
9433 rtx top;
9434 rtx reg;
9435 rtx insn;
9436
9437 #ifdef ASM_OUTPUT_MI_THUNK
9438 if (current_function_is_thunk)
9439 return 0;
9440 #endif
9441 /* regs_ever_live has LR marked as used if any sibcalls
9442 are present. Which it is, but this should not force
9443 saving and restoring in the prologue/epilog. Likewise,
9444 reg_set_between_p thinks a sibcall clobbers LR, so
9445 that is inappropriate. */
9446 /* Also, the prologue can generate a store into LR that
9447 doesn't really count, like this:
9448 move LR->R0
9449 bcl to set PIC register
9450 move LR->R31
9451 move R0->LR
9452 When we're called from the epilog, we need to avoid counting
9453 this as a store; thus we ignore any insns with a REG_MAYBE_DEAD note. */
9454
9455 push_topmost_sequence ();
9456 top = get_insns ();
9457 pop_topmost_sequence ();
9458 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
9459
9460 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
9461 {
9462 if (INSN_P (insn))
9463 {
9464 if (FIND_REG_INC_NOTE (insn, reg))
9465 return 1;
9466 else if (GET_CODE (insn) == CALL_INSN
9467 && !SIBLING_CALL_P (insn))
9468 return 1;
9469 else if (set_of (reg, insn) != NULL_RTX
9470 && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
9471 return 1;
9472 }
9473 }
9474 return 0;
9475 }
9476 \f
9477 /* Add a REG_MAYBE_DEAD note to the insn. */
9478 static void
9479 rs6000_maybe_dead (insn)
9480 rtx insn;
9481 {
9482 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
9483 const0_rtx,
9484 REG_NOTES (insn));
9485 }
9486
/* Emit instructions needed to load the TOC register.
   This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
   a constant pool; or for SVR4 -fpic.

   FROMPROLOG is nonzero when emitting from the prologue: in that case
   the fixed registers LR and r0 are used as scratch, otherwise fresh
   pseudos are allocated.  All emitted insns are marked REG_MAYBE_DEAD
   so rs6000_ra_ever_killed does not count them as LR kills.  */

void
rs6000_emit_load_toc_table (fromprolog)
     int fromprolog;
{
  rtx dest;
  dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);

  if (TARGET_ELF && DEFAULT_ABI != ABI_AIX)
    {
      /* Small-model SVR4 PIC.  */
      if (DEFAULT_ABI == ABI_V4 && flag_pic == 1)
	{
	  rtx temp = (fromprolog
		      ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		      : gen_reg_rtx (Pmode));
	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp)));
	  rs6000_maybe_dead (emit_move_insn (dest, temp));
	}
      /* Large-model PIC: compute the TOC pointer from a label pair.  */
      else if (flag_pic == 2)
	{
	  char buf[30];
	  rtx tempLR = (fromprolog
			? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
			: gen_reg_rtx (Pmode));
	  rtx temp0 = (fromprolog
		       ? gen_rtx_REG (Pmode, 0)
		       : gen_reg_rtx (Pmode));
	  rtx symF;

	  /* possibly create the toc section */
	  if (! toc_initialized)
	    {
	      toc_section ();
	      function_section (current_function_decl);
	    }

	  if (fromprolog)
	    {
	      rtx symL;

	      /* LCF/LCL label pair for this function's PIC setup.  */
	      ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
	      symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	      ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
	      symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	      rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
								   symF)));
	      rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	      rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
								   symL,
								   symF)));
	    }
	  else
	    {
	      /* Not in the prologue (e.g. reload): use a fresh LCG
		 label each time.  */
	      rtx tocsym;
	      static int reload_toc_labelno = 0;

	      tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);

	      ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
	      symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	      rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR,
								    symF,
								    tocsym)));
	      rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	      rs6000_maybe_dead (emit_move_insn (temp0,
						 gen_rtx_MEM (Pmode, dest)));
	    }
	  rs6000_maybe_dead (emit_insn (gen_addsi3 (dest, temp0, dest)));
	}
      else if (flag_pic == 0 && TARGET_MINIMAL_TOC)
	{
	  /* This is for AIX code running in non-PIC ELF.  */
	  char buf[30];
	  rtx realsym;
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
	  realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  /* Materialize the TOC address via elf_high/elf_low.  */
	  rs6000_maybe_dead (emit_insn (gen_elf_high (dest, realsym)));
	  rs6000_maybe_dead (emit_insn (gen_elf_low (dest, dest, realsym)));
	}
      else
	abort ();
    }
  else
    {
      /* AIX-style TOC load.  */
      if (TARGET_32BIT)
	rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest)));
      else
	rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest)));
    }
}
9584
/* Return the alias set used for TOC references, creating it on the
   first call.  */
int
get_TOC_alias_set ()
{
  static int toc_alias_set = -1;

  if (toc_alias_set == -1)
    toc_alias_set = new_alias_set ();

  return toc_alias_set;
}
9593
9594 /* This retuns nonzero if the current function uses the TOC. This is
9595 determined by the presence of (unspec ... 7), which is generated by
9596 the various load_toc_* patterns. */
9597
9598 int
9599 uses_TOC ()
9600 {
9601 rtx insn;
9602
9603 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9604 if (INSN_P (insn))
9605 {
9606 rtx pat = PATTERN (insn);
9607 int i;
9608
9609 if (GET_CODE (pat) == PARALLEL)
9610 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
9611 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
9612 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
9613 return 1;
9614 }
9615 return 0;
9616 }
9617
9618 rtx
9619 create_TOC_reference (symbol)
9620 rtx symbol;
9621 {
9622 return gen_rtx_PLUS (Pmode,
9623 gen_rtx_REG (Pmode, TOC_REGISTER),
9624 gen_rtx_CONST (Pmode,
9625 gen_rtx_MINUS (Pmode, symbol,
9626 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
9627 }
9628
9629 #if TARGET_AIX
9630 /* __throw will restore its own return address to be the same as the
9631 return address of the function that the throw is being made to.
9632 This is unfortunate, because we want to check the original
9633 return address to see if we need to restore the TOC.
9634 So we have to squirrel it away here.
9635 This is used only in compiling __throw and __rethrow.
9636
9637 Most of this code should be removed by CSE. */
9638 static rtx insn_after_throw;
9639
9640 /* This does the saving... */
9641 void
9642 rs6000_aix_emit_builtin_unwind_init ()
9643 {
9644 rtx mem;
9645 rtx stack_top = gen_reg_rtx (Pmode);
9646 rtx opcode_addr = gen_reg_rtx (Pmode);
9647
9648 insn_after_throw = gen_reg_rtx (SImode);
9649
9650 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
9651 emit_move_insn (stack_top, mem);
9652
9653 mem = gen_rtx_MEM (Pmode,
9654 gen_rtx_PLUS (Pmode, stack_top,
9655 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9656 emit_move_insn (opcode_addr, mem);
9657 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
9658 }
9659
9660 /* Emit insns to _restore_ the TOC register, at runtime (specifically
9661 in _eh.o). Only used on AIX.
9662
9663 The idea is that on AIX, function calls look like this:
9664 bl somefunction-trampoline
9665 lwz r2,20(sp)
9666
9667 and later,
9668 somefunction-trampoline:
9669 stw r2,20(sp)
9670 ... load function address in the count register ...
9671 bctr
9672 or like this, if the linker determines that this is not a cross-module call
9673 and so the TOC need not be restored:
9674 bl somefunction
9675 nop
9676 or like this, if the compiler could determine that this is not a
9677 cross-module call:
9678 bl somefunction
9679 now, the tricky bit here is that register 2 is saved and restored
9680 by the _linker_, so we can't readily generate debugging information
9681 for it. So we need to go back up the call chain looking at the
9682 insns at return addresses to see which calls saved the TOC register
9683 and so see where it gets restored from.
9684
9685 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
9686 just before the actual epilogue.
9687
9688 On the bright side, this incurs no space or time overhead unless an
9689 exception is thrown, except for the extra code in libgcc.a.
9690
9691 The parameter STACKSIZE is a register containing (at runtime)
9692 the amount to be popped off the stack in addition to the stack frame
9693 of this routine (which will be __throw or __rethrow, and so is
9694 guaranteed to have a stack frame). */
9695
void
rs6000_emit_eh_toc_restore (stacksize)
     rtx stacksize;
{
  rtx top_of_stack;
  rtx bottom_of_stack = gen_reg_rtx (Pmode);
  rtx tocompare = gen_reg_rtx (SImode);
  rtx opcode = gen_reg_rtx (SImode);
  rtx opcode_addr = gen_reg_rtx (Pmode);
  rtx mem;
  rtx loop_start = gen_label_rtx ();
  rtx no_toc_restore_needed = gen_label_rtx ();
  rtx loop_exit = gen_label_rtx ();

  /* Start the walk at this function's caller frame (via the back
     chain) and compute where it ends (this frame plus STACKSIZE).  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  top_of_stack = expand_binop (Pmode, add_optab,
			       bottom_of_stack, stacksize,
			       NULL_RTX, 1, OPTAB_WIDEN);

  /* The TOC-restore opcode that follows a cross-module call:
     "lwz r2,20(r1)" on 32-bit, or the 64-bit equivalent (see the
     function comment above).  */
  emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
					   : 0xE8410028, SImode));

  /* rs6000_aix_emit_builtin_unwind_init must have been emitted in
     this function first.  */
  if (insn_after_throw == NULL_RTX)
    abort ();
  emit_move_insn (opcode, insn_after_throw);

  emit_note (NULL, NOTE_INSN_LOOP_BEG);
  emit_label (loop_start);

  /* If the opcode at this frame's return address is not the
     TOC-restore opcode, skip the r2 reload.  */
  do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
			   SImode, NULL_RTX, NULL_RTX,
			   no_toc_restore_needed);

  /* Reload r2 (the TOC register) from this frame's TOC save slot,
     5 pointers into the frame.  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (5 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (gen_rtx_REG (Pmode, 2), mem);

  emit_label (no_toc_restore_needed);
  /* Stop once the walk reaches the end of the popped region.  */
  do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
			   Pmode, NULL_RTX, NULL_RTX,
			   loop_exit);

  /* Follow the back chain to the next frame ...  */
  mem = gen_rtx_MEM (Pmode, bottom_of_stack);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  /* ... and fetch the instruction word at its saved return address
     (2 pointers into the frame) for the next iteration.  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));

  emit_note (NULL, NOTE_INSN_LOOP_CONT);
  emit_jump (loop_start);
  emit_note (NULL, NOTE_INSN_LOOP_END);
  emit_label (loop_exit);
}
9757 #endif /* TARGET_AIX */
9758 \f
9759 /* This ties together stack memory (MEM with an alias set of
9760 rs6000_sr_alias_set) and the change to the stack pointer. */
9761
9762 static void
9763 rs6000_emit_stack_tie ()
9764 {
9765 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
9766
9767 set_mem_alias_set (mem, rs6000_sr_alias_set);
9768 emit_insn (gen_stack_tie (mem));
9769 }
9770
/* Emit the correct code for allocating stack space, as insns.
   If COPY_R12, make sure a copy of the old frame is left in r12.
   The generated code may use hard register 0 as a temporary.
   SIZE is the number of bytes to allocate; the emitted adjustment is
   marked RTX_FRAME_RELATED_P for the dwarf2 unwinder.  */

static void
rs6000_emit_allocate_stack (size, copy_r12)
     HOST_WIDE_INT size;
     int copy_r12;
{
  rtx insn;
  rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx tmp_reg = gen_rtx_REG (Pmode, 0);
  rtx todec = GEN_INT (-size);

  /* -fstack-limit-*: trap if the new stack pointer would fall below
     the limit.  */
  if (current_function_limit_stack)
    {
      if (REG_P (stack_limit_rtx)
	  && REGNO (stack_limit_rtx) > 1
	  && REGNO (stack_limit_rtx) <= 31)
	{
	  /* tmp = limit + size; trap if sp < tmp (unsigned).  */
	  emit_insn (Pmode == SImode
		     ? gen_addsi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size))
		     : gen_adddi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size)));

	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
	       && TARGET_32BIT
	       && DEFAULT_ABI == ABI_V4)
	{
	  /* Materialize limit+size via elf_high/elf_low, then trap.  */
	  rtx toload = gen_rtx_CONST (VOIDmode,
				      gen_rtx_PLUS (Pmode,
						    stack_limit_rtx,
						    GEN_INT (size)));

	  emit_insn (gen_elf_high (tmp_reg, toload));
	  emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else
	warning ("stack limit expression is not supported");
    }

  /* Without update forms, r12 is also needed below to store the old
     sp as the back chain word.  */
  if (copy_r12 || ! TARGET_UPDATE)
    emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);

  if (TARGET_UPDATE)
    {
      if (size > 32767)
	{
	  /* The decrement does not fit in a 16-bit immediate; load it
	     into r0 first.  */
	  /* Need a note here so that try_split doesn't get confused.  */
	  if (get_last_insn() == NULL_RTX)
	    emit_note (0, NOTE_INSN_DELETED);
	  insn = emit_move_insn (tmp_reg, todec);
	  try_split (PATTERN (insn), insn, 0);
	  todec = tmp_reg;
	}

      /* Update form: decrement sp and store the old sp (back chain)
	 in a single pattern.  */
      if (Pmode == SImode)
	insn = emit_insn (gen_movsi_update (stack_reg, stack_reg,
					    todec, stack_reg));
      else
	insn = emit_insn (gen_movdi_update (stack_reg, stack_reg,
					    todec, stack_reg));
    }
  else
    {
      /* No update form: adjust sp, then store the old sp (copied into
	 r12 above) as the back chain word.  */
      if (Pmode == SImode)
	insn = emit_insn (gen_addsi3 (stack_reg, stack_reg, todec));
      else
	insn = emit_insn (gen_adddi3 (stack_reg, stack_reg, todec));
      emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
		      gen_rtx_REG (Pmode, 12));
    }

  /* Describe the net sp adjustment for the unwinder, independent of
     which insn sequence was emitted above.  */
  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) =
    gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
		       gen_rtx_SET (VOIDmode, stack_reg,
				    gen_rtx_PLUS (Pmode, stack_reg,
						  GEN_INT (-size))),
		       REG_NOTES (insn));
}
9860
9861 /* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
9862 knows that:
9863
9864 (mem (plus (blah) (regXX)))
9865
9866 is really:
9867
9868 (mem (plus (blah) (const VALUE_OF_REGXX))). */
9869
9870 static void
9871 altivec_frame_fixup (insn, reg, val)
9872 rtx insn, reg;
9873 HOST_WIDE_INT val;
9874 {
9875 rtx real;
9876
9877 real = copy_rtx (PATTERN (insn));
9878
9879 real = replace_rtx (real, reg, GEN_INT (val));
9880
9881 RTX_FRAME_RELATED_P (insn) = 1;
9882 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
9883 real,
9884 REG_NOTES (insn));
9885 }
9886
9887 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
9888 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
9889 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
9890 deduce these equivalences by itself so it wasn't necessary to hold
9891 its hand so much. */
9892
9893 static void
9894 rs6000_frame_related (insn, reg, val, reg2, rreg)
9895 rtx insn;
9896 rtx reg;
9897 HOST_WIDE_INT val;
9898 rtx reg2;
9899 rtx rreg;
9900 {
9901 rtx real, temp;
9902
9903 /* copy_rtx will not make unique copies of registers, so we need to
9904 ensure we don't have unwanted sharing here. */
9905 if (reg == reg2)
9906 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
9907
9908 if (reg == rreg)
9909 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
9910
9911 real = copy_rtx (PATTERN (insn));
9912
9913 if (reg2 != NULL_RTX)
9914 real = replace_rtx (real, reg2, rreg);
9915
9916 real = replace_rtx (real, reg,
9917 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
9918 STACK_POINTER_REGNUM),
9919 GEN_INT (val)));
9920
9921 /* We expect that 'real' is either a SET or a PARALLEL containing
9922 SETs (and possibly other stuff). In a PARALLEL, all the SETs
9923 are important so they all have to be marked RTX_FRAME_RELATED_P. */
9924
9925 if (GET_CODE (real) == SET)
9926 {
9927 rtx set = real;
9928
9929 temp = simplify_rtx (SET_SRC (set));
9930 if (temp)
9931 SET_SRC (set) = temp;
9932 temp = simplify_rtx (SET_DEST (set));
9933 if (temp)
9934 SET_DEST (set) = temp;
9935 if (GET_CODE (SET_DEST (set)) == MEM)
9936 {
9937 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
9938 if (temp)
9939 XEXP (SET_DEST (set), 0) = temp;
9940 }
9941 }
9942 else if (GET_CODE (real) == PARALLEL)
9943 {
9944 int i;
9945 for (i = 0; i < XVECLEN (real, 0); i++)
9946 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
9947 {
9948 rtx set = XVECEXP (real, 0, i);
9949
9950 temp = simplify_rtx (SET_SRC (set));
9951 if (temp)
9952 SET_SRC (set) = temp;
9953 temp = simplify_rtx (SET_DEST (set));
9954 if (temp)
9955 SET_DEST (set) = temp;
9956 if (GET_CODE (SET_DEST (set)) == MEM)
9957 {
9958 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
9959 if (temp)
9960 XEXP (SET_DEST (set), 0) = temp;
9961 }
9962 RTX_FRAME_RELATED_P (set) = 1;
9963 }
9964 }
9965 else
9966 abort ();
9967
9968 RTX_FRAME_RELATED_P (insn) = 1;
9969 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
9970 real,
9971 REG_NOTES (insn));
9972 }
9973
9974 /* Returns an insn that has a vrsave set operation with the
9975 appropriate CLOBBERs. */
9976
9977 static rtx
9978 generate_set_vrsave (reg, info, epiloguep)
9979 rtx reg;
9980 rs6000_stack_t *info;
9981 int epiloguep;
9982 {
9983 int nclobs, i;
9984 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
9985 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
9986
9987 clobs[0]
9988 = gen_rtx_SET (VOIDmode,
9989 vrsave,
9990 gen_rtx_UNSPEC_VOLATILE (SImode,
9991 gen_rtvec (2, reg, vrsave),
9992 30));
9993
9994 nclobs = 1;
9995
9996 /* We need to clobber the registers in the mask so the scheduler
9997 does not move sets to VRSAVE before sets of AltiVec registers.
9998
9999 However, if the function receives nonlocal gotos, reload will set
10000 all call saved registers live. We will end up with:
10001
10002 (set (reg 999) (mem))
10003 (parallel [ (set (reg vrsave) (unspec blah))
10004 (clobber (reg 999))])
10005
10006 The clobber will cause the store into reg 999 to be dead, and
10007 flow will attempt to delete an epilogue insn. In this case, we
10008 need an unspec use/set of the register. */
10009
10010 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
10011 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
10012 {
10013 if (!epiloguep || call_used_regs [i])
10014 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
10015 gen_rtx_REG (V4SImode, i));
10016 else
10017 {
10018 rtx reg = gen_rtx_REG (V4SImode, i);
10019
10020 clobs[nclobs++]
10021 = gen_rtx_SET (VOIDmode,
10022 reg,
10023 gen_rtx_UNSPEC (V4SImode,
10024 gen_rtvec (1, reg), 27));
10025 }
10026 }
10027
10028 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
10029
10030 for (i = 0; i < nclobs; ++i)
10031 XVECEXP (insn, 0, i) = clobs[i];
10032
10033 return insn;
10034 }
10035
10036 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
10037 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
10038
10039 static void
10040 emit_frame_save (frame_reg, frame_ptr, mode, regno, offset, total_size)
10041 rtx frame_reg;
10042 rtx frame_ptr;
10043 enum machine_mode mode;
10044 unsigned int regno;
10045 int offset;
10046 int total_size;
10047 {
10048 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
10049 rtx replacea, replaceb;
10050
10051 int_rtx = GEN_INT (offset);
10052
10053 /* Some cases that need register indexed addressing. */
10054 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
10055 || (TARGET_SPE_ABI
10056 && SPE_VECTOR_MODE (mode)
10057 && !SPE_CONST_OFFSET_OK (offset)))
10058 {
10059 /* Whomever calls us must make sure r11 is available in the
10060 flow path of instructions in the prologue. */
10061 offset_rtx = gen_rtx_REG (Pmode, 11);
10062 emit_move_insn (offset_rtx, int_rtx);
10063
10064 replacea = offset_rtx;
10065 replaceb = int_rtx;
10066 }
10067 else
10068 {
10069 offset_rtx = int_rtx;
10070 replacea = NULL_RTX;
10071 replaceb = NULL_RTX;
10072 }
10073
10074 reg = gen_rtx_REG (mode, regno);
10075 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
10076 mem = gen_rtx_MEM (mode, addr);
10077 set_mem_alias_set (mem, rs6000_sr_alias_set);
10078
10079 insn = emit_move_insn (mem, reg);
10080
10081 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
10082 }
10083
10084 /* Emit an offset memory reference suitable for a frame store, while
10085 converting to a valid addressing mode. */
10086
10087 static rtx
10088 gen_frame_mem_offset (mode, reg, offset)
10089 enum machine_mode mode;
10090 rtx reg;
10091 int offset;
10092 {
10093 rtx int_rtx, offset_rtx;
10094
10095 int_rtx = GEN_INT (offset);
10096
10097 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
10098 {
10099 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10100 emit_move_insn (offset_rtx, int_rtx);
10101 }
10102 else
10103 offset_rtx = int_rtx;
10104
10105 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
10106 }
10107
/* Emit function prologue as insns.

   In outline: allocate the frame (early for V.4), save AltiVec
   registers and VRSAVE, FPRs (inline or via the out-of-line _savefN
   routines), GPRs (individually or via store-multiple), EH data
   registers, LR and CR, then set up the frame pointer and the PIC/TOC
   register as required.  Saves are tagged via rs6000_frame_related /
   altivec_frame_fixup for the dwarf2 unwinder.  */

void
rs6000_emit_prologue ()
{
  rs6000_stack_t *info = rs6000_stack_info ();
  enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
  int reg_size = TARGET_POWERPC64 ? 8 : 4;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
  rtx frame_reg_rtx = sp_reg_rtx;
  rtx cr_save_rtx = NULL;
  rtx insn;
  int saving_FPRs_inline;
  int using_store_multiple;
  HOST_WIDE_INT sp_offset = 0;

  /* Under the SPE ABI, GPRs are saved as 8-byte V2SImode values.  */
  if (TARGET_SPE_ABI)
    {
      reg_mode = V2SImode;
      reg_size = 8;
    }

  using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			  && !TARGET_SPE_ABI
			  && info->first_gp_reg_save < 31);
  saving_FPRs_inline = (info->first_fp_reg_save == 64
			|| FP_SAVE_INLINE (info->first_fp_reg_save));

  /* For V.4, update stack before we do any saving and set back pointer.  */
  if (info->push_p && DEFAULT_ABI == ABI_V4)
    {
      /* Small frames address the save area as sp+offset; large frames
	 keep the old sp in r12 (frame_ptr_rtx) instead.  */
      if (info->total_size < 32767)
	sp_offset = info->total_size;
      else
	frame_reg_rtx = frame_ptr_rtx;
      rs6000_emit_allocate_stack (info->total_size,
				  (frame_reg_rtx != sp_reg_rtx
				   && (info->cr_save_p
				       || info->lr_save_p
				       || info->first_fp_reg_save < 64
				       || info->first_gp_reg_save < 32
				       )));
      if (frame_reg_rtx != sp_reg_rtx)
	rs6000_emit_stack_tie ();
    }

  /* Save AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      /* There should be a non inline version of this, for when we
	 are saving lots of vector registers.  */
      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx areg, savereg, mem;
	    int offset;

	    offset = info->altivec_save_offset + sp_offset
	      + 16 * (i - info->first_altivec_reg_save);

	    savereg = gen_rtx_REG (V4SImode, i);

	    /* r0 carries the offset for the indexed store.  */
	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn (areg, GEN_INT (offset));

	    /* AltiVec addressing mode is [reg+reg].  */
	    mem = gen_rtx_MEM (V4SImode,
			       gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));

	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    insn = emit_move_insn (mem, savereg);

	    altivec_frame_fixup (insn, areg, offset);
	  }
    }

  /* VRSAVE is a bit vector representing which AltiVec registers
     are used.  The OS uses this to determine which vector
     registers to save on a context switch.  We need to save
     VRSAVE on the stack frame, add whatever AltiVec registers we
     used in this function, and do the corresponding magic in the
     epilogue.  */

  if (TARGET_ALTIVEC && info->vrsave_mask != 0)
    {
      rtx reg, mem, vrsave;
      int offset;

      /* Get VRSAVE onto a GPR.  */
      reg = gen_rtx_REG (SImode, 12);
      vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
      if (TARGET_MACHO)
	emit_insn (gen_get_vrsave_internal (reg));
      else
	emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));

      /* Save VRSAVE.  */
      offset = info->vrsave_save_offset + sp_offset;
      mem
	= gen_rtx_MEM (SImode,
		       gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      insn = emit_move_insn (mem, reg);

      /* Include the registers in the mask.  */
      emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));

      insn = emit_insn (generate_set_vrsave (reg, info, 0));
    }

  /* If we use the link register, get it into r0.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, 0),
		    gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));

  /* If we need to save CR, put it into r12.  */
  if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
    {
      cr_save_rtx = gen_rtx_REG (SImode, 12);
      emit_insn (gen_movesi_from_cr (cr_save_rtx));
    }

  /* Do any required saving of fpr's.  If only one or two to save, do
     it ourselves.  Otherwise, call function.  */
  if (saving_FPRs_inline)
    {
      int i;
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	if ((regs_ever_live[info->first_fp_reg_save+i]
	     && ! call_used_regs[info->first_fp_reg_save+i]))
	  emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
			   info->first_fp_reg_save + i,
			   info->fp_save_offset + sp_offset + 8 * i,
			   info->total_size);
    }
  else if (info->first_fp_reg_save != 64)
    {
      /* Out-of-line FPR save: one PARALLEL holding the call clobber
	 of LR, the USE of the save routine's symbol, and all the
	 individual FPR stores.  */
      int i;
      char rname[30];
      const char *alloc_rname;
      rtvec p;
      p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);

      RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
					  gen_rtx_REG (Pmode,
						       LINK_REGISTER_REGNUM));
      sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
	       info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
      alloc_rname = ggc_strdup (rname);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_SYMBOL_REF (Pmode,
							  alloc_rname));
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	{
	  rtx addr, reg, mem;
	  reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset + 8*i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
	}
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    NULL_RTX, NULL_RTX);
    }

  /* Save GPRs.  This is done as a PARALLEL if we are using
     the store-multiple instructions.  */
  if (using_store_multiple)
    {
      rtvec p;
      int i;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr, reg, mem;
	  reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->gp_save_offset
					+ sp_offset
					+ reg_size * i));
	  mem = gen_rtx_MEM (reg_mode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
	}
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    NULL_RTX, NULL_RTX);
    }
  else
    {
      /* Individual GPR stores.  The PIC register is also saved when
	 it is live under V.4 small PIC or Darwin PIC.  */
      int i;
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	if ((regs_ever_live[info->first_gp_reg_save+i]
	     && ! call_used_regs[info->first_gp_reg_save+i])
	    || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
		&& ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	  {
	    rtx addr, reg, mem;
	    reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);

	    if (TARGET_SPE_ABI)
	      {
		int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
		rtx b;

		/* Offsets too large for the SPE immediate field go
		   through the fixed scratch register.  */
		if (!SPE_CONST_OFFSET_OK (offset))
		  {
		    b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
		    emit_move_insn (b, GEN_INT (offset));
		  }
		else
		  b = GEN_INT (offset);

		addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
		mem = gen_rtx_MEM (V2SImode, addr);
		set_mem_alias_set (mem, rs6000_sr_alias_set);
		insn = emit_move_insn (mem, reg);

		if (GET_CODE (b) == CONST_INT)
		  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
					NULL_RTX, NULL_RTX);
		else
		  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
					b, GEN_INT (offset));
	      }
	    else
	      {
		addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				     GEN_INT (info->gp_save_offset
					      + sp_offset
					      + reg_size * i));
		mem = gen_rtx_MEM (reg_mode, addr);
		set_mem_alias_set (mem, rs6000_sr_alias_set);

		insn = emit_move_insn (mem, reg);
		rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				      NULL_RTX, NULL_RTX);
	      }
	  }
    }

  /* ??? There's no need to emit actual instructions here, but it's the
     easiest way to get the frame unwind information emitted.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      for (i = 0; ; ++i)
	{
	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
			   info->ehrd_offset + sp_offset
			   + reg_size * (int) i,
			   info->total_size);
	}
    }

  /* Save lr if we used it.  */
  if (info->lr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->lr_save_offset + sp_offset));
      rtx reg = gen_rtx_REG (Pmode, 0);
      rtx mem = gen_rtx_MEM (Pmode, addr);
      /* This should not be of rs6000_sr_alias_set, because of
	 __builtin_return_address.  */

      insn = emit_move_insn (mem, reg);
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
    }

  /* Save CR if we use any that must be preserved.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      /* If r12 was used to hold the original sp, copy cr into r0 now
	 that it's free.  */
      if (REGNO (frame_reg_rtx) == 12)
	{
	  cr_save_rtx = gen_rtx_REG (SImode, 0);
	  emit_insn (gen_movesi_from_cr (cr_save_rtx));
	}
      insn = emit_move_insn (mem, cr_save_rtx);

      /* Now, there's no way that dwarf2out_frame_debug_expr is going
	 to understand '(unspec:SI [(reg:CC 68) ...] 19)'.  But that's
	 OK.  All we have to do is specify that _one_ condition code
	 register is saved in this stack slot.  The thrower's epilogue
	 will then restore all the call-saved registers.
	 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux.  */
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
    }

  /* Update stack and set back pointer unless this is V.4,
     for which it was done previously.  */
  if (info->push_p && DEFAULT_ABI != ABI_V4)
    rs6000_emit_allocate_stack (info->total_size, FALSE);

  /* Set frame pointer, if needed.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
			     sp_reg_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up.  */
  if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
      || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
	  && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
    {
      /* If emit_load_toc_table will use the link register, we need to save
	 it.  We use R11 for this purpose because emit_load_toc_table
	 can use register 0.  This allows us to use a plain 'blr' to return
	 from the procedure more often.  */
      int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
				      && ! info->lr_save_p
				      && EXIT_BLOCK_PTR->pred != NULL);
      if (save_LR_around_toc_setup)
	emit_move_insn (gen_rtx_REG (Pmode, 11),
			gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));

      rs6000_emit_load_toc_table (TRUE);

      if (save_LR_around_toc_setup)
	emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
			gen_rtx_REG (Pmode, 11));
    }

  /* Darwin PIC: set up the PIC base register, marked REG_MAYBE_DEAD
     so the LR scan ignores it.  */
  if (DEFAULT_ABI == ABI_DARWIN
      && flag_pic && current_function_uses_pic_offset_table)
    {
      rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);

      rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest)));

      rs6000_maybe_dead (
	emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
			gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
    }
}
10469
/* Write function prologue.

   Emits the .extern directives the prologue needs, and — on targets
   without a prologue expander (! HAVE_prologue) — generates the
   prologue rtl here and writes it out through final().  Also advances
   rs6000_pic_labelno for the next function's LCF/LCL labels.  */

static void
rs6000_output_function_prologue (file, size)
     FILE *file;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  rs6000_stack_t *info = rs6000_stack_info ();

  if (TARGET_DEBUG_STACK)
    debug_stack_info (info);

  /* Write .extern for any function we will call to save and restore
     fp values.  */
  if (info->first_fp_reg_save < 64
      && !FP_SAVE_INLINE (info->first_fp_reg_save))
    fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
	     SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
	     RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
	     RESTORE_FP_SUFFIX);

  /* Write .extern for AIX common mode routines, if needed.  */
  if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
    {
      fputs ("\t.extern __mulh\n", file);
      fputs ("\t.extern __mull\n", file);
      fputs ("\t.extern __divss\n", file);
      fputs ("\t.extern __divus\n", file);
      fputs ("\t.extern __quoss\n", file);
      fputs ("\t.extern __quous\n", file);
      common_mode_defined = 1;
    }

  if (! HAVE_prologue)
    {
      start_sequence ();

      /* A NOTE_INSN_DELETED is supposed to be at the start and end of
	 the "toplevel" insn chain.  */
      emit_note (0, NOTE_INSN_DELETED);
      rs6000_emit_prologue ();
      emit_note (0, NOTE_INSN_DELETED);

      /* Expand INSN_ADDRESSES so final() doesn't crash.  */
      {
	rtx insn;
	unsigned addr = 0;
	for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	  {
	    INSN_ADDRESSES_NEW (insn, addr);
	    addr += 4;
	  }
      }

      if (TARGET_DEBUG_STACK)
	debug_rtx_list (get_insns (), 100);
      final (get_insns (), file, FALSE, FALSE);
      end_sequence ();
    }

  rs6000_pic_labelno++;
}
10532
/* Emit function epilogue as insns.

   At present, dwarf2out_frame_debug_expr doesn't understand
   register restores, so we don't bother setting RTX_FRAME_RELATED_P
   anywhere in the epilogue.  Most of the insns below would in any case
   need special notes to explain where r11 is in relation to the stack.

   SIBCALL is nonzero when this epilogue precedes a sibling call; in
   that case the final return insn is not emitted here.  */

void
rs6000_emit_epilogue (sibcall)
     int sibcall;
{
  rs6000_stack_t *info;
  int restoring_FPRs_inline;
  int using_load_multiple;
  int using_mfcr_multiple;
  int use_backchain_to_restore_sp;
  int sp_offset = 0;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
  rtx frame_reg_rtx = sp_reg_rtx;
  enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
  int reg_size = TARGET_POWERPC64 ? 8 : 4;
  int i;

  /* Under the SPE ABI, GPRs are saved/restored as 64-bit V2SImode
     quantities.  */
  if (TARGET_SPE_ABI)
    {
      reg_mode = V2SImode;
      reg_size = 8;
    }

  /* Decide up front which restore strategies apply to this frame.  */
  info = rs6000_stack_info ();
  using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			 && !TARGET_SPE_ABI
			 && info->first_gp_reg_save < 31);
  restoring_FPRs_inline = (sibcall
			   || current_function_calls_eh_return
			   || info->first_fp_reg_save == 64
			   || FP_SAVE_INLINE (info->first_fp_reg_save));
  use_backchain_to_restore_sp = (frame_pointer_needed
				 || current_function_calls_alloca
				 || info->total_size > 32767);
  using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
			 || rs6000_cpu == PROCESSOR_PPC603
			 || rs6000_cpu == PROCESSOR_PPC750
			 || optimize_size);

  /* If we have a frame pointer, a call to alloca, or a large stack
     frame, restore the old stack pointer using the backchain.  Otherwise,
     we know what size to update it with.  */
  if (use_backchain_to_restore_sp)
    {
      /* Under V.4, don't reset the stack pointer until after we're done
	 loading the saved registers.  */
      if (DEFAULT_ABI == ABI_V4)
	frame_reg_rtx = gen_rtx_REG (Pmode, 11);

      emit_move_insn (frame_reg_rtx,
		      gen_rtx_MEM (Pmode, sp_reg_rtx));

    }
  else if (info->push_p)
    {
      /* For V.4, defer the stack-pointer bump; record the offset and
	 fold it into the restore addresses below instead.  */
      if (DEFAULT_ABI == ABI_V4)
	sp_offset = info->total_size;
      else
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size)));
	}
    }

  /* Restore AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx addr, areg, mem;

	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn
	      (areg, GEN_INT (info->altivec_save_offset
			      + sp_offset
			      + 16 * (i - info->first_altivec_reg_save)));

	    /* AltiVec addressing mode is [reg+reg].  */
	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
	    mem = gen_rtx_MEM (V4SImode, addr);
	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
	  }
    }

  /* Restore VRSAVE if needed.  */
  if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
    {
      rtx addr, mem, reg;

      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			   GEN_INT (info->vrsave_save_offset + sp_offset));
      mem = gen_rtx_MEM (SImode, addr);
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      reg = gen_rtx_REG (SImode, 12);
      emit_move_insn (reg, mem);

      emit_insn (generate_set_vrsave (reg, info, 1));
    }

  /* Get the old lr if we saved it.  Loaded into r0 first so the
     mtlr below can overlap the remaining restores.  */
  if (info->lr_save_p)
    {
      rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
				      info->lr_save_offset + sp_offset);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
    }

  /* Get the old cr if we saved it.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      emit_move_insn (gen_rtx_REG (SImode, 12), mem);
    }

  /* Set LR here to try to overlap restores below.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
		    gen_rtx_REG (Pmode, 0));

  /* Load exception handler data registers, if needed.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      for (i = 0; ; ++i)
	{
	  rtx mem;

	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
				      info->ehrd_offset + sp_offset
				      + reg_size * (int) i);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
	}
    }

  /* Restore GPRs.  This is done as a PARALLEL if we are using
     the load-multiple instructions.  */
  if (using_load_multiple)
    {
      rtvec p;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) =
	    gen_rtx_SET (VOIDmode,
			 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
			 mem);
	}
      emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
  else
    /* Otherwise restore each live call-saved GPR individually; the PIC
       offset table register is restored even though it is call-used
       under the listed PIC ABIs.  */
    for (i = 0; i < 32 - info->first_gp_reg_save; i++)
      if ((regs_ever_live[info->first_gp_reg_save+i]
	   && ! call_used_regs[info->first_gp_reg_save+i])
	  || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
	      && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
		  || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  /* Restore 64-bit quantities for SPE.  */
	  if (TARGET_SPE_ABI)
	    {
	      int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
	      rtx b;

	      /* SPE load offsets have a limited range; fall back to a
		 scratch register when the constant doesn't fit.  */
	      if (!SPE_CONST_OFFSET_OK (offset))
		{
		  b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
		  emit_move_insn (b, GEN_INT (offset));
		}
	      else
		b = GEN_INT (offset);

	      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
	      mem = gen_rtx_MEM (V2SImode, addr);
	    }

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode,
				       info->first_gp_reg_save + i), mem);
	}

  /* Restore fpr's if we need to do it without calling a function.  */
  if (restoring_FPRs_inline)
    for (i = 0; i < 64 - info->first_fp_reg_save; i++)
      if ((regs_ever_live[info->first_fp_reg_save+i]
	   && ! call_used_regs[info->first_fp_reg_save+i]))
	{
	  rtx addr, mem;
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset
					+ 8 * i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (DFmode,
				       info->first_fp_reg_save + i),
			  mem);
	}

  /* If we saved cr, restore it here.  Just those that were used.  */
  if (info->cr_save_p)
    {
      rtx r12_rtx = gen_rtx_REG (SImode, 12);
      int count = 0;

      if (using_mfcr_multiple)
	{
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      count++;
	  /* cr_save_p was set, so at least one CR field must be live.  */
	  if (count == 0)
	    abort ();
	}

      if (using_mfcr_multiple && count > 1)
	{
	  rtvec p;
	  int ndx;

	  p = rtvec_alloc (count);

	  ndx = 0;
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      {
		rtvec r = rtvec_alloc (2);
		RTVEC_ELT (r, 0) = r12_rtx;
		RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
		RTVEC_ELT (p, ndx) =
		  gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
			       gen_rtx_UNSPEC (CCmode, r, 20));
		ndx++;
	      }
	  emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
	  if (ndx != count)
	    abort ();
	}
      else
	for (i = 0; i < 8; i++)
	  if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	    {
	      emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
							   CR0_REGNO+i),
					      r12_rtx));
	    }
    }

  /* If this is V.4, unwind the stack pointer after all of the loads
     have been done.  We need to emit a block here so that sched
     doesn't decide to move the sp change before the register restores
     (which may not have any obvious dependency on the stack).  This
     doesn't hurt performance, because there is no scheduling that can
     be done after this point.  */
  if (DEFAULT_ABI == ABI_V4)
    {
      if (frame_reg_rtx != sp_reg_rtx)
	rs6000_emit_stack_tie ();

      if (use_backchain_to_restore_sp)
	{
	  emit_move_insn (sp_reg_rtx, frame_reg_rtx);
	}
      else if (sp_offset != 0)
	{
	  emit_insn (Pmode == SImode
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset)));
	}
    }

  if (current_function_calls_eh_return)
    {
      rtx sa = EH_RETURN_STACKADJ_RTX;
      emit_insn (Pmode == SImode
		 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
		 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
    }

  if (!sibcall)
    {
      rtvec p;
      if (! restoring_FPRs_inline)
	p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
      else
	p = rtvec_alloc (2);

      RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_REG (Pmode,
						   LINK_REGISTER_REGNUM));

      /* If we have to restore more than two FP registers, branch to the
	 restore function.  It will return to our caller.  */
      if (! restoring_FPRs_inline)
	{
	  int i;
	  char rname[30];
	  const char *alloc_rname;

	  sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
		   info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
	  alloc_rname = ggc_strdup (rname);
	  RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
					  gen_rtx_SYMBOL_REF (Pmode,
							      alloc_rname));

	  /* Record the FPR restores performed by the out-of-line
	     routine so the RTL describes them.  */
	  for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	    {
	      rtx addr, mem;
	      addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
				   GEN_INT (info->fp_save_offset + 8*i));
	      mem = gen_rtx_MEM (DFmode, addr);
	      set_mem_alias_set (mem, rs6000_sr_alias_set);

	      RTVEC_ELT (p, i+3) =
		gen_rtx_SET (VOIDmode,
			     gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
			     mem);
	    }
	}

      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
}
10903
/* Write function epilogue.

   FILE is the assembly output stream; SIZE is unused.  When the
   target has no epilogue insn pattern (! HAVE_epilogue) the epilogue
   RTL is generated and printed here directly; afterwards, on AIX, the
   traceback table is emitted.  */

static void
rs6000_output_function_epilogue (file, size)
     FILE *file;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  rs6000_stack_t *info = rs6000_stack_info ();

  if (! HAVE_epilogue)
    {
      rtx insn = get_last_insn ();
      /* If the last insn was a BARRIER, we don't have to write anything except
	 the trace table.  */
      if (GET_CODE (insn) == NOTE)
	insn = prev_nonnote_insn (insn);
      if (insn == 0 || GET_CODE (insn) != BARRIER)
	{
	  /* This is slightly ugly, but at least we don't have two
	     copies of the epilogue-emitting code.  */
	  start_sequence ();

	  /* A NOTE_INSN_DELETED is supposed to be at the start
	     and end of the "toplevel" insn chain.  */
	  emit_note (0, NOTE_INSN_DELETED);
	  rs6000_emit_epilogue (FALSE);
	  emit_note (0, NOTE_INSN_DELETED);

	  /* Expand INSN_ADDRESSES so final() doesn't crash.  */
	  {
	    rtx insn;
	    unsigned addr = 0;
	    for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	      {
		INSN_ADDRESSES_NEW (insn, addr);
		addr += 4;
	      }
	  }

	  if (TARGET_DEBUG_STACK)
	    debug_rtx_list (get_insns (), 100);
	  final (get_insns (), file, FALSE, FALSE);
	  end_sequence ();
	}
    }

  /* Output a traceback table here.  See /usr/include/sys/debug.h for info
     on its format.

     We don't output a traceback table if -finhibit-size-directive was
     used.  The documentation for -finhibit-size-directive reads
     ``don't output a @code{.size} assembler directive, or anything
     else that would cause trouble if the function is split in the
     middle, and the two halves are placed at locations far apart in
     memory.''  The traceback table has this property, since it
     includes the offset from the start of the function to the
     traceback table itself.

     System V.4 Powerpc's (and the embedded ABI derived from it) use a
     different traceback table.  */
  if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
      && rs6000_traceback != traceback_none)
    {
      const char *fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
      const char *language_string = lang_hooks.name;
      int fixed_parms = 0, float_parms = 0, parm_info = 0;
      int i;
      int optional_tbtab;

      /* Decide whether the optional (variable-length) fields are
	 emitted; -mtraceback= overrides the size-based default.  */
      if (rs6000_traceback == traceback_full)
	optional_tbtab = 1;
      else if (rs6000_traceback == traceback_part)
	optional_tbtab = 0;
      else
	optional_tbtab = !optimize_size && !TARGET_ELF;

      while (*fname == '.')	/* V.4 encodes . in the name */
	fname++;

      /* Need label immediately before tbtab, so we can compute its offset
	 from the function start.  */
      if (*fname == '*')
	++fname;
      ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
      ASM_OUTPUT_LABEL (file, fname);

      /* The .tbtab pseudo-op can only be used for the first eight
	 expressions, since it can't handle the possibly variable
	 length fields that follow.  However, if you omit the optional
	 fields, the assembler outputs zeros for all optional fields
	 anyways, giving each variable length field is minimum length
	 (as defined in sys/debug.h).  Thus we can not use the .tbtab
	 pseudo-op at all.  */

      /* An all-zero word flags the start of the tbtab, for debuggers
	 that have to find it by searching forward from the entry
	 point or from the current pc.  */
      fputs ("\t.long 0\n", file);

      /* Tbtab format type.  Use format type 0.  */
      fputs ("\t.byte 0,", file);

      /* Language type.  Unfortunately, there doesn't seem to be any
	 official way to get this info, so we use language_string.  C
	 is 0.  C++ is 9.  No number defined for Obj-C, so use the
	 value for C for now.  There is no official value for Java,
	 although IBM appears to be using 13.  There is no official value
	 for Chill, so we've chosen 44 pseudo-randomly.  */
      if (! strcmp (language_string, "GNU C")
	  || ! strcmp (language_string, "GNU Objective-C"))
	i = 0;
      else if (! strcmp (language_string, "GNU F77"))
	i = 1;
      else if (! strcmp (language_string, "GNU Ada"))
	i = 3;
      else if (! strcmp (language_string, "GNU Pascal"))
	i = 2;
      else if (! strcmp (language_string, "GNU C++"))
	i = 9;
      else if (! strcmp (language_string, "GNU Java"))
	i = 13;
      else if (! strcmp (language_string, "GNU CHILL"))
	i = 44;
      else
	abort ();
      fprintf (file, "%d,", i);

      /* 8 single bit fields: global linkage (not set for C extern linkage,
	 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
	 from start of procedure stored in tbtab, internal function, function
	 has controlled storage, function has no toc, function uses fp,
	 function logs/aborts fp operations.  */
      /* Assume that fp operations are used if any fp reg must be saved.  */
      fprintf (file, "%d,",
	       (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));

      /* 6 bitfields: function is interrupt handler, name present in
	 proc table, function calls alloca, on condition directives
	 (controls stack walks, 3 bits), saves condition reg, saves
	 link reg.  */
      /* The `function calls alloca' bit seems to be set whenever reg 31 is
	 set up as a frame pointer, even when there is no alloca call.  */
      fprintf (file, "%d,",
	       ((optional_tbtab << 6)
		| ((optional_tbtab & frame_pointer_needed) << 5)
		| (info->cr_save_p << 1)
		| (info->lr_save_p)));

      /* 3 bitfields: saves backchain, fixup code, number of fpr saved
	 (6 bits).  */
      fprintf (file, "%d,",
	       (info->push_p << 7) | (64 - info->first_fp_reg_save));

      /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits).  */
      fprintf (file, "%d,", (32 - first_reg_to_save ()));

      if (optional_tbtab)
	{
	  /* Compute the parameter info from the function decl argument
	     list.  */
	  tree decl;
	  int next_parm_info_bit = 31;

	  for (decl = DECL_ARGUMENTS (current_function_decl);
	       decl; decl = TREE_CHAIN (decl))
	    {
	      rtx parameter = DECL_INCOMING_RTL (decl);
	      enum machine_mode mode = GET_MODE (parameter);

	      if (GET_CODE (parameter) == REG)
		{
		  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
		    {
		      int bits;

		      float_parms++;

		      /* Two-bit codes: 10 = single float, 11 = double.  */
		      if (mode == SFmode)
			bits = 0x2;
		      else if (mode == DFmode)
			bits = 0x3;
		      else
			abort ();

		      /* If only one bit will fit, don't or in this entry.  */
		      if (next_parm_info_bit > 0)
			parm_info |= (bits << (next_parm_info_bit - 1));
		      next_parm_info_bit -= 2;
		    }
		  else
		    {
		      fixed_parms += ((GET_MODE_SIZE (mode)
				       + (UNITS_PER_WORD - 1))
				      / UNITS_PER_WORD);
		      next_parm_info_bit -= 1;
		    }
		}
	    }
	}

      /* Number of fixed point parameters.  */
      /* This is actually the number of words of fixed point parameters; thus
	 an 8 byte struct counts as 2; and thus the maximum value is 8.  */
      fprintf (file, "%d,", fixed_parms);

      /* 2 bitfields: number of floating point parameters (7 bits), parameters
	 all on stack.  */
      /* This is actually the number of fp registers that hold parameters;
	 and thus the maximum value is 13.  */
      /* Set parameters on stack bit if parameters are not in their original
	 registers, regardless of whether they are on the stack?  Xlc
	 seems to set the bit when not optimizing.  */
      fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));

      if (! optional_tbtab)
	return;

      /* Optional fields follow.  Some are variable length.  */

      /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
	 11 double float.  */
      /* There is an entry for each parameter in a register, in the order that
	 they occur in the parameter list.  Any intervening arguments on the
	 stack are ignored.  If the list overflows a long (max possible length
	 34 bits) then completely leave off all elements that don't fit.  */
      /* Only emit this long if there was at least one parameter.  */
      if (fixed_parms || float_parms)
	fprintf (file, "\t.long %d\n", parm_info);

      /* Offset from start of code to tb table.  */
      fputs ("\t.long ", file);
      ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      fputs ("-.", file);
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      putc ('\n', file);

      /* Interrupt handler mask.  */
      /* Omit this long, since we never set the interrupt handler bit
	 above.  */

      /* Number of CTL (controlled storage) anchors.  */
      /* Omit this long, since the has_ctl bit is never set above.  */

      /* Displacement into stack of each CTL anchor.  */
      /* Omit this list of longs, because there are no CTL anchors.  */

      /* Length of function name.  */
      fprintf (file, "\t.short %d\n", (int) strlen (fname));

      /* Function name.  */
      assemble_string (fname, strlen (fname));

      /* Register for alloca automatic storage; this is always reg 31.
	 Only emit this if the alloca bit was set above.  */
      if (frame_pointer_needed)
	fputs ("\t.byte 31\n", file);

      fputs ("\t.align 2\n", file);
    }
}
11173 \f
11174 /* A C compound statement that outputs the assembler code for a thunk
11175 function, used to implement C++ virtual function calls with
11176 multiple inheritance. The thunk acts as a wrapper around a virtual
11177 function, adjusting the implicit object parameter before handing
11178 control off to the real function.
11179
11180 First, emit code to add the integer DELTA to the location that
11181 contains the incoming first argument. Assume that this argument
11182 contains a pointer, and is the one used to pass the `this' pointer
11183 in C++. This is the incoming argument *before* the function
11184 prologue, e.g. `%o0' on a sparc. The addition must preserve the
11185 values of all other incoming arguments.
11186
11187 After the addition, emit code to jump to FUNCTION, which is a
11188 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
11189 not touch the return address. Hence returning from FUNCTION will
11190 return to whoever called the current `thunk'.
11191
11192 The effect must be as if FUNCTION had been called directly with the
11193 adjusted first argument. This macro is responsible for emitting
11194 all of the code for a thunk function; output_function_prologue()
11195 and output_function_epilogue() are not invoked.
11196
11197 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
11198 been extracted from it.) It might possibly be useful on some
11199 targets, but probably not.
11200
11201 If you do not define this macro, the target-independent code in the
11202 C++ frontend will generate a less efficient heavyweight thunk that
11203 calls FUNCTION instead of jumping to it. The generic approach does
11204 not support varargs. */
11205
void
output_mi_thunk (file, thunk_fndecl, delta, function)
     FILE *file;
     tree thunk_fndecl ATTRIBUTE_UNUSED;
     int delta;
     tree function;
{
  /* `this' is passed in r3, unless the function returns an aggregate
     by invisible reference, in which case r3 carries the return slot
     and `this' arrives in r4.  */
  const char *this_reg =
    reg_names[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function))) ? 4 : 3 ];
  const char *prefix;
  const char *fname;
  const char *r0	 = reg_names[0];
  const char *toc	 = reg_names[2];
  const char *schain	 = reg_names[11];
  const char *r12	 = reg_names[12];
  char buf[512];
  static int labelno = 0;	/* Numbers the per-thunk TOC labels.  */

  /* Small constants that can be done by one add instruction.  */
  if (delta >= -32768 && delta <= 32767)
    {
      if (! TARGET_NEW_MNEMONICS)
	fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta, this_reg);
      else
	fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta);
    }

  /* 64-bit constants.  If "int" is 32 bits, we'll never hit this abort.  */
  else if (TARGET_64BIT && (delta < -2147483647 - 1 || delta > 2147483647))
    abort ();

  /* Large constants that can be done by one addis instruction.  */
  else if ((delta & 0xffff) == 0)
    asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
		 delta >> 16);

  /* 32-bit constants that can be done by an add and addis instruction.  */
  else
    {
      /* Break into two pieces, propagating the sign bit from the low
	 word to the upper word.  */
      int delta_low  = ((delta & 0xffff) ^ 0x8000) - 0x8000;
      int delta_high = (delta - delta_low) >> 16;

      asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
		   delta_high);

      if (! TARGET_NEW_MNEMONICS)
	fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta_low, this_reg);
      else
	fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta_low);
    }

  /* Get the prefix in front of the names.  */
  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_AIX:
      prefix = ".";
      break;

    case ABI_V4:
    case ABI_AIX_NODESC:
      prefix = "";
      break;
    }

  /* If the function is compiled in this module, jump to it directly.
     Otherwise, load up its address and jump to it.  */

  fname = XSTR (XEXP (DECL_RTL (function), 0), 0);

  if (current_file_function_operand (XEXP (DECL_RTL (function), 0), VOIDmode)
      && (! lookup_attribute ("longcall",
			      TYPE_ATTRIBUTES (TREE_TYPE (function)))
	  || lookup_attribute ("shortcall",
			       TYPE_ATTRIBUTES (TREE_TYPE (function)))))

    {
      fprintf (file, "\tb %s", prefix);
      assemble_name (file, fname);
      if (DEFAULT_ABI == ABI_V4 && flag_pic) fputs ("@local", file);
      putc ('\n', file);
    }

  else
    {
      switch (DEFAULT_ABI)
	{
	default:
	  abort ();

	case ABI_AIX:
	  /* Set up a TOC entry for the function.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "Lthunk", labelno);
	  toc_section ();
	  ASM_OUTPUT_INTERNAL_LABEL (file, "Lthunk", labelno);
	  labelno++;

	  if (TARGET_MINIMAL_TOC)
	    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
	  else
	    {
	      fputs ("\t.tc ", file);
	      assemble_name (file, fname);
	      fputs ("[TC],", file);
	    }
	  assemble_name (file, fname);
	  putc ('\n', file);
	  if (TARGET_ELF)
	    function_section (current_function_decl);
	  else
	    text_section();
	  if (TARGET_MINIMAL_TOC)
	    asm_fprintf (file, (TARGET_32BIT)
			 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12,
			 TARGET_ELF ? ".LCTOC0@toc" : ".LCTOC..1", toc);
	  /* Load the function descriptor's address from the TOC, then
	     its three words: entry point, new TOC value, and static
	     chain.  */
	  asm_fprintf (file, (TARGET_32BIT) ? "\t{l|lwz} %s," : "\tld %s,", r12);
	  assemble_name (file, buf);
	  if (TARGET_ELF && TARGET_MINIMAL_TOC)
	    fputs ("-(.LCTOC1)", file);
	  asm_fprintf (file, "(%s)\n", TARGET_MINIMAL_TOC ? r12 : toc);
	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
		       r0, r12);

	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
		       toc, r12);

	  asm_fprintf (file, "\tmtctr %s\n", r0);
	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
		       schain, r12);

	  asm_fprintf (file, "\tbctr\n");
	  break;

	case ABI_AIX_NODESC:
	case ABI_V4:
	  fprintf (file, "\tb %s", prefix);
	  assemble_name (file, fname);
	  if (flag_pic) fputs ("@plt", file);
	  putc ('\n', file);
	  break;

#if TARGET_MACHO
	case ABI_DARWIN:
	  fprintf (file, "\tb %s", prefix);
	  if (flag_pic && !machopic_name_defined_p (fname))
	    assemble_name (file, machopic_stub_name (fname));
	  else
	    assemble_name (file, fname);
	  putc ('\n', file);
	  break;
#endif
	}
    }
}
11367
11368 \f
11369 /* A quick summary of the various types of 'constant-pool tables'
11370 under PowerPC:
11371
11372 Target Flags Name One table per
11373 AIX (none) AIX TOC object file
11374 AIX -mfull-toc AIX TOC object file
11375 AIX -mminimal-toc AIX minimal TOC translation unit
11376 SVR4/EABI (none) SVR4 SDATA object file
11377 SVR4/EABI -fpic SVR4 pic object file
11378 SVR4/EABI -fPIC SVR4 PIC translation unit
11379 SVR4/EABI -mrelocatable EABI TOC function
11380 SVR4/EABI -maix AIX TOC object file
11381 SVR4/EABI -maix -mminimal-toc
11382 AIX minimal TOC translation unit
11383
11384 Name Reg. Set by entries contains:
11385 made by addrs? fp? sum?
11386
11387 AIX TOC 2 crt0 as Y option option
11388 AIX minimal TOC 30 prolog gcc Y Y option
11389 SVR4 SDATA 13 crt0 gcc N Y N
11390 SVR4 pic 30 prolog ld Y not yet N
11391 SVR4 PIC 30 prolog gcc Y option option
11392 EABI TOC 30 prolog gcc Y option option
11393
11394 */
11395
/* Hash table stuff for keeping track of TOC entries.  */

struct toc_hash_struct
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  /* Mode the constant is accessed in; entries with equal keys but
     different modes are treated as distinct (see toc_hash_eq).  */
  enum machine_mode key_mode;
  /* Internal label number associated with this TOC entry.  */
  int labelno;
};

/* The table itself, keyed by (key, key_mode) pairs — presumably so
   identical constants can share one TOC entry; confirm against the
   TOC-output code elsewhere in this file.  */
static htab_t toc_hash_table;
11408
11409 /* Hash functions for the hash table. */
11410
11411 static unsigned
11412 rs6000_hash_constant (k)
11413 rtx k;
11414 {
11415 unsigned result = (GET_CODE (k) << 3) ^ GET_MODE (k);
11416 const char *format = GET_RTX_FORMAT (GET_CODE (k));
11417 int flen = strlen (format);
11418 int fidx;
11419
11420 if (GET_CODE (k) == LABEL_REF)
11421 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
11422
11423 if (GET_CODE (k) == CODE_LABEL)
11424 fidx = 3;
11425 else
11426 fidx = 0;
11427
11428 for (; fidx < flen; fidx++)
11429 switch (format[fidx])
11430 {
11431 case 's':
11432 {
11433 unsigned i, len;
11434 const char *str = XSTR (k, fidx);
11435 len = strlen (str);
11436 result = result * 613 + len;
11437 for (i = 0; i < len; i++)
11438 result = result * 613 + (unsigned) str[i];
11439 break;
11440 }
11441 case 'u':
11442 case 'e':
11443 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
11444 break;
11445 case 'i':
11446 case 'n':
11447 result = result * 613 + (unsigned) XINT (k, fidx);
11448 break;
11449 case 'w':
11450 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
11451 result = result * 613 + (unsigned) XWINT (k, fidx);
11452 else
11453 {
11454 size_t i;
11455 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
11456 result = result * 613 + (unsigned) (XWINT (k, fidx)
11457 >> CHAR_BIT * i);
11458 }
11459 break;
11460 default:
11461 abort ();
11462 }
11463 return result;
11464 }
11465
11466 static unsigned
11467 toc_hash_function (hash_entry)
11468 const void * hash_entry;
11469 {
11470 const struct toc_hash_struct *thc =
11471 (const struct toc_hash_struct *) hash_entry;
11472 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
11473 }
11474
11475 /* Compare H1 and H2 for equivalence. */
11476
11477 static int
11478 toc_hash_eq (h1, h2)
11479 const void * h1;
11480 const void * h2;
11481 {
11482 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
11483 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
11484
11485 if (((const struct toc_hash_struct *) h1)->key_mode
11486 != ((const struct toc_hash_struct *) h2)->key_mode)
11487 return 0;
11488
11489 return rtx_equal_p (r1, r2);
11490 }
11491
11492 /* Mark the hash table-entry HASH_ENTRY. */
11493
11494 static int
11495 toc_hash_mark_entry (hash_slot, unused)
11496 void ** hash_slot;
11497 void * unused ATTRIBUTE_UNUSED;
11498 {
11499 const struct toc_hash_struct * hash_entry =
11500 *(const struct toc_hash_struct **) hash_slot;
11501 rtx r = hash_entry->key;
11502 ggc_set_mark (hash_entry);
11503 /* For CODE_LABELS, we don't want to drag in the whole insn chain... */
11504 if (GET_CODE (r) == LABEL_REF)
11505 {
11506 ggc_set_mark (r);
11507 ggc_set_mark (XEXP (r, 0));
11508 }
11509 else
11510 ggc_mark_rtx (r);
11511 return 1;
11512 }
11513
11514 /* Mark all the elements of the TOC hash-table *HT. */
11515
11516 static void
11517 toc_hash_mark_table (vht)
11518 void *vht;
11519 {
11520 htab_t *ht = vht;
11521
11522 htab_traverse (*ht, toc_hash_mark_entry, (void *)0);
11523 }
11524
11525 /* These are the names given by the C++ front-end to vtables, and
11526 vtable-like objects. Ideally, this logic should not be here;
11527 instead, there should be some programmatic way of inquiring as
11528 to whether or not an object is a vtable. */
11529
/* Nonzero if NAME is a name the C++ front end gives to vtables and
   vtable-like objects ("_vt." old-ABI prefix; "_ZTV"/"_ZTT"/"_ZTC"
   new-ABI prefixes).  Fixed to test the macro parameter NAME itself
   instead of the identifier `name', which silently required every
   call site to have a local variable with that exact name.  */

#define VTABLE_NAME_P(NAME)				\
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0	\
   || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0	\
   || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0	\
   || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
11535
/* Write the name of SYMBOL_REF X to FILE, for use in a TOC entry.  */

void
rs6000_output_symbol_ref (file, x)
     FILE *file;
     rtx x;
{
  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  const char *name = XSTR (x, 0);

  if (VTABLE_NAME_P (name))
    {
      RS6000_OUTPUT_BASENAME (file, name);
    }
  else
    /* Ordinary symbols go through assemble_name so that any
       assembler-level name transformations are applied.  */
    assemble_name (file, name);
}
11556
/* Output a TOC entry for constant X with label number LABELNO to
   FILE.  We derive the entry name from what is being written.  MODE
   is the mode of X and matters for integer constants narrower than a
   TOC word, which must be padded.  */

void
output_toc (file, x, labelno, mode)
     FILE *file;
     rtx x;
     int labelno;
     enum machine_mode mode;
{
  char buf[256];
  const char *name = buf;
  const char *real_name;
  rtx base = x;
  int offset = 0;

  if (TARGET_NO_TOC)
    abort ();

  /* When the linker won't eliminate them, don't output duplicate
     TOC entries (this happens on AIX if there is any kind of TOC,
     and on SVR4 under -fPIC or -mrelocatable).  */
  if (TARGET_TOC)
    {
      struct toc_hash_struct *h;
      void * * found;

      h = ggc_alloc (sizeof (*h));
      h->key = x;
      h->key_mode = mode;
      h->labelno = labelno;

      found = htab_find_slot (toc_hash_table, h, 1);
      if (*found == NULL)
	*found = h;
      else  /* This is indeed a duplicate.
	       Set this label equal to that label.  */
	{
	  fputs ("\t.set ", file);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d,", labelno);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
				   found)->labelno));
	  return;
	}
    }

  /* If we're going to put a double constant in the TOC, make sure it's
     aligned properly when strict alignment is on.  */
  if (GET_CODE (x) == CONST_DOUBLE
      && STRICT_ALIGNMENT
      && GET_MODE_BITSIZE (mode) >= 64
      && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
    ASM_OUTPUT_ALIGN (file, 3);
  }

  ASM_OUTPUT_INTERNAL_LABEL (file, "LC", labelno);

  /* Handle FP constants specially.  Note that if we have a minimal
     TOC, things we put here aren't actually in the TOC, so we can allow
     FP constants.  */
  if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      REAL_VALUE_TYPE rv;
      long k[2];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  /* One 64-bit word holds the whole double.  */
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
      else
	{
	  /* Two 32-bit words.  */
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      REAL_VALUE_TYPE rv;
      long l;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      if (TARGET_64BIT)
	{
	  /* The single lives in the high half of the 64-bit word.  */
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx\n", l & 0xffffffff);
	  return;
	}
    }
  else if (GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;

      /* Split the constant into its low 32 bits and everything above.  */
      if (GET_CODE (x) == CONST_DOUBLE)
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}
      else
#if HOST_BITS_PER_WIDE_INT == 32
	{
	  low = INTVAL (x);
	  /* Sign-extend into HIGH.  */
	  high = (low & 0x80000000) ? ~0 : 0;
	}
#else
	{
	  low = INTVAL (x) & 0xffffffff;
	  high = (HOST_WIDE_INT) INTVAL (x) >> 32;
	}
#endif

      /* TOC entries are always Pmode-sized, but since this
	 is a bigendian machine then if we're putting smaller
	 integer constants in the TOC we have to pad them.
	 (This is still a win over putting the constants in
	 a separate constant pool, because then we'd have
	 to have both a TOC entry _and_ the actual constant.)

	 For a 32-bit target, CONST_INT values are loaded and shifted
	 entirely within `low' and can be stored in one TOC entry.  */

      if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
	abort ();/* It would be easy to make this work, but it doesn't now.  */

      if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
	{
	  /* Left-justify the value within the TOC word.  */
#if HOST_BITS_PER_WIDE_INT == 32
	  lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
			 POINTER_SIZE, &low, &high, 0);
#else
	  low |= high << 32;
	  low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
	  high = (HOST_WIDE_INT) low >> 32;
	  low &= 0xffffffff;
#endif
	}

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc ID_%lx_%lx[TC],",
		     (long) high & 0xffffffff, (long) low & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   (long) high & 0xffffffff, (long) low & 0xffffffff);
	  return;
	}
      else
	{
	  if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
	    {
	      /* Double-word constant: emit both halves.  */
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc ID_%lx_%lx[TC],",
			 (long) high & 0xffffffff, (long) low & 0xffffffff);
	      fprintf (file, "0x%lx,0x%lx\n",
		       (long) high & 0xffffffff, (long) low & 0xffffffff);
	    }
	  else
	    {
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
	      fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
	    }
	  return;
	}
    }

  /* What remains is an address: a symbol, a label, or one of those
     plus a constant offset wrapped in CONST.  */
  if (GET_CODE (x) == CONST)
    {
      if (GET_CODE (XEXP (x, 0)) != PLUS)
	abort ();

      base = XEXP (XEXP (x, 0), 0);
      offset = INTVAL (XEXP (XEXP (x, 0), 1));
    }

  if (GET_CODE (base) == SYMBOL_REF)
    name = XSTR (base, 0);
  else if (GET_CODE (base) == LABEL_REF)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
  else if (GET_CODE (base) == CODE_LABEL)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
  else
    abort ();

  real_name = (*targetm.strip_name_encoding) (name);
  if (TARGET_MINIMAL_TOC)
    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
  else
    {
      /* Encode a nonzero offset into the entry name (".N" for
	 negative, ".P" for positive) so that distinct offsets get
	 distinct TOC entries.  */
      fprintf (file, "\t.tc %s", real_name);

      if (offset < 0)
	fprintf (file, ".N%d", - offset);
      else if (offset)
	fprintf (file, ".P%d", offset);

      fputs ("[TC],", file);
    }

  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a TOC reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  if (VTABLE_NAME_P (name))
    {
      RS6000_OUTPUT_BASENAME (file, name);
      if (offset < 0)
	fprintf (file, "%d", offset);
      else if (offset > 0)
	fprintf (file, "+%d", offset);
    }
  else
    output_addr_const (file, x);
  putc ('\n', file);
}
11813 \f
11814 /* Output an assembler pseudo-op to write an ASCII string of N characters
11815 starting at P to FILE.
11816
11817 On the RS/6000, we have to do this using the .byte operation and
11818 write out special characters outside the quoted string.
11819 Also, the assembler is broken; very long strings are truncated,
11820 so we must artificially break them up early. */
11821
void
output_ascii (file, p, n)
     FILE *file;
     const char *p;
     int n;
{
  char c;
  int i, count_string;
  /* FOR_STRING is the text to emit before the next printable
     character, FOR_DECIMAL the text before the next non-printable
     one, and TO_CLOSE the text that finishes the current line.  They
     are updated as we alternate between quoted-string output and
     decimal .byte output.  COUNT_STRING counts characters in the
     current quoted string so it can be broken before the assembler's
     length limit.  */
  const char *for_string = "\t.byte \"";
  const char *for_decimal = "\t.byte ";
  const char *to_close = NULL;

  count_string = 0;
  for (i = 0; i < n; i++)
    {
      c = *p++;
      if (c >= ' ' && c < 0177)
	{
	  /* Printable: emit inside a quoted string.  */
	  if (for_string)
	    fputs (for_string, file);
	  putc (c, file);

	  /* Write two quotes to get one.  */
	  if (c == '"')
	    {
	      putc (c, file);
	      ++count_string;
	    }

	  for_string = NULL;
	  for_decimal = "\"\n\t.byte ";
	  to_close = "\"\n";
	  ++count_string;

	  /* The assembler truncates very long strings, so close the
	     quoted string early and start a fresh one.  */
	  if (count_string >= 512)
	    {
	      fputs (to_close, file);

	      for_string = "\t.byte \"";
	      for_decimal = "\t.byte ";
	      to_close = NULL;
	      count_string = 0;
	    }
	}
      else
	{
	  /* Non-printable: emit as a decimal .byte value.  */
	  if (for_decimal)
	    fputs (for_decimal, file);
	  fprintf (file, "%d", c);

	  for_string = "\n\t.byte \"";
	  for_decimal = ", ";
	  to_close = "\n";
	  count_string = 0;
	}
    }

  /* Now close the string if we have written one.  Then end the line.  */
  if (to_close)
    fputs (to_close, file);
}
11883 \f
11884 /* Generate a unique section name for FILENAME for a section type
11885 represented by SECTION_DESC. Output goes into BUF.
11886
11887 SECTION_DESC can be any string, as long as it is different for each
11888 possible section type.
11889
11890 We name the section in the same manner as xlc. The name begins with an
11891 underscore followed by the filename (after stripping any leading directory
11892 names) with the last period replaced by the string SECTION_DESC. If
11893 FILENAME does not contain a period, SECTION_DESC is appended to the end of
11894 the name. */
11895
/* Generate a unique section name for FILENAME with section type
   SECTION_DESC, storing a freshly xmalloc'd string in *BUF.  The name
   is "_" followed by the alphanumeric characters of FILENAME's base
   name, with the last period (anywhere in FILENAME) replaced by
   SECTION_DESC, or SECTION_DESC appended if there is no period.  This
   matches the naming scheme used by xlc.  */

void
rs6000_gen_section_name (buf, filename, section_desc)
     char **buf;
     const char *filename;
     const char *section_desc;
{
  const char *scan, *base_name, *last_dot;
  char *out;
  int len;

  /* Locate the start of the base name and the last '.' (scanning the
     whole of FILENAME, directories included).  */
  base_name = filename;
  last_dot = 0;
  for (scan = filename; *scan; scan++)
    {
      if (*scan == '/')
	base_name = scan + 1;
      else if (*scan == '.')
	last_dot = scan;
    }

  len = strlen (base_name) + strlen (section_desc) + 2;
  *buf = (char *) xmalloc (len);

  out = *buf;
  *out++ = '_';

  for (scan = base_name; *scan; scan++)
    {
      if (scan == last_dot)
	{
	  /* Substitute SECTION_DESC for the final period.  */
	  strcpy (out, section_desc);
	  out += strlen (section_desc);
	}
      else if (ISALNUM (*scan))
	*out++ = *scan;
    }

  if (last_dot == 0)
    strcpy (out, section_desc);
  else
    *out = '\0';
}
11938 \f
/* Emit profile function.  Generates the mcount call as RTL for the
   ABIs (AIX, Darwin) that do so; LABELNO identifies the per-function
   counter label.  */

void
output_profile_hook (labelno)
     int labelno ATTRIBUTE_UNUSED;
{
  if (DEFAULT_ABI == ABI_AIX)
    {
#ifdef NO_PROFILE_COUNTERS
      /* No counter word: call mcount with no arguments.  */
      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
#else
      /* Pass the address of the LPxx counter label to mcount.  */
      char buf[30];
      const char *label_name;
      rtx fun;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
      fun = gen_rtx_SYMBOL_REF (Pmode, label_name);

      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
			 fun, Pmode);
#endif
    }
  else if (DEFAULT_ABI == ABI_DARWIN)
    {
      const char *mcount_name = RS6000_MCOUNT;
      int caller_addr_regno = LINK_REGISTER_REGNUM;

      /* Be conservative and always set this, at least for now.  */
      current_function_uses_pic_offset_table = 1;

#if TARGET_MACHO
      /* For PIC code, set up a stub and collect the caller's address
	 from r0, which is where the prologue puts it.  */
      if (flag_pic)
	{
	  mcount_name = machopic_stub_name (mcount_name);
	  if (current_function_uses_pic_offset_table)
	    caller_addr_regno = 0;
	}
#endif
      /* mcount receives the caller's address as its one argument.  */
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
			 0, VOIDmode, 1,
			 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
    }
}
11985
/* Write function profiler code.  Emits literal assembly that calls
   mcount for the V.4 and AIX-nodesc ABIs; AIX and Darwin instead emit
   RTL in output_profile_hook.  LABELNO names the per-function LPxx
   counter label.  */

void
output_function_profiler (file, labelno)
  FILE *file;
  int labelno;
{
  char buf[100];
  int save_lr = 8;	/* Stack offset at which LR is saved (4 on V.4).  */

  ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_V4:
      save_lr = 4;
      /* Fall through.  */

    case ABI_AIX_NODESC:
      if (!TARGET_32BIT)
	{
	  warning ("no profiling of 64-bit code for this ABI");
	  return;
	}
      fprintf (file, "\tmflr %s\n", reg_names[0]);
      if (flag_pic == 1)
	{
	  /* Small-model PIC: load the counter label's address out of
	     the GOT through r12.  */
	  fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
	  asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@got(%s)\n", reg_names[12]);
	}
      else if (flag_pic > 1)
	{
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  /* Now, we need to get the address of the label.  */
	  fputs ("\tbl 1f\n\t.long ", file);
	  assemble_name (file, buf);
	  fputs ("-.\n1:", file);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
	  asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
		       reg_names[0], reg_names[11]);
	  asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
		       reg_names[0], reg_names[0], reg_names[11]);
	}
      else
	{
	  /* Non-PIC: materialize the label's address with a
	     lis/la (high/low) pair.  */
	  asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
	  assemble_name (file, buf);
	  fputs ("@ha\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@l(%s)\n", reg_names[12]);
	}

      if (current_function_needs_context && DEFAULT_ABI == ABI_AIX_NODESC)
	{
	  /* Preserve the static chain register across the call.  */
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[STATIC_CHAIN_REGNUM],
		       12, reg_names[1]);
	  fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	  asm_fprintf (file, "\t{l|lwz} %s,%d(%s)\n",
		       reg_names[STATIC_CHAIN_REGNUM],
		       12, reg_names[1]);
	}
      else
	/* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH.  */
	fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
      break;

    case ABI_AIX:
    case ABI_DARWIN:
      /* Don't do anything, done in output_profile_hook ().  */
      break;
    }
}
12070
12071 /* Adjust the cost of a scheduling dependency. Return the new cost of
12072 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
12073
12074 static int
12075 rs6000_adjust_cost (insn, link, dep_insn, cost)
12076 rtx insn;
12077 rtx link;
12078 rtx dep_insn ATTRIBUTE_UNUSED;
12079 int cost;
12080 {
12081 if (! recog_memoized (insn))
12082 return 0;
12083
12084 if (REG_NOTE_KIND (link) != 0)
12085 return 0;
12086
12087 if (REG_NOTE_KIND (link) == 0)
12088 {
12089 /* Data dependency; DEP_INSN writes a register that INSN reads
12090 some cycles later. */
12091 switch (get_attr_type (insn))
12092 {
12093 case TYPE_JMPREG:
12094 /* Tell the first scheduling pass about the latency between
12095 a mtctr and bctr (and mtlr and br/blr). The first
12096 scheduling pass will not know about this latency since
12097 the mtctr instruction, which has the latency associated
12098 to it, will be generated by reload. */
12099 return TARGET_POWER ? 5 : 4;
12100 case TYPE_BRANCH:
12101 /* Leave some extra cycles between a compare and its
12102 dependent branch, to inhibit expensive mispredicts. */
12103 if ((rs6000_cpu_attr == CPU_PPC603
12104 || rs6000_cpu_attr == CPU_PPC604
12105 || rs6000_cpu_attr == CPU_PPC604E
12106 || rs6000_cpu_attr == CPU_PPC620
12107 || rs6000_cpu_attr == CPU_PPC630
12108 || rs6000_cpu_attr == CPU_PPC750
12109 || rs6000_cpu_attr == CPU_PPC7400
12110 || rs6000_cpu_attr == CPU_PPC7450
12111 || rs6000_cpu_attr == CPU_POWER4)
12112 && recog_memoized (dep_insn)
12113 && (INSN_CODE (dep_insn) >= 0)
12114 && (get_attr_type (dep_insn) == TYPE_COMPARE
12115 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
12116 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
12117 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL))
12118 return cost + 2;
12119 default:
12120 break;
12121 }
12122 /* Fall out to return default cost. */
12123 }
12124
12125 return cost;
12126 }
12127
/* A C statement (sans semicolon) to update the integer scheduling
   priority INSN_PRIORITY (INSN).  Reduce the priority to execute the
   INSN earlier, increase the priority to execute INSN later.  Do not
   define this macro if you do not need to adjust the scheduling
   priorities of insns.  */

static int
rs6000_adjust_priority (insn, priority)
     rtx insn ATTRIBUTE_UNUSED;
     int priority;
{
  /* On machines (like the 750) which have asymmetric integer units,
     where one integer unit can do multiply and divides and the other
     can't, reduce the priority of multiply/divide so it is scheduled
     before other integer operations.  */

  /* NOTE: the adjustment below is compiled out, so this hook
     currently returns the priority unchanged.  */
#if 0
  if (! INSN_P (insn))
    return priority;

  if (GET_CODE (PATTERN (insn)) == USE)
    return priority;

  switch (rs6000_cpu_attr) {
  case CPU_PPC750:
    switch (get_attr_type (insn))
      {
      default:
	break;

      case TYPE_IMUL:
      case TYPE_IDIV:
	fprintf (stderr, "priority was %#x (%d) before adjustment\n",
		 priority, priority);
	if (priority >= 0 && priority < 0x01000000)
	  priority >>= 3;
	break;
      }
  }
#endif

  return priority;
}
12171
12172 /* Return how many instructions the machine can issue per cycle. */
12173
12174 static int
12175 rs6000_issue_rate ()
12176 {
12177 switch (rs6000_cpu_attr) {
12178 case CPU_RIOS1: /* ? */
12179 case CPU_RS64A:
12180 case CPU_PPC601: /* ? */
12181 case CPU_PPC7450:
12182 return 3;
12183 case CPU_PPC603:
12184 case CPU_PPC750:
12185 case CPU_PPC7400:
12186 return 2;
12187 case CPU_RIOS2:
12188 case CPU_PPC604:
12189 case CPU_PPC604E:
12190 case CPU_PPC620:
12191 case CPU_PPC630:
12192 case CPU_POWER4:
12193 return 4;
12194 default:
12195 return 1;
12196 }
12197 }
12198
12199 \f
12200 /* Length in units of the trampoline for entering a nested function. */
12201
12202 int
12203 rs6000_trampoline_size ()
12204 {
12205 int ret = 0;
12206
12207 switch (DEFAULT_ABI)
12208 {
12209 default:
12210 abort ();
12211
12212 case ABI_AIX:
12213 ret = (TARGET_32BIT) ? 12 : 24;
12214 break;
12215
12216 case ABI_DARWIN:
12217 case ABI_V4:
12218 case ABI_AIX_NODESC:
12219 ret = (TARGET_32BIT) ? 40 : 48;
12220 break;
12221 }
12222
12223 return ret;
12224 }
12225
12226 /* Emit RTL insns to initialize the variable parts of a trampoline.
12227 FNADDR is an RTX for the address of the function's pure code.
12228 CXT is an RTX for the static chain value for the function. */
12229
void
rs6000_initialize_trampoline (addr, fnaddr, cxt)
     rtx addr;
     rtx fnaddr;
     rtx cxt;
{
  enum machine_mode pmode = Pmode;
  int regsize = (TARGET_32BIT) ? 4 : 8;	/* Bytes per pointer word.  */
  rtx ctx_reg = force_reg (pmode, cxt);

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

/* Macros to shorten the code expansions below.  */
#define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
#define MEM_PLUS(addr,offset) \
  gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))

    /* Under AIX, just build the 3 word function descriptor */
    case ABI_AIX:
      {
	/* Copy the target function's descriptor (entry address in
	   word 0, second word at REGSIZE) into the trampoline and
	   append the static chain as the third word.  */
	rtx fn_reg = gen_reg_rtx (pmode);
	rtx toc_reg = gen_reg_rtx (pmode);
	emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
	emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
	emit_move_insn (MEM_DEREF (addr), fn_reg);
	emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
	emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
      }
      break;

    /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
    case ABI_DARWIN:
    case ABI_V4:
    case ABI_AIX_NODESC:
      emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
			 FALSE, VOIDmode, 4,
			 addr, pmode,
			 GEN_INT (rs6000_trampoline_size ()), SImode,
			 fnaddr, pmode,
			 ctx_reg, pmode);
      break;
    }

  return;
}
12278
12279 \f
/* Table of valid machine attributes.  "longcall" and "shortcall" may
   be applied to function types only (decl_req false, type_req and
   fn_type_req true) and share a single validation handler.  */

const struct attribute_spec rs6000_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
  /* Sentinel terminating the table.  */
  { NULL, 0, 0, false, false, false, NULL }
};
12289
12290 /* Handle a "longcall" or "shortcall" attribute; arguments as in
12291 struct attribute_spec.handler. */
12292
12293 static tree
12294 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
12295 tree *node;
12296 tree name;
12297 tree args ATTRIBUTE_UNUSED;
12298 int flags ATTRIBUTE_UNUSED;
12299 bool *no_add_attrs;
12300 {
12301 if (TREE_CODE (*node) != FUNCTION_TYPE
12302 && TREE_CODE (*node) != FIELD_DECL
12303 && TREE_CODE (*node) != TYPE_DECL)
12304 {
12305 warning ("`%s' attribute only applies to functions",
12306 IDENTIFIER_POINTER (name));
12307 *no_add_attrs = true;
12308 }
12309
12310 return NULL_TREE;
12311 }
12312
12313 /* Set longcall attributes on all functions declared when
12314 rs6000_default_long_calls is true. */
12315 static void
12316 rs6000_set_default_type_attributes (type)
12317 tree type;
12318 {
12319 if (rs6000_default_long_calls
12320 && (TREE_CODE (type) == FUNCTION_TYPE
12321 || TREE_CODE (type) == METHOD_TYPE))
12322 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
12323 NULL_TREE,
12324 TYPE_ATTRIBUTES (type));
12325 }
12326
12327 /* Return a reference suitable for calling a function with the
12328 longcall attribute. */
12329
12330 struct rtx_def *
12331 rs6000_longcall_ref (call_ref)
12332 rtx call_ref;
12333 {
12334 const char *call_name;
12335 tree node;
12336
12337 if (GET_CODE (call_ref) != SYMBOL_REF)
12338 return call_ref;
12339
12340 /* System V adds '.' to the internal name, so skip them. */
12341 call_name = XSTR (call_ref, 0);
12342 if (*call_name == '.')
12343 {
12344 while (*call_name == '.')
12345 call_name++;
12346
12347 node = get_identifier (call_name);
12348 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
12349 }
12350
12351 return force_reg (Pmode, call_ref);
12352 }
12353
12354 \f
12355 #ifdef USING_ELFOS_H
12356
12357 /* A C statement or statements to switch to the appropriate section
12358 for output of RTX in mode MODE. You can assume that RTX is some
12359 kind of constant in RTL. The argument MODE is redundant except in
12360 the case of a `const_int' rtx. Select the section by calling
12361 `text_section' or one of the alternatives for other sections.
12362
12363 Do not define this macro if you put all constants in the read-only
12364 data section. */
12365
12366 static void
12367 rs6000_elf_select_rtx_section (mode, x, align)
12368 enum machine_mode mode;
12369 rtx x;
12370 unsigned HOST_WIDE_INT align;
12371 {
12372 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
12373 toc_section ();
12374 else
12375 default_elf_select_rtx_section (mode, x, align);
12376 }
12377
12378 /* A C statement or statements to switch to the appropriate
12379 section for output of DECL. DECL is either a `VAR_DECL' node
12380 or a constant of some sort. RELOC indicates whether forming
12381 the initial value of DECL requires link-time relocations. */
12382
12383 static void
12384 rs6000_elf_select_section (decl, reloc, align)
12385 tree decl;
12386 int reloc;
12387 unsigned HOST_WIDE_INT align;
12388 {
12389 default_elf_select_section_1 (decl, reloc, align,
12390 flag_pic || DEFAULT_ABI == ABI_AIX);
12391 }
12392
12393 /* A C statement to build up a unique section name, expressed as a
12394 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
12395 RELOC indicates whether the initial value of EXP requires
12396 link-time relocations. If you do not define this macro, GCC will use
12397 the symbol name prefixed by `.' as the section name. Note - this
12398 macro can now be called for uninitialized data items as well as
12399 initialised data and functions. */
12400
12401 static void
12402 rs6000_elf_unique_section (decl, reloc)
12403 tree decl;
12404 int reloc;
12405 {
12406 default_unique_section_1 (decl, reloc,
12407 flag_pic || DEFAULT_ABI == ABI_AIX);
12408 }
12409
12410 \f
/* If we are referencing a function that is static or is known to be
   in this file, make the SYMBOL_REF special.  We can use this to indicate
   that we can branch to this function without emitting a no-op after the
   call.  For real AIX calling sequences, we also replace the
   function name with the real name (1 or 2 leading .'s), rather than
   the function descriptor name.  This saves a lot of overriding code
   to read the prefixes.  */

static void
rs6000_elf_encode_section_info (decl, first)
     tree decl;
     int first;
{
  /* Only encode on the first pass over this decl.  */
  if (!first)
    return;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      rtx sym_ref = XEXP (DECL_RTL (decl), 0);
      /* Functions defined in this unit, or private to it (and not
	 weak), can be branched to directly.  */
      if ((TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
	  && ! DECL_WEAK (decl))
	SYMBOL_REF_FLAG (sym_ref) = 1;

      if (DEFAULT_ABI == ABI_AIX)
	{
	  /* NOTE(review): len1 is always 1 here since we are inside
	     an ABI_AIX test, so only one leading dot is kept and the
	     str[1] = '.' store is overwritten by the memcpy; the
	     two-dot arm of the ternary appears to be dead.  */
	  size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
	  size_t len2 = strlen (XSTR (sym_ref, 0));
	  char *str = alloca (len1 + len2 + 1);
	  str[0] = '.';
	  str[1] = '.';
	  memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);

	  XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
	}
    }
  else if (rs6000_sdata != SDATA_NONE
	   && DEFAULT_ABI == ABI_V4
	   && TREE_CODE (decl) == VAR_DECL)
    {
      /* Decide whether this variable belongs in small data: either it
	 is no larger than g_switch_value, or it was explicitly placed
	 in one of the recognized small-data sections.  */
      int size = int_size_in_bytes (TREE_TYPE (decl));
      tree section_name = DECL_SECTION_NAME (decl);
      const char *name = (char *)0;
      int len = 0;

      if (section_name)
	{
	  if (TREE_CODE (section_name) == STRING_CST)
	    {
	      name = TREE_STRING_POINTER (section_name);
	      len = TREE_STRING_LENGTH (section_name);
	    }
	  else
	    abort ();
	}

      if ((size > 0 && size <= g_switch_value)
	  || (name
	      && ((len == sizeof (".sdata") - 1
		   && strcmp (name, ".sdata") == 0)
		  || (len == sizeof (".sdata2") - 1
		      && strcmp (name, ".sdata2") == 0)
		  || (len == sizeof (".sbss") - 1
		      && strcmp (name, ".sbss") == 0)
		  || (len == sizeof (".sbss2") - 1
		      && strcmp (name, ".sbss2") == 0)
		  || (len == sizeof (".PPC.EMB.sdata0") - 1
		      && strcmp (name, ".PPC.EMB.sdata0") == 0)
		  || (len == sizeof (".PPC.EMB.sbss0") - 1
		      && strcmp (name, ".PPC.EMB.sbss0") == 0))))
	{
	  /* Flag the symbol as small data by prefixing '@' to its
	     name; rs6000_elf_strip_name_encoding removes it again.  */
	  rtx sym_ref = XEXP (DECL_RTL (decl), 0);
	  size_t len = strlen (XSTR (sym_ref, 0));
	  char *str = alloca (len + 2);

	  str[0] = '@';
	  memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
	  XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
	}
    }
}
12491
/* Strip any leading '*' or '@' encoding characters from STR and
   return a pointer to the bare symbol name.  */

static const char *
rs6000_elf_strip_name_encoding (str)
     const char *str;
{
  for (; *str == '*' || *str == '@'; str++)
    ;
  return str;
}
12500
/* Return true if DECL should be treated as small data.  */

static bool
rs6000_elf_in_small_data_p (decl)
     tree decl;
{
  if (rs6000_sdata == SDATA_NONE)
    return false;

  /* Variables explicitly placed in a small-data section qualify.  */
  if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
    {
      const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
      if (strcmp (section, ".sdata") == 0
	  || strcmp (section, ".sdata2") == 0
	  || strcmp (section, ".sbss") == 0)
	return true;
    }
  else
    {
      /* Otherwise qualify by size against the small-data threshold
	 (g_switch_value), subject to the sdata mode.  */
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));

      if (size > 0
	  && size <= g_switch_value
	  && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
	return true;
    }

  return false;
}
12528
12529 #endif /* USING_ELFOS_H */
12530
12531 \f
12532 /* Return a REG that occurs in ADDR with coefficient 1.
12533 ADDR can be effectively incremented by incrementing REG.
12534
12535 r0 is special and we must not select it as an address
12536 register by this routine since our caller will try to
12537 increment the returned register via an "la" instruction. */
12538
12539 struct rtx_def *
12540 find_addr_reg (addr)
12541 rtx addr;
12542 {
12543 while (GET_CODE (addr) == PLUS)
12544 {
12545 if (GET_CODE (XEXP (addr, 0)) == REG
12546 && REGNO (XEXP (addr, 0)) != 0)
12547 addr = XEXP (addr, 0);
12548 else if (GET_CODE (XEXP (addr, 1)) == REG
12549 && REGNO (XEXP (addr, 1)) != 0)
12550 addr = XEXP (addr, 1);
12551 else if (CONSTANT_P (XEXP (addr, 0)))
12552 addr = XEXP (addr, 1);
12553 else if (CONSTANT_P (XEXP (addr, 1)))
12554 addr = XEXP (addr, 0);
12555 else
12556 abort ();
12557 }
12558 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
12559 return addr;
12560 abort ();
12561 }
12562
/* Report OP as an invalid address and terminate compilation via
   fatal_insn.  Does not return.  */

void
rs6000_fatal_bad_address (op)
     rtx op;
{
  fatal_insn ("bad address", op);
}
12569
/* Called to register all of our global variables with the garbage
   collector.  */

static void
rs6000_add_gc_roots ()
{
  /* Create the TOC hash table and register it as a GC root with a
     custom mark routine so live entries (and their RTL keys) survive
     collection.  */
  toc_hash_table = htab_create (1021, toc_hash_function, toc_hash_eq, NULL);
  ggc_add_root (&toc_hash_table, 1, sizeof (toc_hash_table), 
		toc_hash_mark_table);
}
12580
12581 #if TARGET_MACHO
12582
12583 #if 0
12584 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
12585 reference and a constant. */
12586
12587 int
12588 symbolic_operand (op)
12589 rtx op;
12590 {
12591 switch (GET_CODE (op))
12592 {
12593 case SYMBOL_REF:
12594 case LABEL_REF:
12595 return 1;
12596 case CONST:
12597 op = XEXP (op, 0);
12598 return (GET_CODE (op) == SYMBOL_REF ||
12599 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
12600 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
12601 && GET_CODE (XEXP (op, 1)) == CONST_INT);
12602 default:
12603 return 0;
12604 }
12605 }
12606 #endif
12607
12608 #ifdef RS6000_LONG_BRANCH
12609
12610 static tree stub_list = 0;
12611
12612 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
12613 procedure calls to the linked list. */
12614
12615 void
12616 add_compiler_stub (label_name, function_name, line_number)
12617 tree label_name;
12618 tree function_name;
12619 int line_number;
12620 {
12621 tree stub = build_tree_list (function_name, label_name);
12622 TREE_TYPE (stub) = build_int_2 (line_number, 0);
12623 TREE_CHAIN (stub) = stub_list;
12624 stub_list = stub;
12625 }
12626
12627 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
12628 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
12629 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
12630
12631 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
12632 handling procedure calls from the linked list and initializes the
12633 linked list. */
12634
12635 void
12636 output_compiler_stub ()
12637 {
12638 char tmp_buf[256];
12639 char label_buf[256];
12640 tree stub;
12641
12642 if (!flag_pic)
12643 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
12644 {
12645 fprintf (asm_out_file,
12646 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
12647
12648 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
12649 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
12650 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
12651 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
12652
12653 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
12654 strcpy (label_buf,
12655 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
12656 else
12657 {
12658 label_buf[0] = '_';
12659 strcpy (label_buf+1,
12660 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
12661 }
12662
12663 strcpy (tmp_buf, "lis r12,hi16(");
12664 strcat (tmp_buf, label_buf);
12665 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
12666 strcat (tmp_buf, label_buf);
12667 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
12668 output_asm_insn (tmp_buf, 0);
12669
12670 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
12671 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
12672 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
12673 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
12674 }
12675
12676 stub_list = 0;
12677 }
12678
12679 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
12680 already there or not. */
12681
12682 int
12683 no_previous_def (function_name)
12684 tree function_name;
12685 {
12686 tree stub;
12687 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
12688 if (function_name == STUB_FUNCTION_NAME (stub))
12689 return 0;
12690 return 1;
12691 }
12692
12693 /* GET_PREV_LABEL gets the label name from the previous definition of
12694 the function. */
12695
12696 tree
12697 get_prev_label (function_name)
12698 tree function_name;
12699 {
12700 tree stub;
12701 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
12702 if (function_name == STUB_FUNCTION_NAME (stub))
12703 return STUB_LABEL_NAME (stub);
12704 return 0;
12705 }
12706
/* INSN is either a function call or a millicode call.  It may have an
   unconditional jump in its delay slot.

   CALL_DEST is the routine we are calling.  OPERAND_NUMBER is the
   operand index interpolated into the returned template as "%zN".
   Returns a pointer to a static buffer, overwritten on each call.  */

char *
output_call (insn, call_dest, operand_number)
     rtx insn;
     rtx call_dest;
     int operand_number;
{
  static char buf[256];
  /* Direct calls to a known symbol under -mlong-branch (non-PIC) are
     routed through a compiler-generated stub; everything else is a
     plain "bl".  */
  if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
    {
      tree labelname;
      tree funname = get_identifier (XSTR (call_dest, 0));

      if (no_previous_def (funname))
        {
          /* First call to this function: make a fresh internal label
             and record it on stub_list for output_compiler_stub.  */
          int line_number = 0;
          rtx label_rtx = gen_label_rtx ();
          char *label_buf, temp_buf[256];
          ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
                                       CODE_LABEL_NUMBER (label_rtx));
          /* Drop a leading '*' ("emit verbatim") marker, if any.  */
          label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
          labelname = get_identifier (label_buf);
          /* Walk back to the nearest NOTE to harvest a source line
             number for the stub's debug stab.  */
          for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
          if (insn)
            line_number = NOTE_LINE_NUMBER (insn);
          add_compiler_stub (labelname, funname, line_number);
        }
      else
        labelname = get_prev_label (funname);

      /* %.246s bounds the label text so the template fits buf[256].  */
      sprintf (buf, "jbsr %%z%d,%.246s",
               operand_number, IDENTIFIER_POINTER (labelname));
      return buf;
    }
  else
    {
      sprintf (buf, "bl %%z%d", operand_number);
      return buf;
    }
}
12751
12752 #endif /* RS6000_LONG_BRANCH */
12753
/* Write into BUF a Mach-O local label of the form "LN$SYMBOL",
   carrying over any quoting: a symbol already starting with '"' keeps
   its opening quote ahead of the prefix; a symbol that
   name_needs_quotes() gets fully quoted.  Callers size BUF as
   strlen (SYMBOL) + 32, which must cover the added decoration.  */
#define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N)		\
  do {								\
    const char *const symbol_ = (SYMBOL);			\
    char *buffer_ = (BUF);					\
    if (symbol_[0] == '"')					\
      {								\
        sprintf(buffer_, "\"L%d$%s", (N), symbol_+1);		\
      }								\
    else if (name_needs_quotes(symbol_))			\
      {								\
        sprintf(buffer_, "\"L%d$%s\"", (N), symbol_);		\
      }								\
    else							\
      {								\
        sprintf(buffer_, "L%d$%s", (N), symbol_);		\
      }								\
  } while (0)
12771
12772
/* Generate PIC and indirect symbol stubs.

   FILE is the assembly output stream, SYMB the (encoded) target
   symbol, STUB the name of the stub to define.  Emits the stub code
   followed by its lazy pointer word.  */

void
machopic_output_stub (file, symb, stub)
     FILE *file;
     const char *symb, *stub;
{
  unsigned int length;
  char *symbol_name, *lazy_ptr_name;
  char *local_label_0;
  static int label = 0;  /* NOTE(review): incremented below but not
                            otherwise used here -- confirm intent.  */

  /* Lose our funky encoding stuff so it doesn't contaminate the stub.  */
  symb = (*targetm.strip_name_encoding) (symb);

  label += 1;

  /* Each name buffer gets length + 32 bytes: room for the symbol plus
     the decoration the GEN_*_FOR_SYMBOL macros prepend/append.  */
  length = strlen (symb);
  symbol_name = alloca (length + 32);
  GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);

  lazy_ptr_name = alloca (length + 32);
  GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);

  local_label_0 = alloca (length + 32);
  GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);

  if (flag_pic == 2)
    machopic_picsymbol_stub_section ();
  else
    machopic_symbol_stub_section ();

  fprintf (file, "%s:\n", stub);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

  if (flag_pic == 2)
    {
      /* Fully PIC stub: bcl obtains the PC in r11, the lazy pointer's
         address is formed relative to it (ha16/lo16 pair), the target
         is loaded from it and reached via the count register.  LR is
         saved in r0 and restored so the call's return address
         survives the bcl.  */
      fprintf (file, "\tmflr r0\n");
      fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
      fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
      fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
               lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtlr r0\n");
      fprintf (file, "\tlwz r12,lo16(%s-%s)(r11)\n",
               lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\taddi r11,r11,lo16(%s-%s)\n",
               lazy_ptr_name, local_label_0);
      fprintf (file, "\tbctr\n");
    }
  else
    fprintf (file, "non-pure not supported\n");

  /* Emit the lazy pointer: a word initially holding
     dyld_stub_binding_helper (presumably patched to the real address
     on first use by the dynamic linker).  */
  machopic_lazy_symbol_ptr_section ();
  fprintf (file, "%s:\n", lazy_ptr_name);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
  fprintf (file, "\t.long dyld_stub_binding_helper\n");
}
12831
/* Legitimize PIC addresses.  If the address is already
   position-independent, we return ORIG.  Newly generated
   position-independent addresses go into a reg.  This is REG if non
   zero, otherwise we allocate register(s) as necessary.  */

/* True iff the CONST_INT X fits a signed 16-bit immediate.  */
#define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)

rtx
rs6000_machopic_legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  rtx base, offset;

  /* Allocate a scratch pseudo unless reload is active, when no new
     pseudos may be created.  */
  if (reg == NULL && ! reload_in_progress && ! reload_completed)
    reg = gen_reg_rtx (Pmode);

  if (GET_CODE (orig) == CONST)
    {
      /* (const (plus pic_offset_table_rtx ...)) is already PIC.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
          && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
        return orig;

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
        {
          /* Legitimize both halves of the sum recursively, reusing
             REG for each.  */
          base =
            rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
                                                    Pmode, reg);
          offset =
            rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
                                                    Pmode, reg);
        }
      else
        abort ();

      if (GET_CODE (offset) == CONST_INT)
        {
          if (SMALL_INT (offset))
            return plus_constant (base, INTVAL (offset));
          else if (! reload_in_progress && ! reload_completed)
            offset = force_reg (Pmode, offset);
          else
            {
              /* During reload the large offset cannot go in a fresh
                 register, so spill the whole constant to memory and
                 legitimize the memory reference instead.  */
              rtx mem = force_const_mem (Pmode, orig);
              return machopic_legitimize_pic_address (mem, Pmode, reg);
            }
        }
      return gen_rtx (PLUS, Pmode, base, offset);
    }

  /* Fall back on generic machopic code.  */
  return machopic_legitimize_pic_address (orig, mode, reg);
}
12886
/* This is just a placeholder to make linking work without having to
   add this to the generic Darwin EXTRA_SECTIONS.  If -mcall-aix is
   ever needed for Darwin (not too likely!) this would have to get a
   real definition.  */

void
toc_section ()
{
  /* Intentionally empty -- see the comment above.  */
}
12896
12897 #endif /* TARGET_MACHO */
12898
12899 #if TARGET_ELF
12900 static unsigned int
12901 rs6000_elf_section_type_flags (decl, name, reloc)
12902 tree decl;
12903 const char *name;
12904 int reloc;
12905 {
12906 unsigned int flags
12907 = default_section_type_flags_1 (decl, name, reloc,
12908 flag_pic || DEFAULT_ABI == ABI_AIX);
12909
12910 if (TARGET_RELOCATABLE)
12911 flags |= SECTION_WRITE;
12912
12913 return flags;
12914 }
12915
12916 /* Record an element in the table of global constructors. SYMBOL is
12917 a SYMBOL_REF of the function to be called; PRIORITY is a number
12918 between 0 and MAX_INIT_PRIORITY.
12919
12920 This differs from default_named_section_asm_out_constructor in
12921 that we have special handling for -mrelocatable. */
12922
12923 static void
12924 rs6000_elf_asm_out_constructor (symbol, priority)
12925 rtx symbol;
12926 int priority;
12927 {
12928 const char *section = ".ctors";
12929 char buf[16];
12930
12931 if (priority != DEFAULT_INIT_PRIORITY)
12932 {
12933 sprintf (buf, ".ctors.%.5u",
12934 /* Invert the numbering so the linker puts us in the proper
12935 order; constructors are run from right to left, and the
12936 linker sorts in increasing order. */
12937 MAX_INIT_PRIORITY - priority);
12938 section = buf;
12939 }
12940
12941 named_section_flags (section, SECTION_WRITE);
12942 assemble_align (POINTER_SIZE);
12943
12944 if (TARGET_RELOCATABLE)
12945 {
12946 fputs ("\t.long (", asm_out_file);
12947 output_addr_const (asm_out_file, symbol);
12948 fputs (")@fixup\n", asm_out_file);
12949 }
12950 else
12951 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
12952 }
12953
/* Record an element in the table of global destructors; mirrors
   rs6000_elf_asm_out_constructor above but writes ".dtors" sections,
   including the same -mrelocatable @fixup handling.  */
static void
rs6000_elf_asm_out_destructor (symbol, priority)
     rtx symbol;
     int priority;
{
  const char *section = ".dtors";
  char buf[16];

  if (priority != DEFAULT_INIT_PRIORITY)
    {
      sprintf (buf, ".dtors.%.5u",
	       /* Invert the numbering so the linker puts us in the proper
		  order; constructors are run from right to left, and the
		  linker sorts in increasing order.  NOTE(review): wording
		  inherited from the .ctors case -- confirm it should read
		  "destructors" here.  */
	       MAX_INIT_PRIORITY - priority);
      section = buf;
    }

  named_section_flags (section, SECTION_WRITE);
  assemble_align (POINTER_SIZE);

  if (TARGET_RELOCATABLE)
    {
      fputs ("\t.long (", asm_out_file);
      output_addr_const (asm_out_file, symbol);
      fputs (")@fixup\n", asm_out_file);
    }
  else
    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
}
12984 #endif
12985
12986 #if TARGET_XCOFF
12987 static void
12988 rs6000_xcoff_asm_globalize_label (stream, name)
12989 FILE *stream;
12990 const char *name;
12991 {
12992 fputs (GLOBAL_ASM_OP, stream);
12993 RS6000_OUTPUT_BASENAME (stream, name);
12994 putc ('\n', stream);
12995 }
12996
12997 static void
12998 rs6000_xcoff_asm_named_section (name, flags)
12999 const char *name;
13000 unsigned int flags;
13001 {
13002 int smclass;
13003 static const char * const suffix[3] = { "PR", "RO", "RW" };
13004
13005 if (flags & SECTION_CODE)
13006 smclass = 0;
13007 else if (flags & SECTION_WRITE)
13008 smclass = 2;
13009 else
13010 smclass = 1;
13011
13012 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
13013 (flags & SECTION_CODE) ? "." : "",
13014 name, suffix[smclass], flags & SECTION_ENTSIZE);
13015 }
13016
13017 static void
13018 rs6000_xcoff_select_section (decl, reloc, align)
13019 tree decl;
13020 int reloc;
13021 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
13022 {
13023 if (decl_readonly_section_1 (decl, reloc, 1))
13024 {
13025 if (TREE_PUBLIC (decl))
13026 read_only_data_section ();
13027 else
13028 read_only_private_data_section ();
13029 }
13030 else
13031 {
13032 if (TREE_PUBLIC (decl))
13033 data_section ();
13034 else
13035 private_data_section ();
13036 }
13037 }
13038
13039 static void
13040 rs6000_xcoff_unique_section (decl, reloc)
13041 tree decl;
13042 int reloc ATTRIBUTE_UNUSED;
13043 {
13044 const char *name;
13045
13046 /* Use select_section for private and uninitialized data. */
13047 if (!TREE_PUBLIC (decl)
13048 || DECL_COMMON (decl)
13049 || DECL_INITIAL (decl) == NULL_TREE
13050 || DECL_INITIAL (decl) == error_mark_node
13051 || (flag_zero_initialized_in_bss
13052 && initializer_zerop (DECL_INITIAL (decl))))
13053 return;
13054
13055 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
13056 name = (*targetm.strip_name_encoding) (name);
13057 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
13058 }
13059
13060 /* Select section for constant in constant pool.
13061
13062 On RS/6000, all constants are in the private read-only data area.
13063 However, if this is being placed in the TOC it must be output as a
13064 toc entry. */
13065
13066 static void
13067 rs6000_xcoff_select_rtx_section (mode, x, align)
13068 enum machine_mode mode;
13069 rtx x;
13070 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
13071 {
13072 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
13073 toc_section ();
13074 else
13075 read_only_private_data_section ();
13076 }
13077
/* Remove any trailing [DS] or the like from the symbol name.  Also
   skips a leading '*'.  Returns NAME itself when nothing is stripped,
   otherwise a fresh GC-allocated copy without the suffix.  */

static const char *
rs6000_xcoff_strip_name_encoding (name)
     const char *name;
{
  size_t len;

  if (*name == '*')
    name++;
  len = strlen (name);
  /* The suffix stripped is always the four characters "[XX]".  Guard
     LEN so that a degenerate name (empty after '*', or shorter than
     the suffix) neither reads name[-1] nor underflows len - 4, both
     of which the old unconditional test allowed.  */
  if (len >= 4 && name[len - 1] == ']')
    return ggc_alloc_string (name, len - 4);
  return name;
}
13093
13094 /* Section attributes. AIX is always PIC. */
13095
13096 static unsigned int
13097 rs6000_xcoff_section_type_flags (decl, name, reloc)
13098 tree decl;
13099 const char *name;
13100 int reloc;
13101 {
13102 unsigned int align;
13103 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
13104
13105 /* Align to at least UNIT size. */
13106 if (flags & SECTION_CODE)
13107 align = MIN_UNITS_PER_WORD;
13108 else
13109 /* Increase alignment of large objects if not already stricter. */
13110 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
13111 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
13112 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
13113
13114 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
13115 }
13116
13117 #endif /* TARGET_XCOFF */
13118
13119 /* Note that this is also used for PPC64 Linux. */
13120
13121 static void
13122 rs6000_xcoff_encode_section_info (decl, first)
13123 tree decl;
13124 int first ATTRIBUTE_UNUSED;
13125 {
13126 if (TREE_CODE (decl) == FUNCTION_DECL
13127 && (TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
13128 && ! DECL_WEAK (decl))
13129 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
13130 }
13131
13132 /* Cross-module name binding. For AIX and PPC64 Linux, which always are
13133 PIC, use private copy of flag_pic. */
13134
13135 static bool
13136 rs6000_binds_local_p (decl)
13137 tree decl;
13138 {
13139 return default_binds_local_p_1 (decl, flag_pic || rs6000_flag_pic);
13140 }
This page took 0.627606 seconds and 6 git commands to generate.