]> gcc.gnu.org Git - gcc.git/blob - gcc/config/rs6000/rs6000.c
ChangeLog.4: Fix typos.
[gcc.git] / gcc / config / rs6000 / rs6000.c
1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
5
6 This file is part of GNU CC.
7
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
12
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "rtl.h"
26 #include "regs.h"
27 #include "hard-reg-set.h"
28 #include "real.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "flags.h"
33 #include "recog.h"
34 #include "obstack.h"
35 #include "tree.h"
36 #include "expr.h"
37 #include "optabs.h"
38 #include "except.h"
39 #include "function.h"
40 #include "output.h"
41 #include "basic-block.h"
42 #include "integrate.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "hashtab.h"
46 #include "tm_p.h"
47 #include "target.h"
48 #include "target-def.h"
49 #include "langhooks.h"
50 #include "reload.h"
51
/* Default TARGET_NO_PROTOTYPE to 0 when the subtarget headers did not
   define it.  */
#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

/* NOTE: both arguments are evaluated twice -- do not pass expressions
   with side effects.  */
#define min(A,B)	((A) < (B) ? (A) : (B))
#define max(A,B)	((A) > (B) ? (A) : (B))
58
/* Target cpu type */

enum processor_type rs6000_cpu;

/* CPU selection switches, consulted in order by rs6000_override_options;
   later entries override earlier ones for the fields (tune and/or arch)
   they are allowed to set.  */
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch		name,			tune	arch */
  { (const char *)0,	"--with-cpu=",		1,	1 },
  { (const char *)0,	"-mcpu=",		1,	1 },
  { (const char *)0,	"-mtune=",		1,	0 },
};

/* Size of long double */
const char *rs6000_long_double_size_string;
int rs6000_long_double_type_size;

/* Whether -mabi=altivec has appeared */
int rs6000_altivec_abi;

/* Whether VRSAVE instructions should be generated.  */
int rs6000_altivec_vrsave;

/* String from -mvrsave= option.  */
const char *rs6000_altivec_vrsave_string;

/* Nonzero if we want SPE ABI extensions.  */
int rs6000_spe_abi;

/* Whether isel instructions should be generated.  */
int rs6000_isel;

/* Nonzero if we have FPRs.  */
int rs6000_fprs = 1;

/* String from -misel=.  */
const char *rs6000_isel_string;

/* Set to nonzero once AIX common-mode calls have been defined.  */
static int common_mode_defined;

/* Private copy of original value of flag_pic for ABI_AIX.  */
static int rs6000_flag_pic;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx rs6000_compare_op0, rs6000_compare_op1;
int rs6000_compare_fp_p;

/* Label number of label created for -mrelocatable, to call to so we can
   get the address of the GOT section */
int rs6000_pic_labelno;

#ifdef USING_ELFOS_H
/* Which abi to adhere to */
const char *rs6000_abi_name = RS6000_ABI_NAME;

/* Semantics of the small data area */
enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;

/* Which small data model to use */
const char *rs6000_sdata_name = (char *)0;

/* Counter for labels which are to be placed in .fixup.  */
int fixuplabelno = 0;
#endif

/* ABI enumeration available for subtarget to use.  */
enum rs6000_abi rs6000_current_abi;

/* ABI string from -mabi= option.  */
const char *rs6000_abi_string;

/* Debug flags */
const char *rs6000_debug_name;
int rs6000_debug_stack;		/* debug stack applications */
int rs6000_debug_arg;		/* debug argument handling */

/* Traceback-table emission, from -mtraceback= (parsed in
   rs6000_override_options).  */
const char *rs6000_traceback_name;
static enum {
  traceback_default = 0,
  traceback_none,
  traceback_part,
  traceback_full
} rs6000_traceback;

/* Flag to say the TOC is initialized */
int toc_initialized;
char toc_label_name[10];

/* Alias set for saves and restores from the rs6000 stack.  */
static int rs6000_sr_alias_set;

/* Call distance, overridden by -mlongcall and #pragma longcall(1).
   The only place that looks at this is rs6000_set_default_type_attributes;
   everywhere else should rely on the presence or absence of a longcall
   attribute on the function declaration.  */
int rs6000_default_long_calls;
const char *rs6000_longcall_switch;

/* Description of one machine-specific builtin.  */
struct builtin_description
{
  /* mask is not const because we're going to alter it below.  This
     nonsense will go away when we rewrite the -march infrastructure
     to give us more target flag bits.  */
  unsigned int mask;
  const enum insn_code icode;
  const char *const name;
  const enum rs6000_builtins code;
};
167
168 static void rs6000_add_gc_roots PARAMS ((void));
169 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
170 static void validate_condition_mode
171 PARAMS ((enum rtx_code, enum machine_mode));
172 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
173 static void rs6000_maybe_dead PARAMS ((rtx));
174 static void rs6000_emit_stack_tie PARAMS ((void));
175 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
176 static void emit_frame_save PARAMS ((rtx, rtx, enum machine_mode,
177 unsigned int, int, int));
178 static rtx gen_frame_mem_offset PARAMS ((enum machine_mode, rtx, int));
179 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
180 static unsigned rs6000_hash_constant PARAMS ((rtx));
181 static unsigned toc_hash_function PARAMS ((const void *));
182 static int toc_hash_eq PARAMS ((const void *, const void *));
183 static int toc_hash_mark_entry PARAMS ((void **, void *));
184 static void toc_hash_mark_table PARAMS ((void *));
185 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
186 static struct machine_function * rs6000_init_machine_status PARAMS ((void));
187 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
188 #ifdef HAVE_GAS_HIDDEN
189 static void rs6000_assemble_visibility PARAMS ((tree, const char *));
190 #endif
191 static int rs6000_ra_ever_killed PARAMS ((void));
192 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
193 const struct attribute_spec rs6000_attribute_table[];
194 static void rs6000_set_default_type_attributes PARAMS ((tree));
195 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
196 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
197 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
198 HOST_WIDE_INT, HOST_WIDE_INT));
199 #if TARGET_ELF
200 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
201 int));
202 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
203 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
204 static void rs6000_elf_select_section PARAMS ((tree, int,
205 unsigned HOST_WIDE_INT));
206 static void rs6000_elf_unique_section PARAMS ((tree, int));
207 static void rs6000_elf_select_rtx_section PARAMS ((enum machine_mode, rtx,
208 unsigned HOST_WIDE_INT));
209 static void rs6000_elf_encode_section_info PARAMS ((tree, int))
210 ATTRIBUTE_UNUSED;
211 static const char *rs6000_elf_strip_name_encoding PARAMS ((const char *));
212 static bool rs6000_elf_in_small_data_p PARAMS ((tree));
213 #endif
214 #if TARGET_XCOFF
215 static void rs6000_xcoff_asm_globalize_label PARAMS ((FILE *, const char *));
216 static void rs6000_xcoff_asm_named_section PARAMS ((const char *, unsigned int));
217 static void rs6000_xcoff_select_section PARAMS ((tree, int,
218 unsigned HOST_WIDE_INT));
219 static void rs6000_xcoff_unique_section PARAMS ((tree, int));
220 static void rs6000_xcoff_select_rtx_section PARAMS ((enum machine_mode, rtx,
221 unsigned HOST_WIDE_INT));
222 static const char * rs6000_xcoff_strip_name_encoding PARAMS ((const char *));
223 static unsigned int rs6000_xcoff_section_type_flags PARAMS ((tree, const char *, int));
224 #endif
225 static void rs6000_xcoff_encode_section_info PARAMS ((tree, int))
226 ATTRIBUTE_UNUSED;
227 static bool rs6000_binds_local_p PARAMS ((tree));
228 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
229 static int rs6000_adjust_priority PARAMS ((rtx, int));
230 static int rs6000_issue_rate PARAMS ((void));
231
232 static void rs6000_init_builtins PARAMS ((void));
233 static rtx rs6000_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
234 static rtx rs6000_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
235 static rtx rs6000_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
236 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
237 static void altivec_init_builtins PARAMS ((void));
238 static void rs6000_common_init_builtins PARAMS ((void));
239
240 static void enable_mask_for_builtins PARAMS ((struct builtin_description *,
241 int, enum rs6000_builtins,
242 enum rs6000_builtins));
243 static void spe_init_builtins PARAMS ((void));
244 static rtx spe_expand_builtin PARAMS ((tree, rtx, bool *));
245 static rtx spe_expand_predicate_builtin PARAMS ((enum insn_code, tree, rtx));
246 static rtx spe_expand_evsel_builtin PARAMS ((enum insn_code, tree, rtx));
247 static int rs6000_emit_int_cmove PARAMS ((rtx, rtx, rtx, rtx));
248
249 static rtx altivec_expand_builtin PARAMS ((tree, rtx, bool *));
250 static rtx altivec_expand_ld_builtin PARAMS ((tree, rtx, bool *));
251 static rtx altivec_expand_st_builtin PARAMS ((tree, rtx, bool *));
252 static rtx altivec_expand_dst_builtin PARAMS ((tree, rtx, bool *));
253 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
254 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
255 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
256 static void rs6000_parse_abi_options PARAMS ((void));
257 static void rs6000_parse_vrsave_option PARAMS ((void));
258 static void rs6000_parse_isel_option PARAMS ((void));
259 static int first_altivec_reg_to_save PARAMS ((void));
260 static unsigned int compute_vrsave_mask PARAMS ((void));
261 static void is_altivec_return_reg PARAMS ((rtx, void *));
262 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
263 static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT));
264 static int easy_vector_constant PARAMS ((rtx));
265 \f
/* Default register names, indexed by hard register number; each name is
   at most 7 characters plus the terminating NUL.  */
char rs6000_reg_names[][8] =
{
  /* GPRs 0-31 (bare numbers).  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
  /* FPRs 0-31 (bare numbers).  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
  /* MQ, link register, count register, argument pointer.  */
     "mq", "lr", "ctr","ap",
  /* Condition-register fields 0-7.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
  /* Fixed-point exception register.  */
      "xer",
  /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9",  "10", "11", "12", "13", "14", "15",
      "16", "17", "18", "19", "20", "21", "22", "23",
      "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave"
};
287
#ifdef TARGET_REGNAMES
/* Alternate (prefixed) register names, copied over rs6000_reg_names in
   rs6000_override_options when -mregnames is in effect.  Must parallel
   rs6000_reg_names entry for entry.  */
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
  /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6", "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave"
};
#endif
310 \f
/* Subtargets without strict-alignment support define no such mask bit;
   make the references below compile to nothing in that case.  */
#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif

/* Initialize the GCC target structure.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes

#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP

/* Default unaligned ops are only provided for ELF.  Find the ops needed
   for non-ELF systems.  */
#ifndef OBJECT_FORMAT_ELF
#if TARGET_XCOFF
/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
   64-bit targets.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
#else
/* For Darwin.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#endif
#endif

/* This hook deals with fixups for relocatable code and DI-mode objects
   in 64-bit code.  */
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER rs6000_assemble_integer

#ifdef HAVE_GAS_HIDDEN
#undef TARGET_ASM_ASSEMBLE_VISIBILITY
#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
#endif

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS rs6000_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin

#undef TARGET_BINDS_LOCAL_P
#define TARGET_BINDS_LOCAL_P rs6000_binds_local_p

/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))

/* The target hook vector, picking up the overrides above.  */
struct gcc_target targetm = TARGET_INITIALIZER;
380 \f
/* Override command line options.  Mostly we process the processor
   type and sometimes adjust other TARGET_ options.

   DEFAULT_CPU is the configure-time default ("--with-cpu="); it may be
   NULL.  Called once, after all switches have been parsed.  Note the
   ordering below matters: explicit -mmultiple/-mstring settings are
   reapplied after the processor defaults, and TARGET_AIX_STRUCT_RET is
   resolved only after the ABI is known.  */

void
rs6000_override_options (default_cpu)
     const char *default_cpu;
{
  size_t i, j;
  struct rs6000_cpu_select *ptr;

  /* Simplify the entries below by making a mask for any POWER
     variant and any PowerPC variant.  */

#define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
#define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
		       | MASK_PPC_GFXOPT | MASK_POWERPC64)
#define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)

  /* Table mapping each canonical -mcpu=/-mtune= name to its processor
     enum and the target flag bits it turns on and off.  */
  static struct ptt
    {
      const char *const name;		/* Canonical processor name.  */
      const enum processor_type processor; /* Processor type enum value.  */
      const int target_enable;		/* Target flags to enable.  */
      const int target_disable;		/* Target flags to disable.  */
    } const processor_target_table[]
      = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_MASKS},
	 {"power", PROCESSOR_POWER,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"power2", PROCESSOR_POWER,
	    MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
	    POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"power3", PROCESSOR_PPC630,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"power4", PROCESSOR_POWER4,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"powerpc", PROCESSOR_POWERPC,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"powerpc64", PROCESSOR_POWERPC64,
	    MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS},
	 {"rios", PROCESSOR_RIOS1,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rios1", PROCESSOR_RIOS1,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rsc", PROCESSOR_PPC601,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rsc1", PROCESSOR_PPC601,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rios2", PROCESSOR_RIOS2,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
	    POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rs64a", PROCESSOR_RS64A,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS},
	 {"401", PROCESSOR_PPC403,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"403", PROCESSOR_PPC403,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"405", PROCESSOR_PPC405,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"505", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"601", PROCESSOR_PPC601,
	    MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"602", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"603", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"603e", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"ec603e", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"604", PROCESSOR_PPC604,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"604e", PROCESSOR_PPC604e,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"620", PROCESSOR_PPC620,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"630", PROCESSOR_PPC630,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"740", PROCESSOR_PPC750,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"750", PROCESSOR_PPC750,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"7400", PROCESSOR_PPC7400,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"7450", PROCESSOR_PPC7450,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"8540", PROCESSOR_PPC8540,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"801", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"821", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"823", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"860", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};

  const size_t ptt_size = ARRAY_SIZE (processor_target_table);

  /* Save current -mmultiple/-mno-multiple status.  */
  int multiple = TARGET_MULTIPLE;
  /* Save current -mstring/-mno-string status.  */
  int string = TARGET_STRING;

  /* Identify the processor type.  */
  rs6000_select[0].string = default_cpu;
  rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;

  /* Walk the selection entries in priority order (--with-cpu, -mcpu,
     -mtune); each non-empty entry must name a table row, and later
     entries override the tune and/or arch chosen by earlier ones.  */
  for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
    {
      ptr = &rs6000_select[i];
      if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	{
	  for (j = 0; j < ptt_size; j++)
	    if (! strcmp (ptr->string, processor_target_table[j].name))
	      {
		if (ptr->set_tune_p)
		  rs6000_cpu = processor_target_table[j].processor;

		if (ptr->set_arch_p)
		  {
		    target_flags |= processor_target_table[j].target_enable;
		    target_flags &= ~processor_target_table[j].target_disable;
		  }
		break;
	      }

	  if (j == ptt_size)
	    error ("bad value (%s) for %s switch", ptr->string, ptr->name);
	}
    }

  if (rs6000_cpu == PROCESSOR_PPC8540)
    rs6000_isel = 1;

  /* If we are optimizing big endian systems for space, use the store
     multiple instructions.  */
  if (BYTES_BIG_ENDIAN && optimize_size)
    target_flags |= MASK_MULTIPLE;

  /* If -mmultiple or -mno-multiple was explicitly used, don't
     override with the processor default */
  if (TARGET_MULTIPLE_SET)
    target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;

  /* If -mstring or -mno-string was explicitly used, don't override
     with the processor default.  */
  if (TARGET_STRING_SET)
    target_flags = (target_flags & ~MASK_STRING) | string;

  /* Don't allow -mmultiple or -mstring on little endian systems
     unless the cpu is a 750, because the hardware doesn't support the
     instructions used in little endian mode, and causes an alignment
     trap.  The 750 does not cause an alignment trap (except when the
     target is unaligned).  */

  if (! BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
    {
      if (TARGET_MULTIPLE)
	{
	  target_flags &= ~MASK_MULTIPLE;
	  if (TARGET_MULTIPLE_SET)
	    warning ("-mmultiple is not supported on little endian systems");
	}

      if (TARGET_STRING)
	{
	  target_flags &= ~MASK_STRING;
	  if (TARGET_STRING_SET)
	    warning ("-mstring is not supported on little endian systems");
	}
    }

  /* For AIX, squirrel away the requested PIC level and then force
     flag_pic off; rs6000_flag_pic preserves the original value.  */
  if (flag_pic != 0 && DEFAULT_ABI == ABI_AIX)
    {
      rs6000_flag_pic = flag_pic;
      flag_pic = 0;
    }

  /* For Darwin, always silently make -fpic and -fPIC identical.  */
  if (flag_pic == 1 && DEFAULT_ABI == ABI_DARWIN)
    flag_pic = 2;

  /* Set debug flags */
  if (rs6000_debug_name)
    {
      if (! strcmp (rs6000_debug_name, "all"))
	rs6000_debug_stack = rs6000_debug_arg = 1;
      else if (! strcmp (rs6000_debug_name, "stack"))
	rs6000_debug_stack = 1;
      else if (! strcmp (rs6000_debug_name, "arg"))
	rs6000_debug_arg = 1;
      else
	error ("unknown -mdebug-%s switch", rs6000_debug_name);
    }

  /* Parse -mtraceback=; only a prefix is compared, so e.g. "partial"
     matches "part" and "none"/"no" both select traceback_none.  */
  if (rs6000_traceback_name)
    {
      if (! strncmp (rs6000_traceback_name, "full", 4))
	rs6000_traceback = traceback_full;
      else if (! strncmp (rs6000_traceback_name, "part", 4))
	rs6000_traceback = traceback_part;
      else if (! strncmp (rs6000_traceback_name, "no", 2))
	rs6000_traceback = traceback_none;
      else
	error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
	       rs6000_traceback_name);
    }

  /* Set size of long double: default 64 bits, -mlong-double-128 for 128.  */
  rs6000_long_double_type_size = 64;
  if (rs6000_long_double_size_string)
    {
      char *tail;
      int size = strtol (rs6000_long_double_size_string, &tail, 10);
      if (*tail != '\0' || (size != 64 && size != 128))
	error ("Unknown switch -mlong-double-%s",
	       rs6000_long_double_size_string);
      else
	rs6000_long_double_type_size = size;
    }

  /* Handle -mabi= options.  */
  rs6000_parse_abi_options ();

  /* Handle -mvrsave= option.  */
  rs6000_parse_vrsave_option ();

  /* Handle -misel= option.  */
  rs6000_parse_isel_option ();

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
  SUBSUBTARGET_OVERRIDE_OPTIONS;
#endif

  /* Handle -m(no-)longcall option.  This is a bit of a cheap hack,
     using TARGET_OPTIONS to handle a toggle switch, but we're out of
     bits in target_flags so TARGET_SWITCHES cannot be used.
     Assumption here is that rs6000_longcall_switch points into the
     text of the complete option, rather than being a copy, so we can
     scan back for the presence or absence of the no- modifier.  */
  if (rs6000_longcall_switch)
    {
      /* Scan back to the 'm' of "-m..."; base then points at either
	 "longcall" or "no-longcall", and any trailing text after the
	 option name is an error.  */
      const char *base = rs6000_longcall_switch;
      while (base[-1] != 'm') base--;

      if (*rs6000_longcall_switch != '\0')
	error ("invalid option `%s'", base);
      rs6000_default_long_calls = (base[0] != 'n');
    }

#ifdef TARGET_REGNAMES
  /* If the user desires alternate register names, copy in the
     alternate names now.  */
  if (TARGET_REGNAMES)
    memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
#endif

  /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
     If -maix-struct-return or -msvr4-struct-return was explicitly
     used, don't override with the ABI default.  */
  if (!(target_flags & MASK_AIX_STRUCT_RET_SET))
    {
      if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
	target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
      else
	target_flags |= MASK_AIX_STRUCT_RET;
    }

  /* Register global variables with the garbage collector.  */
  rs6000_add_gc_roots ();

  /* Allocate an alias set for register saves & restores from stack.  */
  rs6000_sr_alias_set = new_alias_set ();

  if (TARGET_TOC)
    ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);

  /* We can only guarantee the availability of DI pseudo-ops when
     assembling for 64-bit targets.  */
  if (!TARGET_64BIT)
    {
      targetm.asm_out.aligned_op.di = NULL;
      targetm.asm_out.unaligned_op.di = NULL;
    }

  /* Arrange to save and restore machine status around nested functions.  */
  init_machine_status = rs6000_init_machine_status;
}
706
707 /* Handle -misel= option. */
708 static void
709 rs6000_parse_isel_option ()
710 {
711 if (rs6000_isel_string == 0)
712 return;
713 else if (! strcmp (rs6000_isel_string, "yes"))
714 rs6000_isel = 1;
715 else if (! strcmp (rs6000_isel_string, "no"))
716 rs6000_isel = 0;
717 else
718 error ("unknown -misel= option specified: '%s'",
719 rs6000_isel_string);
720 }
721
722 /* Handle -mvrsave= options. */
723 static void
724 rs6000_parse_vrsave_option ()
725 {
726 /* Generate VRSAVE instructions by default. */
727 if (rs6000_altivec_vrsave_string == 0
728 || ! strcmp (rs6000_altivec_vrsave_string, "yes"))
729 rs6000_altivec_vrsave = 1;
730 else if (! strcmp (rs6000_altivec_vrsave_string, "no"))
731 rs6000_altivec_vrsave = 0;
732 else
733 error ("unknown -mvrsave= option specified: '%s'",
734 rs6000_altivec_vrsave_string);
735 }
736
737 /* Handle -mabi= options. */
738 static void
739 rs6000_parse_abi_options ()
740 {
741 if (rs6000_abi_string == 0)
742 return;
743 else if (! strcmp (rs6000_abi_string, "altivec"))
744 rs6000_altivec_abi = 1;
745 else if (! strcmp (rs6000_abi_string, "no-altivec"))
746 rs6000_altivec_abi = 0;
747 else if (! strcmp (rs6000_abi_string, "spe"))
748 rs6000_spe_abi = 1;
749 else if (! strcmp (rs6000_abi_string, "no-spe"))
750 rs6000_spe_abi = 0;
751 else
752 error ("unknown ABI specified: '%s'", rs6000_abi_string);
753 }
754
/* Per-optimization-level option adjustments; intentionally empty for
   this port (presumably the OPTIMIZATION_OPTIONS hook -- confirm
   against rs6000.h).  */
void
optimization_options (level, size)
     int level ATTRIBUTE_UNUSED;
     int size ATTRIBUTE_UNUSED;
{
}
761 \f
/* Do anything needed at the start of the asm file.  With -fverbose-asm,
   emit an assembler comment recording the cpu/tune selections (and, on
   ELF, the -msdata model and -G value) into FILE.  DEFAULT_CPU is the
   configure-time default cpu name, possibly NULL.  */

void
rs6000_file_start (file, default_cpu)
     FILE *file;
     const char *default_cpu;
{
  size_t i;
  char buffer[80];
  const char *start = buffer;
  struct rs6000_cpu_select *ptr;

  if (flag_verbose_asm)
    {
      /* START holds the comment lead-in for the first item only; it is
	 reset to "" as soon as anything has been printed.  */
      sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
      rs6000_select[0].string = default_cpu;

      for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
	{
	  ptr = &rs6000_select[i];
	  if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	    {
	      fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
	      start = "";
	    }
	}

#ifdef USING_ELFOS_H
      switch (rs6000_sdata)
	{
	case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
	case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
	case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
	case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
	}

      if (rs6000_sdata && g_switch_value)
	{
	  fprintf (file, "%s -G %d", start, g_switch_value);
	  start = "";
	}
#endif

      /* Terminate the comment line only if something was printed.  */
      if (*start == '\0')
	putc ('\n', file);
    }
}
809 \f
810 /* Return nonzero if this function is known to have a null epilogue. */
811
812 int
813 direct_return ()
814 {
815 if (reload_completed)
816 {
817 rs6000_stack_t *info = rs6000_stack_info ();
818
819 if (info->first_gp_reg_save == 32
820 && info->first_fp_reg_save == 64
821 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
822 && ! info->lr_save_p
823 && ! info->cr_save_p
824 && info->vrsave_mask == 0
825 && ! info->push_p)
826 return 1;
827 }
828
829 return 0;
830 }
831
/* Returns 1 always.  */

int
any_operand (op, mode)
     rtx op ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  /* Predicate that matches any operand; both arguments are ignored.  */
  return 1;
}
841
842 /* Returns 1 if op is the count register. */
843 int
844 count_register_operand (op, mode)
845 rtx op;
846 enum machine_mode mode ATTRIBUTE_UNUSED;
847 {
848 if (GET_CODE (op) != REG)
849 return 0;
850
851 if (REGNO (op) == COUNT_REGISTER_REGNUM)
852 return 1;
853
854 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
855 return 1;
856
857 return 0;
858 }
859
860 /* Returns 1 if op is an altivec register. */
861 int
862 altivec_register_operand (op, mode)
863 rtx op;
864 enum machine_mode mode ATTRIBUTE_UNUSED;
865 {
866
867 return (register_operand (op, mode)
868 && (GET_CODE (op) != REG
869 || REGNO (op) > FIRST_PSEUDO_REGISTER
870 || ALTIVEC_REGNO_P (REGNO (op))));
871 }
872
873 int
874 xer_operand (op, mode)
875 rtx op;
876 enum machine_mode mode ATTRIBUTE_UNUSED;
877 {
878 if (GET_CODE (op) != REG)
879 return 0;
880
881 if (XER_REGNO_P (REGNO (op)))
882 return 1;
883
884 return 0;
885 }
886
887 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
888 by such constants completes more quickly. */
889
890 int
891 s8bit_cint_operand (op, mode)
892 rtx op;
893 enum machine_mode mode ATTRIBUTE_UNUSED;
894 {
895 return ( GET_CODE (op) == CONST_INT
896 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
897 }
898
899 /* Return 1 if OP is a constant that can fit in a D field. */
900
901 int
902 short_cint_operand (op, mode)
903 rtx op;
904 enum machine_mode mode ATTRIBUTE_UNUSED;
905 {
906 return (GET_CODE (op) == CONST_INT
907 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
908 }
909
910 /* Similar for an unsigned D field. */
911
912 int
913 u_short_cint_operand (op, mode)
914 rtx op;
915 enum machine_mode mode ATTRIBUTE_UNUSED;
916 {
917 return (GET_CODE (op) == CONST_INT
918 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
919 }
920
921 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
922
923 int
924 non_short_cint_operand (op, mode)
925 rtx op;
926 enum machine_mode mode ATTRIBUTE_UNUSED;
927 {
928 return (GET_CODE (op) == CONST_INT
929 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
930 }
931
932 /* Returns 1 if OP is a CONST_INT that is a positive value
933 and an exact power of 2. */
934
935 int
936 exact_log2_cint_operand (op, mode)
937 rtx op;
938 enum machine_mode mode ATTRIBUTE_UNUSED;
939 {
940 return (GET_CODE (op) == CONST_INT
941 && INTVAL (op) > 0
942 && exact_log2 (INTVAL (op)) >= 0);
943 }
944
/* Returns 1 if OP is a register that is not special (i.e., not MQ,
   ctr, or lr).  */

int
gpc_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Accept any register register_operand likes, except hard registers
     numbered in [MQ_REGNO, ARG_POINTER_REGNUM) -- presumably the
     special registers (MQ, LR, CTR, CR fields) -- and XER-class
     registers.  Pseudos fall in the >= ARG_POINTER_REGNUM arm.  */
  return (register_operand (op, mode)
	  && (GET_CODE (op) != REG
	      || (REGNO (op) >= ARG_POINTER_REGNUM
		  && !XER_REGNO_P (REGNO (op)))
	      || REGNO (op) < MQ_REGNO));
}
959
960 /* Returns 1 if OP is either a pseudo-register or a register denoting a
961 CR field. */
962
963 int
964 cc_reg_operand (op, mode)
965 rtx op;
966 enum machine_mode mode;
967 {
968 return (register_operand (op, mode)
969 && (GET_CODE (op) != REG
970 || REGNO (op) >= FIRST_PSEUDO_REGISTER
971 || CR_REGNO_P (REGNO (op))));
972 }
973
974 /* Returns 1 if OP is either a pseudo-register or a register denoting a
975 CR field that isn't CR0. */
976
977 int
978 cc_reg_not_cr0_operand (op, mode)
979 rtx op;
980 enum machine_mode mode;
981 {
982 return (register_operand (op, mode)
983 && (GET_CODE (op) != REG
984 || REGNO (op) >= FIRST_PSEUDO_REGISTER
985 || CR_REGNO_NOT_CR0_P (REGNO (op))));
986 }
987
988 /* Returns 1 if OP is either a constant integer valid for a D-field or
989 a non-special register. If a register, it must be in the proper
990 mode unless MODE is VOIDmode. */
991
992 int
993 reg_or_short_operand (op, mode)
994 rtx op;
995 enum machine_mode mode;
996 {
997 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
998 }
999
1000 /* Similar, except check if the negation of the constant would be
1001 valid for a D-field. */
1002
1003 int
1004 reg_or_neg_short_operand (op, mode)
1005 rtx op;
1006 enum machine_mode mode;
1007 {
1008 if (GET_CODE (op) == CONST_INT)
1009 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1010
1011 return gpc_reg_operand (op, mode);
1012 }
1013
1014 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1015 a non-special register. If a register, it must be in the proper
1016 mode unless MODE is VOIDmode. */
1017
1018 int
1019 reg_or_aligned_short_operand (op, mode)
1020 rtx op;
1021 enum machine_mode mode;
1022 {
1023 if (gpc_reg_operand (op, mode))
1024 return 1;
1025 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1026 return 1;
1027
1028 return 0;
1029 }
1030
1031
1032 /* Return 1 if the operand is either a register or an integer whose
1033 high-order 16 bits are zero. */
1034
1035 int
1036 reg_or_u_short_operand (op, mode)
1037 rtx op;
1038 enum machine_mode mode;
1039 {
1040 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1041 }
1042
1043 /* Return 1 is the operand is either a non-special register or ANY
1044 constant integer. */
1045
1046 int
1047 reg_or_cint_operand (op, mode)
1048 rtx op;
1049 enum machine_mode mode;
1050 {
1051 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1052 }
1053
/* Return 1 if the operand is either a non-special register or ANY
   32-bit signed constant integer.  */

int
reg_or_arith_cint_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
	      /* On a 32-bit host every CONST_INT already fits in 32
		 bits, so the range check is only needed when
		 HOST_WIDE_INT is wider.  */
#if HOST_BITS_PER_WIDE_INT != 32
	      /* Accept [-0x80000000, 0x7fffffff] via one unsigned
		 compare after biasing.  */
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
		  < (unsigned HOST_WIDE_INT) 0x100000000ll)
#endif
	      ));
}
1070
/* Return 1 if the operand is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit addition.  */

int
reg_or_add_cint64_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT == 32
	      /* A 32-bit host CONST_INT can't be below -0x80000000,
		 so only the upper bound needs checking.  The limit is
		 0x7fff8000, not 0x80000000: values at or above it
		 can't be split into a high part plus a signed 16-bit
		 low part without overflowing 32 bits.  */
	      && INTVAL (op) < 0x7fff8000
#else
	      /* Accept [-0x80008000, 0x7fff7fff] via one unsigned
		 compare after biasing.  */
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
1089
/* Return 1 if the operand is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit subtraction.  */

int
reg_or_sub_cint64_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Same ranges as reg_or_add_cint64_operand, but applied to the
     negated constant, since subtraction is done by adding -OP.  */
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT == 32
	      && (- INTVAL (op)) < 0x7fff8000
#else
	      && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
1108
/* Return 1 if the operand is either a non-special register or ANY
   32-bit unsigned constant integer.  */

int
reg_or_logical_cint_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	{
	  /* A mode wider than the host word but <= 32 bits should be
	     impossible here.  */
	  if (GET_MODE_BITSIZE (mode) <= 32)
	    abort ();

	  /* A negative CONST_INT sign-extends above bit 31, so it is
	     not a 32-bit unsigned value.  */
	  if (INTVAL (op) < 0)
	    return 0;
	}

      /* Accept if no bit above the low 32 is set, after masking the
	 value to the mode.  */
      return ((INTVAL (op) & GET_MODE_MASK (mode)
	       & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* An integral CONST_DOUBLE only makes sense for DImode values
	 wider than the host word.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  || mode != DImode)
	abort ();

      /* Unsigned 32-bit values have a zero high word.  */
      return CONST_DOUBLE_HIGH (op) == 0;
    }
  else
    return gpc_reg_operand (op, mode);
}
1142
1143 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1144
1145 int
1146 got_operand (op, mode)
1147 rtx op;
1148 enum machine_mode mode ATTRIBUTE_UNUSED;
1149 {
1150 return (GET_CODE (op) == SYMBOL_REF
1151 || GET_CODE (op) == CONST
1152 || GET_CODE (op) == LABEL_REF);
1153 }
1154
1155 /* Return 1 if the operand is a simple references that can be loaded via
1156 the GOT (labels involving addition aren't allowed). */
1157
1158 int
1159 got_no_const_operand (op, mode)
1160 rtx op;
1161 enum machine_mode mode ATTRIBUTE_UNUSED;
1162 {
1163 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1164 }
1165
/* Return the number of instructions it takes to form the constant
   VALUE in an integer register.  */

static int
num_insns_constant_wide (value)
     HOST_WIDE_INT value;
{
  /* signed constant loadable with {cal|addi} */
  if (CONST_OK_FOR_LETTER_P (value, 'I'))
    return 1;

  /* constant loadable with {cau|addis} */
  else if (CONST_OK_FOR_LETTER_P (value, 'L'))
    return 1;

#if HOST_BITS_PER_WIDE_INT == 64
  else if (TARGET_POWERPC64)
    {
      /* Split into a sign-extended low 32 bits and the remaining
	 high part (note the asymmetric 31/32 split: HIGH keeps bit
	 31 so the later >>= 1 lines up the halves).  */
      HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
      HOST_WIDE_INT high = value >> 31;

      /* Values that sign-extend from 32 bits take two insns.  */
      if (high == 0 || high == -1)
	return 2;

      high >>= 1;

      if (low == 0)
	/* Form the high part, plus one shift insn.  */
	return num_insns_constant_wide (high) + 1;
      else
	/* Form the high part, shift, then merge in the low part.  */
	return (num_insns_constant_wide (high)
		+ num_insns_constant_wide (low) + 1);
    }
#endif

  else
    /* Anything else takes a two-insn {lis;ori}-style sequence.  */
    return 2;
}
1203
/* Return the number of instructions it takes to form the constant OP
   (a CONST_INT or CONST_DOUBLE) of mode MODE in an integer register.  */

int
num_insns_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
#if HOST_BITS_PER_WIDE_INT == 64
      /* A 64-bit value that does not sign-extend from 32 bits but is
	 a valid 64-bit mask can be formed in two insns.  */
      if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
	  && mask64_operand (op, mode))
	return 2;
      else
#endif
	return num_insns_constant_wide (INTVAL (op));
    }

  else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
    {
      long l;
      REAL_VALUE_TYPE rv;

      /* An SFmode constant is loaded through its 32-bit target image.  */
      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);
      return num_insns_constant_wide ((HOST_WIDE_INT) l);
    }

  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      HOST_WIDE_INT low;
      HOST_WIDE_INT high;
      long l[2];
      REAL_VALUE_TYPE rv;
      int endian = (WORDS_BIG_ENDIAN == 0);

      /* VOIDmode/DImode CONST_DOUBLEs carry the integer words
	 directly; FP constants are converted to their target
	 double-word image first.  */
      if (mode == VOIDmode || mode == DImode)
	{
	  high = CONST_DOUBLE_HIGH (op);
	  low = CONST_DOUBLE_LOW (op);
	}
      else
	{
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
	  REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
	  high = l[endian];
	  low = l[1 - endian];
	}

      if (TARGET_32BIT)
	/* Two independent 32-bit registers to fill.  */
	return (num_insns_constant_wide (low)
		+ num_insns_constant_wide (high));

      else
	{
	  /* One 64-bit register.  Values that sign-extend from the
	     low word only need that word formed.  */
	  if (high == 0 && low >= 0)
	    return num_insns_constant_wide (low);

	  else if (high == -1 && low < 0)
	    return num_insns_constant_wide (low);

	  else if (mask64_operand (op, mode))
	    return 2;

	  else if (low == 0)
	    /* High word plus one shift insn.  */
	    return num_insns_constant_wide (high) + 1;

	  else
	    /* High word, shift, and merge in the low word.  */
	    return (num_insns_constant_wide (high)
		    + num_insns_constant_wide (low) + 1);
	}
    }

  else
    abort ();
}
1278
/* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
   register with one instruction per word.  We only do this if we can
   safely read CONST_DOUBLE_{LOW,HIGH}.  */

int
easy_fp_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Only CONST_DOUBLEs of a floating mode (or DImode) qualify, and
     the rtx's mode must agree with MODE.  */
  if (GET_CODE (op) != CONST_DOUBLE
      || GET_MODE (op) != mode
      || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
    return 0;

  /* Consider all constants with -msoft-float to be easy.  */
  if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
      && mode != DImode)
    return 1;

  /* If we are using V.4 style PIC, consider all constants to be hard.  */
  if (flag_pic && DEFAULT_ABI == ABI_V4)
    return 0;

#ifdef TARGET_RELOCATABLE
  /* Similarly if we are using -mrelocatable, consider all constants
     to be hard.  */
  if (TARGET_RELOCATABLE)
    return 0;
#endif

  if (mode == DFmode)
    {
      long k[2];
      REAL_VALUE_TYPE rv;

      /* Easy if each 32-bit word of the target image loads in one
	 insn.  */
      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
    }

  else if (mode == SFmode)
    {
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      return num_insns_constant_wide (l) == 1;
    }

  else if (mode == DImode)
    /* Easy when the low word is zero on 64-bit targets, or whenever
       the whole value forms in at most two insns.  */
    return ((TARGET_POWERPC64
	     && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
	    || (num_insns_constant (op, DImode) <= 2));

  else if (mode == SImode)
    return 1;
  else
    abort ();
}
1342
/* Return 1 if the operand is a CONST_VECTOR that can be put into a
   register with one instruction.  (The original comment said
   CONST_INT, but the code clearly tests CONST_VECTOR.)  Currently
   only the all-zeros vector qualifies.  */

static int
easy_vector_constant (op)
     rtx op;
{
  rtx elt;
  int units, i;

  if (GET_CODE (op) != CONST_VECTOR)
    return 0;

  units = CONST_VECTOR_NUNITS (op);

  /* We can generate 0 easily.  Look for that.  */
  for (i = 0; i < units; ++i)
    {
      elt = CONST_VECTOR_ELT (op, i);

      /* We could probably simplify this by just checking for equality
	 with CONST0_RTX for the current mode, but let's be safe
	 instead.  */

      switch (GET_CODE (elt))
	{
	case CONST_INT:
	  if (INTVAL (elt) != 0)
	    return 0;
	  break;
	case CONST_DOUBLE:
	  /* Both words must be zero.  */
	  if (CONST_DOUBLE_LOW (elt) != 0 || CONST_DOUBLE_HIGH (elt) != 0)
	    return 0;
	  break;
	default:
	  /* Any other element kind is not a known-easy constant.  */
	  return 0;
	}
    }

  /* We could probably generate a few other constants trivially, but
     gcc doesn't generate them yet.  FIXME later.  */
  return 1;
}
1386
1387 /* Return 1 if the operand is the constant 0. This works for scalars
1388 as well as vectors. */
1389 int
1390 zero_constant (op, mode)
1391 rtx op;
1392 enum machine_mode mode;
1393 {
1394 return op == CONST0_RTX (mode);
1395 }
1396
1397 /* Return 1 if the operand is 0.0. */
1398 int
1399 zero_fp_constant (op, mode)
1400 rtx op;
1401 enum machine_mode mode;
1402 {
1403 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1404 }
1405
/* Return 1 if the operand is in volatile memory.  Note that during
   the RTL generation phase, memory_operand does not return TRUE for
   volatile memory references.  So this function allows us to
   recognize volatile references where its safe.  */

int
volatile_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != MEM)
    return 0;

  if (!MEM_VOLATILE_P (op))
    return 0;

  if (mode != GET_MODE (op))
    return 0;

  /* After reload, memory_operand accepts volatile references, so
     defer to it for a strict check.  */
  if (reload_completed)
    return memory_operand (op, mode);

  /* During reload, only already-strictly-valid addresses count.  */
  if (reload_in_progress)
    return strict_memory_address_p (mode, XEXP (op, 0));

  /* Before reload, any legitimizable address is acceptable.  */
  return memory_address_p (mode, XEXP (op, 0));
}
1433
1434 /* Return 1 if the operand is an offsettable memory operand. */
1435
1436 int
1437 offsettable_mem_operand (op, mode)
1438 rtx op;
1439 enum machine_mode mode;
1440 {
1441 return ((GET_CODE (op) == MEM)
1442 && offsettable_address_p (reload_completed || reload_in_progress,
1443 mode, XEXP (op, 0)));
1444 }
1445
1446 /* Return 1 if the operand is either an easy FP constant (see above) or
1447 memory. */
1448
1449 int
1450 mem_or_easy_const_operand (op, mode)
1451 rtx op;
1452 enum machine_mode mode;
1453 {
1454 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1455 }
1456
1457 /* Return 1 if the operand is either a non-special register or an item
1458 that can be used as the operand of a `mode' add insn. */
1459
1460 int
1461 add_operand (op, mode)
1462 rtx op;
1463 enum machine_mode mode;
1464 {
1465 if (GET_CODE (op) == CONST_INT)
1466 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1467 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1468
1469 return gpc_reg_operand (op, mode);
1470 }
1471
1472 /* Return 1 if OP is a constant but not a valid add_operand. */
1473
1474 int
1475 non_add_cint_operand (op, mode)
1476 rtx op;
1477 enum machine_mode mode ATTRIBUTE_UNUSED;
1478 {
1479 return (GET_CODE (op) == CONST_INT
1480 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1481 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1482 }
1483
/* Return 1 if the operand is a non-special register or a constant that
   can be used as the operand of an OR or XOR insn on the RS/6000.  */

int
logical_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  HOST_WIDE_INT opl, oph;

  if (gpc_reg_operand (op, mode))
    return 1;

  if (GET_CODE (op) == CONST_INT)
    {
      opl = INTVAL (op) & GET_MODE_MASK (mode);

#if HOST_BITS_PER_WIDE_INT <= 32
      /* On a narrow host a negative masked value means bits are set
	 beyond what the host word can represent for a wide mode.  */
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
	return 0;
#endif
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* Integral CONST_DOUBLEs only appear for modes wider than the
	 host word.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	abort ();

      opl = CONST_DOUBLE_LOW (op);
      oph = CONST_DOUBLE_HIGH (op);
      if (oph != 0)
	return 0;
    }
  else
    return 0;

  /* ori/xori handle the low 16 bits; oris/xoris the next 16.  The
     constant must fit entirely in one of those halves.  */
  return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
	  || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
}
1522
1523 /* Return 1 if C is a constant that is not a logical operand (as
1524 above), but could be split into one. */
1525
1526 int
1527 non_logical_cint_operand (op, mode)
1528 rtx op;
1529 enum machine_mode mode;
1530 {
1531 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1532 && ! logical_operand (op, mode)
1533 && reg_or_logical_cint_operand (op, mode));
1534 }
1535
/* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
   RS/6000.  It is if there are no more than two 1->0 or 0->1 transitions.
   Reject all ones and all zeros, since these should have been optimized
   away and confuse the making of MB and ME.  */

int
mask_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Fail in 64-bit mode if the mask wraps around because the upper
     32-bits of the mask will all be 1s, contrary to GCC's internal view.  */
  if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
    return 0;

  /* We don't change the number of transitions by inverting,
     so make sure we start with the LS bit zero.  */
  if (c & 1)
    c = ~c;

  /* Reject all zeros or all ones.  */
  if (c == 0)
    return 0;

  /* Find the first transition.  */
  lsb = c & -c;		/* c & -c isolates the lowest set bit.  */

  /* Invert to look for a second transition.  */
  c = ~c;

  /* Erase first transition.  */
  c &= -lsb;		/* -lsb clears all bits below the transition.  */

  /* Find the second transition (if any).  */
  lsb = c & -c;

  /* Match if all the bits above are 1's (or c is zero).  */
  return c == -lsb;
}
1582
/* Return 1 for the PowerPC64 rlwinm corner case: a 32-bit mask that
   wraps around (both bit 0 and bit 31 set), which mask_operand above
   deliberately rejects.  Same transition-counting technique as
   mask_operand, applied to the inverted value.  */

int
mask_operand_wrap (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Only wrapping masks (low and high bit of the 32-bit value both
     set) are handled here.  */
  if ((c & 0x80000001) != 0x80000001)
    return 0;

  /* Invert so the LS bit is zero, then reject all-ones input.  */
  c = ~c;
  if (c == 0)
    return 0;

  /* Find the first transition.  */
  lsb = c & -c;
  /* Invert and erase it to look for a second transition.  */
  c = ~c;
  c &= -lsb;
  lsb = c & -c;
  /* Match if all the bits above the second transition are 1's.  */
  return c == -lsb;
}
1610
/* Return 1 if the operand is a constant that is a PowerPC64 mask.
   It is if there are no more than one 1->0 or 0->1 transitions.
   Reject all zeros, since zero should have been optimized away and
   confuses the making of MB and ME.  */

int
mask64_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb;

      c = INTVAL (op);

      /* Reject all zeros.  */
      if (c == 0)
	return 0;

      /* We don't change the number of transitions by inverting,
	 so make sure we start with the LS bit zero.  */
      if (c & 1)
	c = ~c;

      /* Find the transition, and check that all bits above are 1's.
	 (c & -c isolates the lowest set bit; c == -lsb means c is a
	 contiguous run of 1's from that bit upward.)  */
      lsb = c & -c;
      return c == -lsb;
    }
  return 0;
}
1642
/* Like mask64_operand, but allow up to three transitions.  This
   predicate is used by insn patterns that generate two rldicl or
   rldicr machine insns.  */

int
mask64_2_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb;

      c = INTVAL (op);

      /* Disallow all zeros.  */
      if (c == 0)
	return 0;

      /* We don't change the number of transitions by inverting,
	 so make sure we start with the LS bit zero.  */
      if (c & 1)
	c = ~c;

      /* Find the first transition.  */
      lsb = c & -c;

      /* Invert to look for a second transition.  */
      c = ~c;

      /* Erase first transition.  */
      c &= -lsb;

      /* Find the second transition.  */
      lsb = c & -c;

      /* Invert to look for a third transition.  */
      c = ~c;

      /* Erase second transition.  */
      c &= -lsb;

      /* Find the third transition (if any).  */
      lsb = c & -c;

      /* Match if all the bits above are 1's (or c is zero).  */
      return c == -lsb;
    }
  return 0;
}
1693
/* Generates shifts and masks for a pair of rldicl or rldicr insns to
   implement ANDing by the mask IN.  On return OUT[0]/OUT[1] hold the
   rotate count and mask for the first insn, OUT[2]/OUT[3] those for
   the second (all as CONST_INTs).  IN must satisfy mask64_2_operand.  */
void
build_mask64_2_operands (in, out)
     rtx in;
     rtx *out;
{
#if HOST_BITS_PER_WIDE_INT >= 64
  unsigned HOST_WIDE_INT c, lsb, m1, m2;
  int shift;

  if (GET_CODE (in) != CONST_INT)
    abort ();

  c = INTVAL (in);
  if (c & 1)
    {
      /* Assume c initially something like 0x00fff000000fffff.  The idea
	 is to rotate the word so that the middle ^^^^^^ group of zeros
	 is at the MS end and can be cleared with an rldicl mask.  We then
	 rotate back and clear off the MS        ^^ group of zeros with a
	 second rldicl.  */
      c = ~c;			/* c == 0xff000ffffff00000 */
      lsb = c & -c;		/* lsb == 0x0000000000100000 */
      m1 = -lsb;		/*  m1 == 0xfffffffffff00000 */
      c = ~c;			/* c == 0x00fff000000fffff */
      c &= -lsb;		/* c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/* c == 0xff000fffffffffff */
      c &= -lsb;		/* c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 <<= 64 - shift;	/* m1 == 0xffffff0000000000 */
      m1 = ~m1;			/* m1 == 0x000000ffffffffff */
      m2 = ~c;			/* m2 == 0x00ffffffffffffff */
    }
  else
    {
      /* Assume c initially something like 0xff000f0000000000.  The idea
	 is to rotate the word so that the     ^^^  middle group of zeros
	 is at the LS end and can be cleared with an rldicr mask.  We then
	 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
	 a second rldicr.  */
      lsb = c & -c;		/* lsb == 0x0000010000000000 */
      m2 = -lsb;		/*  m2 == 0xffffff0000000000 */
      c = ~c;			/* c == 0x00fff0ffffffffff */
      c &= -lsb;		/* c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/* c == 0xff000fffffffffff */
      c &= -lsb;		/* c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 = ~c;			/* m1 == 0x00ffffffffffffff */
      m1 >>= shift;		/* m1 == 0x0000000000000fff */
      m1 = ~m1;			/* m1 == 0xfffffffffffff000 */
    }

  /* Note that when we only have two 0->1 and 1->0 transitions, one of the
     masks will be all 1's.  We are guaranteed more than one transition.  */
  out[0] = GEN_INT (64 - shift);
  out[1] = GEN_INT (m1);
  out[2] = GEN_INT (shift);
  out[3] = GEN_INT (m2);
#else
  /* This path only exists on hosts with a narrow HOST_WIDE_INT, where
     64-bit masks cannot be manipulated.  */
  (void)in;
  (void)out;
  abort ();
#endif
}
1765
1766 /* Return 1 if the operand is either a non-special register or a constant
1767 that can be used as the operand of a PowerPC64 logical AND insn. */
1768
1769 int
1770 and64_operand (op, mode)
1771 rtx op;
1772 enum machine_mode mode;
1773 {
1774 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1775 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1776
1777 return (logical_operand (op, mode) || mask64_operand (op, mode));
1778 }
1779
1780 /* Like the above, but also match constants that can be implemented
1781 with two rldicl or rldicr insns. */
1782
1783 int
1784 and64_2_operand (op, mode)
1785 rtx op;
1786 enum machine_mode mode;
1787 {
1788 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1789 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
1790
1791 return logical_operand (op, mode) || mask64_2_operand (op, mode);
1792 }
1793
1794 /* Return 1 if the operand is either a non-special register or a
1795 constant that can be used as the operand of an RS/6000 logical AND insn. */
1796
1797 int
1798 and_operand (op, mode)
1799 rtx op;
1800 enum machine_mode mode;
1801 {
1802 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1803 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1804
1805 return (logical_operand (op, mode) || mask_operand (op, mode));
1806 }
1807
1808 /* Return 1 if the operand is a general register or memory operand. */
1809
1810 int
1811 reg_or_mem_operand (op, mode)
1812 rtx op;
1813 enum machine_mode mode;
1814 {
1815 return (gpc_reg_operand (op, mode)
1816 || memory_operand (op, mode)
1817 || volatile_mem_operand (op, mode));
1818 }
1819
/* Return 1 if the operand is a general register or memory operand without
   pre_inc or pre_dec which produces invalid form of PowerPC lwa
   instruction.  */

int
lwa_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  rtx inner = op;

  /* After reload, look through a SUBREG at the underlying object.  */
  if (reload_completed && GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  /* lwa has no update form, so pre-increment/decrement addresses are
     out; and any constant displacement must be a multiple of 4.  */
  return gpc_reg_operand (inner, mode)
    || (memory_operand (inner, mode)
	&& GET_CODE (XEXP (inner, 0)) != PRE_INC
	&& GET_CODE (XEXP (inner, 0)) != PRE_DEC
	&& (GET_CODE (XEXP (inner, 0)) != PLUS
	    || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
	    || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
}
1842
1843 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
1844
1845 int
1846 symbol_ref_operand (op, mode)
1847 rtx op;
1848 enum machine_mode mode;
1849 {
1850 if (mode != VOIDmode && GET_MODE (op) != mode)
1851 return 0;
1852
1853 return (GET_CODE (op) == SYMBOL_REF);
1854 }
1855
1856 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1857 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
1858
1859 int
1860 call_operand (op, mode)
1861 rtx op;
1862 enum machine_mode mode;
1863 {
1864 if (mode != VOIDmode && GET_MODE (op) != mode)
1865 return 0;
1866
1867 return (GET_CODE (op) == SYMBOL_REF
1868 || (GET_CODE (op) == REG
1869 && (REGNO (op) == LINK_REGISTER_REGNUM
1870 || REGNO (op) == COUNT_REGISTER_REGNUM
1871 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
1872 }
1873
/* Return 1 if the operand is a SYMBOL_REF for a function known to be in
   this file and the function is not weakly defined.  */

int
current_file_function_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  /* SYMBOL_REF_FLAG presumably marks locally-defined functions (set
     by ENCODE_SECTION_INFO -- confirm in the target headers); the
     second arm accepts a self-reference to the current non-weak
     function.  */
  return (GET_CODE (op) == SYMBOL_REF
	  && (SYMBOL_REF_FLAG (op)
	      || (op == XEXP (DECL_RTL (current_function_decl), 0)
		  && ! DECL_WEAK (current_function_decl))));
}
1887
/* Return 1 if this operand is a valid input for a move insn.  */

int
input_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Memory is always valid.  */
  if (memory_operand (op, mode))
    return 1;

  /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  /* For floating-point, easy constants are valid.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && CONSTANT_P (op)
      && easy_fp_constant (op, mode))
    return 1;

  /* Allow any integer constant.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && (GET_CODE (op) == CONST_INT
	  || GET_CODE (op) == CONST_DOUBLE))
    return 1;

  /* For floating-point or multi-word mode, the only remaining valid type
     is a register.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    return register_operand (op, mode);

  /* The only cases left are integral modes one word or smaller (we
     do not get called for MODE_CC values).  These can be in any
     register.  */
  if (register_operand (op, mode))
    return 1;

  /* A SYMBOL_REF referring to the TOC is valid.  */
  if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
    return 1;

  /* A constant pool expression (relative to the TOC) is valid */
  if (TOC_RELATIVE_EXPR_P (op))
    return 1;

  /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
     to be valid.  */
  if (DEFAULT_ABI == ABI_V4
      && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
      && small_data_operand (op, Pmode))
    return 1;

  /* Everything else is invalid as a move source.  */
  return 0;
}
1944
/* Return 1 for an operand in small memory on V.4/eabi.  */

int
small_data_operand (op, mode)
     rtx op ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
#if TARGET_ELF
  rtx sym_ref;

  /* Small data only exists for the sdata/eabi/sysv variants.  */
  if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
    return 0;

  if (DEFAULT_ABI != ABI_V4)
    return 0;

  if (GET_CODE (op) == SYMBOL_REF)
    sym_ref = op;

  /* Otherwise only (const (plus SYMBOL_REF CONST_INT)) qualifies.  */
  else if (GET_CODE (op) != CONST
	   || GET_CODE (XEXP (op, 0)) != PLUS
	   || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
	   || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
    return 0;

  else
    {
      rtx sum = XEXP (op, 0);
      HOST_WIDE_INT summand;

      /* We have to be careful here, because it is the referenced address
        that must be 32k from _SDA_BASE_, not just the symbol.  */
      summand = INTVAL (XEXP (sum, 1));
      if (summand < 0 || summand > g_switch_value)
	return 0;

      sym_ref = XEXP (sum, 0);
    }

  /* Small-data symbols are presumably tagged with a leading '@' by
     the section-info encoding machinery -- confirm against the ELF
     ENCODE_SECTION_INFO for this port.  */
  if (*XSTR (sym_ref, 0) != '@')
    return 0;

  return 1;

#else
  return 0;
#endif
}
1993 \f
/* Helper for constant_pool_expr_p and toc_relative_expr_p: walk OP
   recursively, setting *HAVE_SYM if a special constant-pool symbol is
   seen and *HAVE_TOC if the TOC label is seen.  Return 1 only if every
   leaf of the expression is acceptable.  */
static int
constant_pool_expr_1 (op, have_sym, have_toc)
    rtx op;
    int *have_sym;
    int *have_toc;
{
  switch (GET_CODE(op))
    {
    case SYMBOL_REF:
      if (CONSTANT_POOL_ADDRESS_P (op))
	{
	  /* Only pool entries the target puts in the TOC count.  */
	  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
	    {
	      *have_sym = 1;
	      return 1;
	    }
	  else
	    return 0;
	}
      else if (! strcmp (XSTR (op, 0), toc_label_name))
	{
	  *have_toc = 1;
	  return 1;
	}
      else
	return 0;
    case PLUS:
    case MINUS:
      /* Both halves must be acceptable.  */
      return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
	      && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
    case CONST:
      return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
    case CONST_INT:
      return 1;
    default:
      return 0;
    }
}
2032
2033 int
2034 constant_pool_expr_p (op)
2035 rtx op;
2036 {
2037 int have_sym = 0;
2038 int have_toc = 0;
2039 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2040 }
2041
2042 int
2043 toc_relative_expr_p (op)
2044 rtx op;
2045 {
2046 int have_sym = 0;
2047 int have_toc = 0;
2048 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2049 }
2050
2051 /* Try machine-dependent ways of modifying an illegitimate address
2052 to be legitimate. If we find one, return the new, valid address.
2053 This is used from only one place: `memory_address' in explow.c.
2054
2055 OLDX is the address as it was before break_out_memory_refs was
2056 called. In some cases it is useful to look at this to decide what
2057 needs to be done.
2058
2059 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2060
2061 It is always safe for this function to do nothing. It exists to
2062 recognize opportunities to optimize the output.
2063
2064 On RS/6000, first check for the sum of a register with a constant
2065 integer that is out of range. If so, generate code to add the
2066 constant with the low-order 16 bits masked to the register and force
2067 this result into another register (this can be done with `cau').
2068 Then generate an address of REG+(CONST&0xffff), allowing for the
2069 possibility of bit 16 being a one.
2070
2071 Then check for the sum of a register and something not constant, try to
2072 load the other things into a register and return the sum. */
rtx
rs6000_legitimize_address (x, oldx, mode)
     rtx x;
     rtx oldx ATTRIBUTE_UNUSED;
     enum machine_mode mode;
{
  /* reg + constant that does not fit a signed 16-bit displacement:
     split the constant into a high part (added into a new register)
     and a sign-extended 16-bit low part left in the address.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
    {
      HOST_WIDE_INT high_int, low_int;
      rtx sum;
      /* low_int is the offset sign-extended from its low 16 bits;
	 high_int is the remainder.  high_int + low_int == the original
	 offset, and low_int fits in a D-form displacement.  */
      low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
      high_int = INTVAL (XEXP (x, 1)) - low_int;
      sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
					 GEN_INT (high_int)), 0);
      return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
    }
  /* reg + non-constant: force the second operand into a register so
     the result is a reg+reg (indexed) address.  Modes that span
     multiple registers cannot use indexed addressing, so they are
     excluded here (mirrors rs6000_legitimate_address).  */
  else if (GET_CODE (x) == PLUS
	   && GET_CODE (XEXP (x, 0)) == REG
	   && GET_CODE (XEXP (x, 1)) != CONST_INT
	   && GET_MODE_NUNITS (mode) == 1
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	       || TARGET_POWERPC64
	       || mode != DFmode)
	   && (TARGET_POWERPC64 || mode != DImode)
	   && mode != TImode)
    {
      return gen_rtx_PLUS (Pmode, XEXP (x, 0),
			   force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
    }
  /* AltiVec accepts only [reg] and [reg+reg] addresses.  */
  else if (ALTIVEC_VECTOR_MODE (mode))
    {
      rtx reg;

      /* Make sure both operands are registers.  */
      if (GET_CODE (x) == PLUS)
	return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
			     force_reg (Pmode, XEXP (x, 1)));

      reg = force_reg (Pmode, x);
      return reg;
    }
  else if (SPE_VECTOR_MODE (mode))
    {
      /* We accept [reg + reg] and [reg + OFFSET].  */

      if (GET_CODE (x) == PLUS)
	{
	  rtx op1 = XEXP (x, 0);
	  rtx op2 = XEXP (x, 1);

	  op1 = force_reg (Pmode, op1);

	  /* A constant offset is kept only if it is in range for the
	     SPE load/store forms; otherwise it too goes in a register.  */
	  if (GET_CODE (op2) != REG
	      && (GET_CODE (op2) != CONST_INT
		  || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
	    op2 = force_reg (Pmode, op2);

	  return gen_rtx_PLUS (Pmode, op1, op2);
	}

      return force_reg (Pmode, x);
    }
  /* 32-bit ELF, no TOC, non-PIC: materialize a constant address with
     an explicit high / lo_sum pair.  */
  else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && GET_MODE_NUNITS (mode) == 1
	   && (GET_MODE_BITSIZE (mode) <= 32
	       || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_elf_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  /* Likewise for Darwin (Mach-O) without a TOC.  */
  else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
	   && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
	   && mode != DImode
	   && mode != TImode)
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_macho_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  /* Special constant-pool entries get a TOC-relative reference.  */
  else if (TARGET_TOC
	   && CONSTANT_POOL_EXPR_P (x)
	   && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
    {
      return create_TOC_reference (x);
    }
  else
    /* Nothing we can do; let the caller fall back to the default.  */
    return NULL_RTX;
}
2172
2173 /* The convention appears to be to define this wherever it is used.
2174 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2175 is now used here. */
2176 #ifndef REG_MODE_OK_FOR_BASE_P
2177 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2178 #endif
2179
2180 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2181 replace the input X, or the original X if no replacement is called for.
2182 The output parameter *WIN is 1 if the calling macro should goto WIN,
2183 0 if it should not.
2184
2185 For RS/6000, we wish to handle large displacements off a base
   register by splitting the addend across an addi/addis pair and the mem insn.
2187 This cuts number of extra insns needed from 3 to 1.
2188
2189 On Darwin, we use this to generate code for floating point constants.
2190 A movsf_low is generated so we wind up with 2 instructions rather than 3.
2191 The Darwin code is inside #if TARGET_MACHO because only then is
2192 machopic_function_base_name() defined. */
rtx
rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
     rtx x;
     enum machine_mode mode;
     int opnum;
     int type;
     int ind_levels ATTRIBUTE_UNUSED;
     int *win;
{
  /* We must recognize output that we have already generated ourselves:
     (reg + high) + low.  Reload the inner sum into a base register.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN && flag_pic
      && GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
      && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
    {
      /* Result of previous invocation of this function on Darwin
	 floating point constant.  Reload the PLUS as a base register.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* reg + large constant: split the displacement into a high part
     (reloaded into a base register) and a sign-extended 16-bit low
     part left in the mem.  Vector modes are excluded because they do
     not allow D-form displacements.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && !SPE_VECTOR_MODE (mode)
      && !ALTIVEC_VECTOR_MODE (mode))
    {
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      /* low = val sign-extended from 16 bits; high = the remainder
	 sign-extended from 32 bits.  */
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT high
	= (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
	{
	  *win = 0;
	  return x;
	}

      /* Reload the high part into a base reg; leave the low part
	 in the mem directly.  */

      x = gen_rtx_PLUS (GET_MODE (x),
			gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
				      GEN_INT (high)),
			GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#if TARGET_MACHO
  if (GET_CODE (x) == SYMBOL_REF
      && DEFAULT_ABI == ABI_DARWIN
      && !ALTIVEC_VECTOR_MODE (mode)
      && flag_pic)
    {
      /* Darwin load of floating point constant.  Rewrite the symbol as
	 pic_base-relative (HIGH/LO_SUM) so a movsf_low can be used.  */
      rtx offset = gen_rtx (CONST, Pmode,
			    gen_rtx (MINUS, Pmode, x,
				     gen_rtx (SYMBOL_REF, Pmode,
					      machopic_function_base_name ())));
      x = gen_rtx (LO_SUM, GET_MODE (x),
		   gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
			    gen_rtx (HIGH, Pmode, offset)), offset);
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* Special constant-pool entry: use a TOC-relative reference.  */
  if (TARGET_TOC
      && CONSTANT_POOL_EXPR_P (x)
      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
    {
      (x) = create_TOC_reference (x);
      *win = 1;
      return x;
    }
  /* No transformation applies; tell the caller to fall through.  */
  *win = 0;
  return x;
}
2303
2304 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2305 that is a valid memory address for an instruction.
2306 The MODE argument is the machine mode for the MEM expression
2307 that wants to use this address.
2308
   On the RS/6000, there are four valid addresses: a SYMBOL_REF that
2310 refers to a constant pool entry of an address (or the sum of it
2311 plus a constant), a short (16-bit signed) constant plus a register,
2312 the sum of two registers, or a register indirect, possibly with an
2313 auto-increment. For DFmode and DImode with an constant plus register,
2314 we must ensure that both words are addressable or PowerPC64 with offset
2315 word aligned.
2316
2317 For modes spanning multiple registers (DFmode in 32-bit GPRs,
2318 32-bit DImode, TImode), indexed addressing cannot be used because
2319 adjacent memory cells are accessed by adding word-sized offsets
2320 during assembly output. */
2321 int
2322 rs6000_legitimate_address (mode, x, reg_ok_strict)
2323 enum machine_mode mode;
2324 rtx x;
2325 int reg_ok_strict;
2326 {
2327 if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
2328 return 1;
2329 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2330 && !ALTIVEC_VECTOR_MODE (mode)
2331 && !SPE_VECTOR_MODE (mode)
2332 && TARGET_UPDATE
2333 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
2334 return 1;
2335 if (LEGITIMATE_SMALL_DATA_P (mode, x))
2336 return 1;
2337 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
2338 return 1;
2339 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2340 if (! reg_ok_strict
2341 && GET_CODE (x) == PLUS
2342 && GET_CODE (XEXP (x, 0)) == REG
2343 && XEXP (x, 0) == virtual_stack_vars_rtx
2344 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2345 return 1;
2346 if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
2347 return 1;
2348 if (mode != TImode
2349 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2350 || TARGET_POWERPC64
2351 || mode != DFmode)
2352 && (TARGET_POWERPC64 || mode != DImode)
2353 && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
2354 return 1;
2355 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
2356 return 1;
2357 return 0;
2358 }
2359 \f
2360 /* Try to output insns to set TARGET equal to the constant C if it can
2361 be done in less than N insns. Do all computations in MODE.
2362 Returns the place where the output has been placed if it can be
2363 done and the insns have been emitted. If it would take more than N
   insns, zero is returned and no insns are emitted.  */
2365
rtx
rs6000_emit_set_const (dest, mode, source, n)
     rtx dest, source;
     enum machine_mode mode;
     int n ATTRIBUTE_UNUSED;
{
  rtx result, insn, set;
  HOST_WIDE_INT c0, c1;

  if (mode == QImode || mode == HImode)
    {
      /* Narrow modes: a single move always suffices.  */
      if (dest == NULL)
	dest = gen_reg_rtx (mode);
      emit_insn (gen_rtx_SET (VOIDmode, dest, source));
      return dest;
    }
  else if (mode == SImode)
    {
      /* Two insns: load the value with the low 16 bits cleared, then
	 OR in the low 16 bits.  */
      result = no_new_pseudos ? dest : gen_reg_rtx (SImode);

      emit_insn (gen_rtx_SET (VOIDmode, result,
			      GEN_INT (INTVAL (source)
				       & (~ (HOST_WIDE_INT) 0xffff))));
      emit_insn (gen_rtx_SET (VOIDmode, dest,
			      gen_rtx_IOR (SImode, result,
					   GEN_INT (INTVAL (source) & 0xffff))));
      result = dest;
    }
  else if (mode == DImode)
    {
      /* Split SOURCE into low (c0) and high (c1) HOST_WIDE_INT halves
	 and let rs6000_emit_set_long_const do the synthesis.  */
      if (GET_CODE (source) == CONST_INT)
	{
	  c0 = INTVAL (source);
	  c1 = -(c0 < 0);	/* c1 is the sign extension of c0.  */
	}
      else if (GET_CODE (source) == CONST_DOUBLE)
	{
#if HOST_BITS_PER_WIDE_INT >= 64
	  /* A wide host word holds the whole value in c0.  */
	  c0 = CONST_DOUBLE_LOW (source);
	  c1 = -(c0 < 0);
#else
	  c0 = CONST_DOUBLE_LOW (source);
	  c1 = CONST_DOUBLE_HIGH (source);
#endif
	}
      else
	abort ();

      result = rs6000_emit_set_long_const (dest, c0, c1);
    }
  else
    abort ();

  /* Record the full constant as a REG_EQUAL note on the last insn
     emitted, unless its SET_SRC is already a constant.  */
  insn = get_last_insn ();
  set = single_set (insn);
  if (! CONSTANT_P (SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, source);

  return result;
}
2426
2427 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2428 fall back to a straight forward decomposition. We do this to avoid
2429 exponential run times encountered when looking for longer sequences
2430 with rs6000_emit_set_const. */
static rtx
rs6000_emit_set_long_const (dest, c1, c2)
     rtx dest;
     HOST_WIDE_INT c1, c2;
{
  if (!TARGET_POWERPC64)
    {
      /* 32-bit target: simply set the two word halves of DEST.  */
      rtx operand1, operand2;

      operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
					DImode);
      operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
					DImode);
      emit_move_insn (operand1, GEN_INT (c1));
      emit_move_insn (operand2, GEN_INT (c2));
    }
  else
    {
      /* 64-bit target: build the constant from its four 16-bit
	 chunks, ud1 (least significant) through ud4 (most
	 significant).  */
      HOST_WIDE_INT ud1, ud2, ud3, ud4;

      ud1 = c1 & 0xffff;
      ud2 = (c1 & 0xffff0000) >> 16;
#if HOST_BITS_PER_WIDE_INT >= 64
      /* On a wide host the whole value is in c1; recover the high
	 half into c2.  */
      c2 = c1 >> 32;
#endif
      ud3 = c2 & 0xffff;
      ud4 = (c2 & 0xffff0000) >> 16;

      /* Value is a sign-extended 16-bit constant: one insn.  */
      if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
	  || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
	{
	  if (ud1 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
	  else
	    emit_move_insn (dest, GEN_INT (ud1));
	}

      /* Value is a sign-extended 32-bit constant: high 16 bits, then
	 OR in the low 16 bits if nonzero.  */
      else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
	       || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
	{
	  if (ud2 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud2 << 16));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      /* Value is a sign-extended 48-bit constant: build the top 32
	 bits, shift left 16, then OR in the rest.  */
      else if ((ud4 == 0xffff && (ud3 & 0x8000))
	       || (ud4 == 0 && ! (ud3 & 0x8000)))
	{
	  if (ud3 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud3 << 16));

	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      /* General case: build the high 32 bits, shift left 32, then OR
	 in the low chunks.  */
      else
	{
	  if (ud4 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud4 << 16));

	  if (ud3 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));

	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
					       GEN_INT (ud2 << 16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
    }
  return dest;
}
2515
/* Emit a move from SOURCE to DEST in mode MODE.  Expands to one or
   more insns, canonicalizing constants and fixing up operand forms
   that the mov* patterns cannot handle directly.  */
void
rs6000_emit_move (dest, source, mode)
     rtx dest;
     rtx source;
     enum machine_mode mode;
{
  rtx operands[2];
  operands[0] = dest;
  operands[1] = source;

  /* Sanity checks.  Check that we get CONST_DOUBLE only when we should.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      /* FIXME.  This should never happen.  */
      /* Since it seems that it does, do the safe thing and convert
	 to a CONST_INT.  */
      operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
    }
  /* A non-float CONST_DOUBLE whose value would fit a CONST_INT is
     non-canonical; abort rather than emit bad RTL.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && ((CONST_DOUBLE_HIGH (operands[1]) == 0
	   && CONST_DOUBLE_LOW (operands[1]) >= 0)
	  || (CONST_DOUBLE_HIGH (operands[1]) == -1
	      && CONST_DOUBLE_LOW (operands[1]) < 0)))
    abort ();

  /* Check if GCC is setting up a block move that will end up using FP
     registers as temporaries.  We must make sure this is acceptable.  */
  if (GET_CODE (operands[0]) == MEM
      && GET_CODE (operands[1]) == MEM
      && mode == DImode
      && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
	  || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
      && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
					    ? 32 : MEM_ALIGN (operands[0])))
	    || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
					       ? 32
					       : MEM_ALIGN (operands[1]))))
      && ! MEM_VOLATILE_P (operands [0])
      && ! MEM_VOLATILE_P (operands [1]))
    {
      /* Do the DImode mem-to-mem copy as two SImode moves instead.  */
      emit_move_insn (adjust_address (operands[0], SImode, 0),
		      adjust_address (operands[1], SImode, 0));
      emit_move_insn (adjust_address (operands[0], SImode, 4),
		      adjust_address (operands[1], SImode, 4));
      return;
    }

  if (!no_new_pseudos)
    {
      /* When optimizing, load a narrow MEM with a zero-extending
	 word-mode load and take the lowpart of the result.  */
      if (GET_CODE (operands[1]) == MEM && optimize > 0
	  && (mode == QImode || mode == HImode || mode == SImode)
	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
	{
	  rtx reg = gen_reg_rtx (word_mode);

	  emit_insn (gen_rtx_SET (word_mode, reg,
				  gen_rtx_ZERO_EXTEND (word_mode,
						       operands[1])));
	  operands[1] = gen_lowpart (mode, reg);
	}
      /* A non-REG destination needs the source in a register first.  */
      if (GET_CODE (operands[0]) != REG)
	operands[1] = force_reg (mode, operands[1]);
    }

  /* Storing SFmode to memory on POWER (not PowerPC) with hard float.  */
  if (mode == SFmode && ! TARGET_POWERPC
      && TARGET_HARD_FLOAT && TARGET_FPRS
      && GET_CODE (operands[0]) == MEM)
    {
      int regnum;

      if (reload_in_progress || reload_completed)
	regnum = true_regnum (operands[1]);
      else if (GET_CODE (operands[1]) == REG)
	regnum = REGNO (operands[1]);
      else
	regnum = -1;

      /* If operands[1] is a register, on POWER it may have
	 double-precision data in it, so truncate it to single
	 precision.  */
      if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
	{
	  rtx newreg;
	  newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
	  emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
	  operands[1] = newreg;
	}
    }

  /* Handle the case where reload calls us with an invalid address;
     and the case of CONSTANT_P_RTX.  */
  if (!ALTIVEC_VECTOR_MODE (mode)
      && (! general_operand (operands[1], mode)
	  || ! nonimmediate_operand (operands[0], mode)
	  || GET_CODE (operands[1]) == CONSTANT_P_RTX))
    {
      emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
      return;
    }

  /* FIXME:  In the long term, this switch statement should go away
     and be replaced by a sequence of tests based on things like
     mode == Pmode.  */
  switch (mode)
    {
    case HImode:
    case QImode:
      /* Non-CONST_INT constants go in the constant pool.  */
      if (CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != CONST_INT)
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case TFmode:
    case DFmode:
    case SFmode:
      /* FP constants that cannot be synthesized cheaply go in the
	 constant pool.  */
      if (CONSTANT_P (operands[1])
	  && ! easy_fp_constant (operands[1], mode))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case V16QImode:
    case V8HImode:
    case V4SFmode:
    case V4SImode:
    case V4HImode:
    case V2SFmode:
    case V2SImode:
    case V1DImode:
      /* Likewise for hard vector constants.  */
      if (CONSTANT_P (operands[1])
	  && !easy_vector_constant (operands[1]))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case SImode:
    case DImode:
      /* Use default pattern for address of ELF small data */
      if (TARGET_ELF
	  && mode == Pmode
	  && DEFAULT_ABI == ABI_V4
	  && (GET_CODE (operands[1]) == SYMBOL_REF
	      || GET_CODE (operands[1]) == CONST)
	  && small_data_operand (operands[1], mode))
	{
	  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	  return;
	}

      /* V.4 small-model PIC: load the address through the GOT.  */
      if (DEFAULT_ABI == ABI_V4
	  && mode == Pmode && mode == SImode
	  && flag_pic == 1 && got_operand (operands[1], mode))
	{
	  emit_insn (gen_movsi_got (operands[0], operands[1]));
	  return;
	}

      /* No TOC and non-PIC: materialize the constant with an explicit
	 high/low instruction pair.  */
      if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
	  && TARGET_NO_TOC && ! flag_pic
	  && mode == Pmode
	  && CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != HIGH
	  && GET_CODE (operands[1]) != CONST_INT)
	{
	  rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));

	  /* If this is a function address on -mcall-aixdesc,
	     convert it to the address of the descriptor.  */
	  if (DEFAULT_ABI == ABI_AIX
	      && GET_CODE (operands[1]) == SYMBOL_REF
	      && XSTR (operands[1], 0)[0] == '.')
	    {
	      const char *name = XSTR (operands[1], 0);
	      rtx new_ref;
	      /* Strip the leading '.' from the symbol name; copy the
		 pool/flag/used bits to the new SYMBOL_REF.  */
	      while (*name == '.')
		name++;
	      new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
	      CONSTANT_POOL_ADDRESS_P (new_ref)
		= CONSTANT_POOL_ADDRESS_P (operands[1]);
	      SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
	      SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
	      operands[1] = new_ref;
	    }

	  if (DEFAULT_ABI == ABI_DARWIN)
	    {
	      emit_insn (gen_macho_high (target, operands[1]));
	      emit_insn (gen_macho_low (operands[0], target, operands[1]));
	      return;
	    }

	  emit_insn (gen_elf_high (target, operands[1]));
	  emit_insn (gen_elf_low (operands[0], target, operands[1]));
	  return;
	}

      /* If this is a SYMBOL_REF that refers to a constant pool entry,
	 and we have put it in the TOC, we just need to make a TOC-relative
	 reference to it.  */
      if (TARGET_TOC
	  && GET_CODE (operands[1]) == SYMBOL_REF
	  && CONSTANT_POOL_EXPR_P (operands[1])
	  && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
					      get_pool_mode (operands[1])))
	{
	  operands[1] = create_TOC_reference (operands[1]);
	}
      /* Otherwise, an expensive constant (more than 2 insns, or
	 destined for an FP register) is loaded from the constant
	 pool.  */
      else if (mode == Pmode
	       && CONSTANT_P (operands[1])
	       && ((GET_CODE (operands[1]) != CONST_INT
		    && ! easy_fp_constant (operands[1], mode))
		   || (GET_CODE (operands[1]) == CONST_INT
		       && num_insns_constant (operands[1], mode) > 2)
		   || (GET_CODE (operands[0]) == REG
		       && FP_REGNO_P (REGNO (operands[0]))))
	       && GET_CODE (operands[1]) != HIGH
	       && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
	       && ! TOC_RELATIVE_EXPR_P (operands[1]))
	{
	  /* Emit a USE operation so that the constant isn't deleted if
	     expensive optimizations are turned on because nobody
	     references it.  This should only be done for operands that
	     contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
	     This should not be done for operands that contain LABEL_REFs.
	     For now, we just handle the obvious case.  */
	  if (GET_CODE (operands[1]) != LABEL_REF)
	    emit_insn (gen_rtx_USE (VOIDmode, operands[1]));

#if TARGET_MACHO
	  /* Darwin uses a special PIC legitimizer.  */
	  if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
	    {
	      operands[1] =
		rs6000_machopic_legitimize_pic_address (operands[1], mode,
							operands[0]);
	      if (operands[0] != operands[1])
		emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	      return;
	    }
#endif

	  /* If we are to limit the number of things we put in the TOC and
	     this is a symbol plus a constant we can add in one insn,
	     just put the symbol in the TOC and add the constant.  Don't do
	     this if reload is in progress.  */
	  if (GET_CODE (operands[1]) == CONST
	      && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
	      && GET_CODE (XEXP (operands[1], 0)) == PLUS
	      && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
	      && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
		  || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
	      && ! side_effects_p (operands[0]))
	    {
	      rtx sym =
		force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
	      rtx other = XEXP (XEXP (operands[1], 0), 1);

	      sym = force_reg (mode, sym);
	      if (mode == SImode)
		emit_insn (gen_addsi3 (operands[0], sym, other));
	      else
		emit_insn (gen_adddi3 (operands[0], sym, other));
	      return;
	    }

	  operands[1] = force_const_mem (mode, operands[1]);

	  /* If the new constant-pool address is TOC-addressable, turn
	     the MEM into a TOC-relative reference.  */
	  if (TARGET_TOC
	      && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
	      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
		get_pool_constant (XEXP (operands[1], 0)),
		get_pool_mode (XEXP (operands[1], 0))))
	    {
	      operands[1]
		= gen_rtx_MEM (mode,
			       create_TOC_reference (XEXP (operands[1], 0)));
	      set_mem_alias_set (operands[1], get_TOC_alias_set ());
	      RTX_UNCHANGING_P (operands[1]) = 1;
	    }
	}
      break;

    case TImode:
      /* TImode memory operands must use register-indirect addressing;
	 copy any other address into a register first.  */
      if (GET_CODE (operands[0]) == MEM
	  && GET_CODE (XEXP (operands[0], 0)) != REG
	  && ! reload_in_progress)
	operands[0]
	  = replace_equiv_address (operands[0],
				   copy_addr_to_reg (XEXP (operands[0], 0)));

      if (GET_CODE (operands[1]) == MEM
	  && GET_CODE (XEXP (operands[1], 0)) != REG
	  && ! reload_in_progress)
	operands[1]
	  = replace_equiv_address (operands[1],
				   copy_addr_to_reg (XEXP (operands[1], 0)));
      break;

    default:
      abort ();
    }

  /* Above, we may have called force_const_mem which may have returned
     an invalid address.  If we can, fix this up; otherwise, reload will
     have to deal with it.  */
  if (GET_CODE (operands[1]) == MEM
      && ! memory_address_p (mode, XEXP (operands[1], 0))
      && ! reload_in_progress)
    operands[1] = adjust_address (operands[1], mode, 0);

  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
  return;
}
2831 \f
2832 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2833 for a call to a function whose data type is FNTYPE.
2834 For a library call, FNTYPE is 0.
2835
2836 For incoming args we set the number of arguments in the prototype large
2837 so we never return a PARALLEL. */
2838
2839 void
2840 init_cumulative_args (cum, fntype, libname, incoming)
2841 CUMULATIVE_ARGS *cum;
2842 tree fntype;
2843 rtx libname ATTRIBUTE_UNUSED;
2844 int incoming;
2845 {
2846 static CUMULATIVE_ARGS zero_cumulative;
2847
2848 *cum = zero_cumulative;
2849 cum->words = 0;
2850 cum->fregno = FP_ARG_MIN_REG;
2851 cum->vregno = ALTIVEC_ARG_MIN_REG;
2852 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
2853 cum->call_cookie = CALL_NORMAL;
2854 cum->sysv_gregno = GP_ARG_MIN_REG;
2855
2856 if (incoming)
2857 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
2858
2859 else if (cum->prototype)
2860 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
2861 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
2862 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
2863
2864 else
2865 cum->nargs_prototype = 0;
2866
2867 cum->orig_nargs = cum->nargs_prototype;
2868
2869 /* Check for a longcall attribute. */
2870 if (fntype
2871 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
2872 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
2873 cum->call_cookie = CALL_LONG;
2874
2875 if (TARGET_DEBUG_ARG)
2876 {
2877 fprintf (stderr, "\ninit_cumulative_args:");
2878 if (fntype)
2879 {
2880 tree ret_type = TREE_TYPE (fntype);
2881 fprintf (stderr, " ret code = %s,",
2882 tree_code_name[ (int)TREE_CODE (ret_type) ]);
2883 }
2884
2885 if (cum->call_cookie & CALL_LONG)
2886 fprintf (stderr, " longcall,");
2887
2888 fprintf (stderr, " proto = %d, nargs = %d\n",
2889 cum->prototype, cum->nargs_prototype);
2890 }
2891 }
2892 \f
2893 /* If defined, a C expression which determines whether, and in which
2894 direction, to pad out an argument with extra space. The value
2895 should be of type `enum direction': either `upward' to pad above
2896 the argument, `downward' to pad below, or `none' to inhibit
2897 padding.
2898
2899 For the AIX ABI structs are always stored left shifted in their
2900 argument slot. */
2901
2902 enum direction
2903 function_arg_padding (mode, type)
2904 enum machine_mode mode;
2905 tree type;
2906 {
2907 if (type != 0 && AGGREGATE_TYPE_P (type))
2908 return upward;
2909
2910 /* This is the default definition. */
2911 return (! BYTES_BIG_ENDIAN
2912 ? upward
2913 : ((mode == BLKmode
2914 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2915 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
2916 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
2917 ? downward : upward));
2918 }
2919
2920 /* If defined, a C expression that gives the alignment boundary, in bits,
2921 of an argument with the specified mode and type. If it is not defined,
2922 PARM_BOUNDARY is used for all arguments.
2923
2924 V.4 wants long longs to be double word aligned. */
2925
2926 int
2927 function_arg_boundary (mode, type)
2928 enum machine_mode mode;
2929 tree type ATTRIBUTE_UNUSED;
2930 {
2931 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
2932 return 64;
2933 else if (SPE_VECTOR_MODE (mode))
2934 return 64;
2935 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2936 return 128;
2937 else
2938 return PARM_BOUNDARY;
2939 }
2940 \f
2941 /* Update the data in CUM to advance over an argument
2942 of mode MODE and data type TYPE.
2943 (TYPE is null for libcalls where that information may not be available.) */
2944
void
function_arg_advance (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;
     tree type;
     int named;
{
  cum->nargs_prototype--;

  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      /* AltiVec args use a vector register while any remain within
	 the prototype; otherwise they take stack words.  */
      if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
	cum->vregno++;
      else
	cum->words += RS6000_ARG_SIZE (mode, type);
    }
  else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
	   && named && cum->sysv_gregno <= GP_ARG_MAX_REG)
    /* Named SPE vector args consume one GPR.  */
    cum->sysv_gregno++;
  else if (DEFAULT_ABI == ABI_V4)
    {
      if (TARGET_HARD_FLOAT && TARGET_FPRS
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    cum->fregno++;
	  else
	    {
	      /* FP registers exhausted: the value goes on the stack;
		 a double is doubleword aligned there.  */
	      if (mode == DFmode)
		cum->words += cum->words & 1;
	      cum->words += RS6000_ARG_SIZE (mode, type);
	    }
	}
      else
	{
	  int n_words;
	  int gregno = cum->sysv_gregno;

	  /* Aggregates and IEEE quad get passed by reference.  */
	  if ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode)
	    n_words = 1;
	  else
	    n_words = RS6000_ARG_SIZE (mode, type);

	  /* Long long and SPE vectors are put in odd registers.  */
	  if (n_words == 2 && (gregno & 1) == 0)
	    gregno += 1;

	  /* Long long and SPE vectors are not split between registers
	     and stack.  */
	  if (gregno + n_words - 1 > GP_ARG_MAX_REG)
	    {
	      /* Long long is aligned on the stack.  */
	      if (n_words == 2)
		cum->words += cum->words & 1;
	      cum->words += n_words;
	    }

	  /* Note: gregno keeps advancing even after spilling to the
	     stack has begun; a value past GP_ARG_MAX_REG is how
	     expand_builtin_saveregs can tell that spilling started.  */
	  cum->sysv_gregno = gregno + n_words;
	}

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
		   cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
	  fprintf (stderr, "mode = %4s, named = %d\n",
		   GET_MODE_NAME (mode), named);
	}
    }
  else
    {
      /* ABIs other than V.4: every arg gets stack words; in 32-bit
	 mode a doubleword-aligned arg may need a padding word first.  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
		   && function_arg_boundary (mode, type) == 64) ? 1 : 0;

      cum->words += align + RS6000_ARG_SIZE (mode, type);

      /* FP args also consume an FP register when hard float is on.  */
      if (GET_MODE_CLASS (mode) == MODE_FLOAT
	  && TARGET_HARD_FLOAT && TARGET_FPRS)
	cum->fregno++;

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
		   cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
	  fprintf (stderr, "named = %d, align = %d\n", named, align);
	}
    }
}
3041 \f
3042 /* Determine where to put an argument to a function.
3043 Value is zero to push the argument on the stack,
3044 or a hard register in which to store the argument.
3045
3046 MODE is the argument's machine mode.
3047 TYPE is the data type of the argument (as a tree).
3048 This is null for libcalls where that information may
3049 not be available.
3050 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3051 the preceding args and about the function being called.
3052 NAMED is nonzero if this argument is a named parameter
3053 (otherwise it is an extra parameter matching an ellipsis).
3054
3055 On RS/6000 the first eight words of non-FP are normally in registers
3056 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3057 Under V.4, the first 8 FP args are in registers.
3058
3059 If this is floating-point and no prototype is specified, we use
3060 both an FP and integer register (or possibly FP reg and stack). Library
3061 functions (when TYPE is zero) always have the proper types for args,
3062 so we can pass the FP value just in one register. emit_library_function
3063 doesn't support PARALLEL anyway. */
3064
struct rtx_def *
function_arg (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;
     tree type;
     int named;
{
  enum rs6000_abi abi = DEFAULT_ABI;

  /* Return a marker to indicate whether CR1 needs to set or clear the
     bit that V.4 uses to say fp args were passed in registers.
     Assume that we don't need the marker for software floating point,
     or compiler generated library calls.  */
  if (mode == VOIDmode)
    {
      if (abi == ABI_V4
	  && cum->nargs_prototype < 0
	  && type && (cum->prototype || TARGET_NO_PROTOTYPE))
	{
	  /* For the SPE, we need to crxor CR6 always.  */
	  if (TARGET_SPE_ABI)
	    return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
	  else if (TARGET_HARD_FLOAT && TARGET_FPRS)
	    return GEN_INT (cum->call_cookie
			    | ((cum->fregno == FP_ARG_MIN_REG)
			       ? CALL_V4_SET_FP_ARGS
			       : CALL_V4_CLEAR_FP_ARGS));
	}

      return GEN_INT (cum->call_cookie);
    }

  /* Named AltiVec vector arguments go in vector registers while any
     remain; unnamed ones (or overflow) go on the stack.  */
  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
	return gen_rtx_REG (mode, cum->vregno);
      else
	return NULL;
    }
  /* Named SPE vector arguments use the SysV GPR sequence.  */
  else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode) && named)
    {
      if (cum->sysv_gregno <= GP_ARG_MAX_REG)
	return gen_rtx_REG (mode, cum->sysv_gregno);
      else
	return NULL;
    }
  else if (abi == ABI_V4)
    {
      /* Under V.4, single and double floats use FP argument registers
	 while any remain.  */
      if (TARGET_HARD_FLOAT && TARGET_FPRS
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    return gen_rtx_REG (mode, cum->fregno);
	  else
	    return NULL;
	}
      else
	{
	  int n_words;
	  int gregno = cum->sysv_gregno;

	  /* Aggregates and IEEE quad get passed by reference, so they
	     occupy a single pointer-sized word here.  */
	  if ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode)
	    n_words = 1;
	  else
	    n_words = RS6000_ARG_SIZE (mode, type);

	  /* Long long and SPE vectors are put in odd registers.  */
	  if (n_words == 2 && (gregno & 1) == 0)
	    gregno += 1;

	  /* Long long and SPE vectors are not split between registers
	     and stack.  */
	  if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
	    {
	      /* Unnamed SPE vector arguments get split into two
		 GPR-sized pieces, returned as a PARALLEL.  */
	      if (TARGET_SPE && TARGET_SPE_ABI
		  && SPE_VECTOR_MODE (mode) && !named)
		{
		  rtx r1, r2;
		  enum machine_mode m = GET_MODE_INNER (mode);

		  r1 = gen_rtx_REG (m, gregno);
		  r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
		  r2 = gen_rtx_REG (m, gregno + 1);
		  r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
		  return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
		}
	      return gen_rtx_REG (mode, gregno);
	    }
	  else
	    return NULL;
	}
    }
  else
    {
      /* AIX-style ABIs: GPR argument words are allocated in order;
	 on 32-bit, a doubleword-aligned argument skips an odd word.  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
		   && function_arg_boundary (mode, type) == 64) ? 1 : 0;
      int align_words = cum->words + align;

      /* Variable-sized types always go on the stack.  */
      if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	return NULL_RTX;

      if (USE_FP_FOR_ARG_P (*cum, mode, type))
	{
	  if (! type
	      || ((cum->nargs_prototype > 0)
		  /* IBM AIX extended its linkage convention definition always
		     to require FP args after register save area hole on the
		     stack.  */
		  && (DEFAULT_ABI != ABI_AIX
		      || ! TARGET_XL_CALL
		      || (align_words < GP_ARG_NUM_REG))))
	    return gen_rtx_REG (mode, cum->fregno);

	  /* Without a usable prototype the callee may look for the
	     value in GPRs (or on the stack), so pass it both in an FPR
	     and in the corresponding GPR slot via a PARALLEL.  */
	  return gen_rtx_PARALLEL (mode,
	    gen_rtvec (2,
		       gen_rtx_EXPR_LIST (VOIDmode,
				((align_words >= GP_ARG_NUM_REG)
				 ? NULL_RTX
				 : (align_words
				    + RS6000_ARG_SIZE (mode, type)
				    > GP_ARG_NUM_REG
				    /* If this is partially on the stack, then
				       we only include the portion actually
				       in registers here.  */
				    ? gen_rtx_REG (SImode,
						   GP_ARG_MIN_REG + align_words)
				    : gen_rtx_REG (mode,
						   GP_ARG_MIN_REG + align_words))),
				const0_rtx),
		       gen_rtx_EXPR_LIST (VOIDmode,
					  gen_rtx_REG (mode, cum->fregno),
					  const0_rtx)));
	}
      else if (align_words < GP_ARG_NUM_REG)
	return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
      else
	return NULL_RTX;
    }
}
3207 \f
3208 /* For an arg passed partly in registers and partly in memory,
3209 this is the number of registers used.
3210 For args passed entirely in registers or entirely in memory, zero. */
3211
3212 int
3213 function_arg_partial_nregs (cum, mode, type, named)
3214 CUMULATIVE_ARGS *cum;
3215 enum machine_mode mode;
3216 tree type;
3217 int named ATTRIBUTE_UNUSED;
3218 {
3219 if (DEFAULT_ABI == ABI_V4)
3220 return 0;
3221
3222 if (USE_FP_FOR_ARG_P (*cum, mode, type)
3223 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
3224 {
3225 if (cum->nargs_prototype >= 0)
3226 return 0;
3227 }
3228
3229 if (cum->words < GP_ARG_NUM_REG
3230 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
3231 {
3232 int ret = GP_ARG_NUM_REG - cum->words;
3233 if (ret && TARGET_DEBUG_ARG)
3234 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
3235
3236 return ret;
3237 }
3238
3239 return 0;
3240 }
3241 \f
3242 /* A C expression that indicates when an argument must be passed by
3243 reference. If nonzero for an argument, a copy of that argument is
3244 made in memory and a pointer to the argument is passed instead of
3245 the argument itself. The pointer is passed in whatever way is
3246 appropriate for passing a pointer to that type.
3247
3248 Under V.4, structures and unions are passed by reference. */
3249
3250 int
3251 function_arg_pass_by_reference (cum, mode, type, named)
3252 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
3253 enum machine_mode mode ATTRIBUTE_UNUSED;
3254 tree type;
3255 int named ATTRIBUTE_UNUSED;
3256 {
3257 if (DEFAULT_ABI == ABI_V4
3258 && ((type && AGGREGATE_TYPE_P (type))
3259 || mode == TFmode))
3260 {
3261 if (TARGET_DEBUG_ARG)
3262 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
3263
3264 return 1;
3265 }
3266
3267 return 0;
3268 }
3269 \f
/* Perform any needed actions for a function that is receiving a
   variable number of arguments.
3272
3273 CUM is as above.
3274
3275 MODE and TYPE are the mode and type of the current parameter.
3276
3277 PRETEND_SIZE is a variable that should be set to the amount of stack
3278 that must be pushed by the prolog to pretend that our caller pushed
3279 it.
3280
3281 Normally, this macro will push all remaining incoming registers on the
3282 stack and set PRETEND_SIZE to the length of the registers pushed. */
3283
3284 void
3285 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
3286 CUMULATIVE_ARGS *cum;
3287 enum machine_mode mode;
3288 tree type;
3289 int *pretend_size;
3290 int no_rtl;
3291
3292 {
3293 CUMULATIVE_ARGS next_cum;
3294 int reg_size = TARGET_32BIT ? 4 : 8;
3295 rtx save_area = NULL_RTX, mem;
3296 int first_reg_offset, set;
3297 tree fntype;
3298 int stdarg_p;
3299
3300 fntype = TREE_TYPE (current_function_decl);
3301 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
3302 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3303 != void_type_node));
3304
3305 /* For varargs, we do not want to skip the dummy va_dcl argument.
3306 For stdargs, we do want to skip the last named argument. */
3307 next_cum = *cum;
3308 if (stdarg_p)
3309 function_arg_advance (&next_cum, mode, type, 1);
3310
3311 if (DEFAULT_ABI == ABI_V4)
3312 {
3313 /* Indicate to allocate space on the stack for varargs save area. */
3314 cfun->machine->sysv_varargs_p = 1;
3315 if (! no_rtl)
3316 save_area = plus_constant (virtual_stack_vars_rtx,
3317 - RS6000_VARARGS_SIZE);
3318
3319 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
3320 }
3321 else
3322 {
3323 first_reg_offset = next_cum.words;
3324 save_area = virtual_incoming_args_rtx;
3325 cfun->machine->sysv_varargs_p = 0;
3326
3327 if (MUST_PASS_IN_STACK (mode, type))
3328 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
3329 }
3330
3331 set = get_varargs_alias_set ();
3332 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
3333 {
3334 mem = gen_rtx_MEM (BLKmode,
3335 plus_constant (save_area,
3336 first_reg_offset * reg_size)),
3337 set_mem_alias_set (mem, set);
3338 set_mem_align (mem, BITS_PER_WORD);
3339
3340 move_block_from_reg
3341 (GP_ARG_MIN_REG + first_reg_offset, mem,
3342 GP_ARG_NUM_REG - first_reg_offset,
3343 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
3344
3345 /* ??? Does ABI_V4 need this at all? */
3346 *pretend_size = (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD;
3347 }
3348
3349 /* Save FP registers if needed. */
3350 if (DEFAULT_ABI == ABI_V4
3351 && TARGET_HARD_FLOAT && TARGET_FPRS
3352 && ! no_rtl
3353 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
3354 {
3355 int fregno = next_cum.fregno;
3356 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
3357 rtx lab = gen_label_rtx ();
3358 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
3359
3360 emit_jump_insn (gen_rtx_SET (VOIDmode,
3361 pc_rtx,
3362 gen_rtx_IF_THEN_ELSE (VOIDmode,
3363 gen_rtx_NE (VOIDmode, cr1,
3364 const0_rtx),
3365 gen_rtx_LABEL_REF (VOIDmode, lab),
3366 pc_rtx)));
3367
3368 while (fregno <= FP_ARG_V4_MAX_REG)
3369 {
3370 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
3371 set_mem_alias_set (mem, set);
3372 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
3373 fregno++;
3374 off += 8;
3375 }
3376
3377 emit_label (lab);
3378 }
3379 }
3380
3381 /* Create the va_list data type. */
3382
tree
rs6000_build_va_list ()
{
  tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;

  /* For AIX, prefer 'char *' because that's what the system
     header files like.  */
  if (DEFAULT_ABI != ABI_V4)
    return build_pointer_type (char_type_node);

  /* The V.4 va_list is a four-field record:
       gpr -- GP argument registers consumed (set by rs6000_va_start),
       fpr -- FP argument registers consumed,
       overflow_arg_area -- address of the next stack-passed argument,
       reg_save_area     -- base of the register save area.  */
  record = (*lang_hooks.types.make_type) (RECORD_TYPE);
  type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);

  f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
		      unsigned_char_type_node);
  f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
		      unsigned_char_type_node);
  f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
		      ptr_type_node);
  f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
		      ptr_type_node);

  DECL_FIELD_CONTEXT (f_gpr) = record;
  DECL_FIELD_CONTEXT (f_fpr) = record;
  DECL_FIELD_CONTEXT (f_ovf) = record;
  DECL_FIELD_CONTEXT (f_sav) = record;

  /* Chain the fields onto the record in declaration order.  */
  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_gpr;
  TREE_CHAIN (f_gpr) = f_fpr;
  TREE_CHAIN (f_fpr) = f_ovf;
  TREE_CHAIN (f_ovf) = f_sav;

  layout_type (record);

  /* The correct type is an array type of one element.  */
  return build_array_type (record, build_index_type (size_zero_node));
}
3422
3423 /* Implement va_start. */
3424
void
rs6000_va_start (valist, nextarg)
     tree valist;
     rtx nextarg;
{
  HOST_WIDE_INT words, n_gpr, n_fpr;
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;

  /* Only SVR4 needs something special.  */
  if (DEFAULT_ABI != ABI_V4)
    {
      std_expand_builtin_va_start (valist, nextarg);
      return;
    }

  /* The va_list record fields, in the order laid down by
     rs6000_build_va_list.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  /* Dereference the one-element-array va_list and build references
     to each of its fields.  */
  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Count number of gp and fp argument registers used.  */
  words = current_function_args_info.words;
  n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
  n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;

  if (TARGET_DEBUG_ARG)
    {
      fputs ("va_start: words = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
      fputs (", n_gpr = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
      fputs (", n_fpr = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
      putc ('\n', stderr);
    }

  /* Store the register counts consumed by the named arguments.  */
  t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the overflow area.  */
  t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
  if (words != 0)
    t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
	       build_int_2 (words * UNITS_PER_WORD, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the register save area.  */
  t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
  t = build (PLUS_EXPR, TREE_TYPE (sav), t,
	     build_int_2 (-RS6000_VARARGS_SIZE, -1));
  t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
3493
3494 /* Implement va_arg. */
3495
rtx
rs6000_va_arg (valist, type)
     tree valist, type;
{
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
  rtx lab_false, lab_over, addr_rtx, r;

  if (DEFAULT_ABI != ABI_V4)
    return std_expand_builtin_va_arg (valist, type);

  /* The va_list record fields, in the order laid down by
     rs6000_build_va_list.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Argument size in bytes and in words, rounded up.  */
  size = int_size_in_bytes (type);
  rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  /* Classify the argument: which register counter it uses, how many
     registers it occupies, and the offset/scale into the save area.  */
  if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
    {
      /* Aggregates and long doubles are passed by reference.  */
      indirect_p = 1;
      reg = gpr;
      n_reg = 1;
      sav_ofs = 0;
      sav_scale = 4;
      size = UNITS_PER_WORD;
      rsize = 1;
    }
  else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
    {
      /* FP args go in FP registers, if present.  The FP saves sit
	 after the 8 GPR words (8 * 4 bytes) in the save area.  */
      indirect_p = 0;
      reg = fpr;
      n_reg = 1;
      sav_ofs = 8*4;
      sav_scale = 8;
    }
  else
    {
      /* Otherwise into GP registers.  */
      indirect_p = 0;
      reg = gpr;
      n_reg = rsize;
      sav_ofs = 0;
      sav_scale = 4;
    }

  /* Pull the value out of the saved registers ...  */

  lab_false = gen_label_rtx ();
  lab_over = gen_label_rtx ();
  addr_rtx = gen_reg_rtx (Pmode);

  /* AltiVec vectors never go in registers.  */
  if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
    {
      TREE_THIS_VOLATILE (reg) = 1;
      /* If the register counter is already past the last argument
	 register that could hold this value, take the overflow path.  */
      emit_cmp_and_jump_insns
	(expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
	 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
	 lab_false);

      /* Long long is aligned in the registers.  */
      if (n_reg > 1)
	{
	  u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
		     build_int_2 (n_reg - 1, 0));
	  u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
	  u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
	  TREE_SIDE_EFFECTS (u) = 1;
	  expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
	}

      if (sav_ofs)
	t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
      else
	t = sav;

      /* addr = sav + sav_ofs + reg * sav_scale, bumping the register
	 counter by n_reg as a side effect.  */
      u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
		 build_int_2 (n_reg, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      u = build1 (CONVERT_EXPR, integer_type_node, u);
      TREE_SIDE_EFFECTS (u) = 1;

      u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      t = build (PLUS_EXPR, ptr_type_node, t, u);
      TREE_SIDE_EFFECTS (t) = 1;

      r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
      if (r != addr_rtx)
	emit_move_insn (addr_rtx, r);

      emit_jump_insn (gen_jump (lab_over));
      emit_barrier ();
    }

  emit_label (lab_false);

  /* ... otherwise out of the overflow area.  */

  /* Make sure we don't find reg 7 for the next int arg.

     All AltiVec vectors go in the overflow area.  So in the AltiVec
     case we need to get the vectors from the overflow area, but
     remember where the GPRs and FPRs are.  */
  if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
		    || !TARGET_ALTIVEC))
    {
      t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  /* Care for on-stack alignment if needed.  */
  if (rsize <= 1)
    t = ovf;
  else
    {
      int align;

      /* AltiVec vectors are 16 byte aligned.  */
      if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
	align = 15;
      else
	align = 7;

      /* Round the overflow pointer up to the alignment boundary.  */
      t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
      t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
    }
  t = save_expr (t);

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);

  /* Advance the overflow pointer past this argument.  */
  t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_over);

  /* For by-reference arguments the slot holds a pointer; load it to
     get the actual argument address.  */
  if (indirect_p)
    {
      r = gen_rtx_MEM (Pmode, addr_rtx);
      set_mem_alias_set (r, get_varargs_alias_set ());
      emit_move_insn (addr_rtx, r);
    }

  return addr_rtx;
}
3659
3660 /* Builtins. */
3661
/* Register target-specific builtin NAME with function type TYPE and
   insn code CODE, but only when the MASK bits are enabled in
   target_flags.  */
#define def_builtin(MASK, NAME, TYPE, CODE) \
do { \
  if ((MASK) & target_flags) \
    builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
NULL, NULL_TREE); \
} while (0)
3668
3669 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3670
/* Each entry: enabling target_flags mask, insn code implementing the
   builtin, its user-visible name, and its builtin enum code.  */
static const struct builtin_description bdesc_3arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
};
3697
3698 /* DST operations: void foo (void *, const int, const char). */
3699
/* Data-stream touch builtins; entry layout matches bdesc_3arg.  */
static const struct builtin_description bdesc_dst[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
};
3707
3708 /* Simple binary operations: VECc = foo (VECa, VECb). */
3709
3710 static struct builtin_description bdesc_2arg[] =
3711 {
3712 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3713 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3714 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3715 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3716 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3717 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3718 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3719 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3720 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3721 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3722 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3723 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3724 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3725 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3726 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3727 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3728 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3729 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3730 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3731 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3732 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3733 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3734 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3735 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3736 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3737 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3738 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3739 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3740 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3741 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3742 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3743 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3744 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3745 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3746 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3747 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3748 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3749 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3750 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3751 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3752 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3753 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3754 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3755 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3756 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3757 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3758 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3759 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3760 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3761 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3762 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3763 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3764 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3765 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3766 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3767 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3768 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3769 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3770 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3771 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3772 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3773 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3774 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3775 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3776 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3777 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3778 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3779 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3780 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3781 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3782 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3783 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3784 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3785 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3786 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3787 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3788 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3789 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3790 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3791 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3792 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3793 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3794 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3795 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3796 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3797 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3798 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3799 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3800 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3801 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3802 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3803 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3804 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3805 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3806 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3807 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3808 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3809 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3810 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3811 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3812 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3813 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3814 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3815 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3816 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3817 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3818 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3819 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3820 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3821 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3822 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3823 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3824 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3825
3826 /* Place holder, leave as first spe builtin. */
3827 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
3828 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
3829 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
3830 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
3831 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
3832 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
3833 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
3834 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
3835 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
3836 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
3837 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
3838 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
3839 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
3840 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
3841 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
3842 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
3843 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
3844 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
3845 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
3846 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
3847 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
3848 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
3849 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
3850 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
3851 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
3852 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
3853 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
3854 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
3855 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
3856 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
3857 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
3858 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
3859 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
3860 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
3861 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
3862 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
3863 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
3864 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
3865 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
3866 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
3867 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
3868 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
3869 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
3870 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
3871 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
3872 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
3873 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
3874 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
3875 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
3876 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
3877 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
3878 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
3879 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
3880 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
3881 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
3882 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
3883 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
3884 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
3885 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
3886 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
3887 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
3888 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
3889 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
3890 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
3891 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
3892 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
3893 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
3894 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
3895 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
3896 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
3897 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
3898 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
3899 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
3900 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
3901 { 0, CODE_FOR_spe_evmwlsmf, "__builtin_spe_evmwlsmf", SPE_BUILTIN_EVMWLSMF },
3902 { 0, CODE_FOR_spe_evmwlsmfa, "__builtin_spe_evmwlsmfa", SPE_BUILTIN_EVMWLSMFA },
3903 { 0, CODE_FOR_spe_evmwlsmfaaw, "__builtin_spe_evmwlsmfaaw", SPE_BUILTIN_EVMWLSMFAAW },
3904 { 0, CODE_FOR_spe_evmwlsmfanw, "__builtin_spe_evmwlsmfanw", SPE_BUILTIN_EVMWLSMFANW },
3905 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
3906 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
3907 { 0, CODE_FOR_spe_evmwlssf, "__builtin_spe_evmwlssf", SPE_BUILTIN_EVMWLSSF },
3908 { 0, CODE_FOR_spe_evmwlssfa, "__builtin_spe_evmwlssfa", SPE_BUILTIN_EVMWLSSFA },
3909 { 0, CODE_FOR_spe_evmwlssfaaw, "__builtin_spe_evmwlssfaaw", SPE_BUILTIN_EVMWLSSFAAW },
3910 { 0, CODE_FOR_spe_evmwlssfanw, "__builtin_spe_evmwlssfanw", SPE_BUILTIN_EVMWLSSFANW },
3911 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
3912 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
3913 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
3914 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
3915 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
3916 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
3917 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
3918 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
3919 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
3920 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
3921 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
3922 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
3923 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
3924 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
3925 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
3926 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
3927 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
3928 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
3929 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
3930 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
3931 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
3932 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
3933 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
3934 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
3935 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
3936 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
3937 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
3938 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
3939 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
3940 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
3941 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
3942 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
3943 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
3944
3945 /* SPE binary operations expecting a 5-bit unsigned literal. */
3946 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
3947
3948 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
3949 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
3950 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
3951 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
3952 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
3953 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
3954 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
3955 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
3956 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
3957 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
3958 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
3959 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
3960 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
3961 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
3962 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
3963 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
3964 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
3965 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
3966 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
3967 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
3968 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
3969 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
3970 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
3971 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
3972 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
3973 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
3974
3975 /* Place-holder. Leave as last binary SPE builtin. */
3976 { 0, CODE_FOR_spe_evxor, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
3977 };
3978
/* AltiVec predicates.  */

/* Description of an AltiVec predicate builtin.  Unlike plain builtins,
   a predicate carries both an insn pattern and the literal comparison
   mnemonic (with the "." record form) that the pattern emits.  */

struct builtin_description_predicates
{
  /* CPU feature mask (MASK_ALTIVEC etc.) required for this builtin.  */
  const unsigned int mask;
  /* Insn pattern used to expand the comparison.  */
  const enum insn_code icode;
  /* Assembler opcode string passed to the pattern via a SYMBOL_REF.  */
  const char *opcode;
  /* User-visible builtin function name.  */
  const char *const name;
  /* Builtin function code from enum rs6000_builtins.  */
  const enum rs6000_builtins code;
};
3989
/* Table of AltiVec predicate builtins ("vec_any_*"/"vec_all_*" forms);
   each entry pairs a predicate insn pattern with the record-form
   compare mnemonic it should emit.  */
static const struct builtin_description_predicates bdesc_altivec_preds[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
};
4006
/* SPE predicate builtins.  The first and last entries are used as
   sentinels by the builtin-init/expand code, so their positions must
   not change.  */
static struct builtin_description bdesc_spe_predicates[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
};
4024
/* SPE evsel predicates: comparisons used as the condition of an evsel
   (element select) operation.  The first and last entries are used as
   sentinels, so their positions must not change.  */
static struct builtin_description bdesc_spe_evsel[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
};
4042
/* ABS* operations: AltiVec vector absolute value and saturating
   absolute value builtins, expanded via altivec_expand_abs_builtin.  */

static const struct builtin_description bdesc_abs[] =
{
  { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
  { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
};
4055
/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
   foo (VECa).  Expanded by rs6000_expand_unop_builtin.  */

static struct builtin_description bdesc_1arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
  { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },

  /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
     end with SPE_BUILTIN_EVSUBFUSIAAW.  The init/expand code iterates
     over that range, so keep these entries contiguous.  */
  { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
  { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
  { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
  { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
  { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
  { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
  { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
  { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
  { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
  { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
  { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
  { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
  { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
  { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
  { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
  { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
  { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
  { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
  { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
  { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
  { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
  { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
  { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
  { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
  { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
  { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
  { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
  { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
  { 0, CODE_FOR_spe_evsplatfi, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI },
  { 0, CODE_FOR_spe_evsplati, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI },

  /* Place-holder.  Leave as last unary SPE builtin.  */
  { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
};
4115
4116 static rtx
4117 rs6000_expand_unop_builtin (icode, arglist, target)
4118 enum insn_code icode;
4119 tree arglist;
4120 rtx target;
4121 {
4122 rtx pat;
4123 tree arg0 = TREE_VALUE (arglist);
4124 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4125 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4126 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4127
4128 if (icode == CODE_FOR_nothing)
4129 /* Builtin not supported on this processor. */
4130 return 0;
4131
4132 /* If we got invalid arguments bail out before generating bad rtl. */
4133 if (arg0 == error_mark_node)
4134 return const0_rtx;
4135
4136 if (icode == CODE_FOR_altivec_vspltisb
4137 || icode == CODE_FOR_altivec_vspltish
4138 || icode == CODE_FOR_altivec_vspltisw
4139 || icode == CODE_FOR_spe_evsplatfi
4140 || icode == CODE_FOR_spe_evsplati)
4141 {
4142 /* Only allow 5-bit *signed* literals. */
4143 if (GET_CODE (op0) != CONST_INT
4144 || INTVAL (op0) > 0x1f
4145 || INTVAL (op0) < -0x1f)
4146 {
4147 error ("argument 1 must be a 5-bit signed literal");
4148 return const0_rtx;
4149 }
4150 }
4151
4152 if (target == 0
4153 || GET_MODE (target) != tmode
4154 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4155 target = gen_reg_rtx (tmode);
4156
4157 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4158 op0 = copy_to_mode_reg (mode0, op0);
4159
4160 pat = GEN_FCN (icode) (target, op0);
4161 if (! pat)
4162 return 0;
4163 emit_insn (pat);
4164
4165 return target;
4166 }
4167
4168 static rtx
4169 altivec_expand_abs_builtin (icode, arglist, target)
4170 enum insn_code icode;
4171 tree arglist;
4172 rtx target;
4173 {
4174 rtx pat, scratch1, scratch2;
4175 tree arg0 = TREE_VALUE (arglist);
4176 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4177 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4178 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4179
4180 /* If we have invalid arguments, bail out before generating bad rtl. */
4181 if (arg0 == error_mark_node)
4182 return const0_rtx;
4183
4184 if (target == 0
4185 || GET_MODE (target) != tmode
4186 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4187 target = gen_reg_rtx (tmode);
4188
4189 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4190 op0 = copy_to_mode_reg (mode0, op0);
4191
4192 scratch1 = gen_reg_rtx (mode0);
4193 scratch2 = gen_reg_rtx (mode0);
4194
4195 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
4196 if (! pat)
4197 return 0;
4198 emit_insn (pat);
4199
4200 return target;
4201 }
4202
4203 static rtx
4204 rs6000_expand_binop_builtin (icode, arglist, target)
4205 enum insn_code icode;
4206 tree arglist;
4207 rtx target;
4208 {
4209 rtx pat;
4210 tree arg0 = TREE_VALUE (arglist);
4211 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4212 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4213 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4214 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4215 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4216 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4217
4218 if (icode == CODE_FOR_nothing)
4219 /* Builtin not supported on this processor. */
4220 return 0;
4221
4222 /* If we got invalid arguments bail out before generating bad rtl. */
4223 if (arg0 == error_mark_node || arg1 == error_mark_node)
4224 return const0_rtx;
4225
4226 if (icode == CODE_FOR_altivec_vcfux
4227 || icode == CODE_FOR_altivec_vcfsx
4228 || icode == CODE_FOR_altivec_vctsxs
4229 || icode == CODE_FOR_altivec_vctuxs
4230 || icode == CODE_FOR_altivec_vspltb
4231 || icode == CODE_FOR_altivec_vsplth
4232 || icode == CODE_FOR_altivec_vspltw
4233 || icode == CODE_FOR_spe_evaddiw
4234 || icode == CODE_FOR_spe_evldd
4235 || icode == CODE_FOR_spe_evldh
4236 || icode == CODE_FOR_spe_evldw
4237 || icode == CODE_FOR_spe_evlhhesplat
4238 || icode == CODE_FOR_spe_evlhhossplat
4239 || icode == CODE_FOR_spe_evlhhousplat
4240 || icode == CODE_FOR_spe_evlwhe
4241 || icode == CODE_FOR_spe_evlwhos
4242 || icode == CODE_FOR_spe_evlwhou
4243 || icode == CODE_FOR_spe_evlwhsplat
4244 || icode == CODE_FOR_spe_evlwwsplat
4245 || icode == CODE_FOR_spe_evrlwi
4246 || icode == CODE_FOR_spe_evslwi
4247 || icode == CODE_FOR_spe_evsrwis
4248 || icode == CODE_FOR_spe_evsrwiu)
4249 {
4250 /* Only allow 5-bit unsigned literals. */
4251 if (TREE_CODE (arg1) != INTEGER_CST
4252 || TREE_INT_CST_LOW (arg1) & ~0x1f)
4253 {
4254 error ("argument 2 must be a 5-bit unsigned literal");
4255 return const0_rtx;
4256 }
4257 }
4258
4259 if (target == 0
4260 || GET_MODE (target) != tmode
4261 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4262 target = gen_reg_rtx (tmode);
4263
4264 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4265 op0 = copy_to_mode_reg (mode0, op0);
4266 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4267 op1 = copy_to_mode_reg (mode1, op1);
4268
4269 pat = GEN_FCN (icode) (target, op0, op1);
4270 if (! pat)
4271 return 0;
4272 emit_insn (pat);
4273
4274 return target;
4275 }
4276
/* Expand an AltiVec predicate builtin.  ARGLIST holds the CR6 test
   selector (argument 1, must be a constant 0..3) followed by the two
   vectors to compare.  ICODE is the compare pattern and OPCODE the
   assembler mnemonic it should emit.  Returns an SImode rtx holding
   the 0/1 predicate result, or const0_rtx after a diagnostic.  */

static rtx
altivec_expand_predicate_builtin (icode, opcode, arglist, target)
     enum insn_code icode;
     const char *opcode;
     tree arglist;
     rtx target;
{
  rtx pat, scratch;
  tree cr6_form = TREE_VALUE (arglist);
  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  enum machine_mode tmode = SImode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int cr6_form_int;

  if (TREE_CODE (cr6_form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_altivec_predicate must be a constant");
      return const0_rtx;
    }
  else
    cr6_form_int = TREE_INT_CST_LOW (cr6_form);

  /* Both compare operands must share a mode.  */
  if (mode0 != mode1)
    abort ();

  /* If we have invalid arguments, bail out before generating bad rtl.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  /* The vector compare result itself is discarded; only the CR6 bits
     the compare sets are read back below.  */
  scratch = gen_reg_rtx (mode0);

  /* The opcode string is handed to the pattern as a SYMBOL_REF so the
     output template can print the requested mnemonic.  */
  pat = GEN_FCN (icode) (scratch, op0, op1,
			 gen_rtx (SYMBOL_REF, Pmode, opcode));
  if (! pat)
    return 0;
  emit_insn (pat);

  /* The vec_any* and vec_all* predicates use the same opcodes for two
     different operations, but the bits in CR6 will be different
     depending on what information we want.  So we have to play tricks
     with CR6 to get the right bits out.

     If you think this is disgusting, look at the specs for the
     AltiVec predicates.  */

  switch (cr6_form_int)
    {
    case 0:
      emit_insn (gen_cr6_test_for_zero (target));
      break;
    case 1:
      emit_insn (gen_cr6_test_for_zero_reverse (target));
      break;
    case 2:
      emit_insn (gen_cr6_test_for_lt (target));
      break;
    case 3:
      emit_insn (gen_cr6_test_for_lt_reverse (target));
      break;
    default:
      error ("argument 1 of __builtin_altivec_predicate is out of range");
      break;
    }

  return target;
}
4357
4358 static rtx
4359 altivec_expand_stv_builtin (icode, arglist)
4360 enum insn_code icode;
4361 tree arglist;
4362 {
4363 tree arg0 = TREE_VALUE (arglist);
4364 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4365 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4366 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4367 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4368 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4369 rtx pat;
4370 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
4371 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
4372 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
4373
4374 /* Invalid arguments. Bail before doing anything stoopid! */
4375 if (arg0 == error_mark_node
4376 || arg1 == error_mark_node
4377 || arg2 == error_mark_node)
4378 return const0_rtx;
4379
4380 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
4381 op0 = copy_to_mode_reg (mode2, op0);
4382 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
4383 op1 = copy_to_mode_reg (mode0, op1);
4384 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
4385 op2 = copy_to_mode_reg (mode1, op2);
4386
4387 pat = GEN_FCN (icode) (op1, op2, op0);
4388 if (pat)
4389 emit_insn (pat);
4390 return NULL_RTX;
4391 }
4392
4393 static rtx
4394 rs6000_expand_ternop_builtin (icode, arglist, target)
4395 enum insn_code icode;
4396 tree arglist;
4397 rtx target;
4398 {
4399 rtx pat;
4400 tree arg0 = TREE_VALUE (arglist);
4401 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4402 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4403 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4404 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4405 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4406 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4407 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4408 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4409 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
4410
4411 if (icode == CODE_FOR_nothing)
4412 /* Builtin not supported on this processor. */
4413 return 0;
4414
4415 /* If we got invalid arguments bail out before generating bad rtl. */
4416 if (arg0 == error_mark_node
4417 || arg1 == error_mark_node
4418 || arg2 == error_mark_node)
4419 return const0_rtx;
4420
4421 if (icode == CODE_FOR_altivec_vsldoi_4sf
4422 || icode == CODE_FOR_altivec_vsldoi_4si
4423 || icode == CODE_FOR_altivec_vsldoi_8hi
4424 || icode == CODE_FOR_altivec_vsldoi_16qi)
4425 {
4426 /* Only allow 4-bit unsigned literals. */
4427 if (TREE_CODE (arg2) != INTEGER_CST
4428 || TREE_INT_CST_LOW (arg2) & ~0xf)
4429 {
4430 error ("argument 3 must be a 4-bit unsigned literal");
4431 return const0_rtx;
4432 }
4433 }
4434
4435 if (target == 0
4436 || GET_MODE (target) != tmode
4437 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4438 target = gen_reg_rtx (tmode);
4439
4440 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4441 op0 = copy_to_mode_reg (mode0, op0);
4442 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4443 op1 = copy_to_mode_reg (mode1, op1);
4444 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
4445 op2 = copy_to_mode_reg (mode2, op2);
4446
4447 pat = GEN_FCN (icode) (target, op0, op1, op2);
4448 if (! pat)
4449 return 0;
4450 emit_insn (pat);
4451
4452 return target;
4453 }
4454
4455 /* Expand the lvx builtins. */
4456 static rtx
4457 altivec_expand_ld_builtin (exp, target, expandedp)
4458 tree exp;
4459 rtx target;
4460 bool *expandedp;
4461 {
4462 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4463 tree arglist = TREE_OPERAND (exp, 1);
4464 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4465 tree arg0;
4466 enum machine_mode tmode, mode0;
4467 rtx pat, op0;
4468 enum insn_code icode;
4469
4470 switch (fcode)
4471 {
4472 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
4473 icode = CODE_FOR_altivec_lvx_16qi;
4474 break;
4475 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
4476 icode = CODE_FOR_altivec_lvx_8hi;
4477 break;
4478 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
4479 icode = CODE_FOR_altivec_lvx_4si;
4480 break;
4481 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
4482 icode = CODE_FOR_altivec_lvx_4sf;
4483 break;
4484 default:
4485 *expandedp = false;
4486 return NULL_RTX;
4487 }
4488
4489 *expandedp = true;
4490
4491 arg0 = TREE_VALUE (arglist);
4492 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4493 tmode = insn_data[icode].operand[0].mode;
4494 mode0 = insn_data[icode].operand[1].mode;
4495
4496 if (target == 0
4497 || GET_MODE (target) != tmode
4498 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4499 target = gen_reg_rtx (tmode);
4500
4501 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4502 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4503
4504 pat = GEN_FCN (icode) (target, op0);
4505 if (! pat)
4506 return 0;
4507 emit_insn (pat);
4508 return target;
4509 }
4510
4511 /* Expand the stvx builtins. */
4512 static rtx
4513 altivec_expand_st_builtin (exp, target, expandedp)
4514 tree exp;
4515 rtx target ATTRIBUTE_UNUSED;
4516 bool *expandedp;
4517 {
4518 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4519 tree arglist = TREE_OPERAND (exp, 1);
4520 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4521 tree arg0, arg1;
4522 enum machine_mode mode0, mode1;
4523 rtx pat, op0, op1;
4524 enum insn_code icode;
4525
4526 switch (fcode)
4527 {
4528 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
4529 icode = CODE_FOR_altivec_stvx_16qi;
4530 break;
4531 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
4532 icode = CODE_FOR_altivec_stvx_8hi;
4533 break;
4534 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
4535 icode = CODE_FOR_altivec_stvx_4si;
4536 break;
4537 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
4538 icode = CODE_FOR_altivec_stvx_4sf;
4539 break;
4540 default:
4541 *expandedp = false;
4542 return NULL_RTX;
4543 }
4544
4545 arg0 = TREE_VALUE (arglist);
4546 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4547 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4548 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4549 mode0 = insn_data[icode].operand[0].mode;
4550 mode1 = insn_data[icode].operand[1].mode;
4551
4552 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4553 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4554 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
4555 op1 = copy_to_mode_reg (mode1, op1);
4556
4557 pat = GEN_FCN (icode) (op0, op1);
4558 if (pat)
4559 emit_insn (pat);
4560
4561 *expandedp = true;
4562 return NULL_RTX;
4563 }
4564
4565 /* Expand the dst builtins. */
4566 static rtx
4567 altivec_expand_dst_builtin (exp, target, expandedp)
4568 tree exp;
4569 rtx target ATTRIBUTE_UNUSED;
4570 bool *expandedp;
4571 {
4572 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4573 tree arglist = TREE_OPERAND (exp, 1);
4574 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4575 tree arg0, arg1, arg2;
4576 enum machine_mode mode0, mode1, mode2;
4577 rtx pat, op0, op1, op2;
4578 struct builtin_description *d;
4579 size_t i;
4580
4581 *expandedp = false;
4582
4583 /* Handle DST variants. */
4584 d = (struct builtin_description *) bdesc_dst;
4585 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
4586 if (d->code == fcode)
4587 {
4588 arg0 = TREE_VALUE (arglist);
4589 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4590 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4591 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4592 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4593 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4594 mode0 = insn_data[d->icode].operand[0].mode;
4595 mode1 = insn_data[d->icode].operand[1].mode;
4596 mode2 = insn_data[d->icode].operand[2].mode;
4597
4598 /* Invalid arguments, bail out before generating bad rtl. */
4599 if (arg0 == error_mark_node
4600 || arg1 == error_mark_node
4601 || arg2 == error_mark_node)
4602 return const0_rtx;
4603
4604 if (TREE_CODE (arg2) != INTEGER_CST
4605 || TREE_INT_CST_LOW (arg2) & ~0x3)
4606 {
4607 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
4608 return const0_rtx;
4609 }
4610
4611 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
4612 op0 = copy_to_mode_reg (mode0, op0);
4613 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
4614 op1 = copy_to_mode_reg (mode1, op1);
4615
4616 pat = GEN_FCN (d->icode) (op0, op1, op2);
4617 if (pat != 0)
4618 emit_insn (pat);
4619
4620 *expandedp = true;
4621 return NULL_RTX;
4622 }
4623
4624 return NULL_RTX;
4625 }
4626
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.

   Dispatch order: the specialized ld/st/dst expanders run first, then
   the irregular cases in the switch, then the abs and predicate
   tables, and finally the LV* loads.  Anything still unmatched
   clears *EXPANDEDP.  */
static rtx
altivec_expand_builtin (exp, target, expandedp)
     tree exp;
     rtx target;
     bool *expandedp;
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  enum insn_code icode;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0;
  rtx op0, pat;
  enum machine_mode tmode, mode0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);

  /* Each helper sets *EXPANDEDP when it recognizes FCODE.  */
  target = altivec_expand_ld_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_st_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_dst_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  /* Assume success; reset at the bottom if nothing matched.  */
  *expandedp = true;

  switch (fcode)
    {
    case ALTIVEC_BUILTIN_STVX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
    case ALTIVEC_BUILTIN_STVEBX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
    case ALTIVEC_BUILTIN_STVEHX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
    case ALTIVEC_BUILTIN_STVEWX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
    case ALTIVEC_BUILTIN_STVXL:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);

    /* Move from the vector status and control register.  */
    case ALTIVEC_BUILTIN_MFVSCR:
      icode = CODE_FOR_altivec_mfvscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    /* Move to the vector status and control register.  */
    case ALTIVEC_BUILTIN_MTVSCR:
      icode = CODE_FOR_altivec_mtvscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSSALL:
      emit_insn (gen_altivec_dssall ());
      return NULL_RTX;

    /* Stop a single data stream; the stream tag must be a literal.  */
    case ALTIVEC_BUILTIN_DSS:
      icode = CODE_FOR_altivec_dss;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (arg0) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg0) & ~0x3)
	{
	  error ("argument to dss must be a 2-bit unsigned literal");
	  return const0_rtx;
	}

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      emit_insn (gen_altivec_dss (op0));
      return NULL_RTX;
    }

  /* Expand abs* operations.  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
    if (d->code == fcode)
      return altivec_expand_abs_builtin (d->icode, arglist, target);

  /* Expand the AltiVec predicates.  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
    if (dp->code == fcode)
      return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);

  /* LV* are funky.  We initialized them differently.  */
  switch (fcode)
    {
    case ALTIVEC_BUILTIN_LVSL:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVSR:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEBX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEHX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEWX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVXL:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx,
					  arglist, target);
    default:
      /* Not an LV* builtin; fall out to the not-handled path below.  */
      break;
    }

  *expandedp = false;
  return NULL_RTX;
}
4778
/* Binops that need to be initialized manually, but can be expanded
   automagically by rs6000_expand_binop_builtin.  */
static struct builtin_description bdesc_2arg_spe[] =
{
  /* Entries are { mask, icode, builtin name, builtin code }.  These
     are dispatched from spe_expand_builtin, which is only reached
     under TARGET_SPE, so the mask field is left zero here.  */
  { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
  { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
  { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
  { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
  { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
  { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
  { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
  { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
  { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
  { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
  { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
  { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
  { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
  { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
  { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
  { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
  { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
  { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
  { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
  { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
  { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
  { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
};
4806
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.

   This expands the SPE builtins that are not simple unary and binary
   operations.  */
static rtx
spe_expand_builtin (exp, target, expandedp)
     tree exp;
     rtx target;
     bool *expandedp;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg1, arg0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  enum insn_code icode;
  enum machine_mode tmode, mode0;
  rtx pat, op0;
  struct builtin_description *d;
  size_t i;

  /* Assume success; reset at the bottom if nothing matched.  */
  *expandedp = true;

  /* Syntax check for a 5-bit unsigned immediate.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDD:
    case SPE_BUILTIN_EVSTDH:
    case SPE_BUILTIN_EVSTDW:
    case SPE_BUILTIN_EVSTWHE:
    case SPE_BUILTIN_EVSTWHO:
    case SPE_BUILTIN_EVSTWWE:
    case SPE_BUILTIN_EVSTWWO:
      /* NOTE(review): this fetches the THIRD argument of the builtin,
	 while the diagnostic below calls it "argument 2" -- confirm
	 which numbering users expect.  */
      arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      if (TREE_CODE (arg1) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg1) & ~0x1f)
	{
	  error ("argument 2 must be a 5-bit unsigned literal");
	  return const0_rtx;
	}
      break;
    default:
      break;
    }

  /* Loads and other irregular binops registered in bdesc_2arg_spe.  */
  d = (struct builtin_description *) bdesc_2arg_spe;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
    if (d->code == fcode)
      return rs6000_expand_binop_builtin (d->icode, arglist, target);

  /* SPE predicate builtins.  */
  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_predicate_builtin (d->icode, arglist, target);

  /* evsel (conditional select) builtins.  */
  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_evsel_builtin (d->icode, arglist, target);

  /* The ev* stores share the three-operand shape of the AltiVec
     stores, so they reuse that expander.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDDX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
    case SPE_BUILTIN_EVSTDHX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
    case SPE_BUILTIN_EVSTDWX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
    case SPE_BUILTIN_EVSTWHEX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
    case SPE_BUILTIN_EVSTWHOX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
    case SPE_BUILTIN_EVSTWWEX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
    case SPE_BUILTIN_EVSTWWOX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
    case SPE_BUILTIN_EVSTDD:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
    case SPE_BUILTIN_EVSTDH:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
    case SPE_BUILTIN_EVSTDW:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
    case SPE_BUILTIN_EVSTWHE:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
    case SPE_BUILTIN_EVSTWHO:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
    case SPE_BUILTIN_EVSTWWE:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
    case SPE_BUILTIN_EVSTWWO:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
    /* Move from the SPE status and control register.  */
    case SPE_BUILTIN_MFSPEFSCR:
      icode = CODE_FOR_spe_mfspefscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;
    /* Move to the SPE status and control register.  */
    case SPE_BUILTIN_MTSPEFSCR:
      icode = CODE_FOR_spe_mtspefscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      if (arg0 == error_mark_node)
	return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;
    default:
      break;
    }

  *expandedp = false;
  return NULL_RTX;
}
4934
/* Expand one of the __builtin_spe_predicate builtins.  ICODE is the
   comparison insn; ARGLIST is (form, a, b) where FORM is a literal
   0-3 selecting the all/any/upper/lower variant; TARGET is a
   suggested SImode destination.  Returns the SImode result register,
   or const0_rtx after a diagnostic or failed expansion.  */
static rtx
spe_expand_predicate_builtin (icode, arglist, target)
     enum insn_code icode;
     tree arglist;
     rtx target;
{
  rtx pat, scratch, tmp;
  tree form = TREE_VALUE (arglist);
  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int form_int;
  enum rtx_code code;

  /* The variant selector must be a compile-time constant.  */
  if (TREE_CODE (form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_spe_predicate must be a constant");
      return const0_rtx;
    }
  else
    form_int = TREE_INT_CST_LOW (form);

  /* Both compare operands must share a mode.  */
  if (mode0 != mode1)
    abort ();

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != SImode
      || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
    target = gen_reg_rtx (SImode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  /* Emit the comparison into a fresh CC register.  */
  scratch = gen_reg_rtx (CCmode);

  pat = GEN_FCN (icode) (scratch, op0, op1);
  if (! pat)
    return const0_rtx;
  emit_insn (pat);

  /* There are 4 variants for each predicate: _any_, _all_, _upper_,
     _lower_.  We use one compare, but look in different bits of the
     CR for each variant.

     There are 2 elements in each SPE simd type (upper/lower).  The CR
     bits are set as follows:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     U     |   L    | (U | L) | (U & L)

     So, for an "all" relationship, BIT 3 would be set.
     For an "any" relationship, BIT 2 would be set.  Etc.

     Following traditional nomenclature, these bits map to:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     LT    | GT     | EQ      | OV

     Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
  */

  switch (form_int)
    {
      /* All variant.  OV bit.  */
    case 0:
      /* We need to get to the OV bit, which is the ORDERED bit.  We
	 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
	 that's ugly and will trigger a validate_condition_mode abort.
	 So let's just use another pattern.  */
      emit_insn (gen_move_from_CR_ov_bit (target, scratch));
      return target;
      /* Any variant.  EQ bit.  */
    case 1:
      code = EQ;
      break;
      /* Upper variant.  LT bit.  */
    case 2:
      code = LT;
      break;
      /* Lower variant.  GT bit.  */
    case 3:
      code = GT;
      break;
    default:
      error ("argument 1 of __builtin_spe_predicate is out of range");
      return const0_rtx;
    }

  /* Extract the chosen CR bit as a 0/1 SImode value.  */
  tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
  emit_move_insn (target, tmp);

  return target;
}
5036
5037 /* The evsel builtins look like this:
5038
5039 e = __builtin_spe_evsel_OP (a, b, c, d);
5040
5041 and work like this:
5042
5043 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
5044 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
5045 */
5046
5047 static rtx
5048 spe_expand_evsel_builtin (icode, arglist, target)
5049 enum insn_code icode;
5050 tree arglist;
5051 rtx target;
5052 {
5053 rtx pat, scratch;
5054 tree arg0 = TREE_VALUE (arglist);
5055 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5056 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5057 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
5058 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5059 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5060 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5061 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
5062 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5063 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5064
5065 if (mode0 != mode1)
5066 abort ();
5067
5068 if (arg0 == error_mark_node || arg1 == error_mark_node
5069 || arg2 == error_mark_node || arg3 == error_mark_node)
5070 return const0_rtx;
5071
5072 if (target == 0
5073 || GET_MODE (target) != mode0
5074 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
5075 target = gen_reg_rtx (mode0);
5076
5077 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5078 op0 = copy_to_mode_reg (mode0, op0);
5079 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5080 op1 = copy_to_mode_reg (mode0, op1);
5081 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5082 op2 = copy_to_mode_reg (mode0, op2);
5083 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
5084 op3 = copy_to_mode_reg (mode0, op3);
5085
5086 /* Generate the compare. */
5087 scratch = gen_reg_rtx (CCmode);
5088 pat = GEN_FCN (icode) (scratch, op0, op1);
5089 if (! pat)
5090 return const0_rtx;
5091 emit_insn (pat);
5092
5093 if (mode0 == V2SImode)
5094 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
5095 else
5096 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
5097
5098 return target;
5099 }
5100
5101 /* Expand an expression EXP that calls a built-in function,
5102 with result going to TARGET if that's convenient
5103 (and in mode MODE if that's convenient).
5104 SUBTARGET may be used as the target for computing one of EXP's operands.
5105 IGNORE is nonzero if the value is to be ignored. */
5106
5107 static rtx
5108 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
5109 tree exp;
5110 rtx target;
5111 rtx subtarget ATTRIBUTE_UNUSED;
5112 enum machine_mode mode ATTRIBUTE_UNUSED;
5113 int ignore ATTRIBUTE_UNUSED;
5114 {
5115 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5116 tree arglist = TREE_OPERAND (exp, 1);
5117 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5118 struct builtin_description *d;
5119 size_t i;
5120 rtx ret;
5121 bool success;
5122
5123 if (TARGET_ALTIVEC)
5124 {
5125 ret = altivec_expand_builtin (exp, target, &success);
5126
5127 if (success)
5128 return ret;
5129 }
5130 if (TARGET_SPE)
5131 {
5132 ret = spe_expand_builtin (exp, target, &success);
5133
5134 if (success)
5135 return ret;
5136 }
5137
5138 if (TARGET_ALTIVEC || TARGET_SPE)
5139 {
5140 /* Handle simple unary operations. */
5141 d = (struct builtin_description *) bdesc_1arg;
5142 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5143 if (d->code == fcode)
5144 return rs6000_expand_unop_builtin (d->icode, arglist, target);
5145
5146 /* Handle simple binary operations. */
5147 d = (struct builtin_description *) bdesc_2arg;
5148 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5149 if (d->code == fcode)
5150 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5151
5152 /* Handle simple ternary operations. */
5153 d = (struct builtin_description *) bdesc_3arg;
5154 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
5155 if (d->code == fcode)
5156 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
5157 }
5158
5159 abort ();
5160 return NULL_RTX;
5161 }
5162
/* Register the machine-specific builtins for the enabled target
   features.  The common unary/binary/ternary builtins are shared, so
   they are initialized whenever either SPE or AltiVec is on.  */
static void
rs6000_init_builtins ()
{
  if (TARGET_SPE)
    spe_init_builtins ();
  if (TARGET_ALTIVEC)
    altivec_init_builtins ();
  if (TARGET_ALTIVEC || TARGET_SPE)
    rs6000_common_init_builtins ();
}
5173
5174 /* Search through a set of builtins and enable the mask bits.
5175 DESC is an array of builtins.
5176 SIZE is the totaly number of builtins.
5177 START is the builtin enum at which to start.
5178 END is the builtin enum at which to end. */
5179 static void
5180 enable_mask_for_builtins (desc, size, start, end)
5181 struct builtin_description *desc;
5182 int size;
5183 enum rs6000_builtins start, end;
5184 {
5185 int i;
5186
5187 for (i = 0; i < size; ++i)
5188 if (desc[i].code == start)
5189 break;
5190
5191 if (i == size)
5192 return;
5193
5194 for (; i < size; ++i)
5195 {
5196 /* Flip all the bits on. */
5197 desc[i].mask = target_flags;
5198 if (desc[i].code == end)
5199 break;
5200 }
5201 }
5202
5203 static void
5204 spe_init_builtins ()
5205 {
5206 tree endlink = void_list_node;
5207 tree puint_type_node = build_pointer_type (unsigned_type_node);
5208 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
5209 tree pv2si_type_node = build_pointer_type (V2SI_type_node);
5210 struct builtin_description *d;
5211 size_t i;
5212
5213 tree v2si_ftype_4_v2si
5214 = build_function_type
5215 (V2SI_type_node,
5216 tree_cons (NULL_TREE, V2SI_type_node,
5217 tree_cons (NULL_TREE, V2SI_type_node,
5218 tree_cons (NULL_TREE, V2SI_type_node,
5219 tree_cons (NULL_TREE, V2SI_type_node,
5220 endlink)))));
5221
5222 tree v2sf_ftype_4_v2sf
5223 = build_function_type
5224 (V2SF_type_node,
5225 tree_cons (NULL_TREE, V2SF_type_node,
5226 tree_cons (NULL_TREE, V2SF_type_node,
5227 tree_cons (NULL_TREE, V2SF_type_node,
5228 tree_cons (NULL_TREE, V2SF_type_node,
5229 endlink)))));
5230
5231 tree int_ftype_int_v2si_v2si
5232 = build_function_type
5233 (integer_type_node,
5234 tree_cons (NULL_TREE, integer_type_node,
5235 tree_cons (NULL_TREE, V2SI_type_node,
5236 tree_cons (NULL_TREE, V2SI_type_node,
5237 endlink))));
5238
5239 tree int_ftype_int_v2sf_v2sf
5240 = build_function_type
5241 (integer_type_node,
5242 tree_cons (NULL_TREE, integer_type_node,
5243 tree_cons (NULL_TREE, V2SF_type_node,
5244 tree_cons (NULL_TREE, V2SF_type_node,
5245 endlink))));
5246
5247 tree void_ftype_v2si_puint_int
5248 = build_function_type (void_type_node,
5249 tree_cons (NULL_TREE, V2SI_type_node,
5250 tree_cons (NULL_TREE, puint_type_node,
5251 tree_cons (NULL_TREE,
5252 integer_type_node,
5253 endlink))));
5254
5255 tree void_ftype_v2si_puint_char
5256 = build_function_type (void_type_node,
5257 tree_cons (NULL_TREE, V2SI_type_node,
5258 tree_cons (NULL_TREE, puint_type_node,
5259 tree_cons (NULL_TREE,
5260 char_type_node,
5261 endlink))));
5262
5263 tree void_ftype_v2si_pv2si_int
5264 = build_function_type (void_type_node,
5265 tree_cons (NULL_TREE, V2SI_type_node,
5266 tree_cons (NULL_TREE, pv2si_type_node,
5267 tree_cons (NULL_TREE,
5268 integer_type_node,
5269 endlink))));
5270
5271 tree void_ftype_v2si_pv2si_char
5272 = build_function_type (void_type_node,
5273 tree_cons (NULL_TREE, V2SI_type_node,
5274 tree_cons (NULL_TREE, pv2si_type_node,
5275 tree_cons (NULL_TREE,
5276 char_type_node,
5277 endlink))));
5278
5279 tree void_ftype_int
5280 = build_function_type (void_type_node,
5281 tree_cons (NULL_TREE, integer_type_node, endlink));
5282
5283 tree int_ftype_void
5284 = build_function_type (integer_type_node,
5285 tree_cons (NULL_TREE, void_type_node, endlink));
5286
5287 tree v2si_ftype_pv2si_int
5288 = build_function_type (V2SI_type_node,
5289 tree_cons (NULL_TREE, pv2si_type_node,
5290 tree_cons (NULL_TREE, integer_type_node,
5291 endlink)));
5292
5293 tree v2si_ftype_puint_int
5294 = build_function_type (V2SI_type_node,
5295 tree_cons (NULL_TREE, puint_type_node,
5296 tree_cons (NULL_TREE, integer_type_node,
5297 endlink)));
5298
5299 tree v2si_ftype_pushort_int
5300 = build_function_type (V2SI_type_node,
5301 tree_cons (NULL_TREE, pushort_type_node,
5302 tree_cons (NULL_TREE, integer_type_node,
5303 endlink)));
5304
5305 /* The initialization of the simple binary and unary builtins is
5306 done in rs6000_common_init_builtins, but we have to enable the
5307 mask bits here manually because we have run out of `target_flags'
5308 bits. We really need to redesign this mask business. */
5309
5310 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
5311 ARRAY_SIZE (bdesc_2arg),
5312 SPE_BUILTIN_EVADDW,
5313 SPE_BUILTIN_EVXOR);
5314 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
5315 ARRAY_SIZE (bdesc_1arg),
5316 SPE_BUILTIN_EVABS,
5317 SPE_BUILTIN_EVSUBFUSIAAW);
5318 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
5319 ARRAY_SIZE (bdesc_spe_predicates),
5320 SPE_BUILTIN_EVCMPEQ,
5321 SPE_BUILTIN_EVFSTSTLT);
5322 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
5323 ARRAY_SIZE (bdesc_spe_evsel),
5324 SPE_BUILTIN_EVSEL_CMPGTS,
5325 SPE_BUILTIN_EVSEL_FSTSTEQ);
5326
5327 /* Initialize irregular SPE builtins. */
5328
5329 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
5330 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
5331 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
5332 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
5333 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
5334 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
5335 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
5336 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
5337 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
5338 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
5339 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
5340 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
5341 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
5342 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
5343 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
5344 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
5345
5346 /* Loads. */
5347 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
5348 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
5349 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
5350 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
5351 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
5352 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
5353 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
5354 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
5355 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
5356 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
5357 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
5358 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
5359 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
5360 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
5361 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
5362 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
5363 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
5364 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
5365 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
5366 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
5367 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
5368 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
5369
5370 /* Predicates. */
5371 d = (struct builtin_description *) bdesc_spe_predicates;
5372 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
5373 {
5374 tree type;
5375
5376 switch (insn_data[d->icode].operand[1].mode)
5377 {
5378 case V2SImode:
5379 type = int_ftype_int_v2si_v2si;
5380 break;
5381 case V2SFmode:
5382 type = int_ftype_int_v2sf_v2sf;
5383 break;
5384 default:
5385 abort ();
5386 }
5387
5388 def_builtin (d->mask, d->name, type, d->code);
5389 }
5390
5391 /* Evsel predicates. */
5392 d = (struct builtin_description *) bdesc_spe_evsel;
5393 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
5394 {
5395 tree type;
5396
5397 switch (insn_data[d->icode].operand[1].mode)
5398 {
5399 case V2SImode:
5400 type = v2si_ftype_4_v2si;
5401 break;
5402 case V2SFmode:
5403 type = v2sf_ftype_4_v2sf;
5404 break;
5405 default:
5406 abort ();
5407 }
5408
5409 def_builtin (d->mask, d->name, type, d->code);
5410 }
5411 }
5412
5413 static void
5414 altivec_init_builtins ()
5415 {
5416 struct builtin_description *d;
5417 struct builtin_description_predicates *dp;
5418 size_t i;
5419 tree pfloat_type_node = build_pointer_type (float_type_node);
5420 tree pint_type_node = build_pointer_type (integer_type_node);
5421 tree pshort_type_node = build_pointer_type (short_integer_type_node);
5422 tree pchar_type_node = build_pointer_type (char_type_node);
5423
5424 tree pvoid_type_node = build_pointer_type (void_type_node);
5425
5426 tree int_ftype_int_v4si_v4si
5427 = build_function_type_list (integer_type_node,
5428 integer_type_node, V4SI_type_node,
5429 V4SI_type_node, NULL_TREE);
5430 tree v4sf_ftype_pfloat
5431 = build_function_type_list (V4SF_type_node, pfloat_type_node, NULL_TREE);
5432 tree void_ftype_pfloat_v4sf
5433 = build_function_type_list (void_type_node,
5434 pfloat_type_node, V4SF_type_node, NULL_TREE);
5435 tree v4si_ftype_pint
5436 = build_function_type_list (V4SI_type_node, pint_type_node, NULL_TREE); tree void_ftype_pint_v4si
5437 = build_function_type_list (void_type_node,
5438 pint_type_node, V4SI_type_node, NULL_TREE);
5439 tree v8hi_ftype_pshort
5440 = build_function_type_list (V8HI_type_node, pshort_type_node, NULL_TREE);
5441 tree void_ftype_pshort_v8hi
5442 = build_function_type_list (void_type_node,
5443 pshort_type_node, V8HI_type_node, NULL_TREE);
5444 tree v16qi_ftype_pchar
5445 = build_function_type_list (V16QI_type_node, pchar_type_node, NULL_TREE);
5446 tree void_ftype_pchar_v16qi
5447 = build_function_type_list (void_type_node,
5448 pchar_type_node, V16QI_type_node, NULL_TREE);
5449 tree void_ftype_v4si
5450 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
5451 tree v8hi_ftype_void
5452 = build_function_type (V8HI_type_node, void_list_node);
5453 tree void_ftype_void
5454 = build_function_type (void_type_node, void_list_node);
5455 tree void_ftype_qi
5456 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
5457 tree v16qi_ftype_int_pvoid
5458 = build_function_type_list (V16QI_type_node,
5459 integer_type_node, pvoid_type_node, NULL_TREE);
5460 tree v8hi_ftype_int_pvoid
5461 = build_function_type_list (V8HI_type_node,
5462 integer_type_node, pvoid_type_node, NULL_TREE);
5463 tree v4si_ftype_int_pvoid
5464 = build_function_type_list (V4SI_type_node,
5465 integer_type_node, pvoid_type_node, NULL_TREE);
5466 tree void_ftype_v4si_int_pvoid
5467 = build_function_type_list (void_type_node,
5468 V4SI_type_node, integer_type_node,
5469 pvoid_type_node, NULL_TREE);
5470 tree void_ftype_v16qi_int_pvoid
5471 = build_function_type_list (void_type_node,
5472 V16QI_type_node, integer_type_node,
5473 pvoid_type_node, NULL_TREE);
5474 tree void_ftype_v8hi_int_pvoid
5475 = build_function_type_list (void_type_node,
5476 V8HI_type_node, integer_type_node,
5477 pvoid_type_node, NULL_TREE);
5478 tree int_ftype_int_v8hi_v8hi
5479 = build_function_type_list (integer_type_node,
5480 integer_type_node, V8HI_type_node,
5481 V8HI_type_node, NULL_TREE);
5482 tree int_ftype_int_v16qi_v16qi
5483 = build_function_type_list (integer_type_node,
5484 integer_type_node, V16QI_type_node,
5485 V16QI_type_node, NULL_TREE);
5486 tree int_ftype_int_v4sf_v4sf
5487 = build_function_type_list (integer_type_node,
5488 integer_type_node, V4SF_type_node,
5489 V4SF_type_node, NULL_TREE);
5490 tree v4si_ftype_v4si
5491 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
5492 tree v8hi_ftype_v8hi
5493 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
5494 tree v16qi_ftype_v16qi
5495 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
5496 tree v4sf_ftype_v4sf
5497 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
5498 tree void_ftype_pvoid_int_char
5499 = build_function_type_list (void_type_node,
5500 pvoid_type_node, integer_type_node,
5501 char_type_node, NULL_TREE);
5502
5503 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat, ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
5504 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf, ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
5505 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint, ALTIVEC_BUILTIN_LD_INTERNAL_4si);
5506 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si, ALTIVEC_BUILTIN_ST_INTERNAL_4si);
5507 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort, ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
5508 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi, ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
5509 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar, ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
5510 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi, ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
5511 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
5512 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
5513 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
5514 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
5515 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSL);
5516 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSR);
5517 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEBX);
5518 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEHX);
5519 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEWX);
5520 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVXL);
5521 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVX);
5522 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
5523 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
5524 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
5525 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
5526 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
5527
5528 /* Add the DST variants. */
5529 d = (struct builtin_description *) bdesc_dst;
5530 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
5531 def_builtin (d->mask, d->name, void_ftype_pvoid_int_char, d->code);
5532
5533 /* Initialize the predicates. */
5534 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
5535 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
5536 {
5537 enum machine_mode mode1;
5538 tree type;
5539
5540 mode1 = insn_data[dp->icode].operand[1].mode;
5541
5542 switch (mode1)
5543 {
5544 case V4SImode:
5545 type = int_ftype_int_v4si_v4si;
5546 break;
5547 case V8HImode:
5548 type = int_ftype_int_v8hi_v8hi;
5549 break;
5550 case V16QImode:
5551 type = int_ftype_int_v16qi_v16qi;
5552 break;
5553 case V4SFmode:
5554 type = int_ftype_int_v4sf_v4sf;
5555 break;
5556 default:
5557 abort ();
5558 }
5559
5560 def_builtin (dp->mask, dp->name, type, dp->code);
5561 }
5562
5563 /* Initialize the abs* operators. */
5564 d = (struct builtin_description *) bdesc_abs;
5565 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
5566 {
5567 enum machine_mode mode0;
5568 tree type;
5569
5570 mode0 = insn_data[d->icode].operand[0].mode;
5571
5572 switch (mode0)
5573 {
5574 case V4SImode:
5575 type = v4si_ftype_v4si;
5576 break;
5577 case V8HImode:
5578 type = v8hi_ftype_v8hi;
5579 break;
5580 case V16QImode:
5581 type = v16qi_ftype_v16qi;
5582 break;
5583 case V4SFmode:
5584 type = v4sf_ftype_v4sf;
5585 break;
5586 default:
5587 abort ();
5588 }
5589
5590 def_builtin (d->mask, d->name, type, d->code);
5591 }
5592 }
5593
5594 static void
5595 rs6000_common_init_builtins ()
5596 {
5597 struct builtin_description *d;
5598 size_t i;
5599
5600 tree v4sf_ftype_v4sf_v4sf_v16qi
5601 = build_function_type_list (V4SF_type_node,
5602 V4SF_type_node, V4SF_type_node,
5603 V16QI_type_node, NULL_TREE);
5604 tree v4si_ftype_v4si_v4si_v16qi
5605 = build_function_type_list (V4SI_type_node,
5606 V4SI_type_node, V4SI_type_node,
5607 V16QI_type_node, NULL_TREE);
5608 tree v8hi_ftype_v8hi_v8hi_v16qi
5609 = build_function_type_list (V8HI_type_node,
5610 V8HI_type_node, V8HI_type_node,
5611 V16QI_type_node, NULL_TREE);
5612 tree v16qi_ftype_v16qi_v16qi_v16qi
5613 = build_function_type_list (V16QI_type_node,
5614 V16QI_type_node, V16QI_type_node,
5615 V16QI_type_node, NULL_TREE);
5616 tree v4si_ftype_char
5617 = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
5618 tree v8hi_ftype_char
5619 = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
5620 tree v16qi_ftype_char
5621 = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
5622 tree v8hi_ftype_v16qi
5623 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
5624 tree v4sf_ftype_v4sf
5625 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
5626
5627 tree v2si_ftype_v2si_v2si
5628 = build_function_type_list (V2SI_type_node,
5629 V2SI_type_node, V2SI_type_node, NULL_TREE);
5630
5631 tree v2sf_ftype_v2sf_v2sf
5632 = build_function_type_list (V2SF_type_node,
5633 V2SF_type_node, V2SF_type_node, NULL_TREE);
5634
5635 tree v2si_ftype_int_int
5636 = build_function_type_list (V2SI_type_node,
5637 integer_type_node, integer_type_node,
5638 NULL_TREE);
5639
5640 tree v2si_ftype_v2si
5641 = build_function_type_list (V2SI_type_node, V2SI_type_node, NULL_TREE);
5642
5643 tree v2sf_ftype_v2sf
5644 = build_function_type_list (V2SF_type_node,
5645 V2SF_type_node, NULL_TREE);
5646
5647 tree v2sf_ftype_v2si
5648 = build_function_type_list (V2SF_type_node,
5649 V2SI_type_node, NULL_TREE);
5650
5651 tree v2si_ftype_v2sf
5652 = build_function_type_list (V2SI_type_node,
5653 V2SF_type_node, NULL_TREE);
5654
5655 tree v2si_ftype_v2si_char
5656 = build_function_type_list (V2SI_type_node,
5657 V2SI_type_node, char_type_node, NULL_TREE);
5658
5659 tree v2si_ftype_int_char
5660 = build_function_type_list (V2SI_type_node,
5661 integer_type_node, char_type_node, NULL_TREE);
5662
5663 tree v2si_ftype_char
5664 = build_function_type_list (V2SI_type_node, char_type_node, NULL_TREE);
5665
5666 tree int_ftype_int_int
5667 = build_function_type_list (integer_type_node,
5668 integer_type_node, integer_type_node,
5669 NULL_TREE);
5670
5671 tree v4si_ftype_v4si_v4si
5672 = build_function_type_list (V4SI_type_node,
5673 V4SI_type_node, V4SI_type_node, NULL_TREE);
5674 tree v4sf_ftype_v4si_char
5675 = build_function_type_list (V4SF_type_node,
5676 V4SI_type_node, char_type_node, NULL_TREE);
5677 tree v4si_ftype_v4sf_char
5678 = build_function_type_list (V4SI_type_node,
5679 V4SF_type_node, char_type_node, NULL_TREE);
5680 tree v4si_ftype_v4si_char
5681 = build_function_type_list (V4SI_type_node,
5682 V4SI_type_node, char_type_node, NULL_TREE);
5683 tree v8hi_ftype_v8hi_char
5684 = build_function_type_list (V8HI_type_node,
5685 V8HI_type_node, char_type_node, NULL_TREE);
5686 tree v16qi_ftype_v16qi_char
5687 = build_function_type_list (V16QI_type_node,
5688 V16QI_type_node, char_type_node, NULL_TREE);
5689 tree v16qi_ftype_v16qi_v16qi_char
5690 = build_function_type_list (V16QI_type_node,
5691 V16QI_type_node, V16QI_type_node,
5692 char_type_node, NULL_TREE);
5693 tree v8hi_ftype_v8hi_v8hi_char
5694 = build_function_type_list (V8HI_type_node,
5695 V8HI_type_node, V8HI_type_node,
5696 char_type_node, NULL_TREE);
5697 tree v4si_ftype_v4si_v4si_char
5698 = build_function_type_list (V4SI_type_node,
5699 V4SI_type_node, V4SI_type_node,
5700 char_type_node, NULL_TREE);
5701 tree v4sf_ftype_v4sf_v4sf_char
5702 = build_function_type_list (V4SF_type_node,
5703 V4SF_type_node, V4SF_type_node,
5704 char_type_node, NULL_TREE);
5705 tree v4sf_ftype_v4sf_v4sf
5706 = build_function_type_list (V4SF_type_node,
5707 V4SF_type_node, V4SF_type_node, NULL_TREE);
5708 tree v4sf_ftype_v4sf_v4sf_v4si
5709 = build_function_type_list (V4SF_type_node,
5710 V4SF_type_node, V4SF_type_node,
5711 V4SI_type_node, NULL_TREE);
5712 tree v4sf_ftype_v4sf_v4sf_v4sf
5713 = build_function_type_list (V4SF_type_node,
5714 V4SF_type_node, V4SF_type_node,
5715 V4SF_type_node, NULL_TREE);
5716 tree v4si_ftype_v4si_v4si_v4si
5717 = build_function_type_list (V4SI_type_node,
5718 V4SI_type_node, V4SI_type_node,
5719 V4SI_type_node, NULL_TREE);
5720 tree v8hi_ftype_v8hi_v8hi
5721 = build_function_type_list (V8HI_type_node,
5722 V8HI_type_node, V8HI_type_node, NULL_TREE);
5723 tree v8hi_ftype_v8hi_v8hi_v8hi
5724 = build_function_type_list (V8HI_type_node,
5725 V8HI_type_node, V8HI_type_node,
5726 V8HI_type_node, NULL_TREE);
5727 tree v4si_ftype_v8hi_v8hi_v4si
5728 = build_function_type_list (V4SI_type_node,
5729 V8HI_type_node, V8HI_type_node,
5730 V4SI_type_node, NULL_TREE);
5731 tree v4si_ftype_v16qi_v16qi_v4si
5732 = build_function_type_list (V4SI_type_node,
5733 V16QI_type_node, V16QI_type_node,
5734 V4SI_type_node, NULL_TREE);
5735 tree v16qi_ftype_v16qi_v16qi
5736 = build_function_type_list (V16QI_type_node,
5737 V16QI_type_node, V16QI_type_node, NULL_TREE);
5738 tree v4si_ftype_v4sf_v4sf
5739 = build_function_type_list (V4SI_type_node,
5740 V4SF_type_node, V4SF_type_node, NULL_TREE);
5741 tree v8hi_ftype_v16qi_v16qi
5742 = build_function_type_list (V8HI_type_node,
5743 V16QI_type_node, V16QI_type_node, NULL_TREE);
5744 tree v4si_ftype_v8hi_v8hi
5745 = build_function_type_list (V4SI_type_node,
5746 V8HI_type_node, V8HI_type_node, NULL_TREE);
5747 tree v8hi_ftype_v4si_v4si
5748 = build_function_type_list (V8HI_type_node,
5749 V4SI_type_node, V4SI_type_node, NULL_TREE);
5750 tree v16qi_ftype_v8hi_v8hi
5751 = build_function_type_list (V16QI_type_node,
5752 V8HI_type_node, V8HI_type_node, NULL_TREE);
5753 tree v4si_ftype_v16qi_v4si
5754 = build_function_type_list (V4SI_type_node,
5755 V16QI_type_node, V4SI_type_node, NULL_TREE);
5756 tree v4si_ftype_v16qi_v16qi
5757 = build_function_type_list (V4SI_type_node,
5758 V16QI_type_node, V16QI_type_node, NULL_TREE);
5759 tree v4si_ftype_v8hi_v4si
5760 = build_function_type_list (V4SI_type_node,
5761 V8HI_type_node, V4SI_type_node, NULL_TREE);
5762 tree v4si_ftype_v8hi
5763 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
5764 tree int_ftype_v4si_v4si
5765 = build_function_type_list (integer_type_node,
5766 V4SI_type_node, V4SI_type_node, NULL_TREE);
5767 tree int_ftype_v4sf_v4sf
5768 = build_function_type_list (integer_type_node,
5769 V4SF_type_node, V4SF_type_node, NULL_TREE);
5770 tree int_ftype_v16qi_v16qi
5771 = build_function_type_list (integer_type_node,
5772 V16QI_type_node, V16QI_type_node, NULL_TREE);
5773 tree int_ftype_v8hi_v8hi
5774 = build_function_type_list (integer_type_node,
5775 V8HI_type_node, V8HI_type_node, NULL_TREE);
5776
5777 /* Add the simple ternary operators. */
5778 d = (struct builtin_description *) bdesc_3arg;
5779 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
5780 {
5781
5782 enum machine_mode mode0, mode1, mode2, mode3;
5783 tree type;
5784
5785 if (d->name == 0 || d->icode == CODE_FOR_nothing)
5786 continue;
5787
5788 mode0 = insn_data[d->icode].operand[0].mode;
5789 mode1 = insn_data[d->icode].operand[1].mode;
5790 mode2 = insn_data[d->icode].operand[2].mode;
5791 mode3 = insn_data[d->icode].operand[3].mode;
5792
5793 /* When all four are of the same mode. */
5794 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
5795 {
5796 switch (mode0)
5797 {
5798 case V4SImode:
5799 type = v4si_ftype_v4si_v4si_v4si;
5800 break;
5801 case V4SFmode:
5802 type = v4sf_ftype_v4sf_v4sf_v4sf;
5803 break;
5804 case V8HImode:
5805 type = v8hi_ftype_v8hi_v8hi_v8hi;
5806 break;
5807 case V16QImode:
5808 type = v16qi_ftype_v16qi_v16qi_v16qi;
5809 break;
5810 default:
5811 abort();
5812 }
5813 }
5814 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
5815 {
5816 switch (mode0)
5817 {
5818 case V4SImode:
5819 type = v4si_ftype_v4si_v4si_v16qi;
5820 break;
5821 case V4SFmode:
5822 type = v4sf_ftype_v4sf_v4sf_v16qi;
5823 break;
5824 case V8HImode:
5825 type = v8hi_ftype_v8hi_v8hi_v16qi;
5826 break;
5827 case V16QImode:
5828 type = v16qi_ftype_v16qi_v16qi_v16qi;
5829 break;
5830 default:
5831 abort();
5832 }
5833 }
5834 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
5835 && mode3 == V4SImode)
5836 type = v4si_ftype_v16qi_v16qi_v4si;
5837 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
5838 && mode3 == V4SImode)
5839 type = v4si_ftype_v8hi_v8hi_v4si;
5840 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
5841 && mode3 == V4SImode)
5842 type = v4sf_ftype_v4sf_v4sf_v4si;
5843
5844 /* vchar, vchar, vchar, 4 bit literal. */
5845 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
5846 && mode3 == QImode)
5847 type = v16qi_ftype_v16qi_v16qi_char;
5848
5849 /* vshort, vshort, vshort, 4 bit literal. */
5850 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
5851 && mode3 == QImode)
5852 type = v8hi_ftype_v8hi_v8hi_char;
5853
5854 /* vint, vint, vint, 4 bit literal. */
5855 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
5856 && mode3 == QImode)
5857 type = v4si_ftype_v4si_v4si_char;
5858
5859 /* vfloat, vfloat, vfloat, 4 bit literal. */
5860 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
5861 && mode3 == QImode)
5862 type = v4sf_ftype_v4sf_v4sf_char;
5863
5864 else
5865 abort ();
5866
5867 def_builtin (d->mask, d->name, type, d->code);
5868 }
5869
5870 /* Add the simple binary operators. */
5871 d = (struct builtin_description *) bdesc_2arg;
5872 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5873 {
5874 enum machine_mode mode0, mode1, mode2;
5875 tree type;
5876
5877 if (d->name == 0 || d->icode == CODE_FOR_nothing)
5878 continue;
5879
5880 mode0 = insn_data[d->icode].operand[0].mode;
5881 mode1 = insn_data[d->icode].operand[1].mode;
5882 mode2 = insn_data[d->icode].operand[2].mode;
5883
5884 /* When all three operands are of the same mode. */
5885 if (mode0 == mode1 && mode1 == mode2)
5886 {
5887 switch (mode0)
5888 {
5889 case V4SFmode:
5890 type = v4sf_ftype_v4sf_v4sf;
5891 break;
5892 case V4SImode:
5893 type = v4si_ftype_v4si_v4si;
5894 break;
5895 case V16QImode:
5896 type = v16qi_ftype_v16qi_v16qi;
5897 break;
5898 case V8HImode:
5899 type = v8hi_ftype_v8hi_v8hi;
5900 break;
5901 case V2SImode:
5902 type = v2si_ftype_v2si_v2si;
5903 break;
5904 case V2SFmode:
5905 type = v2sf_ftype_v2sf_v2sf;
5906 break;
5907 case SImode:
5908 type = int_ftype_int_int;
5909 break;
5910 default:
5911 abort ();
5912 }
5913 }
5914
5915 /* A few other combos we really don't want to do manually. */
5916
5917 /* vint, vfloat, vfloat. */
5918 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
5919 type = v4si_ftype_v4sf_v4sf;
5920
5921 /* vshort, vchar, vchar. */
5922 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
5923 type = v8hi_ftype_v16qi_v16qi;
5924
5925 /* vint, vshort, vshort. */
5926 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
5927 type = v4si_ftype_v8hi_v8hi;
5928
5929 /* vshort, vint, vint. */
5930 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
5931 type = v8hi_ftype_v4si_v4si;
5932
5933 /* vchar, vshort, vshort. */
5934 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
5935 type = v16qi_ftype_v8hi_v8hi;
5936
5937 /* vint, vchar, vint. */
5938 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
5939 type = v4si_ftype_v16qi_v4si;
5940
5941 /* vint, vchar, vchar. */
5942 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
5943 type = v4si_ftype_v16qi_v16qi;
5944
5945 /* vint, vshort, vint. */
5946 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
5947 type = v4si_ftype_v8hi_v4si;
5948
5949 /* vint, vint, 5 bit literal. */
5950 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
5951 type = v4si_ftype_v4si_char;
5952
5953 /* vshort, vshort, 5 bit literal. */
5954 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
5955 type = v8hi_ftype_v8hi_char;
5956
5957 /* vchar, vchar, 5 bit literal. */
5958 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
5959 type = v16qi_ftype_v16qi_char;
5960
5961 /* vfloat, vint, 5 bit literal. */
5962 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
5963 type = v4sf_ftype_v4si_char;
5964
5965 /* vint, vfloat, 5 bit literal. */
5966 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
5967 type = v4si_ftype_v4sf_char;
5968
5969 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
5970 type = v2si_ftype_int_int;
5971
5972 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
5973 type = v2si_ftype_v2si_char;
5974
5975 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
5976 type = v2si_ftype_int_char;
5977
5978 /* int, x, x. */
5979 else if (mode0 == SImode)
5980 {
5981 switch (mode1)
5982 {
5983 case V4SImode:
5984 type = int_ftype_v4si_v4si;
5985 break;
5986 case V4SFmode:
5987 type = int_ftype_v4sf_v4sf;
5988 break;
5989 case V16QImode:
5990 type = int_ftype_v16qi_v16qi;
5991 break;
5992 case V8HImode:
5993 type = int_ftype_v8hi_v8hi;
5994 break;
5995 default:
5996 abort ();
5997 }
5998 }
5999
6000 else
6001 abort ();
6002
6003 def_builtin (d->mask, d->name, type, d->code);
6004 }
6005
6006 /* Add the simple unary operators. */
6007 d = (struct builtin_description *) bdesc_1arg;
6008 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6009 {
6010 enum machine_mode mode0, mode1;
6011 tree type;
6012
6013 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6014 continue;
6015
6016 mode0 = insn_data[d->icode].operand[0].mode;
6017 mode1 = insn_data[d->icode].operand[1].mode;
6018
6019 if (mode0 == V4SImode && mode1 == QImode)
6020 type = v4si_ftype_char;
6021 else if (mode0 == V8HImode && mode1 == QImode)
6022 type = v8hi_ftype_char;
6023 else if (mode0 == V16QImode && mode1 == QImode)
6024 type = v16qi_ftype_char;
6025 else if (mode0 == V4SFmode && mode1 == V4SFmode)
6026 type = v4sf_ftype_v4sf;
6027 else if (mode0 == V8HImode && mode1 == V16QImode)
6028 type = v8hi_ftype_v16qi;
6029 else if (mode0 == V4SImode && mode1 == V8HImode)
6030 type = v4si_ftype_v8hi;
6031 else if (mode0 == V2SImode && mode1 == V2SImode)
6032 type = v2si_ftype_v2si;
6033 else if (mode0 == V2SFmode && mode1 == V2SFmode)
6034 type = v2sf_ftype_v2sf;
6035 else if (mode0 == V2SFmode && mode1 == V2SImode)
6036 type = v2sf_ftype_v2si;
6037 else if (mode0 == V2SImode && mode1 == V2SFmode)
6038 type = v2si_ftype_v2sf;
6039 else if (mode0 == V2SImode && mode1 == QImode)
6040 type = v2si_ftype_char;
6041 else
6042 abort ();
6043
6044 def_builtin (d->mask, d->name, type, d->code);
6045 }
6046 }
6047
6048 \f
6049 /* Expand a block move operation, and return 1 if successful. Return 0
6050 if we should let the compiler generate normal code.
6051
6052 operands[0] is the destination
6053 operands[1] is the source
6054 operands[2] is the length
6055 operands[3] is the alignment */
6056
/* Maximum number of store insns expand_block_move buffers before
   emitting them; loads are emitted immediately, so buffering lets up
   to this many loads be issued ahead of their stores.  */
#define MAX_MOVE_REG 4
6058
int
expand_block_move (operands)
     rtx operands[];
{
  rtx orig_dest = operands[0];
  rtx orig_src	= operands[1];
  rtx bytes_rtx	= operands[2];
  rtx align_rtx = operands[3];
  int constp	= (GET_CODE (bytes_rtx) == CONST_INT);
  int align;
  int bytes;			/* remaining byte count */
  int offset;			/* current offset into src/dest */
  int move_bytes;		/* bytes handled by the current step */

  /* If this is not a fixed size move, just call memcpy */
  if (! constp)
    return 0;

  /* If this is not a fixed size alignment, abort */
  if (GET_CODE (align_rtx) != CONST_INT)
    abort ();
  align = INTVAL (align_rtx);

  /* Anything to move? */
  bytes = INTVAL (bytes_rtx);
  if (bytes <= 0)
    return 1;

  /* store_one_arg depends on expand_block_move to handle at least the size of
     reg_parm_stack_space.  */
  if (bytes > (TARGET_POWERPC64 ? 64 : 32))
    return 0;

  if (TARGET_STRING)	/* string instructions are available */
    {
      /* Each iteration emits one multi-register string move (lswi/stswi
	 family, via a gen_movstrsi_*reg pattern) or one scalar
	 load/store pair, choosing the largest chunk that the remaining
	 byte count, the alignment, and register availability allow.  */
      for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
	{
	  /* Exactly one union member is set per iteration: movstrsi
	     for the BLKmode string patterns, mov for scalar moves.
	     Which one is valid is keyed off MODE == BLKmode below.  */
	  union {
	    rtx (*movstrsi) PARAMS ((rtx, rtx, rtx, rtx));
	    rtx (*mov) PARAMS ((rtx, rtx));
	  } gen_func;
	  enum machine_mode mode = BLKmode;
	  rtx src, dest;

	  /* The 8/6/4-register string patterns clobber r5 upward, so
	     each is usable only when all the registers it would use
	     are not fixed.  */
	  if (bytes > 24	/* move up to 32 bytes at a time */
	      && ! fixed_regs[5]
	      && ! fixed_regs[6]
	      && ! fixed_regs[7]
	      && ! fixed_regs[8]
	      && ! fixed_regs[9]
	      && ! fixed_regs[10]
	      && ! fixed_regs[11]
	      && ! fixed_regs[12])
	    {
	      move_bytes = (bytes > 32) ? 32 : bytes;
	      gen_func.movstrsi = gen_movstrsi_8reg;
	    }
	  else if (bytes > 16	/* move up to 24 bytes at a time */
		   && ! fixed_regs[5]
		   && ! fixed_regs[6]
		   && ! fixed_regs[7]
		   && ! fixed_regs[8]
		   && ! fixed_regs[9]
		   && ! fixed_regs[10])
	    {
	      move_bytes = (bytes > 24) ? 24 : bytes;
	      gen_func.movstrsi = gen_movstrsi_6reg;
	    }
	  else if (bytes > 8	/* move up to 16 bytes at a time */
		   && ! fixed_regs[5]
		   && ! fixed_regs[6]
		   && ! fixed_regs[7]
		   && ! fixed_regs[8])
	    {
	      move_bytes = (bytes > 16) ? 16 : bytes;
	      gen_func.movstrsi = gen_movstrsi_4reg;
	    }
	  else if (bytes >= 8 && TARGET_POWERPC64
		   /* 64-bit loads and stores require word-aligned
		      displacements.  */
		   && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
	    {
	      move_bytes = 8;
	      mode = DImode;
	      gen_func.mov = gen_movdi;
	    }
	  else if (bytes > 4 && !TARGET_POWERPC64)
	    {			/* move up to 8 bytes at a time */
	      move_bytes = (bytes > 8) ? 8 : bytes;
	      gen_func.movstrsi = gen_movstrsi_2reg;
	    }
	  else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
	    {			/* move 4 bytes */
	      move_bytes = 4;
	      mode = SImode;
	      gen_func.mov = gen_movsi;
	    }
	  else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
	    {			/* move 2 bytes */
	      move_bytes = 2;
	      mode = HImode;
	      gen_func.mov = gen_movhi;
	    }
	  else if (bytes == 1)	/* move 1 byte */
	    {
	      move_bytes = 1;
	      mode = QImode;
	      gen_func.mov = gen_movqi;
	    }
	  else
	    {			/* move up to 4 bytes at a time */
	      move_bytes = (bytes > 4) ? 4 : bytes;
	      gen_func.movstrsi = gen_movstrsi_1reg;
	    }

	  src = adjust_address (orig_src, mode, offset);
	  dest = adjust_address (orig_dest, mode, offset);

	  if (mode == BLKmode)
	    {
	      /* Move the address into scratch registers.  The movstrsi
		 patterns require zero offset.  */
	      if (!REG_P (XEXP (src, 0)))
		{
		  rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
		  src = replace_equiv_address (src, src_reg);
		}
	      set_mem_size (src, GEN_INT (move_bytes));

	      if (!REG_P (XEXP (dest, 0)))
		{
		  rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
		  dest = replace_equiv_address (dest, dest_reg);
		}
	      set_mem_size (dest, GEN_INT (move_bytes));

	      /* move_bytes & 31: a full 32-byte lswi/stswi encodes its
		 length operand as 0.  */
	      emit_insn ((*gen_func.movstrsi) (dest, src,
					       GEN_INT (move_bytes & 31),
					       align_rtx));
	    }
	  else
	    {
	      /* Scalar move: load into a fresh pseudo, then store.  */
	      rtx tmp_reg = gen_reg_rtx (mode);

	      emit_insn ((*gen_func.mov) (tmp_reg, src));
	      emit_insn ((*gen_func.mov) (dest, tmp_reg));
	    }
	}
    }

  else			/* string instructions not available */
    {
      /* Emit scalar load/store pairs, but delay the stores (up to
	 MAX_MOVE_REG of them) so consecutive loads can issue back to
	 back before their stores.  */
      rtx stores[MAX_MOVE_REG];
      int num_reg = 0;
      int i;

      for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
	{
	  rtx (*gen_mov_func) PARAMS ((rtx, rtx));
	  enum machine_mode mode;
	  rtx src, dest, tmp_reg;

	  /* Generate the appropriate load and store, saving the stores
	     for later.  */
	  if (bytes >= 8 && TARGET_POWERPC64
	      /* 64-bit loads and stores require word-aligned
		 displacements.  */
	      && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
	    {
	      move_bytes = 8;
	      mode = DImode;
	      gen_mov_func = gen_movdi;
	    }
	  else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
	    {
	      move_bytes = 4;
	      mode = SImode;
	      gen_mov_func = gen_movsi;
	    }
	  else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
	    {
	      move_bytes = 2;
	      mode = HImode;
	      gen_mov_func = gen_movhi;
	    }
	  else
	    {
	      move_bytes = 1;
	      mode = QImode;
	      gen_mov_func = gen_movqi;
	    }

	  src = adjust_address (orig_src, mode, offset);
	  dest = adjust_address (orig_dest, mode, offset);
	  tmp_reg = gen_reg_rtx (mode);

	  emit_insn ((*gen_mov_func) (tmp_reg, src));
	  stores[num_reg++] = (*gen_mov_func) (dest, tmp_reg);

	  /* Flush the buffered stores once the buffer is full.  */
	  if (num_reg >= MAX_MOVE_REG)
	    {
	      for (i = 0; i < num_reg; i++)
		emit_insn (stores[i]);
	      num_reg = 0;
	    }
	}

      /* Emit any stores still pending.  */
      for (i = 0; i < num_reg; i++)
	emit_insn (stores[i]);
    }

  return 1;
}
6272
6273 \f
6274 /* Return 1 if OP is a load multiple operation. It is known to be a
6275 PARALLEL and the first section will be tested. */
6276
6277 int
6278 load_multiple_operation (op, mode)
6279 rtx op;
6280 enum machine_mode mode ATTRIBUTE_UNUSED;
6281 {
6282 int count = XVECLEN (op, 0);
6283 unsigned int dest_regno;
6284 rtx src_addr;
6285 int i;
6286
6287 /* Perform a quick check so we don't blow up below. */
6288 if (count <= 1
6289 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6290 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6291 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
6292 return 0;
6293
6294 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6295 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
6296
6297 for (i = 1; i < count; i++)
6298 {
6299 rtx elt = XVECEXP (op, 0, i);
6300
6301 if (GET_CODE (elt) != SET
6302 || GET_CODE (SET_DEST (elt)) != REG
6303 || GET_MODE (SET_DEST (elt)) != SImode
6304 || REGNO (SET_DEST (elt)) != dest_regno + i
6305 || GET_CODE (SET_SRC (elt)) != MEM
6306 || GET_MODE (SET_SRC (elt)) != SImode
6307 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
6308 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
6309 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
6310 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
6311 return 0;
6312 }
6313
6314 return 1;
6315 }
6316
6317 /* Similar, but tests for store multiple. Here, the second vector element
6318 is a CLOBBER. It will be tested later. */
6319
6320 int
6321 store_multiple_operation (op, mode)
6322 rtx op;
6323 enum machine_mode mode ATTRIBUTE_UNUSED;
6324 {
6325 int count = XVECLEN (op, 0) - 1;
6326 unsigned int src_regno;
6327 rtx dest_addr;
6328 int i;
6329
6330 /* Perform a quick check so we don't blow up below. */
6331 if (count <= 1
6332 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6333 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
6334 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
6335 return 0;
6336
6337 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6338 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
6339
6340 for (i = 1; i < count; i++)
6341 {
6342 rtx elt = XVECEXP (op, 0, i + 1);
6343
6344 if (GET_CODE (elt) != SET
6345 || GET_CODE (SET_SRC (elt)) != REG
6346 || GET_MODE (SET_SRC (elt)) != SImode
6347 || REGNO (SET_SRC (elt)) != src_regno + i
6348 || GET_CODE (SET_DEST (elt)) != MEM
6349 || GET_MODE (SET_DEST (elt)) != SImode
6350 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
6351 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
6352 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
6353 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
6354 return 0;
6355 }
6356
6357 return 1;
6358 }
6359
6360 /* Return 1 for a parallel vrsave operation. */
6361
6362 int
6363 vrsave_operation (op, mode)
6364 rtx op;
6365 enum machine_mode mode ATTRIBUTE_UNUSED;
6366 {
6367 int count = XVECLEN (op, 0);
6368 unsigned int dest_regno, src_regno;
6369 int i;
6370
6371 if (count <= 1
6372 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6373 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6374 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
6375 return 0;
6376
6377 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6378 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6379
6380 if (dest_regno != VRSAVE_REGNO
6381 && src_regno != VRSAVE_REGNO)
6382 return 0;
6383
6384 for (i = 1; i < count; i++)
6385 {
6386 rtx elt = XVECEXP (op, 0, i);
6387
6388 if (GET_CODE (elt) != CLOBBER
6389 && GET_CODE (elt) != SET)
6390 return 0;
6391 }
6392
6393 return 1;
6394 }
6395
/* Return 1 for a PARALLEL suitable for mtcrf.  */
6397
int
mtcrf_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  int i;
  rtx src_reg;

  /* Perform a quick check so we don't blow up below.  The first element
     must be a SET from a 2-operand UNSPEC.  */
  if (count < 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
      || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
    return 0;
  /* The GPR supplying the new CR contents; every element must name
     this same register.  */
  src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);

  if (GET_CODE (src_reg) != REG
      || GET_MODE (src_reg) != SImode
      || ! INT_REGNO_P (REGNO (src_reg)))
    return 0;

  for (i = 0; i < count; i++)
    {
      rtx exp = XVECEXP (op, 0, i);
      rtx unspec;
      int maskval;

      /* Each element sets one CC-mode CR field.  */
      if (GET_CODE (exp) != SET
	  || GET_CODE (SET_DEST (exp)) != REG
	  || GET_MODE (SET_DEST (exp)) != CCmode
	  || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
	return 0;
      unspec = SET_SRC (exp);
      /* The FXM field of mtcrf is a one-hot mask selecting this CR
	 field; field 0 is the high-order bit.  */
      maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));

      /* NOTE(review): the literal 20 is the UNSPEC number used by the
	 mtcrf patterns in rs6000.md -- confirm against the .md file
	 before changing.  */
      if (GET_CODE (unspec) != UNSPEC
	  || XINT (unspec, 1) != 20
	  || XVECLEN (unspec, 0) != 2
	  || XVECEXP (unspec, 0, 0) != src_reg
	  || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
	  || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
	return 0;
    }
  return 1;
}
6444
/* Return 1 for a PARALLEL suitable for lmw.  */
6446
int
lmw_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int dest_regno;
  rtx src_addr;
  unsigned int base_regno;
  HOST_WIDE_INT offset;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);

  /* lmw loads registers dest_regno through 31, so the PARALLEL must
     contain exactly 32 - dest_regno loads.  */
  if (dest_regno > 31
      || count != 32 - (int) dest_regno)
    return 0;

  /* Decompose the first address into base register + constant offset.  */
  if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
    {
      offset = 0;
      base_regno = REGNO (src_addr);
      /* Register 0 as a base means a literal zero, not GPR0.  */
      if (base_regno == 0)
	return 0;
    }
  else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
    {
      offset = INTVAL (XEXP (src_addr, 1));
      base_regno = REGNO (XEXP (src_addr, 0));
    }
  else
    return 0;

  /* Each element must load SImode register dest_regno+i from
     base_regno + offset + 4*i.  */
  for (i = 0; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);
      rtx newaddr;
      rtx addr_reg;
      HOST_WIDE_INT newoffset;

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || REGNO (SET_DEST (elt)) != dest_regno + i
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != SImode)
	return 0;
      newaddr = XEXP (SET_SRC (elt), 0);
      if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
	{
	  newoffset = 0;
	  addr_reg = newaddr;
	}
      else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
	{
	  addr_reg = XEXP (newaddr, 0);
	  newoffset = INTVAL (XEXP (newaddr, 1));
	}
      else
	return 0;
      if (REGNO (addr_reg) != base_regno
	  || newoffset != offset + 4 * i)
	return 0;
    }

  return 1;
}
6522
/* Return 1 for a PARALLEL suitable for stmw.  */
6524
int
stmw_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int src_regno;
  rtx dest_addr;
  unsigned int base_regno;
  HOST_WIDE_INT offset;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);

  /* stmw stores registers src_regno through 31, so the PARALLEL must
     contain exactly 32 - src_regno stores.  */
  if (src_regno > 31
      || count != 32 - (int) src_regno)
    return 0;

  /* Decompose the first address into base register + constant offset.  */
  if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
    {
      offset = 0;
      base_regno = REGNO (dest_addr);
      /* Register 0 as a base means a literal zero, not GPR0.  */
      if (base_regno == 0)
	return 0;
    }
  else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
    {
      offset = INTVAL (XEXP (dest_addr, 1));
      base_regno = REGNO (XEXP (dest_addr, 0));
    }
  else
    return 0;

  /* Each element must store SImode register src_regno+i at
     base_regno + offset + 4*i.  */
  for (i = 0; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);
      rtx newaddr;
      rtx addr_reg;
      HOST_WIDE_INT newoffset;

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_SRC (elt)) != REG
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || REGNO (SET_SRC (elt)) != src_regno + i
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_MODE (SET_DEST (elt)) != SImode)
	return 0;
      newaddr = XEXP (SET_DEST (elt), 0);
      if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
	{
	  newoffset = 0;
	  addr_reg = newaddr;
	}
      else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
	{
	  addr_reg = XEXP (newaddr, 0);
	  newoffset = INTVAL (XEXP (newaddr, 1));
	}
      else
	return 0;
      if (REGNO (addr_reg) != base_regno
	  || newoffset != offset + 4 * i)
	return 0;
    }

  return 1;
}
6600 \f
6601 /* A validation routine: say whether CODE, a condition code, and MODE
6602 match. The other alternatives either don't make sense or should
6603 never be generated. */
6604
6605 static void
6606 validate_condition_mode (code, mode)
6607 enum rtx_code code;
6608 enum machine_mode mode;
6609 {
6610 if (GET_RTX_CLASS (code) != '<'
6611 || GET_MODE_CLASS (mode) != MODE_CC)
6612 abort ();
6613
6614 /* These don't make sense. */
6615 if ((code == GT || code == LT || code == GE || code == LE)
6616 && mode == CCUNSmode)
6617 abort ();
6618
6619 if ((code == GTU || code == LTU || code == GEU || code == LEU)
6620 && mode != CCUNSmode)
6621 abort ();
6622
6623 if (mode != CCFPmode
6624 && (code == ORDERED || code == UNORDERED
6625 || code == UNEQ || code == LTGT
6626 || code == UNGT || code == UNLT
6627 || code == UNGE || code == UNLE))
6628 abort ();
6629
6630 /* These should never be generated except for
6631 flag_unsafe_math_optimizations and flag_finite_math_only. */
6632 if (mode == CCFPmode
6633 && ! flag_unsafe_math_optimizations
6634 && ! flag_finite_math_only
6635 && (code == LE || code == GE
6636 || code == UNEQ || code == LTGT
6637 || code == UNGT || code == UNLT))
6638 abort ();
6639
6640 /* These are invalid; the information is not there. */
6641 if (mode == CCEQmode
6642 && code != EQ && code != NE)
6643 abort ();
6644 }
6645
6646 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
6647 We only check the opcode against the mode of the CC value here. */
6648
6649 int
6650 branch_comparison_operator (op, mode)
6651 rtx op;
6652 enum machine_mode mode ATTRIBUTE_UNUSED;
6653 {
6654 enum rtx_code code = GET_CODE (op);
6655 enum machine_mode cc_mode;
6656
6657 if (GET_RTX_CLASS (code) != '<')
6658 return 0;
6659
6660 cc_mode = GET_MODE (XEXP (op, 0));
6661 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
6662 return 0;
6663
6664 validate_condition_mode (code, cc_mode);
6665
6666 return 1;
6667 }
6668
6669 /* Return 1 if OP is a comparison operation that is valid for a branch
6670 insn and which is true if the corresponding bit in the CC register
6671 is set. */
6672
6673 int
6674 branch_positive_comparison_operator (op, mode)
6675 rtx op;
6676 enum machine_mode mode;
6677 {
6678 enum rtx_code code;
6679
6680 if (! branch_comparison_operator (op, mode))
6681 return 0;
6682
6683 code = GET_CODE (op);
6684 return (code == EQ || code == LT || code == GT
6685 || (TARGET_SPE && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
6686 || code == LTU || code == GTU
6687 || code == UNORDERED);
6688 }
6689
6690 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
6691 We check the opcode against the mode of the CC value and disallow EQ or
6692 NE comparisons for integers. */
6693
6694 int
6695 scc_comparison_operator (op, mode)
6696 rtx op;
6697 enum machine_mode mode;
6698 {
6699 enum rtx_code code = GET_CODE (op);
6700 enum machine_mode cc_mode;
6701
6702 if (GET_MODE (op) != mode && mode != VOIDmode)
6703 return 0;
6704
6705 if (GET_RTX_CLASS (code) != '<')
6706 return 0;
6707
6708 cc_mode = GET_MODE (XEXP (op, 0));
6709 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
6710 return 0;
6711
6712 validate_condition_mode (code, cc_mode);
6713
6714 if (code == NE && cc_mode != CCFPmode)
6715 return 0;
6716
6717 return 1;
6718 }
6719
6720 int
6721 trap_comparison_operator (op, mode)
6722 rtx op;
6723 enum machine_mode mode;
6724 {
6725 if (mode != VOIDmode && mode != GET_MODE (op))
6726 return 0;
6727 return GET_RTX_CLASS (GET_CODE (op)) == '<';
6728 }
6729
6730 int
6731 boolean_operator (op, mode)
6732 rtx op;
6733 enum machine_mode mode ATTRIBUTE_UNUSED;
6734 {
6735 enum rtx_code code = GET_CODE (op);
6736 return (code == AND || code == IOR || code == XOR);
6737 }
6738
6739 int
6740 boolean_or_operator (op, mode)
6741 rtx op;
6742 enum machine_mode mode ATTRIBUTE_UNUSED;
6743 {
6744 enum rtx_code code = GET_CODE (op);
6745 return (code == IOR || code == XOR);
6746 }
6747
6748 int
6749 min_max_operator (op, mode)
6750 rtx op;
6751 enum machine_mode mode ATTRIBUTE_UNUSED;
6752 {
6753 enum rtx_code code = GET_CODE (op);
6754 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
6755 }
6756 \f
6757 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
6758 mask required to convert the result of a rotate insn into a shift
6759 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
6760
6761 int
6762 includes_lshift_p (shiftop, andop)
6763 rtx shiftop;
6764 rtx andop;
6765 {
6766 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
6767
6768 shift_mask <<= INTVAL (shiftop);
6769
6770 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
6771 }
6772
6773 /* Similar, but for right shift. */
6774
6775 int
6776 includes_rshift_p (shiftop, andop)
6777 rtx shiftop;
6778 rtx andop;
6779 {
6780 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
6781
6782 shift_mask >>= INTVAL (shiftop);
6783
6784 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
6785 }
6786
/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
   to perform a left shift.  It must have exactly SHIFTOP least
   significant 0's, then one or more 1's, then zero or more 0's.  */
6790
int
includes_rldic_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      c = INTVAL (andop);
      /* An all-zero or all-one mask can never match the required
	 0...0 1...1 0...0 shape.  */
      if (c == 0 || c == ~0)
	return 0;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must coincide with the LSB of the shift mask, i.e. the mask
	 has exactly SHIFTOP low zeros.  */
      if (-lsb != shift_mask)
	return 0;

      /* Invert to look for the next transition (if any).  */
      c = ~c;

      /* Remove the low group of ones (originally low group of zeros).  */
      c &= -lsb;

      /* Again find the lsb, and check we have all 1's above: the mask
	 may contain only one 0->1->0 transition pair.  */
      lsb = c & -c;
      return c == -lsb;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* Same test for a 64-bit constant split across two host words
	 when HOST_WIDE_INT is only 32 bits.  */
      HOST_WIDE_INT low, high, lsb;
      HOST_WIDE_INT shift_mask_low, shift_mask_high;

      low = CONST_DOUBLE_LOW (andop);
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = CONST_DOUBLE_HIGH (andop);

      if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
	  || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
	return 0;

      /* If the low word is entirely zero, the transitions both lie in
	 the high word; the shift count must reach into it.  */
      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  shift_mask_high = ~0;
	  if (INTVAL (shiftop) > 32)
	    shift_mask_high <<= INTVAL (shiftop) - 32;

	  lsb = high & -high;

	  if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
	    return 0;

	  high = ~high;
	  high &= -lsb;

	  lsb = high & -high;
	  return high == -lsb;
	}

      /* Otherwise the low transition is in the low word.  */
      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if (-lsb != shift_mask_low)
	return 0;

      /* Strip the low run of ones, then verify everything above it
	 (possibly continuing into the high word) is a single run.  */
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = ~high;
      low = ~low;
      low &= -lsb;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  lsb = high & -high;
	  return high == -lsb;
	}

      lsb = low & -low;
      return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
    }
  else
    return 0;
}
6881
/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
   to perform a left shift.  It must have SHIFTOP or more least
   significant 0's, with the remainder of the word 1's.  */
6885
int
includes_rldicr_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);
      c = INTVAL (andop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must be covered by the shift mask, i.e. there are at least
	 SHIFTOP low zeros.  This test also rejects c == 0.  */
      if ((lsb & shift_mask) == 0)
	return 0;

      /* Check we have all 1's above the transition, and reject all 1's.  */
      return c == -lsb && lsb != 1;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* Same test for a 64-bit constant split across two host words
	 when HOST_WIDE_INT is only 32 bits.  */
      HOST_WIDE_INT low, lsb, shift_mask_low;

      low = CONST_DOUBLE_LOW (andop);

      if (HOST_BITS_PER_WIDE_INT < 64)
	{
	  HOST_WIDE_INT high, shift_mask_high;

	  high = CONST_DOUBLE_HIGH (andop);

	  /* If the low word is all zero, the 0->1 transition is in the
	     high word; the low word then contributes nothing.  */
	  if (low == 0)
	    {
	      shift_mask_high = ~0;
	      if (INTVAL (shiftop) > 32)
		shift_mask_high <<= INTVAL (shiftop) - 32;

	      lsb = high & -high;

	      if ((lsb & shift_mask_high) == 0)
		return 0;

	      return high == -lsb;
	    }
	  /* Otherwise the high word must be entirely ones for the mask
	     to end in a solid run of 1's.  */
	  if (high != ~0)
	    return 0;
	}

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if ((lsb & shift_mask_low) == 0)
	return 0;

      return low == -lsb && lsb != 1;
    }
  else
    return 0;
}
6953
6954 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
6955 for lfq and stfq insns.
6956
6957 Note reg1 and reg2 *must* be hard registers. To be sure we will
6958 abort if we are passed pseudo registers. */
6959
6960 int
6961 registers_ok_for_quad_peep (reg1, reg2)
6962 rtx reg1, reg2;
6963 {
6964 /* We might have been passed a SUBREG. */
6965 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
6966 return 0;
6967
6968 return (REGNO (reg1) == REGNO (reg2) - 1);
6969 }
6970
6971 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
6972 addr1 and addr2 must be in consecutive memory locations
6973 (addr2 == addr1 + 8). */
6974
6975 int
6976 addrs_ok_for_quad_peep (addr1, addr2)
6977 rtx addr1;
6978 rtx addr2;
6979 {
6980 unsigned int reg1;
6981 int offset1;
6982
6983 /* Extract an offset (if used) from the first addr. */
6984 if (GET_CODE (addr1) == PLUS)
6985 {
6986 /* If not a REG, return zero. */
6987 if (GET_CODE (XEXP (addr1, 0)) != REG)
6988 return 0;
6989 else
6990 {
6991 reg1 = REGNO (XEXP (addr1, 0));
6992 /* The offset must be constant! */
6993 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
6994 return 0;
6995 offset1 = INTVAL (XEXP (addr1, 1));
6996 }
6997 }
6998 else if (GET_CODE (addr1) != REG)
6999 return 0;
7000 else
7001 {
7002 reg1 = REGNO (addr1);
7003 /* This was a simple (mem (reg)) expression. Offset is 0. */
7004 offset1 = 0;
7005 }
7006
7007 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
7008 if (GET_CODE (addr2) != PLUS)
7009 return 0;
7010
7011 if (GET_CODE (XEXP (addr2, 0)) != REG
7012 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
7013 return 0;
7014
7015 if (reg1 != REGNO (XEXP (addr2, 0)))
7016 return 0;
7017
7018 /* The offset for the second addr must be 8 more than the first addr. */
7019 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
7020 return 0;
7021
7022 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7023 instructions. */
7024 return 1;
7025 }
7026 \f
7027 /* Return the register class of a scratch register needed to copy IN into
7028 or out of a register in CLASS in MODE. If it can be done directly,
7029 NO_REGS is returned. */
7030
enum reg_class
secondary_reload_class (class, mode, in)
     enum reg_class class;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx in;
{
  int regno;

  if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
    {
      /* We cannot copy a symbolic operand directly into anything
	 other than BASE_REGS for TARGET_ELF.  So indicate that a
	 register from BASE_REGS is needed as an intermediate
	 register.

	 On Darwin, pic addresses require a load from memory, which
	 needs a base register.  */
      if (class != BASE_REGS
	  && (GET_CODE (in) == SYMBOL_REF
	      || GET_CODE (in) == HIGH
	      || GET_CODE (in) == LABEL_REF
	      || GET_CODE (in) == CONST))
	return BASE_REGS;
    }

  /* Resolve IN to a hard register number, or -1 if it is not (or is
     not known to be) a hard register.  */
  if (GET_CODE (in) == REG)
    {
      regno = REGNO (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  /* A pseudo: see whether reload has already assigned it a
	     hard register.  */
	  regno = true_regnum (in);
	  if (regno >= FIRST_PSEUDO_REGISTER)
	    regno = -1;
	}
    }
  else if (GET_CODE (in) == SUBREG)
    {
      regno = true_regnum (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = -1;
    }
  else
    /* Constants, MEMs, etc.  */
    regno = -1;

  /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
     into anything.  */
  if (class == GENERAL_REGS || class == BASE_REGS
      || (regno >= 0 && INT_REGNO_P (regno)))
    return NO_REGS;

  /* Constants, memory, and FP registers can go into FP registers.  */
  if ((regno == -1 || FP_REGNO_P (regno))
      && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
    return NO_REGS;

  /* Memory, and AltiVec registers can go into AltiVec registers.  */
  if ((regno == -1 || ALTIVEC_REGNO_P (regno))
      && class == ALTIVEC_REGS)
    return NO_REGS;

  /* We can copy among the CR registers.  */
  if ((class == CR_REGS || class == CR0_REGS)
      && regno >= 0 && CR_REGNO_P (regno))
    return NO_REGS;

  /* Otherwise, we need GENERAL_REGS as an intermediary.  */
  return GENERAL_REGS;
}
7099 \f
7100 /* Given a comparison operation, return the bit number in CCR to test. We
7101 know this is a valid comparison.
7102
7103 SCC_P is 1 if this is for an scc. That means that %D will have been
7104 used instead of %C, so the bits will be in different places.
7105
7106 Return -1 if OP isn't a valid comparison for some reason. */
7107
int
ccr_bit (op, scc_p)
     rtx op;
     int scc_p;
{
  enum rtx_code code = GET_CODE (op);
  enum machine_mode cc_mode;
  int cc_regnum;
  int base_bit;
  rtx reg;

  /* Not a comparison at all.  */
  if (GET_RTX_CLASS (code) != '<')
    return -1;

  reg = XEXP (op, 0);

  /* The comparison must test one of the CR registers.  */
  if (GET_CODE (reg) != REG
      || ! CR_REGNO_P (REGNO (reg)))
    abort ();

  cc_mode = GET_MODE (reg);
  cc_regnum = REGNO (reg);
  /* Each CR field occupies 4 bits of CCR: LT, GT, EQ, SO/UN.  */
  base_bit = 4 * (cc_regnum - CR0_REGNO);

  validate_condition_mode (code, cc_mode);

  switch (code)
    {
    case NE:
      /* NOTE(review): for SPE soft-float the result apparently lives in
	 the GT bit -- confirm against the SPE compare patterns.  */
      if (TARGET_SPE && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
	return base_bit + 1;
      /* For scc a crnor has put the inverted EQ bit into the SO/UN
	 position (see %D handling in print_operand).  */
      return scc_p ? base_bit + 3 : base_bit + 2;
    case EQ:
      if (TARGET_SPE && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
	return base_bit + 1;
      return base_bit + 2;
    case GT: case GTU: case UNLE:
      return base_bit + 1;
    case LT: case LTU: case UNGE:
      return base_bit;
    case ORDERED: case UNORDERED:
      return base_bit + 3;

    case GE: case GEU:
      /* If scc, we will have done a cror to put the bit in the
	 unordered position.  So test that bit.  For integer, this is ! LT
	 unless this is an scc insn.  */
      return scc_p ? base_bit + 3 : base_bit;

    case LE: case LEU:
      /* Likewise: scc combined the bit into the SO/UN position;
	 otherwise LE is ! GT.  */
      return scc_p ? base_bit + 3 : base_bit + 1;

    default:
      abort ();
    }
}
7164 \f
7165 /* Return the GOT register. */
7166
struct rtx_def *
rs6000_got_register (value)
     rtx value ATTRIBUTE_UNUSED;
{
  /* The second flow pass currently (June 1999) can't update
     regs_ever_live without disturbing other parts of the compiler, so
     update it here to make the prolog/epilogue code happy.  */
  if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
    regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;

  /* Record that this function needs the GOT/TOC pointer set up.  */
  current_function_uses_pic_offset_table = 1;

  return pic_offset_table_rtx;
}
7181 \f
7182 /* Function to init struct machine_function.
7183 This will be called, via a pointer variable,
7184 from push_function_context. */
7185
static struct machine_function *
rs6000_init_machine_status ()
{
  /* Allocate a zero-filled machine_function in GC-managed storage, so
     every per-function field starts out 0/NULL.  */
  return ggc_alloc_cleared (sizeof (machine_function));
}
7191 \f
7192 /* These macros test for integers and extract the low-order bits. */
7193 #define INT_P(X) \
7194 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
7195 && GET_MODE (X) == VOIDmode)
7196
7197 #define INT_LOWPART(X) \
7198 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
7199
/* Extract the mask-begin (MB) field for the rlwinm-style mask constant
   OP.  Bits are numbered big-endian, bit 0 being the most significant
   of the low 32 bits.  */
int
extract_MB (op)
     rtx op;
{
  int i;
  unsigned long val = INT_LOWPART (op);

  /* If the high bit is zero, the value is the first 1 bit we find
     from the left.  */
  if ((val & 0x80000000) == 0)
    {
      /* An all-zero mask is not a valid mask constant.  */
      if ((val & 0xffffffff) == 0)
	abort ();

      i = 1;
      while (((val <<= 1) & 0x80000000) == 0)
	++i;
      return i;
    }

  /* If the high bit is set and the low bit is not, or the mask is all
     1's, the value is zero.  */
  if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
    return 0;

  /* Otherwise we have a wrap-around mask.  Look for the first 0 bit
     from the right: the mask restarts just below it.  */
  i = 31;
  while (((val >>= 1) & 1) != 0)
    --i;

  return i;
}
7233
/* Extract the mask-end (ME) field for the rlwinm-style mask constant
   OP.  Bits are numbered big-endian, bit 0 being the most significant
   of the low 32 bits.  */
int
extract_ME (op)
     rtx op;
{
  int i;
  unsigned long val = INT_LOWPART (op);

  /* If the low bit is zero, the value is the first 1 bit we find from
     the right.  */
  if ((val & 1) == 0)
    {
      /* An all-zero mask is not a valid mask constant.  */
      if ((val & 0xffffffff) == 0)
	abort ();

      i = 30;
      while (((val >>= 1) & 1) == 0)
	--i;

      return i;
    }

  /* If the low bit is set and the high bit is not, or the mask is all
     1's, the value is 31.  */
  if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
    return 31;

  /* Otherwise we have a wrap-around mask.  Look for the first 0 bit
     from the left: the mask ends just above it.  */
  i = 0;
  while (((val <<= 1) & 0x80000000) != 0)
    ++i;

  return i;
}
7268
7269 /* Print an operand. Recognize special options, documented below. */
7270
7271 #if TARGET_ELF
7272 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
7273 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
7274 #else
7275 #define SMALL_DATA_RELOC "sda21"
7276 #define SMALL_DATA_REG 0
7277 #endif
7278
7279 void
7280 print_operand (file, x, code)
7281 FILE *file;
7282 rtx x;
7283 int code;
7284 {
7285 int i;
7286 HOST_WIDE_INT val;
7287 unsigned HOST_WIDE_INT uval;
7288
7289 switch (code)
7290 {
7291 case '.':
7292 /* Write out an instruction after the call which may be replaced
7293 with glue code by the loader. This depends on the AIX version. */
7294 asm_fprintf (file, RS6000_CALL_GLUE);
7295 return;
7296
7297 /* %a is output_address. */
7298
7299 case 'A':
7300 /* If X is a constant integer whose low-order 5 bits are zero,
7301 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
7302 in the AIX assembler where "sri" with a zero shift count
7303 writes a trash instruction. */
7304 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
7305 putc ('l', file);
7306 else
7307 putc ('r', file);
7308 return;
7309
7310 case 'b':
7311 /* If constant, low-order 16 bits of constant, unsigned.
7312 Otherwise, write normally. */
7313 if (INT_P (x))
7314 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
7315 else
7316 print_operand (file, x, 0);
7317 return;
7318
7319 case 'B':
7320 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
7321 for 64-bit mask direction. */
7322 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
7323 return;
7324
7325 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
7326 output_operand. */
7327
7328 case 'D':
7329 /* There used to be a comment for 'C' reading "This is an
7330 optional cror needed for certain floating-point
7331 comparisons. Otherwise write nothing." */
7332
7333 /* Similar, except that this is for an scc, so we must be able to
7334 encode the test in a single bit that is one. We do the above
7335 for any LE, GE, GEU, or LEU and invert the bit for NE. */
7336 if (GET_CODE (x) == LE || GET_CODE (x) == GE
7337 || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
7338 {
7339 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7340
7341 fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
7342 base_bit + 2,
7343 base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
7344 }
7345
7346 else if (GET_CODE (x) == NE)
7347 {
7348 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7349
7350 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
7351 base_bit + 2, base_bit + 2);
7352 }
7353 else if (TARGET_SPE && TARGET_HARD_FLOAT
7354 && GET_CODE (x) == EQ
7355 && GET_MODE (XEXP (x, 0)) == CCFPmode)
7356 {
7357 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7358
7359 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 1,
7360 base_bit + 1, base_bit + 1);
7361 }
7362 return;
7363
7364 case 'E':
7365 /* X is a CR register. Print the number of the EQ bit of the CR */
7366 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7367 output_operand_lossage ("invalid %%E value");
7368 else
7369 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
7370 return;
7371
7372 case 'f':
7373 /* X is a CR register. Print the shift count needed to move it
7374 to the high-order four bits. */
7375 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7376 output_operand_lossage ("invalid %%f value");
7377 else
7378 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
7379 return;
7380
7381 case 'F':
7382 /* Similar, but print the count for the rotate in the opposite
7383 direction. */
7384 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7385 output_operand_lossage ("invalid %%F value");
7386 else
7387 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
7388 return;
7389
7390 case 'G':
7391 /* X is a constant integer. If it is negative, print "m",
7392 otherwise print "z". This is to make an aze or ame insn. */
7393 if (GET_CODE (x) != CONST_INT)
7394 output_operand_lossage ("invalid %%G value");
7395 else if (INTVAL (x) >= 0)
7396 putc ('z', file);
7397 else
7398 putc ('m', file);
7399 return;
7400
7401 case 'h':
7402 /* If constant, output low-order five bits. Otherwise, write
7403 normally. */
7404 if (INT_P (x))
7405 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
7406 else
7407 print_operand (file, x, 0);
7408 return;
7409
7410 case 'H':
7411 /* If constant, output low-order six bits. Otherwise, write
7412 normally. */
7413 if (INT_P (x))
7414 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
7415 else
7416 print_operand (file, x, 0);
7417 return;
7418
7419 case 'I':
7420 /* Print `i' if this is a constant, else nothing. */
7421 if (INT_P (x))
7422 putc ('i', file);
7423 return;
7424
7425 case 'j':
7426 /* Write the bit number in CCR for jump. */
7427 i = ccr_bit (x, 0);
7428 if (i == -1)
7429 output_operand_lossage ("invalid %%j code");
7430 else
7431 fprintf (file, "%d", i);
7432 return;
7433
7434 case 'J':
7435 /* Similar, but add one for shift count in rlinm for scc and pass
7436 scc flag to `ccr_bit'. */
7437 i = ccr_bit (x, 1);
7438 if (i == -1)
7439 output_operand_lossage ("invalid %%J code");
7440 else
7441 /* If we want bit 31, write a shift count of zero, not 32. */
7442 fprintf (file, "%d", i == 31 ? 0 : i + 1);
7443 return;
7444
7445 case 'k':
7446 /* X must be a constant. Write the 1's complement of the
7447 constant. */
7448 if (! INT_P (x))
7449 output_operand_lossage ("invalid %%k value");
7450 else
7451 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
7452 return;
7453
7454 case 'K':
7455 /* X must be a symbolic constant on ELF. Write an
7456 expression suitable for an 'addi' that adds in the low 16
7457 bits of the MEM. */
7458 if (GET_CODE (x) != CONST)
7459 {
7460 print_operand_address (file, x);
7461 fputs ("@l", file);
7462 }
7463 else
7464 {
7465 if (GET_CODE (XEXP (x, 0)) != PLUS
7466 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
7467 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
7468 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
7469 output_operand_lossage ("invalid %%K value");
7470 print_operand_address (file, XEXP (XEXP (x, 0), 0));
7471 fputs ("@l", file);
7472 /* For GNU as, there must be a non-alphanumeric character
7473 between 'l' and the number. The '-' is added by
7474 print_operand() already. */
7475 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
7476 fputs ("+", file);
7477 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
7478 }
7479 return;
7480
7481 /* %l is output_asm_label. */
7482
7483 case 'L':
7484 /* Write second word of DImode or DFmode reference. Works on register
7485 or non-indexed memory only. */
7486 if (GET_CODE (x) == REG)
7487 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
7488 else if (GET_CODE (x) == MEM)
7489 {
7490 /* Handle possible auto-increment. Since it is pre-increment and
7491 we have already done it, we can just use an offset of word. */
7492 if (GET_CODE (XEXP (x, 0)) == PRE_INC
7493 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
7494 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
7495 UNITS_PER_WORD));
7496 else
7497 output_address (XEXP (adjust_address_nv (x, SImode,
7498 UNITS_PER_WORD),
7499 0));
7500
7501 if (small_data_operand (x, GET_MODE (x)))
7502 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
7503 reg_names[SMALL_DATA_REG]);
7504 }
7505 return;
7506
7507 case 'm':
7508 /* MB value for a mask operand. */
7509 if (! mask_operand (x, SImode))
7510 output_operand_lossage ("invalid %%m value");
7511
7512 fprintf (file, "%d", extract_MB (x));
7513 return;
7514
7515 case 'M':
7516 /* ME value for a mask operand. */
7517 if (! mask_operand (x, SImode))
7518 output_operand_lossage ("invalid %%M value");
7519
7520 fprintf (file, "%d", extract_ME (x));
7521 return;
7522
7523 /* %n outputs the negative of its operand. */
7524
7525 case 'N':
7526 /* Write the number of elements in the vector times 4. */
7527 if (GET_CODE (x) != PARALLEL)
7528 output_operand_lossage ("invalid %%N value");
7529 else
7530 fprintf (file, "%d", XVECLEN (x, 0) * 4);
7531 return;
7532
7533 case 'O':
7534 /* Similar, but subtract 1 first. */
7535 if (GET_CODE (x) != PARALLEL)
7536 output_operand_lossage ("invalid %%O value");
7537 else
7538 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
7539 return;
7540
7541 case 'p':
7542 /* X is a CONST_INT that is a power of two. Output the logarithm. */
7543 if (! INT_P (x)
7544 || INT_LOWPART (x) < 0
7545 || (i = exact_log2 (INT_LOWPART (x))) < 0)
7546 output_operand_lossage ("invalid %%p value");
7547 else
7548 fprintf (file, "%d", i);
7549 return;
7550
7551 case 'P':
7552 /* The operand must be an indirect memory reference. The result
7553 is the register number. */
7554 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
7555 || REGNO (XEXP (x, 0)) >= 32)
7556 output_operand_lossage ("invalid %%P value");
7557 else
7558 fprintf (file, "%d", REGNO (XEXP (x, 0)));
7559 return;
7560
7561 case 'q':
7562 /* This outputs the logical code corresponding to a boolean
7563 expression. The expression may have one or both operands
7564 negated (if one, only the first one). For condition register
7565 logical operations, it will also treat the negated
7566 CR codes as NOTs, but not handle NOTs of them. */
7567 {
7568 const char *const *t = 0;
7569 const char *s;
7570 enum rtx_code code = GET_CODE (x);
7571 static const char * const tbl[3][3] = {
7572 { "and", "andc", "nor" },
7573 { "or", "orc", "nand" },
7574 { "xor", "eqv", "xor" } };
7575
7576 if (code == AND)
7577 t = tbl[0];
7578 else if (code == IOR)
7579 t = tbl[1];
7580 else if (code == XOR)
7581 t = tbl[2];
7582 else
7583 output_operand_lossage ("invalid %%q value");
7584
7585 if (GET_CODE (XEXP (x, 0)) != NOT)
7586 s = t[0];
7587 else
7588 {
7589 if (GET_CODE (XEXP (x, 1)) == NOT)
7590 s = t[2];
7591 else
7592 s = t[1];
7593 }
7594
7595 fputs (s, file);
7596 }
7597 return;
7598
7599 case 'R':
7600 /* X is a CR register. Print the mask for `mtcrf'. */
7601 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7602 output_operand_lossage ("invalid %%R value");
7603 else
7604 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
7605 return;
7606
7607 case 's':
7608 /* Low 5 bits of 32 - value */
7609 if (! INT_P (x))
7610 output_operand_lossage ("invalid %%s value");
7611 else
7612 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
7613 return;
7614
7615 case 'S':
7616 /* PowerPC64 mask position. All 0's is excluded.
7617 CONST_INT 32-bit mask is considered sign-extended so any
7618 transition must occur within the CONST_INT, not on the boundary. */
7619 if (! mask64_operand (x, DImode))
7620 output_operand_lossage ("invalid %%S value");
7621
7622 uval = INT_LOWPART (x);
7623
7624 if (uval & 1) /* Clear Left */
7625 {
7626 uval &= ((unsigned HOST_WIDE_INT) 1 << 63 << 1) - 1;
7627 i = 64;
7628 }
7629 else /* Clear Right */
7630 {
7631 uval = ~uval;
7632 uval &= ((unsigned HOST_WIDE_INT) 1 << 63 << 1) - 1;
7633 i = 63;
7634 }
7635 while (uval != 0)
7636 --i, uval >>= 1;
7637 if (i < 0)
7638 abort ();
7639 fprintf (file, "%d", i);
7640 return;
7641
7642 case 't':
7643 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
7644 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
7645 abort ();
7646
7647 /* Bit 3 is OV bit. */
7648 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
7649
7650 /* If we want bit 31, write a shift count of zero, not 32. */
7651 fprintf (file, "%d", i == 31 ? 0 : i + 1);
7652 return;
7653
7654 case 'T':
7655 /* Print the symbolic name of a branch target register. */
7656 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
7657 && REGNO (x) != COUNT_REGISTER_REGNUM))
7658 output_operand_lossage ("invalid %%T value");
7659 else if (REGNO (x) == LINK_REGISTER_REGNUM)
7660 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
7661 else
7662 fputs ("ctr", file);
7663 return;
7664
7665 case 'u':
7666 /* High-order 16 bits of constant for use in unsigned operand. */
7667 if (! INT_P (x))
7668 output_operand_lossage ("invalid %%u value");
7669 else
7670 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
7671 (INT_LOWPART (x) >> 16) & 0xffff);
7672 return;
7673
7674 case 'v':
7675 /* High-order 16 bits of constant for use in signed operand. */
7676 if (! INT_P (x))
7677 output_operand_lossage ("invalid %%v value");
7678 else
7679 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
7680 (INT_LOWPART (x) >> 16) & 0xffff);
7681 return;
7682
7683 case 'U':
7684 /* Print `u' if this has an auto-increment or auto-decrement. */
7685 if (GET_CODE (x) == MEM
7686 && (GET_CODE (XEXP (x, 0)) == PRE_INC
7687 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
7688 putc ('u', file);
7689 return;
7690
7691 case 'V':
7692 /* Print the trap code for this operand. */
7693 switch (GET_CODE (x))
7694 {
7695 case EQ:
7696 fputs ("eq", file); /* 4 */
7697 break;
7698 case NE:
7699 fputs ("ne", file); /* 24 */
7700 break;
7701 case LT:
7702 fputs ("lt", file); /* 16 */
7703 break;
7704 case LE:
7705 fputs ("le", file); /* 20 */
7706 break;
7707 case GT:
7708 fputs ("gt", file); /* 8 */
7709 break;
7710 case GE:
7711 fputs ("ge", file); /* 12 */
7712 break;
7713 case LTU:
7714 fputs ("llt", file); /* 2 */
7715 break;
7716 case LEU:
7717 fputs ("lle", file); /* 6 */
7718 break;
7719 case GTU:
7720 fputs ("lgt", file); /* 1 */
7721 break;
7722 case GEU:
7723 fputs ("lge", file); /* 5 */
7724 break;
7725 default:
7726 abort ();
7727 }
7728 break;
7729
7730 case 'w':
7731 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
7732 normally. */
7733 if (INT_P (x))
7734 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
7735 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
7736 else
7737 print_operand (file, x, 0);
7738 return;
7739
7740 case 'W':
7741 /* MB value for a PowerPC64 rldic operand. */
7742 val = (GET_CODE (x) == CONST_INT
7743 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
7744
7745 if (val < 0)
7746 i = -1;
7747 else
7748 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
7749 if ((val <<= 1) < 0)
7750 break;
7751
7752 #if HOST_BITS_PER_WIDE_INT == 32
7753 if (GET_CODE (x) == CONST_INT && i >= 0)
7754 i += 32; /* zero-extend high-part was all 0's */
7755 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
7756 {
7757 val = CONST_DOUBLE_LOW (x);
7758
7759 if (val == 0)
7760 abort ();
7761 else if (val < 0)
7762 --i;
7763 else
7764 for ( ; i < 64; i++)
7765 if ((val <<= 1) < 0)
7766 break;
7767 }
7768 #endif
7769
7770 fprintf (file, "%d", i + 1);
7771 return;
7772
7773 case 'X':
7774 if (GET_CODE (x) == MEM
7775 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
7776 putc ('x', file);
7777 return;
7778
7779 case 'Y':
7780 /* Like 'L', for third word of TImode */
7781 if (GET_CODE (x) == REG)
7782 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
7783 else if (GET_CODE (x) == MEM)
7784 {
7785 if (GET_CODE (XEXP (x, 0)) == PRE_INC
7786 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
7787 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
7788 else
7789 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
7790 if (small_data_operand (x, GET_MODE (x)))
7791 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
7792 reg_names[SMALL_DATA_REG]);
7793 }
7794 return;
7795
7796 case 'z':
7797 /* X is a SYMBOL_REF. Write out the name preceded by a
7798 period and without any trailing data in brackets. Used for function
7799 names. If we are configured for System V (or the embedded ABI) on
7800 the PowerPC, do not emit the period, since those systems do not use
7801 TOCs and the like. */
7802 if (GET_CODE (x) != SYMBOL_REF)
7803 abort ();
7804
7805 if (XSTR (x, 0)[0] != '.')
7806 {
7807 switch (DEFAULT_ABI)
7808 {
7809 default:
7810 abort ();
7811
7812 case ABI_AIX:
7813 putc ('.', file);
7814 break;
7815
7816 case ABI_V4:
7817 case ABI_AIX_NODESC:
7818 case ABI_DARWIN:
7819 break;
7820 }
7821 }
7822 #if TARGET_AIX
7823 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
7824 #else
7825 assemble_name (file, XSTR (x, 0));
7826 #endif
7827 return;
7828
7829 case 'Z':
7830 /* Like 'L', for last word of TImode. */
7831 if (GET_CODE (x) == REG)
7832 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
7833 else if (GET_CODE (x) == MEM)
7834 {
7835 if (GET_CODE (XEXP (x, 0)) == PRE_INC
7836 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
7837 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
7838 else
7839 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
7840 if (small_data_operand (x, GET_MODE (x)))
7841 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
7842 reg_names[SMALL_DATA_REG]);
7843 }
7844 return;
7845
7846 /* Print AltiVec or SPE memory operand. */
7847 case 'y':
7848 {
7849 rtx tmp;
7850
7851 if (GET_CODE (x) != MEM)
7852 abort ();
7853
7854 tmp = XEXP (x, 0);
7855
7856 if (TARGET_SPE)
7857 {
7858 /* Handle [reg]. */
7859 if (GET_CODE (tmp) == REG)
7860 {
7861 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
7862 break;
7863 }
7864 /* Handle [reg+UIMM]. */
7865 else if (GET_CODE (tmp) == PLUS &&
7866 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
7867 {
7868 int x;
7869
7870 if (GET_CODE (XEXP (tmp, 0)) != REG)
7871 abort ();
7872
7873 x = INTVAL (XEXP (tmp, 1));
7874 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
7875 break;
7876 }
7877
7878 /* Fall through. Must be [reg+reg]. */
7879 }
7880 if (GET_CODE (tmp) == REG)
7881 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
7882 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
7883 {
7884 if (REGNO (XEXP (tmp, 0)) == 0)
7885 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
7886 reg_names[ REGNO (XEXP (tmp, 0)) ]);
7887 else
7888 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
7889 reg_names[ REGNO (XEXP (tmp, 1)) ]);
7890 }
7891 else
7892 abort ();
7893 break;
7894 }
7895
7896 case 0:
7897 if (GET_CODE (x) == REG)
7898 fprintf (file, "%s", reg_names[REGNO (x)]);
7899 else if (GET_CODE (x) == MEM)
7900 {
7901 /* We need to handle PRE_INC and PRE_DEC here, since we need to
7902 know the width from the mode. */
7903 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
7904 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
7905 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
7906 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
7907 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
7908 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
7909 else
7910 output_address (XEXP (x, 0));
7911 }
7912 else
7913 output_addr_const (file, x);
7914 return;
7915
7916 default:
7917 output_operand_lossage ("invalid %%xn code");
7918 }
7919 }
7920 \f
/* Print the address of an operand.

   Emit to FILE the assembler syntax for the address X, which must be
   a legitimate RS/6000 memory address (plain register, symbolic
   reference, reg+reg, reg+const, lo_sum, or TOC-relative constant
   pool reference).  Any unrecognized form aborts.  */

void
print_operand_address (file, x)
     FILE *file;
     rtx x;
{
  /* A bare register means a zero displacement: "0(rN)".  */
  if (GET_CODE (x) == REG)
    fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
  else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
	   || GET_CODE (x) == LABEL_REF)
    {
      output_addr_const (file, x);
      /* Small-data references are addressed relative to the small
	 data base register, with a relocation suffix.  */
      if (small_data_operand (x, GET_MODE (x)))
	fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		 reg_names[SMALL_DATA_REG]);
      /* With a TOC, a bare symbolic address should never reach here;
	 it ought to have been turned into a TOC reference.  */
      else if (TARGET_TOC)
	abort ();
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
    {
      /* Indexed form "ra,rb".  Register 0 reads as the literal zero
	 in the RA slot of indexed instructions, so if operand 0 is
	 r0 it must be printed second.  */
      if (REGNO (XEXP (x, 0)) == 0)
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
		 reg_names[ REGNO (XEXP (x, 0)) ]);
      else
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
		 reg_names[ REGNO (XEXP (x, 1)) ]);
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      /* Displacement form "d(rN)".  */
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
      fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#if TARGET_ELF
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
	   && CONSTANT_P (XEXP (x, 1)))
    {
      /* ELF low-part relocation: "sym@l(rN)".  */
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
#if TARGET_MACHO
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
	   && CONSTANT_P (XEXP (x, 1)))
    {
      /* Darwin spells the low 16 bits as "lo16(sym)(rN)".  */
      fprintf (file, "lo16(");
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
  else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
    {
      if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
	{
	  rtx contains_minus = XEXP (x, 1);
	  rtx minus, symref;
	  const char *name;

	  /* Find the (minus (sym) (toc)) buried in X, and temporarily
	     turn it into (sym) for output_addr_const.  */
	  while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
	    contains_minus = XEXP (contains_minus, 0);

	  /* NOTE: the shared RTL is modified in place here and
	     restored below; nothing may inspect X in between.  */
	  minus = XEXP (contains_minus, 0);
	  symref = XEXP (minus, 0);
	  XEXP (contains_minus, 0) = symref;
	  if (TARGET_ELF)
	    {
	      char *newname;

	      /* On ELF, tack "@toc" onto the symbol name for the
		 duration of the output call.  The alloca'd buffer
		 lives until this function returns, which outlasts
		 its use in output_addr_const.  */
	      name = XSTR (symref, 0);
	      newname = alloca (strlen (name) + sizeof ("@toc"));
	      strcpy (newname, name);
	      strcat (newname, "@toc");
	      XSTR (symref, 0) = newname;
	    }
	  output_addr_const (file, XEXP (x, 1));
	  /* Undo both temporary rewrites.  (NAME is only read when
	     TARGET_ELF, the same condition under which it was set.)  */
	  if (TARGET_ELF)
	    XSTR (symref, 0) = name;
	  XEXP (contains_minus, 0) = minus;
	}
      else
	output_addr_const (file, XEXP (x, 1));

      /* Base register of the TOC reference.  */
      fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
    }
  else
    abort ();
}
8010 \f
/* Target hook for assembling integer objects.  The PowerPC version has
   to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
   is defined.  It also needs to handle DI-mode objects on 64-bit
   targets.

   X is the value to emit, SIZE its size in bytes, and ALIGNED_P is
   nonzero when the location is known to be naturally aligned.
   Returns true if this function emitted the object itself, otherwise
   falls through to default_assemble_integer.  */

static bool
rs6000_assemble_integer (x, size, aligned_p)
     rtx x;
     unsigned int size;
     int aligned_p;
{
#ifdef RELOCATABLE_NEEDS_FIXUP
  /* Special handling for SI values.  */
  if (size == 4 && aligned_p)
    {
      extern int in_toc_section PARAMS ((void));
      /* Guard against re-entering this hook while we are emitting a
	 fixup entry below (the emission itself assembles integers).  */
      static int recurse = 0;

      /* For -mrelocatable, we mark all addresses that need to be fixed up
	 in the .fixup section.  */
      if (TARGET_RELOCATABLE
	  && !in_toc_section ()
	  && !in_text_section ()
	  && !recurse
	  && GET_CODE (x) != CONST_INT
	  && GET_CODE (x) != CONST_DOUBLE
	  && CONSTANT_P (x))
	{
	  char buf[256];

	  recurse = 1;
	  /* Label the word so the fixup table can point back at it.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
	  fixuplabelno++;
	  ASM_OUTPUT_LABEL (asm_out_file, buf);
	  /* Emit the word itself, tagged @fixup for the linker.  */
	  fprintf (asm_out_file, "\t.long\t(");
	  output_addr_const (asm_out_file, x);
	  fprintf (asm_out_file, ")@fixup\n");
	  /* Record the label's address in the .fixup section, then
	     return to the previous section.  */
	  fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
	  ASM_OUTPUT_ALIGN (asm_out_file, 2);
	  fprintf (asm_out_file, "\t.long\t");
	  assemble_name (asm_out_file, buf);
	  fprintf (asm_out_file, "\n\t.previous\n");
	  recurse = 0;
	  return true;
	}
      /* Remove initial .'s to turn a -mcall-aixdesc function
	 address into the address of the descriptor, not the function
	 itself.  */
      else if (GET_CODE (x) == SYMBOL_REF
	       && XSTR (x, 0)[0] == '.'
	       && DEFAULT_ABI == ABI_AIX)
	{
	  const char *name = XSTR (x, 0);
	  while (*name == '.')
	    name++;

	  fprintf (asm_out_file, "\t.long\t%s\n", name);
	  return true;
	}
    }
#endif /* RELOCATABLE_NEEDS_FIXUP */
  /* Everything else gets the generic handling.  */
  return default_assemble_integer (x, size, aligned_p);
}
8074
8075 #ifdef HAVE_GAS_HIDDEN
8076 /* Emit an assembler directive to set symbol visibility for DECL to
8077 VISIBILITY_TYPE. */
8078
8079 static void
8080 rs6000_assemble_visibility (decl, visibility_type)
8081 tree decl;
8082 const char *visibility_type;
8083 {
8084 default_assemble_visibility (decl, visibility_type);
8085
8086 /* Functions need to have their entry point symbol visibility set as
8087 well as their descriptor symbol visibility. */
8088 if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
8089 {
8090 const char *name;
8091
8092 name = ((* targetm.strip_name_encoding)
8093 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
8094
8095 fprintf (asm_out_file, "\t.%s\t.%s\n", visibility_type, name);
8096 }
8097 }
8098 #endif
8099 \f
8100 enum rtx_code
8101 rs6000_reverse_condition (mode, code)
8102 enum machine_mode mode;
8103 enum rtx_code code;
8104 {
8105 /* Reversal of FP compares takes care -- an ordered compare
8106 becomes an unordered compare and vice versa. */
8107 if (mode == CCFPmode)
8108 return reverse_condition_maybe_unordered (code);
8109 else
8110 return reverse_condition (code);
8111 }
8112
/* Generate a compare for CODE.  Return a brand-new rtx that
   represents the result of the compare.

   Reads the implicit operands from the globals rs6000_compare_op0,
   rs6000_compare_op1 and rs6000_compare_fp_p.  As a side effect,
   emits the compare insn(s); the returned rtx is CODE applied to the
   condition register result and const0_rtx.  */

static rtx
rs6000_generate_compare (code)
     enum rtx_code code;
{
  enum machine_mode comp_mode;
  rtx compare_result;

  /* Pick the CC mode: FP compares, unsigned integer compares, and
     signed integer compares each use a distinct mode.  */
  if (rs6000_compare_fp_p)
    comp_mode = CCFPmode;
  else if (code == GTU || code == LTU
	   || code == GEU || code == LEU)
    comp_mode = CCUNSmode;
  else
    comp_mode = CCmode;

  /* First, the compare.  */
  compare_result = gen_reg_rtx (comp_mode);

  /* SPE FP compare instructions on the GPRs.  Yuck!  */
  if ((TARGET_SPE && TARGET_HARD_FLOAT) && rs6000_compare_fp_p)
    {
      rtx cmp, or1, or2, or_result, compare_result2;

      /* SPE only has eq/gt/lt compares (tst* when unsafe math allows
	 ignoring NaNs, cmp* otherwise), so map each rtx code onto one
	 of those three.  */
      switch (code)
	{
	case EQ:
	case UNEQ:
	case NE:
	case LTGT:
	  cmp = flag_unsafe_math_optimizations
	    ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1);
	  break;
	case GT:
	case GTU:
	case UNGT:
	case UNGE:
	case GE:
	case GEU:
	  cmp = flag_unsafe_math_optimizations
	    ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1);
	  break;
	case LT:
	case LTU:
	case UNLT:
	case UNLE:
	case LE:
	case LEU:
	  cmp = flag_unsafe_math_optimizations
	    ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1);
	  break;
	default:
	  abort ();
	}

      /* Synthesize LE and GE from LT/GT || EQ.  */
      if (code == LE || code == GE || code == LEU || code == GEU)
	{
	  /* Synthesize GE/LE from GT/LT || EQ.  */

	  emit_insn (cmp);

	  switch (code)
	    {
	    case LE: code = LT; break;
	    case GE: code = GT; break;
	    case LEU: code = LT; break;
	    case GEU: code = GT; break;
	    default: abort ();
	    }

	  or1 = gen_reg_rtx (SImode);
	  or2 = gen_reg_rtx (SImode);
	  or_result = gen_reg_rtx (CCEQmode);
	  compare_result2 = gen_reg_rtx (CCFPmode);

	  /* Do the EQ.  */
	  cmp = flag_unsafe_math_optimizations
	    ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
			       rs6000_compare_op1);
	  emit_insn (cmp);

	  /* The MC8540 FP compare instructions set the CR bits
	     differently than other PPC compare instructions.  For
	     that matter, there is no generic test instruction, but a
	     testgt, testlt, and testeq.  For a true condition, bit 2
	     is set (x1xx) in the CR.  Following the traditional CR
	     values:

	     LT    GT    EQ    OV
	     bit3  bit2  bit1  bit0

	     ... bit 2 would be a GT CR alias, so later on we
	     look in the GT bits for the branch instructions.
	     However, we must be careful to emit correct RTL in
	     the meantime, so optimizations don't get confused.  */

	  or1 = gen_rtx (NE, SImode, compare_result, const0_rtx);
	  or2 = gen_rtx (NE, SImode, compare_result2, const0_rtx);

	  /* OR them together.  */
	  cmp = gen_rtx_SET (VOIDmode, or_result,
			     gen_rtx_COMPARE (CCEQmode,
					      gen_rtx_IOR (SImode, or1, or2),
					      const_true_rtx));
	  compare_result = or_result;
	  code = EQ;
	}
      else
	{
	  /* We only care about 1 bit (x1xx), so map everything to NE to
	     maintain rtl sanity.  We'll get to the right bit (x1xx) at
	     code output time.  */
	  if (code == NE || code == LTGT)
	    /* Do the inverse here because we have no cmpne
	       instruction.  We use the cmpeq instruction and expect
	       to get a 0 instead.  */
	    code = EQ;
	  else
	    code = NE;
	}

      emit_insn (cmp);
    }
  else
    /* Non-SPE path: a single compare insn setting a CC register.  */
    emit_insn (gen_rtx_SET (VOIDmode, compare_result,
			    gen_rtx_COMPARE (comp_mode,
					     rs6000_compare_op0,
					     rs6000_compare_op1)));

  /* Some kinds of FP comparisons need an OR operation;
     except for flag_unsafe_math_optimizations we don't bother.  */
  if (rs6000_compare_fp_p
      && ! flag_unsafe_math_optimizations
      && ! (TARGET_HARD_FLOAT && TARGET_SPE)
      && (code == LE || code == GE
	  || code == UNEQ || code == LTGT
	  || code == UNGT || code == UNLT))
    {
      /* Decompose the compound test into two primitive CR bits to OR
	 together (e.g. LE == LT || EQ), producing a CCEQmode result.  */
      enum rtx_code or1, or2;
      rtx or1_rtx, or2_rtx, compare2_rtx;
      rtx or_result = gen_reg_rtx (CCEQmode);

      switch (code)
	{
	case LE: or1 = LT;  or2 = EQ;  break;
	case GE: or1 = GT;  or2 = EQ;  break;
	case UNEQ: or1 = UNORDERED;  or2 = EQ;  break;
	case LTGT: or1 = LT;  or2 = GT;  break;
	case UNGT: or1 = UNORDERED;  or2 = GT;  break;
	case UNLT: or1 = UNORDERED;  or2 = LT;  break;
	default: abort ();
	}
      validate_condition_mode (or1, comp_mode);
      validate_condition_mode (or2, comp_mode);
      or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
      or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
      compare2_rtx = gen_rtx_COMPARE (CCEQmode,
				      gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
				      const_true_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));

      /* From here on the result is simply an EQ test of the OR.  */
      compare_result = or_result;
      code = EQ;
    }

  validate_condition_mode (code, GET_MODE (compare_result));

  return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
}
8296
8297
8298 /* Emit the RTL for an sCOND pattern. */
8299
8300 void
8301 rs6000_emit_sCOND (code, result)
8302 enum rtx_code code;
8303 rtx result;
8304 {
8305 rtx condition_rtx;
8306 enum machine_mode op_mode;
8307
8308 condition_rtx = rs6000_generate_compare (code);
8309
8310 op_mode = GET_MODE (rs6000_compare_op0);
8311 if (op_mode == VOIDmode)
8312 op_mode = GET_MODE (rs6000_compare_op1);
8313
8314 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
8315 {
8316 PUT_MODE (condition_rtx, DImode);
8317 convert_move (result, condition_rtx, 0);
8318 }
8319 else
8320 {
8321 PUT_MODE (condition_rtx, SImode);
8322 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
8323 }
8324 }
8325
8326 /* Emit a branch of kind CODE to location LOC. */
8327
8328 void
8329 rs6000_emit_cbranch (code, loc)
8330 enum rtx_code code;
8331 rtx loc;
8332 {
8333 rtx condition_rtx, loc_ref;
8334
8335 condition_rtx = rs6000_generate_compare (code);
8336 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
8337 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
8338 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
8339 loc_ref, pc_rtx)));
8340 }
8341
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label, or -1 if the branch is really a
   conditional return.

   OP is the conditional expression.  XEXP (OP, 0) is assumed to be a
   condition code register and its mode specifies what kind of
   comparison we made.

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.

   The result points into a static buffer, so it is only valid until
   the next call and this function is not reentrant.  */

char *
output_cbranch (op, label, reversed, insn)
     rtx op;
     const char * label;
     int reversed;
     rtx insn;
{
  static char string[64];
  enum rtx_code code = GET_CODE (op);
  rtx cc_reg = XEXP (op, 0);
  enum machine_mode mode = GET_MODE (cc_reg);
  int cc_regno = REGNO (cc_reg) - CR0_REGNO;
  /* Length 8 means the branch was expanded to two instructions,
     i.e. the target is out of range for a single conditional branch
     (see the long-branch emission at the bottom).  */
  int need_longbranch = label != NULL && get_attr_length (insn) == 8;
  /* A long branch inverts the condition and jumps around an
     unconditional branch, so it flips the requested reversal.  */
  int really_reversed = reversed ^ need_longbranch;
  char *s = string;
  const char *ccode;
  const char *pred;
  rtx note;

  validate_condition_mode (code, mode);

  /* Work out which way this really branches.  We could use
     reverse_condition_maybe_unordered here always but this
     makes the resulting assembler clearer.  */
  if (really_reversed)
    code = rs6000_reverse_condition (mode, code);

  if ((TARGET_SPE && TARGET_HARD_FLOAT) && mode == CCFPmode)
    {
      /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
	 to the GT bit.  */
      if (code == EQ)
	/* Opposite of GT.  */
	code = UNLE;
      else if (code == NE)
	code = GT;
      else
	abort ();
    }

  /* Translate the rtx code into the assembler condition mnemonic.  */
  switch (code)
    {
      /* Not all of these are actually distinct opcodes, but
	 we distinguish them for clarity of the resulting assembler.  */
    case NE: case LTGT:
      ccode = "ne"; break;
    case EQ: case UNEQ:
      ccode = "eq"; break;
    case GE: case GEU:
      ccode = "ge"; break;
    case GT: case GTU: case UNGT:
      ccode = "gt"; break;
    case LE: case LEU:
      ccode = "le"; break;
    case LT: case LTU: case UNLT:
      ccode = "lt"; break;
    case UNORDERED: ccode = "un"; break;
    case ORDERED: ccode = "nu"; break;
    case UNGE: ccode = "nl"; break;
    case UNLE: ccode = "ng"; break;
    default:
      abort ();
    }

  /* Maybe we have a guess as to how likely the branch is.
     The old mnemonics don't have a way to specify this information.  */
  pred = "";
  note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
  if (note != NULL_RTX)
    {
      /* PROB is the difference from 50%.  */
      int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
      bool always_hint = rs6000_cpu != PROCESSOR_POWER4;

      /* Only hint for highly probable/improbable branches on newer
	 cpus as static prediction overrides processor dynamic
	 prediction.  For older cpus we may as well always hint, but
	 assume not taken for branches that are very close to 50% as a
	 mispredicted taken branch is more expensive than a
	 mispredicted not-taken branch.  */
      if (always_hint
	  || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
	{
	  /* For a long branch the emitted condition is inverted, so
	     the taken/not-taken hint must be inverted too.  */
	  if (abs (prob) > REG_BR_PROB_BASE / 20
	      && ((prob > 0) ^ need_longbranch))
	    pred = "+";
	  else
	    pred = "-";
	}
    }

  /* Opcode: "{old|new}" mnemonic pairs; LABEL == NULL means this is
     really a conditional return through the link register.  */
  if (label == NULL)
    s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
  else
    s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);

  /* We need to escape any '%' characters in the reg_names string.
     Assume they'd only be the first character...  */
  if (reg_names[cc_regno + CR0_REGNO][0] == '%')
    *s++ = '%';
  s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);

  if (label != NULL)
    {
      /* If the branch distance was too far, we may have to use an
	 unconditional branch to go the distance.  */
      if (need_longbranch)
	s += sprintf (s, ",$+8\n\tb %s", label);
      else
	s += sprintf (s, ",%s", label);
    }

  return string;
}
8468
8469 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
8470 operands of the last comparison is nonzero/true, FALSE_COND if it
8471 is zero/false. Return 0 if the hardware has no such operation. */
8472
8473 int
8474 rs6000_emit_cmove (dest, op, true_cond, false_cond)
8475 rtx dest;
8476 rtx op;
8477 rtx true_cond;
8478 rtx false_cond;
8479 {
8480 enum rtx_code code = GET_CODE (op);
8481 rtx op0 = rs6000_compare_op0;
8482 rtx op1 = rs6000_compare_op1;
8483 REAL_VALUE_TYPE c1;
8484 enum machine_mode compare_mode = GET_MODE (op0);
8485 enum machine_mode result_mode = GET_MODE (dest);
8486 rtx temp;
8487
8488 /* These modes should always match. */
8489 if (GET_MODE (op1) != compare_mode
8490 /* In the isel case however, we can use a compare immediate, so
8491 op1 may be a small constant. */
8492 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
8493 return 0;
8494 if (GET_MODE (true_cond) != result_mode)
8495 return 0;
8496 if (GET_MODE (false_cond) != result_mode)
8497 return 0;
8498
8499 /* First, work out if the hardware can do this at all, or
8500 if it's too slow... */
8501 if (! rs6000_compare_fp_p)
8502 {
8503 if (TARGET_ISEL)
8504 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
8505 return 0;
8506 }
8507
8508 /* Eliminate half of the comparisons by switching operands, this
8509 makes the remaining code simpler. */
8510 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
8511 || code == LTGT || code == LT)
8512 {
8513 code = reverse_condition_maybe_unordered (code);
8514 temp = true_cond;
8515 true_cond = false_cond;
8516 false_cond = temp;
8517 }
8518
8519 /* UNEQ and LTGT take four instructions for a comparison with zero,
8520 it'll probably be faster to use a branch here too. */
8521 if (code == UNEQ)
8522 return 0;
8523
8524 if (GET_CODE (op1) == CONST_DOUBLE)
8525 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
8526
8527 /* We're going to try to implement comparions by performing
8528 a subtract, then comparing against zero. Unfortunately,
8529 Inf - Inf is NaN which is not zero, and so if we don't
8530 know that the operand is finite and the comparison
8531 would treat EQ different to UNORDERED, we can't do it. */
8532 if (! flag_unsafe_math_optimizations
8533 && code != GT && code != UNGE
8534 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
8535 /* Constructs of the form (a OP b ? a : b) are safe. */
8536 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
8537 || (! rtx_equal_p (op0, true_cond)
8538 && ! rtx_equal_p (op1, true_cond))))
8539 return 0;
8540 /* At this point we know we can use fsel. */
8541
8542 /* Reduce the comparison to a comparison against zero. */
8543 temp = gen_reg_rtx (compare_mode);
8544 emit_insn (gen_rtx_SET (VOIDmode, temp,
8545 gen_rtx_MINUS (compare_mode, op0, op1)));
8546 op0 = temp;
8547 op1 = CONST0_RTX (compare_mode);
8548
8549 /* If we don't care about NaNs we can reduce some of the comparisons
8550 down to faster ones. */
8551 if (flag_unsafe_math_optimizations)
8552 switch (code)
8553 {
8554 case GT:
8555 code = LE;
8556 temp = true_cond;
8557 true_cond = false_cond;
8558 false_cond = temp;
8559 break;
8560 case UNGE:
8561 code = GE;
8562 break;
8563 case UNEQ:
8564 code = EQ;
8565 break;
8566 default:
8567 break;
8568 }
8569
8570 /* Now, reduce everything down to a GE. */
8571 switch (code)
8572 {
8573 case GE:
8574 break;
8575
8576 case LE:
8577 temp = gen_reg_rtx (compare_mode);
8578 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8579 op0 = temp;
8580 break;
8581
8582 case ORDERED:
8583 temp = gen_reg_rtx (compare_mode);
8584 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
8585 op0 = temp;
8586 break;
8587
8588 case EQ:
8589 temp = gen_reg_rtx (compare_mode);
8590 emit_insn (gen_rtx_SET (VOIDmode, temp,
8591 gen_rtx_NEG (compare_mode,
8592 gen_rtx_ABS (compare_mode, op0))));
8593 op0 = temp;
8594 break;
8595
8596 case UNGE:
8597 temp = gen_reg_rtx (result_mode);
8598 emit_insn (gen_rtx_SET (VOIDmode, temp,
8599 gen_rtx_IF_THEN_ELSE (result_mode,
8600 gen_rtx_GE (VOIDmode,
8601 op0, op1),
8602 true_cond, false_cond)));
8603 false_cond = temp;
8604 true_cond = false_cond;
8605
8606 temp = gen_reg_rtx (compare_mode);
8607 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8608 op0 = temp;
8609 break;
8610
8611 case GT:
8612 temp = gen_reg_rtx (result_mode);
8613 emit_insn (gen_rtx_SET (VOIDmode, temp,
8614 gen_rtx_IF_THEN_ELSE (result_mode,
8615 gen_rtx_GE (VOIDmode,
8616 op0, op1),
8617 true_cond, false_cond)));
8618 true_cond = temp;
8619 false_cond = true_cond;
8620
8621 temp = gen_reg_rtx (compare_mode);
8622 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8623 op0 = temp;
8624 break;
8625
8626 default:
8627 abort ();
8628 }
8629
8630 emit_insn (gen_rtx_SET (VOIDmode, dest,
8631 gen_rtx_IF_THEN_ELSE (result_mode,
8632 gen_rtx_GE (VOIDmode,
8633 op0, op1),
8634 true_cond, false_cond)));
8635 return 1;
8636 }
8637
8638 /* Same as above, but for ints (isel). */
8639
8640 static int
8641 rs6000_emit_int_cmove (dest, op, true_cond, false_cond)
8642 rtx dest;
8643 rtx op;
8644 rtx true_cond;
8645 rtx false_cond;
8646 {
8647 rtx condition_rtx, cr;
8648
8649 /* All isel implementations thus far are 32-bits. */
8650 if (GET_MODE (rs6000_compare_op0) != SImode)
8651 return 0;
8652
8653 /* We still have to do the compare, because isel doesn't do a
8654 compare, it just looks at the CRx bits set by a previous compare
8655 instruction. */
8656 condition_rtx = rs6000_generate_compare (GET_CODE (op));
8657 cr = XEXP (condition_rtx, 0);
8658
8659 if (GET_MODE (cr) == CCmode)
8660 emit_insn (gen_isel_signed (dest, condition_rtx,
8661 true_cond, false_cond, cr));
8662 else
8663 emit_insn (gen_isel_unsigned (dest, condition_rtx,
8664 true_cond, false_cond, cr));
8665
8666 return 1;
8667 }
8668
8669 const char *
8670 output_isel (operands)
8671 rtx *operands;
8672 {
8673 enum rtx_code code;
8674
8675 code = GET_CODE (operands[1]);
8676 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
8677 {
8678 PUT_CODE (operands[1], reverse_condition (code));
8679 return "isel %0,%3,%2,%j1";
8680 }
8681 else
8682 return "isel %0,%2,%3,%j1";
8683 }
8684
8685 void
8686 rs6000_emit_minmax (dest, code, op0, op1)
8687 rtx dest;
8688 enum rtx_code code;
8689 rtx op0;
8690 rtx op1;
8691 {
8692 enum machine_mode mode = GET_MODE (op0);
8693 enum rtx_code c;
8694 rtx target;
8695
8696 if (code == SMAX || code == SMIN)
8697 c = GE;
8698 else
8699 c = GEU;
8700
8701 if (code == SMAX || code == UMAX)
8702 target = emit_conditional_move (dest, c, op0, op1, mode,
8703 op0, op1, mode, 0);
8704 else
8705 target = emit_conditional_move (dest, c, op0, op1, mode,
8706 op1, op0, mode, 0);
8707 if (target == NULL_RTX)
8708 abort ();
8709 if (target != dest)
8710 emit_move_insn (dest, target);
8711 }
8712 \f
8713 /* This page contains routines that are used to determine what the
8714 function prologue and epilogue code will do and write them out. */
8715
8716 /* Return the first fixed-point register that is required to be
8717 saved. 32 if none. */
8718
8719 int
8720 first_reg_to_save ()
8721 {
8722 int first_reg;
8723
8724 /* Find lowest numbered live register. */
8725 for (first_reg = 13; first_reg <= 31; first_reg++)
8726 if (regs_ever_live[first_reg]
8727 && (! call_used_regs[first_reg]
8728 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
8729 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8730 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
8731 break;
8732
8733 #if TARGET_MACHO
8734 if (flag_pic
8735 && current_function_uses_pic_offset_table
8736 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
8737 return RS6000_PIC_OFFSET_TABLE_REGNUM;
8738 #endif
8739
8740 return first_reg;
8741 }
8742
8743 /* Similar, for FP regs. */
8744
8745 int
8746 first_fp_reg_to_save ()
8747 {
8748 int first_reg;
8749
8750 /* Find lowest numbered live register. */
8751 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
8752 if (regs_ever_live[first_reg])
8753 break;
8754
8755 return first_reg;
8756 }
8757
8758 /* Similar, for AltiVec regs. */
8759
8760 static int
8761 first_altivec_reg_to_save ()
8762 {
8763 int i;
8764
8765 /* Stack frame remains as is unless we are in AltiVec ABI. */
8766 if (! TARGET_ALTIVEC_ABI)
8767 return LAST_ALTIVEC_REGNO + 1;
8768
8769 /* Find lowest numbered live register. */
8770 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
8771 if (regs_ever_live[i])
8772 break;
8773
8774 return i;
8775 }
8776
/* Return a 32-bit mask of the AltiVec registers we need to set in
   VRSAVE.  Bit n of the return value is 1 if Vn is live.  The MSB in
   the 32-bit word is 0.  */

static unsigned int
compute_vrsave_mask ()
{
  unsigned int i, mask = 0;

  /* First, find out if we use _any_ altivec registers.  */
  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
    if (regs_ever_live[i])
      mask |= ALTIVEC_REG_BIT (i);

  /* No AltiVec register is live: VRSAVE need not be touched.  */
  if (mask == 0)
    return mask;

  /* Next, add all registers that are call-clobbered.  We do this
     because post-reload register optimizers such as regrename_optimize
     may choose to use them.  They never change the register class
     chosen by reload, so cannot create new uses of altivec registers
     if there were none before, so the early exit above is safe.  */
  /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
     altivec registers not saved in the mask, which might well make the
     adjustments below more effective in eliding the save/restore of
     VRSAVE in small functions.  */
  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
    if (call_used_regs[i])
      mask |= ALTIVEC_REG_BIT (i);

  /* Next, remove the argument registers from the set.  These must
     be in the VRSAVE mask set by the caller, so we don't need to add
     them in again.  More importantly, the mask we compute here is
     used to generate CLOBBERs in the set_vrsave insn, and we do not
     wish the argument registers to die.  */
  /* NOTE(review): I is unsigned, so this loop only runs while
     args_info.vregno >= ALTIVEC_ARG_MIN_REG; presumably vregno
     tracks the vector argument registers consumed so far -- confirm
     against the CUMULATIVE_ARGS definition, including whether vregno
     itself (vs. vregno - 1) is the last register actually used.  */
  for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
    mask &= ~ALTIVEC_REG_BIT (i);

  /* Similarly, remove the return value from the set.  */
  {
    bool yes = false;
    diddle_return_value (is_altivec_return_reg, &yes);
    if (yes)
      mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
  }

  return mask;
}
8825
8826 static void
8827 is_altivec_return_reg (reg, xyes)
8828 rtx reg;
8829 void *xyes;
8830 {
8831 bool *yes = (bool *) xyes;
8832 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
8833 *yes = true;
8834 }
8835
8836 \f
8837 /* Calculate the stack information for the current function. This is
8838 complicated by having two separate calling sequences, the AIX calling
8839 sequence and the V.4 calling sequence.
8840
8841 AIX (and Darwin/Mac OS X) stack frames look like:
8842 32-bit 64-bit
8843 SP----> +---------------------------------------+
8844 | back chain to caller | 0 0
8845 +---------------------------------------+
8846 | saved CR | 4 8 (8-11)
8847 +---------------------------------------+
8848 | saved LR | 8 16
8849 +---------------------------------------+
8850 | reserved for compilers | 12 24
8851 +---------------------------------------+
8852 | reserved for binders | 16 32
8853 +---------------------------------------+
8854 | saved TOC pointer | 20 40
8855 +---------------------------------------+
8856 | Parameter save area (P) | 24 48
8857 +---------------------------------------+
8858 | Alloca space (A) | 24+P etc.
8859 +---------------------------------------+
8860 | Local variable space (L) | 24+P+A
8861 +---------------------------------------+
8862 | Float/int conversion temporary (X) | 24+P+A+L
8863 +---------------------------------------+
8864 | Save area for AltiVec registers (W) | 24+P+A+L+X
8865 +---------------------------------------+
8866 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
8867 +---------------------------------------+
8868 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
8869 +---------------------------------------+
8870 | Save area for GP registers (G) | 24+P+A+L+X+W+Y+Z
8871 +---------------------------------------+
8872 | Save area for FP registers (F) | 24+P+A+L+X+W+Y+Z+G
8873 +---------------------------------------+
8874 old SP->| back chain to caller's caller |
8875 +---------------------------------------+
8876
8877 The required alignment for AIX configurations is two words (i.e., 8
8878 or 16 bytes).
8879
8880
8881 V.4 stack frames look like:
8882
8883 SP----> +---------------------------------------+
8884 | back chain to caller | 0
8885 +---------------------------------------+
8886 | caller's saved LR | 4
8887 +---------------------------------------+
8888 | Parameter save area (P) | 8
8889 +---------------------------------------+
8890 | Alloca space (A) | 8+P
8891 +---------------------------------------+
8892 | Varargs save area (V) | 8+P+A
8893 +---------------------------------------+
8894 | Local variable space (L) | 8+P+A+V
8895 +---------------------------------------+
8896 | Float/int conversion temporary (X) | 8+P+A+V+L
8897 +---------------------------------------+
8898 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
8899 +---------------------------------------+
8900 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
8901 +---------------------------------------+
8902 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
8903 +---------------------------------------+
8904 | SPE: area for 64-bit GP registers |
8905 +---------------------------------------+
8906 | SPE alignment padding |
8907 +---------------------------------------+
8908 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
8909 +---------------------------------------+
8910 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
8911 +---------------------------------------+
8912 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
8913 +---------------------------------------+
8914 old SP->| back chain to caller's caller |
8915 +---------------------------------------+
8916
8917 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
8918 given. (But note below and in sysv4.h that we require only 8 and
8919 may round up the size of our stack frame anyways. The historical
8920 reason is early versions of powerpc-linux which didn't properly
8921 align the stack at program startup. A happy side-effect is that
8922 -mno-eabi libraries can be used with -meabi programs.)
8923
8924 The EABI configuration defaults to the V.4 layout, unless
8925 -mcall-aix is used, in which case the AIX layout is used. However,
8926 the stack alignment requirements may differ. If -mno-eabi is not
8927 given, the required stack alignment is 8 bytes; if -mno-eabi is
8928 given, the required alignment is 16 bytes. (But see V.4 comment
8929 above.) */
8930
8931 #ifndef ABI_STACK_BOUNDARY
8932 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
8933 #endif
8934
8935 rs6000_stack_t *
8936 rs6000_stack_info ()
8937 {
8938 static rs6000_stack_t info, zero_info;
8939 rs6000_stack_t *info_ptr = &info;
8940 int reg_size = TARGET_POWERPC64 ? 8 : 4;
8941 enum rs6000_abi abi;
8942 int ehrd_size;
8943 int total_raw_size;
8944
8945 /* Zero all fields portably. */
8946 info = zero_info;
8947
8948 /* Select which calling sequence. */
8949 info_ptr->abi = abi = DEFAULT_ABI;
8950
8951 /* Calculate which registers need to be saved & save area size. */
8952 info_ptr->first_gp_reg_save = first_reg_to_save ();
8953 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
8954 even if it currently looks like we won't. */
8955 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
8956 || (flag_pic == 1 && abi == ABI_V4)
8957 || (flag_pic && abi == ABI_DARWIN))
8958 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
8959 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
8960 else
8961 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
8962
8963 /* For the SPE, we have an additional upper 32-bits on each GPR.
8964 Ideally we should save the entire 64-bits only when the upper
8965 half is used in SIMD instructions. Since we only record
8966 registers live (not the size they are used in), this proves
8967 difficult because we'd have to traverse the instruction chain at
8968 the right time, taking reload into account. This is a real pain,
8969 so we opt to save the GPRs in 64-bits always. Anyone overly
8970 concerned with frame size can fix this. ;-).
8971
8972 So... since we save all GPRs (except the SP) in 64-bits, the
8973 traditional GP save area will be empty. */
8974 if (TARGET_SPE_ABI)
8975 info_ptr->gp_size = 0;
8976
8977 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
8978 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
8979
8980 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
8981 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
8982 - info_ptr->first_altivec_reg_save);
8983
8984 /* Does this function call anything? */
8985 info_ptr->calls_p = (! current_function_is_leaf
8986 || cfun->machine->ra_needs_full_frame);
8987
8988 /* Determine if we need to save the link register. */
8989 if (rs6000_ra_ever_killed ()
8990 || (DEFAULT_ABI == ABI_AIX && current_function_profile)
8991 #ifdef TARGET_RELOCATABLE
8992 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
8993 #endif
8994 || (info_ptr->first_fp_reg_save != 64
8995 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
8996 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
8997 || (abi == ABI_V4 && current_function_calls_alloca)
8998 || (DEFAULT_ABI == ABI_DARWIN
8999 && flag_pic
9000 && current_function_uses_pic_offset_table)
9001 || info_ptr->calls_p)
9002 {
9003 info_ptr->lr_save_p = 1;
9004 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
9005 }
9006
9007 /* Determine if we need to save the condition code registers. */
9008 if (regs_ever_live[CR2_REGNO]
9009 || regs_ever_live[CR3_REGNO]
9010 || regs_ever_live[CR4_REGNO])
9011 {
9012 info_ptr->cr_save_p = 1;
9013 if (abi == ABI_V4)
9014 info_ptr->cr_size = reg_size;
9015 }
9016
9017 /* If the current function calls __builtin_eh_return, then we need
9018 to allocate stack space for registers that will hold data for
9019 the exception handler. */
9020 if (current_function_calls_eh_return)
9021 {
9022 unsigned int i;
9023 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
9024 continue;
9025
9026 /* SPE saves EH registers in 64-bits. */
9027 ehrd_size = i * (TARGET_SPE_ABI ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
9028 }
9029 else
9030 ehrd_size = 0;
9031
9032 /* Determine various sizes. */
9033 info_ptr->reg_size = reg_size;
9034 info_ptr->fixed_size = RS6000_SAVE_AREA;
9035 info_ptr->varargs_size = RS6000_VARARGS_AREA;
9036 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
9037 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
9038 8);
9039
9040 if (TARGET_SPE_ABI)
9041 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
9042 else
9043 info_ptr->spe_gp_size = 0;
9044
9045 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
9046 {
9047 info_ptr->vrsave_mask = compute_vrsave_mask ();
9048 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
9049 }
9050 else
9051 {
9052 info_ptr->vrsave_mask = 0;
9053 info_ptr->vrsave_size = 0;
9054 }
9055
9056 /* Calculate the offsets. */
9057 switch (abi)
9058 {
9059 case ABI_NONE:
9060 default:
9061 abort ();
9062
9063 case ABI_AIX:
9064 case ABI_AIX_NODESC:
9065 case ABI_DARWIN:
9066 info_ptr->fp_save_offset = - info_ptr->fp_size;
9067 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9068
9069 if (TARGET_ALTIVEC_ABI)
9070 {
9071 info_ptr->vrsave_save_offset
9072 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
9073
9074 /* Align stack so vector save area is on a quadword boundary. */
9075 if (info_ptr->altivec_size != 0)
9076 info_ptr->altivec_padding_size
9077 = 16 - (-info_ptr->vrsave_save_offset % 16);
9078 else
9079 info_ptr->altivec_padding_size = 0;
9080
9081 info_ptr->altivec_save_offset
9082 = info_ptr->vrsave_save_offset
9083 - info_ptr->altivec_padding_size
9084 - info_ptr->altivec_size;
9085
9086 /* Adjust for AltiVec case. */
9087 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
9088 }
9089 else
9090 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
9091 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
9092 info_ptr->lr_save_offset = 2*reg_size;
9093 break;
9094
9095 case ABI_V4:
9096 info_ptr->fp_save_offset = - info_ptr->fp_size;
9097 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9098 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
9099
9100 if (TARGET_SPE_ABI)
9101 {
9102 /* Align stack so SPE GPR save area is aligned on a
9103 double-word boundary. */
9104 if (info_ptr->spe_gp_size != 0)
9105 info_ptr->spe_padding_size
9106 = 8 - (-info_ptr->cr_save_offset % 8);
9107 else
9108 info_ptr->spe_padding_size = 0;
9109
9110 info_ptr->spe_gp_save_offset
9111 = info_ptr->cr_save_offset
9112 - info_ptr->spe_padding_size
9113 - info_ptr->spe_gp_size;
9114
9115 /* Adjust for SPE case. */
9116 info_ptr->toc_save_offset
9117 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
9118 }
9119 else if (TARGET_ALTIVEC_ABI)
9120 {
9121 info_ptr->vrsave_save_offset
9122 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
9123
9124 /* Align stack so vector save area is on a quadword boundary. */
9125 if (info_ptr->altivec_size != 0)
9126 info_ptr->altivec_padding_size
9127 = 16 - (-info_ptr->vrsave_save_offset % 16);
9128 else
9129 info_ptr->altivec_padding_size = 0;
9130
9131 info_ptr->altivec_save_offset
9132 = info_ptr->vrsave_save_offset
9133 - info_ptr->altivec_padding_size
9134 - info_ptr->altivec_size;
9135
9136 /* Adjust for AltiVec case. */
9137 info_ptr->toc_save_offset
9138 = info_ptr->altivec_save_offset - info_ptr->toc_size;
9139 }
9140 else
9141 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
9142 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
9143 info_ptr->lr_save_offset = reg_size;
9144 break;
9145 }
9146
9147 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
9148 + info_ptr->gp_size
9149 + info_ptr->altivec_size
9150 + info_ptr->altivec_padding_size
9151 + info_ptr->vrsave_size
9152 + info_ptr->spe_gp_size
9153 + info_ptr->spe_padding_size
9154 + ehrd_size
9155 + info_ptr->cr_size
9156 + info_ptr->lr_size
9157 + info_ptr->vrsave_size
9158 + info_ptr->toc_size,
9159 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
9160 ? 16 : 8);
9161
9162 total_raw_size = (info_ptr->vars_size
9163 + info_ptr->parm_size
9164 + info_ptr->save_size
9165 + info_ptr->varargs_size
9166 + info_ptr->fixed_size);
9167
9168 info_ptr->total_size =
9169 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
9170
9171 /* Determine if we need to allocate any stack frame:
9172
9173 For AIX we need to push the stack if a frame pointer is needed
9174 (because the stack might be dynamically adjusted), if we are
9175 debugging, if we make calls, or if the sum of fp_save, gp_save,
9176 and local variables are more than the space needed to save all
9177 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
9178 + 18*8 = 288 (GPR13 reserved).
9179
9180 For V.4 we don't have the stack cushion that AIX uses, but assume
9181 that the debugger can handle stackless frames. */
9182
9183 if (info_ptr->calls_p)
9184 info_ptr->push_p = 1;
9185
9186 else if (abi == ABI_V4)
9187 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
9188
9189 else
9190 info_ptr->push_p = (frame_pointer_needed
9191 || (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
9192 || ((total_raw_size - info_ptr->fixed_size)
9193 > (TARGET_32BIT ? 220 : 288)));
9194
9195 /* Zero offsets if we're not saving those registers. */
9196 if (info_ptr->fp_size == 0)
9197 info_ptr->fp_save_offset = 0;
9198
9199 if (info_ptr->gp_size == 0)
9200 info_ptr->gp_save_offset = 0;
9201
9202 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
9203 info_ptr->altivec_save_offset = 0;
9204
9205 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
9206 info_ptr->vrsave_save_offset = 0;
9207
9208 if (! TARGET_SPE_ABI || info_ptr->spe_gp_size == 0)
9209 info_ptr->spe_gp_save_offset = 0;
9210
9211 if (! info_ptr->lr_save_p)
9212 info_ptr->lr_save_offset = 0;
9213
9214 if (! info_ptr->cr_save_p)
9215 info_ptr->cr_save_offset = 0;
9216
9217 if (! info_ptr->toc_save_p)
9218 info_ptr->toc_save_offset = 0;
9219
9220 return info_ptr;
9221 }
9222
/* Dump to stderr a human-readable summary of the frame layout INFO;
   when INFO is null, compute the current function's layout first via
   rs6000_stack_info.  Zero/unset fields are omitted from the dump.  */

void
debug_stack_info (info)
     rs6000_stack_t *info;
{
  const char *abi_string;

  /* Default to the current function's layout.  */
  if (! info)
    info = rs6000_stack_info ();

  fprintf (stderr, "\nStack information for function %s:\n",
	   ((current_function_decl && DECL_NAME (current_function_decl))
	    ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
	    : "<unknown>"));

  /* Translate the ABI enum to a printable name.  */
  switch (info->abi)
    {
    default: abi_string = "Unknown"; break;
    case ABI_NONE: abi_string = "NONE"; break;
    case ABI_AIX:
    case ABI_AIX_NODESC: abi_string = "AIX"; break;
    case ABI_DARWIN: abi_string = "Darwin"; break;
    case ABI_V4: abi_string = "V.4"; break;
    }

  fprintf (stderr, "\tABI = %5s\n", abi_string);

  if (TARGET_ALTIVEC_ABI)
    fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");

  if (TARGET_SPE_ABI)
    fprintf (stderr, "\tSPE ABI extensions enabled.\n");

  /* First-saved-register fields; sentinel values (32, 64, past the
     last AltiVec regno) mean "nothing saved" and are not printed.  */
  if (info->first_gp_reg_save != 32)
    fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);

  if (info->first_fp_reg_save != 64)
    fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);

  if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
    fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
	     info->first_altivec_reg_save);

  /* Boolean flags.  */
  if (info->lr_save_p)
    fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);

  if (info->cr_save_p)
    fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);

  if (info->toc_save_p)
    fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);

  if (info->vrsave_mask)
    fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);

  if (info->push_p)
    fprintf (stderr, "\tpush_p = %5d\n", info->push_p);

  if (info->calls_p)
    fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);

  /* Save-area offsets (relative positions within the frame).  */
  if (info->gp_save_offset)
    fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);

  if (info->fp_save_offset)
    fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);

  if (info->altivec_save_offset)
    fprintf (stderr, "\taltivec_save_offset = %5d\n",
	     info->altivec_save_offset);

  if (info->spe_gp_save_offset)
    fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
	     info->spe_gp_save_offset);

  if (info->vrsave_save_offset)
    fprintf (stderr, "\tvrsave_save_offset = %5d\n",
	     info->vrsave_save_offset);

  if (info->lr_save_offset)
    fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);

  if (info->cr_save_offset)
    fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);

  if (info->toc_save_offset)
    fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);

  if (info->varargs_save_offset)
    fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);

  /* Area sizes.  */
  if (info->total_size)
    fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);

  if (info->varargs_size)
    fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);

  if (info->vars_size)
    fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);

  if (info->parm_size)
    fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);

  if (info->fixed_size)
    fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);

  if (info->gp_size)
    fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);

  if (info->spe_gp_size)
    fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);

  if (info->fp_size)
    fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);

  if (info->altivec_size)
    fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);

  if (info->vrsave_size)
    fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);

  if (info->altivec_padding_size)
    fprintf (stderr, "\taltivec_padding_size= %5d\n",
	     info->altivec_padding_size);

  if (info->spe_padding_size)
    fprintf (stderr, "\tspe_padding_size = %5d\n",
	     info->spe_padding_size);

  if (info->lr_size)
    fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);

  if (info->cr_size)
    fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);

  if (info->toc_size)
    fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);

  if (info->save_size)
    fprintf (stderr, "\tsave_size = %5d\n", info->save_size);

  /* reg_size defaults to 4 (32-bit); only print the unusual case.  */
  if (info->reg_size != 4)
    fprintf (stderr, "\treg_size = %5d\n", info->reg_size);

  fprintf (stderr, "\n");
}
9368
9369 rtx
9370 rs6000_return_addr (count, frame)
9371 int count;
9372 rtx frame;
9373 {
9374 /* Currently we don't optimize very well between prolog and body
9375 code and for PIC code the code can be actually quite bad, so
9376 don't try to be too clever here. */
9377 if (count != 0 || flag_pic != 0)
9378 {
9379 cfun->machine->ra_needs_full_frame = 1;
9380
9381 return
9382 gen_rtx_MEM
9383 (Pmode,
9384 memory_address
9385 (Pmode,
9386 plus_constant (copy_to_reg
9387 (gen_rtx_MEM (Pmode,
9388 memory_address (Pmode, frame))),
9389 RETURN_ADDRESS_OFFSET)));
9390 }
9391
9392 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
9393 }
9394
9395 /* Say whether a function is a candidate for sibcall handling or not.
9396 We do not allow indirect calls to be optimized into sibling calls.
9397 Also, we can't do it if there are any vector parameters; there's
9398 nowhere to put the VRsave code so it works; note that functions with
9399 vector parameters are required to have a prototype, so the argument
9400 type info must be available here. (The tail recursion case can work
9401 with vector parameters, but there's no way to distinguish here.) */
9402 int
9403 function_ok_for_sibcall (fndecl)
9404 tree fndecl;
9405 {
9406 tree type;
9407 if (fndecl)
9408 {
9409 if (TARGET_ALTIVEC_VRSAVE)
9410 {
9411 for (type = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
9412 type; type = TREE_CHAIN (type))
9413 {
9414 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
9415 return 0;
9416 }
9417 }
9418 if (DEFAULT_ABI == ABI_DARWIN
9419 || (*targetm.binds_local_p) (fndecl))
9420 {
9421 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (fndecl));
9422
9423 if (!lookup_attribute ("longcall", attr_list)
9424 || lookup_attribute ("shortcall", attr_list))
9425 return 1;
9426 }
9427 }
9428 return 0;
9429 }
9430
/* Return nonzero if the link register may be clobbered somewhere in
   the current function, which determines whether the prologue must
   save/restore it.  Rewritten to handle sibcalls: regs_ever_live and
   reg_set_between_p both treat sibcall uses of LR as kills, which
   must not force a save, so we scan the insn chain ourselves.  */
static int
rs6000_ra_ever_killed ()
{
  rtx top;
  rtx reg;
  rtx insn;

#ifdef ASM_OUTPUT_MI_THUNK
  /* Thunks never need LR saved.  */
  if (current_function_is_thunk)
    return 0;
#endif
  /* regs_ever_live has LR marked as used if any sibcalls
     are present.  Which it is, but this should not force
     saving and restoring in the prologue/epilog.  Likewise,
     reg_set_between_p thinks a sibcall clobbers LR, so
     that is inappropriate.  */
  /* Also, the prologue can generate a store into LR that
     doesn't really count, like this:
        move LR->R0
        bcl to set PIC register
        move LR->R31
        move R0->LR
     When we're called from the epilog, we need to avoid counting
     this as a store; thus we ignore any insns with a REG_MAYBE_DEAD note.  */

  push_topmost_sequence ();
  top = get_insns ();
  pop_topmost_sequence ();
  reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);

  for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	{
	  /* An auto-increment note on LR counts as a kill.  */
	  if (FIND_REG_INC_NOTE (insn, reg))
	    return 1;
	  /* An ordinary (non-sibling) call clobbers LR.  */
	  else if (GET_CODE (insn) == CALL_INSN
		   && !SIBLING_CALL_P (insn))
	    return 1;
	  /* Any other set of LR, unless it is one of the prologue's
	     own stores (marked with REG_MAYBE_DEAD above).  */
	  else if (set_of (reg, insn) != NULL_RTX
		   && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
	    return 1;
	}
    }
  return 0;
}
9478 \f
9479 /* Add a REG_MAYBE_DEAD note to the insn. */
9480 static void
9481 rs6000_maybe_dead (insn)
9482 rtx insn;
9483 {
9484 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
9485 const0_rtx,
9486 REG_NOTES (insn));
9487 }
9488
/* Emit instructions needed to load the TOC register.
   This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
   a constant pool; or for SVR4 -fpic.

   FROMPROLOG is nonzero when called while emitting the prologue; then
   temporaries live in fixed hard registers (LR, r0) instead of
   pseudos, and every insn is tagged REG_MAYBE_DEAD so that
   rs6000_ra_ever_killed ignores these stores.  */

void
rs6000_emit_load_toc_table (fromprolog)
     int fromprolog;
{
  rtx dest;
  dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);

  /* SVR4 small-model PIC (-fpic): one pattern computes the value.  */
  if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
    {
      rtx temp = (fromprolog
		  ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		  : gen_reg_rtx (Pmode));
      rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp)));
      rs6000_maybe_dead (emit_move_insn (dest, temp));
    }
  /* Large-model PIC (-fPIC) on ELF: compute the TOC pointer from a
     PC-relative label address plus a link-time offset.  */
  else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
    {
      char buf[30];
      rtx tempLR = (fromprolog
		    ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		    : gen_reg_rtx (Pmode));
      rtx temp0 = (fromprolog
		   ? gen_rtx_REG (Pmode, 0)
		   : gen_reg_rtx (Pmode));
      rtx symF;

      /* possibly create the toc section */
      if (! toc_initialized)
	{
	  toc_section ();
	  function_section (current_function_decl);
	}

      if (fromprolog)
	{
	  rtx symL;

	  /* LCF labels the point whose address the PIC_1 pattern
	     captures; LCL labels the PIC_2 site, so PIC_2 can form
	     the LCL-LCF adjustment.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
	  symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
							       symF)));
	  rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
							       symL,
							       symF)));
	}
      else
	{
	  /* Outside the prologue: use a fresh "LCG" label each time
	     and load the TOC anchor's offset through memory.  */
	  rtx tocsym;
	  static int reload_toc_labelno = 0;

	  tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR,
								symF,
								tocsym)));
	  rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	  rs6000_maybe_dead (emit_move_insn (temp0,
					     gen_rtx_MEM (Pmode, dest)));
	}
      /* dest = label address + adjustment computed above.  */
      rs6000_maybe_dead (emit_insn (gen_addsi3 (dest, temp0, dest)));
    }
  else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
    {
      /* This is for AIX code running in non-PIC ELF32.  Materialize
	 the LCTOC1 anchor with a lis/addi pair.  */
      char buf[30];
      rtx realsym;
      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
      realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

      rs6000_maybe_dead (emit_insn (gen_elf_high (dest, realsym)));
      rs6000_maybe_dead (emit_insn (gen_elf_low (dest, dest, realsym)));
    }
  else if (DEFAULT_ABI == ABI_AIX)
    {
      /* AIX ABI: emit the load_toc_aix pattern for the word size.  */
      if (TARGET_32BIT)
	rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest)));
      else
	rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest)));
    }
  else
    abort ();
}
9583
/* Return the alias set used for TOC references, creating it lazily
   on the first call.  */
int
get_TOC_alias_set ()
{
  /* -1 means "not yet allocated".  */
  static int toc_alias_set = -1;

  if (toc_alias_set == -1)
    toc_alias_set = new_alias_set ();

  return toc_alias_set;
}
9592
/* This returns nonzero if the current function uses the TOC.  This is
   determined by the presence of (unspec ... 7), which is generated by
   the various load_toc_* patterns.  */
9596
9597 int
9598 uses_TOC ()
9599 {
9600 rtx insn;
9601
9602 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9603 if (INSN_P (insn))
9604 {
9605 rtx pat = PATTERN (insn);
9606 int i;
9607
9608 if (GET_CODE (pat) == PARALLEL)
9609 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
9610 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
9611 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
9612 return 1;
9613 }
9614 return 0;
9615 }
9616
9617 rtx
9618 create_TOC_reference (symbol)
9619 rtx symbol;
9620 {
9621 return gen_rtx_PLUS (Pmode,
9622 gen_rtx_REG (Pmode, TOC_REGISTER),
9623 gen_rtx_CONST (Pmode,
9624 gen_rtx_MINUS (Pmode, symbol,
9625 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
9626 }
9627
9628 #if TARGET_AIX
9629 /* __throw will restore its own return address to be the same as the
9630 return address of the function that the throw is being made to.
9631 This is unfortunate, because we want to check the original
9632 return address to see if we need to restore the TOC.
9633 So we have to squirrel it away here.
9634 This is used only in compiling __throw and __rethrow.
9635
9636 Most of this code should be removed by CSE. */
9637 static rtx insn_after_throw;
9638
/* This does the saving...  Capture, in insn_after_throw, the opcode
   found at the return address of the frame above us, for later
   comparison against the TOC-restore opcode by
   rs6000_emit_eh_toc_restore.  */
void
rs6000_aix_emit_builtin_unwind_init ()
{
  rtx mem;
  rtx stack_top = gen_reg_rtx (Pmode);
  rtx opcode_addr = gen_reg_rtx (Pmode);

  insn_after_throw = gen_reg_rtx (SImode);

  /* Follow the back chain to the caller's frame.  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  emit_move_insn (stack_top, mem);

  /* The return address is stored 2 words into the frame; this
     matches the 2-word offset used in rs6000_emit_eh_toc_restore's
     walk below.  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, stack_top,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  /* Fetch the instruction word at that return address.  */
  emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
}
9658
9659 /* Emit insns to _restore_ the TOC register, at runtime (specifically
9660 in _eh.o). Only used on AIX.
9661
9662 The idea is that on AIX, function calls look like this:
9663 bl somefunction-trampoline
9664 lwz r2,20(sp)
9665
9666 and later,
9667 somefunction-trampoline:
9668 stw r2,20(sp)
9669 ... load function address in the count register ...
9670 bctr
9671 or like this, if the linker determines that this is not a cross-module call
9672 and so the TOC need not be restored:
9673 bl somefunction
9674 nop
9675 or like this, if the compiler could determine that this is not a
9676 cross-module call:
9677 bl somefunction
9678 now, the tricky bit here is that register 2 is saved and restored
9679 by the _linker_, so we can't readily generate debugging information
9680 for it. So we need to go back up the call chain looking at the
9681 insns at return addresses to see which calls saved the TOC register
9682 and so see where it gets restored from.
9683
9684 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
9685 just before the actual epilogue.
9686
9687 On the bright side, this incurs no space or time overhead unless an
9688 exception is thrown, except for the extra code in libgcc.a.
9689
9690 The parameter STACKSIZE is a register containing (at runtime)
9691 the amount to be popped off the stack in addition to the stack frame
9692 of this routine (which will be __throw or __rethrow, and so is
9693 guaranteed to have a stack frame). */
9694
void
rs6000_emit_eh_toc_restore (stacksize)
     rtx stacksize;
{
  rtx top_of_stack;
  rtx bottom_of_stack = gen_reg_rtx (Pmode);
  rtx tocompare = gen_reg_rtx (SImode);
  rtx opcode = gen_reg_rtx (SImode);
  rtx opcode_addr = gen_reg_rtx (Pmode);
  rtx mem;
  rtx loop_start = gen_label_rtx ();
  rtx no_toc_restore_needed = gen_label_rtx ();
  rtx loop_exit = gen_label_rtx ();

  /* Start the walk at our own frame's back-chain word.  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  /* The walk terminates once STACKSIZE bytes have been popped.  */
  top_of_stack = expand_binop (Pmode, add_optab,
			       bottom_of_stack, stacksize,
			       NULL_RTX, 1, OPTAB_WIDEN);

  /* Opcode of the TOC restore: "lwz r2,20(r1)" in 32-bit mode
     (presumably the 64-bit "ld r2,40(r1)" encoding otherwise --
     NOTE(review): derived from the constants, confirm against the
     ABI's TOC save slot).  */
  emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
					   : 0xE8410028, SImode));

  /* rs6000_aix_emit_builtin_unwind_init must have run first.  */
  if (insn_after_throw == NULL_RTX)
    abort ();
  emit_move_insn (opcode, insn_after_throw);

  emit_note (NULL, NOTE_INSN_LOOP_BEG);
  emit_label (loop_start);

  /* If the insn after this frame's return address is the TOC
     restore, reload r2 from the slot 5 words into the frame.  */
  do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
			   SImode, NULL_RTX, NULL_RTX,
			   no_toc_restore_needed);

  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (5 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (gen_rtx_REG (Pmode, 2), mem);

  /* Stop once we have walked up to top_of_stack.  */
  emit_label (no_toc_restore_needed);
  do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
			   Pmode, NULL_RTX, NULL_RTX,
			   loop_exit);

  /* Advance one frame: follow the back chain, then fetch the opcode
     at that frame's return address (2 words in).  */
  mem = gen_rtx_MEM (Pmode, bottom_of_stack);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));

  emit_note (NULL, NOTE_INSN_LOOP_CONT);
  emit_jump (loop_start);
  emit_note (NULL, NOTE_INSN_LOOP_END);
  emit_label (loop_exit);
}
9756 #endif /* TARGET_AIX */
9757 \f
9758 /* This ties together stack memory (MEM with an alias set of
9759 rs6000_sr_alias_set) and the change to the stack pointer. */
9760
9761 static void
9762 rs6000_emit_stack_tie ()
9763 {
9764 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
9765
9766 set_mem_alias_set (mem, rs6000_sr_alias_set);
9767 emit_insn (gen_stack_tie (mem));
9768 }
9769
/* Emit the correct code for allocating stack space, as insns.
   If COPY_R12, make sure a copy of the old frame is left in r12.
   The generated code may use hard register 0 as a temporary.  */

static void
rs6000_emit_allocate_stack (size, copy_r12)
     HOST_WIDE_INT size;
     int copy_r12;
{
  rtx insn;
  rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx tmp_reg = gen_rtx_REG (Pmode, 0);
  rtx todec = GEN_INT (-size);

  /* With -fstack-limit, emit a conditional trap that fires before
     the stack pointer is moved below the limit.  */
  if (current_function_limit_stack)
    {
      if (REG_P (stack_limit_rtx)
	  && REGNO (stack_limit_rtx) > 1
	  && REGNO (stack_limit_rtx) <= 31)
	{
	  /* Limit in a GPR: compute limit+size into r0 and trap if
	     the current stack pointer is below it.  */
	  emit_insn (Pmode == SImode
		     ? gen_addsi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size))
		     : gen_adddi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size)));

	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
	       && TARGET_32BIT
	       && DEFAULT_ABI == ABI_V4)
	{
	  /* Limit is a symbol: materialize symbol+size with a
	     lis/addi pair, then trap below it.  */
	  rtx toload = gen_rtx_CONST (VOIDmode,
				      gen_rtx_PLUS (Pmode,
						    stack_limit_rtx,
						    GEN_INT (size)));

	  emit_insn (gen_elf_high (tmp_reg, toload));
	  emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else
	warning ("stack limit expression is not supported");
    }

  /* Without update forms, the old sp must be stored as the back
     chain by hand, so it always goes through r12 in that case.  */
  if (copy_r12 || ! TARGET_UPDATE)
    emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);

  if (TARGET_UPDATE)
    {
      if (size > 32767)
	{
	  /* Decrement does not fit in a 16-bit displacement; move it
	     into r0 first.  Need a note here so that try_split
	     doesn't get confused.  */
	  if (get_last_insn() == NULL_RTX)
	    emit_note (0, NOTE_INSN_DELETED);
	  insn = emit_move_insn (tmp_reg, todec);
	  try_split (PATTERN (insn), insn, 0);
	  todec = tmp_reg;
	}

      /* Store-with-update: decrements sp and stores the back chain
	 in a single insn.  */
      if (Pmode == SImode)
	insn = emit_insn (gen_movsi_update (stack_reg, stack_reg,
					    todec, stack_reg));
      else
	insn = emit_insn (gen_movdi_update (stack_reg, stack_reg,
					    todec, stack_reg));
    }
  else
    {
      /* No update form: decrement sp, then store the old sp (copied
	 to r12 above) as the back chain.  */
      if (Pmode == SImode)
	insn = emit_insn (gen_addsi3 (stack_reg, stack_reg, todec));
      else
	insn = emit_insn (gen_adddi3 (stack_reg, stack_reg, todec));
      emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
		      gen_rtx_REG (Pmode, 12));
    }

  /* For the unwinder, describe the net effect as sp = sp - size.  */
  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) =
    gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
		       gen_rtx_SET (VOIDmode, stack_reg,
				    gen_rtx_PLUS (Pmode, stack_reg,
						  GEN_INT (-size))),
		       REG_NOTES (insn));
}
9859
9860 /* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
9861 knows that:
9862
9863 (mem (plus (blah) (regXX)))
9864
9865 is really:
9866
9867 (mem (plus (blah) (const VALUE_OF_REGXX))). */
9868
9869 static void
9870 altivec_frame_fixup (insn, reg, val)
9871 rtx insn, reg;
9872 HOST_WIDE_INT val;
9873 {
9874 rtx real;
9875
9876 real = copy_rtx (PATTERN (insn));
9877
9878 real = replace_rtx (real, reg, GEN_INT (val));
9879
9880 RTX_FRAME_RELATED_P (insn) = 1;
9881 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
9882 real,
9883 REG_NOTES (insn));
9884 }
9885
9886 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
9887 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
9888 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
9889 deduce these equivalences by itself so it wasn't necessary to hold
9890 its hand so much. */
9891
9892 static void
9893 rs6000_frame_related (insn, reg, val, reg2, rreg)
9894 rtx insn;
9895 rtx reg;
9896 HOST_WIDE_INT val;
9897 rtx reg2;
9898 rtx rreg;
9899 {
9900 rtx real, temp;
9901
9902 /* copy_rtx will not make unique copies of registers, so we need to
9903 ensure we don't have unwanted sharing here. */
9904 if (reg == reg2)
9905 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
9906
9907 if (reg == rreg)
9908 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
9909
9910 real = copy_rtx (PATTERN (insn));
9911
9912 if (reg2 != NULL_RTX)
9913 real = replace_rtx (real, reg2, rreg);
9914
9915 real = replace_rtx (real, reg,
9916 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
9917 STACK_POINTER_REGNUM),
9918 GEN_INT (val)));
9919
9920 /* We expect that 'real' is either a SET or a PARALLEL containing
9921 SETs (and possibly other stuff). In a PARALLEL, all the SETs
9922 are important so they all have to be marked RTX_FRAME_RELATED_P. */
9923
9924 if (GET_CODE (real) == SET)
9925 {
9926 rtx set = real;
9927
9928 temp = simplify_rtx (SET_SRC (set));
9929 if (temp)
9930 SET_SRC (set) = temp;
9931 temp = simplify_rtx (SET_DEST (set));
9932 if (temp)
9933 SET_DEST (set) = temp;
9934 if (GET_CODE (SET_DEST (set)) == MEM)
9935 {
9936 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
9937 if (temp)
9938 XEXP (SET_DEST (set), 0) = temp;
9939 }
9940 }
9941 else if (GET_CODE (real) == PARALLEL)
9942 {
9943 int i;
9944 for (i = 0; i < XVECLEN (real, 0); i++)
9945 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
9946 {
9947 rtx set = XVECEXP (real, 0, i);
9948
9949 temp = simplify_rtx (SET_SRC (set));
9950 if (temp)
9951 SET_SRC (set) = temp;
9952 temp = simplify_rtx (SET_DEST (set));
9953 if (temp)
9954 SET_DEST (set) = temp;
9955 if (GET_CODE (SET_DEST (set)) == MEM)
9956 {
9957 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
9958 if (temp)
9959 XEXP (SET_DEST (set), 0) = temp;
9960 }
9961 RTX_FRAME_RELATED_P (set) = 1;
9962 }
9963 }
9964 else
9965 abort ();
9966
9967 RTX_FRAME_RELATED_P (insn) = 1;
9968 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
9969 real,
9970 REG_NOTES (insn));
9971 }
9972
9973 /* Returns an insn that has a vrsave set operation with the
9974 appropriate CLOBBERs. */
9975
9976 static rtx
9977 generate_set_vrsave (reg, info, epiloguep)
9978 rtx reg;
9979 rs6000_stack_t *info;
9980 int epiloguep;
9981 {
9982 int nclobs, i;
9983 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
9984 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
9985
9986 clobs[0]
9987 = gen_rtx_SET (VOIDmode,
9988 vrsave,
9989 gen_rtx_UNSPEC_VOLATILE (SImode,
9990 gen_rtvec (2, reg, vrsave),
9991 30));
9992
9993 nclobs = 1;
9994
9995 /* We need to clobber the registers in the mask so the scheduler
9996 does not move sets to VRSAVE before sets of AltiVec registers.
9997
9998 However, if the function receives nonlocal gotos, reload will set
9999 all call saved registers live. We will end up with:
10000
10001 (set (reg 999) (mem))
10002 (parallel [ (set (reg vrsave) (unspec blah))
10003 (clobber (reg 999))])
10004
10005 The clobber will cause the store into reg 999 to be dead, and
10006 flow will attempt to delete an epilogue insn. In this case, we
10007 need an unspec use/set of the register. */
10008
10009 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
10010 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
10011 {
10012 if (!epiloguep || call_used_regs [i])
10013 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
10014 gen_rtx_REG (V4SImode, i));
10015 else
10016 {
10017 rtx reg = gen_rtx_REG (V4SImode, i);
10018
10019 clobs[nclobs++]
10020 = gen_rtx_SET (VOIDmode,
10021 reg,
10022 gen_rtx_UNSPEC (V4SImode,
10023 gen_rtvec (1, reg), 27));
10024 }
10025 }
10026
10027 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
10028
10029 for (i = 0; i < nclobs; ++i)
10030 XVECEXP (insn, 0, i) = clobs[i];
10031
10032 return insn;
10033 }
10034
10035 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
10036 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
10037
10038 static void
10039 emit_frame_save (frame_reg, frame_ptr, mode, regno, offset, total_size)
10040 rtx frame_reg;
10041 rtx frame_ptr;
10042 enum machine_mode mode;
10043 unsigned int regno;
10044 int offset;
10045 int total_size;
10046 {
10047 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
10048 rtx replacea, replaceb;
10049
10050 int_rtx = GEN_INT (offset);
10051
10052 /* Some cases that need register indexed addressing. */
10053 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
10054 || (TARGET_SPE_ABI
10055 && SPE_VECTOR_MODE (mode)
10056 && !SPE_CONST_OFFSET_OK (offset)))
10057 {
10058 /* Whomever calls us must make sure r11 is available in the
10059 flow path of instructions in the prologue. */
10060 offset_rtx = gen_rtx_REG (Pmode, 11);
10061 emit_move_insn (offset_rtx, int_rtx);
10062
10063 replacea = offset_rtx;
10064 replaceb = int_rtx;
10065 }
10066 else
10067 {
10068 offset_rtx = int_rtx;
10069 replacea = NULL_RTX;
10070 replaceb = NULL_RTX;
10071 }
10072
10073 reg = gen_rtx_REG (mode, regno);
10074 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
10075 mem = gen_rtx_MEM (mode, addr);
10076 set_mem_alias_set (mem, rs6000_sr_alias_set);
10077
10078 insn = emit_move_insn (mem, reg);
10079
10080 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
10081 }
10082
10083 /* Emit an offset memory reference suitable for a frame store, while
10084 converting to a valid addressing mode. */
10085
10086 static rtx
10087 gen_frame_mem_offset (mode, reg, offset)
10088 enum machine_mode mode;
10089 rtx reg;
10090 int offset;
10091 {
10092 rtx int_rtx, offset_rtx;
10093
10094 int_rtx = GEN_INT (offset);
10095
10096 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
10097 {
10098 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10099 emit_move_insn (offset_rtx, int_rtx);
10100 }
10101 else
10102 offset_rtx = int_rtx;
10103
10104 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
10105 }
10106
/* Emit function prologue as insns: allocate the stack frame and save
   every register the current frame layout (rs6000_stack_info) says
   needs saving, tagging the saves with unwind (frame-related)
   notes.  */

void
rs6000_emit_prologue ()
{
  rs6000_stack_t *info = rs6000_stack_info ();
  enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
  int reg_size = TARGET_POWERPC64 ? 8 : 4;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
  /* Register to address save slots from: sp itself when the frame is
     small, else r12 holding the old sp.  */
  rtx frame_reg_rtx = sp_reg_rtx;
  rtx cr_save_rtx = NULL;
  rtx insn;
  int saving_FPRs_inline;
  int using_store_multiple;
  HOST_WIDE_INT sp_offset = 0;

  /* Under the SPE ABI, GPR saves are done as 64-bit V2SImode
     stores.  */
  if (TARGET_SPE_ABI)
    {
      reg_mode = V2SImode;
      reg_size = 8;
    }

  using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			  && !TARGET_SPE_ABI
			  && info->first_gp_reg_save < 31);
  saving_FPRs_inline = (info->first_fp_reg_save == 64
			|| FP_SAVE_INLINE (info->first_fp_reg_save));

  /* For V.4, update stack before we do any saving and set back pointer.  */
  if (info->push_p && DEFAULT_ABI == ABI_V4)
    {
      /* Small frames are addressed off the new sp at a constant
	 offset; large frames keep the old sp in r12 instead.  */
      if (info->total_size < 32767)
	sp_offset = info->total_size;
      else
	frame_reg_rtx = frame_ptr_rtx;
      rs6000_emit_allocate_stack (info->total_size,
				  (frame_reg_rtx != sp_reg_rtx
				   && (info->cr_save_p
				       || info->lr_save_p
				       || info->first_fp_reg_save < 64
				       || info->first_gp_reg_save < 32
				       )));
      if (frame_reg_rtx != sp_reg_rtx)
	rs6000_emit_stack_tie ();
    }

  /* Save AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      /* There should be a non inline version of this, for when we
	 are saving lots of vector registers.  */
      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx areg, savereg, mem;
	    int offset;

	    offset = info->altivec_save_offset + sp_offset
	      + 16 * (i - info->first_altivec_reg_save);

	    savereg = gen_rtx_REG (V4SImode, i);

	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn (areg, GEN_INT (offset));

	    /* AltiVec addressing mode is [reg+reg].  */
	    mem = gen_rtx_MEM (V4SImode,
			       gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));

	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    insn = emit_move_insn (mem, savereg);

	    /* Teach the unwinder that areg held a known constant.  */
	    altivec_frame_fixup (insn, areg, offset);
	  }
    }

  /* VRSAVE is a bit vector representing which AltiVec registers
     are used.  The OS uses this to determine which vector
     registers to save on a context switch.  We need to save
     VRSAVE on the stack frame, add whatever AltiVec registers we
     used in this function, and do the corresponding magic in the
     epilogue.  */

  if (TARGET_ALTIVEC && info->vrsave_mask != 0)
    {
      rtx reg, mem, vrsave;
      int offset;

      /* Get VRSAVE onto a GPR.  */
      reg = gen_rtx_REG (SImode, 12);
      vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
      if (TARGET_MACHO)
	emit_insn (gen_get_vrsave_internal (reg));
      else
	emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));

      /* Save VRSAVE.  */
      offset = info->vrsave_save_offset + sp_offset;
      mem
	= gen_rtx_MEM (SImode,
		       gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      insn = emit_move_insn (mem, reg);

      /* Include the registers in the mask.  */
      emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));

      insn = emit_insn (generate_set_vrsave (reg, info, 0));
    }

  /* If we use the link register, get it into r0.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, 0),
		    gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));

  /* If we need to save CR, put it into r12.  (Only when r12 is not
     already serving as the frame register; otherwise it is grabbed
     into r0 later, once r12 is free.)  */
  if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
    {
      cr_save_rtx = gen_rtx_REG (SImode, 12);
      emit_insn (gen_movesi_from_cr (cr_save_rtx));
    }

  /* Do any required saving of fpr's.  If only one or two to save, do
     it ourselves.  Otherwise, call function.  */
  if (saving_FPRs_inline)
    {
      int i;
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	if ((regs_ever_live[info->first_fp_reg_save+i]
	     && ! call_used_regs[info->first_fp_reg_save+i]))
	  emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
			   info->first_fp_reg_save + i,
			   info->fp_save_offset + sp_offset + 8 * i,
			   info->total_size);
    }
  else if (info->first_fp_reg_save != 64)
    {
      /* Call the out-of-line _savefpr-style routine; the PARALLEL
	 describes the LR clobber, the routine used, and all the FPR
	 stores it performs.  */
      int i;
      char rname[30];
      const char *alloc_rname;
      rtvec p;
      p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);

      RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
					  gen_rtx_REG (Pmode,
						       LINK_REGISTER_REGNUM));
      sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
	       info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
      alloc_rname = ggc_strdup (rname);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_SYMBOL_REF (Pmode,
							  alloc_rname));
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	{
	  rtx addr, reg, mem;
	  reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset + 8*i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
	}
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    NULL_RTX, NULL_RTX);
    }

  /* Save GPRs.  This is done as a PARALLEL if we are using
     the store-multiple instructions.  */
  if (using_store_multiple)
    {
      rtvec p;
      int i;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr, reg, mem;
	  reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->gp_save_offset
					+ sp_offset
					+ reg_size * i));
	  mem = gen_rtx_MEM (reg_mode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
	}
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    NULL_RTX, NULL_RTX);
    }
  else
    {
      int i;
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	/* Save each live call-saved GPR, plus the PIC register when
	   a PIC ABI requires it.  */
	if ((regs_ever_live[info->first_gp_reg_save+i]
	     && ! call_used_regs[info->first_gp_reg_save+i])
	    || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
		&& ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	  {
	    rtx addr, reg, mem;
	    reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);

	    if (TARGET_SPE_ABI)
	      {
		/* SPE saves are 64-bit; large offsets must go
		   through the fixed scratch register.  */
		int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
		rtx b;

		if (!SPE_CONST_OFFSET_OK (offset))
		  {
		    b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
		    emit_move_insn (b, GEN_INT (offset));
		  }
		else
		  b = GEN_INT (offset);

		addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
		mem = gen_rtx_MEM (V2SImode, addr);
		set_mem_alias_set (mem, rs6000_sr_alias_set);
		insn = emit_move_insn (mem, reg);

		if (GET_CODE (b) == CONST_INT)
		  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
					NULL_RTX, NULL_RTX);
		else
		  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
					b, GEN_INT (offset));
	      }
	    else
	      {
		addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				     GEN_INT (info->gp_save_offset
					      + sp_offset
					      + reg_size * i));
		mem = gen_rtx_MEM (reg_mode, addr);
		set_mem_alias_set (mem, rs6000_sr_alias_set);

		insn = emit_move_insn (mem, reg);
		rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				      NULL_RTX, NULL_RTX);
	      }
	  }
    }

  /* ??? There's no need to emit actual instructions here, but it's the
     easiest way to get the frame unwind information emitted.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      for (i = 0; ; ++i)
	{
	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
			   info->ehrd_offset + sp_offset
			   + reg_size * (int) i,
			   info->total_size);
	}
    }

  /* Save lr if we used it.  (It was copied into r0 above.)  */
  if (info->lr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->lr_save_offset + sp_offset));
      rtx reg = gen_rtx_REG (Pmode, 0);
      rtx mem = gen_rtx_MEM (Pmode, addr);
      /* This should not be of rs6000_sr_alias_set, because of
	 __builtin_return_address.  */

      insn = emit_move_insn (mem, reg);
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
    }

  /* Save CR if we use any that must be preserved.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      /* If r12 was used to hold the original sp, copy cr into r0 now
	 that it's free.  */
      if (REGNO (frame_reg_rtx) == 12)
	{
	  cr_save_rtx = gen_rtx_REG (SImode, 0);
	  emit_insn (gen_movesi_from_cr (cr_save_rtx));
	}
      insn = emit_move_insn (mem, cr_save_rtx);

      /* Now, there's no way that dwarf2out_frame_debug_expr is going
	 to understand '(unspec:SI [(reg:CC 68) ...] 19)'.  But that's
	 OK.  All we have to do is specify that _one_ condition code
	 register is saved in this stack slot.  The thrower's epilogue
	 will then restore all the call-saved registers.
	 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux.  */
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
    }

  /* Update stack and set back pointer unless this is V.4,
     for which it was done previously.  */
  if (info->push_p && DEFAULT_ABI != ABI_V4)
    rs6000_emit_allocate_stack (info->total_size, FALSE);

  /* Set frame pointer, if needed.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
			     sp_reg_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up.  */
  if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
      || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
	  && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
    {
      /* If emit_load_toc_table will use the link register, we need to save
	 it.  We use R11 for this purpose because emit_load_toc_table
	 can use register 0.  This allows us to use a plain 'blr' to return
	 from the procedure more often.  */
      int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
				      && ! info->lr_save_p
				      && EXIT_BLOCK_PTR->pred != NULL);
      if (save_LR_around_toc_setup)
	emit_move_insn (gen_rtx_REG (Pmode, 11),
			gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));

      rs6000_emit_load_toc_table (TRUE);

      if (save_LR_around_toc_setup)
	emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
			gen_rtx_REG (Pmode, 11));
    }

  /* Darwin PIC: load the pic base into the PIC offset table register
     via LR.  */
  if (DEFAULT_ABI == ABI_DARWIN
      && flag_pic && current_function_uses_pic_offset_table)
    {
      rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
#if TARGET_MACHO
      char *picbase = machopic_function_base_name ();
      rtx src = gen_rtx_SYMBOL_REF (Pmode, ggc_alloc_string (picbase, -1));

      rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));
#endif

      rs6000_maybe_dead (
	emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
			gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
    }
}
10472
/* Write function prologue.  Normally the RTL prologue has already
   been emitted (rs6000_emit_prologue via the prologue pattern), so
   only auxiliary .extern declarations are printed here; without a
   HAVE_prologue pattern, the prologue insns are generated and printed
   directly to FILE.  */

static void
rs6000_output_function_prologue (file, size)
     FILE *file;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  rs6000_stack_t *info = rs6000_stack_info ();

  if (TARGET_DEBUG_STACK)
    debug_stack_info (info);

  /* Write .extern for any function we will call to save and restore
     fp values.  */
  if (info->first_fp_reg_save < 64
      && !FP_SAVE_INLINE (info->first_fp_reg_save))
    fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
	     SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
	     RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
	     RESTORE_FP_SUFFIX);

  /* Write .extern for AIX common mode routines, if needed.  */
  if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
    {
      fputs ("\t.extern __mulh\n", file);
      fputs ("\t.extern __mull\n", file);
      fputs ("\t.extern __divss\n", file);
      fputs ("\t.extern __divus\n", file);
      fputs ("\t.extern __quoss\n", file);
      fputs ("\t.extern __quous\n", file);
      common_mode_defined = 1;
    }

  /* No prologue pattern in the machine description: build the
     prologue RTL here and run final () over it immediately.  */
  if (! HAVE_prologue)
    {
      start_sequence ();

      /* A NOTE_INSN_DELETED is supposed to be at the start and end of
	 the "toplevel" insn chain.  */
      emit_note (0, NOTE_INSN_DELETED);
      rs6000_emit_prologue ();
      emit_note (0, NOTE_INSN_DELETED);

      /* Expand INSN_ADDRESSES so final() doesn't crash.  */
      {
	rtx insn;
	unsigned addr = 0;
	for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	  {
	    INSN_ADDRESSES_NEW (insn, addr);
	    addr += 4;
	  }
      }

      if (TARGET_DEBUG_STACK)
	debug_rtx_list (get_insns (), 100);
      final (get_insns (), file, FALSE, FALSE);
      end_sequence ();
    }

  /* Advance the label counter used for the "LCF"/"LCL" PIC labels
     generated by rs6000_emit_load_toc_table.  */
  rs6000_pic_labelno++;
}
10535
10536 /* Emit function epilogue as insns.
10537
10538 At present, dwarf2out_frame_debug_expr doesn't understand
10539 register restores, so we don't bother setting RTX_FRAME_RELATED_P
10540 anywhere in the epilogue. Most of the insns below would in any case
10541 need special notes to explain where r11 is in relation to the stack. */
10542
void
rs6000_emit_epilogue (sibcall)
     int sibcall;		/* nonzero if epilogue precedes a sibling call
				   (no return insn is emitted in that case) */
{
  rs6000_stack_t *info;
  int restoring_FPRs_inline;
  int using_load_multiple;
  int using_mfcr_multiple;
  int use_backchain_to_restore_sp;
  int sp_offset = 0;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);	/* r1 is the stack pointer.  */
  rtx frame_reg_rtx = sp_reg_rtx;
  enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
  int reg_size = TARGET_POWERPC64 ? 8 : 4;
  int i;

  /* Under the SPE ABI, GPRs are saved/restored as 64-bit V2SI values.  */
  if (TARGET_SPE_ABI)
    {
      reg_mode = V2SImode;
      reg_size = 8;
    }

  info = rs6000_stack_info ();
  using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			 && !TARGET_SPE_ABI
			 && info->first_gp_reg_save < 31);
  /* FPRs are restored by an out-of-line routine unless this is a
     sibcall/eh-return epilogue or FP_SAVE_INLINE says otherwise.  */
  restoring_FPRs_inline = (sibcall
			   || current_function_calls_eh_return
			   || info->first_fp_reg_save == 64
			   || FP_SAVE_INLINE (info->first_fp_reg_save));
  use_backchain_to_restore_sp = (frame_pointer_needed
				 || current_function_calls_alloca
				 || info->total_size > 32767);
  /* mfcr with a mask of several fields is cheaper on some cores, or
     smaller when optimizing for size.  */
  using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
			 || rs6000_cpu == PROCESSOR_PPC603
			 || rs6000_cpu == PROCESSOR_PPC750
			 || optimize_size);

  /* If we have a frame pointer, a call to alloca, or a large stack
     frame, restore the old stack pointer using the backchain.  Otherwise,
     we know what size to update it with.  */
  if (use_backchain_to_restore_sp)
    {
      /* Under V.4, don't reset the stack pointer until after we're done
	 loading the saved registers.  Use r11 as the frame base instead.  */
      if (DEFAULT_ABI == ABI_V4)
	frame_reg_rtx = gen_rtx_REG (Pmode, 11);

      /* The backchain word at 0(r1) holds the caller's stack pointer.  */
      emit_move_insn (frame_reg_rtx,
		      gen_rtx_MEM (Pmode, sp_reg_rtx));

    }
  else if (info->push_p)
    {
      /* For V.4, keep sp where it is and address the save area with an
	 offset; otherwise pop the frame immediately.  */
      if (DEFAULT_ABI == ABI_V4)
	sp_offset = info->total_size;
      else
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size)));
	}
    }

  /* Restore AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx addr, areg, mem;

	    /* r0 holds the offset; AltiVec loads need [reg+reg] form.  */
	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn
	      (areg, GEN_INT (info->altivec_save_offset
			      + sp_offset
			      + 16 * (i - info->first_altivec_reg_save)));

	    /* AltiVec addressing mode is [reg+reg].  */
	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
	    mem = gen_rtx_MEM (V4SImode, addr);
	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
	  }
    }

  /* Restore VRSAVE if needed (via r12 as a scratch).  */
  if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
    {
      rtx addr, mem, reg;

      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			   GEN_INT (info->vrsave_save_offset + sp_offset));
      mem = gen_rtx_MEM (SImode, addr);
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      reg = gen_rtx_REG (SImode, 12);
      emit_move_insn (reg, mem);

      emit_insn (generate_set_vrsave (reg, info, 1));
    }

  /* Get the old lr if we saved it.  Loaded into r0 here; moved to LR
     below so the load can overlap the register restores.  */
  if (info->lr_save_p)
    {
      rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
				      info->lr_save_offset + sp_offset);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
    }

  /* Get the old cr if we saved it (into r12; moved to CR fields below).  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      emit_move_insn (gen_rtx_REG (SImode, 12), mem);
    }

  /* Set LR here to try to overlap restores below.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
		    gen_rtx_REG (Pmode, 0));

  /* Load exception handler data registers, if needed.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      for (i = 0; ; ++i)
	{
	  rtx mem;

	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
				      info->ehrd_offset + sp_offset
				      + reg_size * (int) i);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
	}
    }

  /* Restore GPRs.  This is done as a PARALLEL if we are using
     the load-multiple instructions.  */
  if (using_load_multiple)
    {
      rtvec p;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) =
	    gen_rtx_SET (VOIDmode,
			 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
			 mem);
	}
      emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
  else
    /* Individual loads: only registers that were actually saved, plus
       the PIC register when the ABI required saving it.  */
    for (i = 0; i < 32 - info->first_gp_reg_save; i++)
      if ((regs_ever_live[info->first_gp_reg_save+i]
	   && ! call_used_regs[info->first_gp_reg_save+i])
	  || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
	      && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
		  || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  /* Restore 64-bit quantities for SPE.  */
	  if (TARGET_SPE_ABI)
	    {
	      int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
	      rtx b;

	      /* SPE load offsets have a limited range; use a scratch
		 register for the offset when it does not fit.  */
	      if (!SPE_CONST_OFFSET_OK (offset))
		{
		  b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
		  emit_move_insn (b, GEN_INT (offset));
		}
	      else
		b = GEN_INT (offset);

	      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
	      mem = gen_rtx_MEM (V2SImode, addr);
	    }

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode,
				       info->first_gp_reg_save + i), mem);
	}

  /* Restore fpr's if we need to do it without calling a function.  */
  if (restoring_FPRs_inline)
    for (i = 0; i < 64 - info->first_fp_reg_save; i++)
      if ((regs_ever_live[info->first_fp_reg_save+i]
	   && ! call_used_regs[info->first_fp_reg_save+i]))
	{
	  rtx addr, mem;
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset
					+ 8 * i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (DFmode,
				       info->first_fp_reg_save + i),
			  mem);
	}

  /* If we saved cr, restore it here.  Just those that were used.  */
  if (info->cr_save_p)
    {
      rtx r12_rtx = gen_rtx_REG (SImode, 12);
      int count = 0;

      if (using_mfcr_multiple)
	{
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      count++;
	  if (count == 0)
	    abort ();
	}

      if (using_mfcr_multiple && count > 1)
	{
	  /* One mtcrf-style PARALLEL restoring all live CR fields at
	     once from r12.  The UNSPEC number 20 identifies the
	     movsi_to_cr pattern in rs6000.md.  */
	  rtvec p;
	  int ndx;

	  p = rtvec_alloc (count);

	  ndx = 0;
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      {
		rtvec r = rtvec_alloc (2);
		RTVEC_ELT (r, 0) = r12_rtx;
		RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
		RTVEC_ELT (p, ndx) =
		  gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
			       gen_rtx_UNSPEC (CCmode, r, 20));
		ndx++;
	      }
	  emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
	  if (ndx != count)
	    abort ();
	}
      else
	/* One move per live CR field.  */
	for (i = 0; i < 8; i++)
	  if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	    {
	      emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
							   CR0_REGNO+i),
					      r12_rtx));
	    }
    }

  /* If this is V.4, unwind the stack pointer after all of the loads
     have been done.  We need to emit a block here so that sched
     doesn't decide to move the sp change before the register restores
     (which may not have any obvious dependency on the stack).  This
     doesn't hurt performance, because there is no scheduling that can
     be done after this point.  */
  if (DEFAULT_ABI == ABI_V4)
    {
      if (frame_reg_rtx != sp_reg_rtx)
	rs6000_emit_stack_tie ();

      if (use_backchain_to_restore_sp)
	{
	  emit_move_insn (sp_reg_rtx, frame_reg_rtx);
	}
      else if (sp_offset != 0)
	{
	  emit_insn (Pmode == SImode
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset)));
	}
    }

  /* For eh_return, additionally adjust sp by the handler's stack
     adjustment value.  */
  if (current_function_calls_eh_return)
    {
      rtx sa = EH_RETURN_STACKADJ_RTX;
      emit_insn (Pmode == SImode
		 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
		 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
    }

  /* Emit the return insn (as a PARALLEL with the LR use, and with the
     out-of-line FPR restores folded in when applicable).  Skipped for
     sibcalls, where the call insn itself ends the function.  */
  if (!sibcall)
    {
      rtvec p;
      if (! restoring_FPRs_inline)
	p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
      else
	p = rtvec_alloc (2);

      RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_REG (Pmode,
						   LINK_REGISTER_REGNUM));

      /* If we have to restore more than two FP registers, branch to the
	 restore function.  It will return to our caller.  */
      if (! restoring_FPRs_inline)
	{
	  int i;
	  char rname[30];
	  const char *alloc_rname;

	  sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
		   info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
	  alloc_rname = ggc_strdup (rname);
	  RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
					  gen_rtx_SYMBOL_REF (Pmode,
							      alloc_rname));

	  for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	    {
	      rtx addr, mem;
	      addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
				   GEN_INT (info->fp_save_offset + 8*i));
	      mem = gen_rtx_MEM (DFmode, addr);
	      set_mem_alias_set (mem, rs6000_sr_alias_set);

	      RTVEC_ELT (p, i+3) =
		gen_rtx_SET (VOIDmode,
			     gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
			     mem);
	    }
	}

      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
}
10906
10907 /* Write function epilogue. */
10908
static void
rs6000_output_function_epilogue (file, size)
     FILE *file;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  rs6000_stack_t *info = rs6000_stack_info ();

  /* If there is no "epilogue" insn pattern, generate the epilogue as
     RTL and run it through final (), mirroring
     rs6000_output_function_prologue.  */
  if (! HAVE_epilogue)
    {
      rtx insn = get_last_insn ();
      /* If the last insn was a BARRIER, we don't have to write anything except
	 the trace table.  */
      if (GET_CODE (insn) == NOTE)
	insn = prev_nonnote_insn (insn);
      if (insn == 0 ||  GET_CODE (insn) != BARRIER)
	{
	  /* This is slightly ugly, but at least we don't have two
	     copies of the epilogue-emitting code.  */
	  start_sequence ();

	  /* A NOTE_INSN_DELETED is supposed to be at the start
	     and end of the "toplevel" insn chain.  */
	  emit_note (0, NOTE_INSN_DELETED);
	  rs6000_emit_epilogue (FALSE);
	  emit_note (0, NOTE_INSN_DELETED);

	  /* Expand INSN_ADDRESSES so final() doesn't crash.  */
	  {
	    rtx insn;
	    unsigned addr = 0;
	    for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	      {
		INSN_ADDRESSES_NEW (insn, addr);
		addr += 4;
	      }
	  }

	  if (TARGET_DEBUG_STACK)
	    debug_rtx_list (get_insns (), 100);
	  final (get_insns (), file, FALSE, FALSE);
	  end_sequence ();
	}
    }

  /* Output a traceback table here.  See /usr/include/sys/debug.h for info
     on its format.

     We don't output a traceback table if -finhibit-size-directive was
     used.  The documentation for -finhibit-size-directive reads
     ``don't output a @code{.size} assembler directive, or anything
     else that would cause trouble if the function is split in the
     middle, and the two halves are placed at locations far apart in
     memory.''  The traceback table has this property, since it
     includes the offset from the start of the function to the
     traceback table itself.

     System V.4 PowerPCs (and the embedded ABI derived from it) use a
     different traceback table.  */
  if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
      && rs6000_traceback != traceback_none)
    {
      const char *fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
      const char *language_string = lang_hooks.name;
      int fixed_parms = 0, float_parms = 0, parm_info = 0;
      int i;
      int optional_tbtab;

      /* Decide whether the optional (variable-length) part of the
	 table is emitted.  */
      if (rs6000_traceback == traceback_full)
	optional_tbtab = 1;
      else if (rs6000_traceback == traceback_part)
	optional_tbtab = 0;
      else
	optional_tbtab = !optimize_size && !TARGET_ELF;

      while (*fname == '.')	/* V.4 encodes . in the name */
	fname++;

      /* Need label immediately before tbtab, so we can compute its offset
	 from the function start.  */
      if (*fname == '*')
	++fname;
      ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
      ASM_OUTPUT_LABEL (file, fname);

      /* The .tbtab pseudo-op can only be used for the first eight
	 expressions, since it can't handle the possibly variable
	 length fields that follow.  However, if you omit the optional
	 fields, the assembler outputs zeros for all optional fields
	 anyways, giving each variable length field its minimum length
	 (as defined in sys/debug.h).  Thus we can not use the .tbtab
	 pseudo-op at all.  */

      /* An all-zero word flags the start of the tbtab, for debuggers
	 that have to find it by searching forward from the entry
	 point or from the current pc.  */
      fputs ("\t.long 0\n", file);

      /* Tbtab format type.  Use format type 0.  */
      fputs ("\t.byte 0,", file);

      /* Language type.  Unfortunately, there doesn't seem to be any
	 official way to get this info, so we use language_string.  C
	 is 0.  C++ is 9.  No number defined for Obj-C, so use the
	 value for C for now.  There is no official value for Java,
	 although IBM appears to be using 13.  There is no official value
	 for Chill, so we've chosen 44 pseudo-randomly.  */
      if (! strcmp (language_string, "GNU C")
	  || ! strcmp (language_string, "GNU Objective-C"))
	i = 0;
      else if (! strcmp (language_string, "GNU F77"))
	i = 1;
      else if (! strcmp (language_string, "GNU Ada"))
	i = 3;
      else if (! strcmp (language_string, "GNU Pascal"))
	i = 2;
      else if (! strcmp (language_string, "GNU C++"))
	i = 9;
      else if (! strcmp (language_string, "GNU Java"))
	i = 13;
      else if (! strcmp (language_string, "GNU CHILL"))
	i = 44;
      else
	abort ();
      fprintf (file, "%d,", i);

      /* 8 single bit fields: global linkage (not set for C extern linkage,
	 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
	 from start of procedure stored in tbtab, internal function, function
	 has controlled storage, function has no toc, function uses fp,
	 function logs/aborts fp operations.  */
      /* Assume that fp operations are used if any fp reg must be saved.  */
      fprintf (file, "%d,",
	       (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));

      /* 6 bitfields: function is interrupt handler, name present in
	 proc table, function calls alloca, on condition directives
	 (controls stack walks, 3 bits), saves condition reg, saves
	 link reg.  */
      /* The `function calls alloca' bit seems to be set whenever reg 31 is
	 set up as a frame pointer, even when there is no alloca call.  */
      fprintf (file, "%d,",
	       ((optional_tbtab << 6)
		| ((optional_tbtab & frame_pointer_needed) << 5)
		| (info->cr_save_p << 1)
		| (info->lr_save_p)));

      /* 3 bitfields: saves backchain, fixup code, number of fpr saved
	 (6 bits).  */
      fprintf (file, "%d,",
	       (info->push_p << 7) | (64 - info->first_fp_reg_save));

      /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits).  */
      fprintf (file, "%d,", (32 - first_reg_to_save ()));

      if (optional_tbtab)
	{
	  /* Compute the parameter info from the function decl argument
	     list.  Each register parameter contributes 2 bits (float:
	     10 single, 11 double) or 1 bit (fixed: 0) to parm_info,
	     filled from bit 31 downward.  */
	  tree decl;
	  int next_parm_info_bit = 31;

	  for (decl = DECL_ARGUMENTS (current_function_decl);
	       decl; decl = TREE_CHAIN (decl))
	    {
	      rtx parameter = DECL_INCOMING_RTL (decl);
	      enum machine_mode mode = GET_MODE (parameter);

	      if (GET_CODE (parameter) == REG)
		{
		  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
		    {
		      int bits;

		      float_parms++;

		      if (mode == SFmode)
			bits = 0x2;
		      else if (mode == DFmode)
			bits = 0x3;
		      else
			abort ();

		      /* If only one bit will fit, don't or in this entry.  */
		      if (next_parm_info_bit > 0)
			parm_info |= (bits << (next_parm_info_bit - 1));
		      next_parm_info_bit -= 2;
		    }
		  else
		    {
		      fixed_parms += ((GET_MODE_SIZE (mode)
				       + (UNITS_PER_WORD - 1))
				      / UNITS_PER_WORD);
		      next_parm_info_bit -= 1;
		    }
		}
	    }
	}

      /* Number of fixed point parameters.  */
      /* This is actually the number of words of fixed point parameters; thus
	 an 8 byte struct counts as 2; and thus the maximum value is 8.  */
      fprintf (file, "%d,", fixed_parms);

      /* 2 bitfields: number of floating point parameters (7 bits), parameters
	 all on stack.  */
      /* This is actually the number of fp registers that hold parameters;
	 and thus the maximum value is 13.  */
      /* Set parameters on stack bit if parameters are not in their original
	 registers, regardless of whether they are on the stack?  Xlc
	 seems to set the bit when not optimizing.  */
      fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));

      if (! optional_tbtab)
	return;

      /* Optional fields follow.  Some are variable length.  */

      /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
	 11 double float.  */
      /* There is an entry for each parameter in a register, in the order that
	 they occur in the parameter list.  Any intervening arguments on the
	 stack are ignored.  If the list overflows a long (max possible length
	 34 bits) then completely leave off all elements that don't fit.  */
      /* Only emit this long if there was at least one parameter.  */
      if (fixed_parms || float_parms)
	fprintf (file, "\t.long %d\n", parm_info);

      /* Offset from start of code to tb table.  */
      fputs ("\t.long ", file);
      ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      fputs ("-.", file);
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      putc ('\n', file);

      /* Interrupt handler mask.  */
      /* Omit this long, since we never set the interrupt handler bit
	 above.  */

      /* Number of CTL (controlled storage) anchors.  */
      /* Omit this long, since the has_ctl bit is never set above.  */

      /* Displacement into stack of each CTL anchor.  */
      /* Omit this list of longs, because there are no CTL anchors.  */

      /* Length of function name.  */
      fprintf (file, "\t.short %d\n", (int) strlen (fname));

      /* Function name.  */
      assemble_string (fname, strlen (fname));

      /* Register for alloca automatic storage; this is always reg 31.
	 Only emit this if the alloca bit was set above.  */
      if (frame_pointer_needed)
	fputs ("\t.byte 31\n", file);

      fputs ("\t.align 2\n", file);
    }
}
11176 \f
11177 /* A C compound statement that outputs the assembler code for a thunk
11178 function, used to implement C++ virtual function calls with
11179 multiple inheritance. The thunk acts as a wrapper around a virtual
11180 function, adjusting the implicit object parameter before handing
11181 control off to the real function.
11182
11183 First, emit code to add the integer DELTA to the location that
11184 contains the incoming first argument. Assume that this argument
11185 contains a pointer, and is the one used to pass the `this' pointer
11186 in C++. This is the incoming argument *before* the function
11187 prologue, e.g. `%o0' on a sparc. The addition must preserve the
11188 values of all other incoming arguments.
11189
11190 After the addition, emit code to jump to FUNCTION, which is a
11191 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
11192 not touch the return address. Hence returning from FUNCTION will
11193 return to whoever called the current `thunk'.
11194
11195 The effect must be as if FUNCTION had been called directly with the
11196 adjusted first argument. This macro is responsible for emitting
11197 all of the code for a thunk function; output_function_prologue()
11198 and output_function_epilogue() are not invoked.
11199
11200 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
11201 been extracted from it.) It might possibly be useful on some
11202 targets, but probably not.
11203
11204 If you do not define this macro, the target-independent code in the
11205 C++ frontend will generate a less efficient heavyweight thunk that
11206 calls FUNCTION instead of jumping to it. The generic approach does
11207 not support varargs. */
11208
void
output_mi_thunk (file, thunk_fndecl, delta, function)
     FILE *file;
     tree thunk_fndecl ATTRIBUTE_UNUSED;
     int delta;			/* amount to add to the `this' pointer */
     tree function;		/* FUNCTION_DECL of the target function */
{
  /* `this' arrives in r3, or r4 when the return value is passed via a
     hidden aggregate-return pointer in r3.  */
  const char *this_reg =
    reg_names[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function))) ? 4 : 3 ];
  const char *prefix;
  const char *fname;
  const char *r0	 = reg_names[0];
  const char *toc	 = reg_names[2];
  const char *schain	 = reg_names[11];	/* static chain register */
  const char *r12	 = reg_names[12];
  char buf[512];
  static int labelno = 0;	/* counts TOC labels across all thunks */

  /* Small constants that can be done by one add instruction.  */
  if (delta >= -32768 && delta <= 32767)
    {
      if (! TARGET_NEW_MNEMONICS)
	fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta, this_reg);
      else
	fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta);
    }

  /* 64-bit constants.  If "int" is 32 bits, we'll never hit this abort.  */
  else if (TARGET_64BIT && (delta < -2147483647 - 1 || delta > 2147483647))
    abort ();

  /* Large constants that can be done by one addis instruction.  */
  else if ((delta & 0xffff) == 0)
    asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
		 delta >> 16);

  /* 32-bit constants that can be done by an add and addis instruction.  */
  else
    {
      /* Break into two pieces, propagating the sign bit from the low
	 word to the upper word.  */
      int delta_low  = ((delta & 0xffff) ^ 0x8000) - 0x8000;
      int delta_high = (delta - delta_low) >> 16;

      asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
		   delta_high);

      if (! TARGET_NEW_MNEMONICS)
	fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta_low, this_reg);
      else
	fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta_low);
    }

  /* Get the prefix in front of the names.  AIX mangles the code entry
     point with a leading dot.  */
  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_AIX:
      prefix = ".";
      break;

    case ABI_V4:
    case ABI_AIX_NODESC:
      prefix = "";
      break;
    }

  /* If the function is compiled in this module, jump to it directly.
     Otherwise, load up its address and jump to it.  */

  fname = XSTR (XEXP (DECL_RTL (function), 0), 0);

  if (current_file_function_operand (XEXP (DECL_RTL (function), 0), VOIDmode)
      && (! lookup_attribute ("longcall",
			      TYPE_ATTRIBUTES (TREE_TYPE (function)))
	  || lookup_attribute ("shortcall",
			       TYPE_ATTRIBUTES (TREE_TYPE (function)))))

    {
      fprintf (file, "\tb %s", prefix);
      assemble_name (file, fname);
      if (DEFAULT_ABI == ABI_V4 && flag_pic) fputs ("@local", file);
      putc ('\n', file);
    }

  else
    {
      switch (DEFAULT_ABI)
	{
	default:
	  abort ();

	case ABI_AIX:
	  /* Set up a TOC entry for the function.  The thunk then loads
	     the function descriptor from the TOC (entry point, TOC
	     pointer, static chain) and branches via CTR.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "Lthunk", labelno);
	  toc_section ();
	  ASM_OUTPUT_INTERNAL_LABEL (file, "Lthunk", labelno);
	  labelno++;

	  if (TARGET_MINIMAL_TOC)
	    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
	  else
	    {
	      fputs ("\t.tc ", file);
	      assemble_name (file, fname);
	      fputs ("[TC],", file);
	    }
	  assemble_name (file, fname);
	  putc ('\n', file);
	  if (TARGET_ELF)
	    function_section (current_function_decl);
	  else
	    text_section();
	  if (TARGET_MINIMAL_TOC)
	    asm_fprintf (file, (TARGET_32BIT)
			 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12,
			 TARGET_ELF ? ".LCTOC0@toc" : ".LCTOC..1", toc);
	  asm_fprintf (file, (TARGET_32BIT) ? "\t{l|lwz} %s," : "\tld %s,", r12);
	  assemble_name (file, buf);
	  if (TARGET_ELF && TARGET_MINIMAL_TOC)
	    fputs ("-(.LCTOC1)", file);
	  asm_fprintf (file, "(%s)\n", TARGET_MINIMAL_TOC ? r12 : toc);
	  /* Word 0 of the descriptor: the real entry point.  */
	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
		       r0, r12);

	  /* Word 1: the callee's TOC pointer.  */
	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
		       toc, r12);

	  asm_fprintf (file, "\tmtctr %s\n", r0);
	  /* Word 2: the environment (static chain).  */
	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
		       schain, r12);

	  asm_fprintf (file, "\tbctr\n");
	  break;

	case ABI_AIX_NODESC:
	case ABI_V4:
	  fprintf (file, "\tb %s", prefix);
	  assemble_name (file, fname);
	  if (flag_pic) fputs ("@plt", file);
	  putc ('\n', file);
	  break;

#if TARGET_MACHO
	case ABI_DARWIN:
	  fprintf (file, "\tb %s", prefix);
	  /* For PIC calls to symbols not defined in this file, branch
	     to the Mach-O lazy stub instead of the symbol itself.  */
	  if (flag_pic && !machopic_name_defined_p (fname))
	    assemble_name (file, machopic_stub_name (fname));
	  else
	    assemble_name (file, fname);
	  putc ('\n', file);
	  break;
#endif
	}
    }
}
11370
11371 \f
11372 /* A quick summary of the various types of 'constant-pool tables'
11373 under PowerPC:
11374
11375 Target Flags Name One table per
11376 AIX (none) AIX TOC object file
11377 AIX -mfull-toc AIX TOC object file
11378 AIX -mminimal-toc AIX minimal TOC translation unit
11379 SVR4/EABI (none) SVR4 SDATA object file
11380 SVR4/EABI -fpic SVR4 pic object file
11381 SVR4/EABI -fPIC SVR4 PIC translation unit
11382 SVR4/EABI -mrelocatable EABI TOC function
11383 SVR4/EABI -maix AIX TOC object file
11384 SVR4/EABI -maix -mminimal-toc
11385 AIX minimal TOC translation unit
11386
11387 Name Reg. Set by entries contains:
11388 made by addrs? fp? sum?
11389
11390 AIX TOC 2 crt0 as Y option option
11391 AIX minimal TOC 30 prolog gcc Y Y option
11392 SVR4 SDATA 13 crt0 gcc N Y N
11393 SVR4 pic 30 prolog ld Y not yet N
11394 SVR4 PIC 30 prolog gcc Y option option
11395 EABI TOC 30 prolog gcc Y option option
11396
11397 */
11398
11399 /* Hash table stuff for keeping track of TOC entries. */
11400
/* One entry per distinct (constant, mode) pair placed in the TOC.  */
struct toc_hash_struct
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  /* Mode the constant was referenced in; part of the hash key, so the
     same rtx in two modes gets two TOC entries.  */
  enum machine_mode key_mode;
  /* Number of the internal label emitted for this TOC entry.  */
  int labelno;
};
11409
11410 static htab_t toc_hash_table;
11411
11412 /* Hash functions for the hash table. */
11413
/* Compute a hash value for constant K (an rtx suitable for the TOC).
   Recurses through sub-rtxes via the rtx format string; the multipliers
   613 and 1231 are arbitrary odd constants used to mix fields.  */
static unsigned
rs6000_hash_constant (k)
     rtx k;
{
  enum rtx_code code = GET_CODE (k);
  enum machine_mode mode = GET_MODE (k);
  unsigned result = (code << 3) ^ mode;
  const char *format;
  int flen, fidx;

  format = GET_RTX_FORMAT (code);
  flen = strlen (format);
  fidx = 0;

  /* A few codes get special treatment before the generic format walk.  */
  switch (code)
    {
    case LABEL_REF:
      /* Hash the referenced insn's UID rather than chasing the label.  */
      return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));

    case CONST_DOUBLE:
      if (mode != VOIDmode)
	return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
      /* VOIDmode CONST_DOUBLE is an integer pair; hash only the first
	 two format slots.  */
      flen = 2;
      break;

    case CODE_LABEL:
      /* Skip the chain/insn fields; start at slot 3.  */
      fidx = 3;
      break;

    default:
      break;
    }

  /* Generic walk over the rtx's fields, dispatching on format code.  */
  for (; fidx < flen; fidx++)
    switch (format[fidx])
      {
      case 's':
	{
	  unsigned i, len;
	  const char *str = XSTR (k, fidx);
	  len = strlen (str);
	  result = result * 613 + len;
	  for (i = 0; i < len; i++)
	    result = result * 613 + (unsigned) str[i];
	  break;
	}
      case 'u':
      case 'e':
	result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
	break;
      case 'i':
      case 'n':
	result = result * 613 + (unsigned) XINT (k, fidx);
	break;
      case 'w':
	if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
	  result = result * 613 + (unsigned) XWINT (k, fidx);
	else
	  {
	    /* HOST_WIDE_INT is wider than unsigned; fold it in one
	       unsigned-sized chunk at a time.  */
	    size_t i;
	    for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
	      result = result * 613 + (unsigned) (XWINT (k, fidx)
						  >> CHAR_BIT * i);
	  }
	break;
      default:
	abort ();
      }

  return result;
}
11485
11486 static unsigned
11487 toc_hash_function (hash_entry)
11488 const void * hash_entry;
11489 {
11490 const struct toc_hash_struct *thc =
11491 (const struct toc_hash_struct *) hash_entry;
11492 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
11493 }
11494
11495 /* Compare H1 and H2 for equivalence. */
11496
11497 static int
11498 toc_hash_eq (h1, h2)
11499 const void * h1;
11500 const void * h2;
11501 {
11502 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
11503 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
11504
11505 if (((const struct toc_hash_struct *) h1)->key_mode
11506 != ((const struct toc_hash_struct *) h2)->key_mode)
11507 return 0;
11508
11509 return rtx_equal_p (r1, r2);
11510 }
11511
11512 /* Mark the hash table-entry HASH_ENTRY. */
11513
11514 static int
11515 toc_hash_mark_entry (hash_slot, unused)
11516 void ** hash_slot;
11517 void * unused ATTRIBUTE_UNUSED;
11518 {
11519 const struct toc_hash_struct * hash_entry =
11520 *(const struct toc_hash_struct **) hash_slot;
11521 rtx r = hash_entry->key;
11522 ggc_set_mark (hash_entry);
11523 /* For CODE_LABELS, we don't want to drag in the whole insn chain... */
11524 if (GET_CODE (r) == LABEL_REF)
11525 {
11526 ggc_set_mark (r);
11527 ggc_set_mark (XEXP (r, 0));
11528 }
11529 else
11530 ggc_mark_rtx (r);
11531 return 1;
11532 }
11533
11534 /* Mark all the elements of the TOC hash-table *HT. */
11535
11536 static void
11537 toc_hash_mark_table (vht)
11538 void *vht;
11539 {
11540 htab_t *ht = vht;
11541
11542 htab_traverse (*ht, toc_hash_mark_entry, (void *)0);
11543 }
11544
/* These are the names given by the C++ front-end to vtables, and
   vtable-like objects: the old g++ "_vt." prefix and the V3 ABI
   "_ZTV" (vtable), "_ZTT" (VTT) and "_ZTC" (construction vtable)
   prefixes.  Ideally, this logic should not be here; instead, there
   should be some programmatic way of inquiring as to whether or not
   an object is a vtable.

   The original expansion tested a variable spelled `name' instead of
   the macro argument NAME, so it compiled only when the caller
   happened to pass a variable with that exact spelling; use (NAME).  */

#define VTABLE_NAME_P(NAME)				\
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0	\
   || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0	\
   || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0	\
   || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
11555
11556 void
11557 rs6000_output_symbol_ref (file, x)
11558 FILE *file;
11559 rtx x;
11560 {
11561 /* Currently C++ toc references to vtables can be emitted before it
11562 is decided whether the vtable is public or private. If this is
11563 the case, then the linker will eventually complain that there is
11564 a reference to an unknown section. Thus, for vtables only,
11565 we emit the TOC reference to reference the symbol and not the
11566 section. */
11567 const char *name = XSTR (x, 0);
11568
11569 if (VTABLE_NAME_P (name))
11570 {
11571 RS6000_OUTPUT_BASENAME (file, name);
11572 }
11573 else
11574 assemble_name (file, name);
11575 }
11576
/* Output a TOC entry for constant X in mode MODE, labeled LCxx where
   xx is LABELNO.  We derive the entry name from what is being written:
   "FD"/"FS" for double/single float constants, "ID"/"IS" for integer
   constants, otherwise the (possibly offset-decorated) symbol name.
   With -mminimal-toc the data goes in a private constant pool instead,
   so only a bare .long / double-int directive is emitted.  */

void
output_toc (file, x, labelno, mode)
     FILE *file;
     rtx x;
     int labelno;
     enum machine_mode mode;
{
  char buf[256];
  const char *name = buf;
  const char *real_name;
  rtx base = x;
  int offset = 0;

  if (TARGET_NO_TOC)
    abort ();

  /* When the linker won't eliminate them, don't output duplicate
     TOC entries (this happens on AIX if there is any kind of TOC,
     and on SVR4 under -fPIC or -mrelocatable).  */
  if (TARGET_TOC)
    {
      struct toc_hash_struct *h;
      void * * found;

      h = ggc_alloc (sizeof (*h));
      h->key = x;
      h->key_mode = mode;
      h->labelno = labelno;

      found = htab_find_slot (toc_hash_table, h, 1);
      if (*found == NULL)
	*found = h;
      else  /* This is indeed a duplicate.
	       Set this label equal to that label.  */
	{
	  fputs ("\t.set ", file);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d,", labelno);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
				   found)->labelno));
	  return;
	}
    }

  /* If we're going to put a double constant in the TOC, make sure it's
     aligned properly when strict alignment is on.  */
  if (GET_CODE (x) == CONST_DOUBLE
      && STRICT_ALIGNMENT
      && GET_MODE_BITSIZE (mode) >= 64
      && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
    ASM_OUTPUT_ALIGN (file, 3);
  }

  ASM_OUTPUT_INTERNAL_LABEL (file, "LC", labelno);

  /* Handle FP constants specially.  Note that if we have a minimal
     TOC, things we put here aren't actually in the TOC, so we can allow
     FP constants.  */
  if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      REAL_VALUE_TYPE rv;
      long k[2];

      /* Convert to the target's double image; k[0]/k[1] are the two
	 32-bit halves in target order.  */
      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      REAL_VALUE_TYPE rv;
      long l;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  /* Pad the 32-bit image to a 64-bit TOC slot.  */
	  fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx\n", l & 0xffffffff);
	  return;
	}
    }
  else if (GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;

      /* Split the constant into 32-bit halves, sign-extending a
	 CONST_INT into `high'.  */
      if (GET_CODE (x) == CONST_DOUBLE)
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}
      else
#if HOST_BITS_PER_WIDE_INT == 32
	{
	  low = INTVAL (x);
	  high = (low & 0x80000000) ? ~0 : 0;
	}
#else
	{
	  low = INTVAL (x) & 0xffffffff;
	  high = (HOST_WIDE_INT) INTVAL (x) >> 32;
	}
#endif

      /* TOC entries are always Pmode-sized, but since this
	 is a bigendian machine then if we're putting smaller
	 integer constants in the TOC we have to pad them.
	 (This is still a win over putting the constants in
	 a separate constant pool, because then we'd have
	 to have both a TOC entry _and_ the actual constant.)

	 For a 32-bit target, CONST_INT values are loaded and shifted
	 entirely within `low' and can be stored in one TOC entry.  */

      if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
	abort ();/* It would be easy to make this work, but it doesn't now.  */

      if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
	{
	  /* Left-justify the value within the Pmode-sized slot.  */
#if HOST_BITS_PER_WIDE_INT == 32
	  lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
			 POINTER_SIZE, &low, &high, 0);
#else
	  low |= high << 32;
	  low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
	  high = (HOST_WIDE_INT) low >> 32;
	  low &= 0xffffffff;
#endif
	}

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc ID_%lx_%lx[TC],",
		     (long) high & 0xffffffff, (long) low & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   (long) high & 0xffffffff, (long) low & 0xffffffff);
	  return;
	}
      else
	{
	  if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
	    {
	      /* Double-word value on a 32-bit target: two slots.  */
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc ID_%lx_%lx[TC],",
			 (long) high & 0xffffffff, (long) low & 0xffffffff);
	      fprintf (file, "0x%lx,0x%lx\n",
		       (long) high & 0xffffffff, (long) low & 0xffffffff);
	    }
	  else
	    {
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
	      fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
	    }
	  return;
	}
    }

  /* Symbolic constant: possibly (const (plus SYM offset)).  */
  if (GET_CODE (x) == CONST)
    {
      if (GET_CODE (XEXP (x, 0)) != PLUS)
	abort ();

      base = XEXP (XEXP (x, 0), 0);
      offset = INTVAL (XEXP (XEXP (x, 0), 1));
    }

  if (GET_CODE (base) == SYMBOL_REF)
    name = XSTR (base, 0);
  else if (GET_CODE (base) == LABEL_REF)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
  else if (GET_CODE (base) == CODE_LABEL)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
  else
    abort ();

  real_name = (*targetm.strip_name_encoding) (name);
  if (TARGET_MINIMAL_TOC)
    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
  else
    {
      fprintf (file, "\t.tc %s", real_name);

      /* Encode the offset into the entry name: .N for negative,
	 .P for positive, so distinct offsets get distinct entries.  */
      if (offset < 0)
	fprintf (file, ".N%d", - offset);
      else if (offset)
	fprintf (file, ".P%d", offset);

      fputs ("[TC],", file);
    }

  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a TOC reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  if (VTABLE_NAME_P (name))
    {
      RS6000_OUTPUT_BASENAME (file, name);
      if (offset < 0)
	fprintf (file, "%d", offset);
      else if (offset > 0)
	fprintf (file, "+%d", offset);
    }
  else
    output_addr_const (file, x);
  putc ('\n', file);
}
11833 \f
/* Output an assembler pseudo-op to write an ASCII string of N characters
   starting at P to FILE.

   On the RS/6000, we have to do this using the .byte operation and
   write out special characters outside the quoted string.
   Also, the assembler is broken; very long strings are truncated,
   so we must artificially break them up early.

   The three state strings work as follows: `for_string' is emitted
   before the next quoted printable character, `for_decimal' before the
   next numeric byte, and `to_close' is what is still needed to finish
   the current directive; each is NULL (or unused) when not needed.  */

void
output_ascii (file, p, n)
     FILE *file;
     const char *p;
     int n;
{
  char c;
  int i, count_string;
  const char *for_string = "\t.byte \"";
  const char *for_decimal = "\t.byte ";
  const char *to_close = NULL;

  count_string = 0;
  for (i = 0; i < n; i++)
    {
      c = *p++;
      if (c >= ' ' && c < 0177)
	{
	  /* Printable character: emit inside a quoted string.  */
	  if (for_string)
	    fputs (for_string, file);
	  putc (c, file);

	  /* Write two quotes to get one.  */
	  if (c == '"')
	    {
	      putc (c, file);
	      ++count_string;
	    }

	  for_string = NULL;
	  for_decimal = "\"\n\t.byte ";
	  to_close = "\"\n";
	  ++count_string;

	  /* Break the string early to dodge assembler truncation of
	     very long strings.  */
	  if (count_string >= 512)
	    {
	      fputs (to_close, file);

	      for_string = "\t.byte \"";
	      for_decimal = "\t.byte ";
	      to_close = NULL;
	      count_string = 0;
	    }
	}
      else
	{
	  /* Non-printable: emit as a decimal .byte operand.  */
	  if (for_decimal)
	    fputs (for_decimal, file);
	  fprintf (file, "%d", c);

	  for_string = "\n\t.byte \"";
	  for_decimal = ", ";
	  to_close = "\n";
	  count_string = 0;
	}
    }

  /* Now close the string if we have written one.  Then end the line.  */
  if (to_close)
    fputs (to_close, file);
}
11903 \f
/* Generate a unique section name for FILENAME for a section type
   represented by SECTION_DESC.  Output goes into BUF.

   SECTION_DESC can be any string, as long as it is different for each
   possible section type.

   We name the section in the same manner as xlc.  The name begins with
   an underscore followed by the filename (after stripping any leading
   directory names) with the last period replaced by the string
   SECTION_DESC.  If FILENAME does not contain a period, SECTION_DESC
   is appended to the end of the name.  */

void
rs6000_gen_section_name (buf, filename, section_desc)
     char **buf;
     const char *filename;
     const char *section_desc;
{
  const char *scan;
  const char *base = filename;
  const char *last_period = 0;
  char *out;
  int len;

  /* Find the basename and the last period anywhere in FILENAME.  */
  for (scan = filename; *scan; scan++)
    {
      if (*scan == '/')
	base = scan + 1;
      else if (*scan == '.')
	last_period = scan;
    }

  /* One byte for the leading '_', one for the terminator.  */
  len = strlen (base) + strlen (section_desc) + 2;
  *buf = (char *) xmalloc (len);
  out = *buf;

  *out++ = '_';

  for (scan = base; *scan; scan++)
    {
      if (scan == last_period)
	{
	  strcpy (out, section_desc);
	  out += strlen (section_desc);
	}
      else if (ISALNUM (*scan))
	*out++ = *scan;
    }

  if (last_period != 0)
    *out = '\0';
  else
    strcpy (out, section_desc);
}
11958 \f
/* Emit profile function.  For the AIX and Darwin ABIs the mcount call
   is emitted as RTL here (LABELNO names the AIX profile-counter label);
   the other ABIs emit their profiling code textually in
   output_function_profiler instead.  */

void
output_profile_hook (labelno)
     int labelno ATTRIBUTE_UNUSED;
{
  if (DEFAULT_ABI == ABI_AIX)
    {
#ifdef NO_PROFILE_COUNTERS
      /* No per-function counter word: call mcount with no argument.  */
      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
#else
      char buf[30];
      const char *label_name;
      rtx fun;

      /* Pass the address of the LPxx counter label to mcount.  */
      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
      fun = gen_rtx_SYMBOL_REF (Pmode, label_name);

      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
			 fun, Pmode);
#endif
    }
  else if (DEFAULT_ABI == ABI_DARWIN)
    {
      const char *mcount_name = RS6000_MCOUNT;
      int caller_addr_regno = LINK_REGISTER_REGNUM;

      /* Be conservative and always set this, at least for now.  */
      current_function_uses_pic_offset_table = 1;

#if TARGET_MACHO
      /* For PIC code, set up a stub and collect the caller's address
	 from r0, which is where the prologue puts it.  */
      if (flag_pic)
	{
	  mcount_name = machopic_stub_name (mcount_name);
	  if (current_function_uses_pic_offset_table)
	    caller_addr_regno = 0;
	}
#endif
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
			 0, VOIDmode, 1,
			 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
    }
}
12005
/* Write function profiler code.  Emits the textual assembly that loads
   the LPxx label address, saves the link register at SAVE_LR bytes off
   the stack pointer, and branches to mcount.  Only used for the V.4 and
   AIX_NODESC ABIs; AIX and Darwin do this in output_profile_hook.  */

void
output_function_profiler (file, labelno)
  FILE *file;
  int labelno;
{
  char buf[100];
  int save_lr = 8;		/* LR save slot offset; 4 under V.4.  */

  ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_V4:
      save_lr = 4;
      /* Fall through.  */

    case ABI_AIX_NODESC:
      if (!TARGET_32BIT)
	{
	  warning ("no profiling of 64-bit code for this ABI");
	  return;
	}
      fprintf (file, "\tmflr %s\n", reg_names[0]);
      if (flag_pic == 1)
	{
	  /* Small-model PIC: fetch the label address from the GOT.  */
	  fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
	  asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@got(%s)\n", reg_names[12]);
	}
      else if (flag_pic > 1)
	{
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  /* Now, we need to get the address of the label.  */
	  fputs ("\tbl 1f\n\t.long ", file);
	  assemble_name (file, buf);
	  fputs ("-.\n1:", file);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
	  asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
		       reg_names[0], reg_names[11]);
	  asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
		       reg_names[0], reg_names[0], reg_names[11]);
	}
      else
	{
	  /* Non-PIC: build the label address with lis/la.  */
	  asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
	  assemble_name (file, buf);
	  fputs ("@ha\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@l(%s)\n", reg_names[12]);
	}

      if (current_function_needs_context && DEFAULT_ABI == ABI_AIX_NODESC)
	{
	  /* Preserve the static chain register across the mcount call.  */
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[STATIC_CHAIN_REGNUM],
		       12, reg_names[1]);
	  fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	  asm_fprintf (file, "\t{l|lwz} %s,%d(%s)\n",
		       reg_names[STATIC_CHAIN_REGNUM],
		       12, reg_names[1]);
	}
      else
	/* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH.  */
	fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
      break;

    case ABI_AIX:
    case ABI_DARWIN:
      /* Don't do anything, done in output_profile_hook ().  */
      break;
    }
}
12090
12091 /* Adjust the cost of a scheduling dependency. Return the new cost of
12092 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
12093
12094 static int
12095 rs6000_adjust_cost (insn, link, dep_insn, cost)
12096 rtx insn;
12097 rtx link;
12098 rtx dep_insn ATTRIBUTE_UNUSED;
12099 int cost;
12100 {
12101 if (! recog_memoized (insn))
12102 return 0;
12103
12104 if (REG_NOTE_KIND (link) != 0)
12105 return 0;
12106
12107 if (REG_NOTE_KIND (link) == 0)
12108 {
12109 /* Data dependency; DEP_INSN writes a register that INSN reads
12110 some cycles later. */
12111 switch (get_attr_type (insn))
12112 {
12113 case TYPE_JMPREG:
12114 /* Tell the first scheduling pass about the latency between
12115 a mtctr and bctr (and mtlr and br/blr). The first
12116 scheduling pass will not know about this latency since
12117 the mtctr instruction, which has the latency associated
12118 to it, will be generated by reload. */
12119 return TARGET_POWER ? 5 : 4;
12120 case TYPE_BRANCH:
12121 /* Leave some extra cycles between a compare and its
12122 dependent branch, to inhibit expensive mispredicts. */
12123 if ((rs6000_cpu_attr == CPU_PPC603
12124 || rs6000_cpu_attr == CPU_PPC604
12125 || rs6000_cpu_attr == CPU_PPC604E
12126 || rs6000_cpu_attr == CPU_PPC620
12127 || rs6000_cpu_attr == CPU_PPC630
12128 || rs6000_cpu_attr == CPU_PPC750
12129 || rs6000_cpu_attr == CPU_PPC7400
12130 || rs6000_cpu_attr == CPU_PPC7450
12131 || rs6000_cpu_attr == CPU_POWER4)
12132 && recog_memoized (dep_insn)
12133 && (INSN_CODE (dep_insn) >= 0)
12134 && (get_attr_type (dep_insn) == TYPE_COMPARE
12135 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
12136 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
12137 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL))
12138 return cost + 2;
12139 default:
12140 break;
12141 }
12142 /* Fall out to return default cost. */
12143 }
12144
12145 return cost;
12146 }
12147
/* A C statement (sans semicolon) to update the integer scheduling
   priority INSN_PRIORITY (INSN).  Reduce the priority to execute the
   INSN earlier, increase the priority to execute INSN later.  Do not
   define this macro if you do not need to adjust the scheduling
   priorities of insns.

   The experimental PPC750 heuristic below is disabled (#if 0); the
   hook currently returns PRIORITY unchanged.  */

static int
rs6000_adjust_priority (insn, priority)
     rtx insn ATTRIBUTE_UNUSED;
     int priority;
{
  /* On machines (like the 750) which have asymmetric integer units,
     where one integer unit can do multiply and divides and the other
     can't, reduce the priority of multiply/divide so it is scheduled
     before other integer operations.  */

#if 0
  if (! INSN_P (insn))
    return priority;

  if (GET_CODE (PATTERN (insn)) == USE)
    return priority;

  switch (rs6000_cpu_attr) {
  case CPU_PPC750:
    switch (get_attr_type (insn))
      {
      default:
	break;

      case TYPE_IMUL:
      case TYPE_IDIV:
	fprintf (stderr, "priority was %#x (%d) before adjustment\n",
		 priority, priority);
	if (priority >= 0 && priority < 0x01000000)
	  priority >>= 3;
	break;
      }
  }
#endif

  return priority;
}
12191
12192 /* Return how many instructions the machine can issue per cycle. */
12193
12194 static int
12195 rs6000_issue_rate ()
12196 {
12197 switch (rs6000_cpu_attr) {
12198 case CPU_RIOS1: /* ? */
12199 case CPU_RS64A:
12200 case CPU_PPC601: /* ? */
12201 case CPU_PPC7450:
12202 return 3;
12203 case CPU_PPC603:
12204 case CPU_PPC750:
12205 case CPU_PPC7400:
12206 return 2;
12207 case CPU_RIOS2:
12208 case CPU_PPC604:
12209 case CPU_PPC604E:
12210 case CPU_PPC620:
12211 case CPU_PPC630:
12212 case CPU_POWER4:
12213 return 4;
12214 default:
12215 return 1;
12216 }
12217 }
12218
12219 \f
12220 /* Length in units of the trampoline for entering a nested function. */
12221
12222 int
12223 rs6000_trampoline_size ()
12224 {
12225 int ret = 0;
12226
12227 switch (DEFAULT_ABI)
12228 {
12229 default:
12230 abort ();
12231
12232 case ABI_AIX:
12233 ret = (TARGET_32BIT) ? 12 : 24;
12234 break;
12235
12236 case ABI_DARWIN:
12237 case ABI_V4:
12238 case ABI_AIX_NODESC:
12239 ret = (TARGET_32BIT) ? 40 : 48;
12240 break;
12241 }
12242
12243 return ret;
12244 }
12245
/* Emit RTL insns to initialize the variable parts of a trampoline.
   ADDR is an RTX for the address of the trampoline.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.  */

void
rs6000_initialize_trampoline (addr, fnaddr, cxt)
     rtx addr;
     rtx fnaddr;
     rtx cxt;
{
  enum machine_mode pmode = Pmode;
  int regsize = (TARGET_32BIT) ? 4 : 8;	/* bytes per pointer word */
  rtx ctx_reg = force_reg (pmode, cxt);

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

/* Macros to shorten the code expansions below.  */
#define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
#define MEM_PLUS(addr,offset) \
  gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))

    /* Under AIX, just build the 3 word function descriptor:
       code address, TOC pointer, static chain.  */
    case ABI_AIX:
      {
	rtx fn_reg = gen_reg_rtx (pmode);
	rtx toc_reg = gen_reg_rtx (pmode);
	/* FNADDR itself is a descriptor; copy its code and TOC words.  */
	emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
	emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
	emit_move_insn (MEM_DEREF (addr), fn_reg);
	emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
	emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
      }
      break;

    /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
    case ABI_DARWIN:
    case ABI_V4:
    case ABI_AIX_NODESC:
      emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
			 FALSE, VOIDmode, 4,
			 addr, pmode,
			 GEN_INT (rs6000_trampoline_size ()), SImode,
			 fnaddr, pmode,
			 ctx_reg, pmode);
      break;
    }

  return;
}
12298
12299 \f
/* Table of valid machine attributes.  */

const struct attribute_spec rs6000_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  /* "longcall"/"shortcall" take no arguments, apply to function types,
     and share one handler that merely validates where they appear.  */
  { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
  { NULL, 0, 0, false, false, false, NULL }
};
12309
12310 /* Handle a "longcall" or "shortcall" attribute; arguments as in
12311 struct attribute_spec.handler. */
12312
12313 static tree
12314 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
12315 tree *node;
12316 tree name;
12317 tree args ATTRIBUTE_UNUSED;
12318 int flags ATTRIBUTE_UNUSED;
12319 bool *no_add_attrs;
12320 {
12321 if (TREE_CODE (*node) != FUNCTION_TYPE
12322 && TREE_CODE (*node) != FIELD_DECL
12323 && TREE_CODE (*node) != TYPE_DECL)
12324 {
12325 warning ("`%s' attribute only applies to functions",
12326 IDENTIFIER_POINTER (name));
12327 *no_add_attrs = true;
12328 }
12329
12330 return NULL_TREE;
12331 }
12332
12333 /* Set longcall attributes on all functions declared when
12334 rs6000_default_long_calls is true. */
12335 static void
12336 rs6000_set_default_type_attributes (type)
12337 tree type;
12338 {
12339 if (rs6000_default_long_calls
12340 && (TREE_CODE (type) == FUNCTION_TYPE
12341 || TREE_CODE (type) == METHOD_TYPE))
12342 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
12343 NULL_TREE,
12344 TYPE_ATTRIBUTES (type));
12345 }
12346
12347 /* Return a reference suitable for calling a function with the
12348 longcall attribute. */
12349
12350 struct rtx_def *
12351 rs6000_longcall_ref (call_ref)
12352 rtx call_ref;
12353 {
12354 const char *call_name;
12355 tree node;
12356
12357 if (GET_CODE (call_ref) != SYMBOL_REF)
12358 return call_ref;
12359
12360 /* System V adds '.' to the internal name, so skip them. */
12361 call_name = XSTR (call_ref, 0);
12362 if (*call_name == '.')
12363 {
12364 while (*call_name == '.')
12365 call_name++;
12366
12367 node = get_identifier (call_name);
12368 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
12369 }
12370
12371 return force_reg (Pmode, call_ref);
12372 }
12373
12374 \f
12375 #ifdef USING_ELFOS_H
12376
12377 /* A C statement or statements to switch to the appropriate section
12378 for output of RTX in mode MODE. You can assume that RTX is some
12379 kind of constant in RTL. The argument MODE is redundant except in
12380 the case of a `const_int' rtx. Select the section by calling
12381 `text_section' or one of the alternatives for other sections.
12382
12383 Do not define this macro if you put all constants in the read-only
12384 data section. */
12385
12386 static void
12387 rs6000_elf_select_rtx_section (mode, x, align)
12388 enum machine_mode mode;
12389 rtx x;
12390 unsigned HOST_WIDE_INT align;
12391 {
12392 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
12393 toc_section ();
12394 else
12395 default_elf_select_rtx_section (mode, x, align);
12396 }
12397
/* A C statement or statements to switch to the appropriate
   section for output of DECL.  DECL is either a `VAR_DECL' node
   or a constant of some sort.  RELOC indicates whether forming
   the initial value of DECL requires link-time relocations.

   Delegates to the generic ELF chooser; the final argument forces
   relocatable-data treatment when compiling PIC or for the AIX ABI.  */

static void
rs6000_elf_select_section (decl, reloc, align)
     tree decl;
     int reloc;
     unsigned HOST_WIDE_INT align;
{
  default_elf_select_section_1 (decl, reloc, align,
				flag_pic || DEFAULT_ABI == ABI_AIX);
}
12412
/* A C statement to build up a unique section name, expressed as a
   STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
   RELOC indicates whether the initial value of EXP requires
   link-time relocations.  If you do not define this macro, GCC will use
   the symbol name prefixed by `.' as the section name.  Note - this
   macro can now be called for uninitialized data items as well as
   initialized data and functions.

   Same PIC/AIX relocatable-data policy as rs6000_elf_select_section.  */

static void
rs6000_elf_unique_section (decl, reloc)
     tree decl;
     int reloc;
{
  default_unique_section_1 (decl, reloc,
			    flag_pic || DEFAULT_ABI == ABI_AIX);
}
12429
12430 \f
/* If we are referencing a function that is static or is known to be
   in this file, make the SYMBOL_REF special.  We can use this to indicate
   that we can branch to this function without emitting a no-op after the
   call.  For real AIX calling sequences, we also replace the
   function name with the real name (1 or 2 leading .'s), rather than
   the function descriptor name.  This saves a lot of overriding code
   to read the prefixes.

   For V.4 small-data variables, an '@' is prepended to the symbol name
   so output routines can recognize sdata references.  */

static void
rs6000_elf_encode_section_info (decl, first)
     tree decl;
     int first;
{
  /* Only encode on the first RTL generation for the decl.  */
  if (!first)
    return;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      rtx sym_ref = XEXP (DECL_RTL (decl), 0);
      if ((*targetm.binds_local_p) (decl))
	SYMBOL_REF_FLAG (sym_ref) = 1;

      if (DEFAULT_ABI == ABI_AIX)
	{
	  /* NOTE(review): guarded by DEFAULT_ABI == ABI_AIX, so len1 is
	     always 1 here; the ternary (and the second '.' stored below,
	     which the memcpy overwrites when len1 == 1) is dead.  */
	  size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
	  size_t len2 = strlen (XSTR (sym_ref, 0));
	  char *str = alloca (len1 + len2 + 1);
	  str[0] = '.';
	  str[1] = '.';
	  memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);

	  XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
	}
    }
  else if (rs6000_sdata != SDATA_NONE
	   && DEFAULT_ABI == ABI_V4
	   && TREE_CODE (decl) == VAR_DECL)
    {
      int size = int_size_in_bytes (TREE_TYPE (decl));
      tree section_name = DECL_SECTION_NAME (decl);
      const char *name = (char *)0;
      int len = 0;

      if (section_name)
	{
	  if (TREE_CODE (section_name) == STRING_CST)
	    {
	      name = TREE_STRING_POINTER (section_name);
	      len = TREE_STRING_LENGTH (section_name);
	    }
	  else
	    abort ();
	}

      /* Small enough for -G, or explicitly placed in a small-data
	 section by name.  */
      if ((size > 0 && size <= g_switch_value)
	  || (name
	      && ((len == sizeof (".sdata") - 1
		   && strcmp (name, ".sdata") == 0)
		  || (len == sizeof (".sdata2") - 1
		      && strcmp (name, ".sdata2") == 0)
		  || (len == sizeof (".sbss") - 1
		      && strcmp (name, ".sbss") == 0)
		  || (len == sizeof (".sbss2") - 1
		      && strcmp (name, ".sbss2") == 0)
		  || (len == sizeof (".PPC.EMB.sdata0") - 1
		      && strcmp (name, ".PPC.EMB.sdata0") == 0)
		  || (len == sizeof (".PPC.EMB.sbss0") - 1
		      && strcmp (name, ".PPC.EMB.sbss0") == 0))))
	{
	  rtx sym_ref = XEXP (DECL_RTL (decl), 0);
	  /* This `len' intentionally shadows the outer one.  */
	  size_t len = strlen (XSTR (sym_ref, 0));
	  char *str = alloca (len + 2);

	  str[0] = '@';
	  memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
	  XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
	}
    }
}
12510
/* Undo the encoding done by rs6000_elf_encode_section_info: return STR
   with any leading '*' (assembler-name marker) and '@' (small-data
   marker) characters skipped.  */

static const char *
rs6000_elf_strip_name_encoding (str)
     const char *str;
{
  for (; *str == '*' || *str == '@'; str++)
    continue;

  return str;
}
12519
12520 static bool
12521 rs6000_elf_in_small_data_p (decl)
12522 tree decl;
12523 {
12524 if (rs6000_sdata == SDATA_NONE)
12525 return false;
12526
12527 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
12528 {
12529 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
12530 if (strcmp (section, ".sdata") == 0
12531 || strcmp (section, ".sdata2") == 0
12532 || strcmp (section, ".sbss") == 0)
12533 return true;
12534 }
12535 else
12536 {
12537 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
12538
12539 if (size > 0
12540 && size <= g_switch_value
12541 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
12542 return true;
12543 }
12544
12545 return false;
12546 }
12547
12548 #endif /* USING_ELFOS_H */
12549
12550 \f
12551 /* Return a REG that occurs in ADDR with coefficient 1.
12552 ADDR can be effectively incremented by incrementing REG.
12553
12554 r0 is special and we must not select it as an address
12555 register by this routine since our caller will try to
12556 increment the returned register via an "la" instruction. */
12557
12558 struct rtx_def *
12559 find_addr_reg (addr)
12560 rtx addr;
12561 {
12562 while (GET_CODE (addr) == PLUS)
12563 {
12564 if (GET_CODE (XEXP (addr, 0)) == REG
12565 && REGNO (XEXP (addr, 0)) != 0)
12566 addr = XEXP (addr, 0);
12567 else if (GET_CODE (XEXP (addr, 1)) == REG
12568 && REGNO (XEXP (addr, 1)) != 0)
12569 addr = XEXP (addr, 1);
12570 else if (CONSTANT_P (XEXP (addr, 0)))
12571 addr = XEXP (addr, 1);
12572 else if (CONSTANT_P (XEXP (addr, 1)))
12573 addr = XEXP (addr, 0);
12574 else
12575 abort ();
12576 }
12577 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
12578 return addr;
12579 abort ();
12580 }
12581
/* Report a fatal "bad address" error for insn operand OP and stop
   compilation.  */

void
rs6000_fatal_bad_address (op)
     rtx op;
{
  fatal_insn ("bad address", op);
}
12588
12589 /* Called to register all of our global variables with the garbage
12590 collector. */
12591
static void
rs6000_add_gc_roots ()
{
  /* Create the TOC hash table and register it as a GC root so live
     entries are traversed by toc_hash_mark_table.  1021 is just a
     prime initial size hint for the hash table.  */
  toc_hash_table = htab_create (1021, toc_hash_function, toc_hash_eq, NULL);
  ggc_add_root (&toc_hash_table, 1, sizeof (toc_hash_table),
		toc_hash_mark_table);
}
12599
12600 #if TARGET_MACHO
12601
12602 #if 0
12603 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
12604 reference and a constant. */
12605
12606 int
12607 symbolic_operand (op)
12608 rtx op;
12609 {
12610 switch (GET_CODE (op))
12611 {
12612 case SYMBOL_REF:
12613 case LABEL_REF:
12614 return 1;
12615 case CONST:
12616 op = XEXP (op, 0);
12617 return (GET_CODE (op) == SYMBOL_REF ||
12618 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
12619 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
12620 && GET_CODE (XEXP (op, 1)) == CONST_INT);
12621 default:
12622 return 0;
12623 }
12624 }
12625 #endif
12626
12627 #ifdef RS6000_LONG_BRANCH
12628
12629 static tree stub_list = 0;
12630
12631 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
12632 procedure calls to the linked list. */
12633
void
add_compiler_stub (label_name, function_name, line_number)
     tree label_name;
     tree function_name;
     int line_number;
{
  /* Each list node stores the stub label in TREE_VALUE, the callee in
     TREE_PURPOSE, and the source line as an INTEGER_CST squirreled
     into TREE_TYPE -- see the STUB_* accessor macros below.  */
  tree stub = build_tree_list (function_name, label_name);
  TREE_TYPE (stub) = build_int_2 (line_number, 0);
  /* Push onto the front of the global stub list.  */
  TREE_CHAIN (stub) = stub_list;
  stub_list = stub;
}
12645
12646 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
12647 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
12648 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
12649
12650 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
12651 handling procedure calls from the linked list and initializes the
12652 linked list. */
12653
12654 void
12655 output_compiler_stub ()
12656 {
12657 char tmp_buf[256];
12658 char label_buf[256];
12659 tree stub;
12660
12661 if (!flag_pic)
12662 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
12663 {
12664 fprintf (asm_out_file,
12665 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
12666
12667 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
12668 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
12669 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
12670 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
12671
12672 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
12673 strcpy (label_buf,
12674 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
12675 else
12676 {
12677 label_buf[0] = '_';
12678 strcpy (label_buf+1,
12679 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
12680 }
12681
12682 strcpy (tmp_buf, "lis r12,hi16(");
12683 strcat (tmp_buf, label_buf);
12684 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
12685 strcat (tmp_buf, label_buf);
12686 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
12687 output_asm_insn (tmp_buf, 0);
12688
12689 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
12690 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
12691 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
12692 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
12693 }
12694
12695 stub_list = 0;
12696 }
12697
12698 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
12699 already there or not. */
12700
12701 int
12702 no_previous_def (function_name)
12703 tree function_name;
12704 {
12705 tree stub;
12706 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
12707 if (function_name == STUB_FUNCTION_NAME (stub))
12708 return 0;
12709 return 1;
12710 }
12711
12712 /* GET_PREV_LABEL gets the label name from the previous definition of
12713 the function. */
12714
12715 tree
12716 get_prev_label (function_name)
12717 tree function_name;
12718 {
12719 tree stub;
12720 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
12721 if (function_name == STUB_FUNCTION_NAME (stub))
12722 return STUB_LABEL_NAME (stub);
12723 return 0;
12724 }
12725
12726 /* INSN is either a function call or a millicode call. It may have an
12727 unconditional jump in its delay slot.
12728
12729 CALL_DEST is the routine we are calling. */
12730
char *
output_call (insn, call_dest, operand_number)
     rtx insn;
     rtx call_dest;
     int operand_number;
{
  /* Assembler text is built in a static buffer; the returned string
     is only valid until the next call.  */
  static char buf[256];

  /* A direct call to a known symbol under TARGET_LONG_BRANCH
     (-mlongcall) and non-PIC goes through a compiler-generated stub
     via "jbsr"; everything else is a plain "bl".  */
  if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
    {
      tree labelname;
      tree funname = get_identifier (XSTR (call_dest, 0));

      if (no_previous_def (funname))
	{
	  /* First call to this function: invent an internal label for
	     the stub and record it for output_compiler_stub.  */
	  int line_number = 0;
	  rtx label_rtx = gen_label_rtx ();
	  char *label_buf, temp_buf[256];
	  ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
				       CODE_LABEL_NUMBER (label_rtx));
	  /* Drop a leading '*' (the "no prefix" marker) if present.  */
	  label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
	  labelname = get_identifier (label_buf);
	  /* Walk backwards to the nearest NOTE to give the stub a
	     source line for the debug stabs; stays 0 if none found.  */
	  for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
	  if (insn)
	    line_number = NOTE_LINE_NUMBER (insn);
	  add_compiler_stub (labelname, funname, line_number);
	}
      else
	labelname = get_prev_label (funname);

      /* %.246s keeps the formatted text within the 256-byte buffer.  */
      sprintf (buf, "jbsr %%z%d,%.246s",
	       operand_number, IDENTIFIER_POINTER (labelname));
      return buf;
    }
  else
    {
      sprintf (buf, "bl %%z%d", operand_number);
      return buf;
    }
}
12770
12771 #endif /* RS6000_LONG_BRANCH */
12772
/* Write into BUF a local label name of the form "L<N>$<symbol>",
   preserving quoting: if SYMBOL already starts with '"', the label is
   inserted inside the existing quotes; if it merely needs quotes, the
   whole label is quoted.  LENGTH is unused.  */

#define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N)		\
  do {								\
    const char *const symbol_ = (SYMBOL);			\
    char *buffer_ = (BUF);					\
    if (symbol_[0] == '"')					\
      {								\
	sprintf(buffer_, "\"L%d$%s", (N), symbol_+1);		\
      }								\
    else if (name_needs_quotes(symbol_))			\
      {								\
	sprintf(buffer_, "\"L%d$%s\"", (N), symbol_);		\
      }								\
    else							\
      {								\
	sprintf(buffer_, "L%d$%s", (N), symbol_);		\
      }								\
  } while (0)
12790
12791
12792 /* Generate PIC and indirect symbol stubs. */
12793
void
machopic_output_stub (file, symb, stub)
     FILE *file;
     const char *symb, *stub;
{
  unsigned int length;
  char *symbol_name, *lazy_ptr_name;
  char *local_label_0;
  /* Counter that makes the generated local labels unique per stub.  */
  static int label = 0;

  /* Lose our funky encoding stuff so it doesn't contaminate the stub.  */
  symb = (*targetm.strip_name_encoding) (symb);

  label += 1;

  /* Build the three derived names on the stack; +32 leaves room for
     the decorations the GEN_* macros prepend/append.  */
  length = strlen (symb);
  symbol_name = alloca (length + 32);
  GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);

  lazy_ptr_name = alloca (length + 32);
  GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);

  local_label_0 = alloca (length + 32);
  GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);

  if (flag_pic == 2)
    machopic_picsymbol_stub_section ();
  else
    machopic_symbol_stub_section ();

  fprintf (file, "%s:\n", stub);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

  if (flag_pic == 2)
    {
      /* PIC stub: bcl/mflr obtains the current PC in r11, from which
	 the lazy pointer's address is formed (ha16/lo16 pair); the
	 target is loaded into r12 and reached through CTR.  The final
	 addi leaves r11 pointing at the lazy pointer itself --
	 NOTE(review): presumably consumed by the lazy binder; confirm
	 against dyld's stub-binding convention.  */
      fprintf (file, "\tmflr r0\n");
      fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
      fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
      fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtlr r0\n");
      fprintf (file, "\tlwz r12,lo16(%s-%s)(r11)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\taddi r11,r11,lo16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tbctr\n");
    }
  else
    /* Only the flag_pic == 2 form is implemented here.  */
    fprintf (file, "non-pure not supported\n");

  /* Emit the lazy pointer, initially resolving to the binding
     helper.  */
  machopic_lazy_symbol_ptr_section ();
  fprintf (file, "%s:\n", lazy_ptr_name);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
  fprintf (file, "\t.long dyld_stub_binding_helper\n");
}
12850
12851 /* Legitimize PIC addresses. If the address is already
12852 position-independent, we return ORIG. Newly generated
12853 position-independent addresses go into a reg. This is REG if non
12854 zero, otherwise we allocate register(s) as necessary. */
12855
/* True iff X (a CONST_INT) fits in a signed 16-bit immediate.  */
#define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)

rtx
rs6000_machopic_legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  rtx base, offset;

  /* Allocate a scratch register unless reload forbids new pseudos.  */
  if (reg == NULL && ! reload_in_progress && ! reload_completed)
    reg = gen_reg_rtx (Pmode);

  if (GET_CODE (orig) == CONST)
    {
      /* Already a PLUS off the PIC register: nothing to do.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  /* Legitimize the two operands of the PLUS independently.  */
	  base =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
						    Pmode, reg);
	  offset =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
						    Pmode, reg);
	}
      else
	abort ();

      if (GET_CODE (offset) == CONST_INT)
	{
	  if (SMALL_INT (offset))
	    /* A 16-bit offset can remain a constant addend.  */
	    return plus_constant (base, INTVAL (offset));
	  else if (! reload_in_progress && ! reload_completed)
	    offset = force_reg (Pmode, offset);
	  else
	    {
	      /* During/after reload no new pseudos are allowed, so
		 spill the whole constant to the literal pool.  */
	      rtx mem = force_const_mem (Pmode, orig);
	      return machopic_legitimize_pic_address (mem, Pmode, reg);
	    }
	}
      return gen_rtx (PLUS, Pmode, base, offset);
    }

  /* Fall back on generic machopic code.  */
  return machopic_legitimize_pic_address (orig, mode, reg);
}
12905
12906 /* This is just a placeholder to make linking work without having to
12907 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
12908 ever needed for Darwin (not too likely!) this would have to get a
12909 real definition. */
12910
void
toc_section ()
{
  /* Deliberately empty -- see the comment above this function.  */
}
12915
12916 #endif /* TARGET_MACHO */
12917
12918 #if TARGET_ELF
/* Compute the SECTION_* flags for an ELF section.  */

static unsigned int
rs6000_elf_section_type_flags (decl, name, reloc)
     tree decl;
     const char *name;
     int reloc;
{
  unsigned int flags
    = default_section_type_flags_1 (decl, name, reloc,
				    flag_pic || DEFAULT_ABI == ABI_AIX);

  /* Force writability under -mrelocatable.  NOTE(review): presumably
     because relocatable output is patched at load time (cf. the
     @fixup handling elsewhere in this file) -- confirm.  */
  if (TARGET_RELOCATABLE)
    flags |= SECTION_WRITE;

  return flags;
}
12934
12935 /* Record an element in the table of global constructors. SYMBOL is
12936 a SYMBOL_REF of the function to be called; PRIORITY is a number
12937 between 0 and MAX_INIT_PRIORITY.
12938
12939 This differs from default_named_section_asm_out_constructor in
12940 that we have special handling for -mrelocatable. */
12941
static void
rs6000_elf_asm_out_constructor (symbol, priority)
     rtx symbol;
     int priority;
{
  const char *section = ".ctors";
  char buf[16];

  /* Non-default priorities get their own ".ctors.NNNNN" section.  */
  if (priority != DEFAULT_INIT_PRIORITY)
    {
      sprintf (buf, ".ctors.%.5u",
	       /* Invert the numbering so the linker puts us in the proper
		  order; constructors are run from right to left, and the
		  linker sorts in increasing order.  */
	       MAX_INIT_PRIORITY - priority);
      section = buf;
    }

  named_section_flags (section, SECTION_WRITE);
  assemble_align (POINTER_SIZE);

  if (TARGET_RELOCATABLE)
    {
      /* -mrelocatable: emit the address with an @fixup marker instead
	 of a plain pointer constant.  */
      fputs ("\t.long (", asm_out_file);
      output_addr_const (asm_out_file, symbol);
      fputs (")@fixup\n", asm_out_file);
    }
  else
    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
}
12972
12973 static void
12974 rs6000_elf_asm_out_destructor (symbol, priority)
12975 rtx symbol;
12976 int priority;
12977 {
12978 const char *section = ".dtors";
12979 char buf[16];
12980
12981 if (priority != DEFAULT_INIT_PRIORITY)
12982 {
12983 sprintf (buf, ".dtors.%.5u",
12984 /* Invert the numbering so the linker puts us in the proper
12985 order; constructors are run from right to left, and the
12986 linker sorts in increasing order. */
12987 MAX_INIT_PRIORITY - priority);
12988 section = buf;
12989 }
12990
12991 named_section_flags (section, SECTION_WRITE);
12992 assemble_align (POINTER_SIZE);
12993
12994 if (TARGET_RELOCATABLE)
12995 {
12996 fputs ("\t.long (", asm_out_file);
12997 output_addr_const (asm_out_file, symbol);
12998 fputs (")@fixup\n", asm_out_file);
12999 }
13000 else
13001 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13002 }
13003 #endif
13004
13005 #if TARGET_XCOFF
/* Emit the globalizing directive (GLOBAL_ASM_OP) for NAME, stripped
   to its base name, on STREAM.  */

static void
rs6000_xcoff_asm_globalize_label (stream, name)
     FILE *stream;
     const char *name;
{
  fputs (GLOBAL_ASM_OP, stream);
  RS6000_OUTPUT_BASENAME (stream, name);
  putc ('\n', stream);
}
13015
13016 static void
13017 rs6000_xcoff_asm_named_section (name, flags)
13018 const char *name;
13019 unsigned int flags;
13020 {
13021 int smclass;
13022 static const char * const suffix[3] = { "PR", "RO", "RW" };
13023
13024 if (flags & SECTION_CODE)
13025 smclass = 0;
13026 else if (flags & SECTION_WRITE)
13027 smclass = 2;
13028 else
13029 smclass = 1;
13030
13031 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
13032 (flags & SECTION_CODE) ? "." : "",
13033 name, suffix[smclass], flags & SECTION_ENTSIZE);
13034 }
13035
13036 static void
13037 rs6000_xcoff_select_section (decl, reloc, align)
13038 tree decl;
13039 int reloc;
13040 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
13041 {
13042 if (decl_readonly_section_1 (decl, reloc, 1))
13043 {
13044 if (TREE_PUBLIC (decl))
13045 read_only_data_section ();
13046 else
13047 read_only_private_data_section ();
13048 }
13049 else
13050 {
13051 if (TREE_PUBLIC (decl))
13052 data_section ();
13053 else
13054 private_data_section ();
13055 }
13056 }
13057
/* Give DECL a unique section named after its own assembler name.  */

static void
rs6000_xcoff_unique_section (decl, reloc)
     tree decl;
     int reloc ATTRIBUTE_UNUSED;
{
  const char *name;

  /* Use select_section for private and uninitialized data.  Note the
     NULL/error checks on DECL_INITIAL must precede the
     initializer_zerop call.  */
  if (!TREE_PUBLIC (decl)
      || DECL_COMMON (decl)
      || DECL_INITIAL (decl) == NULL_TREE
      || DECL_INITIAL (decl) == error_mark_node
      || (flag_zero_initialized_in_bss
	  && initializer_zerop (DECL_INITIAL (decl))))
    return;

  /* Name the section after the decl's stripped assembler name.  */
  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
  name = (*targetm.strip_name_encoding) (name);
  DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
}
13078
13079 /* Select section for constant in constant pool.
13080
13081 On RS/6000, all constants are in the private read-only data area.
13082 However, if this is being placed in the TOC it must be output as a
13083 toc entry. */
13084
13085 static void
13086 rs6000_xcoff_select_rtx_section (mode, x, align)
13087 enum machine_mode mode;
13088 rtx x;
13089 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
13090 {
13091 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
13092 toc_section ();
13093 else
13094 read_only_private_data_section ();
13095 }
13096
13097 /* Remove any trailing [DS] or the like from the symbol name. */
13098
static const char *
rs6000_xcoff_strip_name_encoding (name)
     const char *name;
{
  size_t len;

  /* A leading '*' only marks the name as needing no further prefix.  */
  if (*name == '*')
    name++;
  len = strlen (name);
  /* A trailing mapping-class suffix such as "[DS]" is four characters;
     require at least that many so an empty name cannot be indexed at
     [len - 1] and a too-short name cannot yield a negative length.  */
  if (len >= 4 && name[len - 1] == ']')
    return ggc_alloc_string (name, len - 4);
  else
    return name;
}
13112
13113 /* Section attributes. AIX is always PIC. */
13114
static unsigned int
rs6000_xcoff_section_type_flags (decl, name, reloc)
     tree decl;
     const char *name;
     int reloc;
{
  unsigned int align;
  /* The trailing 1: AIX is always PIC (see the comment above).  */
  unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);

  /* Align to at least UNIT size.  */
  if (flags & SECTION_CODE)
    align = MIN_UNITS_PER_WORD;
  else
    /* Increase alignment of large objects if not already stricter.
       NOTE(review): DECL_ALIGN (decl) is applied unconditionally; if
       this hook can be reached with a null DECL this would crash --
       confirm against callers.  */
    align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
		 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
		 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);

  /* Encode log2 of the alignment in the SECTION_ENTSIZE bits.  */
  return flags | (exact_log2 (align) & SECTION_ENTSIZE);
}
13135
13136 #endif /* TARGET_XCOFF */
13137
13138 /* Note that this is also used for PPC64 Linux. */
13139
static void
rs6000_xcoff_encode_section_info (decl, first)
     tree decl;
     int first ATTRIBUTE_UNUSED;
{
  /* Set SYMBOL_REF_FLAG on the SYMBOL_REF of every function that
     binds locally in this module.  */
  if (TREE_CODE (decl) == FUNCTION_DECL
      && (*targetm.binds_local_p) (decl))
    SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
}
13149
13150 /* Cross-module name binding. For AIX and PPC64 Linux, which always are
13151 PIC, use private copy of flag_pic. */
13152
static bool
rs6000_binds_local_p (decl)
     tree decl;
{
  /* rs6000_flag_pic is the private copy of the user's PIC request for
     the always-PIC ABIs described in the comment above.  */
  return default_binds_local_p_1 (decl, flag_pic || rs6000_flag_pic);
}
This page took 0.652954 seconds and 6 git commands to generate.