]> gcc.gnu.org Git - gcc.git/blob - gcc/builtins.c
configure.in (noconfigdirs): Disable target-newlib and target-libgloss.
[gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "expr.h"
37 #include "optabs.h"
38 #include "libfuncs.h"
39 #include "recog.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "toplev.h"
43 #include "predict.h"
44 #include "tm_p.h"
45 #include "target.h"
46 #include "langhooks.h"
47
/* Nonzero if NODE's name starts with "__builtin_", i.e. the function was
   invoked via its builtin name rather than its plain library name.  */
#define CALLED_AS_BUILT_IN(NODE) \
  (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))

/* Register mappings for target machines without register windows.
   On such machines the incoming and outgoing register numbers are
   identical, so default both maps to the identity.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif

/* Direction in which to pad varargs save areas when the target does not
   say otherwise; defaults to following the target's byte endianness.  */
#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif
62
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[4]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

/* Stringified enumerator names for every builtin, generated by expanding
   builtins.def with a DEF_BUILTIN that keeps only the first field.  */
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM) #X,
const char *const built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of _DECL trees, make sure each element is
   initialized to NULL_TREE.  */
tree built_in_decls[(int) END_BUILTINS];
/* Declarations used when constructing the builtin implicitly in the compiler.
   It may be NULL_TREE when this is invalid (for instance the runtime is not
   required to implement the function call in all cases).  */
tree implicit_built_in_decls[(int) END_BUILTINS];

/* Trigonometric and mathematical constants used in builtin folding.
   Lazily initialized by init_builtin_dconsts; the flag records whether
   that has happened yet.  */
static bool builtin_dconsts_init = 0;
static REAL_VALUE_TYPE dconstpi;
static REAL_VALUE_TYPE dconste;
/* Forward declarations for the static helpers defined below.  The order
   mirrors the order of the definitions in the rest of this file.  */
static int get_pointer_alignment (tree, unsigned int);
static tree c_strlen (tree);
static const char *c_getstr (tree);
static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static rtx expand_builtin_setjmp (tree, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_constant_p (tree, enum machine_mode);
static rtx expand_builtin_args_info (tree);
static rtx expand_builtin_next_arg (tree);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
/* Expanders for the string and memory builtins.  */
static rtx expand_builtin_memcmp (tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_strcat (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode, int);
static rtx expand_builtin_memmove (tree, rtx, enum machine_mode);
static rtx expand_builtin_bcopy (tree);
static rtx expand_builtin_strcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
static rtx builtin_memset_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
static rtx expand_builtin_alloca (tree, rtx);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static rtx expand_builtin_fputs (tree, int, int);
static tree stabilize_va_list (tree, int);
static rtx expand_builtin_expect (tree, rtx);
/* Tree-level folders for builtins that can be simplified at compile time.  */
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_inf (tree, int);
static tree fold_builtin_nan (tree, tree, int);
static int validate_arglist (tree, ...);
static tree fold_trunc_transparent_mathfn (tree);
static bool readonly_data_expr (tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_cabs (tree, rtx);
static void init_builtin_dconsts (void);
static tree fold_builtin_cabs (tree, tree, tree);
157
158 /* Initialize mathematical constants for constant folding builtins.
159 These constants need to be given to at least 160 bits precision. */
160
161 static void
162 init_builtin_dconsts (void)
163 {
164 real_from_string (&dconstpi,
165 "3.1415926535897932384626433832795028841971693993751058209749445923078");
166 real_from_string (&dconste,
167 "2.7182818284590452353602874713526624977572470936999595749669676277241");
168
169 builtin_dconsts_init = true;
170 }
171
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

static int
get_pointer_alignment (tree exp, unsigned int max_align)
{
  unsigned int align, inner;

  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
    return 0;

  /* Start from the declared alignment of the pointed-to type, clamped
     to MAX_ALIGN.  */
  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  /* Peel conversions and constant offsets off EXP, refining ALIGN as we
     go; each case either loops again or returns the best answer found.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case NOP_EXPR:
	case CONVERT_EXPR:
	case NON_LVALUE_EXPR:
	  /* A conversion may point at a more strictly aligned type; take
	     the inner type's alignment if it is also a pointer.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
	    return align;

	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (! host_integerp (TREE_OPERAND (exp, 1), 1))
	    return align;

	  /* Halve MAX_ALIGN until the constant offset is a multiple of it,
	     then continue with the base pointer.  */
	  while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
		  & (max_align / BITS_PER_UNIT - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (exp) == FUNCTION_DECL)
	    align = FUNCTION_BOUNDARY;
	  else if (DECL_P (exp))
	    align = DECL_ALIGN (exp);
#ifdef CONSTANT_ALIGNMENT
	  /* 'c' is the tree code class for constants.  */
	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
	    align = CONSTANT_ALIGNMENT (exp, align);
#endif
	  return MIN (align, max_align);

	default:
	  return align;
	}
    }
}
239
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.

   Returns 0 (NULL_TREE) when the length cannot be determined at compile
   time.  */

static tree
c_strlen (tree src)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  /* MAX is the highest index a string character can occupy; the final
     array slot holds the terminating NUL appended by build_string.  */
  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return 0;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop (size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      warning ("offset outside bounds of constant string");
      return 0;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
310
311 /* Return a char pointer for a C string if it is a string constant
312 or sum of string constant and integer constant. */
313
314 static const char *
315 c_getstr (tree src)
316 {
317 tree offset_node;
318
319 src = string_constant (src, &offset_node);
320 if (src == 0)
321 return 0;
322
323 if (offset_node == 0)
324 return TREE_STRING_POINTER (src);
325 else if (!host_integerp (offset_node, 1)
326 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
327 return 0;
328
329 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
330 }
331
332 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
333 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
334
335 static rtx
336 c_readstr (const char *str, enum machine_mode mode)
337 {
338 HOST_WIDE_INT c[2];
339 HOST_WIDE_INT ch;
340 unsigned int i, j;
341
342 if (GET_MODE_CLASS (mode) != MODE_INT)
343 abort ();
344 c[0] = 0;
345 c[1] = 0;
346 ch = 1;
347 for (i = 0; i < GET_MODE_SIZE (mode); i++)
348 {
349 j = i;
350 if (WORDS_BIG_ENDIAN)
351 j = GET_MODE_SIZE (mode) - i - 1;
352 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
353 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
354 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
355 j *= BITS_PER_UNIT;
356 if (j > 2 * HOST_BITS_PER_WIDE_INT)
357 abort ();
358 if (ch)
359 ch = (unsigned char) str[i];
360 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
361 }
362 return immed_double_const (c[0], c[1], mode);
363 }
364
365 /* Cast a target constant CST to target CHAR and if that value fits into
366 host char type, return zero and put that value into variable pointed by
367 P. */
368
369 static int
370 target_char_cast (tree cst, char *p)
371 {
372 unsigned HOST_WIDE_INT val, hostval;
373
374 if (!host_integerp (cst, 1)
375 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
376 return 1;
377
378 val = tree_low_cst (cst, 1);
379 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
380 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
381
382 hostval = val;
383 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
384 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
385
386 if (val != hostval)
387 return 1;
388
389 *p = hostval;
390 return 0;
391 }
392
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count,
			    rtx tem)
{
  int i;

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the sparc, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the sparc, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_rtx_MEM (Pmode, tem);
      set_mem_alias_set (tem, get_frame_alias_set ());
      /* Copy into a pseudo so the load happens once per chain level.  */
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return tem;

  /* For __builtin_return_address, get the return address from that
     frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  /* Default: load the word one Pmode unit above the frame address.  */
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_rtx_MEM (Pmode, tem);
  set_mem_alias_set (tem, get_frame_alias_set ());
#endif
  return tem;
}
450
451 /* Alias set used for setjmp buffer. */
452 static HOST_WIDE_INT setjmp_alias_set = -1;
453
454 /* Construct the leading half of a __builtin_setjmp call. Control will
455 return to RECEIVER_LABEL. This is used directly by sjlj exception
456 handling code. */
457
458 void
459 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
460 {
461 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
462 rtx stack_save;
463 rtx mem;
464
465 if (setjmp_alias_set == -1)
466 setjmp_alias_set = new_alias_set ();
467
468 #ifdef POINTERS_EXTEND_UNSIGNED
469 if (GET_MODE (buf_addr) != Pmode)
470 buf_addr = convert_memory_address (Pmode, buf_addr);
471 #endif
472
473 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
474
475 emit_queue ();
476
477 /* We store the frame pointer and the address of receiver_label in
478 the buffer and use the rest of it for the stack save area, which
479 is machine-dependent. */
480
481 #ifndef BUILTIN_SETJMP_FRAME_VALUE
482 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
483 #endif
484
485 mem = gen_rtx_MEM (Pmode, buf_addr);
486 set_mem_alias_set (mem, setjmp_alias_set);
487 emit_move_insn (mem, BUILTIN_SETJMP_FRAME_VALUE);
488
489 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
490 set_mem_alias_set (mem, setjmp_alias_set);
491
492 emit_move_insn (validize_mem (mem),
493 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
494
495 stack_save = gen_rtx_MEM (sa_mode,
496 plus_constant (buf_addr,
497 2 * GET_MODE_SIZE (Pmode)));
498 set_mem_alias_set (stack_save, setjmp_alias_set);
499 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
500
501 /* If there is further processing to do, do it. */
502 #ifdef HAVE_builtin_setjmp_setup
503 if (HAVE_builtin_setjmp_setup)
504 emit_insn (gen_builtin_setjmp_setup (buf_addr));
505 #endif
506
507 /* Tell optimize_save_area_alloca that extra work is going to
508 need to go on during alloca. */
509 current_function_calls_setjmp = 1;
510
511 /* Set this so all the registers get saved in our frame; we need to be
512 able to copy the saved values for any registers from frames we unwind. */
513 current_function_has_nonlocal_label = 1;
514 }
515
/* Construct the trailing part of a __builtin_setjmp call.
   This is used directly by sjlj exception handling code.
   The code emitted here runs at the point a longjmp lands, so it must
   re-establish the frame pointer and (if needed) the argument pointer.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  The code below is from expand_end_bindings
     in stmt.c; see detailed documentation there.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the arg pointer is eliminable in favor of the hard frame
	 pointer, there is nothing to restore; search the elimination
	 table for that pair.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (virtual_incoming_args_rtx,
			  copy_to_reg (get_arg_pointer_save_area (cfun)));
	}
    }
#endif

  /* Prefer the target's own setjmp receiver; failing that, its
     nonlocal-goto receiver.  Note the if/else chains deliberately span
     the #ifdef boundaries, ending in an empty statement.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* @@@ This is a kludge.  Not all machine descriptions define a blockage
     insn, but we must not allow the code we just generated to be reordered
     by scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  So emit an ASM_INPUT to act as blockage
     insn.  */
  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
}
580
581 /* __builtin_setjmp is passed a pointer to an array of five words (not
582 all will be used on all machines). It operates similarly to the C
583 library function of the same name, but is more efficient. Much of
584 the code below (and for longjmp) is copied from the handling of
585 non-local gotos.
586
587 NOTE: This is intended for use by GNAT and the exception handling
588 scheme in the compiler and will only work in the method used by
589 them. */
590
591 static rtx
592 expand_builtin_setjmp (tree arglist, rtx target)
593 {
594 rtx buf_addr, next_lab, cont_lab;
595
596 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
597 return NULL_RTX;
598
599 if (target == 0 || GET_CODE (target) != REG
600 || REGNO (target) < FIRST_PSEUDO_REGISTER)
601 target = gen_reg_rtx (TYPE_MODE (integer_type_node));
602
603 buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
604
605 next_lab = gen_label_rtx ();
606 cont_lab = gen_label_rtx ();
607
608 expand_builtin_setjmp_setup (buf_addr, next_lab);
609
610 /* Set TARGET to zero and branch to the continue label. */
611 emit_move_insn (target, const0_rtx);
612 emit_jump_insn (gen_jump (cont_lab));
613 emit_barrier ();
614 emit_label (next_lab);
615
616 expand_builtin_setjmp_receiver (next_lab);
617
618 /* Set TARGET to one. */
619 emit_move_insn (target, const1_rtx);
620 emit_label (cont_lab);
621
622 /* Tell flow about the strange goings on. Putting `next_lab' on
623 `nonlocal_goto_handler_labels' to indicates that function
624 calls may traverse the arc back to this label. */
625
626 current_function_has_nonlocal_label = 1;
627 nonlocal_goto_handler_labels
628 = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);
629
630 return target;
631 }
632
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.

   NOTE: This is intended for use by GNAT and the exception handling
   scheme in the compiler and will only work in the method used by
   them.  */

void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (buf_addr) != Pmode)
    buf_addr = convert_memory_address (Pmode, buf_addr);
#endif

  buf_addr = force_reg (Pmode, buf_addr);

  /* We used to store value in static_chain_rtx, but that fails if pointers
     are smaller than integers.  We instead require that the user must pass
     a second argument of 1, because that is what builtin_setjmp will
     return.  This also makes EH slightly more efficient, since we are no
     longer copying around a value that we don't care about.  */
  if (value != const1_rtx)
    abort ();

  current_function_calls_longjmp = 1;

  /* Remember the last insn before expansion, so the jump-marking loop
     below can tell when it has scanned everything we emitted.  */
  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      /* Buffer layout matches expand_builtin_setjmp_setup: word 0 is the
	 frame pointer, word 1 the receiver label, the rest the saved
	 stack state.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	  /* Keep the frame and stack pointers live up to the jump.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (insn == last)
	abort ();
      if (GET_CODE (insn) == JUMP_INSN)
	{
	  REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
					      REG_NOTES (insn));
	  break;
	}
      else if (GET_CODE (insn) == CALL_INSN)
	break;
    }
}
726
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree arglist)
{
  tree arg0, arg1, arg2;
  rtx op0, op1, op2;

  if (!validate_arglist (arglist, POINTER_TYPE, 0))
    return;

  arg0 = TREE_VALUE (arglist);
  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  if (TREE_CHAIN (arglist))
    {
      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
      if (TREE_CHAIN (TREE_CHAIN (arglist)))
	arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      else
	arg2 = build_int_2 (3, 0);
    }
  else
    {
      arg1 = integer_zero_node;
      arg2 = build_int_2 (3, 0);
    }

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second arg to `__builtin_prefetch' must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning ("invalid second arg to __builtin_prefetch; using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third arg to `__builtin_prefetch' must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning ("invalid third arg to __builtin_prefetch; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      /* If the address fails the prefetch pattern's operand predicate or
	 is not in Pmode, convert and force it into a register.  */
      if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
	   (op0,
	    insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
	  || (GET_MODE (op0) != Pmode))
	{
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (op0) != Pmode)
	    op0 = convert_memory_address (Pmode, op0);
#endif
	  op0 = force_reg (Pmode, op0);
	}
      emit_insn (gen_prefetch (op0, op1, op2));
    }
  else
#endif
    op0 = protect_from_queue (op0, 0);
  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  (This check is outside
     the if/else above, so it runs whether or not a prefetch insn was
     emitted.)  */
  if (GET_CODE (op0) != MEM && side_effects_p (op0))
    emit_insn (op0);
}
813
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movstrsi, ..).  */

static rtx
get_memory_rtx (tree exp)
{
  rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_SUM);
  rtx mem;

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (addr) != Pmode)
    addr = convert_memory_address (Pmode, addr);
#endif

  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     If it is an ADDR_EXPR, use the operand.  Otherwise, dereference it if
     we can.  First remove any nops.  */
  while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
	  || TREE_CODE (exp) == NON_LVALUE_EXPR)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      exp = TREE_OPERAND (exp, 0);
      set_mem_attributes (mem, exp, 0);
    }
  else if (POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      /* NOTE(review): the INDIRECT_REF built here is never used -- EXP is
	 not referenced again before the return.  Presumably a
	 set_mem_attributes call on the dereferenced expression was
	 intended; confirm against upstream history.  */
      exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
      /* memcpy, memset and other builtin stringops can alias with anything.  */
      set_mem_alias_set (mem, 0);
    }

  return mem;
}
852 \f
/* Built-in functions to perform an untyped call and return.  */

/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];

/* Return the offset of register REGNO into the block returned by
   __builtin_apply_args.  This is not declared static, since it is
   needed in objc-act.c.  */

int
apply_args_register_offset (int regno)
{
  /* Make sure apply_args_reg_offset[] has been filled in; the result of
     apply_args_size is cached, so repeated calls are cheap.  */
  apply_args_size ();

  /* Arguments are always put in outgoing registers (in the argument
     block) if such make sense.  */
#ifdef OUTGOING_REGNO
  regno = OUTGOING_REGNO (regno);
#endif
  return apply_args_reg_offset[regno];
}
891
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (struct_value_rtx)
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    /* Search for the proper mode for copying this register's
	       value.  I'm not sure this is right, but it works so far.  */
	    enum machine_mode best_mode = VOIDmode;

	    /* Prefer the widest integer mode the register can hold in a
	       single hard register.  */
	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		 mode != VOIDmode;
		 mode = GET_MODE_WIDER_MODE (mode))
	      if (HARD_REGNO_MODE_OK (regno, mode)
		  && HARD_REGNO_NREGS (regno, mode) == 1)
		best_mode = mode;

	    /* Otherwise fall back to float, vector-float, then vector-int
	       modes for which a move insn exists.  */
	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && have_insn_for (SET, mode))
		  best_mode = mode;

	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && have_insn_for (SET, mode))
		  best_mode = mode;

	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && have_insn_for (SET, mode))
		  best_mode = mode;

	    mode = best_mode;
	    if (mode == VOIDmode)
	      abort ();

	    /* Round SIZE up to this mode's alignment, record where this
	       register's bytes land, and account for them.  */
	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    apply_args_reg_offset[regno] = size;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	    apply_args_reg_offset[regno] = 0;
	  }
    }
  return size;
}
971
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  Like apply_args_size, the result
   is memoized and computed only once.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_VALUE_REGNO_P (regno))
          {
            /* Search for the proper mode for copying this register's
               value.  I'm not sure this is right, but it works so far.  */
            enum machine_mode best_mode = VOIDmode;

            /* NOTE(review): this integer-mode scan stops at TImode,
               unlike the equivalent loop in apply_args_size which runs
               to VOIDmode — presumably to avoid picking a multi-word
               return mode; confirm before "fixing" the asymmetry.  */
            for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
                 mode != TImode;
                 mode = GET_MODE_WIDER_MODE (mode))
              if (HARD_REGNO_MODE_OK (regno, mode))
                best_mode = mode;

            /* Fall back to float, then vector modes, requiring a move
               pattern for the mode.  */
            if (best_mode == VOIDmode)
              for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
                   mode != VOIDmode;
                   mode = GET_MODE_WIDER_MODE (mode))
                if (HARD_REGNO_MODE_OK (regno, mode)
                    && have_insn_for (SET, mode))
                  best_mode = mode;

            if (best_mode == VOIDmode)
              for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
                   mode != VOIDmode;
                   mode = GET_MODE_WIDER_MODE (mode))
                if (HARD_REGNO_MODE_OK (regno, mode)
                    && have_insn_for (SET, mode))
                  best_mode = mode;

            if (best_mode == VOIDmode)
              for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
                   mode != VOIDmode;
                   mode = GET_MODE_WIDER_MODE (mode))
                if (HARD_REGNO_MODE_OK (regno, mode)
                    && have_insn_for (SET, mode))
                  best_mode = mode;

            mode = best_mode;
            if (mode == VOIDmode)
              abort ();

            /* Align, then account for this register's slot.  */
            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;
          }
        else
          apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
1045
1046 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1047 /* Create a vector describing the result block RESULT. If SAVEP is true,
1048 the result block is used to save the values; otherwise it is used to
1049 restore the values. */
1050
1051 static rtx
1052 result_vector (int savep, rtx result)
1053 {
1054 int regno, size, align, nelts;
1055 enum machine_mode mode;
1056 rtx reg, mem;
1057 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1058
1059 size = nelts = 0;
1060 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1061 if ((mode = apply_result_mode[regno]) != VOIDmode)
1062 {
1063 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1064 if (size % align != 0)
1065 size = CEIL (size, align) * align;
1066 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1067 mem = adjust_address (result, mode, size);
1068 savevec[nelts++] = (savep
1069 ? gen_rtx_SET (VOIDmode, mem, reg)
1070 : gen_rtx_SET (VOIDmode, reg, mem));
1071 size += GET_MODE_SIZE (mode);
1072 }
1073 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1074 }
1075 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1076
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  Emits the moves
   into the current sequence and returns a pseudo holding the address
   of the saved block.  Block layout: [arg pointer][struct value addr,
   if any][argument registers, aligned per apply_args_size].  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers;
  int size, align, regno;
  enum machine_mode mode;

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.
     The offsets here must track the layout apply_args_size computed.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        rtx tem;

        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        /* Read the incoming (inbound) register, since we are saving
           the values this function received.  */
        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

        emit_move_insn (adjust_address (registers, mode, size), tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block, at offset 0.  */
  emit_move_insn (adjust_address (registers, Pmode, 0),
                  copy_to_reg (virtual_incoming_args_rtx));
  /* Reset SIZE to write the next header slot just past the arg pointer.  */
  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_value_incoming_rtx)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
                      copy_to_reg (struct_value_incoming_rtx));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1129
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    /* Expand the save code into a detached sequence so it can be
       relocated to the function start below.  */
    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  */
    push_topmost_sequence ();
    emit_insn_before (seq, NEXT_INSN (get_insns ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1168
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.
   FUNCTION is an rtx for the called function's address, ARGUMENTS is
   the address of a block built by __builtin_apply_args, and ARGSIZE is
   the size in bytes of the stack-argument data to copy.  Returns a
   pseudo holding the address of the block in which the return-value
   registers were saved.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (arguments) != Pmode)
    arguments = convert_memory_address (Pmode, arguments);
#endif

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  /* On an upward-growing stack the saved pointer is past the arguments,
     so back up over them.  */
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
                                       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Push a block of memory onto the stack to store the memory arguments.
     Save the address in a register, and copy the memory arguments.  ??? I
     haven't figured out how the calling convention macros effect this,
     but it's likely that the source and/or destination addresses in
     the block copy will need updating in machine specific ways.  */
  dest = allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  Calling apply_args_size here ensures
     apply_args_mode is initialized before the loop below reads it.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, regno);
        emit_move_insn (reg, adjust_address (arguments, mode, size));
        use_reg (&call_fusage, reg);
        size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  It lives just past the arg pointer
     in the block, hence resetting SIZE here.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value_rtx, value);
      if (GET_CODE (struct_value_rtx) == REG)
        use_reg (&call_fusage, struct_value_rtx);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL_TREE, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
                                      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
         express a call that sets more than one return register using
         call_value; use untyped_call for that.  In fact, untyped_call
         only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if ((mode = apply_result_mode[regno]) != VOIDmode)
          {
            if (valreg)
              abort (); /* HAVE_untyped_call required.  */
            valreg = gen_rtx_REG (mode, regno);
          }

      emit_call_insn (GEN_CALL_VALUE (valreg,
                                      gen_rtx_MEM (FUNCTION_MODE, function),
                                      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  return copy_addr_to_reg (XEXP (result, 0));
}
1324
/* Perform an untyped return.  RESULT is the address of a block, laid
   out as by apply_result_size, holding the values to place in the
   return registers before returning from the current function.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (result) != Pmode)
    result = convert_memory_address (Pmode, result);
#endif

  /* Ensure apply_result_mode is initialized before it is read below.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
        emit_move_insn (reg, adjust_address (result, mode, size));

        /* Accumulate USE insns in a detached sequence so they can all
           be emitted just before the return, keeping the loads live.  */
        push_to_sequence (call_fusage);
        emit_insn (gen_rtx_USE (VOIDmode, reg));
        call_fusage = get_insns ();
        end_sequence ();
        size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_null_return ();
}
1377
1378 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1379
1380 static enum type_class
1381 type_to_class (tree type)
1382 {
1383 switch (TREE_CODE (type))
1384 {
1385 case VOID_TYPE: return void_type_class;
1386 case INTEGER_TYPE: return integer_type_class;
1387 case CHAR_TYPE: return char_type_class;
1388 case ENUMERAL_TYPE: return enumeral_type_class;
1389 case BOOLEAN_TYPE: return boolean_type_class;
1390 case POINTER_TYPE: return pointer_type_class;
1391 case REFERENCE_TYPE: return reference_type_class;
1392 case OFFSET_TYPE: return offset_type_class;
1393 case REAL_TYPE: return real_type_class;
1394 case COMPLEX_TYPE: return complex_type_class;
1395 case FUNCTION_TYPE: return function_type_class;
1396 case METHOD_TYPE: return method_type_class;
1397 case RECORD_TYPE: return record_type_class;
1398 case UNION_TYPE:
1399 case QUAL_UNION_TYPE: return union_type_class;
1400 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1401 ? string_type_class : array_type_class);
1402 case SET_TYPE: return set_type_class;
1403 case FILE_TYPE: return file_type_class;
1404 case LANG_TYPE: return lang_type_class;
1405 default: return no_type_class;
1406 }
1407 }
1408
1409 /* Expand a call to __builtin_classify_type with arguments found in
1410 ARGLIST. */
1411
1412 static rtx
1413 expand_builtin_classify_type (tree arglist)
1414 {
1415 if (arglist != 0)
1416 return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
1417 return GEN_INT (no_type_class);
1418 }
1419
1420 /* Expand expression EXP, which is a call to __builtin_constant_p. */
1421
1422 static rtx
1423 expand_builtin_constant_p (tree arglist, enum machine_mode target_mode)
1424 {
1425 rtx tmp;
1426
1427 if (arglist == 0)
1428 return const0_rtx;
1429 arglist = TREE_VALUE (arglist);
1430
1431 /* We have taken care of the easy cases during constant folding. This
1432 case is not obvious, so emit (constant_p_rtx (ARGLIST)) and let CSE
1433 get a chance to see if it can deduce whether ARGLIST is constant. */
1434
1435 current_function_calls_constant_p = 1;
1436
1437 tmp = expand_expr (arglist, NULL_RTX, VOIDmode, 0);
1438 tmp = gen_rtx_CONSTANT_P_RTX (target_mode, tmp);
1439 return tmp;
1440 }
1441
1442 /* Return mathematic function equivalent to FN but operating directly on TYPE,
1443 if available. */
1444 tree
1445 mathfn_built_in (tree type, enum built_in_function fn)
1446 {
1447 enum built_in_function fcode = NOT_BUILT_IN;
1448 if (TYPE_MODE (type) == TYPE_MODE (double_type_node))
1449 switch (fn)
1450 {
1451 case BUILT_IN_SQRT:
1452 case BUILT_IN_SQRTF:
1453 case BUILT_IN_SQRTL:
1454 fcode = BUILT_IN_SQRT;
1455 break;
1456 case BUILT_IN_SIN:
1457 case BUILT_IN_SINF:
1458 case BUILT_IN_SINL:
1459 fcode = BUILT_IN_SIN;
1460 break;
1461 case BUILT_IN_COS:
1462 case BUILT_IN_COSF:
1463 case BUILT_IN_COSL:
1464 fcode = BUILT_IN_COS;
1465 break;
1466 case BUILT_IN_EXP:
1467 case BUILT_IN_EXPF:
1468 case BUILT_IN_EXPL:
1469 fcode = BUILT_IN_EXP;
1470 break;
1471 case BUILT_IN_LOG:
1472 case BUILT_IN_LOGF:
1473 case BUILT_IN_LOGL:
1474 fcode = BUILT_IN_LOG;
1475 break;
1476 case BUILT_IN_TAN:
1477 case BUILT_IN_TANF:
1478 case BUILT_IN_TANL:
1479 fcode = BUILT_IN_TAN;
1480 break;
1481 case BUILT_IN_ATAN:
1482 case BUILT_IN_ATANF:
1483 case BUILT_IN_ATANL:
1484 fcode = BUILT_IN_ATAN;
1485 break;
1486 case BUILT_IN_FLOOR:
1487 case BUILT_IN_FLOORF:
1488 case BUILT_IN_FLOORL:
1489 fcode = BUILT_IN_FLOOR;
1490 break;
1491 case BUILT_IN_CEIL:
1492 case BUILT_IN_CEILF:
1493 case BUILT_IN_CEILL:
1494 fcode = BUILT_IN_CEIL;
1495 break;
1496 case BUILT_IN_TRUNC:
1497 case BUILT_IN_TRUNCF:
1498 case BUILT_IN_TRUNCL:
1499 fcode = BUILT_IN_TRUNC;
1500 break;
1501 case BUILT_IN_ROUND:
1502 case BUILT_IN_ROUNDF:
1503 case BUILT_IN_ROUNDL:
1504 fcode = BUILT_IN_ROUND;
1505 break;
1506 case BUILT_IN_NEARBYINT:
1507 case BUILT_IN_NEARBYINTF:
1508 case BUILT_IN_NEARBYINTL:
1509 fcode = BUILT_IN_NEARBYINT;
1510 break;
1511 default:
1512 abort ();
1513 }
1514 else if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
1515 switch (fn)
1516 {
1517 case BUILT_IN_SQRT:
1518 case BUILT_IN_SQRTF:
1519 case BUILT_IN_SQRTL:
1520 fcode = BUILT_IN_SQRTF;
1521 break;
1522 case BUILT_IN_SIN:
1523 case BUILT_IN_SINF:
1524 case BUILT_IN_SINL:
1525 fcode = BUILT_IN_SINF;
1526 break;
1527 case BUILT_IN_COS:
1528 case BUILT_IN_COSF:
1529 case BUILT_IN_COSL:
1530 fcode = BUILT_IN_COSF;
1531 break;
1532 case BUILT_IN_EXP:
1533 case BUILT_IN_EXPF:
1534 case BUILT_IN_EXPL:
1535 fcode = BUILT_IN_EXPF;
1536 break;
1537 case BUILT_IN_LOG:
1538 case BUILT_IN_LOGF:
1539 case BUILT_IN_LOGL:
1540 fcode = BUILT_IN_LOGF;
1541 break;
1542 case BUILT_IN_TAN:
1543 case BUILT_IN_TANF:
1544 case BUILT_IN_TANL:
1545 fcode = BUILT_IN_TANF;
1546 break;
1547 case BUILT_IN_ATAN:
1548 case BUILT_IN_ATANF:
1549 case BUILT_IN_ATANL:
1550 fcode = BUILT_IN_ATANF;
1551 break;
1552 case BUILT_IN_FLOOR:
1553 case BUILT_IN_FLOORF:
1554 case BUILT_IN_FLOORL:
1555 fcode = BUILT_IN_FLOORF;
1556 break;
1557 case BUILT_IN_CEIL:
1558 case BUILT_IN_CEILF:
1559 case BUILT_IN_CEILL:
1560 fcode = BUILT_IN_CEILF;
1561 break;
1562 case BUILT_IN_TRUNC:
1563 case BUILT_IN_TRUNCF:
1564 case BUILT_IN_TRUNCL:
1565 fcode = BUILT_IN_TRUNCF;
1566 break;
1567 case BUILT_IN_ROUND:
1568 case BUILT_IN_ROUNDF:
1569 case BUILT_IN_ROUNDL:
1570 fcode = BUILT_IN_ROUNDF;
1571 break;
1572 case BUILT_IN_NEARBYINT:
1573 case BUILT_IN_NEARBYINTF:
1574 case BUILT_IN_NEARBYINTL:
1575 fcode = BUILT_IN_NEARBYINTF;
1576 break;
1577 default:
1578 abort ();
1579 }
1580 else if (TYPE_MODE (type) == TYPE_MODE (long_double_type_node))
1581 switch (fn)
1582 {
1583 case BUILT_IN_SQRT:
1584 case BUILT_IN_SQRTF:
1585 case BUILT_IN_SQRTL:
1586 fcode = BUILT_IN_SQRTL;
1587 break;
1588 case BUILT_IN_SIN:
1589 case BUILT_IN_SINF:
1590 case BUILT_IN_SINL:
1591 fcode = BUILT_IN_SINL;
1592 break;
1593 case BUILT_IN_COS:
1594 case BUILT_IN_COSF:
1595 case BUILT_IN_COSL:
1596 fcode = BUILT_IN_COSL;
1597 break;
1598 case BUILT_IN_EXP:
1599 case BUILT_IN_EXPF:
1600 case BUILT_IN_EXPL:
1601 fcode = BUILT_IN_EXPL;
1602 break;
1603 case BUILT_IN_LOG:
1604 case BUILT_IN_LOGF:
1605 case BUILT_IN_LOGL:
1606 fcode = BUILT_IN_LOGL;
1607 break;
1608 case BUILT_IN_TAN:
1609 case BUILT_IN_TANF:
1610 case BUILT_IN_TANL:
1611 fcode = BUILT_IN_TANL;
1612 break;
1613 case BUILT_IN_ATAN:
1614 case BUILT_IN_ATANF:
1615 case BUILT_IN_ATANL:
1616 fcode = BUILT_IN_ATANL;
1617 break;
1618 case BUILT_IN_FLOOR:
1619 case BUILT_IN_FLOORF:
1620 case BUILT_IN_FLOORL:
1621 fcode = BUILT_IN_FLOORL;
1622 break;
1623 case BUILT_IN_CEIL:
1624 case BUILT_IN_CEILF:
1625 case BUILT_IN_CEILL:
1626 fcode = BUILT_IN_CEILL;
1627 break;
1628 case BUILT_IN_TRUNC:
1629 case BUILT_IN_TRUNCF:
1630 case BUILT_IN_TRUNCL:
1631 fcode = BUILT_IN_TRUNCL;
1632 break;
1633 case BUILT_IN_ROUND:
1634 case BUILT_IN_ROUNDF:
1635 case BUILT_IN_ROUNDL:
1636 fcode = BUILT_IN_ROUNDL;
1637 break;
1638 case BUILT_IN_NEARBYINT:
1639 case BUILT_IN_NEARBYINTF:
1640 case BUILT_IN_NEARBYINTL:
1641 fcode = BUILT_IN_NEARBYINTL;
1642 break;
1643 default:
1644 abort ();
1645 }
1646 return implicit_built_in_decls[fcode];
1647 }
1648
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  Comparing TARGET with itself
     succeeds (jumps to LAB) for every value except NaN, since
     NaN != NaN; only the NaN case falls through to the errno code.  */
  emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
                           0, lab);

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
        = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      emit_label (lab);
      return;
    }
#endif

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
1686
1687
/* Expand a call to one of the builtin math functions (sin, cos, or sqrt).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  enum machine_mode mode;
  bool errno_set = false;
  tree arg;

  /* Exactly one REAL_TYPE argument is required.  */
  if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
    return 0;

  arg = TREE_VALUE (arglist);

  /* Pick the optab, and note which functions may need errno handling:
     sqrt of a possibly-negative argument, and exp/log always.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
      builtin_optab = sin_optab; break;
    case BUILT_IN_COS:
    case BUILT_IN_COSF:
    case BUILT_IN_COSL:
      builtin_optab = cos_optab; break;
    case BUILT_IN_SQRT:
    case BUILT_IN_SQRTF:
    case BUILT_IN_SQRTL:
      errno_set = ! tree_expr_nonnegative_p (arg);
      builtin_optab = sqrt_optab;
      break;
    case BUILT_IN_EXP:
    case BUILT_IN_EXPF:
    case BUILT_IN_EXPL:
      errno_set = true; builtin_optab = exp_optab; break;
    case BUILT_IN_LOG:
    case BUILT_IN_LOGF:
    case BUILT_IN_LOGL:
      errno_set = true; builtin_optab = log_optab; break;
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      builtin_optab = tan_optab; break;
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
      builtin_optab = atan_optab; break;
    case BUILT_IN_FLOOR:
    case BUILT_IN_FLOORF:
    case BUILT_IN_FLOORL:
      builtin_optab = floor_optab; break;
    case BUILT_IN_CEIL:
    case BUILT_IN_CEILF:
    case BUILT_IN_CEILL:
      builtin_optab = ceil_optab; break;
    case BUILT_IN_TRUNC:
    case BUILT_IN_TRUNCF:
    case BUILT_IN_TRUNCL:
      builtin_optab = trunc_optab; break;
    case BUILT_IN_ROUND:
    case BUILT_IN_ROUNDF:
    case BUILT_IN_ROUNDL:
      builtin_optab = round_optab; break;
    case BUILT_IN_NEARBYINT:
    case BUILT_IN_NEARBYINTF:
    case BUILT_IN_NEARBYINTL:
      builtin_optab = nearbyint_optab; break;
    default:
      abort ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));
  target = gen_reg_rtx (mode);

  /* errno handling is only needed when -fmath-errno is on and the mode
     can represent NaN (the check compares the result with itself).  */
  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Stabilize and compute the argument.  */
  if (errno_set)
    switch (TREE_CODE (arg))
      {
      case VAR_DECL:
      case PARM_DECL:
      case SAVE_EXPR:
      case REAL_CST:
        /* Already safe to expand twice.  */
        break;

      default:
        /* Wrap the computation of the argument in a SAVE_EXPR, as we
           need to expand the argument again in expand_errno_check.  This
           way, we will not perform side-effects more than once.  */
        arg = save_expr (arg);
        arglist = build_tree_list (NULL_TREE, arg);
        exp = build_function_call_expr (fndecl, arglist);
        break;
      }

  op0 = expand_expr (arg, subtarget, VOIDmode, 0);

  emit_queue ();
  start_sequence ();

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (mode, builtin_optab, op0, target, 0);

  /* If we were unable to expand via the builtin, stop the
     sequence (without outputting the insns) and return 0, causing
     a call to the library function.  */
  if (target == 0)
    {
      end_sequence ();
      return 0;
    }

  if (errno_set)
    expand_errno_check (exp, target);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return target;
}
1821
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, insns;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0, arg1, temp;
  enum machine_mode mode;
  bool errno_set = true;
  bool stable = true;

  /* Exactly two REAL_TYPE arguments are required.  */
  if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return 0;

  arg0 = TREE_VALUE (arglist);
  arg1 = TREE_VALUE (TREE_CHAIN (arglist));

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_POW:
    case BUILT_IN_POWF:
    case BUILT_IN_POWL:
      builtin_optab = pow_optab; break;
    case BUILT_IN_ATAN2:
    case BUILT_IN_ATAN2F:
    case BUILT_IN_ATAN2L:
      builtin_optab = atan2_optab; break;
    default:
      abort ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));
  target = gen_reg_rtx (mode);

  /* errno handling is only needed when -fmath-errno is on and the mode
     honors NaNs.  */
  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Stabilize the arguments: wrap any argument that is not trivially
     re-expandable in a SAVE_EXPR, and when anything changed rebuild
     EXP so expand_errno_check re-expands the stabilized call.  TEMP
     carries the (possibly rebuilt) tail of the argument list while
     ARG1 is processed before ARG0.  */
  if (errno_set)
    {
      switch (TREE_CODE (arg1))
        {
        case VAR_DECL:
        case PARM_DECL:
        case SAVE_EXPR:
        case REAL_CST:
          temp = TREE_CHAIN (arglist);
          break;

        default:
          stable = false;
          arg1 = save_expr (arg1);
          temp = build_tree_list (NULL_TREE, arg1);
          break;
        }

      switch (TREE_CODE (arg0))
        {
        case VAR_DECL:
        case PARM_DECL:
        case SAVE_EXPR:
        case REAL_CST:
          /* ARG0 is stable, but if ARG1 was rewrapped the list must
             still be rebuilt around the new tail.  */
          if (! stable)
            arglist = build_tree_list (temp, arg0);
          break;

        default:
          stable = false;
          arg0 = save_expr (arg0);
          arglist = build_tree_list (temp, arg0);
          break;
        }

      if (! stable)
        exp = build_function_call_expr (fndecl, arglist);
    }

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, 0, VOIDmode, 0);

  emit_queue ();
  start_sequence ();

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_binop (mode, builtin_optab, op0, op1,
                         target, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the
     sequence (without outputting the insns) and return 0, causing
     a call to the library function.  */
  if (target == 0)
    {
      end_sequence ();
      return 0;
    }

  if (errno_set)
    expand_errno_check (exp, target);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return target;
}
1938
/* Expand expression EXP which is a call to the strlen builtin.  Return 0
   if we failed the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  TARGET_MODE is the
   mode the result is wanted in.  */

static rtx
expand_builtin_strlen (tree arglist, rtx target,
                       enum machine_mode target_mode)
{
  if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
    return 0;
  else
    {
      rtx pat;
      tree len, src = TREE_VALUE (arglist);
      rtx result, src_reg, char_rtx, before_strlen;
      enum machine_mode insn_mode = target_mode, char_mode;
      enum insn_code icode = CODE_FOR_nothing;
      int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src);
      if (len)
        return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
        return 0;

      /* Bail out if we can't compute strlen in the right mode.  Try
         TARGET_MODE first, then successively wider modes.  */
      while (insn_mode != VOIDmode)
        {
          icode = strlen_optab->handlers[(int) insn_mode].insn_code;
          if (icode != CODE_FOR_nothing)
            break;

          insn_mode = GET_MODE_WIDER_MODE (insn_mode);
        }
      if (insn_mode == VOIDmode)
        return 0;

      /* Make a place to write the result of the instruction.  Reuse
         TARGET only if it is a hard... no: a pseudo register of the
         right mode; otherwise allocate a fresh pseudo.  */
      result = target;
      if (! (result != 0
             && GET_CODE (result) == REG
             && GET_MODE (result) == insn_mode
             && REGNO (result) >= FIRST_PSEUDO_REGISTER))
        result = gen_reg_rtx (insn_mode);

      /* Make a place to hold the source address.  We will not expand
         the actual source until we are sure that the expansion will
         not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
         source operand later.  */
      before_strlen = get_last_insn ();

      /* The searched-for character operand is 0 (NUL); copy it to a
         register if the pattern's predicate requires one.  */
      char_rtx = const0_rtx;
      char_mode = insn_data[(int) icode].operand[2].mode;
      if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
                                                            char_mode))
        char_rtx = copy_to_mode_reg (char_mode, char_rtx);

      pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
                             char_rtx, GEN_INT (align));
      if (! pat)
        return 0;
      emit_insn (pat);

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = memory_address (BLKmode,
                            expand_expr (src, src_reg, ptr_mode, EXPAND_SUM));
      if (pat != src_reg)
        emit_move_insn (src_reg, pat);
      pat = get_insns ();
      end_sequence ();

      /* Splice the address computation in just before the strlen insn
         emitted above (or at the start of the function if there was no
         prior insn).  */
      if (before_strlen)
        emit_insn_after (pat, before_strlen);
      else
        emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == target_mode)
        target = result;
      else if (target != 0)
        convert_move (target, result, 0);
      else
        target = convert_to_mode (target_mode, result, 0);

      return target;
    }
}
2035
2036 /* Expand a call to the strstr builtin. Return 0 if we failed the
2037 caller should emit a normal call, otherwise try to get the result
2038 in TARGET, if convenient (and in mode MODE if that's convenient). */
2039
2040 static rtx
2041 expand_builtin_strstr (tree arglist, rtx target, enum machine_mode mode)
2042 {
2043 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2044 return 0;
2045 else
2046 {
2047 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2048 tree fn;
2049 const char *p1, *p2;
2050
2051 p2 = c_getstr (s2);
2052 if (p2 == NULL)
2053 return 0;
2054
2055 p1 = c_getstr (s1);
2056 if (p1 != NULL)
2057 {
2058 const char *r = strstr (p1, p2);
2059
2060 if (r == NULL)
2061 return const0_rtx;
2062
2063 /* Return an offset into the constant string argument. */
2064 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2065 s1, ssize_int (r - p1))),
2066 target, mode, EXPAND_NORMAL);
2067 }
2068
2069 if (p2[0] == '\0')
2070 return expand_expr (s1, target, mode, EXPAND_NORMAL);
2071
2072 if (p2[1] != '\0')
2073 return 0;
2074
2075 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2076 if (!fn)
2077 return 0;
2078
2079 /* New argument list transforming strstr(s1, s2) to
2080 strchr(s1, s2[0]). */
2081 arglist =
2082 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
2083 arglist = tree_cons (NULL_TREE, s1, arglist);
2084 return expand_expr (build_function_call_expr (fn, arglist),
2085 target, mode, EXPAND_NORMAL);
2086 }
2087 }
2088
2089 /* Expand a call to the strchr builtin. Return 0 if we failed the
2090 caller should emit a normal call, otherwise try to get the result
2091 in TARGET, if convenient (and in mode MODE if that's convenient). */
2092
2093 static rtx
2094 expand_builtin_strchr (tree arglist, rtx target, enum machine_mode mode)
2095 {
2096 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2097 return 0;
2098 else
2099 {
2100 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2101 const char *p1;
2102
2103 if (TREE_CODE (s2) != INTEGER_CST)
2104 return 0;
2105
2106 p1 = c_getstr (s1);
2107 if (p1 != NULL)
2108 {
2109 char c;
2110 const char *r;
2111
2112 if (target_char_cast (s2, &c))
2113 return 0;
2114
2115 r = strchr (p1, c);
2116
2117 if (r == NULL)
2118 return const0_rtx;
2119
2120 /* Return an offset into the constant string argument. */
2121 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2122 s1, ssize_int (r - p1))),
2123 target, mode, EXPAND_NORMAL);
2124 }
2125
2126 /* FIXME: Should use here strchrM optab so that ports can optimize
2127 this. */
2128 return 0;
2129 }
2130 }
2131
2132 /* Expand a call to the strrchr builtin. Return 0 if we failed the
2133 caller should emit a normal call, otherwise try to get the result
2134 in TARGET, if convenient (and in mode MODE if that's convenient). */
2135
2136 static rtx
2137 expand_builtin_strrchr (tree arglist, rtx target, enum machine_mode mode)
2138 {
2139 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2140 return 0;
2141 else
2142 {
2143 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2144 tree fn;
2145 const char *p1;
2146
2147 if (TREE_CODE (s2) != INTEGER_CST)
2148 return 0;
2149
2150 p1 = c_getstr (s1);
2151 if (p1 != NULL)
2152 {
2153 char c;
2154 const char *r;
2155
2156 if (target_char_cast (s2, &c))
2157 return 0;
2158
2159 r = strrchr (p1, c);
2160
2161 if (r == NULL)
2162 return const0_rtx;
2163
2164 /* Return an offset into the constant string argument. */
2165 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2166 s1, ssize_int (r - p1))),
2167 target, mode, EXPAND_NORMAL);
2168 }
2169
2170 if (! integer_zerop (s2))
2171 return 0;
2172
2173 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2174 if (!fn)
2175 return 0;
2176
2177 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
2178 return expand_expr (build_function_call_expr (fn, arglist),
2179 target, mode, EXPAND_NORMAL);
2180 }
2181 }
2182
2183 /* Expand a call to the strpbrk builtin. Return 0 if we failed the
2184 caller should emit a normal call, otherwise try to get the result
2185 in TARGET, if convenient (and in mode MODE if that's convenient). */
2186
2187 static rtx
2188 expand_builtin_strpbrk (tree arglist, rtx target, enum machine_mode mode)
2189 {
2190 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2191 return 0;
2192 else
2193 {
2194 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2195 tree fn;
2196 const char *p1, *p2;
2197
2198 p2 = c_getstr (s2);
2199 if (p2 == NULL)
2200 return 0;
2201
2202 p1 = c_getstr (s1);
2203 if (p1 != NULL)
2204 {
2205 const char *r = strpbrk (p1, p2);
2206
2207 if (r == NULL)
2208 return const0_rtx;
2209
2210 /* Return an offset into the constant string argument. */
2211 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2212 s1, ssize_int (r - p1))),
2213 target, mode, EXPAND_NORMAL);
2214 }
2215
2216 if (p2[0] == '\0')
2217 {
2218 /* strpbrk(x, "") == NULL.
2219 Evaluate and ignore the arguments in case they had
2220 side-effects. */
2221 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2222 return const0_rtx;
2223 }
2224
2225 if (p2[1] != '\0')
2226 return 0; /* Really call strpbrk. */
2227
2228 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2229 if (!fn)
2230 return 0;
2231
2232 /* New argument list transforming strpbrk(s1, s2) to
2233 strchr(s1, s2[0]). */
2234 arglist =
2235 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
2236 arglist = tree_cons (NULL_TREE, s1, arglist);
2237 return expand_expr (build_function_call_expr (fn, arglist),
2238 target, mode, EXPAND_NORMAL);
2239 }
2240 }
2241
2242 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2243 bytes from constant string DATA + OFFSET and return it as target
2244 constant. */
2245
2246 static rtx
2247 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2248 enum machine_mode mode)
2249 {
2250 const char *str = (const char *) data;
2251
2252 if (offset < 0
2253 || ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2254 > strlen (str) + 1))
2255 abort (); /* Attempt to read past the end of constant string. */
2256
2257 return c_readstr (str + offset, mode);
2258 }
2259
/* Expand a call to the memcpy builtin, with arguments in ARGLIST.
   Return 0 if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */
static rtx
expand_builtin_memcpy (tree arglist, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (arglist,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return 0;
  else
    {
      tree dest = TREE_VALUE (arglist);
      tree src = TREE_VALUE (TREE_CHAIN (arglist));
      tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, dest_addr, len_rtx;

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return 0;

      /* If the LEN parameter is zero, return DEST.  */
      if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
	{
	  /* Evaluate and ignore SRC in case it has side-effects.  */
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (dest, target, mode, EXPAND_NORMAL);
	}

      /* If either SRC is not a pointer type, don't do this
	 operation in-line.  */
      if (src_align == 0)
	return 0;

      dest_mem = get_memory_rtx (dest);
      set_mem_align (dest_mem, dest_align);
      len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && GET_CODE (len_rtx) == CONST_INT
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  (void *) src_str, dest_align))
	{
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      (void *) src_str, dest_align, 0);
	  /* memcpy returns DEST; recover the destination address from
	     the MEM that store_by_pieces handed back.  */
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (dest_mem) != ptr_mode)
	    dest_mem = convert_memory_address (ptr_mode, dest_mem);
#endif
	  return dest_mem;
	}

      src_mem = get_memory_rtx (src);
      set_mem_align (src_mem, src_align);

      /* Copy word part most expediently.  */
      dest_addr = emit_block_move (dest_mem, src_mem, len_rtx,
				   BLOCK_OP_NORMAL);

      /* emit_block_move may not hand back the destination address; if
	 so, compute it ourselves from DEST_MEM.  */
      if (dest_addr == 0)
	{
	  dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (dest_addr) != ptr_mode)
	    dest_addr = convert_memory_address (ptr_mode, dest_addr);
#endif
	}
      return dest_addr;
    }
}
2341
/* Expand a call to the mempcpy builtin, with arguments in ARGLIST.
   Return 0 if we failed the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_builtin_mempcpy (tree arglist, rtx target, enum machine_mode mode,
			int endp)
{
  if (!validate_arglist (arglist,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return 0;
  /* If return value is ignored, transform mempcpy into memcpy.  */
  else if (target == const0_rtx)
    {
      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];

      if (!fn)
	return 0;

      return expand_expr (build_function_call_expr (fn, arglist),
			  target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree dest = TREE_VALUE (arglist);
      tree src = TREE_VALUE (TREE_CHAIN (arglist));
      tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, len_rtx;

      /* If DEST is not a pointer type or LEN is not constant,
	 call the normal function.  */
      if (dest_align == 0 || !host_integerp (len, 1))
	return 0;

      /* If the LEN parameter is zero, return DEST.  */
      if (tree_low_cst (len, 1) == 0)
	{
	  /* Evaluate and ignore SRC in case it has side-effects.  */
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (dest, target, mode, EXPAND_NORMAL);
	}

      /* If either SRC is not a pointer type, don't do this
	 operation in-line.  */
      if (src_align == 0)
	return 0;

      len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  ENDP is forwarded so
	 store_by_pieces returns the desired end/dest pointer.  */
      if (src_str
	  && GET_CODE (len_rtx) == CONST_INT
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  (void *) src_str, dest_align))
	{
	  dest_mem = get_memory_rtx (dest);
	  set_mem_align (dest_mem, dest_align);
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      (void *) src_str, dest_align, endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (dest_mem) != ptr_mode)
	    dest_mem = convert_memory_address (ptr_mode, dest_mem);
#endif
	  return dest_mem;
	}

      /* Otherwise try an inline piecewise move, which can also hand
	 back the ENDP-adjusted pointer.  */
      if (GET_CODE (len_rtx) == CONST_INT
	  && can_move_by_pieces (INTVAL (len_rtx),
				 MIN (dest_align, src_align)))
	{
	  dest_mem = get_memory_rtx (dest);
	  set_mem_align (dest_mem, dest_align);
	  src_mem = get_memory_rtx (src);
	  set_mem_align (src_mem, src_align);
	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
				     MIN (dest_align, src_align), endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (dest_mem) != ptr_mode)
	    dest_mem = convert_memory_address (ptr_mode, dest_mem);
#endif
	  return dest_mem;
	}

      return 0;
    }
}
2443
2444 /* Expand expression EXP, which is a call to the memmove builtin. Return 0
2445 if we failed the caller should emit a normal call. */
2446
2447 static rtx
2448 expand_builtin_memmove (tree arglist, rtx target, enum machine_mode mode)
2449 {
2450 if (!validate_arglist (arglist,
2451 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2452 return 0;
2453 else
2454 {
2455 tree dest = TREE_VALUE (arglist);
2456 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2457 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2458
2459 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2460 unsigned int dest_align
2461 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2462
2463 /* If DEST is not a pointer type, call the normal function. */
2464 if (dest_align == 0)
2465 return 0;
2466
2467 /* If the LEN parameter is zero, return DEST. */
2468 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2469 {
2470 /* Evaluate and ignore SRC in case it has side-effects. */
2471 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2472 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2473 }
2474
2475 /* If either SRC is not a pointer type, don't do this
2476 operation in-line. */
2477 if (src_align == 0)
2478 return 0;
2479
2480 /* If src is categorized for a readonly section we can use
2481 normal memcpy. */
2482 if (readonly_data_expr (src))
2483 {
2484 tree const fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2485 if (!fn)
2486 return 0;
2487 return expand_expr (build_function_call_expr (fn, arglist),
2488 target, mode, EXPAND_NORMAL);
2489 }
2490
2491 /* Otherwise, call the normal function. */
2492 return 0;
2493 }
2494 }
2495
2496 /* Expand expression EXP, which is a call to the bcopy builtin. Return 0
2497 if we failed the caller should emit a normal call. */
2498
2499 static rtx
2500 expand_builtin_bcopy (tree arglist)
2501 {
2502 tree src, dest, size, newarglist;
2503
2504 if (!validate_arglist (arglist,
2505 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2506 return NULL_RTX;
2507
2508 src = TREE_VALUE (arglist);
2509 dest = TREE_VALUE (TREE_CHAIN (arglist));
2510 size = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2511
2512 /* New argument list transforming bcopy(ptr x, ptr y, int z) to
2513 memmove(ptr y, ptr x, size_t z). This is done this way
2514 so that if it isn't expanded inline, we fallback to
2515 calling bcopy instead of memmove. */
2516
2517 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
2518 newarglist = tree_cons (NULL_TREE, src, newarglist);
2519 newarglist = tree_cons (NULL_TREE, dest, newarglist);
2520
2521 return expand_builtin_memmove (newarglist, const0_rtx, VOIDmode);
2522 }
2523
2524 /* Expand expression EXP, which is a call to the strcpy builtin. Return 0
2525 if we failed the caller should emit a normal call, otherwise try to get
2526 the result in TARGET, if convenient (and in mode MODE if that's
2527 convenient). */
2528
2529 static rtx
2530 expand_builtin_strcpy (tree arglist, rtx target, enum machine_mode mode)
2531 {
2532 tree fn, len;
2533
2534 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2535 return 0;
2536
2537 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2538 if (!fn)
2539 return 0;
2540
2541 len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
2542 if (len == 0)
2543 return 0;
2544
2545 len = size_binop (PLUS_EXPR, len, ssize_int (1));
2546 chainon (arglist, build_tree_list (NULL_TREE, len));
2547 return expand_expr (build_function_call_expr (fn, arglist),
2548 target, mode, EXPAND_NORMAL);
2549 }
2550
2551 /* Expand a call to the stpcpy builtin, with arguments in ARGLIST.
2552 Return 0 if we failed the caller should emit a normal call,
2553 otherwise try to get the result in TARGET, if convenient (and in
2554 mode MODE if that's convenient). */
2555
2556 static rtx
2557 expand_builtin_stpcpy (tree arglist, rtx target, enum machine_mode mode)
2558 {
2559 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2560 return 0;
2561 else
2562 {
2563 tree newarglist;
2564 tree src, len;
2565
2566 /* If return value is ignored, transform stpcpy into strcpy. */
2567 if (target == const0_rtx)
2568 {
2569 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
2570 if (!fn)
2571 return 0;
2572
2573 return expand_expr (build_function_call_expr (fn, arglist),
2574 target, mode, EXPAND_NORMAL);
2575 }
2576
2577 /* Ensure we get an actual string whose length can be evaluated at
2578 compile-time, not an expression containing a string. This is
2579 because the latter will potentially produce pessimized code
2580 when used to produce the return value. */
2581 src = TREE_VALUE (TREE_CHAIN (arglist));
2582 if (! c_getstr (src) || ! (len = c_strlen (src)))
2583 return 0;
2584
2585 len = fold (size_binop (PLUS_EXPR, len, ssize_int (1)));
2586 newarglist = copy_list (arglist);
2587 chainon (newarglist, build_tree_list (NULL_TREE, len));
2588 return expand_builtin_mempcpy (newarglist, target, mode, /*endp=*/2);
2589 }
2590 }
2591
2592 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2593 bytes from constant string DATA + OFFSET and return it as target
2594 constant. */
2595
2596 static rtx
2597 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
2598 enum machine_mode mode)
2599 {
2600 const char *str = (const char *) data;
2601
2602 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
2603 return const0_rtx;
2604
2605 return c_readstr (str + offset, mode);
2606 }
2607
/* Expand expression EXP, which is a call to the strncpy builtin.  Return 0
   if we failed the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree arglist, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (arglist,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return 0;
  else
    {
      tree slen = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
      tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      tree fn;

      /* We must be passed a constant len parameter.  */
      if (TREE_CODE (len) != INTEGER_CST)
	return 0;

      /* If the len parameter is zero, return the dst parameter.  */
      if (integer_zerop (len))
	{
	  /* Evaluate and ignore the src argument in case it has
	     side-effects.  */
	  expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
		       VOIDmode, EXPAND_NORMAL);
	  /* Return the dst parameter.  */
	  return expand_expr (TREE_VALUE (arglist), target, mode,
			      EXPAND_NORMAL);
	}

      /* Now, we must be passed a constant src ptr parameter.  */
      if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
	return 0;

      /* SLEN becomes strlen (src) + 1, the number of bytes the source
	 actually provides including its NUL.  */
      slen = size_binop (PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  tree dest = TREE_VALUE (arglist);
	  unsigned int dest_align
	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
	  const char *p = c_getstr (TREE_VALUE (TREE_CHAIN (arglist)));
	  rtx dest_mem;

	  /* builtin_strncpy_read_str supplies zeros past the source
	     string, giving us the required padding for free.  */
	  if (!p || dest_align == 0 || !host_integerp (len, 1)
	      || !can_store_by_pieces (tree_low_cst (len, 1),
				       builtin_strncpy_read_str,
				       (void *) p, dest_align))
	    return 0;

	  dest_mem = get_memory_rtx (dest);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_strncpy_read_str,
			   (void *) p, dest_align, 0);
	  /* strncpy returns DEST; recover the address from DEST_MEM.  */
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (dest_mem) != ptr_mode)
	    dest_mem = convert_memory_address (ptr_mode, dest_mem);
#endif
	  return dest_mem;
	}

      /* OK transform into builtin memcpy.  LEN <= SLEN here, so the
	 copy stops within the source string and no padding is needed.  */
      fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
      if (!fn)
	return 0;
      return expand_expr (build_function_call_expr (fn, arglist),
			  target, mode, EXPAND_NORMAL);
    }
}
2682
2683 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2684 bytes from constant string DATA + OFFSET and return it as target
2685 constant. */
2686
2687 static rtx
2688 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2689 enum machine_mode mode)
2690 {
2691 const char *c = (const char *) data;
2692 char *p = alloca (GET_MODE_SIZE (mode));
2693
2694 memset (p, *c, GET_MODE_SIZE (mode));
2695
2696 return c_readstr (p, mode);
2697 }
2698
2699 /* Callback routine for store_by_pieces. Return the RTL of a register
2700 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
2701 char value given in the RTL register data. For example, if mode is
2702 4 bytes wide, return the RTL for 0x01010101*data. */
2703
2704 static rtx
2705 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2706 enum machine_mode mode)
2707 {
2708 rtx target, coeff;
2709 size_t size;
2710 char *p;
2711
2712 size = GET_MODE_SIZE (mode);
2713 if (size == 1)
2714 return (rtx) data;
2715
2716 p = alloca (size);
2717 memset (p, 1, size);
2718 coeff = c_readstr (p, mode);
2719
2720 target = convert_to_mode (mode, (rtx) data, 1);
2721 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
2722 return force_reg (mode, target);
2723 }
2724
/* Expand expression EXP, which is a call to the memset builtin.  Return 0
   if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_memset (tree arglist, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (arglist,
			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return 0;
  else
    {
      tree dest = TREE_VALUE (arglist);
      tree val = TREE_VALUE (TREE_CHAIN (arglist));
      tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      char c;

      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, dest_addr, len_rtx;

      /* If DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0)
	return 0;

      /* If the LEN parameter is zero, return DEST.  */
      if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
	{
	  /* Evaluate and ignore VAL in case it has side-effects.  */
	  expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (dest, target, mode, EXPAND_NORMAL);
	}

      /* Non-constant fill value: store copies of the run-time byte via
	 builtin_memset_gen_str.  */
      if (TREE_CODE (val) != INTEGER_CST)
	{
	  rtx val_rtx;

	  if (!host_integerp (len, 1))
	    return 0;

	  if (optimize_size && tree_low_cst (len, 1) > 1)
	    return 0;

	  /* Assume that we can memset by pieces if we can store the
	     the coefficients by pieces (in the required modes).
	     We can't pass builtin_memset_gen_str as that emits RTL.  */
	  c = 1;
	  if (!can_store_by_pieces (tree_low_cst (len, 1),
				    builtin_memset_read_str,
				    &c, dest_align))
	    return 0;

	  val = fold (build1 (CONVERT_EXPR, unsigned_char_type_node, val));
	  val_rtx = expand_expr (val, NULL_RTX, VOIDmode, 0);
	  val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
			       val_rtx);
	  dest_mem = get_memory_rtx (dest);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_memset_gen_str,
			   val_rtx, dest_align, 0);
	  /* memset returns DEST; recover the address from DEST_MEM.  */
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (dest_mem) != ptr_mode)
	    dest_mem = convert_memory_address (ptr_mode, dest_mem);
#endif
	  return dest_mem;
	}

      /* Narrow the constant value to the target's char; punt if it
	 does not fit.  */
      if (target_char_cast (val, &c))
	return 0;

      /* Non-zero constant byte: store it by pieces if possible.
	 (The C == 0 case falls through to clear_storage below.)  */
      if (c)
	{
	  if (!host_integerp (len, 1))
	    return 0;
	  if (!can_store_by_pieces (tree_low_cst (len, 1),
				    builtin_memset_read_str, &c,
				    dest_align))
	    return 0;

	  dest_mem = get_memory_rtx (dest);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_memset_read_str,
			   &c, dest_align, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (dest_mem) != ptr_mode)
	    dest_mem = convert_memory_address (ptr_mode, dest_mem);
#endif
	  return dest_mem;
	}

      len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);

      dest_mem = get_memory_rtx (dest);
      set_mem_align (dest_mem, dest_align);
      dest_addr = clear_storage (dest_mem, len_rtx);

      /* clear_storage may not hand back the destination address; if
	 so, compute it ourselves from DEST_MEM.  */
      if (dest_addr == 0)
	{
	  dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (dest_addr) != ptr_mode)
	    dest_addr = convert_memory_address (ptr_mode, dest_addr);
#endif
	}

      return dest_addr;
    }
}
2837
2838 /* Expand expression EXP, which is a call to the bzero builtin. Return 0
2839 if we failed the caller should emit a normal call. */
2840
2841 static rtx
2842 expand_builtin_bzero (tree arglist)
2843 {
2844 tree dest, size, newarglist;
2845
2846 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2847 return NULL_RTX;
2848
2849 dest = TREE_VALUE (arglist);
2850 size = TREE_VALUE (TREE_CHAIN (arglist));
2851
2852 /* New argument list transforming bzero(ptr x, int y) to
2853 memset(ptr x, int 0, size_t y). This is done this way
2854 so that if it isn't expanded inline, we fallback to
2855 calling bzero instead of memset. */
2856
2857 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
2858 newarglist = tree_cons (NULL_TREE, integer_zero_node, newarglist);
2859 newarglist = tree_cons (NULL_TREE, dest, newarglist);
2860
2861 return expand_builtin_memset (newarglist, const0_rtx, VOIDmode);
2862 }
2863
/* Expand expression EXP, which is a call to the memcmp built-in function.
   ARGLIST is the argument list for this call.  Return 0 if we failed and the
   caller should emit a normal call, otherwise try to get the result in
   TARGET, if convenient (and in mode MODE, if that's convenient).  */

static rtx
expand_builtin_memcmp (tree exp ATTRIBUTE_UNUSED, tree arglist, rtx target,
		       enum machine_mode mode)
{
  tree arg1, arg2, len;
  const char *p1, *p2;

  if (!validate_arglist (arglist,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return 0;

  arg1 = TREE_VALUE (arglist);
  arg2 = TREE_VALUE (TREE_CHAIN (arglist));
  len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));

  /* If the len parameter is zero, return zero.  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
    {
      /* Evaluate and ignore arg1 and arg2 in case they have
	 side-effects.  */
      expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
      expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return const0_rtx;
    }

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.  */
  if (host_integerp (len, 1) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      const int r = memcmp (p1, p2, tree_low_cst (len, 1));

      /* Normalize the host memcmp result to -1/0/1, since the host
	 library's magnitude need not match the target's.  */
      return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
      tree ind1 =
	fold (build1 (CONVERT_EXPR, integer_type_node,
		      build1 (INDIRECT_REF, cst_uchar_node,
			      build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
      tree ind2 =
	fold (build1 (CONVERT_EXPR, integer_type_node,
		      build1 (INDIRECT_REF, cst_uchar_node,
			      build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
      tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }

#ifdef HAVE_cmpstrsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;

    int arg1_align
      = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    int arg2_align
      = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    enum machine_mode insn_mode
      = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return 0;

    /* Make a place to write the result of the instruction.  Reuse
       TARGET only if it is already a pseudo of the right mode.  */
    result = target;
    if (! (result != 0
	   && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1);
    arg2_rtx = get_memory_rtx (arg2);
    arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
    if (!HAVE_cmpstrsi)
      insn = NULL_RTX;
    else
      insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));

    /* Either emit the target's cmpstrsi insn, or fall back to a call
       to the memcmp library function.  */
    if (insn)
      emit_insn (insn);
    else
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TREE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif

  return 0;
}
2986
2987 /* Expand expression EXP, which is a call to the strcmp builtin. Return 0
2988 if we failed the caller should emit a normal call, otherwise try to get
2989 the result in TARGET, if convenient. */
2990
static rtx
expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
{
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg1, arg2;
  const char *p1, *p2;

  if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return 0;

  arg1 = TREE_VALUE (arglist);
  arg2 = TREE_VALUE (TREE_CHAIN (arglist));

  /* c_getstr returns the constant string an argument points to, or
     NULL if it is not a string constant.  */
  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If both arguments are string constants, evaluate at compile time.
     Only the sign of the host strcmp result is used, so host/target
     character-set differences in magnitude do not matter here.  */
  if (p1 && p2)
    {
      const int i = strcmp (p1, p2);
      return (i < 0 ? constm1_rtx : (i > 0 ? const1_rtx : const0_rtx));
    }

  /* If either arg is "", return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
      /* *(const unsigned char *) arg1, widened to int.  */
      tree ind1 =
	fold (build1 (CONVERT_EXPR, integer_type_node,
		      build1 (INDIRECT_REF, cst_uchar_node,
			      build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
      /* *(const unsigned char *) arg2, widened to int.  */
      tree ind2 =
	fold (build1 (CONVERT_EXPR, integer_type_node,
		      build1 (INDIRECT_REF, cst_uchar_node,
			      build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
      tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }

#ifdef HAVE_cmpstrsi
  /* Try the target's cmpstrsi pattern, bounding the comparison by a
     known string length when one is available.  */
  if (HAVE_cmpstrsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;

      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      enum machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

      len1 = c_strlen (arg1);
      len2 = c_strlen (arg2);

      /* Add one to include the terminating NUL in the compared span.  */
      if (len1)
	len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return 0;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return 0;

      /* Make a place to write the result of the instruction.  Reuse
	 TARGET only when it is already a pseudo of the right mode.  */
      result = target;
      if (! (result != 0
	     && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      arg1_rtx = get_memory_rtx (arg1);
      arg2_rtx = get_memory_rtx (arg2);
      arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
      insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));
      /* The pattern may decline at expansion time; fall back to a call.  */
      if (!insn)
	return 0;

      emit_insn (insn);

      /* Return the value in the proper mode for this function.  */
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
	return result;
      if (target == 0)
	return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }
#endif
  return 0;
}
3114
3115 /* Expand expression EXP, which is a call to the strncmp builtin. Return 0
3116 if we failed the caller should emit a normal call, otherwise try to get
3117 the result in TARGET, if convenient. */
3118
static rtx
expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
{
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg1, arg2, arg3;
  const char *p1, *p2;

  if (!validate_arglist (arglist,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return 0;

  arg1 = TREE_VALUE (arglist);
  arg2 = TREE_VALUE (TREE_CHAIN (arglist));
  arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));

  /* If the len parameter is zero, return zero.  */
  if (host_integerp (arg3, 1) && tree_low_cst (arg3, 1) == 0)
    {
      /* Evaluate and ignore arg1 and arg2 in case they have
	 side-effects.  */
      expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
      expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return const0_rtx;
    }

  /* c_getstr returns the constant string an argument points to, or
     NULL if it is not a string constant.  */
  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, evaluate at compile-time.  Only the
     sign of the host strncmp result is used.  */
  if (host_integerp (arg3, 1) && p1 && p2)
    {
      const int r = strncmp (p1, p2, tree_low_cst (arg3, 1));
      return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
    }

  /* If len == 1 or (either string parameter is "" and (len >= 1)),
      return (*(const u_char*)arg1 - *(const u_char*)arg2).  */
  if (host_integerp (arg3, 1)
      && (tree_low_cst (arg3, 1) == 1
	  || (tree_low_cst (arg3, 1) > 1
	      && ((p1 && *p1 == '\0') || (p2 && *p2 == '\0')))))
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
      /* *(const unsigned char *) arg1, widened to int.  */
      tree ind1 =
	fold (build1 (CONVERT_EXPR, integer_type_node,
		      build1 (INDIRECT_REF, cst_uchar_node,
			      build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
      /* *(const unsigned char *) arg2, widened to int.  */
      tree ind2 =
	fold (build1 (CONVERT_EXPR, integer_type_node,
		      build1 (INDIRECT_REF, cst_uchar_node,
			      build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
      tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrsi
  if (HAVE_cmpstrsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;

      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      enum machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

      len1 = c_strlen (arg1);
      len2 = c_strlen (arg2);

      /* Add one to include the terminating NUL in the compared span.  */
      if (len1)
	len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return 0;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold (build (MIN_EXPR, TREE_TYPE (len), len, arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return 0;

      /* Make a place to write the result of the instruction.  Reuse
	 TARGET only when it is already a pseudo of the right mode.  */
      result = target;
      if (! (result != 0
	     && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      arg1_rtx = get_memory_rtx (arg1);
      arg2_rtx = get_memory_rtx (arg2);
      arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
      insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));
      /* The pattern may decline at expansion time; fall back to a call.  */
      if (!insn)
	return 0;

      emit_insn (insn);

      /* Return the value in the proper mode for this function.  */
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
	return result;
      if (target == 0)
	return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }
#endif
  return 0;
}
3264
3265 /* Expand expression EXP, which is a call to the strcat builtin.
3266 Return 0 if we failed the caller should emit a normal call,
3267 otherwise try to get the result in TARGET, if convenient. */
3268
3269 static rtx
3270 expand_builtin_strcat (tree arglist, rtx target, enum machine_mode mode)
3271 {
3272 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3273 return 0;
3274 else
3275 {
3276 tree dst = TREE_VALUE (arglist),
3277 src = TREE_VALUE (TREE_CHAIN (arglist));
3278 const char *p = c_getstr (src);
3279
3280 /* If the string length is zero, return the dst parameter. */
3281 if (p && *p == '\0')
3282 return expand_expr (dst, target, mode, EXPAND_NORMAL);
3283
3284 return 0;
3285 }
3286 }
3287
3288 /* Expand expression EXP, which is a call to the strncat builtin.
3289 Return 0 if we failed the caller should emit a normal call,
3290 otherwise try to get the result in TARGET, if convenient. */
3291
3292 static rtx
3293 expand_builtin_strncat (tree arglist, rtx target, enum machine_mode mode)
3294 {
3295 if (!validate_arglist (arglist,
3296 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3297 return 0;
3298 else
3299 {
3300 tree dst = TREE_VALUE (arglist),
3301 src = TREE_VALUE (TREE_CHAIN (arglist)),
3302 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3303 const char *p = c_getstr (src);
3304
3305 /* If the requested length is zero, or the src parameter string
3306 length is zero, return the dst parameter. */
3307 if (integer_zerop (len) || (p && *p == '\0'))
3308 {
3309 /* Evaluate and ignore the src and len parameters in case
3310 they have side-effects. */
3311 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3312 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
3313 return expand_expr (dst, target, mode, EXPAND_NORMAL);
3314 }
3315
3316 /* If the requested len is greater than or equal to the string
3317 length, call strcat. */
3318 if (TREE_CODE (len) == INTEGER_CST && p
3319 && compare_tree_int (len, strlen (p)) >= 0)
3320 {
3321 tree newarglist
3322 = tree_cons (NULL_TREE, dst, build_tree_list (NULL_TREE, src));
3323 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
3324
3325 /* If the replacement _DECL isn't initialized, don't do the
3326 transformation. */
3327 if (!fn)
3328 return 0;
3329
3330 return expand_expr (build_function_call_expr (fn, newarglist),
3331 target, mode, EXPAND_NORMAL);
3332 }
3333 return 0;
3334 }
3335 }
3336
3337 /* Expand expression EXP, which is a call to the strspn builtin.
3338 Return 0 if we failed the caller should emit a normal call,
3339 otherwise try to get the result in TARGET, if convenient. */
3340
3341 static rtx
3342 expand_builtin_strspn (tree arglist, rtx target, enum machine_mode mode)
3343 {
3344 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3345 return 0;
3346 else
3347 {
3348 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
3349 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
3350
3351 /* If both arguments are constants, evaluate at compile-time. */
3352 if (p1 && p2)
3353 {
3354 const size_t r = strspn (p1, p2);
3355 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
3356 }
3357
3358 /* If either argument is "", return 0. */
3359 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
3360 {
3361 /* Evaluate and ignore both arguments in case either one has
3362 side-effects. */
3363 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
3364 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3365 return const0_rtx;
3366 }
3367 return 0;
3368 }
3369 }
3370
3371 /* Expand expression EXP, which is a call to the strcspn builtin.
3372 Return 0 if we failed the caller should emit a normal call,
3373 otherwise try to get the result in TARGET, if convenient. */
3374
3375 static rtx
3376 expand_builtin_strcspn (tree arglist, rtx target, enum machine_mode mode)
3377 {
3378 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3379 return 0;
3380 else
3381 {
3382 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
3383 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
3384
3385 /* If both arguments are constants, evaluate at compile-time. */
3386 if (p1 && p2)
3387 {
3388 const size_t r = strcspn (p1, p2);
3389 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
3390 }
3391
3392 /* If the first argument is "", return 0. */
3393 if (p1 && *p1 == '\0')
3394 {
3395 /* Evaluate and ignore argument s2 in case it has
3396 side-effects. */
3397 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3398 return const0_rtx;
3399 }
3400
3401 /* If the second argument is "", return __builtin_strlen(s1). */
3402 if (p2 && *p2 == '\0')
3403 {
3404 tree newarglist = build_tree_list (NULL_TREE, s1),
3405 fn = implicit_built_in_decls[BUILT_IN_STRLEN];
3406
3407 /* If the replacement _DECL isn't initialized, don't do the
3408 transformation. */
3409 if (!fn)
3410 return 0;
3411
3412 return expand_expr (build_function_call_expr (fn, newarglist),
3413 target, mode, EXPAND_NORMAL);
3414 }
3415 return 0;
3416 }
3417 }
3418
3419 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
3420 if that's convenient. */
3421
rtx
expand_builtin_saveregs (void)
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

#ifdef EXPAND_BUILTIN_SAVEREGS
  /* Do whatever the machine needs done in this case.  */
  val = EXPAND_BUILTIN_SAVEREGS ();
#else
  /* ??? We used to try and build up a call to the out of line function,
     guessing about what registers needed saving etc.  This became much
     harder with __builtin_va_start, since we don't have a tree for a
     call to __builtin_saveregs to fall back on.  There was exactly one
     port (i860) that used this code, and I'm unconvinced it could actually
     handle the general case.  So we no longer try to handle anything
     weird and make the backend absorb the evil.  */

  error ("__builtin_saveregs not supported by this target");
  val = const0_rtx;
#endif

  /* Detach the insns emitted above so they can be re-emitted at the
     start of the function below.  */
  seq = get_insns ();
  end_sequence ();

  /* Cache the result so later calls in this function reuse it.  */
  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, get_insns ());
  pop_topmost_sequence ();

  return val;
}
3468
3469 /* __builtin_args_info (N) returns word N of the arg space info
3470 for the current function. The number and meanings of words
3471 is controlled by the definition of CUMULATIVE_ARGS. */
3472
3473 static rtx
3474 expand_builtin_args_info (tree arglist)
3475 {
3476 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
3477 int *word_ptr = (int *) &current_function_args_info;
3478
3479 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
3480 abort ();
3481
3482 if (arglist != 0)
3483 {
3484 if (!host_integerp (TREE_VALUE (arglist), 0))
3485 error ("argument of `__builtin_args_info' must be constant");
3486 else
3487 {
3488 HOST_WIDE_INT wordnum = tree_low_cst (TREE_VALUE (arglist), 0);
3489
3490 if (wordnum < 0 || wordnum >= nwords)
3491 error ("argument of `__builtin_args_info' out of range");
3492 else
3493 return GEN_INT (word_ptr[wordnum]);
3494 }
3495 }
3496 else
3497 error ("missing argument in `__builtin_args_info'");
3498
3499 return const0_rtx;
3500 }
3501
3502 /* Expand ARGLIST, from a call to __builtin_next_arg. */
3503
3504 static rtx
3505 expand_builtin_next_arg (tree arglist)
3506 {
3507 tree fntype = TREE_TYPE (current_function_decl);
3508
3509 if (TYPE_ARG_TYPES (fntype) == 0
3510 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3511 == void_type_node))
3512 {
3513 error ("`va_start' used in function with fixed args");
3514 return const0_rtx;
3515 }
3516
3517 if (arglist)
3518 {
3519 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
3520 tree arg = TREE_VALUE (arglist);
3521
3522 /* Strip off all nops for the sake of the comparison. This
3523 is not quite the same as STRIP_NOPS. It does more.
3524 We must also strip off INDIRECT_EXPR for C++ reference
3525 parameters. */
3526 while (TREE_CODE (arg) == NOP_EXPR
3527 || TREE_CODE (arg) == CONVERT_EXPR
3528 || TREE_CODE (arg) == NON_LVALUE_EXPR
3529 || TREE_CODE (arg) == INDIRECT_REF)
3530 arg = TREE_OPERAND (arg, 0);
3531 if (arg != last_parm)
3532 warning ("second parameter of `va_start' not last named argument");
3533 }
3534 else
3535 /* Evidently an out of date version of <stdarg.h>; can't validate
3536 va_start's second argument, but can still work as intended. */
3537 warning ("`__builtin_next_arg' called without an argument");
3538
3539 return expand_binop (Pmode, add_optab,
3540 current_function_internal_arg_pointer,
3541 current_function_arg_offset_rtx,
3542 NULL_RTX, 0, OPTAB_LIB_WIDEN);
3543 }
3544
3545 /* Make it easier for the backends by protecting the valist argument
3546 from multiple evaluations. */
3547
static tree
stabilize_va_list (tree valist, int needs_lvalue)
{
  if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
    {
      /* Wrap in SAVE_EXPR so side effects run only once however many
	 times the backend expands VALIST.  */
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 TREE_TYPE (va_list_type_node), but it's possible we've
	 actually been given an array (an actual va_list_type_node).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
	  tree p2 = build_pointer_type (va_list_type_node);

	  /* Take the array's address, then view it as a pointer to
	     the element type the backends expect.  */
	  valist = build1 (ADDR_EXPR, p2, valist);
	  valist = fold (build1 (NOP_EXPR, p1, valist));
	}
    }
  else
    {
      tree pt;

      if (! needs_lvalue)
	{
	  /* A side-effect-free rvalue needs no protection.  */
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  /* Rewrite VALIST as *&VALIST so the SAVE_EXPR below can
	     capture the address rather than the value.  */
	  pt = build_pointer_type (va_list_type_node);
	  valist = fold (build1 (ADDR_EXPR, pt, valist));
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = fold (build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)),
			     valist));
    }

  return valist;
}
3591
3592 /* The "standard" implementation of va_start: just assign `nextarg' to
3593 the variable. */
3594
3595 void
3596 std_expand_builtin_va_start (tree valist, rtx nextarg)
3597 {
3598 tree t;
3599
3600 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3601 make_tree (ptr_type_node, nextarg));
3602 TREE_SIDE_EFFECTS (t) = 1;
3603
3604 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3605 }
3606
3607 /* Expand ARGLIST, from a call to __builtin_va_start. */
3608
3609 static rtx
3610 expand_builtin_va_start (tree arglist)
3611 {
3612 rtx nextarg;
3613 tree chain, valist;
3614
3615 chain = TREE_CHAIN (arglist);
3616
3617 if (TREE_CHAIN (chain))
3618 error ("too many arguments to function `va_start'");
3619
3620 nextarg = expand_builtin_next_arg (chain);
3621 valist = stabilize_va_list (TREE_VALUE (arglist), 1);
3622
3623 #ifdef EXPAND_BUILTIN_VA_START
3624 EXPAND_BUILTIN_VA_START (valist, nextarg);
3625 #else
3626 std_expand_builtin_va_start (valist, nextarg);
3627 #endif
3628
3629 return const0_rtx;
3630 }
3631
3632 /* The "standard" implementation of va_arg: read the value from the
3633 current (padded) address and increment by the (padded) size. */
3634
rtx
std_expand_builtin_va_arg (tree valist, tree type)
{
  tree addr_tree, t, type_size = NULL;
  tree align, alignm1;
  tree rounded_size;
  rtx addr;

  /* Compute the rounded size of the type: size rounded up to a
     multiple of the argument-passing boundary.  */
  align = size_int (PARM_BOUNDARY / BITS_PER_UNIT);
  alignm1 = size_int (PARM_BOUNDARY / BITS_PER_UNIT - 1);
  if (type == error_mark_node
      || (type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type))) == NULL
      || TREE_OVERFLOW (type_size))
    /* Bad or unsized type: use zero so the code below degenerates
       to returning AP unmodified.  */
    rounded_size = size_zero_node;
  else
    /* rounded_size = ((type_size + align - 1) / align) * align  */
    rounded_size = fold (build (MULT_EXPR, sizetype,
				fold (build (TRUNC_DIV_EXPR, sizetype,
					     fold (build (PLUS_EXPR, sizetype,
							  type_size, alignm1)),
					     align)),
				align));

  /* Get AP.  */
  addr_tree = valist;
  if (PAD_VARARGS_DOWN && ! integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  Skip the padding:
	 add (rounded_size > align ? 0 : rounded_size - type_size).  */
      addr_tree = fold (build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
			       fold (build (COND_EXPR, sizetype,
					    fold (build (GT_EXPR, sizetype,
							 rounded_size,
							 align)),
					    size_zero_node,
					    fold (build (MINUS_EXPR, sizetype,
							 rounded_size,
							 type_size))))));
    }

  /* Materialize the address in a register before AP is updated below.  */
  addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
  addr = copy_to_reg (addr);

  /* Compute new value for AP.  */
  if (! integer_zerop (rounded_size))
    {
      t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
		 build (PLUS_EXPR, TREE_TYPE (valist), valist,
			rounded_size));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  return addr;
}
3689
3690 /* Expand __builtin_va_arg, which is not really a builtin function, but
3691 a very special sort of operator. */
3692
rtx
expand_builtin_va_arg (tree valist, tree type)
{
  rtx addr, result;
  tree promoted_type, want_va_type, have_va_type;

  /* Verify that valist is of the proper type.  */

  want_va_type = va_list_type_node;
  have_va_type = TREE_TYPE (valist);
  if (TREE_CODE (want_va_type) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE
	  || TREE_CODE (have_va_type) == POINTER_TYPE)
	{
	  want_va_type = TREE_TYPE (want_va_type);
	  have_va_type = TREE_TYPE (have_va_type);
	}
    }
  if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
    {
      error ("first argument to `va_arg' not of type `va_list'");
      /* Continue with a dummy address so a MEM of the right mode is
	 still produced below.  */
      addr = const0_rtx;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  else if ((promoted_type = (*lang_hooks.types.type_promotes_to) (type))
	   != type)
    {
      const char *name = "<anonymous type>", *pname = 0;
      /* Emit the extra hint only once per compilation.  */
      static bool gave_help;

      /* Dig out a printable name for TYPE, if it has one.  */
      if (TYPE_NAME (type))
	{
	  if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE)
	    name = IDENTIFIER_POINTER (TYPE_NAME (type));
	  else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
		   && DECL_NAME (TYPE_NAME (type)))
	    name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
	}
      /* Likewise for the promoted type.  */
      if (TYPE_NAME (promoted_type))
	{
	  if (TREE_CODE (TYPE_NAME (promoted_type)) == IDENTIFIER_NODE)
	    pname = IDENTIFIER_POINTER (TYPE_NAME (promoted_type));
	  else if (TREE_CODE (TYPE_NAME (promoted_type)) == TYPE_DECL
		   && DECL_NAME (TYPE_NAME (promoted_type)))
	    pname = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (promoted_type)));
	}

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warning ("`%s' is promoted to `%s' when passed through `...'",
	       name, pname);
      if (! gave_help)
	{
	  gave_help = true;
	  warning ("(so you should pass `%s' not `%s' to `va_arg')",
		   pname, name);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      expand_builtin_trap ();

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      addr = const0_rtx;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
	 from multiple evaluations.  */
      valist = stabilize_va_list (valist, 0);

#ifdef EXPAND_BUILTIN_VA_ARG
      addr = EXPAND_BUILTIN_VA_ARG (valist, type);
#else
      addr = std_expand_builtin_va_arg (valist, type);
#endif
    }

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (addr) != Pmode)
    addr = convert_memory_address (Pmode, addr);
#endif

  /* The result is the argument object in memory at the computed
     address, in the varargs alias set.  */
  result = gen_rtx_MEM (TYPE_MODE (type), addr);
  set_mem_alias_set (result, get_varargs_alias_set ());

  return result;
}
3790
3791 /* Expand ARGLIST, from a call to __builtin_va_end. */
3792
static rtx
expand_builtin_va_end (tree arglist)
{
  tree valist = TREE_VALUE (arglist);

#ifdef EXPAND_BUILTIN_VA_END
  /* Protect the operand from multiple evaluation before handing the
     argument list to the target macro.  */
  valist = stabilize_va_list (valist, 0);
  EXPAND_BUILTIN_VA_END (arglist);
#else
  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
#endif

  return const0_rtx;
}
3810
3811 /* Expand ARGLIST, from a call to __builtin_va_copy. We do this as a
3812 builtin rather than just as an assignment in stdarg.h because of the
3813 nastiness of array-type va_list types. */
3814
static rtx
expand_builtin_va_copy (tree arglist)
{
  tree dst, src, t;

  dst = TREE_VALUE (arglist);
  src = TREE_VALUE (TREE_CHAIN (arglist));

  /* DST is assigned to, so it needs lvalue stabilization; SRC is
     only read.  */
  dst = stabilize_va_list (dst, 1);
  src = stabilize_va_list (src, 0);

  if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
    {
      /* Scalar va_list: a plain assignment copies it.  */
      t = build (MODIFY_EXPR, va_list_type_node, dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array va_list: copy the whole object as a block of bytes.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
			  VOIDmode, EXPAND_NORMAL);

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (dstb) != Pmode)
	dstb = convert_memory_address (Pmode, dstb);

      if (GET_MODE (srcb) != Pmode)
	srcb = convert_memory_address (Pmode, srcb);
#endif

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
3864
3865 /* Expand a call to one of the builtin functions __builtin_frame_address or
3866 __builtin_return_address. */
3867
3868 static rtx
3869 expand_builtin_frame_address (tree fndecl, tree arglist)
3870 {
3871 /* The argument must be a nonnegative integer constant.
3872 It counts the number of frames to scan up the stack.
3873 The value is the return address saved in that frame. */
3874 if (arglist == 0)
3875 /* Warning about missing arg was already issued. */
3876 return const0_rtx;
3877 else if (! host_integerp (TREE_VALUE (arglist), 1))
3878 {
3879 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3880 error ("invalid arg to `__builtin_frame_address'");
3881 else
3882 error ("invalid arg to `__builtin_return_address'");
3883 return const0_rtx;
3884 }
3885 else
3886 {
3887 rtx tem
3888 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
3889 tree_low_cst (TREE_VALUE (arglist), 1),
3890 hard_frame_pointer_rtx);
3891
3892 /* Some ports cannot access arbitrary stack frames. */
3893 if (tem == NULL)
3894 {
3895 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3896 warning ("unsupported arg to `__builtin_frame_address'");
3897 else
3898 warning ("unsupported arg to `__builtin_return_address'");
3899 return const0_rtx;
3900 }
3901
3902 /* For __builtin_frame_address, return what we've got. */
3903 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3904 return tem;
3905
3906 if (GET_CODE (tem) != REG
3907 && ! CONSTANT_P (tem))
3908 tem = copy_to_mode_reg (Pmode, tem);
3909 return tem;
3910 }
3911 }
3912
3913 /* Expand a call to the alloca builtin, with arguments ARGLIST. Return 0 if
3914 we failed and the caller should emit a normal call, otherwise try to get
3915 the result in TARGET, if convenient. */
3916
3917 static rtx
3918 expand_builtin_alloca (tree arglist, rtx target)
3919 {
3920 rtx op0;
3921 rtx result;
3922
3923 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
3924 return 0;
3925
3926 /* Compute the argument. */
3927 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
3928
3929 /* Allocate the desired space. */
3930 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
3931
3932 #ifdef POINTERS_EXTEND_UNSIGNED
3933 if (GET_MODE (result) != ptr_mode)
3934 result = convert_memory_address (ptr_mode, result);
3935 #endif
3936
3937 return result;
3938 }
3939
3940 /* Expand a call to a unary builtin. The arguments are in ARGLIST.
3941 Return 0 if a normal call should be emitted rather than expanding the
3942 function in-line. If convenient, the result should be placed in TARGET.
3943 SUBTARGET may be used as the target for computing one of EXP's operands. */
3944
3945 static rtx
3946 expand_builtin_unop (enum machine_mode target_mode, tree arglist, rtx target,
3947 rtx subtarget, optab op_optab)
3948 {
3949 rtx op0;
3950 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
3951 return 0;
3952
3953 /* Compute the argument. */
3954 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
3955 /* Compute op, into TARGET if possible.
3956 Set TARGET to wherever the result comes back. */
3957 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
3958 op_optab, op0, target, 1);
3959 if (target == 0)
3960 abort ();
3961
3962 return convert_to_mode (target_mode, target, 0);
3963 }
3964
/* If the string passed to fputs is a constant and is one character
   long, we attempt to transform this call into __builtin_fputc().
   ARGLIST is (string, stream); IGNORE is nonzero when the call's value
   is unused; UNLOCKED selects the *_unlocked replacement functions.
   Returns the RTL for the replacement call, or 0 to emit fputs
   normally.  */

static rtx
expand_builtin_fputs (tree arglist, int ignore, int unlocked)
{
  tree len, fn;
  /* Pick the locked or unlocked flavor of the replacement decls.  */
  tree fn_fputc = unlocked ? implicit_built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_FPUTC];
  tree fn_fwrite = unlocked ? implicit_built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_FWRITE];

  /* If the return value is used, or the replacement _DECL isn't
     initialized, don't do the transformation.  */
  if (!ignore || !fn_fputc || !fn_fwrite)
    return 0;

  /* Verify the arguments in the original call.  */
  if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return 0;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  if (!(len = c_strlen (TREE_VALUE (arglist)))
      || TREE_CODE (len) != INTEGER_CST)
    return 0;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      {
	/* Evaluate and ignore the argument in case it has
	   side-effects.  */
	expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
		     VOIDmode, EXPAND_NORMAL);
	return const0_rtx;
      }
    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (TREE_VALUE (arglist));

	if (p != NULL)
	  {
	    /* New argument list transforming fputs(string, stream) to
	       fputc(string[0], stream).  */
	    arglist =
	      build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
	    arglist =
	      tree_cons (NULL_TREE, build_int_2 (p[0], 0), arglist);
	    fn = fn_fputc;
	    break;
	  }
      }
      /* FALLTHROUGH: the single character was not retrievable at
	 compile time, so treat the string like any longer one.  */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	tree string_arg;

	/* If optimizing for size keep fputs.  */
	if (optimize_size)
	  return 0;
	string_arg = TREE_VALUE (arglist);
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	arglist = build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
	arglist = tree_cons (NULL_TREE, len, arglist);
	arglist = tree_cons (NULL_TREE, size_one_node, arglist);
	arglist = tree_cons (NULL_TREE, string_arg, arglist);
	fn = fn_fwrite;
	break;
      }
    default:
      /* compare_tree_int only returns -1, 0 or 1.  */
      abort ();
    }

  /* Emit the replacement call built above.  */
  return expand_expr (build_function_call_expr (fn, arglist),
		      (ignore ? const0_rtx : NULL_RTX),
		      VOIDmode, EXPAND_NORMAL);
}
4044
/* Expand a call to __builtin_expect.  We return our argument and emit a
   NOTE_INSN_EXPECTED_VALUE note.  This is the expansion of __builtin_expect in
   a non-jump context.  ARGLIST is (expression, expected-constant); the
   expanded value of the expression is placed in TARGET if convenient.  */

static rtx
expand_builtin_expect (tree arglist, rtx target)
{
  tree exp, c;
  rtx note, rtx_c;

  /* Both arguments are required; with fewer, just yield zero.  */
  if (arglist == NULL_TREE
      || TREE_CHAIN (arglist) == NULL_TREE)
    return const0_rtx;
  exp = TREE_VALUE (arglist);
  c = TREE_VALUE (TREE_CHAIN (arglist));

  /* Diagnose a non-constant second argument, then recover by
     pretending it was zero.  */
  if (TREE_CODE (c) != INTEGER_CST)
    {
      error ("second arg to `__builtin_expect' must be a constant");
      c = integer_zero_node;
    }

  target = expand_expr (exp, target, VOIDmode, EXPAND_NORMAL);

  /* Don't bother with expected value notes for integral constants.  */
  if (flag_guess_branch_prob && GET_CODE (target) != CONST_INT)
    {
      /* We do need to force this into a register so that we can be
	 moderately sure to be able to correctly interpret the branch
	 condition later.  */
      target = force_reg (GET_MODE (target), target);

      rtx_c = expand_expr (c, NULL_RTX, GET_MODE (target), EXPAND_NORMAL);

      /* Record "TARGET == C is expected" for the branch predictor.  */
      note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
      NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, target, rtx_c);
    }

  return target;
}
4085
/* Like expand_builtin_expect, except do this in a jump context.  This is
   called from do_jump if the conditional is a __builtin_expect.  Return either
   a list of insns to emit the jump or NULL if we cannot optimize
   __builtin_expect.  We need to optimize this at jump time so that machines
   like the PowerPC don't turn the test into a SCC operation, and then jump
   based on the test being 0/1.  */

rtx
expand_builtin_expect_jump (tree exp, rtx if_false_label, rtx if_true_label)
{
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  rtx ret = NULL_RTX;

  /* Only handle __builtin_expect (test, 0) and
     __builtin_expect (test, 1).  */
  if (TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
      && (integer_zerop (arg1) || integer_onep (arg1)))
    {
      int num_jumps = 0;
      rtx insn;

      /* If we fail to locate an appropriate conditional jump, we'll
	 fall back to normal evaluation.  Ensure that the expression
	 can be re-evaluated.  */
      switch (unsafe_for_reeval (arg0))
	{
	case 0: /* Safe.  */
	  break;

	case 1: /* Mildly unsafe.  */
	  arg0 = unsave_expr (arg0);
	  break;

	case 2: /* Wildly unsafe.  */
	  return NULL_RTX;
	}

      /* Expand the jump insns.  */
      start_sequence ();
      do_jump (arg0, if_false_label, if_true_label);
      ret = get_insns ();
      end_sequence ();

      /* Now that the __builtin_expect has been validated, go through and add
	 the expect's to each of the conditional jumps.  If we run into an
	 error, just give up and generate the 'safe' code of doing a SCC
	 operation and then doing a branch on that.  */
      insn = ret;
      while (insn != NULL_RTX)
	{
	  rtx next = NEXT_INSN (insn);

	  if (GET_CODE (insn) == JUMP_INSN && any_condjump_p (insn))
	    {
	      /* The jump's pattern is (set (pc) (if_then_else ...));
		 inspect the two arms to find which way "taken" goes.  */
	      rtx ifelse = SET_SRC (pc_set (insn));
	      rtx label;
	      int taken;

	      if (GET_CODE (XEXP (ifelse, 1)) == LABEL_REF)
		{
		  taken = 1;
		  label = XEXP (XEXP (ifelse, 1), 0);
		}
	      /* An inverted jump reverses the probabilities.  */
	      else if (GET_CODE (XEXP (ifelse, 2)) == LABEL_REF)
		{
		  taken = 0;
		  label = XEXP (XEXP (ifelse, 2), 0);
		}
	      /* We shouldn't have to worry about conditional returns during
		 the expansion stage, but handle it gracefully anyway.  */
	      else if (GET_CODE (XEXP (ifelse, 1)) == RETURN)
		{
		  taken = 1;
		  label = NULL_RTX;
		}
	      /* An inverted return reverses the probabilities.  */
	      else if (GET_CODE (XEXP (ifelse, 2)) == RETURN)
		{
		  taken = 0;
		  label = NULL_RTX;
		}
	      else
		goto do_next_insn;

	      /* If the test is expected to fail, reverse the
		 probabilities.  */
	      if (integer_zerop (arg1))
		taken = 1 - taken;

	      /* If we are jumping to the false label, reverse the
		 probabilities.  */
	      if (label == NULL_RTX)
		; /* conditional return */
	      else if (label == if_false_label)
		taken = 1 - taken;
	      else if (label != if_true_label)
		goto do_next_insn;

	      num_jumps++;
	      predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
	    }

	do_next_insn:
	  insn = next;
	}

      /* If no jumps were modified, fail and do __builtin_expect the normal
	 way.  */
      if (num_jumps == 0)
	ret = NULL_RTX;
    }

  return ret;
}
4203
/* Emit code that traps unconditionally: use the machine's "trap" insn
   when the target provides one, otherwise fall back to calling abort
   through the library.  Control never continues past the trap, so a
   barrier is emitted to end the basic block.  */

void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    emit_insn (gen_trap ());
  else
#endif
  /* NB: with HAVE_trap defined this line is the "else" body above;
     without it, it is the sole statement.  Keep the layout as-is.  */
  emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
4215
4216 /* Expand a call to fabs, fabsf or fabsl with arguments ARGLIST.
4217 Return 0 if a normal call should be emitted rather than expanding
4218 the function inline. If convenient, the result should be placed
4219 in TARGET. SUBTARGET may be used as the target for computing
4220 the operand. */
4221
4222 static rtx
4223 expand_builtin_fabs (tree arglist, rtx target, rtx subtarget)
4224 {
4225 enum machine_mode mode;
4226 tree arg;
4227 rtx op0;
4228
4229 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
4230 return 0;
4231
4232 arg = TREE_VALUE (arglist);
4233 mode = TYPE_MODE (TREE_TYPE (arg));
4234 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
4235 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4236 }
4237
4238 /* Expand a call to cabs, cabsf or cabsl with arguments ARGLIST.
4239 Return 0 if a normal call should be emitted rather than expanding
4240 the function inline. If convenient, the result should be placed
4241 in target. */
4242
4243 static rtx
4244 expand_builtin_cabs (tree arglist, rtx target)
4245 {
4246 enum machine_mode mode;
4247 tree arg;
4248 rtx op0;
4249
4250 if (arglist == 0 || TREE_CHAIN (arglist))
4251 return 0;
4252 arg = TREE_VALUE (arglist);
4253 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
4254 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
4255 return 0;
4256
4257 mode = TYPE_MODE (TREE_TYPE (arg));
4258 op0 = expand_expr (arg, NULL_RTX, VOIDmode, 0);
4259 return expand_complex_abs (mode, op0, target, 0);
4260 }
4261
4262 \f
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.
   Falls through to an ordinary call for any builtin whose inline
   expander declines or is absent.  */

rtx
expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
		int ignore)
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));

  /* Perform postincrements before expanding builtin functions.  */
  emit_queue ();

  /* Machine-specific builtins are expanded entirely by the target.  */
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return (*targetm.expand_builtin) (exp, target, subtarget, mode, ignore);

  /* When not optimizing, generate calls to library functions for a certain
     set of builtins.  (Unless the user spelled the __builtin_ name
     explicitly, in which case we still expand inline.)  */
  if (!optimize && !CALLED_AS_BUILT_IN (fndecl))
    switch (fcode)
      {
      case BUILT_IN_SQRT:
      case BUILT_IN_SQRTF:
      case BUILT_IN_SQRTL:
      case BUILT_IN_SIN:
      case BUILT_IN_SINF:
      case BUILT_IN_SINL:
      case BUILT_IN_COS:
      case BUILT_IN_COSF:
      case BUILT_IN_COSL:
      case BUILT_IN_EXP:
      case BUILT_IN_EXPF:
      case BUILT_IN_EXPL:
      case BUILT_IN_LOG:
      case BUILT_IN_LOGF:
      case BUILT_IN_LOGL:
      case BUILT_IN_TAN:
      case BUILT_IN_TANF:
      case BUILT_IN_TANL:
      case BUILT_IN_ATAN:
      case BUILT_IN_ATANF:
      case BUILT_IN_ATANL:
      case BUILT_IN_POW:
      case BUILT_IN_POWF:
      case BUILT_IN_POWL:
      case BUILT_IN_ATAN2:
      case BUILT_IN_ATAN2F:
      case BUILT_IN_ATAN2L:
      case BUILT_IN_MEMSET:
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMCMP:
      case BUILT_IN_MEMPCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_BCMP:
      case BUILT_IN_BZERO:
      case BUILT_IN_BCOPY:
      case BUILT_IN_INDEX:
      case BUILT_IN_RINDEX:
      case BUILT_IN_STPCPY:
      case BUILT_IN_STRCHR:
      case BUILT_IN_STRRCHR:
      case BUILT_IN_STRLEN:
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_STRNCMP:
      case BUILT_IN_STRSTR:
      case BUILT_IN_STRPBRK:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_STRSPN:
      case BUILT_IN_STRCSPN:
      case BUILT_IN_STRCMP:
      case BUILT_IN_FFS:
      case BUILT_IN_PUTCHAR:
      case BUILT_IN_PUTS:
      case BUILT_IN_PRINTF:
      case BUILT_IN_FPUTC:
      case BUILT_IN_FPUTS:
      case BUILT_IN_FWRITE:
      case BUILT_IN_PUTCHAR_UNLOCKED:
      case BUILT_IN_PUTS_UNLOCKED:
      case BUILT_IN_PRINTF_UNLOCKED:
      case BUILT_IN_FPUTC_UNLOCKED:
      case BUILT_IN_FPUTS_UNLOCKED:
      case BUILT_IN_FWRITE_UNLOCKED:
      case BUILT_IN_FLOOR:
      case BUILT_IN_FLOORF:
      case BUILT_IN_FLOORL:
      case BUILT_IN_CEIL:
      case BUILT_IN_CEILF:
      case BUILT_IN_CEILL:
      case BUILT_IN_TRUNC:
      case BUILT_IN_TRUNCF:
      case BUILT_IN_TRUNCL:
      case BUILT_IN_ROUND:
      case BUILT_IN_ROUNDF:
      case BUILT_IN_ROUNDL:
      case BUILT_IN_NEARBYINT:
      case BUILT_IN_NEARBYINTF:
      case BUILT_IN_NEARBYINTL:
	return expand_call (exp, target, ignore);

      default:
	break;
      }

  /* The built-in function expanders test for target == const0_rtx
     to determine whether the function's result will be ignored.  */
  if (ignore)
    target = const0_rtx;

  /* If the result of a pure or const built-in function is ignored, and
     none of its arguments are volatile, we can avoid expanding the
     built-in call and just evaluate the arguments for side-effects.  */
  if (target == const0_rtx
      && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
    {
      bool volatilep = false;
      tree arg;

      for (arg = arglist; arg; arg = TREE_CHAIN (arg))
	if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
	  {
	    volatilep = true;
	    break;
	  }

      if (! volatilep)
	{
	  for (arg = arglist; arg; arg = TREE_CHAIN (arg))
	    expand_expr (TREE_VALUE (arg), const0_rtx,
			 VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}
    }

  /* Main dispatch: each case either returns an expansion or breaks
     out to fall back on an ordinary library call at the bottom.  */
  switch (fcode)
    {
    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      /* build_function_call changes these into ABS_EXPR.  */
      abort ();

    case BUILT_IN_FABS:
    case BUILT_IN_FABSF:
    case BUILT_IN_FABSL:
      target = expand_builtin_fabs (arglist, target, subtarget);
      if (target)
	return target;
      break;

    case BUILT_IN_CABS:
    case BUILT_IN_CABSF:
    case BUILT_IN_CABSL:
      if (flag_unsafe_math_optimizations)
	{
	  target = expand_builtin_cabs (arglist, target);
	  if (target)
	    return target;
	}
      break;

    case BUILT_IN_CONJ:
    case BUILT_IN_CONJF:
    case BUILT_IN_CONJL:
    case BUILT_IN_CREAL:
    case BUILT_IN_CREALF:
    case BUILT_IN_CREALL:
    case BUILT_IN_CIMAG:
    case BUILT_IN_CIMAGF:
    case BUILT_IN_CIMAGL:
      /* expand_tree_builtin changes these into CONJ_EXPR, REALPART_EXPR
	 and IMAGPART_EXPR.  */
      abort ();

    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_COS:
    case BUILT_IN_COSF:
    case BUILT_IN_COSL:
    case BUILT_IN_EXP:
    case BUILT_IN_EXPF:
    case BUILT_IN_EXPL:
    case BUILT_IN_LOG:
    case BUILT_IN_LOGF:
    case BUILT_IN_LOGL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
      /* Treat these like sqrt only if unsafe math optimizations are allowed,
	 because of possible accuracy problems.  Note the deliberate
	 fallthrough into the sqrt/rounding group below.  */
      if (! flag_unsafe_math_optimizations)
	break;
    case BUILT_IN_SQRT:
    case BUILT_IN_SQRTF:
    case BUILT_IN_SQRTL:
    case BUILT_IN_FLOOR:
    case BUILT_IN_FLOORF:
    case BUILT_IN_FLOORL:
    case BUILT_IN_CEIL:
    case BUILT_IN_CEILF:
    case BUILT_IN_CEILL:
    case BUILT_IN_TRUNC:
    case BUILT_IN_TRUNCF:
    case BUILT_IN_TRUNCL:
    case BUILT_IN_ROUND:
    case BUILT_IN_ROUNDF:
    case BUILT_IN_ROUNDL:
    case BUILT_IN_NEARBYINT:
    case BUILT_IN_NEARBYINTF:
    case BUILT_IN_NEARBYINTL:
      target = expand_builtin_mathfn (exp, target, subtarget);
      if (target)
	return target;
      break;

    case BUILT_IN_POW:
    case BUILT_IN_POWF:
    case BUILT_IN_POWL:
    case BUILT_IN_ATAN2:
    case BUILT_IN_ATAN2F:
    case BUILT_IN_ATAN2L:
      if (! flag_unsafe_math_optimizations)
	break;
      target = expand_builtin_mathfn_2 (exp, target, subtarget);
      if (target)
	return target;
      break;

    case BUILT_IN_APPLY_ARGS:
      return expand_builtin_apply_args ();

      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
	 FUNCTION with a copy of the parameters described by
	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
	 allocated on the stack into which is stored all the registers
	 that might possibly be used for returning the result of a
	 function.  ARGUMENTS is the value returned by
	 __builtin_apply_args.  ARGSIZE is the number of bytes of
	 arguments that must be copied.  ??? How should this value be
	 computed?  We'll also need a safe worst case value for varargs
	 functions.  */
    case BUILT_IN_APPLY:
      if (!validate_arglist (arglist, POINTER_TYPE,
			     POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
	  && !validate_arglist (arglist, REFERENCE_TYPE,
				POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	return const0_rtx;
      else
	{
	  int i;
	  tree t;
	  rtx ops[3];

	  for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
	    ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);

	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
	}

      /* __builtin_return (RESULT) causes the function to return the
	 value described by RESULT.  RESULT is address of the block of
	 memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
	expand_builtin_return (expand_expr (TREE_VALUE (arglist),
					    NULL_RTX, VOIDmode, 0));
      return const0_rtx;

    case BUILT_IN_SAVEREGS:
      return expand_builtin_saveregs ();

    case BUILT_IN_ARGS_INFO:
      return expand_builtin_args_info (arglist);

      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      return expand_builtin_next_arg (arglist);

    case BUILT_IN_CLASSIFY_TYPE:
      return expand_builtin_classify_type (arglist);

    case BUILT_IN_CONSTANT_P:
      return expand_builtin_constant_p (arglist, target_mode);

    case BUILT_IN_FRAME_ADDRESS:
    case BUILT_IN_RETURN_ADDRESS:
      return expand_builtin_frame_address (fndecl, arglist);

      /* Returns the address of the area where the structure is returned.
	 0 otherwise.  */
    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      if (arglist != 0
	  || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
	  || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
	return const0_rtx;
      else
	return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);

    case BUILT_IN_ALLOCA:
      target = expand_builtin_alloca (arglist, target);
      if (target)
	return target;
      break;

    case BUILT_IN_FFS:
    case BUILT_IN_FFSL:
    case BUILT_IN_FFSLL:
      target = expand_builtin_unop (target_mode, arglist, target,
				    subtarget, ffs_optab);
      if (target)
	return target;
      break;

    case BUILT_IN_CLZ:
    case BUILT_IN_CLZL:
    case BUILT_IN_CLZLL:
      target = expand_builtin_unop (target_mode, arglist, target,
				    subtarget, clz_optab);
      if (target)
	return target;
      break;

    case BUILT_IN_CTZ:
    case BUILT_IN_CTZL:
    case BUILT_IN_CTZLL:
      target = expand_builtin_unop (target_mode, arglist, target,
				    subtarget, ctz_optab);
      if (target)
	return target;
      break;

    case BUILT_IN_POPCOUNT:
    case BUILT_IN_POPCOUNTL:
    case BUILT_IN_POPCOUNTLL:
      target = expand_builtin_unop (target_mode, arglist, target,
				    subtarget, popcount_optab);
      if (target)
	return target;
      break;

    case BUILT_IN_PARITY:
    case BUILT_IN_PARITYL:
    case BUILT_IN_PARITYLL:
      target = expand_builtin_unop (target_mode, arglist, target,
				    subtarget, parity_optab);
      if (target)
	return target;
      break;

    case BUILT_IN_STRLEN:
      target = expand_builtin_strlen (arglist, target, target_mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCPY:
      target = expand_builtin_strcpy (arglist, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCPY:
      target = expand_builtin_strncpy (arglist, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STPCPY:
      target = expand_builtin_stpcpy (arglist, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCAT:
      target = expand_builtin_strcat (arglist, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCAT:
      target = expand_builtin_strncat (arglist, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRSPN:
      target = expand_builtin_strspn (arglist, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCSPN:
      target = expand_builtin_strcspn (arglist, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRSTR:
      target = expand_builtin_strstr (arglist, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRPBRK:
      target = expand_builtin_strpbrk (arglist, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      target = expand_builtin_strchr (arglist, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      target = expand_builtin_strrchr (arglist, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCPY:
      target = expand_builtin_memcpy (arglist, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMPCPY:
      target = expand_builtin_mempcpy (arglist, target, mode, /*endp=*/ 1);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMMOVE:
      target = expand_builtin_memmove (arglist, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_BCOPY:
      target = expand_builtin_bcopy (arglist);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMSET:
      target = expand_builtin_memset (arglist, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_BZERO:
      target = expand_builtin_bzero (arglist);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCMP:
      target = expand_builtin_strcmp (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRNCMP:
      target = expand_builtin_strncmp (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      target = expand_builtin_memcmp (exp, arglist, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_SETJMP:
      target = expand_builtin_setjmp (arglist, target);
      if (target)
	return target;
      break;

      /* __builtin_longjmp is passed a pointer to an array of five words.
	 It's similar to the C library longjmp function but works with
	 __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	break;
      else
	{
	  rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
				      VOIDmode, 0);
	  rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
				   NULL_RTX, VOIDmode, 0);

	  if (value != const1_rtx)
	    {
	      error ("__builtin_longjmp second argument must be 1");
	      return const0_rtx;
	    }

	  expand_builtin_longjmp (buf_addr, value);
	  return const0_rtx;
	}

    case BUILT_IN_TRAP:
      expand_builtin_trap ();
      return const0_rtx;

    case BUILT_IN_FPUTS:
      target = expand_builtin_fputs (arglist, ignore,/*unlocked=*/ 0);
      if (target)
	return target;
      break;
    case BUILT_IN_FPUTS_UNLOCKED:
      target = expand_builtin_fputs (arglist, ignore,/*unlocked=*/ 1);
      if (target)
	return target;
      break;

      /* Various hooks for the DWARF 2 __throw routine.  */
    case BUILT_IN_UNWIND_INIT:
      expand_builtin_unwind_init ();
      return const0_rtx;
    case BUILT_IN_DWARF_CFA:
      return virtual_cfa_rtx;
#ifdef DWARF2_UNWIND_INFO
    case BUILT_IN_DWARF_SP_COLUMN:
      return expand_builtin_dwarf_sp_column ();
    case BUILT_IN_INIT_DWARF_REG_SIZES:
      expand_builtin_init_dwarf_reg_sizes (TREE_VALUE (arglist));
      return const0_rtx;
#endif
    case BUILT_IN_FROB_RETURN_ADDR:
      return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
    case BUILT_IN_EXTRACT_RETURN_ADDR:
      return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
    case BUILT_IN_EH_RETURN:
      expand_builtin_eh_return (TREE_VALUE (arglist),
				TREE_VALUE (TREE_CHAIN (arglist)));
      return const0_rtx;
#ifdef EH_RETURN_DATA_REGNO
    case BUILT_IN_EH_RETURN_DATA_REGNO:
      return expand_builtin_eh_return_data_regno (arglist);
#endif
    case BUILT_IN_VA_START:
    case BUILT_IN_STDARG_START:
      return expand_builtin_va_start (arglist);
    case BUILT_IN_VA_END:
      return expand_builtin_va_end (arglist);
    case BUILT_IN_VA_COPY:
      return expand_builtin_va_copy (arglist);
    case BUILT_IN_EXPECT:
      return expand_builtin_expect (arglist, target);
    case BUILT_IN_PREFETCH:
      expand_builtin_prefetch (arglist);
      return const0_rtx;


    default:	/* just do library call, if unknown builtin */
      if (!DECL_ASSEMBLER_NAME_SET_P (fndecl))
	error ("built-in function `%s' not currently supported",
	       IDENTIFIER_POINTER (DECL_NAME (fndecl)));
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
4846
4847 /* Determine whether a tree node represents a call to a built-in
4848 math function. If the tree T is a call to a built-in function
4849 taking a single real argument, then the return value is the
4850 DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT. Otherwise
4851 the return value is END_BUILTINS. */
4852
4853 enum built_in_function
4854 builtin_mathfn_code (tree t)
4855 {
4856 tree fndecl, arglist;
4857
4858 if (TREE_CODE (t) != CALL_EXPR
4859 || TREE_CODE (TREE_OPERAND (t, 0)) != ADDR_EXPR)
4860 return END_BUILTINS;
4861
4862 fndecl = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
4863 if (TREE_CODE (fndecl) != FUNCTION_DECL
4864 || ! DECL_BUILT_IN (fndecl)
4865 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
4866 return END_BUILTINS;
4867
4868 arglist = TREE_OPERAND (t, 1);
4869 if (! arglist
4870 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
4871 return END_BUILTINS;
4872
4873 arglist = TREE_CHAIN (arglist);
4874 switch (DECL_FUNCTION_CODE (fndecl))
4875 {
4876 case BUILT_IN_POW:
4877 case BUILT_IN_POWF:
4878 case BUILT_IN_POWL:
4879 case BUILT_IN_ATAN2:
4880 case BUILT_IN_ATAN2F:
4881 case BUILT_IN_ATAN2L:
4882 if (! arglist
4883 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE
4884 || TREE_CHAIN (arglist))
4885 return END_BUILTINS;
4886 break;
4887
4888 default:
4889 if (arglist)
4890 return END_BUILTINS;
4891 break;
4892 }
4893
4894 return DECL_FUNCTION_CODE (fndecl);
4895 }
4896
4897 /* Fold a call to __builtin_constant_p, if we know it will evaluate to a
4898 constant. ARGLIST is the argument list of the call. */
4899
4900 static tree
4901 fold_builtin_constant_p (tree arglist)
4902 {
4903 if (arglist == 0)
4904 return 0;
4905
4906 arglist = TREE_VALUE (arglist);
4907
4908 /* We return 1 for a numeric type that's known to be a constant
4909 value at compile-time or for an aggregate type that's a
4910 literal constant. */
4911 STRIP_NOPS (arglist);
4912
4913 /* If we know this is a constant, emit the constant of one. */
4914 if (TREE_CODE_CLASS (TREE_CODE (arglist)) == 'c'
4915 || (TREE_CODE (arglist) == CONSTRUCTOR
4916 && TREE_CONSTANT (arglist))
4917 || (TREE_CODE (arglist) == ADDR_EXPR
4918 && TREE_CODE (TREE_OPERAND (arglist, 0)) == STRING_CST))
4919 return integer_one_node;
4920
4921 /* If we aren't going to be running CSE or this expression
4922 has side effects, show we don't know it to be a constant.
4923 Likewise if it's a pointer or aggregate type since in those
4924 case we only want literals, since those are only optimized
4925 when generating RTL, not later.
4926 And finally, if we are compiling an initializer, not code, we
4927 need to return a definite result now; there's not going to be any
4928 more optimization done. */
4929 if (TREE_SIDE_EFFECTS (arglist) || cse_not_expected
4930 || AGGREGATE_TYPE_P (TREE_TYPE (arglist))
4931 || POINTER_TYPE_P (TREE_TYPE (arglist))
4932 || cfun == 0)
4933 return integer_zero_node;
4934
4935 return 0;
4936 }
4937
4938 /* Fold a call to __builtin_classify_type. */
4939
4940 static tree
4941 fold_builtin_classify_type (tree arglist)
4942 {
4943 if (arglist == 0)
4944 return build_int_2 (no_type_class, 0);
4945
4946 return build_int_2 (type_to_class (TREE_TYPE (TREE_VALUE (arglist))), 0);
4947 }
4948
4949 /* Fold a call to __builtin_inf or __builtin_huge_val. */
4950
4951 static tree
4952 fold_builtin_inf (tree type, int warn)
4953 {
4954 REAL_VALUE_TYPE real;
4955
4956 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
4957 warning ("target format does not support infinity");
4958
4959 real_inf (&real);
4960 return build_real (type, real);
4961 }
4962
4963 /* Fold a call to __builtin_nan or __builtin_nans. */
4964
4965 static tree
4966 fold_builtin_nan (tree arglist, tree type, int quiet)
4967 {
4968 REAL_VALUE_TYPE real;
4969 const char *str;
4970
4971 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4972 return 0;
4973 str = c_getstr (TREE_VALUE (arglist));
4974 if (!str)
4975 return 0;
4976
4977 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
4978 return 0;
4979
4980 return build_real (type, real);
4981 }
4982
4983 /* EXP is assumed to me builtin call where truncation can be propagated
4984 across (for instance floor((double)f) == (double)floorf (f).
4985 Do the transformation. */
4986 static tree
4987 fold_trunc_transparent_mathfn (tree exp)
4988 {
4989 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4990 tree arglist = TREE_OPERAND (exp, 1);
4991 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4992
4993 if (optimize && validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
4994 {
4995 tree arg0 = strip_float_extensions (TREE_VALUE (arglist));
4996 tree ftype = TREE_TYPE (exp);
4997 tree newtype = TREE_TYPE (arg0);
4998 tree decl;
4999
5000 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
5001 && (decl = mathfn_built_in (newtype, fcode)))
5002 {
5003 arglist =
5004 build_tree_list (NULL_TREE, fold (convert (newtype, arg0)));
5005 return convert (ftype,
5006 build_function_call_expr (decl, arglist));
5007 }
5008 }
5009 return 0;
5010 }
5011
/* Fold function call to builtin cabs, cabsf or cabsl.  FNDECL is the
   function's DECL, ARGLIST is the argument list and TYPE is the return
   type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cabs (tree fndecl, tree arglist, tree type)
{
  tree arg;

  /* Exactly one argument is required.  */
  if (!arglist || TREE_CHAIN (arglist))
    return NULL_TREE;

  /* The argument must be a complex value whose parts are reals.  */
  arg = TREE_VALUE (arglist);
  if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Evaluate cabs of a constant at compile-time.  */
  if (flag_unsafe_math_optimizations
      && TREE_CODE (arg) == COMPLEX_CST
      && TREE_CODE (TREE_REALPART (arg)) == REAL_CST
      && TREE_CODE (TREE_IMAGPART (arg)) == REAL_CST
      && ! TREE_CONSTANT_OVERFLOW (TREE_REALPART (arg))
      && ! TREE_CONSTANT_OVERFLOW (TREE_IMAGPART (arg)))
    {
      REAL_VALUE_TYPE r, i;

      r = TREE_REAL_CST (TREE_REALPART (arg));
      i = TREE_REAL_CST (TREE_IMAGPART (arg));

      /* Compute sqrt (r*r + i*i) in TYPE's mode.  */
      real_arithmetic (&r, MULT_EXPR, &r, &r);
      real_arithmetic (&i, MULT_EXPR, &i, &i);
      real_arithmetic (&r, PLUS_EXPR, &r, &i);
      /* Fold to a constant only when the square root is exact, or when
	 an inexact result cannot trap.  */
      if (real_sqrt (&r, TYPE_MODE (type), &r)
	  || ! flag_trapping_math)
	return build_real (type, r);
    }

  /* If either part is zero, cabs is fabs of the other.  */
  if (TREE_CODE (arg) == COMPLEX_EXPR
      && real_zerop (TREE_OPERAND (arg, 0)))
    return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg, 1)));
  if (TREE_CODE (arg) == COMPLEX_EXPR
      && real_zerop (TREE_OPERAND (arg, 1)))
    return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg, 0)));

  /* With -funsafe-math-optimizations, expand cabs inline as
     sqrt (re*re + im*im), using the sqrt builtin matching this
     cabs variant's precision.  */
  if (flag_unsafe_math_optimizations)
    {
      enum built_in_function fcode;
      tree sqrtfn;

      fcode = DECL_FUNCTION_CODE (fndecl);
      if (fcode == BUILT_IN_CABS)
	sqrtfn = implicit_built_in_decls[BUILT_IN_SQRT];
      else if (fcode == BUILT_IN_CABSF)
	sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTF];
      else if (fcode == BUILT_IN_CABSL)
	sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTL];
      else
	sqrtfn = NULL_TREE;

      if (sqrtfn != NULL_TREE)
	{
	  tree rpart, ipart, result, arglist;

	  rpart = fold (build1 (REALPART_EXPR, type, arg));
	  ipart = fold (build1 (IMAGPART_EXPR, type, arg));

	  /* Each part is used twice below; wrap in SAVE_EXPR so side
	     effects are evaluated only once.  */
	  rpart = save_expr (rpart);
	  ipart = save_expr (ipart);

	  result = fold (build (PLUS_EXPR, type,
				fold (build (MULT_EXPR, type,
					     rpart, rpart)),
				fold (build (MULT_EXPR, type,
					     ipart, ipart))));

	  arglist = build_tree_list (NULL_TREE, result);
	  return build_function_call_expr (sqrtfn, arglist);
	}
    }

  return NULL_TREE;
}
5096
/* Used by constant folding to eliminate some builtin calls early.  EXP is
   the CALL_EXPR of a call to a builtin function.  Returns the simplified
   replacement tree, or 0 (NULL_TREE) when no simplification applies.  */

tree
fold_builtin (tree exp)
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  /* The builtin's return type, used when building replacement nodes.  */
  tree type = TREE_TYPE (TREE_TYPE (fndecl));

  /* Machine-dependent builtins are never folded here.  */
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return 0;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_CONSTANT_P:
      return fold_builtin_constant_p (arglist);

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arglist);

    case BUILT_IN_STRLEN:
      /* Fold strlen of a string literal to its constant length.  */
      if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
	{
	  tree len = c_strlen (TREE_VALUE (arglist));
	  if (len)
	    {
	      /* Convert from the internal "sizetype" type to "size_t".  */
	      if (size_type_node)
		len = convert (size_type_node, len);
	      return len;
	    }
	}
      break;

    case BUILT_IN_FABS:
    case BUILT_IN_FABSF:
    case BUILT_IN_FABSL:
      /* fabs becomes ABS_EXPR, which fold may reduce further.  */
      if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
	return fold (build1 (ABS_EXPR, type, TREE_VALUE (arglist)));
      break;

    case BUILT_IN_CABS:
    case BUILT_IN_CABSF:
    case BUILT_IN_CABSL:
      return fold_builtin_cabs (fndecl, arglist, type);

    case BUILT_IN_SQRT:
    case BUILT_IN_SQRTF:
    case BUILT_IN_SQRTL:
      if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
	{
	  enum built_in_function fcode;
	  tree arg = TREE_VALUE (arglist);

	  /* Optimize sqrt of constant value.  */
	  if (TREE_CODE (arg) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (arg))
	    {
	      REAL_VALUE_TYPE r, x;

	      x = TREE_REAL_CST (arg);
	      /* Fold when the result is exact, or when an inexact
		 result can neither trap nor set errno.  */
	      if (real_sqrt (&r, TYPE_MODE (type), &x)
		  || (!flag_trapping_math && !flag_errno_math))
		return build_real (type, r);
	    }

	  /* Optimize sqrt(exp(x)) = exp(x*0.5).  */
	  fcode = builtin_mathfn_code (arg);
	  if (flag_unsafe_math_optimizations
	      && (fcode == BUILT_IN_EXP
		  || fcode == BUILT_IN_EXPF
		  || fcode == BUILT_IN_EXPL))
	    {
	      tree expfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	      arg = fold (build (MULT_EXPR, type,
				 TREE_VALUE (TREE_OPERAND (arg, 1)),
				 build_real (type, dconsthalf)));
	      arglist = build_tree_list (NULL_TREE, arg);
	      return build_function_call_expr (expfn, arglist);
	    }

	  /* Optimize sqrt(pow(x,y)) = pow(x,y*0.5).  */
	  if (flag_unsafe_math_optimizations
	      && (fcode == BUILT_IN_POW
		  || fcode == BUILT_IN_POWF
		  || fcode == BUILT_IN_POWL))
	    {
	      tree powfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	      tree arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
	      tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
	      tree narg1 = fold (build (MULT_EXPR, type, arg1,
					build_real (type, dconsthalf)));
	      arglist = tree_cons (NULL_TREE, arg0,
				   build_tree_list (NULL_TREE, narg1));
	      return build_function_call_expr (powfn, arglist);
	    }
	}
      break;

    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
      if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
	{
	  tree arg = TREE_VALUE (arglist);

	  /* Optimize sin(0.0) = 0.0.  */
	  if (real_zerop (arg))
	    return arg;
	}
      break;

    case BUILT_IN_COS:
    case BUILT_IN_COSF:
    case BUILT_IN_COSL:
      if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
	{
	  tree arg = TREE_VALUE (arglist);

	  /* Optimize cos(0.0) = 1.0.  */
	  if (real_zerop (arg))
	    return build_real (type, dconst1);

	  /* Optimize cos(-x) into cos(x).  */
	  if (TREE_CODE (arg) == NEGATE_EXPR)
	    {
	      tree arglist = build_tree_list (NULL_TREE,
					      TREE_OPERAND (arg, 0));
	      return build_function_call_expr (fndecl, arglist);
	    }
	}
      break;

    case BUILT_IN_EXP:
    case BUILT_IN_EXPF:
    case BUILT_IN_EXPL:
      if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
	{
	  enum built_in_function fcode;
	  tree arg = TREE_VALUE (arglist);

	  /* Optimize exp(0.0) = 1.0.  */
	  if (real_zerop (arg))
	    return build_real (type, dconst1);

	  /* Optimize exp(1.0) = e.  */
	  if (real_onep (arg))
	    {
	      REAL_VALUE_TYPE cst;

	      /* The e/pi constant tables are built lazily.  */
	      if (! builtin_dconsts_init)
		init_builtin_dconsts ();
	      real_convert (&cst, TYPE_MODE (type), &dconste);
	      return build_real (type, cst);
	    }

	  /* Attempt to evaluate exp at compile-time.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (arg))
	    {
	      REAL_VALUE_TYPE cint;
	      REAL_VALUE_TYPE c;
	      HOST_WIDE_INT n;

	      c = TREE_REAL_CST (arg);
	      n = real_to_integer (&c);
	      real_from_integer (&cint, VOIDmode, n,
				 n < 0 ? -1 : 0, 0);
	      /* Only fold when the argument is exactly an integer,
		 so the result is an integral power of e.  */
	      if (real_identical (&c, &cint))
		{
		  REAL_VALUE_TYPE x;

		  if (! builtin_dconsts_init)
		    init_builtin_dconsts ();
		  real_powi (&x, TYPE_MODE (type), &dconste, n);
		  return build_real (type, x);
		}
	    }

	  /* Optimize exp(log(x)) = x.  */
	  fcode = builtin_mathfn_code (arg);
	  if (flag_unsafe_math_optimizations
	      && (fcode == BUILT_IN_LOG
		  || fcode == BUILT_IN_LOGF
		  || fcode == BUILT_IN_LOGL))
	    return TREE_VALUE (TREE_OPERAND (arg, 1));
	}
      break;

    case BUILT_IN_LOG:
    case BUILT_IN_LOGF:
    case BUILT_IN_LOGL:
      if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
	{
	  enum built_in_function fcode;
	  tree arg = TREE_VALUE (arglist);

	  /* Optimize log(1.0) = 0.0.  */
	  if (real_onep (arg))
	    return build_real (type, dconst0);

	  /* Optimize log(exp(x)) = x.  */
	  fcode = builtin_mathfn_code (arg);
	  if (flag_unsafe_math_optimizations
	      && (fcode == BUILT_IN_EXP
		  || fcode == BUILT_IN_EXPF
		  || fcode == BUILT_IN_EXPL))
	    return TREE_VALUE (TREE_OPERAND (arg, 1));

	  /* Optimize log(sqrt(x)) = log(x)*0.5.  */
	  if (flag_unsafe_math_optimizations
	      && (fcode == BUILT_IN_SQRT
		  || fcode == BUILT_IN_SQRTF
		  || fcode == BUILT_IN_SQRTL))
	    {
	      /* Reuse the inner sqrt call's argument list to build
		 the log(x) call.  */
	      tree logfn = build_function_call_expr (fndecl,
						     TREE_OPERAND (arg, 1));
	      return fold (build (MULT_EXPR, type, logfn,
				  build_real (type, dconsthalf)));
	    }

	  /* Optimize log(pow(x,y)) = y*log(x).  */
	  if (flag_unsafe_math_optimizations
	      && (fcode == BUILT_IN_POW
		  || fcode == BUILT_IN_POWF
		  || fcode == BUILT_IN_POWL))
	    {
	      tree arg0, arg1, logfn;

	      arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
	      arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
	      arglist = build_tree_list (NULL_TREE, arg0);
	      logfn = build_function_call_expr (fndecl, arglist);
	      return fold (build (MULT_EXPR, type, arg1, logfn));
	    }
	}
      break;

    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
	{
	  enum built_in_function fcode;
	  tree arg = TREE_VALUE (arglist);

	  /* Optimize tan(0.0) = 0.0.  */
	  if (real_zerop (arg))
	    return arg;

	  /* Optimize tan(atan(x)) = x.  */
	  fcode = builtin_mathfn_code (arg);
	  if (flag_unsafe_math_optimizations
	      && (fcode == BUILT_IN_ATAN
		  || fcode == BUILT_IN_ATANF
		  || fcode == BUILT_IN_ATANL))
	    return TREE_VALUE (TREE_OPERAND (arg, 1));
	}
      break;

    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
      if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
	{
	  tree arg = TREE_VALUE (arglist);

	  /* Optimize atan(0.0) = 0.0.  */
	  if (real_zerop (arg))
	    return arg;

	  /* Optimize atan(1.0) = pi/4.  */
	  if (real_onep (arg))
	    {
	      REAL_VALUE_TYPE cst;

	      if (! builtin_dconsts_init)
		init_builtin_dconsts ();
	      real_convert (&cst, TYPE_MODE (type), &dconstpi);
	      /* Divide pi by 4 by decrementing the binary exponent
		 by 2 (pokes directly at REAL_VALUE_TYPE internals).  */
	      cst.exp -= 2;
	      return build_real (type, cst);
	    }
	}
      break;

    case BUILT_IN_POW:
    case BUILT_IN_POWF:
    case BUILT_IN_POWL:
      if (validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
	{
	  enum built_in_function fcode;
	  tree arg0 = TREE_VALUE (arglist);
	  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));

	  /* Optimize pow(1.0,y) = 1.0.  */
	  if (real_onep (arg0))
	    return omit_one_operand (type, build_real (type, dconst1), arg1);

	  /* Simplifications for a constant exponent.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && ! TREE_CONSTANT_OVERFLOW (arg1))
	    {
	      REAL_VALUE_TYPE c;
	      c = TREE_REAL_CST (arg1);

	      /* Optimize pow(x,0.0) = 1.0.  */
	      if (REAL_VALUES_EQUAL (c, dconst0))
		return omit_one_operand (type, build_real (type, dconst1),
					 arg0);

	      /* Optimize pow(x,1.0) = x.  */
	      if (REAL_VALUES_EQUAL (c, dconst1))
		return arg0;

	      /* Optimize pow(x,-1.0) = 1.0/x.  */
	      if (REAL_VALUES_EQUAL (c, dconstm1))
		return fold (build (RDIV_EXPR, type,
				    build_real (type, dconst1),
				    arg0));

	      /* Optimize pow(x,2.0) = x*x.  The SAVE_EXPR requires
		 a local binding level and a placeholder-free arg.  */
	      if (REAL_VALUES_EQUAL (c, dconst2)
		  && (*lang_hooks.decls.global_bindings_p) () == 0
		  && ! CONTAINS_PLACEHOLDER_P (arg0))
		{
		  arg0 = save_expr (arg0);
		  return fold (build (MULT_EXPR, type, arg0, arg0));
		}

	      /* Optimize pow(x,-2.0) = 1.0/(x*x).  */
	      if (flag_unsafe_math_optimizations
		  && REAL_VALUES_EQUAL (c, dconstm2)
		  && (*lang_hooks.decls.global_bindings_p) () == 0
		  && ! CONTAINS_PLACEHOLDER_P (arg0))
		{
		  arg0 = save_expr (arg0);
		  return fold (build (RDIV_EXPR, type,
				      build_real (type, dconst1),
				      fold (build (MULT_EXPR, type,
						   arg0, arg0))));
		}

	      /* Optimize pow(x,0.5) = sqrt(x).  */
	      if (flag_unsafe_math_optimizations
		  && REAL_VALUES_EQUAL (c, dconsthalf))
		{
		  tree sqrtfn;

		  /* Pick the sqrt variant matching this pow's
		     precision.  */
		  fcode = DECL_FUNCTION_CODE (fndecl);
		  if (fcode == BUILT_IN_POW)
		    sqrtfn = implicit_built_in_decls[BUILT_IN_SQRT];
		  else if (fcode == BUILT_IN_POWF)
		    sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTF];
		  else if (fcode == BUILT_IN_POWL)
		    sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTL];
		  else
		    sqrtfn = NULL_TREE;

		  if (sqrtfn != NULL_TREE)
		    {
		      tree arglist = build_tree_list (NULL_TREE, arg0);
		      return build_function_call_expr (sqrtfn, arglist);
		    }
		}

	      /* Attempt to evaluate pow at compile-time.  */
	      if (TREE_CODE (arg0) == REAL_CST
		  && ! TREE_CONSTANT_OVERFLOW (arg0))
		{
		  REAL_VALUE_TYPE cint;
		  HOST_WIDE_INT n;

		  n = real_to_integer (&c);
		  real_from_integer (&cint, VOIDmode, n,
				     n < 0 ? -1 : 0, 0);
		  /* Only fold for exactly-integral exponents.  */
		  if (real_identical (&c, &cint))
		    {
		      REAL_VALUE_TYPE x;
		      bool inexact;

		      x = TREE_REAL_CST (arg0);
		      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
		      /* Inexact results are only substituted under
			 -funsafe-math-optimizations.  */
		      if (flag_unsafe_math_optimizations || !inexact)
			return build_real (type, x);
		    }
		}
	    }

	  /* Optimize pow(exp(x),y) = exp(x*y).  */
	  fcode = builtin_mathfn_code (arg0);
	  if (flag_unsafe_math_optimizations
	      && (fcode == BUILT_IN_EXP
		  || fcode == BUILT_IN_EXPF
		  || fcode == BUILT_IN_EXPL))
	    {
	      tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	      tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
	      arg = fold (build (MULT_EXPR, type, arg, arg1));
	      arglist = build_tree_list (NULL_TREE, arg);
	      return build_function_call_expr (expfn, arglist);
	    }

	  /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
	  if (flag_unsafe_math_optimizations
	      && (fcode == BUILT_IN_SQRT
		  || fcode == BUILT_IN_SQRTF
		  || fcode == BUILT_IN_SQRTL))
	    {
	      tree narg0 = TREE_VALUE (TREE_OPERAND (arg0, 1));
	      tree narg1 = fold (build (MULT_EXPR, type, arg1,
					build_real (type, dconsthalf)));

	      arglist = tree_cons (NULL_TREE, narg0,
				   build_tree_list (NULL_TREE, narg1));
	      return build_function_call_expr (fndecl, arglist);
	    }

	  /* Optimize pow(pow(x,y),z) = pow(x,y*z).  */
	  if (flag_unsafe_math_optimizations
	      && (fcode == BUILT_IN_POW
		  || fcode == BUILT_IN_POWF
		  || fcode == BUILT_IN_POWL))
	    {
	      tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
	      tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
	      tree narg1 = fold (build (MULT_EXPR, type, arg01, arg1));
	      arglist = tree_cons (NULL_TREE, arg00,
				   build_tree_list (NULL_TREE, narg1));
	      return build_function_call_expr (fndecl, arglist);
	    }
	}
      break;

    case BUILT_IN_INF:
    case BUILT_IN_INFF:
    case BUILT_IN_INFL:
      /* __builtin_inf warns when infinities are unavailable (true).  */
      return fold_builtin_inf (type, true);

    case BUILT_IN_HUGE_VAL:
    case BUILT_IN_HUGE_VALF:
    case BUILT_IN_HUGE_VALL:
      /* HUGE_VAL folds silently even without infinities (false).  */
      return fold_builtin_inf (type, false);

    case BUILT_IN_NAN:
    case BUILT_IN_NANF:
    case BUILT_IN_NANL:
      /* Quiet NaN.  */
      return fold_builtin_nan (arglist, type, true);

    case BUILT_IN_NANS:
    case BUILT_IN_NANSF:
    case BUILT_IN_NANSL:
      /* Signalling NaN.  */
      return fold_builtin_nan (arglist, type, false);

    case BUILT_IN_FLOOR:
    case BUILT_IN_FLOORF:
    case BUILT_IN_FLOORL:
    case BUILT_IN_CEIL:
    case BUILT_IN_CEILF:
    case BUILT_IN_CEILL:
    case BUILT_IN_TRUNC:
    case BUILT_IN_TRUNCF:
    case BUILT_IN_TRUNCL:
    case BUILT_IN_ROUND:
    case BUILT_IN_ROUNDF:
    case BUILT_IN_ROUNDL:
    case BUILT_IN_NEARBYINT:
    case BUILT_IN_NEARBYINTF:
    case BUILT_IN_NEARBYINTL:
      /* These rounding functions commute with float truncation.  */
      return fold_trunc_transparent_mathfn (exp);

    default:
      break;
    }

  return 0;
}
5574
5575 /* Conveniently construct a function call expression. */
5576
5577 tree
5578 build_function_call_expr (tree fn, tree arglist)
5579 {
5580 tree call_expr;
5581
5582 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
5583 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
5584 call_expr, arglist);
5585 TREE_SIDE_EFFECTS (call_expr) = 1;
5586 return fold (call_expr);
5587 }
5588
5589 /* This function validates the types of a function call argument list
5590 represented as a tree chain of parameters against a specified list
5591 of tree_codes. If the last specifier is a 0, that represents an
5592 ellipses, otherwise the last specifier must be a VOID_TYPE. */
5593
5594 static int
5595 validate_arglist (tree arglist, ...)
5596 {
5597 enum tree_code code;
5598 int res = 0;
5599 va_list ap;
5600
5601 va_start (ap, arglist);
5602
5603 do
5604 {
5605 code = va_arg (ap, enum tree_code);
5606 switch (code)
5607 {
5608 case 0:
5609 /* This signifies an ellipses, any further arguments are all ok. */
5610 res = 1;
5611 goto end;
5612 case VOID_TYPE:
5613 /* This signifies an endlink, if no arguments remain, return
5614 true, otherwise return false. */
5615 res = arglist == 0;
5616 goto end;
5617 default:
5618 /* If no parameters remain or the parameter's code does not
5619 match the specified code, return false. Otherwise continue
5620 checking any remaining arguments. */
5621 if (arglist == 0
5622 || code != TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))))
5623 goto end;
5624 break;
5625 }
5626 arglist = TREE_CHAIN (arglist);
5627 }
5628 while (1);
5629
5630 /* We need gotos here since we can only have one VA_CLOSE in a
5631 function. */
5632 end: ;
5633 va_end (ap);
5634
5635 return res;
5636 }
5637
/* Default version of target-specific builtin setup that does nothing.
   Targets with no machine-specific builtins use this hook.  */

void
default_init_builtins (void)
{
}
5644
/* Default target-specific builtin expander that does nothing.  Always
   returns NULL_RTX, signalling that the builtin was not expanded; all
   parameters are ignored.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
5656
/* Instantiate all remaining CONSTANT_P_RTX nodes.  Scans every insn for
   a single_set whose source is a CONSTANT_P_RTX (possibly wrapped in a
   SUBREG) and replaces it with const1_rtx or const0_rtx depending on
   whether its operand is now known constant.  */

void
purge_builtin_constant_p (void)
{
  rtx insn, set, arg, new, note;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (set = single_set (insn)) != NULL_RTX
	/* The conditions below assign SET_SRC (or its SUBREG_REG) to
	   ARG as a side effect of the tests.  */
	&& (GET_CODE (arg = SET_SRC (set)) == CONSTANT_P_RTX
	    || (GET_CODE (arg) == SUBREG
		&& (GET_CODE (arg = SUBREG_REG (arg))
		    == CONSTANT_P_RTX))))
      {
	/* ARG's operand is the expression __builtin_constant_p asked
	   about; resolve it to 1 or 0 now.  */
	arg = XEXP (arg, 0);
	new = CONSTANT_P (arg) ? const1_rtx : const0_rtx;
	validate_change (insn, &SET_SRC (set), new, 0);

	/* Remove the REG_EQUAL note from the insn.  */
	if ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0)
	  remove_note (insn, note);
      }
}
5681
5682 /* Returns true is EXP represents data that would potentially reside
5683 in a readonly section. */
5684
5685 static bool
5686 readonly_data_expr (tree exp)
5687 {
5688 STRIP_NOPS (exp);
5689
5690 if (TREE_CODE (exp) == ADDR_EXPR)
5691 return decl_readonly_section (TREE_OPERAND (exp, 0), 0);
5692 else
5693 return false;
5694 }
This page took 0.29182 seconds and 6 git commands to generate.