1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "expr.h"
37 #include "optabs.h"
38 #include "libfuncs.h"
39 #include "recog.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "toplev.h"
43 #include "predict.h"
44 #include "tm_p.h"
45 #include "target.h"
46 #include "langhooks.h"
47
48 #define CALLED_AS_BUILT_IN(NODE) \
49 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
50
51 /* Register mappings for target machines without register windows. */
52 #ifndef INCOMING_REGNO
53 #define INCOMING_REGNO(OUT) (OUT)
54 #endif
55 #ifndef OUTGOING_REGNO
56 #define OUTGOING_REGNO(IN) (IN)
57 #endif
58
59 #ifndef PAD_VARARGS_DOWN
60 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
61 #endif
62
63 /* Define the names of the builtin function types and codes. */
64 const char *const built_in_class_names[4]
65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
66
67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM) STRINGX(X),
68 const char *const built_in_names[(int) END_BUILTINS] =
69 {
70 #include "builtins.def"
71 };
72 #undef DEF_BUILTIN
73
74 /* Set up an array of _DECL trees and make sure each element is
75 initialized to NULL_TREE. */
76 tree built_in_decls[(int) END_BUILTINS];
77 /* Declarations used when constructing the builtin implicitly in the compiler.
78 It may be NULL_TREE when this is invalid (for instance, the runtime is not
79 required to implement the function call in all cases). */
80 tree implicit_built_in_decls[(int) END_BUILTINS];
81
82 static int get_pointer_alignment PARAMS ((tree, unsigned int));
83 static tree c_strlen PARAMS ((tree));
84 static const char *c_getstr PARAMS ((tree));
85 static rtx c_readstr PARAMS ((const char *,
86 enum machine_mode));
87 static int target_char_cast PARAMS ((tree, char *));
88 static rtx get_memory_rtx PARAMS ((tree));
89 static int apply_args_size PARAMS ((void));
90 static int apply_result_size PARAMS ((void));
91 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
92 static rtx result_vector PARAMS ((int, rtx));
93 #endif
94 static rtx expand_builtin_setjmp PARAMS ((tree, rtx));
95 static void expand_builtin_prefetch PARAMS ((tree));
96 static rtx expand_builtin_apply_args PARAMS ((void));
97 static rtx expand_builtin_apply_args_1 PARAMS ((void));
98 static rtx expand_builtin_apply PARAMS ((rtx, rtx, rtx));
99 static void expand_builtin_return PARAMS ((rtx));
100 static enum type_class type_to_class PARAMS ((tree));
101 static rtx expand_builtin_classify_type PARAMS ((tree));
102 static void expand_errno_check PARAMS ((tree, rtx));
103 static rtx expand_builtin_mathfn PARAMS ((tree, rtx, rtx));
104 static rtx expand_builtin_mathfn_2 PARAMS ((tree, rtx, rtx));
105 static rtx expand_builtin_constant_p PARAMS ((tree));
106 static rtx expand_builtin_args_info PARAMS ((tree));
107 static rtx expand_builtin_next_arg PARAMS ((tree));
108 static rtx expand_builtin_va_start PARAMS ((tree));
109 static rtx expand_builtin_va_end PARAMS ((tree));
110 static rtx expand_builtin_va_copy PARAMS ((tree));
111 static rtx expand_builtin_memcmp PARAMS ((tree, tree, rtx,
112 enum machine_mode));
113 static rtx expand_builtin_strcmp PARAMS ((tree, rtx,
114 enum machine_mode));
115 static rtx expand_builtin_strncmp PARAMS ((tree, rtx,
116 enum machine_mode));
117 static rtx builtin_memcpy_read_str PARAMS ((PTR, HOST_WIDE_INT,
118 enum machine_mode));
119 static rtx expand_builtin_strcat PARAMS ((tree, rtx,
120 enum machine_mode));
121 static rtx expand_builtin_strncat PARAMS ((tree, rtx,
122 enum machine_mode));
123 static rtx expand_builtin_strspn PARAMS ((tree, rtx,
124 enum machine_mode));
125 static rtx expand_builtin_strcspn PARAMS ((tree, rtx,
126 enum machine_mode));
127 static rtx expand_builtin_memcpy PARAMS ((tree, rtx,
128 enum machine_mode, int));
129 static rtx expand_builtin_strcpy PARAMS ((tree, rtx,
130 enum machine_mode));
131 static rtx expand_builtin_stpcpy PARAMS ((tree, rtx,
132 enum machine_mode));
133 static rtx builtin_strncpy_read_str PARAMS ((PTR, HOST_WIDE_INT,
134 enum machine_mode));
135 static rtx expand_builtin_strncpy PARAMS ((tree, rtx,
136 enum machine_mode));
137 static rtx builtin_memset_read_str PARAMS ((PTR, HOST_WIDE_INT,
138 enum machine_mode));
139 static rtx builtin_memset_gen_str PARAMS ((PTR, HOST_WIDE_INT,
140 enum machine_mode));
141 static rtx expand_builtin_memset PARAMS ((tree, rtx,
142 enum machine_mode));
143 static rtx expand_builtin_bzero PARAMS ((tree));
144 static rtx expand_builtin_strlen PARAMS ((tree, rtx));
145 static rtx expand_builtin_strstr PARAMS ((tree, rtx,
146 enum machine_mode));
147 static rtx expand_builtin_strpbrk PARAMS ((tree, rtx,
148 enum machine_mode));
149 static rtx expand_builtin_strchr PARAMS ((tree, rtx,
150 enum machine_mode));
151 static rtx expand_builtin_strrchr PARAMS ((tree, rtx,
152 enum machine_mode));
153 static rtx expand_builtin_alloca PARAMS ((tree, rtx));
154 static rtx expand_builtin_unop PARAMS ((enum machine_mode,
155 tree, rtx, rtx, optab));
156 static rtx expand_builtin_frame_address PARAMS ((tree));
157 static rtx expand_builtin_fputs PARAMS ((tree, int, int));
158 static tree stabilize_va_list PARAMS ((tree, int));
159 static rtx expand_builtin_expect PARAMS ((tree, rtx));
160 static tree fold_builtin_constant_p PARAMS ((tree));
161 static tree fold_builtin_classify_type PARAMS ((tree));
162 static tree fold_builtin_inf PARAMS ((tree, int));
163 static tree fold_builtin_nan PARAMS ((tree, tree, int));
164 static int validate_arglist PARAMS ((tree, ...));
165 static tree fold_trunc_transparent_mathfn PARAMS ((tree));
166
167 /* Return the alignment in bits of EXP, a pointer valued expression.
168 But don't return more than MAX_ALIGN no matter what.
169 The alignment returned is, by default, the alignment of the thing that
170 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
171
172 Otherwise, look at the expression to see if we can do better, i.e., if the
173 expression is actually pointing at an object whose alignment is tighter. */
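/* For illustration (a rough sketch of the cases below): for an expression
   such as (char *) &i + 2, where `i' is an int with DECL_ALIGN of 32 bits,
   the PLUS_EXPR case caps MAX_ALIGN at 16 bits, since an offset of 2 bytes
   can guarantee at most 2-byte alignment, and the ADDR_EXPR case then
   returns MIN (32, 16) = 16.  */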
174
175 static int
176 get_pointer_alignment (exp, max_align)
177 tree exp;
178 unsigned int max_align;
179 {
180 unsigned int align, inner;
181
182 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
183 return 0;
184
185 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
186 align = MIN (align, max_align);
187
188 while (1)
189 {
190 switch (TREE_CODE (exp))
191 {
192 case NOP_EXPR:
193 case CONVERT_EXPR:
194 case NON_LVALUE_EXPR:
195 exp = TREE_OPERAND (exp, 0);
196 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
197 return align;
198
199 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
200 align = MIN (inner, max_align);
201 break;
202
203 case PLUS_EXPR:
204 /* If sum of pointer + int, restrict our maximum alignment to that
205 imposed by the integer. If not, we can't do any better than
206 ALIGN. */
207 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
208 return align;
209
210 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
211 & (max_align / BITS_PER_UNIT - 1))
212 != 0)
213 max_align >>= 1;
214
215 exp = TREE_OPERAND (exp, 0);
216 break;
217
218 case ADDR_EXPR:
219 /* See what we are pointing at and look at its alignment. */
220 exp = TREE_OPERAND (exp, 0);
221 if (TREE_CODE (exp) == FUNCTION_DECL)
222 align = FUNCTION_BOUNDARY;
223 else if (DECL_P (exp))
224 align = DECL_ALIGN (exp);
225 #ifdef CONSTANT_ALIGNMENT
226 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
227 align = CONSTANT_ALIGNMENT (exp, align);
228 #endif
229 return MIN (align, max_align);
230
231 default:
232 return align;
233 }
234 }
235 }
236
237 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
238 value to use, because the string could contain a zero byte in the middle;
239 TREE_STRING_LENGTH is the size of the character array, not the string.
240
241 The value returned is of type `ssizetype'.
242
243 Unfortunately, string_constant can't access the values of const char
244 arrays with initializers, so neither can we do so here. */
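/* A few illustrative cases: applied to the constant "hello" this returns
   ssize_int (5); applied to "foo\0bar" + 4 it returns ssize_int (3), since
   OFFSET is 4 and strlen starts searching there; and applied to
   "foo\0bar" + i with a non-constant `i' it returns 0, because the internal
   zero byte makes the length depend on the unknown offset.  */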
245
246 static tree
247 c_strlen (src)
248 tree src;
249 {
250 tree offset_node;
251 HOST_WIDE_INT offset;
252 int max;
253 const char *ptr;
254
255 src = string_constant (src, &offset_node);
256 if (src == 0)
257 return 0;
258
259 max = TREE_STRING_LENGTH (src) - 1;
260 ptr = TREE_STRING_POINTER (src);
261
262 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
263 {
264 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
265 compute the offset to the following null if we don't know where to
266 start searching for it. */
267 int i;
268
269 for (i = 0; i < max; i++)
270 if (ptr[i] == 0)
271 return 0;
272
273 /* We don't know the starting offset, but we do know that the string
274 has no internal zero bytes. We can assume that the offset falls
275 within the bounds of the string; otherwise, the programmer deserves
276 what he gets. Subtract the offset from the length of the string,
277 and return that. This would perhaps not be valid if we were dealing
278 with named arrays in addition to literal string constants. */
279
280 return size_diffop (size_int (max), offset_node);
281 }
282
283 /* We have a known offset into the string. Start searching there for
284 a null character if we can represent it as a single HOST_WIDE_INT. */
285 if (offset_node == 0)
286 offset = 0;
287 else if (! host_integerp (offset_node, 0))
288 offset = -1;
289 else
290 offset = tree_low_cst (offset_node, 0);
291
292 /* If the offset is known to be out of bounds, warn, and call strlen at
293 runtime. */
294 if (offset < 0 || offset > max)
295 {
296 warning ("offset outside bounds of constant string");
297 return 0;
298 }
299
300 /* Use strlen to search for the first zero byte. Since any strings
301 constructed with build_string will have nulls appended, we win even
302 if we get handed something like (char[4])"abcd".
303
304 Since OFFSET is our starting index into the string, no further
305 calculation is needed. */
306 return ssize_int (strlen (ptr + offset));
307 }
308
309 /* Return a char pointer for a C string if it is a string constant
310 or sum of string constant and integer constant. */
311
312 static const char *
313 c_getstr (src)
314 tree src;
315 {
316 tree offset_node;
317
318 src = string_constant (src, &offset_node);
319 if (src == 0)
320 return 0;
321
322 if (offset_node == 0)
323 return TREE_STRING_POINTER (src);
324 else if (!host_integerp (offset_node, 1)
325 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
326 return 0;
327
328 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
329 }
330
331 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
332 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
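/* For illustration: reading "abcd" in SImode yields (const_int 0x64636261)
   on a little-endian target and (const_int 0x61626364) on a big-endian one;
   the index juggling below also handles targets whose byte and word orders
   differ.  */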
333
334 static rtx
335 c_readstr (str, mode)
336 const char *str;
337 enum machine_mode mode;
338 {
339 HOST_WIDE_INT c[2];
340 HOST_WIDE_INT ch;
341 unsigned int i, j;
342
343 if (GET_MODE_CLASS (mode) != MODE_INT)
344 abort ();
345 c[0] = 0;
346 c[1] = 0;
347 ch = 1;
348 for (i = 0; i < GET_MODE_SIZE (mode); i++)
349 {
350 j = i;
351 if (WORDS_BIG_ENDIAN)
352 j = GET_MODE_SIZE (mode) - i - 1;
353 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
354 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
355 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
356 j *= BITS_PER_UNIT;
357 if (j > 2 * HOST_BITS_PER_WIDE_INT)
358 abort ();
359 if (ch)
360 ch = (unsigned char) str[i];
361 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
362 }
363 return immed_double_const (c[0], c[1], mode);
364 }
365
366 /* Cast a target constant CST to target CHAR and, if that value fits into
367 the host char type, return zero and put that value into the variable
368 pointed to by P. */
369
370 static int
371 target_char_cast (cst, p)
372 tree cst;
373 char *p;
374 {
375 unsigned HOST_WIDE_INT val, hostval;
376
377 if (!host_integerp (cst, 1)
378 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
379 return 1;
380
381 val = tree_low_cst (cst, 1);
382 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
383 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
384
385 hostval = val;
386 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
387 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
388
389 if (val != hostval)
390 return 1;
391
392 *p = hostval;
393 return 0;
394 }
395
396 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
397 times to get the address of either a higher stack frame, or a return
398 address located within it (depending on FNDECL_CODE). */
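/* This serves both __builtin_return_address and __builtin_frame_address;
   for example, __builtin_return_address (0) and __builtin_frame_address (1)
   arrive here with COUNT of 0 and 1, yielding the current function's return
   address and its caller's frame address respectively.  */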
399
400 rtx
401 expand_builtin_return_addr (fndecl_code, count, tem)
402 enum built_in_function fndecl_code;
403 int count;
404 rtx tem;
405 {
406 int i;
407
408 /* Some machines need special handling before we can access
409 arbitrary frames. For example, on the sparc, we must first flush
410 all register windows to the stack. */
411 #ifdef SETUP_FRAME_ADDRESSES
412 if (count > 0)
413 SETUP_FRAME_ADDRESSES ();
414 #endif
415
416 /* On the sparc, the return address is not in the frame, it is in a
417 register. There is no way to access it off of the current frame
418 pointer, but it can be accessed off the previous frame pointer by
419 reading the value from the register window save area. */
420 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
421 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
422 count--;
423 #endif
424
425 /* Scan back COUNT frames to the specified frame. */
426 for (i = 0; i < count; i++)
427 {
428 /* Assume the dynamic chain pointer is in the word that the
429 frame address points to, unless otherwise specified. */
430 #ifdef DYNAMIC_CHAIN_ADDRESS
431 tem = DYNAMIC_CHAIN_ADDRESS (tem);
432 #endif
433 tem = memory_address (Pmode, tem);
434 tem = gen_rtx_MEM (Pmode, tem);
435 set_mem_alias_set (tem, get_frame_alias_set ());
436 tem = copy_to_reg (tem);
437 }
438
439 /* For __builtin_frame_address, return what we've got. */
440 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
441 return tem;
442
443 /* For __builtin_return_address, get the return address from that
444 frame. */
445 #ifdef RETURN_ADDR_RTX
446 tem = RETURN_ADDR_RTX (count, tem);
447 #else
448 tem = memory_address (Pmode,
449 plus_constant (tem, GET_MODE_SIZE (Pmode)));
450 tem = gen_rtx_MEM (Pmode, tem);
451 set_mem_alias_set (tem, get_frame_alias_set ());
452 #endif
453 return tem;
454 }
455
456 /* Alias set used for setjmp buffer. */
457 static HOST_WIDE_INT setjmp_alias_set = -1;
458
459 /* Construct the leading half of a __builtin_setjmp call. Control will
460 return to RECEIVER_LABEL. This is used directly by sjlj exception
461 handling code. */
462
463 void
464 expand_builtin_setjmp_setup (buf_addr, receiver_label)
465 rtx buf_addr;
466 rtx receiver_label;
467 {
468 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
469 rtx stack_save;
470 rtx mem;
471
472 if (setjmp_alias_set == -1)
473 setjmp_alias_set = new_alias_set ();
474
475 #ifdef POINTERS_EXTEND_UNSIGNED
476 if (GET_MODE (buf_addr) != Pmode)
477 buf_addr = convert_memory_address (Pmode, buf_addr);
478 #endif
479
480 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
481
482 emit_queue ();
483
484 /* We store the frame pointer and the address of receiver_label in
485 the buffer and use the rest of it for the stack save area, which
486 is machine-dependent. */
487
488 #ifndef BUILTIN_SETJMP_FRAME_VALUE
489 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
490 #endif
491
492 mem = gen_rtx_MEM (Pmode, buf_addr);
493 set_mem_alias_set (mem, setjmp_alias_set);
494 emit_move_insn (mem, BUILTIN_SETJMP_FRAME_VALUE);
495
496 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
497 set_mem_alias_set (mem, setjmp_alias_set);
498
499 emit_move_insn (validize_mem (mem),
500 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
501
502 stack_save = gen_rtx_MEM (sa_mode,
503 plus_constant (buf_addr,
504 2 * GET_MODE_SIZE (Pmode)));
505 set_mem_alias_set (stack_save, setjmp_alias_set);
506 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
507
508 /* If there is further processing to do, do it. */
509 #ifdef HAVE_builtin_setjmp_setup
510 if (HAVE_builtin_setjmp_setup)
511 emit_insn (gen_builtin_setjmp_setup (buf_addr));
512 #endif
513
514 /* Tell optimize_save_area_alloca that extra work will need to
515 be done during alloca. */
516 current_function_calls_setjmp = 1;
517
518 /* Set this so all the registers get saved in our frame; we need to be
519 able to copy the saved values for any registers from frames we unwind. */
520 current_function_has_nonlocal_label = 1;
521 }
522
523 /* Construct the trailing part of a __builtin_setjmp call.
524 This is used directly by sjlj exception handling code. */
525
526 void
527 expand_builtin_setjmp_receiver (receiver_label)
528 rtx receiver_label ATTRIBUTE_UNUSED;
529 {
530 /* Clobber the FP when we get here, so we have to make sure it's
531 marked as used by this function. */
532 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
533
534 /* Mark the static chain as clobbered here so life information
535 doesn't get messed up for it. */
536 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
537
538 /* Now put in the code to restore the frame pointer, and argument
539 pointer, if needed. The code below is from expand_end_bindings
540 in stmt.c; see detailed documentation there. */
541 #ifdef HAVE_nonlocal_goto
542 if (! HAVE_nonlocal_goto)
543 #endif
544 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
545
546 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
547 if (fixed_regs[ARG_POINTER_REGNUM])
548 {
549 #ifdef ELIMINABLE_REGS
550 size_t i;
551 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
552
553 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
554 if (elim_regs[i].from == ARG_POINTER_REGNUM
555 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
556 break;
557
558 if (i == ARRAY_SIZE (elim_regs))
559 #endif
560 {
561 /* Now restore our arg pointer from the address at which it
562 was saved in our stack frame. */
563 emit_move_insn (virtual_incoming_args_rtx,
564 copy_to_reg (get_arg_pointer_save_area (cfun)));
565 }
566 }
567 #endif
568
569 #ifdef HAVE_builtin_setjmp_receiver
570 if (HAVE_builtin_setjmp_receiver)
571 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
572 else
573 #endif
574 #ifdef HAVE_nonlocal_goto_receiver
575 if (HAVE_nonlocal_goto_receiver)
576 emit_insn (gen_nonlocal_goto_receiver ());
577 else
578 #endif
579 { /* Nothing */ }
580
581 /* @@@ This is a kludge. Not all machine descriptions define a blockage
582 insn, but we must not allow the code we just generated to be reordered
583 by scheduling. Specifically, the update of the frame pointer must
584 happen immediately, not later. So emit an ASM_INPUT to act as blockage
585 insn. */
586 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
587 }
588
589 /* __builtin_setjmp is passed a pointer to an array of five words (not
590 all will be used on all machines). It operates similarly to the C
591 library function of the same name, but is more efficient. Much of
592 the code below (and for longjmp) is copied from the handling of
593 non-local gotos.
594
595 NOTE: This is intended for use by GNAT and the exception handling
596 scheme in the compiler and will only work in the method used by
597 them. */
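/* A sketch of the intended (internal) usage; ordinary user code should use
   the C library's setjmp/longjmp instead:

   void *buf[5];
   if (__builtin_setjmp (buf) == 0)
   ... normal path, which may later call __builtin_longjmp (buf, 1) ...
   else
   ... control resumes here and the call appears to return 1 ...  */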
598
599 static rtx
600 expand_builtin_setjmp (arglist, target)
601 tree arglist;
602 rtx target;
603 {
604 rtx buf_addr, next_lab, cont_lab;
605
606 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
607 return NULL_RTX;
608
609 if (target == 0 || GET_CODE (target) != REG
610 || REGNO (target) < FIRST_PSEUDO_REGISTER)
611 target = gen_reg_rtx (TYPE_MODE (integer_type_node));
612
613 buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
614
615 next_lab = gen_label_rtx ();
616 cont_lab = gen_label_rtx ();
617
618 expand_builtin_setjmp_setup (buf_addr, next_lab);
619
620 /* Set TARGET to zero and branch to the continue label. */
621 emit_move_insn (target, const0_rtx);
622 emit_jump_insn (gen_jump (cont_lab));
623 emit_barrier ();
624 emit_label (next_lab);
625
626 expand_builtin_setjmp_receiver (next_lab);
627
628 /* Set TARGET to one. */
629 emit_move_insn (target, const1_rtx);
630 emit_label (cont_lab);
631
632 /* Tell flow about the strange goings on. Putting `next_lab' on
633 `nonlocal_goto_handler_labels' indicates that function
634 calls may traverse the arc back to this label. */
635
636 current_function_has_nonlocal_label = 1;
637 nonlocal_goto_handler_labels
638 = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);
639
640 return target;
641 }
642
643 /* __builtin_longjmp is passed a pointer to an array of five words (not
644 all will be used on all machines). It operates similarly to the C
645 library function of the same name, but is more efficient. Much of
646 the code below is copied from the handling of non-local gotos.
647
648 NOTE: This is intended for use by GNAT and the exception handling
649 scheme in the compiler and will only work in the method used by
650 them. */
651
652 void
653 expand_builtin_longjmp (buf_addr, value)
654 rtx buf_addr, value;
655 {
656 rtx fp, lab, stack, insn, last;
657 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
658
659 if (setjmp_alias_set == -1)
660 setjmp_alias_set = new_alias_set ();
661
662 #ifdef POINTERS_EXTEND_UNSIGNED
663 if (GET_MODE (buf_addr) != Pmode)
664 buf_addr = convert_memory_address (Pmode, buf_addr);
665 #endif
666
667 buf_addr = force_reg (Pmode, buf_addr);
668
669 /* We used to store value in static_chain_rtx, but that fails if pointers
670 are smaller than integers. We instead require that the user pass
671 a second argument of 1, because that is what builtin_setjmp will
672 return. This also makes EH slightly more efficient, since we are no
673 longer copying around a value that we don't care about. */
674 if (value != const1_rtx)
675 abort ();
676
677 current_function_calls_longjmp = 1;
678
679 last = get_last_insn ();
680 #ifdef HAVE_builtin_longjmp
681 if (HAVE_builtin_longjmp)
682 emit_insn (gen_builtin_longjmp (buf_addr));
683 else
684 #endif
685 {
686 fp = gen_rtx_MEM (Pmode, buf_addr);
687 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
688 GET_MODE_SIZE (Pmode)));
689
690 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
691 2 * GET_MODE_SIZE (Pmode)));
692 set_mem_alias_set (fp, setjmp_alias_set);
693 set_mem_alias_set (lab, setjmp_alias_set);
694 set_mem_alias_set (stack, setjmp_alias_set);
695
696 /* Pick up FP, label, and SP from the block and jump. This code is
697 from expand_goto in stmt.c; see there for detailed comments. */
698 #if HAVE_nonlocal_goto
699 if (HAVE_nonlocal_goto)
700 /* We have to pass a value to the nonlocal_goto pattern that will
701 get copied into the static_chain pointer, but it does not matter
702 what that value is, because builtin_setjmp does not use it. */
703 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
704 else
705 #endif
706 {
707 lab = copy_to_reg (lab);
708
709 emit_move_insn (hard_frame_pointer_rtx, fp);
710 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
711
712 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
713 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
714 emit_indirect_jump (lab);
715 }
716 }
717
718 /* Search backwards and mark the jump insn as a non-local goto.
719 Note that this precludes the use of __builtin_longjmp to a
720 __builtin_setjmp target in the same function. However, we've
721 already cautioned the user that these functions are for
722 internal exception handling use only. */
723 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
724 {
725 if (insn == last)
726 abort ();
727 if (GET_CODE (insn) == JUMP_INSN)
728 {
729 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
730 REG_NOTES (insn));
731 break;
732 }
733 else if (GET_CODE (insn) == CALL_INSN)
734 break;
735 }
736 }
737
738 /* Expand a call to __builtin_prefetch. For a target that does not support
739 data prefetch, evaluate the memory address argument in case it has side
740 effects. */
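/* For example, __builtin_prefetch (&a[i + 8]) prefetches for read with the
   highest degree of locality (equivalent to passing 0 and 3 explicitly),
   while __builtin_prefetch (&a[i + 8], 1, 1) prefetches for write with low
   temporal locality. The second and third arguments, when present, must be
   compile-time constants, as checked below.  */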
741
742 static void
743 expand_builtin_prefetch (arglist)
744 tree arglist;
745 {
746 tree arg0, arg1, arg2;
747 rtx op0, op1, op2;
748
749 if (!validate_arglist (arglist, POINTER_TYPE, 0))
750 return;
751
752 arg0 = TREE_VALUE (arglist);
753 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
754 zero (read) and argument 2 (locality) defaults to 3 (high degree of
755 locality). */
756 if (TREE_CHAIN (arglist))
757 {
758 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
759 if (TREE_CHAIN (TREE_CHAIN (arglist)))
760 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
761 else
762 arg2 = build_int_2 (3, 0);
763 }
764 else
765 {
766 arg1 = integer_zero_node;
767 arg2 = build_int_2 (3, 0);
768 }
769
770 /* Argument 0 is an address. */
771 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
772
773 /* Argument 1 (read/write flag) must be a compile-time constant int. */
774 if (TREE_CODE (arg1) != INTEGER_CST)
775 {
776 error ("second arg to `__builtin_prefetch' must be a constant");
777 arg1 = integer_zero_node;
778 }
779 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
780 /* Argument 1 must be either zero or one. */
781 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
782 {
783 warning ("invalid second arg to __builtin_prefetch; using zero");
784 op1 = const0_rtx;
785 }
786
787 /* Argument 2 (locality) must be a compile-time constant int. */
788 if (TREE_CODE (arg2) != INTEGER_CST)
789 {
790 error ("third arg to `__builtin_prefetch' must be a constant");
791 arg2 = integer_zero_node;
792 }
793 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
794 /* Argument 2 must be 0, 1, 2, or 3. */
795 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
796 {
797 warning ("invalid third arg to __builtin_prefetch; using zero");
798 op2 = const0_rtx;
799 }
800
801 #ifdef HAVE_prefetch
802 if (HAVE_prefetch)
803 {
804 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
805 (op0,
806 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
807 || (GET_MODE(op0) != Pmode))
808 {
809 #ifdef POINTERS_EXTEND_UNSIGNED
810 if (GET_MODE(op0) != Pmode)
811 op0 = convert_memory_address (Pmode, op0);
812 #endif
813 op0 = force_reg (Pmode, op0);
814 }
815 emit_insn (gen_prefetch (op0, op1, op2));
816 }
817 else
818 #endif
819 op0 = protect_from_queue (op0, 0);
820 /* Don't do anything with direct references to volatile memory, but
821 generate code to handle other side effects. */
822 if (GET_CODE (op0) != MEM && side_effects_p (op0))
823 emit_insn (op0);
824 }
825
826 /* Get a MEM rtx for expression EXP which is the address of an operand
827 to be used in a string instruction (cmpstrsi, movstrsi, ...). */
828
829 static rtx
830 get_memory_rtx (exp)
831 tree exp;
832 {
833 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_SUM);
834 rtx mem;
835
836 #ifdef POINTERS_EXTEND_UNSIGNED
837 if (GET_MODE (addr) != Pmode)
838 addr = convert_memory_address (Pmode, addr);
839 #endif
840
841 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
842
843 /* Get an expression we can use to find the attributes to assign to MEM.
844 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
845 we can. First remove any nops. */
846 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
847 || TREE_CODE (exp) == NON_LVALUE_EXPR)
848 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
849 exp = TREE_OPERAND (exp, 0);
850
851 if (TREE_CODE (exp) == ADDR_EXPR)
852 {
853 exp = TREE_OPERAND (exp, 0);
854 set_mem_attributes (mem, exp, 0);
855 }
856 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
857 {
858 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
859 /* memcpy, memset and other builtin stringops can alias with anything. */
860 set_mem_alias_set (mem, 0);
861 }
862
863 return mem;
864 }
865 \f
866 /* Built-in functions to perform an untyped call and return. */
867
868 /* For each register that may be used for calling a function, this
869 gives a mode used to copy the register's value. VOIDmode indicates
870 the register is not used for calling a function. If the machine
871 has register windows, this gives only the outbound registers.
872 INCOMING_REGNO gives the corresponding inbound register. */
873 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
874
875 /* For each register that may be used for returning values, this gives
876 a mode used to copy the register's value. VOIDmode indicates the
877 register is not used for returning values. If the machine has
878 register windows, this gives only the outbound registers.
879 INCOMING_REGNO gives the corresponding inbound register. */
880 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
881
882 /* For each register that may be used for calling a function, this
883 gives the offset of that register into the block returned by
884 __builtin_apply_args. 0 indicates that the register is not
885 used for calling a function. */
886 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
887
888 /* Return the offset of register REGNO into the block returned by
889 __builtin_apply_args. This is not declared static, since it is
890 needed in objc-act.c. */
891
892 int
893 apply_args_register_offset (regno)
894 int regno;
895 {
896 apply_args_size ();
897
898 /* Arguments are always put in outgoing registers (in the argument
899 block) when that makes sense. */
900 #ifdef OUTGOING_REGNO
901 regno = OUTGOING_REGNO (regno);
902 #endif
903 return apply_args_reg_offset[regno];
904 }
905
906 /* Return the size required for the block returned by __builtin_apply_args,
907 and initialize apply_args_mode. */
908
909 static int
910 apply_args_size ()
911 {
912 static int size = -1;
913 int align;
914 unsigned int regno;
915 enum machine_mode mode;
916
917 /* The values computed by this function never change. */
918 if (size < 0)
919 {
920 /* The first value is the incoming arg-pointer. */
921 size = GET_MODE_SIZE (Pmode);
922
923 /* The second value is the structure value address unless this is
924 passed as an "invisible" first argument. */
925 if (struct_value_rtx)
926 size += GET_MODE_SIZE (Pmode);
927
928 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
929 if (FUNCTION_ARG_REGNO_P (regno))
930 {
931 /* Search for the proper mode for copying this register's
932 value. I'm not sure this is right, but it works so far. */
933 enum machine_mode best_mode = VOIDmode;
934
935 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
936 mode != VOIDmode;
937 mode = GET_MODE_WIDER_MODE (mode))
938 if (HARD_REGNO_MODE_OK (regno, mode)
939 && HARD_REGNO_NREGS (regno, mode) == 1)
940 best_mode = mode;
941
942 if (best_mode == VOIDmode)
943 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
944 mode != VOIDmode;
945 mode = GET_MODE_WIDER_MODE (mode))
946 if (HARD_REGNO_MODE_OK (regno, mode)
947 && have_insn_for (SET, mode))
948 best_mode = mode;
949
950 if (best_mode == VOIDmode)
951 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
952 mode != VOIDmode;
953 mode = GET_MODE_WIDER_MODE (mode))
954 if (HARD_REGNO_MODE_OK (regno, mode)
955 && have_insn_for (SET, mode))
956 best_mode = mode;
957
958 if (best_mode == VOIDmode)
959 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
960 mode != VOIDmode;
961 mode = GET_MODE_WIDER_MODE (mode))
962 if (HARD_REGNO_MODE_OK (regno, mode)
963 && have_insn_for (SET, mode))
964 best_mode = mode;
965
966 mode = best_mode;
967 if (mode == VOIDmode)
968 abort ();
969
970 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
971 if (size % align != 0)
972 size = CEIL (size, align) * align;
973 apply_args_reg_offset[regno] = size;
974 size += GET_MODE_SIZE (mode);
975 apply_args_mode[regno] = mode;
976 }
977 else
978 {
979 apply_args_mode[regno] = VOIDmode;
980 apply_args_reg_offset[regno] = 0;
981 }
982 }
983 return size;
984 }
985
986 /* Return the size required for the block returned by __builtin_apply,
987 and initialize apply_result_mode. */
988
989 static int
990 apply_result_size ()
991 {
992 static int size = -1;
993 int align, regno;
994 enum machine_mode mode;
995
996 /* The values computed by this function never change. */
997 if (size < 0)
998 {
999 size = 0;
1000
1001 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1002 if (FUNCTION_VALUE_REGNO_P (regno))
1003 {
1004 /* Search for the proper mode for copying this register's
1005 value. I'm not sure this is right, but it works so far. */
1006 enum machine_mode best_mode = VOIDmode;
1007
1008 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1009 mode != TImode;
1010 mode = GET_MODE_WIDER_MODE (mode))
1011 if (HARD_REGNO_MODE_OK (regno, mode))
1012 best_mode = mode;
1013
1014 if (best_mode == VOIDmode)
1015 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
1016 mode != VOIDmode;
1017 mode = GET_MODE_WIDER_MODE (mode))
1018 if (HARD_REGNO_MODE_OK (regno, mode)
1019 && have_insn_for (SET, mode))
1020 best_mode = mode;
1021
1022 if (best_mode == VOIDmode)
1023 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
1024 mode != VOIDmode;
1025 mode = GET_MODE_WIDER_MODE (mode))
1026 if (HARD_REGNO_MODE_OK (regno, mode)
1027 && have_insn_for (SET, mode))
1028 best_mode = mode;
1029
1030 if (best_mode == VOIDmode)
1031 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
1032 mode != VOIDmode;
1033 mode = GET_MODE_WIDER_MODE (mode))
1034 if (HARD_REGNO_MODE_OK (regno, mode)
1035 && have_insn_for (SET, mode))
1036 best_mode = mode;
1037
1038 mode = best_mode;
1039 if (mode == VOIDmode)
1040 abort ();
1041
1042 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1043 if (size % align != 0)
1044 size = CEIL (size, align) * align;
1045 size += GET_MODE_SIZE (mode);
1046 apply_result_mode[regno] = mode;
1047 }
1048 else
1049 apply_result_mode[regno] = VOIDmode;
1050
1051 /* Allow targets that use untyped_call and untyped_return to override
1052 the size so that machine-specific information can be stored here. */
1053 #ifdef APPLY_RESULT_SIZE
1054 size = APPLY_RESULT_SIZE;
1055 #endif
1056 }
1057 return size;
1058 }
1059
1060 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1061 /* Create a vector describing the result block RESULT. If SAVEP is true,
1062 the result block is used to save the values; otherwise it is used to
1063 restore the values. */
1064
1065 static rtx
1066 result_vector (savep, result)
1067 int savep;
1068 rtx result;
1069 {
1070 int regno, size, align, nelts;
1071 enum machine_mode mode;
1072 rtx reg, mem;
1073 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1074
1075 size = nelts = 0;
1076 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1077 if ((mode = apply_result_mode[regno]) != VOIDmode)
1078 {
1079 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1080 if (size % align != 0)
1081 size = CEIL (size, align) * align;
1082 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1083 mem = adjust_address (result, mode, size);
1084 savevec[nelts++] = (savep
1085 ? gen_rtx_SET (VOIDmode, mem, reg)
1086 : gen_rtx_SET (VOIDmode, reg, mem));
1087 size += GET_MODE_SIZE (mode);
1088 }
1089 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1090 }
1091 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1092
1093 /* Save the state required to perform an untyped call with the same
1094 arguments as were passed to the current function. */
1095
1096 static rtx
1097 expand_builtin_apply_args_1 ()
1098 {
1099 rtx registers;
1100 int size, align, regno;
1101 enum machine_mode mode;
1102
1103 /* Create a block where the arg-pointer, structure value address,
1104 and argument registers can be saved. */
1105 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1106
1107 /* Walk past the arg-pointer and structure value address. */
1108 size = GET_MODE_SIZE (Pmode);
1109 if (struct_value_rtx)
1110 size += GET_MODE_SIZE (Pmode);
1111
1112 /* Save each register used in calling a function to the block. */
1113 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1114 if ((mode = apply_args_mode[regno]) != VOIDmode)
1115 {
1116 rtx tem;
1117
1118 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1119 if (size % align != 0)
1120 size = CEIL (size, align) * align;
1121
1122 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1123
1124 emit_move_insn (adjust_address (registers, mode, size), tem);
1125 size += GET_MODE_SIZE (mode);
1126 }
1127
1128 /* Save the arg pointer to the block. */
1129 emit_move_insn (adjust_address (registers, Pmode, 0),
1130 copy_to_reg (virtual_incoming_args_rtx));
1131 size = GET_MODE_SIZE (Pmode);
1132
1133 /* Save the structure value address unless this is passed as an
1134 "invisible" first argument. */
1135 if (struct_value_incoming_rtx)
1136 {
1137 emit_move_insn (adjust_address (registers, Pmode, size),
1138 copy_to_reg (struct_value_incoming_rtx));
1139 size += GET_MODE_SIZE (Pmode);
1140 }
1141
1142 /* Return the address of the block. */
1143 return copy_addr_to_reg (XEXP (registers, 0));
1144 }
1145
1146 /* __builtin_apply_args returns a block of memory allocated on
1147 the stack into which is stored the arg pointer, structure
1148 value address, static chain, and all the registers that might
1149 possibly be used in performing a function call. The code is
1150 moved to the start of the function so the incoming values are
1151 saved. */
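/* The untyped-call builtins are normally used together to forward a call
   whose arguments are not known at compile time, roughly:

   void *args = __builtin_apply_args ();
   void *result = __builtin_apply (fn, args, size);
   __builtin_return (result);

   where FN and SIZE (an upper bound on the size of the argument block) are
   supplied by the caller.  */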
1152
1153 static rtx
1154 expand_builtin_apply_args ()
1155 {
1156 /* Don't do __builtin_apply_args more than once in a function.
1157 Save the result of the first call and reuse it. */
1158 if (apply_args_value != 0)
1159 return apply_args_value;
1160 {
1161 /* When this function is called, it means that registers must be
1162 saved on entry to this function. So we migrate the
1163 call to the first insn of this function. */
1164 rtx temp;
1165 rtx seq;
1166
1167 start_sequence ();
1168 temp = expand_builtin_apply_args_1 ();
1169 seq = get_insns ();
1170 end_sequence ();
1171
1172 apply_args_value = temp;
1173
1174 /* Put the insns after the NOTE that starts the function.
1175 If this is inside a start_sequence, make the outer-level insn
1176 chain current, so the code is placed at the start of the
1177 function. */
1178 push_topmost_sequence ();
1179 emit_insn_before (seq, NEXT_INSN (get_insns ()));
1180 pop_topmost_sequence ();
1181 return temp;
1182 }
1183 }
1184
1185 /* Perform an untyped call and save the state required to perform an
1186 untyped return of whatever value was returned by the given function. */
1187
1188 static rtx
1189 expand_builtin_apply (function, arguments, argsize)
1190 rtx function, arguments, argsize;
1191 {
1192 int size, align, regno;
1193 enum machine_mode mode;
1194 rtx incoming_args, result, reg, dest, src, call_insn;
1195 rtx old_stack_level = 0;
1196 rtx call_fusage = 0;
1197
1198 #ifdef POINTERS_EXTEND_UNSIGNED
1199 if (GET_MODE (arguments) != Pmode)
1200 arguments = convert_memory_address (Pmode, arguments);
1201 #endif
1202
1203 /* Create a block where the return registers can be saved. */
1204 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1205
1206 /* Fetch the arg pointer from the ARGUMENTS block. */
1207 incoming_args = gen_reg_rtx (Pmode);
1208 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1209 #ifndef STACK_GROWS_DOWNWARD
1210 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1211 incoming_args, 0, OPTAB_LIB_WIDEN);
1212 #endif
1213
1214 /* Perform postincrements before actually calling the function. */
1215 emit_queue ();
1216
1217 /* Push a new argument block and copy the arguments. Do not allow
1218 the (potential) memcpy call below to interfere with our stack
1219 manipulations. */
1220 do_pending_stack_adjust ();
1221 NO_DEFER_POP;
1222
1223 /* Save the stack with the nonlocal mechanism if available. */
1224 #ifdef HAVE_save_stack_nonlocal
1225 if (HAVE_save_stack_nonlocal)
1226 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1227 else
1228 #endif
1229 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1230
1231 /* Push a block of memory onto the stack to store the memory arguments.
1232 Save the address in a register, and copy the memory arguments. ??? I
1233 haven't figured out how the calling convention macros affect this,
1234 but it's likely that the source and/or destination addresses in
1235 the block copy will need updating in machine-specific ways. */
1236 dest = allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1237 dest = gen_rtx_MEM (BLKmode, dest);
1238 set_mem_align (dest, PARM_BOUNDARY);
1239 src = gen_rtx_MEM (BLKmode, incoming_args);
1240 set_mem_align (src, PARM_BOUNDARY);
1241 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1242
1243 /* Refer to the argument block. */
1244 apply_args_size ();
1245 arguments = gen_rtx_MEM (BLKmode, arguments);
1246 set_mem_align (arguments, PARM_BOUNDARY);
1247
1248 /* Walk past the arg-pointer and structure value address. */
1249 size = GET_MODE_SIZE (Pmode);
1250 if (struct_value_rtx)
1251 size += GET_MODE_SIZE (Pmode);
1252
1253 /* Restore each of the registers previously saved. Make USE insns
1254 for each of these registers for use in making the call. */
1255 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1256 if ((mode = apply_args_mode[regno]) != VOIDmode)
1257 {
1258 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1259 if (size % align != 0)
1260 size = CEIL (size, align) * align;
1261 reg = gen_rtx_REG (mode, regno);
1262 emit_move_insn (reg, adjust_address (arguments, mode, size));
1263 use_reg (&call_fusage, reg);
1264 size += GET_MODE_SIZE (mode);
1265 }
1266
1267 /* Restore the structure value address unless this is passed as an
1268 "invisible" first argument. */
1269 size = GET_MODE_SIZE (Pmode);
1270 if (struct_value_rtx)
1271 {
1272 rtx value = gen_reg_rtx (Pmode);
1273 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1274 emit_move_insn (struct_value_rtx, value);
1275 if (GET_CODE (struct_value_rtx) == REG)
1276 use_reg (&call_fusage, struct_value_rtx);
1277 size += GET_MODE_SIZE (Pmode);
1278 }
1279
1280 /* All arguments and registers used for the call are set up by now! */
1281 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0, 0);
1282
1283 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1284 and we don't want to load it into a register as an optimization,
1285 because prepare_call_address already did it if it should be done. */
1286 if (GET_CODE (function) != SYMBOL_REF)
1287 function = memory_address (FUNCTION_MODE, function);
1288
1289 /* Generate the actual call instruction and save the return value. */
1290 #ifdef HAVE_untyped_call
1291 if (HAVE_untyped_call)
1292 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1293 result, result_vector (1, result)));
1294 else
1295 #endif
1296 #ifdef HAVE_call_value
1297 if (HAVE_call_value)
1298 {
1299 rtx valreg = 0;
1300
1301 /* Locate the unique return register. It is not possible to
1302 express a call that sets more than one return register using
1303 call_value; use untyped_call for that. In fact, untyped_call
1304 only needs to save the return registers in the given block. */
1305 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1306 if ((mode = apply_result_mode[regno]) != VOIDmode)
1307 {
1308 if (valreg)
1309 abort (); /* HAVE_untyped_call required. */
1310 valreg = gen_rtx_REG (mode, regno);
1311 }
1312
1313 emit_call_insn (GEN_CALL_VALUE (valreg,
1314 gen_rtx_MEM (FUNCTION_MODE, function),
1315 const0_rtx, NULL_RTX, const0_rtx));
1316
1317 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1318 }
1319 else
1320 #endif
1321 abort ();
1322
1323 /* Find the CALL insn we just emitted. */
1324 for (call_insn = get_last_insn ();
1325 call_insn && GET_CODE (call_insn) != CALL_INSN;
1326 call_insn = PREV_INSN (call_insn))
1327 ;
1328
1329 if (! call_insn)
1330 abort ();
1331
1332 /* Put the register usage information on the CALL. If there is already
1333 some usage information, put ours at the end. */
1334 if (CALL_INSN_FUNCTION_USAGE (call_insn))
1335 {
1336 rtx link;
1337
1338 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
1339 link = XEXP (link, 1))
1340 ;
1341
1342 XEXP (link, 1) = call_fusage;
1343 }
1344 else
1345 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
1346
1347 /* Restore the stack. */
1348 #ifdef HAVE_save_stack_nonlocal
1349 if (HAVE_save_stack_nonlocal)
1350 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1351 else
1352 #endif
1353 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1354
1355 OK_DEFER_POP;
1356
1357 /* Return the address of the result block. */
1358 return copy_addr_to_reg (XEXP (result, 0));
1359 }
1360
1361 /* Perform an untyped return. */
1362
1363 static void
1364 expand_builtin_return (result)
1365 rtx result;
1366 {
1367 int size, align, regno;
1368 enum machine_mode mode;
1369 rtx reg;
1370 rtx call_fusage = 0;
1371
1372 #ifdef POINTERS_EXTEND_UNSIGNED
1373 if (GET_MODE (result) != Pmode)
1374 result = convert_memory_address (Pmode, result);
1375 #endif
1376
1377 apply_result_size ();
1378 result = gen_rtx_MEM (BLKmode, result);
1379
1380 #ifdef HAVE_untyped_return
1381 if (HAVE_untyped_return)
1382 {
1383 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1384 emit_barrier ();
1385 return;
1386 }
1387 #endif
1388
1389 /* Restore the return value and note that each value is used. */
1390 size = 0;
1391 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1392 if ((mode = apply_result_mode[regno]) != VOIDmode)
1393 {
1394 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1395 if (size % align != 0)
1396 size = CEIL (size, align) * align;
1397 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1398 emit_move_insn (reg, adjust_address (result, mode, size));
1399
1400 push_to_sequence (call_fusage);
1401 emit_insn (gen_rtx_USE (VOIDmode, reg));
1402 call_fusage = get_insns ();
1403 end_sequence ();
1404 size += GET_MODE_SIZE (mode);
1405 }
1406
1407 /* Put the USE insns before the return. */
1408 emit_insn (call_fusage);
1409
1410 /* Return whatever values were restored by jumping directly to the end
1411 of the function. */
1412 expand_null_return ();
1413 }
1414
1415 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1416
1417 static enum type_class
1418 type_to_class (type)
1419 tree type;
1420 {
1421 switch (TREE_CODE (type))
1422 {
1423 case VOID_TYPE: return void_type_class;
1424 case INTEGER_TYPE: return integer_type_class;
1425 case CHAR_TYPE: return char_type_class;
1426 case ENUMERAL_TYPE: return enumeral_type_class;
1427 case BOOLEAN_TYPE: return boolean_type_class;
1428 case POINTER_TYPE: return pointer_type_class;
1429 case REFERENCE_TYPE: return reference_type_class;
1430 case OFFSET_TYPE: return offset_type_class;
1431 case REAL_TYPE: return real_type_class;
1432 case COMPLEX_TYPE: return complex_type_class;
1433 case FUNCTION_TYPE: return function_type_class;
1434 case METHOD_TYPE: return method_type_class;
1435 case RECORD_TYPE: return record_type_class;
1436 case UNION_TYPE:
1437 case QUAL_UNION_TYPE: return union_type_class;
1438 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1439 ? string_type_class : array_type_class);
1440 case SET_TYPE: return set_type_class;
1441 case FILE_TYPE: return file_type_class;
1442 case LANG_TYPE: return lang_type_class;
1443 default: return no_type_class;
1444 }
1445 }
1446
1447 /* Expand a call to __builtin_classify_type with arguments found in
1448 ARGLIST. */
1449
1450 static rtx
1451 expand_builtin_classify_type (arglist)
1452 tree arglist;
1453 {
1454 if (arglist != 0)
1455 return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
1456 return GEN_INT (no_type_class);
1457 }
1458
1459 /* Expand expression EXP, which is a call to __builtin_constant_p. */
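/* User code typically looks something like
   __builtin_constant_p (x) ? handle_constant (x) : handle_variable (x)
   (handle_constant/handle_variable standing for arbitrary user code). The
   easy cases are folded long before this point; anything still undecided
   is wrapped in CONSTANT_P_RTX below so that CSE can resolve it later.  */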
1460
1461 static rtx
1462 expand_builtin_constant_p (exp)
1463 tree exp;
1464 {
1465 tree arglist = TREE_OPERAND (exp, 1);
1466 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
1467 rtx tmp;
1468
1469 if (arglist == 0)
1470 return const0_rtx;
1471 arglist = TREE_VALUE (arglist);
1472
1473 /* We have taken care of the easy cases during constant folding. This
1474 case is not obvious, so emit (constant_p_rtx (ARGLIST)) and let CSE
1475 get a chance to see if it can deduce whether ARGLIST is constant. */
1476
1477 current_function_calls_constant_p = 1;
1478
1479 tmp = expand_expr (arglist, NULL_RTX, VOIDmode, 0);
1480 tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp);
1481 return tmp;
1482 }
1483
1484 /* Return the mathematical function equivalent to FN but operating directly
1485 on TYPE, if available. */
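/* For example, mathfn_built_in (float_type_node, BUILT_IN_SQRT) yields the
   implicit declaration of BUILT_IN_SQRTF, and passing long_double_type_node
   yields that of BUILT_IN_SQRTL; any of the three precision variants of FN
   may be given.  */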
1486 tree
1487 mathfn_built_in (type, fn)
1488 tree type;
1489 enum built_in_function fn;
1490 {
1491 enum built_in_function fcode = NOT_BUILT_IN;
1492 if (TYPE_MODE (type) == TYPE_MODE (double_type_node))
1493 switch (fn)
1494 {
1495 case BUILT_IN_SQRT:
1496 case BUILT_IN_SQRTF:
1497 case BUILT_IN_SQRTL:
1498 fcode = BUILT_IN_SQRT;
1499 break;
1500 case BUILT_IN_SIN:
1501 case BUILT_IN_SINF:
1502 case BUILT_IN_SINL:
1503 fcode = BUILT_IN_SIN;
1504 break;
1505 case BUILT_IN_COS:
1506 case BUILT_IN_COSF:
1507 case BUILT_IN_COSL:
1508 fcode = BUILT_IN_COS;
1509 break;
1510 case BUILT_IN_EXP:
1511 case BUILT_IN_EXPF:
1512 case BUILT_IN_EXPL:
1513 fcode = BUILT_IN_EXP;
1514 break;
1515 case BUILT_IN_LOG:
1516 case BUILT_IN_LOGF:
1517 case BUILT_IN_LOGL:
1518 fcode = BUILT_IN_LOG;
1519 break;
1520 case BUILT_IN_FLOOR:
1521 case BUILT_IN_FLOORF:
1522 case BUILT_IN_FLOORL:
1523 fcode = BUILT_IN_FLOOR;
1524 break;
1525 case BUILT_IN_CEIL:
1526 case BUILT_IN_CEILF:
1527 case BUILT_IN_CEILL:
1528 fcode = BUILT_IN_CEIL;
1529 break;
1530 case BUILT_IN_TRUNC:
1531 case BUILT_IN_TRUNCF:
1532 case BUILT_IN_TRUNCL:
1533 fcode = BUILT_IN_TRUNC;
1534 break;
1535 case BUILT_IN_ROUND:
1536 case BUILT_IN_ROUNDF:
1537 case BUILT_IN_ROUNDL:
1538 fcode = BUILT_IN_ROUND;
1539 break;
1540 case BUILT_IN_NEARBYINT:
1541 case BUILT_IN_NEARBYINTF:
1542 case BUILT_IN_NEARBYINTL:
1543 fcode = BUILT_IN_NEARBYINT;
1544 break;
1545 default:
1546 abort ();
1547 }
1548 else if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
1549 switch (fn)
1550 {
1551 case BUILT_IN_SQRT:
1552 case BUILT_IN_SQRTF:
1553 case BUILT_IN_SQRTL:
1554 fcode = BUILT_IN_SQRTF;
1555 break;
1556 case BUILT_IN_SIN:
1557 case BUILT_IN_SINF:
1558 case BUILT_IN_SINL:
1559 fcode = BUILT_IN_SINF;
1560 break;
1561 case BUILT_IN_COS:
1562 case BUILT_IN_COSF:
1563 case BUILT_IN_COSL:
1564 fcode = BUILT_IN_COSF;
1565 break;
1566 case BUILT_IN_EXP:
1567 case BUILT_IN_EXPF:
1568 case BUILT_IN_EXPL:
1569 fcode = BUILT_IN_EXPF;
1570 break;
1571 case BUILT_IN_LOG:
1572 case BUILT_IN_LOGF:
1573 case BUILT_IN_LOGL:
1574 fcode = BUILT_IN_LOGF;
1575 break;
1576 case BUILT_IN_FLOOR:
1577 case BUILT_IN_FLOORF:
1578 case BUILT_IN_FLOORL:
1579 fcode = BUILT_IN_FLOORF;
1580 break;
1581 case BUILT_IN_CEIL:
1582 case BUILT_IN_CEILF:
1583 case BUILT_IN_CEILL:
1584 fcode = BUILT_IN_CEILF;
1585 break;
1586 case BUILT_IN_TRUNC:
1587 case BUILT_IN_TRUNCF:
1588 case BUILT_IN_TRUNCL:
1589 fcode = BUILT_IN_TRUNCF;
1590 break;
1591 case BUILT_IN_ROUND:
1592 case BUILT_IN_ROUNDF:
1593 case BUILT_IN_ROUNDL:
1594 fcode = BUILT_IN_ROUNDF;
1595 break;
1596 case BUILT_IN_NEARBYINT:
1597 case BUILT_IN_NEARBYINTF:
1598 case BUILT_IN_NEARBYINTL:
1599 fcode = BUILT_IN_NEARBYINTF;
1600 break;
1601 default:
1602 abort ();
1603 }
1604 else if (TYPE_MODE (type) == TYPE_MODE (long_double_type_node))
1605 switch (fn)
1606 {
1607 case BUILT_IN_SQRT:
1608 case BUILT_IN_SQRTF:
1609 case BUILT_IN_SQRTL:
1610 fcode = BUILT_IN_SQRTL;
1611 break;
1612 case BUILT_IN_SIN:
1613 case BUILT_IN_SINF:
1614 case BUILT_IN_SINL:
1615 fcode = BUILT_IN_SINL;
1616 break;
1617 case BUILT_IN_COS:
1618 case BUILT_IN_COSF:
1619 case BUILT_IN_COSL:
1620 fcode = BUILT_IN_COSL;
1621 break;
1622 case BUILT_IN_EXP:
1623 case BUILT_IN_EXPF:
1624 case BUILT_IN_EXPL:
1625 fcode = BUILT_IN_EXPL;
1626 break;
1627 case BUILT_IN_LOG:
1628 case BUILT_IN_LOGF:
1629 case BUILT_IN_LOGL:
1630 fcode = BUILT_IN_LOGL;
1631 break;
1632 case BUILT_IN_FLOOR:
1633 case BUILT_IN_FLOORF:
1634 case BUILT_IN_FLOORL:
1635 fcode = BUILT_IN_FLOORL;
1636 break;
1637 case BUILT_IN_CEIL:
1638 case BUILT_IN_CEILF:
1639 case BUILT_IN_CEILL:
1640 fcode = BUILT_IN_CEILL;
1641 break;
1642 case BUILT_IN_TRUNC:
1643 case BUILT_IN_TRUNCF:
1644 case BUILT_IN_TRUNCL:
1645 fcode = BUILT_IN_TRUNCL;
1646 break;
1647 case BUILT_IN_ROUND:
1648 case BUILT_IN_ROUNDF:
1649 case BUILT_IN_ROUNDL:
1650 fcode = BUILT_IN_ROUNDL;
1651 break;
1652 case BUILT_IN_NEARBYINT:
1653 case BUILT_IN_NEARBYINTF:
1654 case BUILT_IN_NEARBYINTL:
1655 fcode = BUILT_IN_NEARBYINTL;
1656 break;
1657 default:
1658 abort ();
1659 }
1660 return implicit_built_in_decls[fcode];
1661 }
1662
1663 /* If errno must be maintained, expand the RTL to check if the result,
1664 TARGET, of a built-in function call, EXP, is NaN, and if so set
1665 errno to EDOM. */
1666
1667 static void
1668 expand_errno_check (exp, target)
1669 tree exp;
1670 rtx target;
1671 {
1672 rtx lab;
1673
1674 if (flag_errno_math && HONOR_NANS (GET_MODE (target)))
1675 {
1676 lab = gen_label_rtx ();
1677
1678 /* Test the result; if it is NaN (so the self-comparison below fails and
1679 LAB is not reached), set errno = EDOM: the argument was not in the domain. */
1680 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1681 0, lab);
1682
1683 #ifdef TARGET_EDOM
1684 {
1685 #ifdef GEN_ERRNO_RTX
1686 rtx errno_rtx = GEN_ERRNO_RTX;
1687 #else
1688 rtx errno_rtx
1689 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1690 #endif
1691
1692 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1693 }
1694 #else
1695 /* We can't set errno=EDOM directly; let the library call do it.
1696 Pop the arguments right away in case the call gets deleted. */
1697 NO_DEFER_POP;
1698 expand_call (exp, target, 0);
1699 OK_DEFER_POP;
1700 #endif
1701
1702 emit_label (lab);
1703 }
1704 }
1705
1706
1707 /* Expand a call to one of the builtin math functions (sin, cos, or sqrt).
1708 Return 0 if a normal call should be emitted rather than expanding the
1709 function in-line. EXP is the expression that is a call to the builtin
1710 function; if convenient, the result should be placed in TARGET.
1711 SUBTARGET may be used as the target for computing one of EXP's operands. */
1712
1713 static rtx
1714 expand_builtin_mathfn (exp, target, subtarget)
1715 tree exp;
1716 rtx target, subtarget;
1717 {
1718 optab builtin_optab;
1719 rtx op0, insns;
1720 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1721 tree arglist = TREE_OPERAND (exp, 1);
1722 enum machine_mode argmode;
1723 bool errno_set = true;
1724
1725 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
1726 return 0;
1727
1728 /* Stabilize and compute the argument. */
1729 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
1730 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
1731 {
1732 exp = copy_node (exp);
1733 TREE_OPERAND (exp, 1) = arglist;
1734 /* Wrap the computation of the argument in a SAVE_EXPR. That
1735 way, if we need to expand the argument again (as in the
1736 flag_errno_math case below where we cannot directly set
1737 errno), we will not perform side-effects more than once.
1738 Note that here we're mutating the original EXP as well as the
1739 copy; that's the right thing to do in case the original EXP
1740 is expanded later. */
1741 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
1742 arglist = copy_node (arglist);
1743 }
1744 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
1745
1746 /* Make a suitable register to place result in. */
1747 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
1748
1749 emit_queue ();
1750 start_sequence ();
1751
1752 switch (DECL_FUNCTION_CODE (fndecl))
1753 {
1754 case BUILT_IN_SIN:
1755 case BUILT_IN_SINF:
1756 case BUILT_IN_SINL:
1757 builtin_optab = sin_optab; break;
1758 case BUILT_IN_COS:
1759 case BUILT_IN_COSF:
1760 case BUILT_IN_COSL:
1761 builtin_optab = cos_optab; break;
1762 case BUILT_IN_SQRT:
1763 case BUILT_IN_SQRTF:
1764 case BUILT_IN_SQRTL:
1765 builtin_optab = sqrt_optab; break;
1766 case BUILT_IN_EXP:
1767 case BUILT_IN_EXPF:
1768 case BUILT_IN_EXPL:
1769 builtin_optab = exp_optab; break;
1770 case BUILT_IN_LOG:
1771 case BUILT_IN_LOGF:
1772 case BUILT_IN_LOGL:
1773 builtin_optab = log_optab; break;
1774 case BUILT_IN_FLOOR:
1775 case BUILT_IN_FLOORF:
1776 case BUILT_IN_FLOORL:
1777 errno_set = false; builtin_optab = floor_optab; break;
1778 case BUILT_IN_CEIL:
1779 case BUILT_IN_CEILF:
1780 case BUILT_IN_CEILL:
1781 errno_set = false; builtin_optab = ceil_optab; break;
1782 case BUILT_IN_TRUNC:
1783 case BUILT_IN_TRUNCF:
1784 case BUILT_IN_TRUNCL:
1785 errno_set = false; builtin_optab = trunc_optab; break;
1786 case BUILT_IN_ROUND:
1787 case BUILT_IN_ROUNDF:
1788 case BUILT_IN_ROUNDL:
1789 errno_set = false; builtin_optab = round_optab; break;
1790 case BUILT_IN_NEARBYINT:
1791 case BUILT_IN_NEARBYINTF:
1792 case BUILT_IN_NEARBYINTL:
1793 errno_set = false; builtin_optab = nearbyint_optab; break;
1794 default:
1795 abort ();
1796 }
1797
1798 /* Compute into TARGET.
1799 Set TARGET to wherever the result comes back. */
1800 argmode = TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist)));
1801 target = expand_unop (argmode, builtin_optab, op0, target, 0);
1802
1803 /* If we were unable to expand via the builtin, stop the
1804 sequence (without outputting the insns) and return 0, causing
1805 a call to the library function. */
1806 if (target == 0)
1807 {
1808 end_sequence ();
1809 return 0;
1810 }
1811
1812 if (errno_set)
1813 expand_errno_check (exp, target);
1814
1815 /* Output the entire sequence. */
1816 insns = get_insns ();
1817 end_sequence ();
1818 emit_insn (insns);
1819
1820 return target;
1821 }
1822
1823 /* Expand a call to the builtin binary math functions (pow and atan2).
1824 Return 0 if a normal call should be emitted rather than expanding the
1825 function in-line. EXP is the expression that is a call to the builtin
1826 function; if convenient, the result should be placed in TARGET.
1827 SUBTARGET may be used as the target for computing one of EXP's
1828 operands. */
1829
1830 static rtx
1831 expand_builtin_mathfn_2 (exp, target, subtarget)
1832 tree exp;
1833 rtx target, subtarget;
1834 {
1835 optab builtin_optab;
1836 rtx op0, op1, insns;
1837 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1838 tree arglist = TREE_OPERAND (exp, 1);
1839 tree arg0, arg1;
1840 enum machine_mode argmode;
1841 bool errno_set = true;
1842 bool stable = true;
1843
1844 if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
1845 return 0;
1846
1847 arg0 = TREE_VALUE (arglist);
1848 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1849
1850 /* Stabilize the arguments. */
1851 if (TREE_CODE (arg0) != VAR_DECL && TREE_CODE (arg0) != PARM_DECL)
1852 {
1853 arg0 = save_expr (arg0);
1854 TREE_VALUE (arglist) = arg0;
1855 stable = false;
1856 }
1857 if (TREE_CODE (arg1) != VAR_DECL && TREE_CODE (arg1) != PARM_DECL)
1858 {
1859 arg1 = save_expr (arg1);
1860 TREE_VALUE (TREE_CHAIN (arglist)) = arg1;
1861 stable = false;
1862 }
1863
1864 if (! stable)
1865 {
1866 exp = copy_node (exp);
1867 arglist = tree_cons (NULL_TREE, arg0,
1868 build_tree_list (NULL_TREE, arg1));
1869 TREE_OPERAND (exp, 1) = arglist;
1870 }
1871
1872 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
1873 op1 = expand_expr (arg1, 0, VOIDmode, 0);
1874
1875 /* Make a suitable register to place result in. */
1876 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
1877
1878 emit_queue ();
1879 start_sequence ();
1880
1881 switch (DECL_FUNCTION_CODE (fndecl))
1882 {
1883 case BUILT_IN_POW:
1884 case BUILT_IN_POWF:
1885 case BUILT_IN_POWL:
1886 builtin_optab = pow_optab; break;
1887 case BUILT_IN_ATAN2:
1888 case BUILT_IN_ATAN2F:
1889 case BUILT_IN_ATAN2L:
1890 builtin_optab = atan2_optab; break;
1891 default:
1892 abort ();
1893 }
1894
1895 /* Compute into TARGET.
1896 Set TARGET to wherever the result comes back. */
1897 argmode = TYPE_MODE (TREE_TYPE (arg0));
1898 target = expand_binop (argmode, builtin_optab, op0, op1,
1899 target, 0, OPTAB_DIRECT);
1900
1901 /* If we were unable to expand via the builtin, stop the
1902 sequence (without outputting the insns) and return 0, causing
1903 a call to the library function. */
1904 if (target == 0)
1905 {
1906 end_sequence ();
1907 return 0;
1908 }
1909
1910 if (errno_set)
1911 expand_errno_check (exp, target);
1912
1913 /* Output the entire sequence. */
1914 insns = get_insns ();
1915 end_sequence ();
1916 emit_insn (insns);
1917
1918 return target;
1919 }
1920
1921 /* Expand expression EXP, which is a call to the strlen builtin. Return 0
1922 if we failed; the caller should then emit a normal call. Otherwise
1923 try to get the result in TARGET, if convenient. */
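/* For example, when the argument is a string literal the length is
   known at compile time and no call is emitted at all:

       n = strlen ("hello");   -- folds to   n = 5;

   (a hypothetical illustration of the c_strlen path below).  */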
1924
1925 static rtx
1926 expand_builtin_strlen (exp, target)
1927 tree exp;
1928 rtx target;
1929 {
1930 tree arglist = TREE_OPERAND (exp, 1);
1931 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
1932
1933 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
1934 return 0;
1935 else
1936 {
1937 rtx pat;
1938 tree len, src = TREE_VALUE (arglist);
1939 rtx result, src_reg, char_rtx, before_strlen;
1940 enum machine_mode insn_mode = value_mode, char_mode;
1941 enum insn_code icode = CODE_FOR_nothing;
1942 int align;
1943
1944 /* If the length can be computed at compile-time, return it. */
1945 len = c_strlen (src);
1946 if (len)
1947 return expand_expr (len, target, value_mode, EXPAND_NORMAL);
1948
1949 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
1950
1951 /* If SRC is not a pointer type, don't do this operation inline. */
1952 if (align == 0)
1953 return 0;
1954
1955 /* Bail out if we can't compute strlen in the right mode. */
1956 while (insn_mode != VOIDmode)
1957 {
1958 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
1959 if (icode != CODE_FOR_nothing)
1960 break;
1961
1962 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
1963 }
1964 if (insn_mode == VOIDmode)
1965 return 0;
1966
1967 /* Make a place to write the result of the instruction. */
1968 result = target;
1969 if (! (result != 0
1970 && GET_CODE (result) == REG
1971 && GET_MODE (result) == insn_mode
1972 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
1973 result = gen_reg_rtx (insn_mode);
1974
1975 /* Make a place to hold the source address. We will not expand
1976 the actual source until we are sure that the expansion will
1977 not fail -- there are trees that cannot be expanded twice. */
1978 src_reg = gen_reg_rtx (Pmode);
1979
1980 /* Mark the beginning of the strlen sequence so we can emit the
1981 source operand later. */
1982 before_strlen = get_last_insn ();
1983
1984 char_rtx = const0_rtx;
1985 char_mode = insn_data[(int) icode].operand[2].mode;
1986 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
1987 char_mode))
1988 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
1989
1990 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
1991 char_rtx, GEN_INT (align));
1992 if (! pat)
1993 return 0;
1994 emit_insn (pat);
1995
1996 /* Now that we are assured of success, expand the source. */
1997 start_sequence ();
1998 pat = memory_address (BLKmode,
1999 expand_expr (src, src_reg, ptr_mode, EXPAND_SUM));
2000 if (pat != src_reg)
2001 emit_move_insn (src_reg, pat);
2002 pat = get_insns ();
2003 end_sequence ();
2004
2005 if (before_strlen)
2006 emit_insn_after (pat, before_strlen);
2007 else
2008 emit_insn_before (pat, get_insns ());
2009
2010 /* Return the value in the proper mode for this function. */
2011 if (GET_MODE (result) == value_mode)
2012 target = result;
2013 else if (target != 0)
2014 convert_move (target, result, 0);
2015 else
2016 target = convert_to_mode (value_mode, result, 0);
2017
2018 return target;
2019 }
2020 }
2021
2022 /* Expand a call to the strstr builtin. Return 0 if we failed; the
2023 caller should then emit a normal call. Otherwise try to get the result
2024 in TARGET, if convenient (and in mode MODE if that's convenient). */
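/* Hypothetical examples of the transformations performed below:

       strstr (s, "")        -- becomes   s
       strstr (s, "a")       -- becomes   strchr (s, 'a')
       strstr ("abcd", "cd") -- folds to  "abcd" + 2

   Anything else is left to the library.  */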
2025
2026 static rtx
2027 expand_builtin_strstr (arglist, target, mode)
2028 tree arglist;
2029 rtx target;
2030 enum machine_mode mode;
2031 {
2032 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2033 return 0;
2034 else
2035 {
2036 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2037 tree fn;
2038 const char *p1, *p2;
2039
2040 p2 = c_getstr (s2);
2041 if (p2 == NULL)
2042 return 0;
2043
2044 p1 = c_getstr (s1);
2045 if (p1 != NULL)
2046 {
2047 const char *r = strstr (p1, p2);
2048
2049 if (r == NULL)
2050 return const0_rtx;
2051
2052 /* Return an offset into the constant string argument. */
2053 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2054 s1, ssize_int (r - p1))),
2055 target, mode, EXPAND_NORMAL);
2056 }
2057
2058 if (p2[0] == '\0')
2059 return expand_expr (s1, target, mode, EXPAND_NORMAL);
2060
2061 if (p2[1] != '\0')
2062 return 0;
2063
2064 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2065 if (!fn)
2066 return 0;
2067
2068 /* New argument list transforming strstr(s1, s2) to
2069 strchr(s1, s2[0]). */
2070 arglist =
2071 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
2072 arglist = tree_cons (NULL_TREE, s1, arglist);
2073 return expand_expr (build_function_call_expr (fn, arglist),
2074 target, mode, EXPAND_NORMAL);
2075 }
2076 }
2077
2078 /* Expand a call to the strchr builtin. Return 0 if we failed; the
2079 caller should then emit a normal call. Otherwise try to get the result
2080 in TARGET, if convenient (and in mode MODE if that's convenient). */
2081
2082 static rtx
2083 expand_builtin_strchr (arglist, target, mode)
2084 tree arglist;
2085 rtx target;
2086 enum machine_mode mode;
2087 {
2088 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2089 return 0;
2090 else
2091 {
2092 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2093 const char *p1;
2094
2095 if (TREE_CODE (s2) != INTEGER_CST)
2096 return 0;
2097
2098 p1 = c_getstr (s1);
2099 if (p1 != NULL)
2100 {
2101 char c;
2102 const char *r;
2103
2104 if (target_char_cast (s2, &c))
2105 return 0;
2106
2107 r = strchr (p1, c);
2108
2109 if (r == NULL)
2110 return const0_rtx;
2111
2112 /* Return an offset into the constant string argument. */
2113 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2114 s1, ssize_int (r - p1))),
2115 target, mode, EXPAND_NORMAL);
2116 }
2117
2118 /* FIXME: Should use a strchrM optab here so that ports can optimize
2119 this. */
2120 return 0;
2121 }
2122 }
2123
2124 /* Expand a call to the strrchr builtin. Return 0 if we failed; the
2125 caller should then emit a normal call. Otherwise try to get the result
2126 in TARGET, if convenient (and in mode MODE if that's convenient). */
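/* Hypothetical examples of the transformations performed below:

       strrchr ("banana", 'n') -- folds to  "banana" + 4
       strrchr (s, '\0')       -- becomes   strchr (s, '\0')

   Other forms are expanded as ordinary calls.  */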
2127
2128 static rtx
2129 expand_builtin_strrchr (arglist, target, mode)
2130 tree arglist;
2131 rtx target;
2132 enum machine_mode mode;
2133 {
2134 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2135 return 0;
2136 else
2137 {
2138 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2139 tree fn;
2140 const char *p1;
2141
2142 if (TREE_CODE (s2) != INTEGER_CST)
2143 return 0;
2144
2145 p1 = c_getstr (s1);
2146 if (p1 != NULL)
2147 {
2148 char c;
2149 const char *r;
2150
2151 if (target_char_cast (s2, &c))
2152 return 0;
2153
2154 r = strrchr (p1, c);
2155
2156 if (r == NULL)
2157 return const0_rtx;
2158
2159 /* Return an offset into the constant string argument. */
2160 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2161 s1, ssize_int (r - p1))),
2162 target, mode, EXPAND_NORMAL);
2163 }
2164
2165 if (! integer_zerop (s2))
2166 return 0;
2167
2168 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2169 if (!fn)
2170 return 0;
2171
2172 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
2173 return expand_expr (build_function_call_expr (fn, arglist),
2174 target, mode, EXPAND_NORMAL);
2175 }
2176 }
2177
2178 /* Expand a call to the strpbrk builtin. Return 0 if we failed; the
2179 caller should then emit a normal call. Otherwise try to get the result
2180 in TARGET, if convenient (and in mode MODE if that's convenient). */
2181
2182 static rtx
2183 expand_builtin_strpbrk (arglist, target, mode)
2184 tree arglist;
2185 rtx target;
2186 enum machine_mode mode;
2187 {
2188 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2189 return 0;
2190 else
2191 {
2192 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2193 tree fn;
2194 const char *p1, *p2;
2195
2196 p2 = c_getstr (s2);
2197 if (p2 == NULL)
2198 return 0;
2199
2200 p1 = c_getstr (s1);
2201 if (p1 != NULL)
2202 {
2203 const char *r = strpbrk (p1, p2);
2204
2205 if (r == NULL)
2206 return const0_rtx;
2207
2208 /* Return an offset into the constant string argument. */
2209 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2210 s1, ssize_int (r - p1))),
2211 target, mode, EXPAND_NORMAL);
2212 }
2213
2214 if (p2[0] == '\0')
2215 {
2216 /* strpbrk(x, "") == NULL.
2217 Evaluate and ignore the arguments in case they have
2218 side-effects. */
2219 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2220 return const0_rtx;
2221 }
2222
2223 if (p2[1] != '\0')
2224 return 0; /* Really call strpbrk. */
2225
2226 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2227 if (!fn)
2228 return 0;
2229
2230 /* New argument list transforming strpbrk(s1, s2) to
2231 strchr(s1, s2[0]). */
2232 arglist =
2233 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
2234 arglist = tree_cons (NULL_TREE, s1, arglist);
2235 return expand_expr (build_function_call_expr (fn, arglist),
2236 target, mode, EXPAND_NORMAL);
2237 }
2238 }
2239
2240 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2241 bytes from constant string DATA + OFFSET and return it as target
2242 constant. */
2243
2244 static rtx
2245 builtin_memcpy_read_str (data, offset, mode)
2246 PTR data;
2247 HOST_WIDE_INT offset;
2248 enum machine_mode mode;
2249 {
2250 const char *str = (const char *) data;
2251
2252 if (offset < 0
2253 || ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2254 > strlen (str) + 1))
2255 abort (); /* Attempt to read past the end of constant string. */
2256
2257 return c_readstr (str + offset, mode);
2258 }
2259
2260 /* Expand a call to the memcpy builtin, with arguments in ARGLIST.
2261 Return 0 if we failed; the caller should then emit a normal call.
2262 Otherwise try to get the result in TARGET, if convenient (and in
2263 mode MODE if that's convenient). If ENDP is 0 return the
2264 destination pointer, if ENDP is 1 return the end pointer a la
2265 mempcpy, and if ENDP is 2 return the end pointer minus one a la
2266 stpcpy. */
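/* An illustration of the ENDP convention (hypothetical calls, where
   n is the number of bytes copied):

       memcpy (d, s, n)    ENDP == 0   returns d
       mempcpy (d, s, n)   ENDP == 1   returns d + n
       stpcpy (d, s)       ENDP == 2   returns d + n - 1
                                       (the terminating NUL of D)  */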
2267 static rtx
2268 expand_builtin_memcpy (arglist, target, mode, endp)
2269 tree arglist;
2270 rtx target;
2271 enum machine_mode mode;
2272 int endp;
2273 {
2274 if (!validate_arglist (arglist,
2275 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2276 return 0;
2277 else
2278 {
2279 tree dest = TREE_VALUE (arglist);
2280 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2281 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2282 const char *src_str;
2283
2284 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2285 unsigned int dest_align
2286 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2287 rtx dest_mem, src_mem, dest_addr, len_rtx;
2288
2289 /* If DEST is not a pointer type, call the normal function. */
2290 if (dest_align == 0)
2291 return 0;
2292
2293 /* If the LEN parameter is zero, return DEST. */
2294 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2295 {
2296 /* Evaluate and ignore SRC in case it has side-effects. */
2297 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2298 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2299 }
2300
2301 /* If SRC is not a pointer type, don't do this
2302 operation in-line. */
2303 if (src_align == 0)
2304 return 0;
2305
2306 dest_mem = get_memory_rtx (dest);
2307 set_mem_align (dest_mem, dest_align);
2308 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2309 src_str = c_getstr (src);
2310
2311 /* If SRC is a string constant and block move would be done
2312 by pieces, we can avoid loading the string from memory
2313 and instead store only the computed constants. */
2314 if (src_str
2315 && GET_CODE (len_rtx) == CONST_INT
2316 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2317 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2318 (PTR) src_str, dest_align))
2319 {
2320 store_by_pieces (dest_mem, INTVAL (len_rtx),
2321 builtin_memcpy_read_str,
2322 (PTR) src_str, dest_align);
2323 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2324 #ifdef POINTERS_EXTEND_UNSIGNED
2325 if (GET_MODE (dest_mem) != ptr_mode)
2326 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2327 #endif
2328 if (endp)
2329 {
2330 rtx result = gen_rtx_PLUS (GET_MODE (dest_mem), dest_mem, len_rtx);
2331 if (endp == 2)
2332 result = simplify_gen_binary (MINUS, GET_MODE (result), result, const1_rtx);
2333 return result;
2334 }
2335 else
2336 return dest_mem;
2337 }
2338
2339 src_mem = get_memory_rtx (src);
2340 set_mem_align (src_mem, src_align);
2341
2342 /* Copy word part most expediently. */
2343 dest_addr = emit_block_move (dest_mem, src_mem, len_rtx,
2344 BLOCK_OP_NORMAL);
2345
2346 if (dest_addr == 0)
2347 {
2348 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2349 #ifdef POINTERS_EXTEND_UNSIGNED
2350 if (GET_MODE (dest_addr) != ptr_mode)
2351 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2352 #endif
2353 }
2354
2355 if (endp)
2356 {
2357 rtx result = gen_rtx_PLUS (GET_MODE (dest_addr), dest_addr, len_rtx);
2358 if (endp == 2)
2359 result = simplify_gen_binary (MINUS, GET_MODE (result), result, const1_rtx);
2360 return result;
2361 }
2362 else
2363 return dest_addr;
2364 }
2365 }
2366
2367 /* Expand expression EXP, which is a call to the strcpy builtin. Return 0
2368 if we failed; the caller should then emit a normal call. Otherwise try to get
2369 the result in TARGET, if convenient (and in mode MODE if that's
2370 convenient). */
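/* When the length of the source is known, the call is rewritten in
   terms of memcpy; a hypothetical example:

       strcpy (d, "abc")   -- becomes   memcpy (d, "abc", 4)

   (three characters plus the terminating NUL).  */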
2371
2372 static rtx
2373 expand_builtin_strcpy (exp, target, mode)
2374 tree exp;
2375 rtx target;
2376 enum machine_mode mode;
2377 {
2378 tree arglist = TREE_OPERAND (exp, 1);
2379 tree fn, len;
2380
2381 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2382 return 0;
2383
2384 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2385 if (!fn)
2386 return 0;
2387
2388 len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
2389 if (len == 0)
2390 return 0;
2391
2392 len = size_binop (PLUS_EXPR, len, ssize_int (1));
2393 chainon (arglist, build_tree_list (NULL_TREE, len));
2394 return expand_expr (build_function_call_expr (fn, arglist),
2395 target, mode, EXPAND_NORMAL);
2396 }
2397
2398 /* Expand a call to the stpcpy builtin, with arguments in ARGLIST.
2399 Return 0 if we failed; the caller should then emit a normal call.
2400 Otherwise try to get the result in TARGET, if convenient (and in
2401 mode MODE if that's convenient). */
2402
2403 static rtx
2404 expand_builtin_stpcpy (arglist, target, mode)
2405 tree arglist;
2406 rtx target;
2407 enum machine_mode mode;
2408 {
2409 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2410 return 0;
2411 else
2412 {
2413 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
2414 if (len == 0)
2415 return 0;
2416
2417 len = fold (size_binop (PLUS_EXPR, len, ssize_int (1)));
2418 chainon (arglist, build_tree_list (NULL_TREE, len));
2419 return expand_builtin_memcpy (arglist, target, mode, /*endp=*/2);
2420 }
2421 }
2422
2423 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2424 bytes from constant string DATA + OFFSET and return it as target
2425 constant. */
2426
2427 static rtx
2428 builtin_strncpy_read_str (data, offset, mode)
2429 PTR data;
2430 HOST_WIDE_INT offset;
2431 enum machine_mode mode;
2432 {
2433 const char *str = (const char *) data;
2434
2435 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
2436 return const0_rtx;
2437
2438 return c_readstr (str + offset, mode);
2439 }
2440
2441 /* Expand expression EXP, which is a call to the strncpy builtin. Return 0
2442 if we failed; the caller should then emit a normal call. */
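/* Hypothetical examples of the cases handled below:

       strncpy (d, "ab", 5)    -- stored by pieces as "ab\0\0\0"
       strncpy (d, s, 0)       -- becomes   d  (after evaluating S)
       strncpy (d, "abcd", 3)  -- becomes   memcpy (d, "abcd", 3)  */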
2443
2444 static rtx
2445 expand_builtin_strncpy (arglist, target, mode)
2446 tree arglist;
2447 rtx target;
2448 enum machine_mode mode;
2449 {
2450 if (!validate_arglist (arglist,
2451 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2452 return 0;
2453 else
2454 {
2455 tree slen = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
2456 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2457 tree fn;
2458
2459 /* We must be passed a constant len parameter. */
2460 if (TREE_CODE (len) != INTEGER_CST)
2461 return 0;
2462
2463 /* If the len parameter is zero, return the dst parameter. */
2464 if (integer_zerop (len))
2465 {
2466 /* Evaluate and ignore the src argument in case it has
2467 side-effects. */
2468 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
2469 VOIDmode, EXPAND_NORMAL);
2470 /* Return the dst parameter. */
2471 return expand_expr (TREE_VALUE (arglist), target, mode,
2472 EXPAND_NORMAL);
2473 }
2474
2475 /* Now, we must be passed a constant src ptr parameter. */
2476 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
2477 return 0;
2478
2479 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
2480
2481 /* We're required to pad with trailing zeros if the requested
2482 len is greater than strlen(s2)+1. In that case try to
2483 use store_by_pieces; if that fails, punt. */
2484 if (tree_int_cst_lt (slen, len))
2485 {
2486 tree dest = TREE_VALUE (arglist);
2487 unsigned int dest_align
2488 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2489 const char *p = c_getstr (TREE_VALUE (TREE_CHAIN (arglist)));
2490 rtx dest_mem;
2491
2492 if (!p || dest_align == 0 || !host_integerp (len, 1)
2493 || !can_store_by_pieces (tree_low_cst (len, 1),
2494 builtin_strncpy_read_str,
2495 (PTR) p, dest_align))
2496 return 0;
2497
2498 dest_mem = get_memory_rtx (dest);
2499 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2500 builtin_strncpy_read_str,
2501 (PTR) p, dest_align);
2502 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2503 #ifdef POINTERS_EXTEND_UNSIGNED
2504 if (GET_MODE (dest_mem) != ptr_mode)
2505 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2506 #endif
2507 return dest_mem;
2508 }
2509
2510 /* OK, transform into a call to the builtin memcpy. */
2511 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2512 if (!fn)
2513 return 0;
2514 return expand_expr (build_function_call_expr (fn, arglist),
2515 target, mode, EXPAND_NORMAL);
2516 }
2517 }
2518
2519 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2520 bytes from constant string DATA + OFFSET and return it as target
2521 constant. */
2522
2523 static rtx
2524 builtin_memset_read_str (data, offset, mode)
2525 PTR data;
2526 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2527 enum machine_mode mode;
2528 {
2529 const char *c = (const char *) data;
2530 char *p = alloca (GET_MODE_SIZE (mode));
2531
2532 memset (p, *c, GET_MODE_SIZE (mode));
2533
2534 return c_readstr (p, mode);
2535 }
2536
2537 /* Callback routine for store_by_pieces. Return the RTL of a register
2538 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
2539 char value given in the RTL register data. For example, if mode is
2540 4 bytes wide, return the RTL for 0x01010101*data. */
2541
2542 static rtx
2543 builtin_memset_gen_str (data, offset, mode)
2544 PTR data;
2545 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2546 enum machine_mode mode;
2547 {
2548 rtx target, coeff;
2549 size_t size;
2550 char *p;
2551
2552 size = GET_MODE_SIZE (mode);
2553 if (size == 1)
2554 return (rtx) data;
2555
2556 p = alloca (size);
2557 memset (p, 1, size);
2558 coeff = c_readstr (p, mode);
2559
2560 target = convert_to_mode (mode, (rtx) data, 1);
2561 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
2562 return force_reg (mode, target);
2563 }
2564
2565 /* Expand expression EXP, which is a call to the memset builtin. Return 0
2566 if we failed; the caller should then emit a normal call. Otherwise try to get
2567 the result in TARGET, if convenient (and in mode MODE if that's
2568 convenient). */
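/* Hypothetical examples of the cases handled below:

       memset (p, 0, n)    -- expands to a block clear (clear_storage)
       memset (p, 'x', 8)  -- small constant fill, stored by pieces
       memset (p, c, 16)   -- non-constant value, stored by pieces from
                              a register holding 0x0101...01 * c
       memset (p, v, 0)    -- becomes   p  (after evaluating V)  */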
2569
2570 static rtx
2571 expand_builtin_memset (exp, target, mode)
2572 tree exp;
2573 rtx target;
2574 enum machine_mode mode;
2575 {
2576 tree arglist = TREE_OPERAND (exp, 1);
2577
2578 if (!validate_arglist (arglist,
2579 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
2580 return 0;
2581 else
2582 {
2583 tree dest = TREE_VALUE (arglist);
2584 tree val = TREE_VALUE (TREE_CHAIN (arglist));
2585 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2586 char c;
2587
2588 unsigned int dest_align
2589 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2590 rtx dest_mem, dest_addr, len_rtx;
2591
2592 /* If DEST is not a pointer type, don't do this
2593 operation in-line. */
2594 if (dest_align == 0)
2595 return 0;
2596
2597 /* If the LEN parameter is zero, return DEST. */
2598 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2599 {
2600 /* Evaluate and ignore VAL in case it has side-effects. */
2601 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
2602 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2603 }
2604
2605 if (TREE_CODE (val) != INTEGER_CST)
2606 {
2607 rtx val_rtx;
2608
2609 if (!host_integerp (len, 1))
2610 return 0;
2611
2612 if (optimize_size && tree_low_cst (len, 1) > 1)
2613 return 0;
2614
2615 /* Assume that we can memset by pieces if we can store the
2616 coefficients by pieces (in the required modes). We can't
2617 pass builtin_memset_gen_str as that emits RTL. */
2618 c = 1;
2619 if (!can_store_by_pieces (tree_low_cst (len, 1),
2620 builtin_memset_read_str,
2621 (PTR) &c, dest_align))
2622 return 0;
2623
2624 val = fold (build1 (CONVERT_EXPR, unsigned_char_type_node, val));
2625 val_rtx = expand_expr (val, NULL_RTX, VOIDmode, 0);
2626 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
2627 val_rtx);
2628 dest_mem = get_memory_rtx (dest);
2629 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2630 builtin_memset_gen_str,
2631 (PTR) val_rtx, dest_align);
2632 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2633 #ifdef POINTERS_EXTEND_UNSIGNED
2634 if (GET_MODE (dest_mem) != ptr_mode)
2635 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2636 #endif
2637 return dest_mem;
2638 }
2639
2640 if (target_char_cast (val, &c))
2641 return 0;
2642
2643 if (c)
2644 {
2645 if (!host_integerp (len, 1))
2646 return 0;
2647 if (!can_store_by_pieces (tree_low_cst (len, 1),
2648 builtin_memset_read_str, (PTR) &c,
2649 dest_align))
2650 return 0;
2651
2652 dest_mem = get_memory_rtx (dest);
2653 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2654 builtin_memset_read_str,
2655 (PTR) &c, dest_align);
2656 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2657 #ifdef POINTERS_EXTEND_UNSIGNED
2658 if (GET_MODE (dest_mem) != ptr_mode)
2659 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2660 #endif
2661 return dest_mem;
2662 }
2663
2664 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2665
2666 dest_mem = get_memory_rtx (dest);
2667 set_mem_align (dest_mem, dest_align);
2668 dest_addr = clear_storage (dest_mem, len_rtx);
2669
2670 if (dest_addr == 0)
2671 {
2672 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2673 #ifdef POINTERS_EXTEND_UNSIGNED
2674 if (GET_MODE (dest_addr) != ptr_mode)
2675 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2676 #endif
2677 }
2678
2679 return dest_addr;
2680 }
2681 }
2682
2683 /* Expand expression EXP, which is a call to the bzero builtin. Return 0
2684 if we failed; the caller should then emit a normal call. */
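/* The expansion simply rewrites the call in terms of memset; a
   hypothetical example:

       bzero (buf, 16)   -- becomes   memset (buf, 0, (size_t) 16)

   falling back to a real bzero call if that cannot be expanded
   inline.  */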
2685
2686 static rtx
2687 expand_builtin_bzero (exp)
2688 tree exp;
2689 {
2690 tree arglist = TREE_OPERAND (exp, 1);
2691 tree dest, size, newarglist;
2692 rtx result;
2693
2694 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2695 return NULL_RTX;
2696
2697 dest = TREE_VALUE (arglist);
2698 size = TREE_VALUE (TREE_CHAIN (arglist));
2699
2700 /* New argument list transforming bzero(ptr x, int y) to
2701 memset(ptr x, int 0, size_t y). This is done this way
2702 so that if it isn't expanded inline, we fall back to
2703 calling bzero instead of memset. */
2704
2705 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
2706 newarglist = tree_cons (NULL_TREE, integer_zero_node, newarglist);
2707 newarglist = tree_cons (NULL_TREE, dest, newarglist);
2708
2709 TREE_OPERAND (exp, 1) = newarglist;
2710 result = expand_builtin_memset (exp, const0_rtx, VOIDmode);
2711
2712 /* Always restore the original arguments. */
2713 TREE_OPERAND (exp, 1) = arglist;
2714
2715 return result;
2716 }
2717
2718 /* Expand expression EXP, which is a call to the memcmp or the strcmp builtin.
2719 ARGLIST is the argument list for this call. Return 0 if we failed and the
2720 caller should emit a normal call; otherwise try to get the result in
2721 TARGET, if convenient (and in mode MODE, if that's convenient). */
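/* Hypothetical examples of the cases handled below:

       memcmp ("abc", "abd", 3) -- folds to a compile-time -1
       memcmp (a, b, 0)         -- folds to  0
       memcmp (a, b, 1)         -- becomes
           (*(const unsigned char *) a - *(const unsigned char *) b)

   Longer non-constant comparisons use the cmpstrsi pattern, if any,
   or a call to the memcmp library routine.  */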
2722
2723 static rtx
2724 expand_builtin_memcmp (exp, arglist, target, mode)
2725 tree exp ATTRIBUTE_UNUSED;
2726 tree arglist;
2727 rtx target;
2728 enum machine_mode mode;
2729 {
2730 tree arg1, arg2, len;
2731 const char *p1, *p2;
2732
2733 if (!validate_arglist (arglist,
2734 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2735 return 0;
2736
2737 arg1 = TREE_VALUE (arglist);
2738 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2739 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2740
2741 /* If the len parameter is zero, return zero. */
2742 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2743 {
2744 /* Evaluate and ignore arg1 and arg2 in case they have
2745 side-effects. */
2746 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2747 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2748 return const0_rtx;
2749 }
2750
2751 p1 = c_getstr (arg1);
2752 p2 = c_getstr (arg2);
2753
2754 /* If all arguments are constant, and the value of len is not greater
2755 than the lengths of arg1 and arg2, evaluate at compile-time. */
2756 if (host_integerp (len, 1) && p1 && p2
2757 && compare_tree_int (len, strlen (p1) + 1) <= 0
2758 && compare_tree_int (len, strlen (p2) + 1) <= 0)
2759 {
2760 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
2761
2762 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
2763 }
2764
2765 /* If len parameter is one, return an expression corresponding to
2766 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2767 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
2768 {
2769 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2770 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2771 tree ind1 =
2772 fold (build1 (CONVERT_EXPR, integer_type_node,
2773 build1 (INDIRECT_REF, cst_uchar_node,
2774 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2775 tree ind2 =
2776 fold (build1 (CONVERT_EXPR, integer_type_node,
2777 build1 (INDIRECT_REF, cst_uchar_node,
2778 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2779 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2780 return expand_expr (result, target, mode, EXPAND_NORMAL);
2781 }
2782
2783 #ifdef HAVE_cmpstrsi
2784 {
2785 rtx arg1_rtx, arg2_rtx, arg3_rtx;
2786 rtx result;
2787 rtx insn;
2788
2789 int arg1_align
2790 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2791 int arg2_align
2792 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2793 enum machine_mode insn_mode
2794 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
2795
2796 /* If we don't have POINTER_TYPE, call the function. */
2797 if (arg1_align == 0 || arg2_align == 0)
2798 return 0;
2799
2800 /* Make a place to write the result of the instruction. */
2801 result = target;
2802 if (! (result != 0
2803 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
2804 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
2805 result = gen_reg_rtx (insn_mode);
2806
2807 arg1_rtx = get_memory_rtx (arg1);
2808 arg2_rtx = get_memory_rtx (arg2);
2809 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2810 if (!HAVE_cmpstrsi)
2811 insn = NULL_RTX;
2812 else
2813 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
2814 GEN_INT (MIN (arg1_align, arg2_align)));
2815
2816 if (insn)
2817 emit_insn (insn);
2818 else
2819 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
2820 TYPE_MODE (integer_type_node), 3,
2821 XEXP (arg1_rtx, 0), Pmode,
2822 XEXP (arg2_rtx, 0), Pmode,
2823 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
2824 TREE_UNSIGNED (sizetype)),
2825 TYPE_MODE (sizetype));
2826
2827 /* Return the value in the proper mode for this function. */
2828 mode = TYPE_MODE (TREE_TYPE (exp));
2829 if (GET_MODE (result) == mode)
2830 return result;
2831 else if (target != 0)
2832 {
2833 convert_move (target, result, 0);
2834 return target;
2835 }
2836 else
2837 return convert_to_mode (mode, result, 0);
2838 }
2839 #endif
2840
2841 return 0;
2842 }
2843
2844 /* Expand expression EXP, which is a call to the strcmp builtin. Return 0
2845 if we failed; the caller should then emit a normal call. Otherwise try to get
2846 the result in TARGET, if convenient. */
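/* Hypothetical examples of the cases handled below:

       strcmp ("abc", "abd") -- folds to a compile-time -1
       strcmp (s, "")        -- becomes the difference of the first
                                bytes, read as unsigned chars
       strcmp (s, "abc")     -- becomes   memcmp (s, "abc", 4)

   provided the implicit memcmp decl is available.  */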
2847
2848 static rtx
2849 expand_builtin_strcmp (exp, target, mode)
2850 tree exp;
2851 rtx target;
2852 enum machine_mode mode;
2853 {
2854 tree arglist = TREE_OPERAND (exp, 1);
2855 tree arg1, arg2, len, len2, fn;
2856 const char *p1, *p2;
2857
2858 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2859 return 0;
2860
2861 arg1 = TREE_VALUE (arglist);
2862 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2863
2864 p1 = c_getstr (arg1);
2865 p2 = c_getstr (arg2);
2866
2867 if (p1 && p2)
2868 {
2869 const int i = strcmp (p1, p2);
2870 return (i < 0 ? constm1_rtx : (i > 0 ? const1_rtx : const0_rtx));
2871 }
2872
2873 /* If either arg is "", return an expression corresponding to
2874 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2875 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
2876 {
2877 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2878 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2879 tree ind1 =
2880 fold (build1 (CONVERT_EXPR, integer_type_node,
2881 build1 (INDIRECT_REF, cst_uchar_node,
2882 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2883 tree ind2 =
2884 fold (build1 (CONVERT_EXPR, integer_type_node,
2885 build1 (INDIRECT_REF, cst_uchar_node,
2886 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2887 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2888 return expand_expr (result, target, mode, EXPAND_NORMAL);
2889 }
2890
2891 len = c_strlen (arg1);
2892 len2 = c_strlen (arg2);
2893
2894 if (len)
2895 len = size_binop (PLUS_EXPR, ssize_int (1), len);
2896
2897 if (len2)
2898 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
2899
2900 /* If we don't have a constant length for the first, use the length
2901 of the second, if we know it. We don't require a constant for
2902 this case; some cost analysis could be done if both are available
2903 but neither is constant. For now, assume they're equally cheap
2904 unless one has side effects.
2905
2906 If both strings have constant lengths, use the smaller. This
2907 could arise if optimization results in strcpy being called with
2908 two fixed strings, or if the code was machine-generated. We should
2909 add some code to the `memcmp' handler below to deal with such
2910 situations, someday. */
2911
2912 if (!len || TREE_CODE (len) != INTEGER_CST)
2913 {
2914 if (len2 && !TREE_SIDE_EFFECTS (len2))
2915 len = len2;
2916 else if (len == 0)
2917 return 0;
2918 }
2919 else if (len2 && TREE_CODE (len2) == INTEGER_CST
2920 && tree_int_cst_lt (len2, len))
2921 len = len2;
2922
2923 /* If both arguments have side effects, we cannot optimize. */
2924 if (TREE_SIDE_EFFECTS (len))
2925 return 0;
2926
2927 fn = implicit_built_in_decls[BUILT_IN_MEMCMP];
2928 if (!fn)
2929 return 0;
2930
2931 chainon (arglist, build_tree_list (NULL_TREE, len));
2932 return expand_expr (build_function_call_expr (fn, arglist),
2933 target, mode, EXPAND_NORMAL);
2934 }
2935
2936 /* Expand expression EXP, which is a call to the strncmp builtin. Return 0
2937 if we failed; the caller should then emit a normal call. Otherwise try to get
2938 the result in TARGET, if convenient. */
2939
2940 static rtx
2941 expand_builtin_strncmp (exp, target, mode)
2942 tree exp;
2943 rtx target;
2944 enum machine_mode mode;
2945 {
2946 tree arglist = TREE_OPERAND (exp, 1);
2947 tree fn, newarglist, len = 0;
2948 tree arg1, arg2, arg3;
2949 const char *p1, *p2;
2950
2951 if (!validate_arglist (arglist,
2952 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2953 return 0;
2954
2955 arg1 = TREE_VALUE (arglist);
2956 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2957 arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2958
2959 /* If the len parameter is zero, return zero. */
2960 if (host_integerp (arg3, 1) && tree_low_cst (arg3, 1) == 0)
2961 {
2962 /* Evaluate and ignore arg1 and arg2 in case they have
2963 side-effects. */
2964 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2965 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2966 return const0_rtx;
2967 }
2968
2969 p1 = c_getstr (arg1);
2970 p2 = c_getstr (arg2);
2971
2972 /* If all arguments are constant, evaluate at compile-time. */
2973 if (host_integerp (arg3, 1) && p1 && p2)
2974 {
2975 const int r = strncmp (p1, p2, tree_low_cst (arg3, 1));
2976 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
2977 }
2978
2979 /* If len == 1, or either string parameter is "" and len > 1,
2980 return (*(const u_char*)arg1 - *(const u_char*)arg2). */
2981 if (host_integerp (arg3, 1)
2982 && (tree_low_cst (arg3, 1) == 1
2983 || (tree_low_cst (arg3, 1) > 1
2984 && ((p1 && *p1 == '\0') || (p2 && *p2 == '\0')))))
2985 {
2986 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2987 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2988 tree ind1 =
2989 fold (build1 (CONVERT_EXPR, integer_type_node,
2990 build1 (INDIRECT_REF, cst_uchar_node,
2991 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2992 tree ind2 =
2993 fold (build1 (CONVERT_EXPR, integer_type_node,
2994 build1 (INDIRECT_REF, cst_uchar_node,
2995 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2996 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2997 return expand_expr (result, target, mode, EXPAND_NORMAL);
2998 }
2999
3000 /* If c_strlen can determine an expression for one of the string
3001 lengths, and it doesn't have side effects, then call
3002 expand_builtin_memcmp() using length MIN(strlen(string)+1, arg3). */
3003
3004 /* Perhaps one of the strings is really constant, if so prefer
3005 that constant length over the other string's length. */
3006 if (p1)
3007 len = c_strlen (arg1);
3008 else if (p2)
3009 len = c_strlen (arg2);
3010
3011 /* If we still don't have a len, try either string arg as long
3012 as they don't have side effects. */
3013 if (!len && !TREE_SIDE_EFFECTS (arg1))
3014 len = c_strlen (arg1);
3015 if (!len && !TREE_SIDE_EFFECTS (arg2))
3016 len = c_strlen (arg2);
3017 /* If we still don't have a length, punt. */
3018 if (!len)
3019 return 0;
3020
3021 fn = implicit_built_in_decls[BUILT_IN_MEMCMP];
3022 if (!fn)
3023 return 0;
3024
3025 /* Add one to the string length. */
3026 len = fold (size_binop (PLUS_EXPR, len, ssize_int (1)));
3027
3028 /* The actual new length parameter is MIN(len,arg3). */
3029 len = fold (build (MIN_EXPR, TREE_TYPE (len), len, arg3));
3030
3031 newarglist = build_tree_list (NULL_TREE, len);
3032 newarglist = tree_cons (NULL_TREE, arg2, newarglist);
3033 newarglist = tree_cons (NULL_TREE, arg1, newarglist);
3034 return expand_expr (build_function_call_expr (fn, newarglist),
3035 target, mode, EXPAND_NORMAL);
3036 }
3037
3038 /* Expand expression EXP, which is a call to the strcat builtin.
3039 Return 0 if we failed; the caller should then emit a normal call.
3040 Otherwise try to get the result in TARGET, if convenient. */
3041
3042 static rtx
3043 expand_builtin_strcat (arglist, target, mode)
3044 tree arglist;
3045 rtx target;
3046 enum machine_mode mode;
3047 {
3048 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3049 return 0;
3050 else
3051 {
3052 tree dst = TREE_VALUE (arglist),
3053 src = TREE_VALUE (TREE_CHAIN (arglist));
3054 const char *p = c_getstr (src);
3055
3056 /* If the string length is zero, return the dst parameter. */
3057 if (p && *p == '\0')
3058 return expand_expr (dst, target, mode, EXPAND_NORMAL);
3059
3060 return 0;
3061 }
3062 }
3063
3064 /* Expand expression EXP, which is a call to the strncat builtin.
3065 Return 0 if we failed; the caller should then emit a normal call.
3066 Otherwise try to get the result in TARGET, if convenient. */
3067
3068 static rtx
3069 expand_builtin_strncat (arglist, target, mode)
3070 tree arglist;
3071 rtx target;
3072 enum machine_mode mode;
3073 {
3074 if (!validate_arglist (arglist,
3075 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3076 return 0;
3077 else
3078 {
3079 tree dst = TREE_VALUE (arglist),
3080 src = TREE_VALUE (TREE_CHAIN (arglist)),
3081 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3082 const char *p = c_getstr (src);
3083
3084 /* If the requested length is zero, or the src parameter string
3085 length is zero, return the dst parameter. */
3086 if (integer_zerop (len) || (p && *p == '\0'))
3087 {
3088 /* Evaluate and ignore the src and len parameters in case
3089 they have side-effects. */
3090 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3091 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
3092 return expand_expr (dst, target, mode, EXPAND_NORMAL);
3093 }
3094
3095 /* If the requested len is greater than or equal to the string
3096 length, call strcat. */
3097 if (TREE_CODE (len) == INTEGER_CST && p
3098 && compare_tree_int (len, strlen (p)) >= 0)
3099 {
3100 tree newarglist
3101 = tree_cons (NULL_TREE, dst, build_tree_list (NULL_TREE, src));
3102 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
3103
3104 /* If the replacement _DECL isn't initialized, don't do the
3105 transformation. */
3106 if (!fn)
3107 return 0;
3108
3109 return expand_expr (build_function_call_expr (fn, newarglist),
3110 target, mode, EXPAND_NORMAL);
3111 }
3112 return 0;
3113 }
3114 }
3115
3116 /* Expand expression EXP, which is a call to the strspn builtin.
3117 Return 0 if we failed; the caller should then emit a normal call.
3118 Otherwise try to get the result in TARGET, if convenient. */
3119
3120 static rtx
3121 expand_builtin_strspn (arglist, target, mode)
3122 tree arglist;
3123 rtx target;
3124 enum machine_mode mode;
3125 {
3126 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3127 return 0;
3128 else
3129 {
3130 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
3131 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
3132
3133 /* If both arguments are constants, evaluate at compile-time. */
3134 if (p1 && p2)
3135 {
3136 const size_t r = strspn (p1, p2);
3137 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
3138 }
3139
3140 /* If either argument is "", return 0. */
3141 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
3142 {
3143 /* Evaluate and ignore both arguments in case either one has
3144 side-effects. */
3145 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
3146 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3147 return const0_rtx;
3148 }
3149 return 0;
3150 }
3151 }
3152
3153 /* Expand expression EXP, which is a call to the strcspn builtin.
3154 Return 0 if we failed; the caller should then emit a normal call.
3155 Otherwise try to get the result in TARGET, if convenient. */
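/* Hypothetical examples of the cases handled below:

       strcspn ("abcde", "cx") -- folds to  2
       strcspn ("", s2)        -- folds to  0  (after evaluating S2)
       strcspn (s1, "")        -- becomes   strlen (s1)  */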
3156
3157 static rtx
3158 expand_builtin_strcspn (arglist, target, mode)
3159 tree arglist;
3160 rtx target;
3161 enum machine_mode mode;
3162 {
3163 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3164 return 0;
3165 else
3166 {
3167 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
3168 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
3169
3170 /* If both arguments are constants, evaluate at compile-time. */
3171 if (p1 && p2)
3172 {
3173 const size_t r = strcspn (p1, p2);
3174 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
3175 }
3176
3177 /* If the first argument is "", return 0. */
3178 if (p1 && *p1 == '\0')
3179 {
3180 /* Evaluate and ignore argument s2 in case it has
3181 side-effects. */
3182 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3183 return const0_rtx;
3184 }
3185
3186 /* If the second argument is "", return __builtin_strlen(s1). */
3187 if (p2 && *p2 == '\0')
3188 {
3189 tree newarglist = build_tree_list (NULL_TREE, s1),
3190 fn = implicit_built_in_decls[BUILT_IN_STRLEN];
3191
3192 /* If the replacement _DECL isn't initialized, don't do the
3193 transformation. */
3194 if (!fn)
3195 return 0;
3196
3197 return expand_expr (build_function_call_expr (fn, newarglist),
3198 target, mode, EXPAND_NORMAL);
3199 }
3200 return 0;
3201 }
3202 }
3203
3204 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
3205 if that's convenient. */
3206
3207 rtx
3208 expand_builtin_saveregs ()
3209 {
3210 rtx val, seq;
3211
3212 /* Don't do __builtin_saveregs more than once in a function.
3213 Save the result of the first call and reuse it. */
3214 if (saveregs_value != 0)
3215 return saveregs_value;
3216
3217 /* When this function is called, it means that registers must be
3218 saved on entry to this function. So we migrate the call to the
3219 first insn of this function. */
3220
3221 start_sequence ();
3222
3223 #ifdef EXPAND_BUILTIN_SAVEREGS
3224 /* Do whatever the machine needs done in this case. */
3225 val = EXPAND_BUILTIN_SAVEREGS ();
3226 #else
3227 /* ??? We used to try and build up a call to the out of line function,
3228 guessing about what registers needed saving etc. This became much
3229 harder with __builtin_va_start, since we don't have a tree for a
3230 call to __builtin_saveregs to fall back on. There was exactly one
3231 port (i860) that used this code, and I'm unconvinced it could actually
3232 handle the general case. So we no longer try to handle anything
3233 weird and make the backend absorb the evil. */
3234
3235 error ("__builtin_saveregs not supported by this target");
3236 val = const0_rtx;
3237 #endif
3238
3239 seq = get_insns ();
3240 end_sequence ();
3241
3242 saveregs_value = val;
3243
3244 /* Put the insns after the NOTE that starts the function. If this
3245 is inside a start_sequence, make the outer-level insn chain current, so
3246 the code is placed at the start of the function. */
3247 push_topmost_sequence ();
3248 emit_insn_after (seq, get_insns ());
3249 pop_topmost_sequence ();
3250
3251 return val;
3252 }
3253
3254 /* __builtin_args_info (N) returns word N of the arg space info
3255 for the current function. The number and meanings of words
3256 are controlled by the definition of CUMULATIVE_ARGS. */
3257
3258 static rtx
3259 expand_builtin_args_info (exp)
3260 tree exp;
3261 {
3262 tree arglist = TREE_OPERAND (exp, 1);
3263 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
3264 int *word_ptr = (int *) &current_function_args_info;
3265
3266 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
3267 abort ();
3268
3269 if (arglist != 0)
3270 {
3271 if (!host_integerp (TREE_VALUE (arglist), 0))
3272 error ("argument of `__builtin_args_info' must be constant");
3273 else
3274 {
3275 HOST_WIDE_INT wordnum = tree_low_cst (TREE_VALUE (arglist), 0);
3276
3277 if (wordnum < 0 || wordnum >= nwords)
3278 error ("argument of `__builtin_args_info' out of range");
3279 else
3280 return GEN_INT (word_ptr[wordnum]);
3281 }
3282 }
3283 else
3284 error ("missing argument in `__builtin_args_info'");
3285
3286 return const0_rtx;
3287 }
3288
3289 /* Expand ARGLIST, from a call to __builtin_next_arg. */
3290
3291 static rtx
3292 expand_builtin_next_arg (arglist)
3293 tree arglist;
3294 {
3295 tree fntype = TREE_TYPE (current_function_decl);
3296
3297 if (TYPE_ARG_TYPES (fntype) == 0
3298 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3299 == void_type_node))
3300 {
3301 error ("`va_start' used in function with fixed args");
3302 return const0_rtx;
3303 }
3304
3305 if (arglist)
3306 {
3307 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
3308 tree arg = TREE_VALUE (arglist);
3309
3310 /* Strip off all nops for the sake of the comparison. This
3311 is not quite the same as STRIP_NOPS. It does more.
3312 We must also strip off INDIRECT_EXPR for C++ reference
3313 parameters. */
3314 while (TREE_CODE (arg) == NOP_EXPR
3315 || TREE_CODE (arg) == CONVERT_EXPR
3316 || TREE_CODE (arg) == NON_LVALUE_EXPR
3317 || TREE_CODE (arg) == INDIRECT_REF)
3318 arg = TREE_OPERAND (arg, 0);
3319 if (arg != last_parm)
3320 warning ("second parameter of `va_start' not last named argument");
3321 }
3322 else
3323 /* Evidently an out of date version of <stdarg.h>; can't validate
3324 va_start's second argument, but can still work as intended. */
3325 warning ("`__builtin_next_arg' called without an argument");
3326
3327 return expand_binop (Pmode, add_optab,
3328 current_function_internal_arg_pointer,
3329 current_function_arg_offset_rtx,
3330 NULL_RTX, 0, OPTAB_LIB_WIDEN);
3331 }
3332
3333 /* Make it easier for the backends by protecting the valist argument
3334 from multiple evaluations. */
3335
3336 static tree
3337 stabilize_va_list (valist, needs_lvalue)
3338 tree valist;
3339 int needs_lvalue;
3340 {
3341 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
3342 {
3343 if (TREE_SIDE_EFFECTS (valist))
3344 valist = save_expr (valist);
3345
3346 /* For this case, the backends will be expecting a pointer to
3347 TREE_TYPE (va_list_type_node), but it's possible we've
3348 actually been given an array (an actual va_list_type_node).
3349 So fix it. */
3350 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
3351 {
3352 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
3353 tree p2 = build_pointer_type (va_list_type_node);
3354
3355 valist = build1 (ADDR_EXPR, p2, valist);
3356 valist = fold (build1 (NOP_EXPR, p1, valist));
3357 }
3358 }
3359 else
3360 {
3361 tree pt;
3362
3363 if (! needs_lvalue)
3364 {
3365 if (! TREE_SIDE_EFFECTS (valist))
3366 return valist;
3367
3368 pt = build_pointer_type (va_list_type_node);
3369 valist = fold (build1 (ADDR_EXPR, pt, valist));
3370 TREE_SIDE_EFFECTS (valist) = 1;
3371 }
3372
3373 if (TREE_SIDE_EFFECTS (valist))
3374 valist = save_expr (valist);
3375 valist = fold (build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)),
3376 valist));
3377 }
3378
3379 return valist;
3380 }
3381
3382 /* The "standard" implementation of va_start: just assign `nextarg' to
3383 the variable. */
3384
3385 void
3386 std_expand_builtin_va_start (valist, nextarg)
3387 tree valist;
3388 rtx nextarg;
3389 {
3390 tree t;
3391
3392 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3393 make_tree (ptr_type_node, nextarg));
3394 TREE_SIDE_EFFECTS (t) = 1;
3395
3396 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3397 }
3398
3399 /* Expand ARGLIST, from a call to __builtin_va_start. */
3400
3401 static rtx
3402 expand_builtin_va_start (arglist)
3403 tree arglist;
3404 {
3405 rtx nextarg;
3406 tree chain, valist;
3407
3408 chain = TREE_CHAIN (arglist);
3409
3410 if (TREE_CHAIN (chain))
3411 error ("too many arguments to function `va_start'");
3412
3413 nextarg = expand_builtin_next_arg (chain);
3414 valist = stabilize_va_list (TREE_VALUE (arglist), 1);
3415
3416 #ifdef EXPAND_BUILTIN_VA_START
3417 EXPAND_BUILTIN_VA_START (valist, nextarg);
3418 #else
3419 std_expand_builtin_va_start (valist, nextarg);
3420 #endif
3421
3422 return const0_rtx;
3423 }
3424
3425 /* The "standard" implementation of va_arg: read the value from the
3426 current (padded) address and increment by the (padded) size. */
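/* For a hypothetical 4-byte PARM_BOUNDARY and a 6-byte type, the
   rounded size computed below is

       ((6 + 4 - 1) / 4) * 4  ==  8

   so the value is read at AP and AP is then advanced by 8.  (Only
   arguments no larger than the boundary get the PAD_VARARGS_DOWN
   adjustment.)  */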
3427
3428 rtx
3429 std_expand_builtin_va_arg (valist, type)
3430 tree valist, type;
3431 {
3432 tree addr_tree, t, type_size = NULL;
3433 tree align, alignm1;
3434 tree rounded_size;
3435 rtx addr;
3436
3437 /* Compute the rounded size of the type. */
3438 align = size_int (PARM_BOUNDARY / BITS_PER_UNIT);
3439 alignm1 = size_int (PARM_BOUNDARY / BITS_PER_UNIT - 1);
3440 if (type == error_mark_node
3441 || (type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type))) == NULL
3442 || TREE_OVERFLOW (type_size))
3443 rounded_size = size_zero_node;
3444 else
3445 rounded_size = fold (build (MULT_EXPR, sizetype,
3446 fold (build (TRUNC_DIV_EXPR, sizetype,
3447 fold (build (PLUS_EXPR, sizetype,
3448 type_size, alignm1)),
3449 align)),
3450 align));
3451
3452 /* Get AP. */
3453 addr_tree = valist;
3454 if (PAD_VARARGS_DOWN && ! integer_zerop (rounded_size))
3455 {
3456 /* Small args are padded downward. */
3457 addr_tree = fold (build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
3458 fold (build (COND_EXPR, sizetype,
3459 fold (build (GT_EXPR, sizetype,
3460 rounded_size,
3461 align)),
3462 size_zero_node,
3463 fold (build (MINUS_EXPR, sizetype,
3464 rounded_size,
3465 type_size))))));
3466 }
3467
3468 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3469 addr = copy_to_reg (addr);
3470
3471 /* Compute new value for AP. */
3472 if (! integer_zerop (rounded_size))
3473 {
3474 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3475 build (PLUS_EXPR, TREE_TYPE (valist), valist,
3476 rounded_size));
3477 TREE_SIDE_EFFECTS (t) = 1;
3478 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3479 }
3480
3481 return addr;
3482 }
3483
3484 /* Expand __builtin_va_arg, which is not really a builtin function, but
3485 a very special sort of operator. */
3486
3487 rtx
3488 expand_builtin_va_arg (valist, type)
3489 tree valist, type;
3490 {
3491 rtx addr, result;
3492 tree promoted_type, want_va_type, have_va_type;
3493
3494 /* Verify that valist is of the proper type. */
3495
3496 want_va_type = va_list_type_node;
3497 have_va_type = TREE_TYPE (valist);
3498 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
3499 {
3500 /* If va_list is an array type, the argument may have decayed
3501 to a pointer type, e.g. by being passed to another function.
3502 In that case, unwrap both types so that we can compare the
3503 underlying records. */
3504 if (TREE_CODE (have_va_type) == ARRAY_TYPE
3505 || TREE_CODE (have_va_type) == POINTER_TYPE)
3506 {
3507 want_va_type = TREE_TYPE (want_va_type);
3508 have_va_type = TREE_TYPE (have_va_type);
3509 }
3510 }
3511 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
3512 {
3513 error ("first argument to `va_arg' not of type `va_list'");
3514 addr = const0_rtx;
3515 }
3516
3517 /* Generate a diagnostic for requesting data of a type that cannot
3518 be passed through `...' due to type promotion at the call site. */
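  /* For example, `va_arg (ap, char)' is undefined because a char
     argument reaches the `...' already promoted to int; the caller
     should write `va_arg (ap, int)' instead.  (A hypothetical instance
     of the diagnostic issued below.)  */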
3519 else if ((promoted_type = (*lang_hooks.types.type_promotes_to) (type))
3520 != type)
3521 {
3522 const char *name = "<anonymous type>", *pname = 0;
3523 static bool gave_help;
3524
3525 if (TYPE_NAME (type))
3526 {
3527 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE)
3528 name = IDENTIFIER_POINTER (TYPE_NAME (type));
3529 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
3530 && DECL_NAME (TYPE_NAME (type)))
3531 name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
3532 }
3533 if (TYPE_NAME (promoted_type))
3534 {
3535 if (TREE_CODE (TYPE_NAME (promoted_type)) == IDENTIFIER_NODE)
3536 pname = IDENTIFIER_POINTER (TYPE_NAME (promoted_type));
3537 else if (TREE_CODE (TYPE_NAME (promoted_type)) == TYPE_DECL
3538 && DECL_NAME (TYPE_NAME (promoted_type)))
3539 pname = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (promoted_type)));
3540 }
3541
3542 /* Unfortunately, this is merely undefined, rather than a constraint
3543 violation, so we cannot make this an error. If this call is never
3544 executed, the program is still strictly conforming. */
3545 warning ("`%s' is promoted to `%s' when passed through `...'",
3546 name, pname);
3547 if (! gave_help)
3548 {
3549 gave_help = true;
3550 warning ("(so you should pass `%s' not `%s' to `va_arg')",
3551 pname, name);
3552 }
3553
3554 /* We can, however, treat "undefined" any way we please.
3555 Emit a trap to encourage the user to fix the program. */
3556 expand_builtin_trap ();
3557
3558 /* This is dead code, but go ahead and finish so that the
3559 mode of the result comes out right. */
3560 addr = const0_rtx;
3561 }
3562 else
3563 {
3564 /* Make it easier for the backends by protecting the valist argument
3565 from multiple evaluations. */
3566 valist = stabilize_va_list (valist, 0);
3567
3568 #ifdef EXPAND_BUILTIN_VA_ARG
3569 addr = EXPAND_BUILTIN_VA_ARG (valist, type);
3570 #else
3571 addr = std_expand_builtin_va_arg (valist, type);
3572 #endif
3573 }
3574
3575 #ifdef POINTERS_EXTEND_UNSIGNED
3576 if (GET_MODE (addr) != Pmode)
3577 addr = convert_memory_address (Pmode, addr);
3578 #endif
3579
3580 result = gen_rtx_MEM (TYPE_MODE (type), addr);
3581 set_mem_alias_set (result, get_varargs_alias_set ());
3582
3583 return result;
3584 }
3585
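/* For illustration only: user code such as

     c = va_arg (ap, char);

   runs into the promotion diagnostic above, because a char value is
   promoted to int when passed through `...'; the portable form is
   va_arg (ap, int).  This is user-level usage, shown purely as an
   example of what the warning refers to.  */
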
3586 /* Expand ARGLIST, from a call to __builtin_va_end. */
3587
3588 static rtx
3589 expand_builtin_va_end (arglist)
3590 tree arglist;
3591 {
3592 tree valist = TREE_VALUE (arglist);
3593
3594 #ifdef EXPAND_BUILTIN_VA_END
3595 valist = stabilize_va_list (valist, 0);
3596 EXPAND_BUILTIN_VA_END (arglist);
3597 #else
3598 /* Evaluate for side effects, if needed. I hate macros that don't
3599 do that. */
3600 if (TREE_SIDE_EFFECTS (valist))
3601 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
3602 #endif
3603
3604 return const0_rtx;
3605 }
3606
3607 /* Expand ARGLIST, from a call to __builtin_va_copy. We do this as a
3608 builtin rather than just as an assignment in stdarg.h because of the
3609 nastiness of array-type va_list types. */
3610
3611 static rtx
3612 expand_builtin_va_copy (arglist)
3613 tree arglist;
3614 {
3615 tree dst, src, t;
3616
3617 dst = TREE_VALUE (arglist);
3618 src = TREE_VALUE (TREE_CHAIN (arglist));
3619
3620 dst = stabilize_va_list (dst, 1);
3621 src = stabilize_va_list (src, 0);
3622
3623 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
3624 {
3625 t = build (MODIFY_EXPR, va_list_type_node, dst, src);
3626 TREE_SIDE_EFFECTS (t) = 1;
3627 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3628 }
3629 else
3630 {
3631 rtx dstb, srcb, size;
3632
3633 /* Evaluate to pointers. */
3634 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
3635 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
3636 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
3637 VOIDmode, EXPAND_NORMAL);
3638
3639 #ifdef POINTERS_EXTEND_UNSIGNED
3640 if (GET_MODE (dstb) != Pmode)
3641 dstb = convert_memory_address (Pmode, dstb);
3642
3643 if (GET_MODE (srcb) != Pmode)
3644 srcb = convert_memory_address (Pmode, srcb);
3645 #endif
3646
3647 /* "Dereference" to BLKmode memories. */
3648 dstb = gen_rtx_MEM (BLKmode, dstb);
3649 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
3650 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
3651 srcb = gen_rtx_MEM (BLKmode, srcb);
3652 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
3653 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
3654
3655 /* Copy. */
3656 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
3657 }
3658
3659 return const0_rtx;
3660 }
3661
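/* For illustration only: on a target whose va_list is an array type,
   the plain assignment that stdarg.h might otherwise use for

     va_copy (dst, src);

   would not have the right semantics, which is why the array case above
   falls back to a block copy of the underlying object.  The scalar case
   is handled by the simple MODIFY_EXPR path.  */
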
3662 /* Expand a call to one of the builtin functions __builtin_frame_address or
3663 __builtin_return_address. */
3664
3665 static rtx
3666 expand_builtin_frame_address (exp)
3667 tree exp;
3668 {
3669 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3670 tree arglist = TREE_OPERAND (exp, 1);
3671
3672 /* The argument must be a nonnegative integer constant.
3673 It counts the number of frames to scan up the stack.
3674 The value is the frame address of, or the return address saved in, that frame. */
3675 if (arglist == 0)
3676 /* Warning about missing arg was already issued. */
3677 return const0_rtx;
3678 else if (! host_integerp (TREE_VALUE (arglist), 1))
3679 {
3680 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3681 error ("invalid arg to `__builtin_frame_address'");
3682 else
3683 error ("invalid arg to `__builtin_return_address'");
3684 return const0_rtx;
3685 }
3686 else
3687 {
3688 rtx tem
3689 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
3690 tree_low_cst (TREE_VALUE (arglist), 1),
3691 hard_frame_pointer_rtx);
3692
3693 /* Some ports cannot access arbitrary stack frames. */
3694 if (tem == NULL)
3695 {
3696 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3697 warning ("unsupported arg to `__builtin_frame_address'");
3698 else
3699 warning ("unsupported arg to `__builtin_return_address'");
3700 return const0_rtx;
3701 }
3702
3703 /* For __builtin_frame_address, return what we've got. */
3704 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3705 return tem;
3706
3707 if (GET_CODE (tem) != REG
3708 && ! CONSTANT_P (tem))
3709 tem = copy_to_mode_reg (Pmode, tem);
3710 return tem;
3711 }
3712 }
3713
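/* For illustration only: calls such as

     void *ra = __builtin_return_address (0);
     void *fp = __builtin_frame_address (1);

   are accepted because the argument is a nonnegative integer constant;
   passing a variable count is rejected by the check above.  The names
   ra and fp are example user variables, not compiler state.  */
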
3714 /* Expand a call to the alloca builtin, with arguments ARGLIST. Return 0 if
3715 we failed and the caller should emit a normal call, otherwise try to get
3716 the result in TARGET, if convenient. */
3717
3718 static rtx
3719 expand_builtin_alloca (arglist, target)
3720 tree arglist;
3721 rtx target;
3722 {
3723 rtx op0;
3724 rtx result;
3725
3726 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
3727 return 0;
3728
3729 /* Compute the argument. */
3730 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
3731
3732 /* Allocate the desired space. */
3733 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
3734
3735 #ifdef POINTERS_EXTEND_UNSIGNED
3736 if (GET_MODE (result) != ptr_mode)
3737 result = convert_memory_address (ptr_mode, result);
3738 #endif
3739
3740 return result;
3741 }
3742
3743 /* Expand a call to a unary builtin. The arguments are in ARGLIST.
3744 Return 0 if a normal call should be emitted rather than expanding the
3745 function in-line. If convenient, the result should be placed in TARGET.
3746 SUBTARGET may be used as the target for computing one of EXP's operands. */
3747
3748 static rtx
3749 expand_builtin_unop (target_mode, arglist, target, subtarget, op_optab)
3750 enum machine_mode target_mode;
3751 tree arglist;
3752 rtx target, subtarget;
3753 optab op_optab;
3754 {
3755 rtx op0;
3756 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
3757 return 0;
3758
3759 /* Compute the argument. */
3760 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
3761 /* Compute op, into TARGET if possible.
3762 Set TARGET to wherever the result comes back. */
3763 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
3764 op_optab, op0, target, 1);
3765 if (target == 0)
3766 abort ();
3767
3768 return convert_to_mode (target_mode, target, 0);
3769 }
3770
3771 /* If the string passed to fputs is a constant, we attempt to transform
3772 the call into fputc, fwrite, or nothing at all, depending on its length. */
3773
3774 static rtx
3775 expand_builtin_fputs (arglist, ignore, unlocked)
3776 tree arglist;
3777 int ignore;
3778 int unlocked;
3779 {
3780 tree len, fn;
3781 tree fn_fputc = unlocked ? implicit_built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
3782 : implicit_built_in_decls[BUILT_IN_FPUTC];
3783 tree fn_fwrite = unlocked ? implicit_built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
3784 : implicit_built_in_decls[BUILT_IN_FWRITE];
3785
3786 /* If the return value is used, or the replacement _DECL isn't
3787 initialized, don't do the transformation. */
3788 if (!ignore || !fn_fputc || !fn_fwrite)
3789 return 0;
3790
3791 /* Verify the arguments in the original call. */
3792 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3793 return 0;
3794
3795 /* Get the length of the string passed to fputs. If the length
3796 can't be determined, punt. */
3797 if (!(len = c_strlen (TREE_VALUE (arglist)))
3798 || TREE_CODE (len) != INTEGER_CST)
3799 return 0;
3800
3801 switch (compare_tree_int (len, 1))
3802 {
3803 case -1: /* length is 0, delete the call entirely. */
3804 {
3805 /* Evaluate and ignore the argument in case it has
3806 side-effects. */
3807 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
3808 VOIDmode, EXPAND_NORMAL);
3809 return const0_rtx;
3810 }
3811 case 0: /* length is 1, call fputc. */
3812 {
3813 const char *p = c_getstr (TREE_VALUE (arglist));
3814
3815 if (p != NULL)
3816 {
3817 /* New argument list transforming fputs(string, stream) to
3818 fputc(string[0], stream). */
3819 arglist =
3820 build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
3821 arglist =
3822 tree_cons (NULL_TREE, build_int_2 (p[0], 0), arglist);
3823 fn = fn_fputc;
3824 break;
3825 }
3826 }
3827 /* FALLTHROUGH */
3828 case 1: /* length is greater than 1, call fwrite. */
3829 {
3830 tree string_arg;
3831
3832 /* If optimizing for size, keep fputs. */
3833 if (optimize_size)
3834 return 0;
3835 string_arg = TREE_VALUE (arglist);
3836 /* New argument list transforming fputs(string, stream) to
3837 fwrite(string, 1, len, stream). */
3838 arglist = build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
3839 arglist = tree_cons (NULL_TREE, len, arglist);
3840 arglist = tree_cons (NULL_TREE, size_one_node, arglist);
3841 arglist = tree_cons (NULL_TREE, string_arg, arglist);
3842 fn = fn_fwrite;
3843 break;
3844 }
3845 default:
3846 abort ();
3847 }
3848
3849 return expand_expr (build_function_call_expr (fn, arglist),
3850 (ignore ? const0_rtx : NULL_RTX),
3851 VOIDmode, EXPAND_NORMAL);
3852 }
3853
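/* For illustration only: when the result of fputs is unused, the
   transformation above rewrites

     fputs ("", f);      --  dropped; only the stream argument is evaluated
     fputs ("x", f);     --  becomes fputc ('x', f)
     fputs ("abc", f);   --  becomes fwrite ("abc", 1, 3, f), unless -Os

   The string contents are examples; only the constant length matters.  */
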
3854 /* Expand a call to __builtin_expect. We return our argument and emit a
3855 NOTE_INSN_EXPECTED_VALUE note. This is the expansion of __builtin_expect in
3856 a non-jump context. */
3857
3858 static rtx
3859 expand_builtin_expect (arglist, target)
3860 tree arglist;
3861 rtx target;
3862 {
3863 tree exp, c;
3864 rtx note, rtx_c;
3865
3866 if (arglist == NULL_TREE
3867 || TREE_CHAIN (arglist) == NULL_TREE)
3868 return const0_rtx;
3869 exp = TREE_VALUE (arglist);
3870 c = TREE_VALUE (TREE_CHAIN (arglist));
3871
3872 if (TREE_CODE (c) != INTEGER_CST)
3873 {
3874 error ("second arg to `__builtin_expect' must be a constant");
3875 c = integer_zero_node;
3876 }
3877
3878 target = expand_expr (exp, target, VOIDmode, EXPAND_NORMAL);
3879
3880 /* Don't bother with expected value notes for integral constants. */
3881 if (flag_guess_branch_prob && GET_CODE (target) != CONST_INT)
3882 {
3883 /* We do need to force this into a register so that we can be
3884 moderately sure to be able to correctly interpret the branch
3885 condition later. */
3886 target = force_reg (GET_MODE (target), target);
3887
3888 rtx_c = expand_expr (c, NULL_RTX, GET_MODE (target), EXPAND_NORMAL);
3889
3890 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
3891 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, target, rtx_c);
3892 }
3893
3894 return target;
3895 }
3896
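/* For illustration only: a typical source-level use is

     if (__builtin_expect (ptr == 0, 0))
       return handle_error ();

   The value of the comparison is returned unchanged; the constant 0
   merely records that the test is expected to be false.  handle_error
   is a placeholder name used for the example.  */
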
3897 /* Like expand_builtin_expect, except do this in a jump context. This is
3898 called from do_jump if the conditional is a __builtin_expect. Return either
3899 a list of insns to emit the jump or NULL if we cannot optimize
3900 __builtin_expect. We need to optimize this at jump time so that machines
3901 like the PowerPC don't turn the test into a SCC operation, and then jump
3902 based on the test being 0/1. */
3903
3904 rtx
3905 expand_builtin_expect_jump (exp, if_false_label, if_true_label)
3906 tree exp;
3907 rtx if_false_label;
3908 rtx if_true_label;
3909 {
3910 tree arglist = TREE_OPERAND (exp, 1);
3911 tree arg0 = TREE_VALUE (arglist);
3912 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3913 rtx ret = NULL_RTX;
3914
3915 /* Only handle __builtin_expect (test, 0) and
3916 __builtin_expect (test, 1). */
3917 if (TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
3918 && (integer_zerop (arg1) || integer_onep (arg1)))
3919 {
3920 int num_jumps = 0;
3921 rtx insn;
3922
3923 /* If we fail to locate an appropriate conditional jump, we'll
3924 fall back to normal evaluation. Ensure that the expression
3925 can be re-evaluated. */
3926 switch (unsafe_for_reeval (arg0))
3927 {
3928 case 0: /* Safe. */
3929 break;
3930
3931 case 1: /* Mildly unsafe. */
3932 arg0 = unsave_expr (arg0);
3933 break;
3934
3935 case 2: /* Wildly unsafe. */
3936 return NULL_RTX;
3937 }
3938
3939 /* Expand the jump insns. */
3940 start_sequence ();
3941 do_jump (arg0, if_false_label, if_true_label);
3942 ret = get_insns ();
3943 end_sequence ();
3944
3945 /* Now that the __builtin_expect has been validated, go through and add
3946 the expected-value predictions to each of the conditional jumps. If we
3947 run into an error, just give up and generate the 'safe' code of doing
3948 a SCC operation and then branching on that. */
3949 insn = ret;
3950 while (insn != NULL_RTX)
3951 {
3952 rtx next = NEXT_INSN (insn);
3953 rtx pattern;
3954
3955 if (GET_CODE (insn) == JUMP_INSN && any_condjump_p (insn)
3956 && (pattern = pc_set (insn)) != NULL_RTX)
3957 {
3958 rtx ifelse = SET_SRC (pattern);
3959 rtx label;
3960 int taken;
3961
3962 if (GET_CODE (ifelse) != IF_THEN_ELSE)
3963 goto do_next_insn;
3964
3965 if (GET_CODE (XEXP (ifelse, 1)) == LABEL_REF)
3966 {
3967 taken = 1;
3968 label = XEXP (XEXP (ifelse, 1), 0);
3969 }
3970 /* An inverted jump reverses the probabilities. */
3971 else if (GET_CODE (XEXP (ifelse, 2)) == LABEL_REF)
3972 {
3973 taken = 0;
3974 label = XEXP (XEXP (ifelse, 2), 0);
3975 }
3976 /* We shouldn't have to worry about conditional returns during
3977 the expansion stage, but handle it gracefully anyway. */
3978 else if (GET_CODE (XEXP (ifelse, 1)) == RETURN)
3979 {
3980 taken = 1;
3981 label = NULL_RTX;
3982 }
3983 /* An inverted return reverses the probabilities. */
3984 else if (GET_CODE (XEXP (ifelse, 2)) == RETURN)
3985 {
3986 taken = 0;
3987 label = NULL_RTX;
3988 }
3989 else
3990 goto do_next_insn;
3991
3992 /* If the test is expected to fail, reverse the
3993 probabilities. */
3994 if (integer_zerop (arg1))
3995 taken = 1 - taken;
3996
3997 /* If we are jumping to the false label, reverse the
3998 probabilities. */
3999 if (label == NULL_RTX)
4000 ; /* conditional return */
4001 else if (label == if_false_label)
4002 taken = 1 - taken;
4003 else if (label != if_true_label)
4004 goto do_next_insn;
4005
4006 num_jumps++;
4007 predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
4008 }
4009
4010 do_next_insn:
4011 insn = next;
4012 }
4013
4014 /* If no jumps were modified, fail and do __builtin_expect the normal
4015 way. */
4016 if (num_jumps == 0)
4017 ret = NULL_RTX;
4018 }
4019
4020 return ret;
4021 }
4022
4023 void
4024 expand_builtin_trap ()
4025 {
4026 #ifdef HAVE_trap
4027 if (HAVE_trap)
4028 emit_insn (gen_trap ());
4029 else
4030 #endif
4031 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4032 emit_barrier ();
4033 }
4034 \f
4035 /* Expand an expression EXP that calls a built-in function,
4036 with result going to TARGET if that's convenient
4037 (and in mode MODE if that's convenient).
4038 SUBTARGET may be used as the target for computing one of EXP's operands.
4039 IGNORE is nonzero if the value is to be ignored. */
4040
4041 rtx
4042 expand_builtin (exp, target, subtarget, mode, ignore)
4043 tree exp;
4044 rtx target;
4045 rtx subtarget;
4046 enum machine_mode mode;
4047 int ignore;
4048 {
4049 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4050 tree arglist = TREE_OPERAND (exp, 1);
4051 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4052 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
4053
4054 /* Perform postincrements before expanding builtin functions. */
4055 emit_queue ();
4056
4057 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
4058 return (*targetm.expand_builtin) (exp, target, subtarget, mode, ignore);
4059
4060 /* When not optimizing, generate calls to library functions for a certain
4061 set of builtins. */
4062 if (!optimize && !CALLED_AS_BUILT_IN (fndecl))
4063 switch (fcode)
4064 {
4065 case BUILT_IN_SQRT:
4066 case BUILT_IN_SQRTF:
4067 case BUILT_IN_SQRTL:
4068 case BUILT_IN_SIN:
4069 case BUILT_IN_SINF:
4070 case BUILT_IN_SINL:
4071 case BUILT_IN_COS:
4072 case BUILT_IN_COSF:
4073 case BUILT_IN_COSL:
4074 case BUILT_IN_EXP:
4075 case BUILT_IN_EXPF:
4076 case BUILT_IN_EXPL:
4077 case BUILT_IN_LOG:
4078 case BUILT_IN_LOGF:
4079 case BUILT_IN_LOGL:
4080 case BUILT_IN_POW:
4081 case BUILT_IN_POWF:
4082 case BUILT_IN_POWL:
4083 case BUILT_IN_ATAN2:
4084 case BUILT_IN_ATAN2F:
4085 case BUILT_IN_ATAN2L:
4086 case BUILT_IN_MEMSET:
4087 case BUILT_IN_MEMCPY:
4088 case BUILT_IN_MEMCMP:
4089 case BUILT_IN_MEMPCPY:
4090 case BUILT_IN_BCMP:
4091 case BUILT_IN_BZERO:
4092 case BUILT_IN_INDEX:
4093 case BUILT_IN_RINDEX:
4094 case BUILT_IN_STPCPY:
4095 case BUILT_IN_STRCHR:
4096 case BUILT_IN_STRRCHR:
4097 case BUILT_IN_STRLEN:
4098 case BUILT_IN_STRCPY:
4099 case BUILT_IN_STRNCPY:
4100 case BUILT_IN_STRNCMP:
4101 case BUILT_IN_STRSTR:
4102 case BUILT_IN_STRPBRK:
4103 case BUILT_IN_STRCAT:
4104 case BUILT_IN_STRNCAT:
4105 case BUILT_IN_STRSPN:
4106 case BUILT_IN_STRCSPN:
4107 case BUILT_IN_STRCMP:
4108 case BUILT_IN_FFS:
4109 case BUILT_IN_PUTCHAR:
4110 case BUILT_IN_PUTS:
4111 case BUILT_IN_PRINTF:
4112 case BUILT_IN_FPUTC:
4113 case BUILT_IN_FPUTS:
4114 case BUILT_IN_FWRITE:
4115 case BUILT_IN_PUTCHAR_UNLOCKED:
4116 case BUILT_IN_PUTS_UNLOCKED:
4117 case BUILT_IN_PRINTF_UNLOCKED:
4118 case BUILT_IN_FPUTC_UNLOCKED:
4119 case BUILT_IN_FPUTS_UNLOCKED:
4120 case BUILT_IN_FWRITE_UNLOCKED:
4121 case BUILT_IN_FLOOR:
4122 case BUILT_IN_FLOORF:
4123 case BUILT_IN_FLOORL:
4124 case BUILT_IN_CEIL:
4125 case BUILT_IN_CEILF:
4126 case BUILT_IN_CEILL:
4127 case BUILT_IN_TRUNC:
4128 case BUILT_IN_TRUNCF:
4129 case BUILT_IN_TRUNCL:
4130 case BUILT_IN_ROUND:
4131 case BUILT_IN_ROUNDF:
4132 case BUILT_IN_ROUNDL:
4133 case BUILT_IN_NEARBYINT:
4134 case BUILT_IN_NEARBYINTF:
4135 case BUILT_IN_NEARBYINTL:
4136 return expand_call (exp, target, ignore);
4137
4138 default:
4139 break;
4140 }
4141
4142 switch (fcode)
4143 {
4144 case BUILT_IN_ABS:
4145 case BUILT_IN_LABS:
4146 case BUILT_IN_LLABS:
4147 case BUILT_IN_IMAXABS:
4148 case BUILT_IN_FABS:
4149 case BUILT_IN_FABSF:
4150 case BUILT_IN_FABSL:
4151 /* build_function_call changes these into ABS_EXPR. */
4152 abort ();
4153
4154 case BUILT_IN_CONJ:
4155 case BUILT_IN_CONJF:
4156 case BUILT_IN_CONJL:
4157 case BUILT_IN_CREAL:
4158 case BUILT_IN_CREALF:
4159 case BUILT_IN_CREALL:
4160 case BUILT_IN_CIMAG:
4161 case BUILT_IN_CIMAGF:
4162 case BUILT_IN_CIMAGL:
4163 /* expand_tree_builtin changes these into CONJ_EXPR, REALPART_EXPR
4164 and IMAGPART_EXPR. */
4165 abort ();
4166
4167 case BUILT_IN_SIN:
4168 case BUILT_IN_SINF:
4169 case BUILT_IN_SINL:
4170 case BUILT_IN_COS:
4171 case BUILT_IN_COSF:
4172 case BUILT_IN_COSL:
4173 case BUILT_IN_EXP:
4174 case BUILT_IN_EXPF:
4175 case BUILT_IN_EXPL:
4176 case BUILT_IN_LOG:
4177 case BUILT_IN_LOGF:
4178 case BUILT_IN_LOGL:
4179 /* Treat these like sqrt only if unsafe math optimizations are allowed,
4180 because of possible accuracy problems. */
4181 if (! flag_unsafe_math_optimizations)
4182 break;
4183 case BUILT_IN_SQRT:
4184 case BUILT_IN_SQRTF:
4185 case BUILT_IN_SQRTL:
4186 case BUILT_IN_FLOOR:
4187 case BUILT_IN_FLOORF:
4188 case BUILT_IN_FLOORL:
4189 case BUILT_IN_CEIL:
4190 case BUILT_IN_CEILF:
4191 case BUILT_IN_CEILL:
4192 case BUILT_IN_TRUNC:
4193 case BUILT_IN_TRUNCF:
4194 case BUILT_IN_TRUNCL:
4195 case BUILT_IN_ROUND:
4196 case BUILT_IN_ROUNDF:
4197 case BUILT_IN_ROUNDL:
4198 case BUILT_IN_NEARBYINT:
4199 case BUILT_IN_NEARBYINTF:
4200 case BUILT_IN_NEARBYINTL:
4201 target = expand_builtin_mathfn (exp, target, subtarget);
4202 if (target)
4203 return target;
4204 break;
4205
4206 case BUILT_IN_POW:
4207 case BUILT_IN_POWF:
4208 case BUILT_IN_POWL:
4209 case BUILT_IN_ATAN2:
4210 case BUILT_IN_ATAN2F:
4211 case BUILT_IN_ATAN2L:
4212 if (! flag_unsafe_math_optimizations)
4213 break;
4214 target = expand_builtin_mathfn_2 (exp, target, subtarget);
4215 if (target)
4216 return target;
4217 break;
4218
4219 case BUILT_IN_APPLY_ARGS:
4220 return expand_builtin_apply_args ();
4221
4222 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
4223 FUNCTION with a copy of the parameters described by
4224 ARGUMENTS, and ARGSIZE. It returns a block of memory
4225 allocated on the stack into which are stored all the registers
4226 that might possibly be used for returning the result of a
4227 function. ARGUMENTS is the value returned by
4228 __builtin_apply_args. ARGSIZE is the number of bytes of
4229 arguments that must be copied. ??? How should this value be
4230 computed? We'll also need a safe worst case value for varargs
4231 functions. */
4232 case BUILT_IN_APPLY:
4233 if (!validate_arglist (arglist, POINTER_TYPE,
4234 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4235 && !validate_arglist (arglist, REFERENCE_TYPE,
4236 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4237 return const0_rtx;
4238 else
4239 {
4240 int i;
4241 tree t;
4242 rtx ops[3];
4243
4244 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
4245 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
4246
4247 return expand_builtin_apply (ops[0], ops[1], ops[2]);
4248 }
4249
4250 /* __builtin_return (RESULT) causes the function to return the
4251 value described by RESULT. RESULT is address of the block of
4252 memory returned by __builtin_apply. */
4253 case BUILT_IN_RETURN:
4254 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4255 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
4256 NULL_RTX, VOIDmode, 0));
4257 return const0_rtx;
4258
4259 case BUILT_IN_SAVEREGS:
4260 return expand_builtin_saveregs ();
4261
4262 case BUILT_IN_ARGS_INFO:
4263 return expand_builtin_args_info (exp);
4264
4265 /* Return the address of the first anonymous stack arg. */
4266 case BUILT_IN_NEXT_ARG:
4267 return expand_builtin_next_arg (arglist);
4268
4269 case BUILT_IN_CLASSIFY_TYPE:
4270 return expand_builtin_classify_type (arglist);
4271
4272 case BUILT_IN_CONSTANT_P:
4273 return expand_builtin_constant_p (exp);
4274
4275 case BUILT_IN_FRAME_ADDRESS:
4276 case BUILT_IN_RETURN_ADDRESS:
4277 return expand_builtin_frame_address (exp);
4278
4279 /* Return the address of the area where the structure value is being
4280 returned, or 0 if there is no such area. */
4281 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
4282 if (arglist != 0
4283 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
4284 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
4285 return const0_rtx;
4286 else
4287 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
4288
4289 case BUILT_IN_ALLOCA:
4290 target = expand_builtin_alloca (arglist, target);
4291 if (target)
4292 return target;
4293 break;
4294
4295 case BUILT_IN_FFS:
4296 case BUILT_IN_FFSL:
4297 case BUILT_IN_FFSLL:
4298 target = expand_builtin_unop (target_mode, arglist, target,
4299 subtarget, ffs_optab);
4300 if (target)
4301 return target;
4302 break;
4303
4304 case BUILT_IN_CLZ:
4305 case BUILT_IN_CLZL:
4306 case BUILT_IN_CLZLL:
4307 target = expand_builtin_unop (target_mode, arglist, target,
4308 subtarget, clz_optab);
4309 if (target)
4310 return target;
4311 break;
4312
4313 case BUILT_IN_CTZ:
4314 case BUILT_IN_CTZL:
4315 case BUILT_IN_CTZLL:
4316 target = expand_builtin_unop (target_mode, arglist, target,
4317 subtarget, ctz_optab);
4318 if (target)
4319 return target;
4320 break;
4321
4322 case BUILT_IN_POPCOUNT:
4323 case BUILT_IN_POPCOUNTL:
4324 case BUILT_IN_POPCOUNTLL:
4325 target = expand_builtin_unop (target_mode, arglist, target,
4326 subtarget, popcount_optab);
4327 if (target)
4328 return target;
4329 break;
4330
4331 case BUILT_IN_PARITY:
4332 case BUILT_IN_PARITYL:
4333 case BUILT_IN_PARITYLL:
4334 target = expand_builtin_unop (target_mode, arglist, target,
4335 subtarget, parity_optab);
4336 if (target)
4337 return target;
4338 break;
4339
4340 case BUILT_IN_STRLEN:
4341 target = expand_builtin_strlen (exp, target);
4342 if (target)
4343 return target;
4344 break;
4345
4346 case BUILT_IN_STRCPY:
4347 target = expand_builtin_strcpy (exp, target, mode);
4348 if (target)
4349 return target;
4350 break;
4351
4352 case BUILT_IN_STRNCPY:
4353 target = expand_builtin_strncpy (arglist, target, mode);
4354 if (target)
4355 return target;
4356 break;
4357
4358 case BUILT_IN_STPCPY:
4359 target = expand_builtin_stpcpy (arglist, target, mode);
4360 if (target)
4361 return target;
4362 break;
4363
4364 case BUILT_IN_STRCAT:
4365 target = expand_builtin_strcat (arglist, target, mode);
4366 if (target)
4367 return target;
4368 break;
4369
4370 case BUILT_IN_STRNCAT:
4371 target = expand_builtin_strncat (arglist, target, mode);
4372 if (target)
4373 return target;
4374 break;
4375
4376 case BUILT_IN_STRSPN:
4377 target = expand_builtin_strspn (arglist, target, mode);
4378 if (target)
4379 return target;
4380 break;
4381
4382 case BUILT_IN_STRCSPN:
4383 target = expand_builtin_strcspn (arglist, target, mode);
4384 if (target)
4385 return target;
4386 break;
4387
4388 case BUILT_IN_STRSTR:
4389 target = expand_builtin_strstr (arglist, target, mode);
4390 if (target)
4391 return target;
4392 break;
4393
4394 case BUILT_IN_STRPBRK:
4395 target = expand_builtin_strpbrk (arglist, target, mode);
4396 if (target)
4397 return target;
4398 break;
4399
4400 case BUILT_IN_INDEX:
4401 case BUILT_IN_STRCHR:
4402 target = expand_builtin_strchr (arglist, target, mode);
4403 if (target)
4404 return target;
4405 break;
4406
4407 case BUILT_IN_RINDEX:
4408 case BUILT_IN_STRRCHR:
4409 target = expand_builtin_strrchr (arglist, target, mode);
4410 if (target)
4411 return target;
4412 break;
4413
4414 case BUILT_IN_MEMCPY:
4415 target = expand_builtin_memcpy (arglist, target, mode, /*endp=*/0);
4416 if (target)
4417 return target;
4418 break;
4419
4420 case BUILT_IN_MEMPCPY:
4421 target = expand_builtin_memcpy (arglist, target, mode, /*endp=*/1);
4422 if (target)
4423 return target;
4424 break;
4425
4426 case BUILT_IN_MEMSET:
4427 target = expand_builtin_memset (exp, target, mode);
4428 if (target)
4429 return target;
4430 break;
4431
4432 case BUILT_IN_BZERO:
4433 target = expand_builtin_bzero (exp);
4434 if (target)
4435 return target;
4436 break;
4437
4438 case BUILT_IN_STRCMP:
4439 target = expand_builtin_strcmp (exp, target, mode);
4440 if (target)
4441 return target;
4442 break;
4443
4444 case BUILT_IN_STRNCMP:
4445 target = expand_builtin_strncmp (exp, target, mode);
4446 if (target)
4447 return target;
4448 break;
4449
4450 case BUILT_IN_BCMP:
4451 case BUILT_IN_MEMCMP:
4452 target = expand_builtin_memcmp (exp, arglist, target, mode);
4453 if (target)
4454 return target;
4455 break;
4456
4457 case BUILT_IN_SETJMP:
4458 target = expand_builtin_setjmp (arglist, target);
4459 if (target)
4460 return target;
4461 break;
4462
4463 /* __builtin_longjmp is passed a pointer to an array of five words.
4464 It's similar to the C library longjmp function but works with
4465 __builtin_setjmp above. */
4466 case BUILT_IN_LONGJMP:
4467 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4468 break;
4469 else
4470 {
4471 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
4472 VOIDmode, 0);
4473 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
4474 NULL_RTX, VOIDmode, 0);
4475
4476 if (value != const1_rtx)
4477 {
4478 error ("__builtin_longjmp second argument must be 1");
4479 return const0_rtx;
4480 }
4481
4482 expand_builtin_longjmp (buf_addr, value);
4483 return const0_rtx;
4484 }
4485
4486 case BUILT_IN_TRAP:
4487 expand_builtin_trap ();
4488 return const0_rtx;
4489
4490 case BUILT_IN_FPUTS:
4491 target = expand_builtin_fputs (arglist, ignore,/*unlocked=*/ 0);
4492 if (target)
4493 return target;
4494 break;
4495 case BUILT_IN_FPUTS_UNLOCKED:
4496 target = expand_builtin_fputs (arglist, ignore,/*unlocked=*/ 1);
4497 if (target)
4498 return target;
4499 break;
4500
4501 /* Various hooks for the DWARF 2 __throw routine. */
4502 case BUILT_IN_UNWIND_INIT:
4503 expand_builtin_unwind_init ();
4504 return const0_rtx;
4505 case BUILT_IN_DWARF_CFA:
4506 return virtual_cfa_rtx;
4507 #ifdef DWARF2_UNWIND_INFO
4508 case BUILT_IN_DWARF_FP_REGNUM:
4509 return expand_builtin_dwarf_fp_regnum ();
4510 case BUILT_IN_INIT_DWARF_REG_SIZES:
4511 expand_builtin_init_dwarf_reg_sizes (TREE_VALUE (arglist));
4512 return const0_rtx;
4513 #endif
4514 case BUILT_IN_FROB_RETURN_ADDR:
4515 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
4516 case BUILT_IN_EXTRACT_RETURN_ADDR:
4517 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
4518 case BUILT_IN_EH_RETURN:
4519 expand_builtin_eh_return (TREE_VALUE (arglist),
4520 TREE_VALUE (TREE_CHAIN (arglist)));
4521 return const0_rtx;
4522 #ifdef EH_RETURN_DATA_REGNO
4523 case BUILT_IN_EH_RETURN_DATA_REGNO:
4524 return expand_builtin_eh_return_data_regno (arglist);
4525 #endif
4526 case BUILT_IN_VA_START:
4527 case BUILT_IN_STDARG_START:
4528 return expand_builtin_va_start (arglist);
4529 case BUILT_IN_VA_END:
4530 return expand_builtin_va_end (arglist);
4531 case BUILT_IN_VA_COPY:
4532 return expand_builtin_va_copy (arglist);
4533 case BUILT_IN_EXPECT:
4534 return expand_builtin_expect (arglist, target);
4535 case BUILT_IN_PREFETCH:
4536 expand_builtin_prefetch (arglist);
4537 return const0_rtx;
4538
4539
4540 default: /* just do a library call if the builtin is unknown */
4541 if (!DECL_ASSEMBLER_NAME_SET_P (fndecl))
4542 error ("built-in function `%s' not currently supported",
4543 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
4544 }
4545
4546 /* The switch statement above can drop through to cause the function
4547 to be called normally. */
4548 return expand_call (exp, target, ignore);
4549 }
4550
4551 /* Determine whether a tree node represents a call to a built-in
4552 math function. If the tree T is a call to a built-in function
4553 taking a single real argument, then the return value is the
4554 DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT. Otherwise
4555 the return value is END_BUILTINS. */
4556
4557 enum built_in_function
4558 builtin_mathfn_code (t)
4559 tree t;
4560 {
4561 tree fndecl, arglist;
4562
4563 if (TREE_CODE (t) != CALL_EXPR
4564 || TREE_CODE (TREE_OPERAND (t, 0)) != ADDR_EXPR)
4565 return END_BUILTINS;
4566
4567 fndecl = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
4568 if (TREE_CODE (fndecl) != FUNCTION_DECL
4569 || ! DECL_BUILT_IN (fndecl)
4570 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
4571 return END_BUILTINS;
4572
4573 arglist = TREE_OPERAND (t, 1);
4574 if (! arglist
4575 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
4576 return END_BUILTINS;
4577
4578 arglist = TREE_CHAIN (arglist);
4579 switch (DECL_FUNCTION_CODE (fndecl))
4580 {
4581 case BUILT_IN_POW:
4582 case BUILT_IN_POWF:
4583 case BUILT_IN_POWL:
4584 case BUILT_IN_ATAN2:
4585 case BUILT_IN_ATAN2F:
4586 case BUILT_IN_ATAN2L:
4587 if (! arglist
4588 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE
4589 || TREE_CHAIN (arglist))
4590 return END_BUILTINS;
4591 break;
4592
4593 default:
4594 if (arglist)
4595 return END_BUILTINS;
4596 break;
4597 }
4598
4599 return DECL_FUNCTION_CODE (fndecl);
4600 }
4601
4602 /* Fold a call to __builtin_constant_p, if we know it will evaluate to a
4603 constant. ARGLIST is the argument list of the call. */
4604
4605 static tree
4606 fold_builtin_constant_p (arglist)
4607 tree arglist;
4608 {
4609 if (arglist == 0)
4610 return 0;
4611
4612 arglist = TREE_VALUE (arglist);
4613
4614 /* We return 1 for a numeric type that's known to be a constant
4615 value at compile-time or for an aggregate type that's a
4616 literal constant. */
4617 STRIP_NOPS (arglist);
4618
4619 /* If we know this is a constant, return the constant one. */
4620 if (TREE_CODE_CLASS (TREE_CODE (arglist)) == 'c'
4621 || (TREE_CODE (arglist) == CONSTRUCTOR
4622 && TREE_CONSTANT (arglist))
4623 || (TREE_CODE (arglist) == ADDR_EXPR
4624 && TREE_CODE (TREE_OPERAND (arglist, 0)) == STRING_CST))
4625 return integer_one_node;
4626
4627 /* If we aren't going to be running CSE or this expression
4628 has side effects, show we don't know it to be a constant.
4629 Likewise if it's a pointer or aggregate type, since in those
4630 cases we only want literals, as those are only optimized
4631 when generating RTL, not later.
4632 And finally, if we are compiling an initializer, not code, we
4633 need to return a definite result now; there's not going to be any
4634 more optimization done. */
4635 if (TREE_SIDE_EFFECTS (arglist) || cse_not_expected
4636 || AGGREGATE_TYPE_P (TREE_TYPE (arglist))
4637 || POINTER_TYPE_P (TREE_TYPE (arglist))
4638 || cfun == 0)
4639 return integer_zero_node;
4640
4641 return 0;
4642 }
4643
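/* For illustration only: the folding above gives

     __builtin_constant_p (42)      --  folds to 1
     __builtin_constant_p ("abc")   --  folds to 1 (address of a string literal)
     __builtin_constant_p (x)       --  folds to 0 once no further CSE is
                                        expected, otherwise left for later

   where x stands for some arbitrary non-constant operand.  */
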
4644 /* Fold a call to __builtin_classify_type. */
4645
4646 static tree
4647 fold_builtin_classify_type (arglist)
4648 tree arglist;
4649 {
4650 if (arglist == 0)
4651 return build_int_2 (no_type_class, 0);
4652
4653 return build_int_2 (type_to_class (TREE_TYPE (TREE_VALUE (arglist))), 0);
4654 }
4655
4656 /* Fold a call to __builtin_inf or __builtin_huge_val. */
4657
4658 static tree
4659 fold_builtin_inf (type, warn)
4660 tree type;
4661 int warn;
4662 {
4663 REAL_VALUE_TYPE real;
4664
4665 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
4666 warning ("target format does not support infinity");
4667
4668 real_inf (&real);
4669 return build_real (type, real);
4670 }
4671
4672 /* Fold a call to __builtin_nan or __builtin_nans. */
4673
4674 static tree
4675 fold_builtin_nan (arglist, type, quiet)
4676 tree arglist, type;
4677 int quiet;
4678 {
4679 REAL_VALUE_TYPE real;
4680 const char *str;
4681
4682 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4683 return 0;
4684 str = c_getstr (TREE_VALUE (arglist));
4685 if (!str)
4686 return 0;
4687
4688 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
4689 return 0;
4690
4691 return build_real (type, real);
4692 }
4693
4694 /* EXP is assumed to be a builtin call where truncation can be propagated
4695 across, for instance floor((double)f) == (double)floorf (f).
4696 Do the transformation. */
4697 static tree
4698 fold_trunc_transparent_mathfn (exp)
4699 tree exp;
4700 {
4701 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4702 tree arglist = TREE_OPERAND (exp, 1);
4703 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4704
4705 if (optimize && validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
4706 {
4707 tree arg0 = strip_float_extensions (TREE_VALUE (arglist));
4708 tree ftype = TREE_TYPE (exp);
4709 tree newtype = TREE_TYPE (arg0);
4710 tree decl;
4711
4712 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
4713 && (decl = mathfn_built_in (newtype, fcode)))
4714 {
4715 arglist =
4716 build_tree_list (NULL_TREE, fold (convert (newtype, arg0)));
4717 return convert (ftype,
4718 build_function_call_expr (decl, arglist));
4719 }
4720 }
4721 return 0;
4722 }
4723
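/* For illustration only: with F of type float, the transformation above
   turns

     floor ((double) F)

   into

     (double) floorf (F)

   so the work is done in the narrower type and only the result is
   widened.  F is an example operand, not something defined here.  */
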
4724 /* Used by constant folding to eliminate some builtin calls early. EXP is
4725 the CALL_EXPR of a call to a builtin function. */
4726
4727 tree
4728 fold_builtin (exp)
4729 tree exp;
4730 {
4731 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4732 tree arglist = TREE_OPERAND (exp, 1);
4733 tree type = TREE_TYPE (TREE_TYPE (fndecl));
4734
4735 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
4736 return 0;
4737
4738 switch (DECL_FUNCTION_CODE (fndecl))
4739 {
4740 case BUILT_IN_CONSTANT_P:
4741 return fold_builtin_constant_p (arglist);
4742
4743 case BUILT_IN_CLASSIFY_TYPE:
4744 return fold_builtin_classify_type (arglist);
4745
4746 case BUILT_IN_STRLEN:
4747 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4748 {
4749 tree len = c_strlen (TREE_VALUE (arglist));
4750 if (len)
4751 {
4752 /* Convert from the internal "sizetype" type to "size_t". */
4753 if (size_type_node)
4754 len = convert (size_type_node, len);
4755 return len;
4756 }
4757 }
4758 break;
4759
4760 case BUILT_IN_SQRT:
4761 case BUILT_IN_SQRTF:
4762 case BUILT_IN_SQRTL:
4763 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
4764 {
4765 enum built_in_function fcode;
4766 tree arg = TREE_VALUE (arglist);
4767
4768 /* Optimize sqrt of constant value. */
4769 if (TREE_CODE (arg) == REAL_CST
4770 && ! TREE_CONSTANT_OVERFLOW (arg))
4771 {
4772 enum machine_mode mode;
4773 REAL_VALUE_TYPE r, x;
4774
4775 x = TREE_REAL_CST (arg);
4776 mode = TYPE_MODE (type);
4777 if (real_sqrt (&r, mode, &x)
4778 || (!flag_trapping_math && !flag_errno_math))
4779 return build_real (type, r);
4780 }
4781
4782 /* Optimize sqrt(exp(x)) = exp(x*0.5). */
4783 fcode = builtin_mathfn_code (arg);
4784 if (flag_unsafe_math_optimizations
4785 && (fcode == BUILT_IN_EXP
4786 || fcode == BUILT_IN_EXPF
4787 || fcode == BUILT_IN_EXPL))
4788 {
4789 tree expfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
4790 arg = fold (build (MULT_EXPR, type,
4791 TREE_VALUE (TREE_OPERAND (arg, 1)),
4792 build_real (type, dconsthalf)));
4793 arglist = build_tree_list (NULL_TREE, arg);
4794 return build_function_call_expr (expfn, arglist);
4795 }
4796
4797 /* Optimize sqrt(pow(x,y)) = pow(x,y*0.5). */
4798 if (flag_unsafe_math_optimizations
4799 && (fcode == BUILT_IN_POW
4800 || fcode == BUILT_IN_POWF
4801 || fcode == BUILT_IN_POWL))
4802 {
4803 tree powfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
4804 tree arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
4805 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
4806 tree narg1 = fold (build (MULT_EXPR, type, arg1,
4807 build_real (type, dconsthalf)));
4808 arglist = tree_cons (NULL_TREE, arg0,
4809 build_tree_list (NULL_TREE, narg1));
4810 return build_function_call_expr (powfn, arglist);
4811 }
4812 }
4813 break;
4814
4815 case BUILT_IN_SIN:
4816 case BUILT_IN_SINF:
4817 case BUILT_IN_SINL:
4818 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
4819 {
4820 tree arg = TREE_VALUE (arglist);
4821
4822 /* Optimize sin(0.0) = 0.0. */
4823 if (real_zerop (arg))
4824 return build_real (type, dconst0);
4825 }
4826 break;
4827
4828 case BUILT_IN_COS:
4829 case BUILT_IN_COSF:
4830 case BUILT_IN_COSL:
4831 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
4832 {
4833 tree arg = TREE_VALUE (arglist);
4834
4835 /* Optimize cos(0.0) = 1.0. */
4836 if (real_zerop (arg))
4837 return build_real (type, dconst1);
4838 }
4839 break;
4840
4841 case BUILT_IN_EXP:
4842 case BUILT_IN_EXPF:
4843 case BUILT_IN_EXPL:
4844 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
4845 {
4846 enum built_in_function fcode;
4847 tree arg = TREE_VALUE (arglist);
4848
4849 /* Optimize exp(0.0) = 1.0. */
4850 if (real_zerop (arg))
4851 return build_real (type, dconst1);
4852
4853 /* Optimize exp(log(x)) = x. */
4854 fcode = builtin_mathfn_code (arg);
4855 if (flag_unsafe_math_optimizations
4856 && (fcode == BUILT_IN_LOG
4857 || fcode == BUILT_IN_LOGF
4858 || fcode == BUILT_IN_LOGL))
4859 return TREE_VALUE (TREE_OPERAND (arg, 1));
4860 }
4861 break;
4862
4863 case BUILT_IN_LOG:
4864 case BUILT_IN_LOGF:
4865 case BUILT_IN_LOGL:
4866 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
4867 {
4868 enum built_in_function fcode;
4869 tree arg = TREE_VALUE (arglist);
4870
4871 /* Optimize log(1.0) = 0.0. */
4872 if (real_onep (arg))
4873 return build_real (type, dconst0);
4874
4875 /* Optimize log(exp(x)) = x. */
4876 fcode = builtin_mathfn_code (arg);
4877 if (flag_unsafe_math_optimizations
4878 && (fcode == BUILT_IN_EXP
4879 || fcode == BUILT_IN_EXPF
4880 || fcode == BUILT_IN_EXPL))
4881 return TREE_VALUE (TREE_OPERAND (arg, 1));
4882
4883 /* Optimize log(sqrt(x)) = log(x)*0.5. */
4884 if (flag_unsafe_math_optimizations
4885 && (fcode == BUILT_IN_SQRT
4886 || fcode == BUILT_IN_SQRTF
4887 || fcode == BUILT_IN_SQRTL))
4888 {
4889 tree logfn = build_function_call_expr (fndecl,
4890 TREE_OPERAND (arg, 1));
4891 return fold (build (MULT_EXPR, type, logfn,
4892 build_real (type, dconsthalf)));
4893 }
4894
4895 /* Optimize log(pow(x,y)) = y*log(x). */
4896 if (flag_unsafe_math_optimizations
4897 && (fcode == BUILT_IN_POW
4898 || fcode == BUILT_IN_POWF
4899 || fcode == BUILT_IN_POWL))
4900 {
4901 tree arg0, arg1, logfn;
4902
4903 arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
4904 arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
4905 arglist = build_tree_list (NULL_TREE, arg0);
4906 logfn = build_function_call_expr (fndecl, arglist);
4907 return fold (build (MULT_EXPR, type, arg1, logfn));
4908 }
4909 }
4910 break;
4911
4912 case BUILT_IN_POW:
4913 case BUILT_IN_POWF:
4914 case BUILT_IN_POWL:
4915 if (validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4916 {
4917 enum built_in_function fcode;
4918 tree arg0 = TREE_VALUE (arglist);
4919 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4920
4921 /* Optimize pow(1.0,y) = 1.0. */
4922 if (real_onep (arg0))
4923 return omit_one_operand (type, build_real (type, dconst1), arg1);
4924
4925 if (TREE_CODE (arg1) == REAL_CST
4926 && ! TREE_CONSTANT_OVERFLOW (arg1))
4927 {
4928 REAL_VALUE_TYPE c;
4929 c = TREE_REAL_CST (arg1);
4930
4931 /* Optimize pow(x,0.0) = 1.0. */
4932 if (REAL_VALUES_EQUAL (c, dconst0))
4933 return omit_one_operand (type, build_real (type, dconst1),
4934 arg0);
4935
4936 /* Optimize pow(x,1.0) = x. */
4937 if (REAL_VALUES_EQUAL (c, dconst1))
4938 return arg0;
4939
4940 /* Optimize pow(x,-1.0) = 1.0/x. */
4941 if (REAL_VALUES_EQUAL (c, dconstm1))
4942 return fold (build (RDIV_EXPR, type,
4943 build_real (type, dconst1),
4944 arg0));
4945
4946 /* Optimize pow(x,2.0) = x*x. */
4947 if (REAL_VALUES_EQUAL (c, dconst2)
4948 && (*lang_hooks.decls.global_bindings_p) () == 0
4949 && ! contains_placeholder_p (arg0))
4950 {
4951 arg0 = save_expr (arg0);
4952 return fold (build (MULT_EXPR, type, arg0, arg0));
4953 }
4954
4955 /* Optimize pow(x,-2.0) = 1.0/(x*x). */
4956 if (flag_unsafe_math_optimizations
4957 && REAL_VALUES_EQUAL (c, dconstm2)
4958 && (*lang_hooks.decls.global_bindings_p) () == 0
4959 && ! contains_placeholder_p (arg0))
4960 {
4961 arg0 = save_expr (arg0);
4962 return fold (build (RDIV_EXPR, type,
4963 build_real (type, dconst1),
4964 fold (build (MULT_EXPR, type,
4965 arg0, arg0))));
4966 }
4967
4968 /* Optimize pow(x,0.5) = sqrt(x). */
4969 if (flag_unsafe_math_optimizations
4970 && REAL_VALUES_EQUAL (c, dconsthalf))
4971 {
4972 tree sqrtfn;
4973
4974 fcode = DECL_FUNCTION_CODE (fndecl);
4975 if (fcode == BUILT_IN_POW)
4976 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRT];
4977 else if (fcode == BUILT_IN_POWF)
4978 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTF];
4979 else if (fcode == BUILT_IN_POWL)
4980 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTL];
4981 else
4982 sqrtfn = NULL_TREE;
4983
4984 if (sqrtfn != NULL_TREE)
4985 {
4986 tree arglist = build_tree_list (NULL_TREE, arg0);
4987 return build_function_call_expr (sqrtfn, arglist);
4988 }
4989 }
4990 }
4991
4992 /* Optimize pow(exp(x),y) = exp(x*y). */
4993 fcode = builtin_mathfn_code (arg0);
4994 if (flag_unsafe_math_optimizations
4995 && (fcode == BUILT_IN_EXP
4996 || fcode == BUILT_IN_EXPF
4997 || fcode == BUILT_IN_EXPL))
4998 {
4999 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
5000 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5001 arg = fold (build (MULT_EXPR, type, arg, arg1));
5002 arglist = build_tree_list (NULL_TREE, arg);
5003 return build_function_call_expr (expfn, arglist);
5004 }
5005
5006 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
5007 if (flag_unsafe_math_optimizations
5008 && (fcode == BUILT_IN_SQRT
5009 || fcode == BUILT_IN_SQRTF
5010 || fcode == BUILT_IN_SQRTL))
5011 {
5012 tree narg0 = TREE_VALUE (TREE_OPERAND (arg0, 1));
5013 tree narg1 = fold (build (MULT_EXPR, type, arg1,
5014 build_real (type, dconsthalf)));
5015
5016 arglist = tree_cons (NULL_TREE, narg0,
5017 build_tree_list (NULL_TREE, narg1));
5018 return build_function_call_expr (fndecl, arglist);
5019 }
5020
5021 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
5022 if (flag_unsafe_math_optimizations
5023 && (fcode == BUILT_IN_POW
5024 || fcode == BUILT_IN_POWF
5025 || fcode == BUILT_IN_POWL))
5026 {
5027 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
5028 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
5029 tree narg1 = fold (build (MULT_EXPR, type, arg01, arg1));
5030 arglist = tree_cons (NULL_TREE, arg00,
5031 build_tree_list (NULL_TREE, narg1));
5032 return build_function_call_expr (fndecl, arglist);
5033 }
5034 }
5035 break;
5036
5037 case BUILT_IN_INF:
5038 case BUILT_IN_INFF:
5039 case BUILT_IN_INFL:
5040 return fold_builtin_inf (type, true);
5041
5042 case BUILT_IN_HUGE_VAL:
5043 case BUILT_IN_HUGE_VALF:
5044 case BUILT_IN_HUGE_VALL:
5045 return fold_builtin_inf (type, false);
5046
5047 case BUILT_IN_NAN:
5048 case BUILT_IN_NANF:
5049 case BUILT_IN_NANL:
5050 return fold_builtin_nan (arglist, type, true);
5051
5052 case BUILT_IN_NANS:
5053 case BUILT_IN_NANSF:
5054 case BUILT_IN_NANSL:
5055 return fold_builtin_nan (arglist, type, false);
5056
5057 case BUILT_IN_FLOOR:
5058 case BUILT_IN_FLOORF:
5059 case BUILT_IN_FLOORL:
5060 case BUILT_IN_CEIL:
5061 case BUILT_IN_CEILF:
5062 case BUILT_IN_CEILL:
5063 case BUILT_IN_TRUNC:
5064 case BUILT_IN_TRUNCF:
5065 case BUILT_IN_TRUNCL:
5066 case BUILT_IN_ROUND:
5067 case BUILT_IN_ROUNDF:
5068 case BUILT_IN_ROUNDL:
5069 case BUILT_IN_NEARBYINT:
5070 case BUILT_IN_NEARBYINTF:
5071 case BUILT_IN_NEARBYINTL:
5072 return fold_trunc_transparent_mathfn (exp);
5073
5074 default:
5075 break;
5076 }
5077
5078 return 0;
5079 }
5080
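/* For illustration only: with -funsafe-math-optimizations the foldings
   above include, among others,

     sqrt (exp (x))   ->  exp (x * 0.5)
     log (pow (x, y)) ->  y * log (x)
     pow (x, 0.5)     ->  sqrt (x)

   while pow (x, 2.0) -> x * x needs no special flag.  Here x and y stand
   for arbitrary operands of the matching real type.  */
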
5081 /* Conveniently construct a function call expression. */
5082
5083 tree
5084 build_function_call_expr (fn, arglist)
5085 tree fn, arglist;
5086 {
5087 tree call_expr;
5088
5089 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
5090 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
5091 call_expr, arglist);
5092 TREE_SIDE_EFFECTS (call_expr) = 1;
5093 return fold (call_expr);
5094 }
5095
5096 /* This function validates the types of a function call argument list
5097 represented as a tree chain of parameters against a specified list
5098 of tree_codes. If the last specifier is a 0, that represents an
5099 ellipsis; otherwise the last specifier must be a VOID_TYPE. */
5100
5101 static int
5102 validate_arglist VPARAMS ((tree arglist, ...))
5103 {
5104 enum tree_code code;
5105 int res = 0;
5106
5107 VA_OPEN (ap, arglist);
5108 VA_FIXEDARG (ap, tree, arglist);
5109
5110 do
5111 {
5112 code = va_arg (ap, enum tree_code);
5113 switch (code)
5114 {
5115 case 0:
5116 /* This signifies an ellipsis; any further arguments are all ok. */
5117 res = 1;
5118 goto end;
5119 case VOID_TYPE:
5120 /* This signifies an endlink, if no arguments remain, return
5121 true, otherwise return false. */
5122 res = arglist == 0;
5123 goto end;
5124 default:
5125 /* If no parameters remain or the parameter's code does not
5126 match the specified code, return false. Otherwise continue
5127 checking any remaining arguments. */
5128 if (arglist == 0
5129 || code != TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))))
5130 goto end;
5131 break;
5132 }
5133 arglist = TREE_CHAIN (arglist);
5134 }
5135 while (1);
5136
5137 /* We need gotos here since we can only have one VA_CLOSE in a
5138 function. */
5139 end: ;
5140 VA_CLOSE (ap);
5141
5142 return res;
5143 }
5144
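/* For illustration only: a typical caller writes

     if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
       return 0;

   meaning "exactly one pointer argument followed by one integer
   argument"; ending the specifier list with 0 instead of VOID_TYPE
   would additionally allow trailing arguments.  This mirrors the calls
   made elsewhere in this file.  */
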
5145 /* Default version of target-specific builtin setup that does nothing. */
5146
5147 void
5148 default_init_builtins ()
5149 {
5150 }
5151
5152 /* Default target-specific builtin expander that does nothing. */
5153
5154 rtx
5155 default_expand_builtin (exp, target, subtarget, mode, ignore)
5156 tree exp ATTRIBUTE_UNUSED;
5157 rtx target ATTRIBUTE_UNUSED;
5158 rtx subtarget ATTRIBUTE_UNUSED;
5159 enum machine_mode mode ATTRIBUTE_UNUSED;
5160 int ignore ATTRIBUTE_UNUSED;
5161 {
5162 return NULL_RTX;
5163 }
5164
5165 /* Instantiate all remaining CONSTANT_P_RTX nodes. */
5166
5167 void
5168 purge_builtin_constant_p ()
5169 {
5170 rtx insn, set, arg, new, note;
5171
5172 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5173 if (INSN_P (insn)
5174 && (set = single_set (insn)) != NULL_RTX
5175 && (GET_CODE (arg = SET_SRC (set)) == CONSTANT_P_RTX
5176 || (GET_CODE (arg) == SUBREG
5177 && (GET_CODE (arg = SUBREG_REG (arg))
5178 == CONSTANT_P_RTX))))
5179 {
5180 arg = XEXP (arg, 0);
5181 new = CONSTANT_P (arg) ? const1_rtx : const0_rtx;
5182 validate_change (insn, &SET_SRC (set), new, 0);
5183
5184 /* Remove the REG_EQUAL note from the insn. */
5185 if ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0)
5186 remove_note (insn, note);
5187 }
5188 }
5189