1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "expr.h"
37 #include "optabs.h"
38 #include "libfuncs.h"
39 #include "recog.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "toplev.h"
43 #include "predict.h"
44 #include "tm_p.h"
45 #include "target.h"
46 #include "langhooks.h"
47
48 #define CALLED_AS_BUILT_IN(NODE) \
49 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
50
51 /* Register mappings for target machines without register windows. */
52 #ifndef INCOMING_REGNO
53 #define INCOMING_REGNO(OUT) (OUT)
54 #endif
55 #ifndef OUTGOING_REGNO
56 #define OUTGOING_REGNO(IN) (IN)
57 #endif
58
59 #ifndef PAD_VARARGS_DOWN
60 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
61 #endif
62
63 /* Define the names of the builtin function types and codes. */
64 const char *const built_in_class_names[4]
65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
66
67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM) STRINGX(X),
68 const char *const built_in_names[(int) END_BUILTINS] =
69 {
70 #include "builtins.def"
71 };
72 #undef DEF_BUILTIN
73
74 /* Set up an array of _DECL trees, making sure each element is
75 initialized to NULL_TREE.  */
76 tree built_in_decls[(int) END_BUILTINS];
77 /* Declarations used when constructing the builtin implicitly in the compiler.
78 It may be NULL_TREE when this is invalid (for instance, when the runtime is
79 not required to implement the function call in all cases).  */
80 tree implicit_built_in_decls[(int) END_BUILTINS];
81
82 static int get_pointer_alignment PARAMS ((tree, unsigned int));
83 static tree c_strlen PARAMS ((tree));
84 static const char *c_getstr PARAMS ((tree));
85 static rtx c_readstr PARAMS ((const char *,
86 enum machine_mode));
87 static int target_char_cast PARAMS ((tree, char *));
88 static rtx get_memory_rtx PARAMS ((tree));
89 static int apply_args_size PARAMS ((void));
90 static int apply_result_size PARAMS ((void));
91 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
92 static rtx result_vector PARAMS ((int, rtx));
93 #endif
94 static rtx expand_builtin_setjmp PARAMS ((tree, rtx));
95 static void expand_builtin_prefetch PARAMS ((tree));
96 static rtx expand_builtin_apply_args PARAMS ((void));
97 static rtx expand_builtin_apply_args_1 PARAMS ((void));
98 static rtx expand_builtin_apply PARAMS ((rtx, rtx, rtx));
99 static void expand_builtin_return PARAMS ((rtx));
100 static enum type_class type_to_class PARAMS ((tree));
101 static rtx expand_builtin_classify_type PARAMS ((tree));
102 static void expand_errno_check PARAMS ((tree, rtx));
103 static rtx expand_builtin_mathfn PARAMS ((tree, rtx, rtx));
104 static rtx expand_builtin_mathfn_2 PARAMS ((tree, rtx, rtx));
105 static rtx expand_builtin_constant_p PARAMS ((tree, enum machine_mode));
106 static rtx expand_builtin_args_info PARAMS ((tree));
107 static rtx expand_builtin_next_arg PARAMS ((tree));
108 static rtx expand_builtin_va_start PARAMS ((tree));
109 static rtx expand_builtin_va_end PARAMS ((tree));
110 static rtx expand_builtin_va_copy PARAMS ((tree));
111 static rtx expand_builtin_memcmp PARAMS ((tree, tree, rtx,
112 enum machine_mode));
113 static rtx expand_builtin_strcmp PARAMS ((tree, rtx,
114 enum machine_mode));
115 static rtx expand_builtin_strncmp PARAMS ((tree, rtx,
116 enum machine_mode));
117 static rtx builtin_memcpy_read_str PARAMS ((PTR, HOST_WIDE_INT,
118 enum machine_mode));
119 static rtx expand_builtin_strcat PARAMS ((tree, rtx,
120 enum machine_mode));
121 static rtx expand_builtin_strncat PARAMS ((tree, rtx,
122 enum machine_mode));
123 static rtx expand_builtin_strspn PARAMS ((tree, rtx,
124 enum machine_mode));
125 static rtx expand_builtin_strcspn PARAMS ((tree, rtx,
126 enum machine_mode));
127 static rtx expand_builtin_memcpy PARAMS ((tree, rtx,
128 enum machine_mode, int));
129 static rtx expand_builtin_mempcpy PARAMS ((tree, rtx,
130 enum machine_mode));
131 static rtx expand_builtin_memmove PARAMS ((tree, rtx,
132 enum machine_mode));
133 static rtx expand_builtin_bcopy PARAMS ((tree));
134 static rtx expand_builtin_strcpy PARAMS ((tree, rtx,
135 enum machine_mode));
136 static rtx expand_builtin_stpcpy PARAMS ((tree, rtx,
137 enum machine_mode));
138 static rtx builtin_strncpy_read_str PARAMS ((PTR, HOST_WIDE_INT,
139 enum machine_mode));
140 static rtx expand_builtin_strncpy PARAMS ((tree, rtx,
141 enum machine_mode));
142 static rtx builtin_memset_read_str PARAMS ((PTR, HOST_WIDE_INT,
143 enum machine_mode));
144 static rtx builtin_memset_gen_str PARAMS ((PTR, HOST_WIDE_INT,
145 enum machine_mode));
146 static rtx expand_builtin_memset PARAMS ((tree, rtx,
147 enum machine_mode));
148 static rtx expand_builtin_bzero PARAMS ((tree));
149 static rtx expand_builtin_strlen PARAMS ((tree, rtx, enum machine_mode));
150 static rtx expand_builtin_strstr PARAMS ((tree, rtx,
151 enum machine_mode));
152 static rtx expand_builtin_strpbrk PARAMS ((tree, rtx,
153 enum machine_mode));
154 static rtx expand_builtin_strchr PARAMS ((tree, rtx,
155 enum machine_mode));
156 static rtx expand_builtin_strrchr PARAMS ((tree, rtx,
157 enum machine_mode));
158 static rtx expand_builtin_alloca PARAMS ((tree, rtx));
159 static rtx expand_builtin_unop PARAMS ((enum machine_mode,
160 tree, rtx, rtx, optab));
161 static rtx expand_builtin_frame_address PARAMS ((tree, tree));
162 static rtx expand_builtin_fputs PARAMS ((tree, int, int));
163 static tree stabilize_va_list PARAMS ((tree, int));
164 static rtx expand_builtin_expect PARAMS ((tree, rtx));
165 static tree fold_builtin_constant_p PARAMS ((tree));
166 static tree fold_builtin_classify_type PARAMS ((tree));
167 static tree fold_builtin_inf PARAMS ((tree, int));
168 static tree fold_builtin_nan PARAMS ((tree, tree, int));
169 static int validate_arglist PARAMS ((tree, ...));
170 static tree fold_trunc_transparent_mathfn PARAMS ((tree));
171 static bool readonly_data_expr PARAMS ((tree));
172
173 /* Return the alignment in bits of EXP, a pointer valued expression.
174 But don't return more than MAX_ALIGN no matter what.
175 The alignment returned is, by default, the alignment of the thing that
176 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
177
178 Otherwise, look at the expression to see if we can do better, i.e., if the
179 expression is actually pointing at an object whose alignment is tighter. */
180
181 static int
182 get_pointer_alignment (exp, max_align)
183 tree exp;
184 unsigned int max_align;
185 {
186 unsigned int align, inner;
187
188 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
189 return 0;
190
191 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
192 align = MIN (align, max_align);
193
194 while (1)
195 {
196 switch (TREE_CODE (exp))
197 {
198 case NOP_EXPR:
199 case CONVERT_EXPR:
200 case NON_LVALUE_EXPR:
201 exp = TREE_OPERAND (exp, 0);
202 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
203 return align;
204
205 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
206 align = MIN (inner, max_align);
207 break;
208
209 case PLUS_EXPR:
210 /* If sum of pointer + int, restrict our maximum alignment to that
211 imposed by the integer. If not, we can't do any better than
212 ALIGN. */
213 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
214 return align;
215
216 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
217 & (max_align / BITS_PER_UNIT - 1))
218 != 0)
219 max_align >>= 1;
220
221 exp = TREE_OPERAND (exp, 0);
222 break;
223
224 case ADDR_EXPR:
225 /* See what we are pointing at and look at its alignment. */
226 exp = TREE_OPERAND (exp, 0);
227 if (TREE_CODE (exp) == FUNCTION_DECL)
228 align = FUNCTION_BOUNDARY;
229 else if (DECL_P (exp))
230 align = DECL_ALIGN (exp);
231 #ifdef CONSTANT_ALIGNMENT
232 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
233 align = CONSTANT_ALIGNMENT (exp, align);
234 #endif
235 return MIN (align, max_align);
236
237 default:
238 return align;
239 }
240 }
241 }
242
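/* Illustrative note (added for exposition, not part of the original source):
   a worked example of the deduction above, assuming MAX_ALIGN is 64 bits
   and a target where doubles are 64-bit aligned.  For

       double d;
       ... (char *) &d + 4 ...

   the PLUS_EXPR case shrinks MAX_ALIGN to 32 bits, because the constant
   byte offset 4 is a multiple of 4 but not of 8; the ADDR_EXPR case then
   returns MIN (DECL_ALIGN (d), MAX_ALIGN) = MIN (64, 32) = 32 bits.  */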
243 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
244 way, because it could contain a zero byte in the middle.
245 TREE_STRING_LENGTH is the size of the character array, not the string.
246
247 The value returned is of type `ssizetype'.
248
249 Unfortunately, string_constant can't access the values of const char
250 arrays with initializers, so neither can we here.  */
251
252 static tree
253 c_strlen (src)
254 tree src;
255 {
256 tree offset_node;
257 HOST_WIDE_INT offset;
258 int max;
259 const char *ptr;
260
261 src = string_constant (src, &offset_node);
262 if (src == 0)
263 return 0;
264
265 max = TREE_STRING_LENGTH (src) - 1;
266 ptr = TREE_STRING_POINTER (src);
267
268 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
269 {
270 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
271 compute the offset to the following null if we don't know where to
272 start searching for it. */
273 int i;
274
275 for (i = 0; i < max; i++)
276 if (ptr[i] == 0)
277 return 0;
278
279 /* We don't know the starting offset, but we do know that the string
280 has no internal zero bytes. We can assume that the offset falls
281 within the bounds of the string; otherwise, the programmer deserves
282 what he gets. Subtract the offset from the length of the string,
283 and return that. This would perhaps not be valid if we were dealing
284 with named arrays in addition to literal string constants. */
285
286 return size_diffop (size_int (max), offset_node);
287 }
288
289 /* We have a known offset into the string. Start searching there for
290 a null character if we can represent it as a single HOST_WIDE_INT. */
291 if (offset_node == 0)
292 offset = 0;
293 else if (! host_integerp (offset_node, 0))
294 offset = -1;
295 else
296 offset = tree_low_cst (offset_node, 0);
297
298 /* If the offset is known to be out of bounds, warn, and call strlen at
299 runtime. */
300 if (offset < 0 || offset > max)
301 {
302 warning ("offset outside bounds of constant string");
303 return 0;
304 }
305
306 /* Use strlen to search for the first zero byte. Since any strings
307 constructed with build_string will have nulls appended, we win even
308 if we get handed something like (char[4])"abcd".
309
310 Since OFFSET is our starting index into the string, no further
311 calculation is needed. */
312 return ssize_int (strlen (ptr + offset));
313 }
314
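/* Illustrative note (added for exposition, not part of the original source):
   two sample results of c_strlen, assuming the usual STRING_CST form.

     "hello" with constant offset 2      => ssize_int (3)
     "foo\0bar" with a non-constant
        offset                           => 0, i.e. length unknown, so the
                                            caller falls back to a runtime
                                            strlen call

   The second case fails because the internal zero byte makes the answer
   depend on where the unknown offset starts.  */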
315 /* Return a char pointer for a C string if it is a string constant
316 or sum of string constant and integer constant. */
317
318 static const char *
319 c_getstr (src)
320 tree src;
321 {
322 tree offset_node;
323
324 src = string_constant (src, &offset_node);
325 if (src == 0)
326 return 0;
327
328 if (offset_node == 0)
329 return TREE_STRING_POINTER (src);
330 else if (!host_integerp (offset_node, 1)
331 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
332 return 0;
333
334 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
335 }
336
337 /* Return a CONST_INT or CONST_DOUBLE corresponding to the target reading
338 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
339
340 static rtx
341 c_readstr (str, mode)
342 const char *str;
343 enum machine_mode mode;
344 {
345 HOST_WIDE_INT c[2];
346 HOST_WIDE_INT ch;
347 unsigned int i, j;
348
349 if (GET_MODE_CLASS (mode) != MODE_INT)
350 abort ();
351 c[0] = 0;
352 c[1] = 0;
353 ch = 1;
354 for (i = 0; i < GET_MODE_SIZE (mode); i++)
355 {
356 j = i;
357 if (WORDS_BIG_ENDIAN)
358 j = GET_MODE_SIZE (mode) - i - 1;
359 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
360 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
361 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
362 j *= BITS_PER_UNIT;
363 if (j > 2 * HOST_BITS_PER_WIDE_INT)
364 abort ();
365 if (ch)
366 ch = (unsigned char) str[i];
367 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
368 }
369 return immed_double_const (c[0], c[1], mode);
370 }
371
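/* Illustrative note (added for exposition, not part of the original source):
   assuming a 32-bit target with 8-bit units, reading the string "abcd" in
   SImode yields

     0x64636261  when !BYTES_BIG_ENDIAN and !WORDS_BIG_ENDIAN ('a' at bit 0)
     0x61626364  when BYTES_BIG_ENDIAN and WORDS_BIG_ENDIAN   ('a' at bit 24)

   Once a zero byte is seen, CH stays zero, so the rest of the constant is
   zero-filled.  */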
372 /* Cast a target constant CST to target CHAR, and if that value fits into
373 the host char type, return zero and store the value in the variable
374 pointed to by P.  */
375
376 static int
377 target_char_cast (cst, p)
378 tree cst;
379 char *p;
380 {
381 unsigned HOST_WIDE_INT val, hostval;
382
383 if (!host_integerp (cst, 1)
384 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
385 return 1;
386
387 val = tree_low_cst (cst, 1);
388 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
389 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
390
391 hostval = val;
392 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
393 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
394
395 if (val != hostval)
396 return 1;
397
398 *p = hostval;
399 return 0;
400 }
401
402 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
403 times to get the address of either a higher stack frame, or a return
404 address located within it (depending on FNDECL_CODE). */
405
406 rtx
407 expand_builtin_return_addr (fndecl_code, count, tem)
408 enum built_in_function fndecl_code;
409 int count;
410 rtx tem;
411 {
412 int i;
413
414 /* Some machines need special handling before we can access
415 arbitrary frames. For example, on the sparc, we must first flush
416 all register windows to the stack. */
417 #ifdef SETUP_FRAME_ADDRESSES
418 if (count > 0)
419 SETUP_FRAME_ADDRESSES ();
420 #endif
421
422 /* On the sparc, the return address is not in the frame, it is in a
423 register. There is no way to access it off of the current frame
424 pointer, but it can be accessed off the previous frame pointer by
425 reading the value from the register window save area. */
426 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
427 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
428 count--;
429 #endif
430
431 /* Scan back COUNT frames to the specified frame. */
432 for (i = 0; i < count; i++)
433 {
434 /* Assume the dynamic chain pointer is in the word that the
435 frame address points to, unless otherwise specified. */
436 #ifdef DYNAMIC_CHAIN_ADDRESS
437 tem = DYNAMIC_CHAIN_ADDRESS (tem);
438 #endif
439 tem = memory_address (Pmode, tem);
440 tem = gen_rtx_MEM (Pmode, tem);
441 set_mem_alias_set (tem, get_frame_alias_set ());
442 tem = copy_to_reg (tem);
443 }
444
445 /* For __builtin_frame_address, return what we've got. */
446 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
447 return tem;
448
449 /* For __builtin_return_address, get the return address from that
450 frame. */
451 #ifdef RETURN_ADDR_RTX
452 tem = RETURN_ADDR_RTX (count, tem);
453 #else
454 tem = memory_address (Pmode,
455 plus_constant (tem, GET_MODE_SIZE (Pmode)));
456 tem = gen_rtx_MEM (Pmode, tem);
457 set_mem_alias_set (tem, get_frame_alias_set ());
458 #endif
459 return tem;
460 }
461
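/* Illustrative example (added for exposition, not part of the original
   source): the user-level builtins that reach the routine above.  The
   argument must be a constant; counts greater than 0 walk the dynamic
   chain as shown and are not reliable on every target.

     void *who_called_me (void)
     {
       return __builtin_return_address (0);
     }

     void *my_frame (void)
     {
       return __builtin_frame_address (0);
     }
*/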
462 /* Alias set used for setjmp buffer. */
463 static HOST_WIDE_INT setjmp_alias_set = -1;
464
465 /* Construct the leading half of a __builtin_setjmp call. Control will
466 return to RECEIVER_LABEL. This is used directly by sjlj exception
467 handling code. */
468
469 void
470 expand_builtin_setjmp_setup (buf_addr, receiver_label)
471 rtx buf_addr;
472 rtx receiver_label;
473 {
474 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
475 rtx stack_save;
476 rtx mem;
477
478 if (setjmp_alias_set == -1)
479 setjmp_alias_set = new_alias_set ();
480
481 #ifdef POINTERS_EXTEND_UNSIGNED
482 if (GET_MODE (buf_addr) != Pmode)
483 buf_addr = convert_memory_address (Pmode, buf_addr);
484 #endif
485
486 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
487
488 emit_queue ();
489
490 /* We store the frame pointer and the address of receiver_label in
491 the buffer and use the rest of it for the stack save area, which
492 is machine-dependent. */
493
494 #ifndef BUILTIN_SETJMP_FRAME_VALUE
495 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
496 #endif
497
498 mem = gen_rtx_MEM (Pmode, buf_addr);
499 set_mem_alias_set (mem, setjmp_alias_set);
500 emit_move_insn (mem, BUILTIN_SETJMP_FRAME_VALUE);
501
502 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
503 set_mem_alias_set (mem, setjmp_alias_set);
504
505 emit_move_insn (validize_mem (mem),
506 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
507
508 stack_save = gen_rtx_MEM (sa_mode,
509 plus_constant (buf_addr,
510 2 * GET_MODE_SIZE (Pmode)));
511 set_mem_alias_set (stack_save, setjmp_alias_set);
512 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
513
514 /* If there is further processing to do, do it. */
515 #ifdef HAVE_builtin_setjmp_setup
516 if (HAVE_builtin_setjmp_setup)
517 emit_insn (gen_builtin_setjmp_setup (buf_addr));
518 #endif
519
520 /* Tell optimize_save_area_alloca that extra work will need to be
521 done during alloca.  */
522 current_function_calls_setjmp = 1;
523
524 /* Set this so all the registers get saved in our frame; we need to be
525 able to copy the saved values for any registers from frames we unwind. */
526 current_function_has_nonlocal_label = 1;
527 }
528
529 /* Construct the trailing part of a __builtin_setjmp call.
530 This is used directly by sjlj exception handling code. */
531
532 void
533 expand_builtin_setjmp_receiver (receiver_label)
534 rtx receiver_label ATTRIBUTE_UNUSED;
535 {
536 /* Clobber the FP when we get here, so we have to make sure it's
537 marked as used by this function. */
538 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
539
540 /* Mark the static chain as clobbered here so life information
541 doesn't get messed up for it. */
542 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
543
544 /* Now put in the code to restore the frame pointer, and argument
545 pointer, if needed. The code below is from expand_end_bindings
546 in stmt.c; see detailed documentation there. */
547 #ifdef HAVE_nonlocal_goto
548 if (! HAVE_nonlocal_goto)
549 #endif
550 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
551
552 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
553 if (fixed_regs[ARG_POINTER_REGNUM])
554 {
555 #ifdef ELIMINABLE_REGS
556 size_t i;
557 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
558
559 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
560 if (elim_regs[i].from == ARG_POINTER_REGNUM
561 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
562 break;
563
564 if (i == ARRAY_SIZE (elim_regs))
565 #endif
566 {
567 /* Now restore our arg pointer from the address at which it
568 was saved in our stack frame. */
569 emit_move_insn (virtual_incoming_args_rtx,
570 copy_to_reg (get_arg_pointer_save_area (cfun)));
571 }
572 }
573 #endif
574
575 #ifdef HAVE_builtin_setjmp_receiver
576 if (HAVE_builtin_setjmp_receiver)
577 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
578 else
579 #endif
580 #ifdef HAVE_nonlocal_goto_receiver
581 if (HAVE_nonlocal_goto_receiver)
582 emit_insn (gen_nonlocal_goto_receiver ());
583 else
584 #endif
585 { /* Nothing */ }
586
587 /* @@@ This is a kludge. Not all machine descriptions define a blockage
588 insn, but we must not allow the code we just generated to be reordered
589 by scheduling. Specifically, the update of the frame pointer must
590 happen immediately, not later. So emit an ASM_INPUT to act as blockage
591 insn. */
592 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
593 }
594
595 /* __builtin_setjmp is passed a pointer to an array of five words (not
596 all will be used on all machines). It operates similarly to the C
597 library function of the same name, but is more efficient. Much of
598 the code below (and for longjmp) is copied from the handling of
599 non-local gotos.
600
601 NOTE: This is intended for use by GNAT and the exception handling
602 scheme in the compiler and will only work in the method used by
603 them. */
604
605 static rtx
606 expand_builtin_setjmp (arglist, target)
607 tree arglist;
608 rtx target;
609 {
610 rtx buf_addr, next_lab, cont_lab;
611
612 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
613 return NULL_RTX;
614
615 if (target == 0 || GET_CODE (target) != REG
616 || REGNO (target) < FIRST_PSEUDO_REGISTER)
617 target = gen_reg_rtx (TYPE_MODE (integer_type_node));
618
619 buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
620
621 next_lab = gen_label_rtx ();
622 cont_lab = gen_label_rtx ();
623
624 expand_builtin_setjmp_setup (buf_addr, next_lab);
625
626 /* Set TARGET to zero and branch to the continue label. */
627 emit_move_insn (target, const0_rtx);
628 emit_jump_insn (gen_jump (cont_lab));
629 emit_barrier ();
630 emit_label (next_lab);
631
632 expand_builtin_setjmp_receiver (next_lab);
633
634 /* Set TARGET to one. */
635 emit_move_insn (target, const1_rtx);
636 emit_label (cont_lab);
637
638 /* Tell flow about the strange goings on.  Putting `next_lab' on
639 `nonlocal_goto_handler_labels' indicates that function calls may
640 traverse the arc back to this label.  */
641
642 current_function_has_nonlocal_label = 1;
643 nonlocal_goto_handler_labels
644 = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);
645
646 return target;
647 }
648
649 /* __builtin_longjmp is passed a pointer to an array of five words (not
650 all will be used on all machines). It operates similarly to the C
651 library function of the same name, but is more efficient. Much of
652 the code below is copied from the handling of non-local gotos.
653
654 NOTE: This is intended for use by GNAT and the exception handling
655 scheme in the compiler and will only work in the method used by
656 them. */
657
658 void
659 expand_builtin_longjmp (buf_addr, value)
660 rtx buf_addr, value;
661 {
662 rtx fp, lab, stack, insn, last;
663 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
664
665 if (setjmp_alias_set == -1)
666 setjmp_alias_set = new_alias_set ();
667
668 #ifdef POINTERS_EXTEND_UNSIGNED
669 if (GET_MODE (buf_addr) != Pmode)
670 buf_addr = convert_memory_address (Pmode, buf_addr);
671 #endif
672
673 buf_addr = force_reg (Pmode, buf_addr);
674
675 /* We used to store value in static_chain_rtx, but that fails if pointers
676 are smaller than integers. We instead require that the user must pass
677 a second argument of 1, because that is what builtin_setjmp will
678 return. This also makes EH slightly more efficient, since we are no
679 longer copying around a value that we don't care about. */
680 if (value != const1_rtx)
681 abort ();
682
683 current_function_calls_longjmp = 1;
684
685 last = get_last_insn ();
686 #ifdef HAVE_builtin_longjmp
687 if (HAVE_builtin_longjmp)
688 emit_insn (gen_builtin_longjmp (buf_addr));
689 else
690 #endif
691 {
692 fp = gen_rtx_MEM (Pmode, buf_addr);
693 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
694 GET_MODE_SIZE (Pmode)));
695
696 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
697 2 * GET_MODE_SIZE (Pmode)));
698 set_mem_alias_set (fp, setjmp_alias_set);
699 set_mem_alias_set (lab, setjmp_alias_set);
700 set_mem_alias_set (stack, setjmp_alias_set);
701
702 /* Pick up FP, label, and SP from the block and jump. This code is
703 from expand_goto in stmt.c; see there for detailed comments. */
704 #if HAVE_nonlocal_goto
705 if (HAVE_nonlocal_goto)
706 /* We have to pass a value to the nonlocal_goto pattern that will
707 get copied into the static_chain pointer, but it does not matter
708 what that value is, because builtin_setjmp does not use it. */
709 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
710 else
711 #endif
712 {
713 lab = copy_to_reg (lab);
714
715 emit_move_insn (hard_frame_pointer_rtx, fp);
716 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
717
718 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
719 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
720 emit_indirect_jump (lab);
721 }
722 }
723
724 /* Search backwards and mark the jump insn as a non-local goto.
725 Note that this precludes the use of __builtin_longjmp to a
726 __builtin_setjmp target in the same function. However, we've
727 already cautioned the user that these functions are for
728 internal exception handling use only. */
729 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
730 {
731 if (insn == last)
732 abort ();
733 if (GET_CODE (insn) == JUMP_INSN)
734 {
735 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
736 REG_NOTES (insn));
737 break;
738 }
739 else if (GET_CODE (insn) == CALL_INSN)
740 break;
741 }
742 }
743
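/* Illustrative example (added for exposition, not part of the original
   source): how the two builtins above fit together.  As the comments say,
   the pairing is meant for the exception-handling machinery rather than
   ordinary user code.  A sketch, assuming the usual five-word buffer
   (frame pointer, then receiver label, then the stack save area);
   do_something and handle_unwind are placeholders:

     void *buf[5];

     if (__builtin_setjmp (buf) == 0)
       do_something ();                 -- direct return, setjmp yields 0
     else
       handle_unwind ();                -- reached via __builtin_longjmp (buf, 1)

   The second argument of __builtin_longjmp must be the constant 1, which
   is exactly what expand_builtin_longjmp checks above.  */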
744 /* Expand a call to __builtin_prefetch. For a target that does not support
745 data prefetch, evaluate the memory address argument in case it has side
746 effects. */
747
748 static void
749 expand_builtin_prefetch (arglist)
750 tree arglist;
751 {
752 tree arg0, arg1, arg2;
753 rtx op0, op1, op2;
754
755 if (!validate_arglist (arglist, POINTER_TYPE, 0))
756 return;
757
758 arg0 = TREE_VALUE (arglist);
759 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
760 zero (read) and argument 2 (locality) defaults to 3 (high degree of
761 locality). */
762 if (TREE_CHAIN (arglist))
763 {
764 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
765 if (TREE_CHAIN (TREE_CHAIN (arglist)))
766 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
767 else
768 arg2 = build_int_2 (3, 0);
769 }
770 else
771 {
772 arg1 = integer_zero_node;
773 arg2 = build_int_2 (3, 0);
774 }
775
776 /* Argument 0 is an address. */
777 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
778
779 /* Argument 1 (read/write flag) must be a compile-time constant int. */
780 if (TREE_CODE (arg1) != INTEGER_CST)
781 {
782 error ("second arg to `__builtin_prefetch' must be a constant");
783 arg1 = integer_zero_node;
784 }
785 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
786 /* Argument 1 must be either zero or one. */
787 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
788 {
789 warning ("invalid second arg to __builtin_prefetch; using zero");
790 op1 = const0_rtx;
791 }
792
793 /* Argument 2 (locality) must be a compile-time constant int. */
794 if (TREE_CODE (arg2) != INTEGER_CST)
795 {
796 error ("third arg to `__builtin_prefetch' must be a constant");
797 arg2 = integer_zero_node;
798 }
799 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
800 /* Argument 2 must be 0, 1, 2, or 3. */
801 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
802 {
803 warning ("invalid third arg to __builtin_prefetch; using zero");
804 op2 = const0_rtx;
805 }
806
807 #ifdef HAVE_prefetch
808 if (HAVE_prefetch)
809 {
810 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
811 (op0,
812 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
813 || (GET_MODE(op0) != Pmode))
814 {
815 #ifdef POINTERS_EXTEND_UNSIGNED
816 if (GET_MODE(op0) != Pmode)
817 op0 = convert_memory_address (Pmode, op0);
818 #endif
819 op0 = force_reg (Pmode, op0);
820 }
821 emit_insn (gen_prefetch (op0, op1, op2));
822 }
823 else
824 #endif
825 op0 = protect_from_queue (op0, 0);
826 /* Don't do anything with direct references to volatile memory, but
827 generate code to handle other side effects. */
828 if (GET_CODE (op0) != MEM && side_effects_p (op0))
829 emit_insn (op0);
830 }
831
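/* Illustrative example (added for exposition, not part of the original
   source): user-level calls that reach the expander above.  The read/write
   and locality arguments are optional, default to 0 and 3, and must be
   compile-time constants; P stands for some pointer in scope.

     __builtin_prefetch (p);               -- same as (p, 0, 3)
     __builtin_prefetch (p + 64, 1, 0);    -- prefetch for write, low locality

   On a target without a prefetch pattern only the address argument is
   evaluated, and only for its side effects.  */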
832 /* Get a MEM rtx for expression EXP which is the address of an operand
833 to be used in a string instruction (cmpstrsi, movstrsi, ...).  */
834
835 static rtx
836 get_memory_rtx (exp)
837 tree exp;
838 {
839 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_SUM);
840 rtx mem;
841
842 #ifdef POINTERS_EXTEND_UNSIGNED
843 if (GET_MODE (addr) != Pmode)
844 addr = convert_memory_address (Pmode, addr);
845 #endif
846
847 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
848
849 /* Get an expression we can use to find the attributes to assign to MEM.
850 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
851 we can. First remove any nops. */
852 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
853 || TREE_CODE (exp) == NON_LVALUE_EXPR)
854 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
855 exp = TREE_OPERAND (exp, 0);
856
857 if (TREE_CODE (exp) == ADDR_EXPR)
858 {
859 exp = TREE_OPERAND (exp, 0);
860 set_mem_attributes (mem, exp, 0);
861 }
862 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
863 {
864 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
865 /* memcpy, memset and other builtin stringops can alias with anything. */
866 set_mem_alias_set (mem, 0);
867 }
868
869 return mem;
870 }
871 \f
872 /* Built-in functions to perform an untyped call and return. */
873
874 /* For each register that may be used for calling a function, this
875 gives a mode used to copy the register's value. VOIDmode indicates
876 the register is not used for calling a function. If the machine
877 has register windows, this gives only the outbound registers.
878 INCOMING_REGNO gives the corresponding inbound register. */
879 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
880
881 /* For each register that may be used for returning values, this gives
882 a mode used to copy the register's value. VOIDmode indicates the
883 register is not used for returning values. If the machine has
884 register windows, this gives only the outbound registers.
885 INCOMING_REGNO gives the corresponding inbound register. */
886 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
887
888 /* For each register that may be used for calling a function, this
889 gives the offset of that register into the block returned by
890 __builtin_apply_args. 0 indicates that the register is not
891 used for calling a function. */
892 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
893
894 /* Return the offset of register REGNO into the block returned by
895 __builtin_apply_args. This is not declared static, since it is
896 needed in objc-act.c. */
897
898 int
899 apply_args_register_offset (regno)
900 int regno;
901 {
902 apply_args_size ();
903
904 /* Arguments are always put in outgoing registers (in the argument
905 block) when doing so makes sense.  */
906 #ifdef OUTGOING_REGNO
907 regno = OUTGOING_REGNO (regno);
908 #endif
909 return apply_args_reg_offset[regno];
910 }
911
912 /* Return the size required for the block returned by __builtin_apply_args,
913 and initialize apply_args_mode. */
914
915 static int
916 apply_args_size ()
917 {
918 static int size = -1;
919 int align;
920 unsigned int regno;
921 enum machine_mode mode;
922
923 /* The values computed by this function never change. */
924 if (size < 0)
925 {
926 /* The first value is the incoming arg-pointer. */
927 size = GET_MODE_SIZE (Pmode);
928
929 /* The second value is the structure value address unless this is
930 passed as an "invisible" first argument. */
931 if (struct_value_rtx)
932 size += GET_MODE_SIZE (Pmode);
933
934 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
935 if (FUNCTION_ARG_REGNO_P (regno))
936 {
937 /* Search for the proper mode for copying this register's
938 value. I'm not sure this is right, but it works so far. */
939 enum machine_mode best_mode = VOIDmode;
940
941 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
942 mode != VOIDmode;
943 mode = GET_MODE_WIDER_MODE (mode))
944 if (HARD_REGNO_MODE_OK (regno, mode)
945 && HARD_REGNO_NREGS (regno, mode) == 1)
946 best_mode = mode;
947
948 if (best_mode == VOIDmode)
949 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
950 mode != VOIDmode;
951 mode = GET_MODE_WIDER_MODE (mode))
952 if (HARD_REGNO_MODE_OK (regno, mode)
953 && have_insn_for (SET, mode))
954 best_mode = mode;
955
956 if (best_mode == VOIDmode)
957 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
958 mode != VOIDmode;
959 mode = GET_MODE_WIDER_MODE (mode))
960 if (HARD_REGNO_MODE_OK (regno, mode)
961 && have_insn_for (SET, mode))
962 best_mode = mode;
963
964 if (best_mode == VOIDmode)
965 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
966 mode != VOIDmode;
967 mode = GET_MODE_WIDER_MODE (mode))
968 if (HARD_REGNO_MODE_OK (regno, mode)
969 && have_insn_for (SET, mode))
970 best_mode = mode;
971
972 mode = best_mode;
973 if (mode == VOIDmode)
974 abort ();
975
976 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
977 if (size % align != 0)
978 size = CEIL (size, align) * align;
979 apply_args_reg_offset[regno] = size;
980 size += GET_MODE_SIZE (mode);
981 apply_args_mode[regno] = mode;
982 }
983 else
984 {
985 apply_args_mode[regno] = VOIDmode;
986 apply_args_reg_offset[regno] = 0;
987 }
988 }
989 return size;
990 }
991
992 /* Return the size required for the block returned by __builtin_apply,
993 and initialize apply_result_mode. */
994
995 static int
996 apply_result_size ()
997 {
998 static int size = -1;
999 int align, regno;
1000 enum machine_mode mode;
1001
1002 /* The values computed by this function never change. */
1003 if (size < 0)
1004 {
1005 size = 0;
1006
1007 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1008 if (FUNCTION_VALUE_REGNO_P (regno))
1009 {
1010 /* Search for the proper mode for copying this register's
1011 value. I'm not sure this is right, but it works so far. */
1012 enum machine_mode best_mode = VOIDmode;
1013
1014 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1015 mode != TImode;
1016 mode = GET_MODE_WIDER_MODE (mode))
1017 if (HARD_REGNO_MODE_OK (regno, mode))
1018 best_mode = mode;
1019
1020 if (best_mode == VOIDmode)
1021 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
1022 mode != VOIDmode;
1023 mode = GET_MODE_WIDER_MODE (mode))
1024 if (HARD_REGNO_MODE_OK (regno, mode)
1025 && have_insn_for (SET, mode))
1026 best_mode = mode;
1027
1028 if (best_mode == VOIDmode)
1029 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
1030 mode != VOIDmode;
1031 mode = GET_MODE_WIDER_MODE (mode))
1032 if (HARD_REGNO_MODE_OK (regno, mode)
1033 && have_insn_for (SET, mode))
1034 best_mode = mode;
1035
1036 if (best_mode == VOIDmode)
1037 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
1038 mode != VOIDmode;
1039 mode = GET_MODE_WIDER_MODE (mode))
1040 if (HARD_REGNO_MODE_OK (regno, mode)
1041 && have_insn_for (SET, mode))
1042 best_mode = mode;
1043
1044 mode = best_mode;
1045 if (mode == VOIDmode)
1046 abort ();
1047
1048 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1049 if (size % align != 0)
1050 size = CEIL (size, align) * align;
1051 size += GET_MODE_SIZE (mode);
1052 apply_result_mode[regno] = mode;
1053 }
1054 else
1055 apply_result_mode[regno] = VOIDmode;
1056
1057 /* Allow targets that use untyped_call and untyped_return to override
1058 the size so that machine-specific information can be stored here. */
1059 #ifdef APPLY_RESULT_SIZE
1060 size = APPLY_RESULT_SIZE;
1061 #endif
1062 }
1063 return size;
1064 }
1065
1066 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1067 /* Create a vector describing the result block RESULT. If SAVEP is true,
1068 the result block is used to save the values; otherwise it is used to
1069 restore the values. */
1070
1071 static rtx
1072 result_vector (savep, result)
1073 int savep;
1074 rtx result;
1075 {
1076 int regno, size, align, nelts;
1077 enum machine_mode mode;
1078 rtx reg, mem;
1079 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1080
1081 size = nelts = 0;
1082 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1083 if ((mode = apply_result_mode[regno]) != VOIDmode)
1084 {
1085 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1086 if (size % align != 0)
1087 size = CEIL (size, align) * align;
1088 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1089 mem = adjust_address (result, mode, size);
1090 savevec[nelts++] = (savep
1091 ? gen_rtx_SET (VOIDmode, mem, reg)
1092 : gen_rtx_SET (VOIDmode, reg, mem));
1093 size += GET_MODE_SIZE (mode);
1094 }
1095 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1096 }
1097 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1098
1099 /* Save the state required to perform an untyped call with the same
1100 arguments as were passed to the current function. */
1101
1102 static rtx
1103 expand_builtin_apply_args_1 ()
1104 {
1105 rtx registers;
1106 int size, align, regno;
1107 enum machine_mode mode;
1108
1109 /* Create a block where the arg-pointer, structure value address,
1110 and argument registers can be saved. */
1111 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1112
1113 /* Walk past the arg-pointer and structure value address. */
1114 size = GET_MODE_SIZE (Pmode);
1115 if (struct_value_rtx)
1116 size += GET_MODE_SIZE (Pmode);
1117
1118 /* Save each register used in calling a function to the block. */
1119 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1120 if ((mode = apply_args_mode[regno]) != VOIDmode)
1121 {
1122 rtx tem;
1123
1124 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1125 if (size % align != 0)
1126 size = CEIL (size, align) * align;
1127
1128 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1129
1130 emit_move_insn (adjust_address (registers, mode, size), tem);
1131 size += GET_MODE_SIZE (mode);
1132 }
1133
1134 /* Save the arg pointer to the block. */
1135 emit_move_insn (adjust_address (registers, Pmode, 0),
1136 copy_to_reg (virtual_incoming_args_rtx));
1137 size = GET_MODE_SIZE (Pmode);
1138
1139 /* Save the structure value address unless this is passed as an
1140 "invisible" first argument. */
1141 if (struct_value_incoming_rtx)
1142 {
1143 emit_move_insn (adjust_address (registers, Pmode, size),
1144 copy_to_reg (struct_value_incoming_rtx));
1145 size += GET_MODE_SIZE (Pmode);
1146 }
1147
1148 /* Return the address of the block. */
1149 return copy_addr_to_reg (XEXP (registers, 0));
1150 }
1151
1152 /* __builtin_apply_args returns a block of memory allocated on
1153 the stack into which is stored the arg pointer, structure
1154 value address, static chain, and all the registers that might
1155 possibly be used in performing a function call. The code is
1156 moved to the start of the function so the incoming values are
1157 saved. */
1158
1159 static rtx
1160 expand_builtin_apply_args ()
1161 {
1162 /* Don't do __builtin_apply_args more than once in a function.
1163 Save the result of the first call and reuse it. */
1164 if (apply_args_value != 0)
1165 return apply_args_value;
1166 {
1167 /* When this function is called, it means that registers must be
1168 saved on entry to this function. So we migrate the
1169 call to the first insn of this function. */
1170 rtx temp;
1171 rtx seq;
1172
1173 start_sequence ();
1174 temp = expand_builtin_apply_args_1 ();
1175 seq = get_insns ();
1176 end_sequence ();
1177
1178 apply_args_value = temp;
1179
1180 /* Put the insns after the NOTE that starts the function.
1181 If this is inside a start_sequence, make the outer-level insn
1182 chain current, so the code is placed at the start of the
1183 function. */
1184 push_topmost_sequence ();
1185 emit_insn_before (seq, NEXT_INSN (get_insns ()));
1186 pop_topmost_sequence ();
1187 return temp;
1188 }
1189 }
1190
1191 /* Perform an untyped call and save the state required to perform an
1192 untyped return of whatever value was returned by the given function. */
1193
1194 static rtx
1195 expand_builtin_apply (function, arguments, argsize)
1196 rtx function, arguments, argsize;
1197 {
1198 int size, align, regno;
1199 enum machine_mode mode;
1200 rtx incoming_args, result, reg, dest, src, call_insn;
1201 rtx old_stack_level = 0;
1202 rtx call_fusage = 0;
1203
1204 #ifdef POINTERS_EXTEND_UNSIGNED
1205 if (GET_MODE (arguments) != Pmode)
1206 arguments = convert_memory_address (Pmode, arguments);
1207 #endif
1208
1209 /* Create a block where the return registers can be saved. */
1210 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1211
1212 /* Fetch the arg pointer from the ARGUMENTS block. */
1213 incoming_args = gen_reg_rtx (Pmode);
1214 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1215 #ifndef STACK_GROWS_DOWNWARD
1216 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1217 incoming_args, 0, OPTAB_LIB_WIDEN);
1218 #endif
1219
1220 /* Perform postincrements before actually calling the function. */
1221 emit_queue ();
1222
1223 /* Push a new argument block and copy the arguments. Do not allow
1224 the (potential) memcpy call below to interfere with our stack
1225 manipulations. */
1226 do_pending_stack_adjust ();
1227 NO_DEFER_POP;
1228
1229 /* Save the stack with nonlocal if available */
1230 #ifdef HAVE_save_stack_nonlocal
1231 if (HAVE_save_stack_nonlocal)
1232 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1233 else
1234 #endif
1235 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1236
1237 /* Push a block of memory onto the stack to store the memory arguments.
1238 Save the address in a register, and copy the memory arguments. ??? I
1239 haven't figured out how the calling convention macros affect this,
1240 but it's likely that the source and/or destination addresses in
1241 the block copy will need updating in machine specific ways. */
1242 dest = allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1243 dest = gen_rtx_MEM (BLKmode, dest);
1244 set_mem_align (dest, PARM_BOUNDARY);
1245 src = gen_rtx_MEM (BLKmode, incoming_args);
1246 set_mem_align (src, PARM_BOUNDARY);
1247 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1248
1249 /* Refer to the argument block. */
1250 apply_args_size ();
1251 arguments = gen_rtx_MEM (BLKmode, arguments);
1252 set_mem_align (arguments, PARM_BOUNDARY);
1253
1254 /* Walk past the arg-pointer and structure value address. */
1255 size = GET_MODE_SIZE (Pmode);
1256 if (struct_value_rtx)
1257 size += GET_MODE_SIZE (Pmode);
1258
1259 /* Restore each of the registers previously saved. Make USE insns
1260 for each of these registers for use in making the call. */
1261 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1262 if ((mode = apply_args_mode[regno]) != VOIDmode)
1263 {
1264 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1265 if (size % align != 0)
1266 size = CEIL (size, align) * align;
1267 reg = gen_rtx_REG (mode, regno);
1268 emit_move_insn (reg, adjust_address (arguments, mode, size));
1269 use_reg (&call_fusage, reg);
1270 size += GET_MODE_SIZE (mode);
1271 }
1272
1273 /* Restore the structure value address unless this is passed as an
1274 "invisible" first argument. */
1275 size = GET_MODE_SIZE (Pmode);
1276 if (struct_value_rtx)
1277 {
1278 rtx value = gen_reg_rtx (Pmode);
1279 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1280 emit_move_insn (struct_value_rtx, value);
1281 if (GET_CODE (struct_value_rtx) == REG)
1282 use_reg (&call_fusage, struct_value_rtx);
1283 size += GET_MODE_SIZE (Pmode);
1284 }
1285
1286 /* All arguments and registers used for the call are set up by now! */
1287 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0, 0);
1288
1289 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1290 and we don't want to load it into a register as an optimization,
1291 because prepare_call_address already did it if it should be done. */
1292 if (GET_CODE (function) != SYMBOL_REF)
1293 function = memory_address (FUNCTION_MODE, function);
1294
1295 /* Generate the actual call instruction and save the return value. */
1296 #ifdef HAVE_untyped_call
1297 if (HAVE_untyped_call)
1298 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1299 result, result_vector (1, result)));
1300 else
1301 #endif
1302 #ifdef HAVE_call_value
1303 if (HAVE_call_value)
1304 {
1305 rtx valreg = 0;
1306
1307 /* Locate the unique return register. It is not possible to
1308 express a call that sets more than one return register using
1309 call_value; use untyped_call for that. In fact, untyped_call
1310 only needs to save the return registers in the given block. */
1311 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1312 if ((mode = apply_result_mode[regno]) != VOIDmode)
1313 {
1314 if (valreg)
1315 abort (); /* HAVE_untyped_call required. */
1316 valreg = gen_rtx_REG (mode, regno);
1317 }
1318
1319 emit_call_insn (GEN_CALL_VALUE (valreg,
1320 gen_rtx_MEM (FUNCTION_MODE, function),
1321 const0_rtx, NULL_RTX, const0_rtx));
1322
1323 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1324 }
1325 else
1326 #endif
1327 abort ();
1328
1329 /* Find the CALL insn we just emitted, and attach the register usage
1330 information. */
1331 call_insn = last_call_insn ();
1332 add_function_usage_to (call_insn, call_fusage);
1333
1334 /* Restore the stack. */
1335 #ifdef HAVE_save_stack_nonlocal
1336 if (HAVE_save_stack_nonlocal)
1337 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1338 else
1339 #endif
1340 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1341
1342 OK_DEFER_POP;
1343
1344 /* Return the address of the result block. */
1345 return copy_addr_to_reg (XEXP (result, 0));
1346 }
1347
1348 /* Perform an untyped return. */
1349
1350 static void
1351 expand_builtin_return (result)
1352 rtx result;
1353 {
1354 int size, align, regno;
1355 enum machine_mode mode;
1356 rtx reg;
1357 rtx call_fusage = 0;
1358
1359 #ifdef POINTERS_EXTEND_UNSIGNED
1360 if (GET_MODE (result) != Pmode)
1361 result = convert_memory_address (Pmode, result);
1362 #endif
1363
1364 apply_result_size ();
1365 result = gen_rtx_MEM (BLKmode, result);
1366
1367 #ifdef HAVE_untyped_return
1368 if (HAVE_untyped_return)
1369 {
1370 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1371 emit_barrier ();
1372 return;
1373 }
1374 #endif
1375
1376 /* Restore the return value and note that each value is used. */
1377 size = 0;
1378 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1379 if ((mode = apply_result_mode[regno]) != VOIDmode)
1380 {
1381 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1382 if (size % align != 0)
1383 size = CEIL (size, align) * align;
1384 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1385 emit_move_insn (reg, adjust_address (result, mode, size));
1386
1387 push_to_sequence (call_fusage);
1388 emit_insn (gen_rtx_USE (VOIDmode, reg));
1389 call_fusage = get_insns ();
1390 end_sequence ();
1391 size += GET_MODE_SIZE (mode);
1392 }
1393
1394 /* Put the USE insns before the return. */
1395 emit_insn (call_fusage);
1396
1397 /* Return whatever values were restored by jumping directly to the end
1398 of the function. */
1399 expand_null_return ();
1400 }
1401
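/* Illustrative example (added for exposition, not part of the original
   source): the user-level builtins served by the three routines above,
   sketched as a call forwarder.  forwarded_func is a placeholder, and 128
   merely stands for "enough bytes of pushed arguments"; the real size must
   be supplied by the caller.

     int wrapper (int a, int b)
     {
       void *args = __builtin_apply_args ();
       void *result
         = __builtin_apply ((void (*) ()) forwarded_func, args, 128);
       __builtin_return (result);
     }
*/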
1402 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1403
1404 static enum type_class
1405 type_to_class (type)
1406 tree type;
1407 {
1408 switch (TREE_CODE (type))
1409 {
1410 case VOID_TYPE: return void_type_class;
1411 case INTEGER_TYPE: return integer_type_class;
1412 case CHAR_TYPE: return char_type_class;
1413 case ENUMERAL_TYPE: return enumeral_type_class;
1414 case BOOLEAN_TYPE: return boolean_type_class;
1415 case POINTER_TYPE: return pointer_type_class;
1416 case REFERENCE_TYPE: return reference_type_class;
1417 case OFFSET_TYPE: return offset_type_class;
1418 case REAL_TYPE: return real_type_class;
1419 case COMPLEX_TYPE: return complex_type_class;
1420 case FUNCTION_TYPE: return function_type_class;
1421 case METHOD_TYPE: return method_type_class;
1422 case RECORD_TYPE: return record_type_class;
1423 case UNION_TYPE:
1424 case QUAL_UNION_TYPE: return union_type_class;
1425 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1426 ? string_type_class : array_type_class);
1427 case SET_TYPE: return set_type_class;
1428 case FILE_TYPE: return file_type_class;
1429 case LANG_TYPE: return lang_type_class;
1430 default: return no_type_class;
1431 }
1432 }
1433
1434 /* Expand a call to __builtin_classify_type with arguments found in
1435 ARGLIST. */
1436
1437 static rtx
1438 expand_builtin_classify_type (arglist)
1439 tree arglist;
1440 {
1441 if (arglist != 0)
1442 return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
1443 return GEN_INT (no_type_class);
1444 }
1445
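/* Illustrative example (added for exposition, not part of the original
   source): user-level view of the classification above.  The result is one
   of the small integer codes from the enum in typeclass.h; for instance an
   int argument yields integer_type_class and a double yields
   real_type_class:

     int i;
     double d;
     ... __builtin_classify_type (i) ...   -- integer_type_class
     ... __builtin_classify_type (d) ...   -- real_type_class
*/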
1446 /* Expand expression EXP, which is a call to __builtin_constant_p. */
1447
1448 static rtx
1449 expand_builtin_constant_p (arglist, target_mode)
1450 tree arglist;
1451 enum machine_mode target_mode;
1452 {
1453 rtx tmp;
1454
1455 if (arglist == 0)
1456 return const0_rtx;
1457 arglist = TREE_VALUE (arglist);
1458
1459 /* We have taken care of the easy cases during constant folding. This
1460 case is not obvious, so emit (constant_p_rtx (ARGLIST)) and let CSE
1461 get a chance to see if it can deduce whether ARGLIST is constant. */
1462
1463 current_function_calls_constant_p = 1;
1464
1465 tmp = expand_expr (arglist, NULL_RTX, VOIDmode, 0);
1466 tmp = gen_rtx_CONSTANT_P_RTX (target_mode, tmp);
1467 return tmp;
1468 }
1469
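/* Illustrative example (added for exposition, not part of the original
   source): typical use of the builtin handled above.  Obvious cases fold to
   0 or 1 during constant folding; the CONSTANT_P_RTX emitted here gives CSE
   a chance to prove constancy later.  PRECOMPUTED and COMPUTE are
   placeholders for a compile-time and a runtime variant.

     result = __builtin_constant_p (n) ? PRECOMPUTED (n) : COMPUTE (n);
*/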
1470 /* Return the mathematical function equivalent to FN but operating directly
1471 on TYPE, if available.  */
1472 tree
1473 mathfn_built_in (type, fn)
1474 tree type;
1475 enum built_in_function fn;
1476 {
1477 enum built_in_function fcode = NOT_BUILT_IN;
1478 if (TYPE_MODE (type) == TYPE_MODE (double_type_node))
1479 switch (fn)
1480 {
1481 case BUILT_IN_SQRT:
1482 case BUILT_IN_SQRTF:
1483 case BUILT_IN_SQRTL:
1484 fcode = BUILT_IN_SQRT;
1485 break;
1486 case BUILT_IN_SIN:
1487 case BUILT_IN_SINF:
1488 case BUILT_IN_SINL:
1489 fcode = BUILT_IN_SIN;
1490 break;
1491 case BUILT_IN_COS:
1492 case BUILT_IN_COSF:
1493 case BUILT_IN_COSL:
1494 fcode = BUILT_IN_COS;
1495 break;
1496 case BUILT_IN_EXP:
1497 case BUILT_IN_EXPF:
1498 case BUILT_IN_EXPL:
1499 fcode = BUILT_IN_EXP;
1500 break;
1501 case BUILT_IN_LOG:
1502 case BUILT_IN_LOGF:
1503 case BUILT_IN_LOGL:
1504 fcode = BUILT_IN_LOG;
1505 break;
1506 case BUILT_IN_FLOOR:
1507 case BUILT_IN_FLOORF:
1508 case BUILT_IN_FLOORL:
1509 fcode = BUILT_IN_FLOOR;
1510 break;
1511 case BUILT_IN_CEIL:
1512 case BUILT_IN_CEILF:
1513 case BUILT_IN_CEILL:
1514 fcode = BUILT_IN_CEIL;
1515 break;
1516 case BUILT_IN_TRUNC:
1517 case BUILT_IN_TRUNCF:
1518 case BUILT_IN_TRUNCL:
1519 fcode = BUILT_IN_TRUNC;
1520 break;
1521 case BUILT_IN_ROUND:
1522 case BUILT_IN_ROUNDF:
1523 case BUILT_IN_ROUNDL:
1524 fcode = BUILT_IN_ROUND;
1525 break;
1526 case BUILT_IN_NEARBYINT:
1527 case BUILT_IN_NEARBYINTF:
1528 case BUILT_IN_NEARBYINTL:
1529 fcode = BUILT_IN_NEARBYINT;
1530 break;
1531 default:
1532 abort ();
1533 }
1534 else if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
1535 switch (fn)
1536 {
1537 case BUILT_IN_SQRT:
1538 case BUILT_IN_SQRTF:
1539 case BUILT_IN_SQRTL:
1540 fcode = BUILT_IN_SQRTF;
1541 break;
1542 case BUILT_IN_SIN:
1543 case BUILT_IN_SINF:
1544 case BUILT_IN_SINL:
1545 fcode = BUILT_IN_SINF;
1546 break;
1547 case BUILT_IN_COS:
1548 case BUILT_IN_COSF:
1549 case BUILT_IN_COSL:
1550 fcode = BUILT_IN_COSF;
1551 break;
1552 case BUILT_IN_EXP:
1553 case BUILT_IN_EXPF:
1554 case BUILT_IN_EXPL:
1555 fcode = BUILT_IN_EXPF;
1556 break;
1557 case BUILT_IN_LOG:
1558 case BUILT_IN_LOGF:
1559 case BUILT_IN_LOGL:
1560 fcode = BUILT_IN_LOGF;
1561 break;
1562 case BUILT_IN_FLOOR:
1563 case BUILT_IN_FLOORF:
1564 case BUILT_IN_FLOORL:
1565 fcode = BUILT_IN_FLOORF;
1566 break;
1567 case BUILT_IN_CEIL:
1568 case BUILT_IN_CEILF:
1569 case BUILT_IN_CEILL:
1570 fcode = BUILT_IN_CEILF;
1571 break;
1572 case BUILT_IN_TRUNC:
1573 case BUILT_IN_TRUNCF:
1574 case BUILT_IN_TRUNCL:
1575 fcode = BUILT_IN_TRUNCF;
1576 break;
1577 case BUILT_IN_ROUND:
1578 case BUILT_IN_ROUNDF:
1579 case BUILT_IN_ROUNDL:
1580 fcode = BUILT_IN_ROUNDF;
1581 break;
1582 case BUILT_IN_NEARBYINT:
1583 case BUILT_IN_NEARBYINTF:
1584 case BUILT_IN_NEARBYINTL:
1585 fcode = BUILT_IN_NEARBYINTF;
1586 break;
1587 default:
1588 abort ();
1589 }
1590 else if (TYPE_MODE (type) == TYPE_MODE (long_double_type_node))
1591 switch (fn)
1592 {
1593 case BUILT_IN_SQRT:
1594 case BUILT_IN_SQRTF:
1595 case BUILT_IN_SQRTL:
1596 fcode = BUILT_IN_SQRTL;
1597 break;
1598 case BUILT_IN_SIN:
1599 case BUILT_IN_SINF:
1600 case BUILT_IN_SINL:
1601 fcode = BUILT_IN_SINL;
1602 break;
1603 case BUILT_IN_COS:
1604 case BUILT_IN_COSF:
1605 case BUILT_IN_COSL:
1606 fcode = BUILT_IN_COSL;
1607 break;
1608 case BUILT_IN_EXP:
1609 case BUILT_IN_EXPF:
1610 case BUILT_IN_EXPL:
1611 fcode = BUILT_IN_EXPL;
1612 break;
1613 case BUILT_IN_LOG:
1614 case BUILT_IN_LOGF:
1615 case BUILT_IN_LOGL:
1616 fcode = BUILT_IN_LOGL;
1617 break;
1618 case BUILT_IN_FLOOR:
1619 case BUILT_IN_FLOORF:
1620 case BUILT_IN_FLOORL:
1621 fcode = BUILT_IN_FLOORL;
1622 break;
1623 case BUILT_IN_CEIL:
1624 case BUILT_IN_CEILF:
1625 case BUILT_IN_CEILL:
1626 fcode = BUILT_IN_CEILL;
1627 break;
1628 case BUILT_IN_TRUNC:
1629 case BUILT_IN_TRUNCF:
1630 case BUILT_IN_TRUNCL:
1631 fcode = BUILT_IN_TRUNCL;
1632 break;
1633 case BUILT_IN_ROUND:
1634 case BUILT_IN_ROUNDF:
1635 case BUILT_IN_ROUNDL:
1636 fcode = BUILT_IN_ROUNDL;
1637 break;
1638 case BUILT_IN_NEARBYINT:
1639 case BUILT_IN_NEARBYINTF:
1640 case BUILT_IN_NEARBYINTL:
1641 fcode = BUILT_IN_NEARBYINTL;
1642 break;
1643 default:
1644 abort ();
1645 }
1646 return implicit_built_in_decls[fcode];
1647 }
1648
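/* Illustrative note (added for exposition, not part of the original source):
   a sketch of how a caller might use the mapping above.

     tree fn = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   yields the declaration recorded for BUILT_IN_SQRTF, or NULL_TREE when the
   runtime is not known to provide it, because TYPE_MODE of float_type_node
   selects the float variants.  */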
1649 /* If errno must be maintained, expand the RTL to check if the result,
1650 TARGET, of a built-in function call, EXP, is NaN, and if so set
1651 errno to EDOM. */
1652
1653 static void
1654 expand_errno_check (exp, target)
1655 tree exp;
1656 rtx target;
1657 {
1658 rtx lab;
1659
1660 if (flag_errno_math && HONOR_NANS (GET_MODE (target)))
1661 {
1662 lab = gen_label_rtx ();
1663
1664 /* Test the result; if it is NaN, set errno=EDOM because
1665 the argument was not in the domain. */
1666 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1667 0, lab);
1668
1669 #ifdef TARGET_EDOM
1670 {
1671 #ifdef GEN_ERRNO_RTX
1672 rtx errno_rtx = GEN_ERRNO_RTX;
1673 #else
1674 rtx errno_rtx
1675 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1676 #endif
1677
1678 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1679 }
1680 #else
1681 /* We can't set errno=EDOM directly; let the library call do it.
1682 Pop the arguments right away in case the call gets deleted. */
1683 NO_DEFER_POP;
1684 expand_call (exp, target, 0);
1685 OK_DEFER_POP;
1686 #endif
1687
1688 emit_label (lab);
1689 }
1690 }
1691
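/* Illustrative example (added for exposition, not part of the original
   source): the user-visible behavior the check above preserves, assuming
   flag_errno_math is in effect (the default, unless e.g. -fno-math-errno
   is given) and the target honors NaNs.

     double r;
     errno = 0;
     r = __builtin_sqrt (-1.0);    -- result is NaN
     -- errno is now EDOM, even if the square root was expanded inline

   Without the check, an inline square-root instruction would never touch
   errno.  */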
1692
1693 /* Expand a call to one of the builtin math functions (sin, cos, or sqrt).
1694 Return 0 if a normal call should be emitted rather than expanding the
1695 function in-line. EXP is the expression that is a call to the builtin
1696 function; if convenient, the result should be placed in TARGET.
1697 SUBTARGET may be used as the target for computing one of EXP's operands. */
1698
1699 static rtx
1700 expand_builtin_mathfn (exp, target, subtarget)
1701 tree exp;
1702 rtx target, subtarget;
1703 {
1704 optab builtin_optab;
1705 rtx op0, insns;
1706 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1707 tree arglist = TREE_OPERAND (exp, 1);
1708 enum machine_mode argmode;
1709 bool errno_set = true;
1710
1711 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
1712 return 0;
1713
1714 /* Stabilize and compute the argument. */
1715 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
1716 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
1717 {
1718 exp = copy_node (exp);
1719 TREE_OPERAND (exp, 1) = arglist;
1720 /* Wrap the computation of the argument in a SAVE_EXPR. That
1721 way, if we need to expand the argument again (as in the
1722 flag_errno_math case below where we cannot directly set
1723 errno), we will not perform side-effects more than once.
1724 Note that here we're mutating the original EXP as well as the
1725 copy; that's the right thing to do in case the original EXP
1726 is expanded later. */
1727 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
1728 arglist = copy_node (arglist);
1729 }
1730 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
1731
1732 /* Make a suitable register to place result in. */
1733 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
1734
1735 emit_queue ();
1736 start_sequence ();
1737
1738 switch (DECL_FUNCTION_CODE (fndecl))
1739 {
1740 case BUILT_IN_SIN:
1741 case BUILT_IN_SINF:
1742 case BUILT_IN_SINL:
1743 builtin_optab = sin_optab; break;
1744 case BUILT_IN_COS:
1745 case BUILT_IN_COSF:
1746 case BUILT_IN_COSL:
1747 builtin_optab = cos_optab; break;
1748 case BUILT_IN_SQRT:
1749 case BUILT_IN_SQRTF:
1750 case BUILT_IN_SQRTL:
1751 builtin_optab = sqrt_optab; break;
1752 case BUILT_IN_EXP:
1753 case BUILT_IN_EXPF:
1754 case BUILT_IN_EXPL:
1755 builtin_optab = exp_optab; break;
1756 case BUILT_IN_LOG:
1757 case BUILT_IN_LOGF:
1758 case BUILT_IN_LOGL:
1759 builtin_optab = log_optab; break;
1760 case BUILT_IN_FLOOR:
1761 case BUILT_IN_FLOORF:
1762 case BUILT_IN_FLOORL:
1763 errno_set = false; builtin_optab = floor_optab; break;
1764 case BUILT_IN_CEIL:
1765 case BUILT_IN_CEILF:
1766 case BUILT_IN_CEILL:
1767 errno_set = false; builtin_optab = ceil_optab; break;
1768 case BUILT_IN_TRUNC:
1769 case BUILT_IN_TRUNCF:
1770 case BUILT_IN_TRUNCL:
1771 errno_set = false; builtin_optab = trunc_optab; break;
1772 case BUILT_IN_ROUND:
1773 case BUILT_IN_ROUNDF:
1774 case BUILT_IN_ROUNDL:
1775 errno_set = false; builtin_optab = round_optab; break;
1776 case BUILT_IN_NEARBYINT:
1777 case BUILT_IN_NEARBYINTF:
1778 case BUILT_IN_NEARBYINTL:
1779 errno_set = false; builtin_optab = nearbyint_optab; break;
1780 default:
1781 abort ();
1782 }
1783
1784 /* Compute into TARGET.
1785 Set TARGET to wherever the result comes back. */
1786 argmode = TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist)));
1787 target = expand_unop (argmode, builtin_optab, op0, target, 0);
1788
1789 /* If we were unable to expand via the builtin, stop the
1790 sequence (without outputting the insns) and return 0, causing
1791 a call to the library function. */
1792 if (target == 0)
1793 {
1794 end_sequence ();
1795 return 0;
1796 }
1797
1798 if (errno_set)
1799 expand_errno_check (exp, target);
1800
1801 /* Output the entire sequence. */
1802 insns = get_insns ();
1803 end_sequence ();
1804 emit_insn (insns);
1805
1806 return target;
1807 }
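/* [Editorial illustration -- not part of builtins.c.]  Why the argument
   is wrapped in a SAVE_EXPR above: if the inline expansion also has to
   re-issue the call for the errno case, an argument with side effects
   must still be evaluated exactly once.  save_expr_demo is an invented
   name used only for this sketch.  */
#include <math.h>

double
save_expr_demo (double *a, int *i)
{
  /* The index increment is a side effect; the SAVE_EXPR guarantees it
     happens once even if both the inline sqrt and the errno-setting
     library fallback end up being emitted.  */
  return sqrt (a[(*i)++]);
}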
1808
1809 /* Expand a call to one of the builtin binary math functions (pow or atan2).
1810 Return 0 if a normal call should be emitted rather than expanding the
1811 function in-line. EXP is the expression that is a call to the builtin
1812 function; if convenient, the result should be placed in TARGET.
1813 SUBTARGET may be used as the target for computing one of EXP's
1814 operands. */
1815
1816 static rtx
1817 expand_builtin_mathfn_2 (exp, target, subtarget)
1818 tree exp;
1819 rtx target, subtarget;
1820 {
1821 optab builtin_optab;
1822 rtx op0, op1, insns;
1823 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1824 tree arglist = TREE_OPERAND (exp, 1);
1825 tree arg0, arg1;
1826 enum machine_mode argmode;
1827 bool errno_set = true;
1828 bool stable = true;
1829
1830 if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
1831 return 0;
1832
1833 arg0 = TREE_VALUE (arglist);
1834 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1835
1836 /* Stabilize the arguments. */
1837 if (TREE_CODE (arg0) != VAR_DECL && TREE_CODE (arg0) != PARM_DECL)
1838 {
1839 arg0 = save_expr (arg0);
1840 TREE_VALUE (arglist) = arg0;
1841 stable = false;
1842 }
1843 if (TREE_CODE (arg1) != VAR_DECL && TREE_CODE (arg1) != PARM_DECL)
1844 {
1845 arg1 = save_expr (arg1);
1846 TREE_VALUE (TREE_CHAIN (arglist)) = arg1;
1847 stable = false;
1848 }
1849
1850 if (! stable)
1851 {
1852 exp = copy_node (exp);
1853 arglist = tree_cons (NULL_TREE, arg0,
1854 build_tree_list (NULL_TREE, arg1));
1855 TREE_OPERAND (exp, 1) = arglist;
1856 }
1857
1858 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
1859 op1 = expand_expr (arg1, 0, VOIDmode, 0);
1860
1861 /* Make a suitable register to place result in. */
1862 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
1863
1864 emit_queue ();
1865 start_sequence ();
1866
1867 switch (DECL_FUNCTION_CODE (fndecl))
1868 {
1869 case BUILT_IN_POW:
1870 case BUILT_IN_POWF:
1871 case BUILT_IN_POWL:
1872 builtin_optab = pow_optab; break;
1873 case BUILT_IN_ATAN2:
1874 case BUILT_IN_ATAN2F:
1875 case BUILT_IN_ATAN2L:
1876 builtin_optab = atan2_optab; break;
1877 default:
1878 abort ();
1879 }
1880
1881 /* Compute into TARGET.
1882 Set TARGET to wherever the result comes back. */
1883 argmode = TYPE_MODE (TREE_TYPE (arg0));
1884 target = expand_binop (argmode, builtin_optab, op0, op1,
1885 target, 0, OPTAB_DIRECT);
1886
1887 /* If we were unable to expand via the builtin, stop the
1888 sequence (without outputting the insns) and return 0, causing
1889 a call to the library function. */
1890 if (target == 0)
1891 {
1892 end_sequence ();
1893 return 0;
1894 }
1895
1896 if (errno_set)
1897 expand_errno_check (exp, target);
1898
1899 /* Output the entire sequence. */
1900 insns = get_insns ();
1901 end_sequence ();
1902 emit_insn (insns);
1903
1904 return target;
1905 }
1906
1907 /* Expand expression EXP, which is a call to the strlen builtin.  Return 0
1908 if we failed; the caller should then emit a normal call.  Otherwise
1909 try to get the result in TARGET, if convenient. */
1910
1911 static rtx
1912 expand_builtin_strlen (arglist, target, target_mode)
1913 tree arglist;
1914 rtx target;
1915 enum machine_mode target_mode;
1916 {
1917 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
1918 return 0;
1919 else
1920 {
1921 rtx pat;
1922 tree len, src = TREE_VALUE (arglist);
1923 rtx result, src_reg, char_rtx, before_strlen;
1924 enum machine_mode insn_mode = target_mode, char_mode;
1925 enum insn_code icode = CODE_FOR_nothing;
1926 int align;
1927
1928 /* If the length can be computed at compile-time, return it. */
1929 len = c_strlen (src);
1930 if (len)
1931 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
1932
1933 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
1934
1935 /* If SRC is not a pointer type, don't do this operation inline. */
1936 if (align == 0)
1937 return 0;
1938
1939 /* Bail out if we can't compute strlen in the right mode. */
1940 while (insn_mode != VOIDmode)
1941 {
1942 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
1943 if (icode != CODE_FOR_nothing)
1944 break;
1945
1946 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
1947 }
1948 if (insn_mode == VOIDmode)
1949 return 0;
1950
1951 /* Make a place to write the result of the instruction. */
1952 result = target;
1953 if (! (result != 0
1954 && GET_CODE (result) == REG
1955 && GET_MODE (result) == insn_mode
1956 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
1957 result = gen_reg_rtx (insn_mode);
1958
1959 /* Make a place to hold the source address. We will not expand
1960 the actual source until we are sure that the expansion will
1961 not fail -- there are trees that cannot be expanded twice. */
1962 src_reg = gen_reg_rtx (Pmode);
1963
1964 /* Mark the beginning of the strlen sequence so we can emit the
1965 source operand later. */
1966 before_strlen = get_last_insn ();
1967
1968 char_rtx = const0_rtx;
1969 char_mode = insn_data[(int) icode].operand[2].mode;
1970 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
1971 char_mode))
1972 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
1973
1974 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
1975 char_rtx, GEN_INT (align));
1976 if (! pat)
1977 return 0;
1978 emit_insn (pat);
1979
1980 /* Now that we are assured of success, expand the source. */
1981 start_sequence ();
1982 pat = memory_address (BLKmode,
1983 expand_expr (src, src_reg, ptr_mode, EXPAND_SUM));
1984 if (pat != src_reg)
1985 emit_move_insn (src_reg, pat);
1986 pat = get_insns ();
1987 end_sequence ();
1988
1989 if (before_strlen)
1990 emit_insn_after (pat, before_strlen);
1991 else
1992 emit_insn_before (pat, get_insns ());
1993
1994 /* Return the value in the proper mode for this function. */
1995 if (GET_MODE (result) == target_mode)
1996 target = result;
1997 else if (target != 0)
1998 convert_move (target, result, 0);
1999 else
2000 target = convert_to_mode (target_mode, result, 0);
2001
2002 return target;
2003 }
2004 }
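/* [Editorial illustration -- not part of builtins.c.]  The two cases the
   expander above distinguishes, seen from the source level: a constant
   argument is folded by c_strlen, anything else goes through a strlenM
   pattern (if the target has one) or a real library call.
   strlen_fold_demo is an invented name.  */
#include <string.h>

size_t
strlen_fold_demo (const char *s)
{
  size_t a = strlen ("hello");  /* c_strlen yields the constant 5 */
  size_t b = strlen (s);        /* expanded via strlen_optab, else called */
  return a + b;
}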
2005
2006 /* Expand a call to the strstr builtin.  Return 0 if we failed; the
2007 caller should then emit a normal call.  Otherwise try to get the result
2008 in TARGET, if convenient (and in mode MODE if that's convenient). */
2009
2010 static rtx
2011 expand_builtin_strstr (arglist, target, mode)
2012 tree arglist;
2013 rtx target;
2014 enum machine_mode mode;
2015 {
2016 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2017 return 0;
2018 else
2019 {
2020 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2021 tree fn;
2022 const char *p1, *p2;
2023
2024 p2 = c_getstr (s2);
2025 if (p2 == NULL)
2026 return 0;
2027
2028 p1 = c_getstr (s1);
2029 if (p1 != NULL)
2030 {
2031 const char *r = strstr (p1, p2);
2032
2033 if (r == NULL)
2034 return const0_rtx;
2035
2036 /* Return an offset into the constant string argument. */
2037 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2038 s1, ssize_int (r - p1))),
2039 target, mode, EXPAND_NORMAL);
2040 }
2041
2042 if (p2[0] == '\0')
2043 return expand_expr (s1, target, mode, EXPAND_NORMAL);
2044
2045 if (p2[1] != '\0')
2046 return 0;
2047
2048 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2049 if (!fn)
2050 return 0;
2051
2052 /* New argument list transforming strstr(s1, s2) to
2053 strchr(s1, s2[0]). */
2054 arglist =
2055 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
2056 arglist = tree_cons (NULL_TREE, s1, arglist);
2057 return expand_expr (build_function_call_expr (fn, arglist),
2058 target, mode, EXPAND_NORMAL);
2059 }
2060 }
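/* [Editorial illustration -- not part of builtins.c.]  Source-level view
   of the strstr foldings above, assuming the usual <string.h> semantics;
   strstr_fold_demo is an invented name.  */
#include <string.h>

const char *
strstr_fold_demo (const char *s)
{
  const char *a = strstr (s, "");         /* empty needle: folds to S itself   */
  const char *b = strstr (s, "x");        /* one-char needle: strchr (s, 'x')  */
  const char *c = strstr ("abcde", "cd"); /* both constant: folds to "abcde"+2 */
  return (a && b) ? c : a;
}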
2061
2062 /* Expand a call to the strchr builtin.  Return 0 if we failed; the
2063 caller should then emit a normal call.  Otherwise try to get the result
2064 in TARGET, if convenient (and in mode MODE if that's convenient). */
2065
2066 static rtx
2067 expand_builtin_strchr (arglist, target, mode)
2068 tree arglist;
2069 rtx target;
2070 enum machine_mode mode;
2071 {
2072 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2073 return 0;
2074 else
2075 {
2076 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2077 const char *p1;
2078
2079 if (TREE_CODE (s2) != INTEGER_CST)
2080 return 0;
2081
2082 p1 = c_getstr (s1);
2083 if (p1 != NULL)
2084 {
2085 char c;
2086 const char *r;
2087
2088 if (target_char_cast (s2, &c))
2089 return 0;
2090
2091 r = strchr (p1, c);
2092
2093 if (r == NULL)
2094 return const0_rtx;
2095
2096 /* Return an offset into the constant string argument. */
2097 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2098 s1, ssize_int (r - p1))),
2099 target, mode, EXPAND_NORMAL);
2100 }
2101
2102 /* FIXME: Should use the strchrM optab here so that ports can optimize
2103 this. */
2104 return 0;
2105 }
2106 }
2107
2108 /* Expand a call to the strrchr builtin.  Return 0 if we failed; the
2109 caller should then emit a normal call.  Otherwise try to get the result
2110 in TARGET, if convenient (and in mode MODE if that's convenient). */
2111
2112 static rtx
2113 expand_builtin_strrchr (arglist, target, mode)
2114 tree arglist;
2115 rtx target;
2116 enum machine_mode mode;
2117 {
2118 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2119 return 0;
2120 else
2121 {
2122 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2123 tree fn;
2124 const char *p1;
2125
2126 if (TREE_CODE (s2) != INTEGER_CST)
2127 return 0;
2128
2129 p1 = c_getstr (s1);
2130 if (p1 != NULL)
2131 {
2132 char c;
2133 const char *r;
2134
2135 if (target_char_cast (s2, &c))
2136 return 0;
2137
2138 r = strrchr (p1, c);
2139
2140 if (r == NULL)
2141 return const0_rtx;
2142
2143 /* Return an offset into the constant string argument. */
2144 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2145 s1, ssize_int (r - p1))),
2146 target, mode, EXPAND_NORMAL);
2147 }
2148
2149 if (! integer_zerop (s2))
2150 return 0;
2151
2152 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2153 if (!fn)
2154 return 0;
2155
2156 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
2157 return expand_expr (build_function_call_expr (fn, arglist),
2158 target, mode, EXPAND_NORMAL);
2159 }
2160 }
2161
2162 /* Expand a call to the strpbrk builtin.  Return 0 if we failed; the
2163 caller should then emit a normal call.  Otherwise try to get the result
2164 in TARGET, if convenient (and in mode MODE if that's convenient). */
2165
2166 static rtx
2167 expand_builtin_strpbrk (arglist, target, mode)
2168 tree arglist;
2169 rtx target;
2170 enum machine_mode mode;
2171 {
2172 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2173 return 0;
2174 else
2175 {
2176 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2177 tree fn;
2178 const char *p1, *p2;
2179
2180 p2 = c_getstr (s2);
2181 if (p2 == NULL)
2182 return 0;
2183
2184 p1 = c_getstr (s1);
2185 if (p1 != NULL)
2186 {
2187 const char *r = strpbrk (p1, p2);
2188
2189 if (r == NULL)
2190 return const0_rtx;
2191
2192 /* Return an offset into the constant string argument. */
2193 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2194 s1, ssize_int (r - p1))),
2195 target, mode, EXPAND_NORMAL);
2196 }
2197
2198 if (p2[0] == '\0')
2199 {
2200 /* strpbrk(x, "") == NULL.
2201 Evaluate and ignore the arguments in case they had
2202 side-effects. */
2203 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2204 return const0_rtx;
2205 }
2206
2207 if (p2[1] != '\0')
2208 return 0; /* Really call strpbrk. */
2209
2210 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2211 if (!fn)
2212 return 0;
2213
2214 /* New argument list transforming strpbrk(s1, s2) to
2215 strchr(s1, s2[0]). */
2216 arglist =
2217 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
2218 arglist = tree_cons (NULL_TREE, s1, arglist);
2219 return expand_expr (build_function_call_expr (fn, arglist),
2220 target, mode, EXPAND_NORMAL);
2221 }
2222 }
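/* [Editorial illustration -- not part of builtins.c.]  The strpbrk
   foldings above at the source level; strpbrk_fold_demo is an invented
   name.  */
#include <string.h>

const char *
strpbrk_fold_demo (const char *s)
{
  const char *a = strpbrk (s, "");   /* always NULL; S is still evaluated      */
  const char *b = strpbrk (s, "x");  /* one-char set: becomes strchr (s, 'x')  */
  return a ? a : b;
}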
2223
2224 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2225 bytes from constant string DATA + OFFSET and return it as target
2226 constant. */
2227
2228 static rtx
2229 builtin_memcpy_read_str (data, offset, mode)
2230 PTR data;
2231 HOST_WIDE_INT offset;
2232 enum machine_mode mode;
2233 {
2234 const char *str = (const char *) data;
2235
2236 if (offset < 0
2237 || ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2238 > strlen (str) + 1))
2239 abort (); /* Attempt to read past the end of constant string. */
2240
2241 return c_readstr (str + offset, mode);
2242 }
2243
2244 /* Expand a call to the memcpy builtin, with arguments in ARGLIST.
2245 Return 0 if we failed; the caller should then emit a normal call.
2246 Otherwise try to get the result in TARGET, if convenient (and in
2247 mode MODE if that's convenient).  If ENDP is 0, return the
2248 destination pointer; if ENDP is 1, return the end pointer, as
2249 mempcpy does; and if ENDP is 2, return the end pointer minus one,
2250 as stpcpy does. */
2251 static rtx
2252 expand_builtin_memcpy (arglist, target, mode, endp)
2253 tree arglist;
2254 rtx target;
2255 enum machine_mode mode;
2256 int endp;
2257 {
2258 if (!validate_arglist (arglist,
2259 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2260 return 0;
2261 else
2262 {
2263 tree dest = TREE_VALUE (arglist);
2264 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2265 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2266 const char *src_str;
2267
2268 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2269 unsigned int dest_align
2270 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2271 rtx dest_mem, src_mem, dest_addr, len_rtx;
2272
2273 /* If DEST is not a pointer type, call the normal function. */
2274 if (dest_align == 0)
2275 return 0;
2276
2277 /* If the LEN parameter is zero, return DEST. */
2278 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2279 {
2280 /* Evaluate and ignore SRC in case it has side-effects. */
2281 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2282 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2283 }
2284
2285 /* If SRC is not a pointer type, don't do this
2286 operation in-line. */
2287 if (src_align == 0)
2288 return 0;
2289
2290 dest_mem = get_memory_rtx (dest);
2291 set_mem_align (dest_mem, dest_align);
2292 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2293 src_str = c_getstr (src);
2294
2295 /* If SRC is a string constant and block move would be done
2296 by pieces, we can avoid loading the string from memory
2297 and only store the computed constants. */
2298 if (src_str
2299 && GET_CODE (len_rtx) == CONST_INT
2300 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2301 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2302 (PTR) src_str, dest_align))
2303 {
2304 store_by_pieces (dest_mem, INTVAL (len_rtx),
2305 builtin_memcpy_read_str,
2306 (PTR) src_str, dest_align);
2307 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2308 #ifdef POINTERS_EXTEND_UNSIGNED
2309 if (GET_MODE (dest_mem) != ptr_mode)
2310 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2311 #endif
2312 if (endp)
2313 {
2314 rtx result = gen_rtx_PLUS (GET_MODE (dest_mem), dest_mem, len_rtx);
2315 if (endp == 2)
2316 result = simplify_gen_binary (MINUS, GET_MODE (result), result, const1_rtx);
2317 return result;
2318 }
2319 else
2320 return dest_mem;
2321 }
2322
2323 src_mem = get_memory_rtx (src);
2324 set_mem_align (src_mem, src_align);
2325
2326 /* Copy word part most expediently. */
2327 dest_addr = emit_block_move (dest_mem, src_mem, len_rtx,
2328 BLOCK_OP_NORMAL);
2329
2330 if (dest_addr == 0)
2331 {
2332 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2333 #ifdef POINTERS_EXTEND_UNSIGNED
2334 if (GET_MODE (dest_addr) != ptr_mode)
2335 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2336 #endif
2337 }
2338
2339 if (endp)
2340 {
2341 rtx result = gen_rtx_PLUS (GET_MODE (dest_addr), dest_addr, len_rtx);
2342 if (endp == 2)
2343 result = simplify_gen_binary (MINUS, GET_MODE (result), result, const1_rtx);
2344 return result;
2345 }
2346 else
2347 return dest_addr;
2348 }
2349 }
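/* [Editorial illustration -- not part of builtins.c.]  What the three
   ENDP values above mean for the returned pointer, written out with
   plain library calls; endp_demo is an invented name.  */
#include <string.h>

void
endp_demo (char *dst, const char *src, size_t n)
{
  char *p0, *p1, *p2;

  p0 = memcpy (dst, src, n);                /* ENDP == 0: destination (memcpy)   */
  p1 = (char *) memcpy (dst, src, n) + n;   /* ENDP == 1: end pointer (mempcpy)  */
  p2 = p1 - 1;                              /* ENDP == 2: end minus one (stpcpy) */
  (void) p0; (void) p2;
}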
2350
2351 /* Expand a call to the mempcpy builtin, with arguments in ARGLIST.
2352 Return 0 if we failed; the caller should then emit a normal call.
2353 Otherwise try to get the result in TARGET, if convenient (and in
2354 mode MODE if that's convenient). */
2355
2356 static rtx
2357 expand_builtin_mempcpy (arglist, target, mode)
2358 tree arglist;
2359 rtx target;
2360 enum machine_mode mode;
2361 {
2362 if (!validate_arglist (arglist,
2363 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2364 return 0;
2365 else
2366 {
2367 /* If return value is ignored, transform mempcpy into memcpy. */
2368 if (target == const0_rtx)
2369 {
2370 tree fn;
2371 rtx ret = expand_builtin_memcpy (arglist, target, mode, /*endp=*/0);
2372
2373 if (ret)
2374 return ret;
2375
2376 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2377 if (!fn)
2378 return 0;
2379
2380 return expand_expr (build_function_call_expr (fn, arglist),
2381 target, mode, EXPAND_NORMAL);
2382 }
2383
2384 return expand_builtin_memcpy (arglist, target, mode, /*endp=*/1);
2385 }
2386 }
2387
2388 /* Expand expression EXP, which is a call to the memmove builtin. Return 0
2389 if we failed; the caller should then emit a normal call. */
2390
2391 static rtx
2392 expand_builtin_memmove (arglist, target, mode)
2393 tree arglist;
2394 rtx target;
2395 enum machine_mode mode;
2396 {
2397 if (!validate_arglist (arglist,
2398 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2399 return 0;
2400 else
2401 {
2402 tree dest = TREE_VALUE (arglist);
2403 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2404 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2405
2406 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2407 unsigned int dest_align
2408 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2409
2410 /* If DEST is not a pointer type, call the normal function. */
2411 if (dest_align == 0)
2412 return 0;
2413
2414 /* If the LEN parameter is zero, return DEST. */
2415 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2416 {
2417 /* Evaluate and ignore SRC in case it has side-effects. */
2418 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2419 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2420 }
2421
2422 /* If SRC is not a pointer type, don't do this
2423 operation in-line. */
2424 if (src_align == 0)
2425 return 0;
2426
2427 /* If src is categorized for a readonly section we can use
2428 normal memcpy. */
2429 if (readonly_data_expr (src))
2430 {
2431 tree const fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2432 if (!fn)
2433 return 0;
2434 return expand_expr (build_function_call_expr (fn, arglist),
2435 target, mode, EXPAND_NORMAL);
2436 }
2437
2438 /* Otherwise, call the normal function. */
2439 return 0;
2440 }
2441 }
2442
2443 /* Expand expression EXP, which is a call to the bcopy builtin. Return 0
2444 if we failed; the caller should then emit a normal call. */
2445
2446 static rtx
2447 expand_builtin_bcopy (arglist)
2448 tree arglist;
2449 {
2450 tree src, dest, size, newarglist;
2451
2452 if (!validate_arglist (arglist,
2453 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2454 return NULL_RTX;
2455
2456 src = TREE_VALUE (arglist);
2457 dest = TREE_VALUE (TREE_CHAIN (arglist));
2458 size = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2459
2460 /* New argument list transforming bcopy(ptr x, ptr y, int z) to
2461 memmove(ptr y, ptr x, size_t z). This is done this way
2462 so that if it isn't expanded inline, we fall back to
2463 calling bcopy instead of memmove. */
2464
2465 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
2466 newarglist = tree_cons (NULL_TREE, src, newarglist);
2467 newarglist = tree_cons (NULL_TREE, dest, newarglist);
2468
2469 return expand_builtin_memmove (newarglist, const0_rtx, VOIDmode);
2470 }
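/* [Editorial illustration -- not part of builtins.c.]  The argument
   rewrite performed above: bcopy takes the source first, memmove takes
   the destination first; bcopy_demo is an invented name.  */
#include <string.h>

void
bcopy_demo (void *from, void *to, size_t n)
{
  /* bcopy (from, to, n) is expanded as the equivalent of: */
  memmove (to, from, n);
}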
2471
2472 /* Expand expression EXP, which is a call to the strcpy builtin. Return 0
2473 if we failed; the caller should then emit a normal call.  Otherwise try to get
2474 the result in TARGET, if convenient (and in mode MODE if that's
2475 convenient). */
2476
2477 static rtx
2478 expand_builtin_strcpy (arglist, target, mode)
2479 tree arglist;
2480 rtx target;
2481 enum machine_mode mode;
2482 {
2483 tree fn, len;
2484
2485 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2486 return 0;
2487
2488 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2489 if (!fn)
2490 return 0;
2491
2492 len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
2493 if (len == 0)
2494 return 0;
2495
2496 len = size_binop (PLUS_EXPR, len, ssize_int (1));
2497 chainon (arglist, build_tree_list (NULL_TREE, len));
2498 return expand_expr (build_function_call_expr (fn, arglist),
2499 target, mode, EXPAND_NORMAL);
2500 }
2501
2502 /* Expand a call to the stpcpy builtin, with arguments in ARGLIST.
2503 Return 0 if we failed; the caller should then emit a normal call.
2504 Otherwise try to get the result in TARGET, if convenient (and in
2505 mode MODE if that's convenient). */
2506
2507 static rtx
2508 expand_builtin_stpcpy (arglist, target, mode)
2509 tree arglist;
2510 rtx target;
2511 enum machine_mode mode;
2512 {
2513 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2514 return 0;
2515 else
2516 {
2517 tree newarglist;
2518 tree src, len;
2519
2520 /* If return value is ignored, transform stpcpy into strcpy. */
2521 if (target == const0_rtx)
2522 {
2523 tree fn;
2524 rtx ret = expand_builtin_strcpy (arglist, target, mode);
2525
2526 if (ret)
2527 return ret;
2528
2529 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
2530 if (!fn)
2531 return 0;
2532
2533 return expand_expr (build_function_call_expr (fn, arglist),
2534 target, mode, EXPAND_NORMAL);
2535 }
2536
2537 /* Ensure we get an actual string whose length can be evaluated at
2538 compile-time, not an expression containing a string. This is
2539 because the latter will potentially produce pessimized code
2540 when used to produce the return value. */
2541 src = TREE_VALUE (TREE_CHAIN (arglist));
2542 if (! c_getstr (src) || ! (len = c_strlen (src)))
2543 return 0;
2544
2545 len = fold (size_binop (PLUS_EXPR, len, ssize_int (1)));
2546 newarglist = copy_list (arglist);
2547 chainon (newarglist, build_tree_list (NULL_TREE, len));
2548 return expand_builtin_memcpy (newarglist, target, mode, /*endp=*/2);
2549 }
2550 }
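/* [Editorial illustration -- not part of builtins.c.]  The stpcpy
   rewrite above for a constant source, spelled out: the call becomes a
   memcpy of strlen (src) + 1 bytes with ENDP == 2, so the result points
   at the copied terminating NUL; stpcpy_demo is an invented name.  */
#include <string.h>

char *
stpcpy_demo (char *dst)         /* dst must have room for 4 bytes */
{
  memcpy (dst, "abc", 4);       /* 3 characters plus the NUL */
  return dst + 3;               /* what stpcpy (dst, "abc") returns */
}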
2551
2552 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2553 bytes from constant string DATA + OFFSET and return it as target
2554 constant. */
2555
2556 static rtx
2557 builtin_strncpy_read_str (data, offset, mode)
2558 PTR data;
2559 HOST_WIDE_INT offset;
2560 enum machine_mode mode;
2561 {
2562 const char *str = (const char *) data;
2563
2564 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
2565 return const0_rtx;
2566
2567 return c_readstr (str + offset, mode);
2568 }
2569
2570 /* Expand expression EXP, which is a call to the strncpy builtin. Return 0
2571 if we failed; the caller should then emit a normal call. */
2572
2573 static rtx
2574 expand_builtin_strncpy (arglist, target, mode)
2575 tree arglist;
2576 rtx target;
2577 enum machine_mode mode;
2578 {
2579 if (!validate_arglist (arglist,
2580 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2581 return 0;
2582 else
2583 {
2584 tree slen = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
2585 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2586 tree fn;
2587
2588 /* We must be passed a constant len parameter. */
2589 if (TREE_CODE (len) != INTEGER_CST)
2590 return 0;
2591
2592 /* If the len parameter is zero, return the dst parameter. */
2593 if (integer_zerop (len))
2594 {
2595 /* Evaluate and ignore the src argument in case it has
2596 side-effects. */
2597 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
2598 VOIDmode, EXPAND_NORMAL);
2599 /* Return the dst parameter. */
2600 return expand_expr (TREE_VALUE (arglist), target, mode,
2601 EXPAND_NORMAL);
2602 }
2603
2604 /* Now, we must be passed a constant src ptr parameter. */
2605 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
2606 return 0;
2607
2608 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
2609
2610 /* We're required to pad with trailing zeros if the requested
2611 len is greater than strlen(s2)+1. In that case try to
2612 use store_by_pieces; if that fails, punt. */
2613 if (tree_int_cst_lt (slen, len))
2614 {
2615 tree dest = TREE_VALUE (arglist);
2616 unsigned int dest_align
2617 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2618 const char *p = c_getstr (TREE_VALUE (TREE_CHAIN (arglist)));
2619 rtx dest_mem;
2620
2621 if (!p || dest_align == 0 || !host_integerp (len, 1)
2622 || !can_store_by_pieces (tree_low_cst (len, 1),
2623 builtin_strncpy_read_str,
2624 (PTR) p, dest_align))
2625 return 0;
2626
2627 dest_mem = get_memory_rtx (dest);
2628 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2629 builtin_strncpy_read_str,
2630 (PTR) p, dest_align);
2631 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2632 #ifdef POINTERS_EXTEND_UNSIGNED
2633 if (GET_MODE (dest_mem) != ptr_mode)
2634 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2635 #endif
2636 return dest_mem;
2637 }
2638
2639 /* OK, transform into the builtin memcpy. */
2640 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2641 if (!fn)
2642 return 0;
2643 return expand_expr (build_function_call_expr (fn, arglist),
2644 target, mode, EXPAND_NORMAL);
2645 }
2646 }
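/* [Editorial illustration -- not part of builtins.c.]  The padding case
   handled above: when LEN exceeds strlen (src) + 1, strncpy must fill
   the remainder with zeros.  For strncpy (dst, "ab", 8) the stores
   emitted by store_by_pieces amount to the following; strncpy_pad_demo
   is an invented name and dst is assumed to hold at least 8 bytes.  */
#include <string.h>

void
strncpy_pad_demo (char *dst)
{
  memcpy (dst, "ab", 3);        /* "ab" and its terminating NUL */
  memset (dst + 3, 0, 5);       /* zero-pad the remaining 5 of 8 bytes */
}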
2647
2648 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2649 bytes from constant string DATA + OFFSET and return it as target
2650 constant. */
2651
2652 static rtx
2653 builtin_memset_read_str (data, offset, mode)
2654 PTR data;
2655 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2656 enum machine_mode mode;
2657 {
2658 const char *c = (const char *) data;
2659 char *p = alloca (GET_MODE_SIZE (mode));
2660
2661 memset (p, *c, GET_MODE_SIZE (mode));
2662
2663 return c_readstr (p, mode);
2664 }
2665
2666 /* Callback routine for store_by_pieces. Return the RTL of a register
2667 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
2668 char value given in the RTL register data. For example, if mode is
2669 4 bytes wide, return the RTL for 0x01010101*data. */
2670
2671 static rtx
2672 builtin_memset_gen_str (data, offset, mode)
2673 PTR data;
2674 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2675 enum machine_mode mode;
2676 {
2677 rtx target, coeff;
2678 size_t size;
2679 char *p;
2680
2681 size = GET_MODE_SIZE (mode);
2682 if (size == 1)
2683 return (rtx) data;
2684
2685 p = alloca (size);
2686 memset (p, 1, size);
2687 coeff = c_readstr (p, mode);
2688
2689 target = convert_to_mode (mode, (rtx) data, 1);
2690 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
2691 return force_reg (mode, target);
2692 }
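/* [Editorial illustration -- not part of builtins.c.]  The coefficient
   trick used above, worked through for a 4-byte mode: multiplying the
   zero-extended byte by 0x01010101 replicates it into every byte of the
   word; memset_coeff_demo is an invented name.  */
#include <stdint.h>

uint32_t
memset_coeff_demo (unsigned char c)
{
  /* e.g. c == 0xAB gives 0xABABABAB. */
  return (uint32_t) c * 0x01010101u;
}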
2693
2694 /* Expand expression EXP, which is a call to the memset builtin. Return 0
2695 if we failed; the caller should then emit a normal call.  Otherwise try to get
2696 the result in TARGET, if convenient (and in mode MODE if that's
2697 convenient). */
2698
2699 static rtx
2700 expand_builtin_memset (arglist, target, mode)
2701 tree arglist;
2702 rtx target;
2703 enum machine_mode mode;
2704 {
2705 if (!validate_arglist (arglist,
2706 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
2707 return 0;
2708 else
2709 {
2710 tree dest = TREE_VALUE (arglist);
2711 tree val = TREE_VALUE (TREE_CHAIN (arglist));
2712 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2713 char c;
2714
2715 unsigned int dest_align
2716 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2717 rtx dest_mem, dest_addr, len_rtx;
2718
2719 /* If DEST is not a pointer type, don't do this
2720 operation in-line. */
2721 if (dest_align == 0)
2722 return 0;
2723
2724 /* If the LEN parameter is zero, return DEST. */
2725 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2726 {
2727 /* Evaluate and ignore VAL in case it has side-effects. */
2728 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
2729 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2730 }
2731
2732 if (TREE_CODE (val) != INTEGER_CST)
2733 {
2734 rtx val_rtx;
2735
2736 if (!host_integerp (len, 1))
2737 return 0;
2738
2739 if (optimize_size && tree_low_cst (len, 1) > 1)
2740 return 0;
2741
2742 /* Assume that we can memset by pieces if we can store the
2743 * coefficients by pieces (in the required modes).
2744 * We can't pass builtin_memset_gen_str as that emits RTL. */
2745 c = 1;
2746 if (!can_store_by_pieces (tree_low_cst (len, 1),
2747 builtin_memset_read_str,
2748 (PTR) &c, dest_align))
2749 return 0;
2750
2751 val = fold (build1 (CONVERT_EXPR, unsigned_char_type_node, val));
2752 val_rtx = expand_expr (val, NULL_RTX, VOIDmode, 0);
2753 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
2754 val_rtx);
2755 dest_mem = get_memory_rtx (dest);
2756 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2757 builtin_memset_gen_str,
2758 (PTR) val_rtx, dest_align);
2759 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2760 #ifdef POINTERS_EXTEND_UNSIGNED
2761 if (GET_MODE (dest_mem) != ptr_mode)
2762 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2763 #endif
2764 return dest_mem;
2765 }
2766
2767 if (target_char_cast (val, &c))
2768 return 0;
2769
2770 if (c)
2771 {
2772 if (!host_integerp (len, 1))
2773 return 0;
2774 if (!can_store_by_pieces (tree_low_cst (len, 1),
2775 builtin_memset_read_str, (PTR) &c,
2776 dest_align))
2777 return 0;
2778
2779 dest_mem = get_memory_rtx (dest);
2780 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2781 builtin_memset_read_str,
2782 (PTR) &c, dest_align);
2783 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2784 #ifdef POINTERS_EXTEND_UNSIGNED
2785 if (GET_MODE (dest_mem) != ptr_mode)
2786 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2787 #endif
2788 return dest_mem;
2789 }
2790
2791 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2792
2793 dest_mem = get_memory_rtx (dest);
2794 set_mem_align (dest_mem, dest_align);
2795 dest_addr = clear_storage (dest_mem, len_rtx);
2796
2797 if (dest_addr == 0)
2798 {
2799 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2800 #ifdef POINTERS_EXTEND_UNSIGNED
2801 if (GET_MODE (dest_addr) != ptr_mode)
2802 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2803 #endif
2804 }
2805
2806 return dest_addr;
2807 }
2808 }
2809
2810 /* Expand expression EXP, which is a call to the bzero builtin. Return 0
2811 if we failed; the caller should then emit a normal call. */
2812
2813 static rtx
2814 expand_builtin_bzero (arglist)
2815 tree arglist;
2816 {
2817 tree dest, size, newarglist;
2818
2819 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2820 return NULL_RTX;
2821
2822 dest = TREE_VALUE (arglist);
2823 size = TREE_VALUE (TREE_CHAIN (arglist));
2824
2825 /* New argument list transforming bzero(ptr x, int y) to
2826 memset(ptr x, int 0, size_t y). This is done this way
2827 so that if it isn't expanded inline, we fall back to
2828 calling bzero instead of memset. */
2829
2830 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
2831 newarglist = tree_cons (NULL_TREE, integer_zero_node, newarglist);
2832 newarglist = tree_cons (NULL_TREE, dest, newarglist);
2833
2834 return expand_builtin_memset (newarglist, const0_rtx, VOIDmode);
2835 }
2836
2837 /* Expand expression EXP, which is a call to the memcmp built-in function.
2838 ARGLIST is the argument list for this call.  Return 0 if we failed; the
2839 caller should then emit a normal call.  Otherwise try to get the result in
2840 TARGET, if convenient (and in mode MODE, if that's convenient). */
2841
2842 static rtx
2843 expand_builtin_memcmp (exp, arglist, target, mode)
2844 tree exp ATTRIBUTE_UNUSED;
2845 tree arglist;
2846 rtx target;
2847 enum machine_mode mode;
2848 {
2849 tree arg1, arg2, len;
2850 const char *p1, *p2;
2851
2852 if (!validate_arglist (arglist,
2853 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2854 return 0;
2855
2856 arg1 = TREE_VALUE (arglist);
2857 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2858 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2859
2860 /* If the len parameter is zero, return zero. */
2861 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2862 {
2863 /* Evaluate and ignore arg1 and arg2 in case they have
2864 side-effects. */
2865 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2866 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2867 return const0_rtx;
2868 }
2869
2870 p1 = c_getstr (arg1);
2871 p2 = c_getstr (arg2);
2872
2873 /* If all arguments are constant, and the value of len is not greater
2874 than the lengths of arg1 and arg2, evaluate at compile-time. */
2875 if (host_integerp (len, 1) && p1 && p2
2876 && compare_tree_int (len, strlen (p1) + 1) <= 0
2877 && compare_tree_int (len, strlen (p2) + 1) <= 0)
2878 {
2879 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
2880
2881 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
2882 }
2883
2884 /* If the len parameter is one, return an expression corresponding to
2885 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2886 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
2887 {
2888 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2889 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2890 tree ind1 =
2891 fold (build1 (CONVERT_EXPR, integer_type_node,
2892 build1 (INDIRECT_REF, cst_uchar_node,
2893 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2894 tree ind2 =
2895 fold (build1 (CONVERT_EXPR, integer_type_node,
2896 build1 (INDIRECT_REF, cst_uchar_node,
2897 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2898 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2899 return expand_expr (result, target, mode, EXPAND_NORMAL);
2900 }
2901
2902 #ifdef HAVE_cmpstrsi
2903 {
2904 rtx arg1_rtx, arg2_rtx, arg3_rtx;
2905 rtx result;
2906 rtx insn;
2907
2908 int arg1_align
2909 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2910 int arg2_align
2911 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2912 enum machine_mode insn_mode
2913 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
2914
2915 /* If we don't have POINTER_TYPE, call the function. */
2916 if (arg1_align == 0 || arg2_align == 0)
2917 return 0;
2918
2919 /* Make a place to write the result of the instruction. */
2920 result = target;
2921 if (! (result != 0
2922 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
2923 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
2924 result = gen_reg_rtx (insn_mode);
2925
2926 arg1_rtx = get_memory_rtx (arg1);
2927 arg2_rtx = get_memory_rtx (arg2);
2928 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2929 if (!HAVE_cmpstrsi)
2930 insn = NULL_RTX;
2931 else
2932 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
2933 GEN_INT (MIN (arg1_align, arg2_align)));
2934
2935 if (insn)
2936 emit_insn (insn);
2937 else
2938 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
2939 TYPE_MODE (integer_type_node), 3,
2940 XEXP (arg1_rtx, 0), Pmode,
2941 XEXP (arg2_rtx, 0), Pmode,
2942 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
2943 TREE_UNSIGNED (sizetype)),
2944 TYPE_MODE (sizetype));
2945
2946 /* Return the value in the proper mode for this function. */
2947 mode = TYPE_MODE (TREE_TYPE (exp));
2948 if (GET_MODE (result) == mode)
2949 return result;
2950 else if (target != 0)
2951 {
2952 convert_move (target, result, 0);
2953 return target;
2954 }
2955 else
2956 return convert_to_mode (mode, result, 0);
2957 }
2958 #endif
2959
2960 return 0;
2961 }
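/* [Editorial illustration -- not part of builtins.c.]  The memcmp
   foldings above at the source level; memcmp_fold_demo is an invented
   name.  */
#include <string.h>

int
memcmp_fold_demo (const void *p, const void *q)
{
  int a = memcmp (p, q, 0);          /* length 0: folds to 0, P and Q still
                                        evaluated for side effects           */
  int b = memcmp ("abc", "abd", 3);  /* all constant: computed at compile
                                        time (negative here)                 */
  int c = memcmp (p, q, 1);          /* length 1: becomes the byte difference
                                        *(const unsigned char *) p
                                        - *(const unsigned char *) q         */
  return a + b + c;
}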
2962
2963 /* Expand expression EXP, which is a call to the strcmp builtin. Return 0
2964 if we failed; the caller should then emit a normal call.  Otherwise try to get
2965 the result in TARGET, if convenient. */
2966
2967 static rtx
2968 expand_builtin_strcmp (exp, target, mode)
2969 tree exp;
2970 rtx target;
2971 enum machine_mode mode;
2972 {
2973 tree arglist = TREE_OPERAND (exp, 1);
2974 tree arg1, arg2;
2975 const char *p1, *p2;
2976
2977 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2978 return 0;
2979
2980 arg1 = TREE_VALUE (arglist);
2981 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2982
2983 p1 = c_getstr (arg1);
2984 p2 = c_getstr (arg2);
2985
2986 if (p1 && p2)
2987 {
2988 const int i = strcmp (p1, p2);
2989 return (i < 0 ? constm1_rtx : (i > 0 ? const1_rtx : const0_rtx));
2990 }
2991
2992 /* If either arg is "", return an expression corresponding to
2993 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2994 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
2995 {
2996 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2997 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2998 tree ind1 =
2999 fold (build1 (CONVERT_EXPR, integer_type_node,
3000 build1 (INDIRECT_REF, cst_uchar_node,
3001 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
3002 tree ind2 =
3003 fold (build1 (CONVERT_EXPR, integer_type_node,
3004 build1 (INDIRECT_REF, cst_uchar_node,
3005 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
3006 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
3007 return expand_expr (result, target, mode, EXPAND_NORMAL);
3008 }
3009
3010 #ifdef HAVE_cmpstrsi
3011 if (HAVE_cmpstrsi)
3012 {
3013 tree len, len1, len2;
3014 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3015 rtx result, insn;
3016
3017 int arg1_align
3018 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3019 int arg2_align
3020 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3021 enum machine_mode insn_mode
3022 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3023
3024 len1 = c_strlen (arg1);
3025 len2 = c_strlen (arg2);
3026
3027 if (len1)
3028 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3029 if (len2)
3030 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3031
3032 /* If we don't have a constant length for the first, use the length
3033 of the second, if we know it. We don't require a constant for
3034 this case; some cost analysis could be done if both are available
3035 but neither is constant. For now, assume they're equally cheap,
3036 unless one has side effects. If both strings have constant lengths,
3037 use the smaller. */
3038
3039 if (!len1)
3040 len = len2;
3041 else if (!len2)
3042 len = len1;
3043 else if (TREE_SIDE_EFFECTS (len1))
3044 len = len2;
3045 else if (TREE_SIDE_EFFECTS (len2))
3046 len = len1;
3047 else if (TREE_CODE (len1) != INTEGER_CST)
3048 len = len2;
3049 else if (TREE_CODE (len2) != INTEGER_CST)
3050 len = len1;
3051 else if (tree_int_cst_lt (len1, len2))
3052 len = len1;
3053 else
3054 len = len2;
3055
3056 /* If both arguments have side effects, we cannot optimize. */
3057 if (!len || TREE_SIDE_EFFECTS (len))
3058 return 0;
3059
3060 /* If we don't have POINTER_TYPE, call the function. */
3061 if (arg1_align == 0 || arg2_align == 0)
3062 return 0;
3063
3064 /* Make a place to write the result of the instruction. */
3065 result = target;
3066 if (! (result != 0
3067 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
3068 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3069 result = gen_reg_rtx (insn_mode);
3070
3071 arg1_rtx = get_memory_rtx (arg1);
3072 arg2_rtx = get_memory_rtx (arg2);
3073 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3074 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3075 GEN_INT (MIN (arg1_align, arg2_align)));
3076 if (!insn)
3077 return 0;
3078
3079 emit_insn (insn);
3080
3081 /* Return the value in the proper mode for this function. */
3082 mode = TYPE_MODE (TREE_TYPE (exp));
3083 if (GET_MODE (result) == mode)
3084 return result;
3085 if (target == 0)
3086 return convert_to_mode (mode, result, 0);
3087 convert_move (target, result, 0);
3088 return target;
3089 }
3090 #endif
3091 return 0;
3092 }
3093
3094 /* Expand expression EXP, which is a call to the strncmp builtin. Return 0
3095 if we failed; the caller should then emit a normal call.  Otherwise try to get
3096 the result in TARGET, if convenient. */
3097
3098 static rtx
3099 expand_builtin_strncmp (exp, target, mode)
3100 tree exp;
3101 rtx target;
3102 enum machine_mode mode;
3103 {
3104 tree arglist = TREE_OPERAND (exp, 1);
3105 tree arg1, arg2, arg3;
3106 const char *p1, *p2;
3107
3108 if (!validate_arglist (arglist,
3109 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3110 return 0;
3111
3112 arg1 = TREE_VALUE (arglist);
3113 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3114 arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3115
3116 /* If the len parameter is zero, return zero. */
3117 if (host_integerp (arg3, 1) && tree_low_cst (arg3, 1) == 0)
3118 {
3119 /* Evaluate and ignore arg1 and arg2 in case they have
3120 side-effects. */
3121 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
3122 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3123 return const0_rtx;
3124 }
3125
3126 p1 = c_getstr (arg1);
3127 p2 = c_getstr (arg2);
3128
3129 /* If all arguments are constant, evaluate at compile-time. */
3130 if (host_integerp (arg3, 1) && p1 && p2)
3131 {
3132 const int r = strncmp (p1, p2, tree_low_cst (arg3, 1));
3133 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
3134 }
3135
3136 /* If len == 1 or (either string parameter is "" and (len >= 1)),
3137 return (*(const u_char*)arg1 - *(const u_char*)arg2). */
3138 if (host_integerp (arg3, 1)
3139 && (tree_low_cst (arg3, 1) == 1
3140 || (tree_low_cst (arg3, 1) > 1
3141 && ((p1 && *p1 == '\0') || (p2 && *p2 == '\0')))))
3142 {
3143 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
3144 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
3145 tree ind1 =
3146 fold (build1 (CONVERT_EXPR, integer_type_node,
3147 build1 (INDIRECT_REF, cst_uchar_node,
3148 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
3149 tree ind2 =
3150 fold (build1 (CONVERT_EXPR, integer_type_node,
3151 build1 (INDIRECT_REF, cst_uchar_node,
3152 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
3153 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
3154 return expand_expr (result, target, mode, EXPAND_NORMAL);
3155 }
3156
3157 /* If c_strlen can determine an expression for one of the string
3158 lengths, and it doesn't have side effects, then emit cmpstrsi
3159 using length MIN(strlen(string)+1, arg3). */
3160 #ifdef HAVE_cmpstrsi
3161 if (HAVE_cmpstrsi)
3162 {
3163 tree len, len1, len2;
3164 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3165 rtx result, insn;
3166
3167 int arg1_align
3168 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3169 int arg2_align
3170 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3171 enum machine_mode insn_mode
3172 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3173
3174 len1 = c_strlen (arg1);
3175 len2 = c_strlen (arg2);
3176
3177 if (len1)
3178 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3179 if (len2)
3180 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3181
3182 /* If we don't have a constant length for the first, use the length
3183 of the second, if we know it. We don't require a constant for
3184 this case; some cost analysis could be done if both are available
3185 but neither is constant. For now, assume they're equally cheap,
3186 unless one has side effects. If both strings have constant lengths,
3187 use the smaller. */
3188
3189 if (!len1)
3190 len = len2;
3191 else if (!len2)
3192 len = len1;
3193 else if (TREE_SIDE_EFFECTS (len1))
3194 len = len2;
3195 else if (TREE_SIDE_EFFECTS (len2))
3196 len = len1;
3197 else if (TREE_CODE (len1) != INTEGER_CST)
3198 len = len2;
3199 else if (TREE_CODE (len2) != INTEGER_CST)
3200 len = len1;
3201 else if (tree_int_cst_lt (len1, len2))
3202 len = len1;
3203 else
3204 len = len2;
3205
3206 /* If both arguments have side effects, we cannot optimize. */
3207 if (!len || TREE_SIDE_EFFECTS (len))
3208 return 0;
3209
3210 /* The actual new length parameter is MIN(len,arg3). */
3211 len = fold (build (MIN_EXPR, TREE_TYPE (len), len, arg3));
3212
3213 /* If we don't have POINTER_TYPE, call the function. */
3214 if (arg1_align == 0 || arg2_align == 0)
3215 return 0;
3216
3217 /* Make a place to write the result of the instruction. */
3218 result = target;
3219 if (! (result != 0
3220 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
3221 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3222 result = gen_reg_rtx (insn_mode);
3223
3224 arg1_rtx = get_memory_rtx (arg1);
3225 arg2_rtx = get_memory_rtx (arg2);
3226 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3227 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3228 GEN_INT (MIN (arg1_align, arg2_align)));
3229 if (!insn)
3230 return 0;
3231
3232 emit_insn (insn);
3233
3234 /* Return the value in the proper mode for this function. */
3235 mode = TYPE_MODE (TREE_TYPE (exp));
3236 if (GET_MODE (result) == mode)
3237 return result;
3238 if (target == 0)
3239 return convert_to_mode (mode, result, 0);
3240 convert_move (target, result, 0);
3241 return target;
3242 }
3243 #endif
3244 return 0;
3245 }
3246
3247 /* Expand expression EXP, which is a call to the strcat builtin.
3248 Return 0 if we failed; the caller should then emit a normal call.
3249 Otherwise try to get the result in TARGET, if convenient. */
3250
3251 static rtx
3252 expand_builtin_strcat (arglist, target, mode)
3253 tree arglist;
3254 rtx target;
3255 enum machine_mode mode;
3256 {
3257 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3258 return 0;
3259 else
3260 {
3261 tree dst = TREE_VALUE (arglist),
3262 src = TREE_VALUE (TREE_CHAIN (arglist));
3263 const char *p = c_getstr (src);
3264
3265 /* If the string length is zero, return the dst parameter. */
3266 if (p && *p == '\0')
3267 return expand_expr (dst, target, mode, EXPAND_NORMAL);
3268
3269 return 0;
3270 }
3271 }
3272
3273 /* Expand expression EXP, which is a call to the strncat builtin.
3274 Return 0 if we failed; the caller should then emit a normal call.
3275 Otherwise try to get the result in TARGET, if convenient. */
3276
3277 static rtx
3278 expand_builtin_strncat (arglist, target, mode)
3279 tree arglist;
3280 rtx target;
3281 enum machine_mode mode;
3282 {
3283 if (!validate_arglist (arglist,
3284 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3285 return 0;
3286 else
3287 {
3288 tree dst = TREE_VALUE (arglist),
3289 src = TREE_VALUE (TREE_CHAIN (arglist)),
3290 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3291 const char *p = c_getstr (src);
3292
3293 /* If the requested length is zero, or the src parameter string
3294 length is zero, return the dst parameter. */
3295 if (integer_zerop (len) || (p && *p == '\0'))
3296 {
3297 /* Evaluate and ignore the src and len parameters in case
3298 they have side-effects. */
3299 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3300 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
3301 return expand_expr (dst, target, mode, EXPAND_NORMAL);
3302 }
3303
3304 /* If the requested len is greater than or equal to the string
3305 length, call strcat. */
3306 if (TREE_CODE (len) == INTEGER_CST && p
3307 && compare_tree_int (len, strlen (p)) >= 0)
3308 {
3309 tree newarglist
3310 = tree_cons (NULL_TREE, dst, build_tree_list (NULL_TREE, src));
3311 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
3312
3313 /* If the replacement _DECL isn't initialized, don't do the
3314 transformation. */
3315 if (!fn)
3316 return 0;
3317
3318 return expand_expr (build_function_call_expr (fn, newarglist),
3319 target, mode, EXPAND_NORMAL);
3320 }
3321 return 0;
3322 }
3323 }
3324
3325 /* Expand expression EXP, which is a call to the strspn builtin.
3326 Return 0 if we failed; the caller should then emit a normal call.
3327 Otherwise try to get the result in TARGET, if convenient. */
3328
3329 static rtx
3330 expand_builtin_strspn (arglist, target, mode)
3331 tree arglist;
3332 rtx target;
3333 enum machine_mode mode;
3334 {
3335 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3336 return 0;
3337 else
3338 {
3339 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
3340 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
3341
3342 /* If both arguments are constants, evaluate at compile-time. */
3343 if (p1 && p2)
3344 {
3345 const size_t r = strspn (p1, p2);
3346 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
3347 }
3348
3349 /* If either argument is "", return 0. */
3350 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
3351 {
3352 /* Evaluate and ignore both arguments in case either one has
3353 side-effects. */
3354 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
3355 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3356 return const0_rtx;
3357 }
3358 return 0;
3359 }
3360 }
3361
3362 /* Expand expression EXP, which is a call to the strcspn builtin.
3363 Return 0 if we failed; the caller should then emit a normal call.
3364 Otherwise try to get the result in TARGET, if convenient. */
3365
3366 static rtx
3367 expand_builtin_strcspn (arglist, target, mode)
3368 tree arglist;
3369 rtx target;
3370 enum machine_mode mode;
3371 {
3372 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3373 return 0;
3374 else
3375 {
3376 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
3377 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
3378
3379 /* If both arguments are constants, evaluate at compile-time. */
3380 if (p1 && p2)
3381 {
3382 const size_t r = strcspn (p1, p2);
3383 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
3384 }
3385
3386 /* If the first argument is "", return 0. */
3387 if (p1 && *p1 == '\0')
3388 {
3389 /* Evaluate and ignore argument s2 in case it has
3390 side-effects. */
3391 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3392 return const0_rtx;
3393 }
3394
3395 /* If the second argument is "", return __builtin_strlen(s1). */
3396 if (p2 && *p2 == '\0')
3397 {
3398 tree newarglist = build_tree_list (NULL_TREE, s1),
3399 fn = implicit_built_in_decls[BUILT_IN_STRLEN];
3400
3401 /* If the replacement _DECL isn't initialized, don't do the
3402 transformation. */
3403 if (!fn)
3404 return 0;
3405
3406 return expand_expr (build_function_call_expr (fn, newarglist),
3407 target, mode, EXPAND_NORMAL);
3408 }
3409 return 0;
3410 }
3411 }
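/* [Editorial illustration -- not part of builtins.c.]  The strspn and
   strcspn foldings above at the source level; strcspn_fold_demo is an
   invented name.  */
#include <string.h>

size_t
strcspn_fold_demo (const char *s)
{
  size_t a = strcspn ("abc", "c");   /* both constant: folded to 2          */
  size_t b = strcspn ("", s);        /* first argument "": folded to 0      */
  size_t c = strcspn (s, "");        /* second argument "": strlen (s)      */
  return a + b + c;
}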
3412
3413 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
3414 if that's convenient. */
3415
3416 rtx
3417 expand_builtin_saveregs ()
3418 {
3419 rtx val, seq;
3420
3421 /* Don't do __builtin_saveregs more than once in a function.
3422 Save the result of the first call and reuse it. */
3423 if (saveregs_value != 0)
3424 return saveregs_value;
3425
3426 /* When this function is called, it means that registers must be
3427 saved on entry to this function. So we migrate the call to the
3428 first insn of this function. */
3429
3430 start_sequence ();
3431
3432 #ifdef EXPAND_BUILTIN_SAVEREGS
3433 /* Do whatever the machine needs done in this case. */
3434 val = EXPAND_BUILTIN_SAVEREGS ();
3435 #else
3436 /* ??? We used to try to build up a call to the out-of-line function,
3437 guessing about what registers needed saving etc. This became much
3438 harder with __builtin_va_start, since we don't have a tree for a
3439 call to __builtin_saveregs to fall back on. There was exactly one
3440 port (i860) that used this code, and I'm unconvinced it could actually
3441 handle the general case. So we no longer try to handle anything
3442 weird and make the backend absorb the evil. */
3443
3444 error ("__builtin_saveregs not supported by this target");
3445 val = const0_rtx;
3446 #endif
3447
3448 seq = get_insns ();
3449 end_sequence ();
3450
3451 saveregs_value = val;
3452
3453 /* Put the insns after the NOTE that starts the function. If this
3454 is inside a start_sequence, make the outer-level insn chain current, so
3455 the code is placed at the start of the function. */
3456 push_topmost_sequence ();
3457 emit_insn_after (seq, get_insns ());
3458 pop_topmost_sequence ();
3459
3460 return val;
3461 }
3462
3463 /* __builtin_args_info (N) returns word N of the arg space info
3464 for the current function. The number and meanings of words
3465 is controlled by the definition of CUMULATIVE_ARGS. */
3466
3467 static rtx
3468 expand_builtin_args_info (arglist)
3469 tree arglist;
3470 {
3471 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
3472 int *word_ptr = (int *) &current_function_args_info;
3473
3474 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
3475 abort ();
3476
3477 if (arglist != 0)
3478 {
3479 if (!host_integerp (TREE_VALUE (arglist), 0))
3480 error ("argument of `__builtin_args_info' must be constant");
3481 else
3482 {
3483 HOST_WIDE_INT wordnum = tree_low_cst (TREE_VALUE (arglist), 0);
3484
3485 if (wordnum < 0 || wordnum >= nwords)
3486 error ("argument of `__builtin_args_info' out of range");
3487 else
3488 return GEN_INT (word_ptr[wordnum]);
3489 }
3490 }
3491 else
3492 error ("missing argument in `__builtin_args_info'");
3493
3494 return const0_rtx;
3495 }
3496
3497 /* Expand ARGLIST, from a call to __builtin_next_arg. */
3498
3499 static rtx
3500 expand_builtin_next_arg (arglist)
3501 tree arglist;
3502 {
3503 tree fntype = TREE_TYPE (current_function_decl);
3504
3505 if (TYPE_ARG_TYPES (fntype) == 0
3506 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3507 == void_type_node))
3508 {
3509 error ("`va_start' used in function with fixed args");
3510 return const0_rtx;
3511 }
3512
3513 if (arglist)
3514 {
3515 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
3516 tree arg = TREE_VALUE (arglist);
3517
3518 /* Strip off all nops for the sake of the comparison. This
3519 is not quite the same as STRIP_NOPS. It does more.
3520 We must also strip off INDIRECT_EXPR for C++ reference
3521 parameters. */
3522 while (TREE_CODE (arg) == NOP_EXPR
3523 || TREE_CODE (arg) == CONVERT_EXPR
3524 || TREE_CODE (arg) == NON_LVALUE_EXPR
3525 || TREE_CODE (arg) == INDIRECT_REF)
3526 arg = TREE_OPERAND (arg, 0);
3527 if (arg != last_parm)
3528 warning ("second parameter of `va_start' not last named argument");
3529 }
3530 else
3531 /* Evidently an out of date version of <stdarg.h>; can't validate
3532 va_start's second argument, but can still work as intended. */
3533 warning ("`__builtin_next_arg' called without an argument");
3534
3535 return expand_binop (Pmode, add_optab,
3536 current_function_internal_arg_pointer,
3537 current_function_arg_offset_rtx,
3538 NULL_RTX, 0, OPTAB_LIB_WIDEN);
3539 }
3540
3541 /* Make it easier for the backends by protecting the valist argument
3542 from multiple evaluations. */
3543
3544 static tree
3545 stabilize_va_list (valist, needs_lvalue)
3546 tree valist;
3547 int needs_lvalue;
3548 {
3549 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
3550 {
3551 if (TREE_SIDE_EFFECTS (valist))
3552 valist = save_expr (valist);
3553
3554 /* For this case, the backends will be expecting a pointer to
3555 TREE_TYPE (va_list_type_node), but it's possible we've
3556 actually been given an array (an actual va_list_type_node).
3557 So fix it. */
3558 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
3559 {
3560 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
3561 tree p2 = build_pointer_type (va_list_type_node);
3562
3563 valist = build1 (ADDR_EXPR, p2, valist);
3564 valist = fold (build1 (NOP_EXPR, p1, valist));
3565 }
3566 }
3567 else
3568 {
3569 tree pt;
3570
3571 if (! needs_lvalue)
3572 {
3573 if (! TREE_SIDE_EFFECTS (valist))
3574 return valist;
3575
3576 pt = build_pointer_type (va_list_type_node);
3577 valist = fold (build1 (ADDR_EXPR, pt, valist));
3578 TREE_SIDE_EFFECTS (valist) = 1;
3579 }
3580
3581 if (TREE_SIDE_EFFECTS (valist))
3582 valist = save_expr (valist);
3583 valist = fold (build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)),
3584 valist));
3585 }
3586
3587 return valist;
3588 }
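/* Editorial note (illustrative, not part of the original source): on
   targets whose va_list is an array type (the PowerPC SVR4 ABI, for
   example, defines it as a one-element array of structs), a va_list
   expression decays to a pointer when passed to another function:

       #include <stdarg.h>

       void helper (va_list ap);

       void
       outer (int first, ...)
       {
         va_list ap;

         va_start (ap, first);
         helper (ap);
         va_end (ap);
       }

   Inside HELPER the parameter has pointer type, which is why
   stabilize_va_list above and expand_builtin_va_arg below must accept
   either the array form or its decayed pointer form.  */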
3589
3590 /* The "standard" implementation of va_start: just assign `nextarg' to
3591 the variable. */
3592
3593 void
3594 std_expand_builtin_va_start (valist, nextarg)
3595 tree valist;
3596 rtx nextarg;
3597 {
3598 tree t;
3599
3600 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3601 make_tree (ptr_type_node, nextarg));
3602 TREE_SIDE_EFFECTS (t) = 1;
3603
3604 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3605 }
3606
3607 /* Expand ARGLIST, from a call to __builtin_va_start. */
3608
3609 static rtx
3610 expand_builtin_va_start (arglist)
3611 tree arglist;
3612 {
3613 rtx nextarg;
3614 tree chain, valist;
3615
3616 chain = TREE_CHAIN (arglist);
3617
3618 if (TREE_CHAIN (chain))
3619 error ("too many arguments to function `va_start'");
3620
3621 nextarg = expand_builtin_next_arg (chain);
3622 valist = stabilize_va_list (TREE_VALUE (arglist), 1);
3623
3624 #ifdef EXPAND_BUILTIN_VA_START
3625 EXPAND_BUILTIN_VA_START (valist, nextarg);
3626 #else
3627 std_expand_builtin_va_start (valist, nextarg);
3628 #endif
3629
3630 return const0_rtx;
3631 }
3632
3633 /* The "standard" implementation of va_arg: read the value from the
3634 current (padded) address and increment by the (padded) size. */
3635
3636 rtx
3637 std_expand_builtin_va_arg (valist, type)
3638 tree valist, type;
3639 {
3640 tree addr_tree, t, type_size = NULL;
3641 tree align, alignm1;
3642 tree rounded_size;
3643 rtx addr;
3644
3645 /* Compute the rounded size of the type. */
3646 align = size_int (PARM_BOUNDARY / BITS_PER_UNIT);
3647 alignm1 = size_int (PARM_BOUNDARY / BITS_PER_UNIT - 1);
3648 if (type == error_mark_node
3649 || (type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type))) == NULL
3650 || TREE_OVERFLOW (type_size))
3651 rounded_size = size_zero_node;
3652 else
3653 rounded_size = fold (build (MULT_EXPR, sizetype,
3654 fold (build (TRUNC_DIV_EXPR, sizetype,
3655 fold (build (PLUS_EXPR, sizetype,
3656 type_size, alignm1)),
3657 align)),
3658 align));
3659
3660 /* Get AP. */
3661 addr_tree = valist;
3662 if (PAD_VARARGS_DOWN && ! integer_zerop (rounded_size))
3663 {
3664 /* Small args are padded downward. */
3665 addr_tree = fold (build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
3666 fold (build (COND_EXPR, sizetype,
3667 fold (build (GT_EXPR, sizetype,
3668 rounded_size,
3669 align)),
3670 size_zero_node,
3671 fold (build (MINUS_EXPR, sizetype,
3672 rounded_size,
3673 type_size))))));
3674 }
3675
3676 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3677 addr = copy_to_reg (addr);
3678
3679 /* Compute new value for AP. */
3680 if (! integer_zerop (rounded_size))
3681 {
3682 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3683 build (PLUS_EXPR, TREE_TYPE (valist), valist,
3684 rounded_size));
3685 TREE_SIDE_EFFECTS (t) = 1;
3686 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3687 }
3688
3689 return addr;
3690 }
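/* Editorial worked example (under assumed target parameters, not part
   of the original source): suppose PARM_BOUNDARY is 64 bits,
   PAD_VARARGS_DOWN is true (a big-endian convention), and we expand
   va_arg (ap, int) with a 4-byte int.  Then

       align        = 8
       type_size    = 4
       rounded_size = ((4 + 7) / 8) * 8 = 8

   Since rounded_size is not greater than align, the PAD_VARARGS_DOWN
   adjustment adds rounded_size - type_size = 4, so the value is read
   from ap + 4 and ap is then advanced by 8.  */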
3691
3692 /* Expand __builtin_va_arg, which is not really a builtin function, but
3693 a very special sort of operator. */
3694
3695 rtx
3696 expand_builtin_va_arg (valist, type)
3697 tree valist, type;
3698 {
3699 rtx addr, result;
3700 tree promoted_type, want_va_type, have_va_type;
3701
3702 /* Verify that valist is of the proper type. */
3703
3704 want_va_type = va_list_type_node;
3705 have_va_type = TREE_TYPE (valist);
3706 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
3707 {
3708 /* If va_list is an array type, the argument may have decayed
3709 to a pointer type, e.g. by being passed to another function.
3710 In that case, unwrap both types so that we can compare the
3711 underlying records. */
3712 if (TREE_CODE (have_va_type) == ARRAY_TYPE
3713 || TREE_CODE (have_va_type) == POINTER_TYPE)
3714 {
3715 want_va_type = TREE_TYPE (want_va_type);
3716 have_va_type = TREE_TYPE (have_va_type);
3717 }
3718 }
3719 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
3720 {
3721 error ("first argument to `va_arg' not of type `va_list'");
3722 addr = const0_rtx;
3723 }
3724
3725 /* Generate a diagnostic for requesting data of a type that cannot
3726 be passed through `...' due to type promotion at the call site. */
3727 else if ((promoted_type = (*lang_hooks.types.type_promotes_to) (type))
3728 != type)
3729 {
3730 const char *name = "<anonymous type>", *pname = 0;
3731 static bool gave_help;
3732
3733 if (TYPE_NAME (type))
3734 {
3735 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE)
3736 name = IDENTIFIER_POINTER (TYPE_NAME (type));
3737 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
3738 && DECL_NAME (TYPE_NAME (type)))
3739 name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
3740 }
3741 if (TYPE_NAME (promoted_type))
3742 {
3743 if (TREE_CODE (TYPE_NAME (promoted_type)) == IDENTIFIER_NODE)
3744 pname = IDENTIFIER_POINTER (TYPE_NAME (promoted_type));
3745 else if (TREE_CODE (TYPE_NAME (promoted_type)) == TYPE_DECL
3746 && DECL_NAME (TYPE_NAME (promoted_type)))
3747 pname = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (promoted_type)));
3748 }
3749
3750 /* Unfortunately, this is merely undefined, rather than a constraint
3751 violation, so we cannot make this an error. If this call is never
3752 executed, the program is still strictly conforming. */
3753 warning ("`%s' is promoted to `%s' when passed through `...'",
3754 name, pname);
3755 if (! gave_help)
3756 {
3757 gave_help = true;
3758 warning ("(so you should pass `%s' not `%s' to `va_arg')",
3759 pname, name);
3760 }
3761
3762 /* We can, however, treat "undefined" any way we please.
3763 Emit a trap to encourage the user to fix the program. */
3764 expand_builtin_trap ();
3765
3766 /* This is dead code, but go ahead and finish so that the
3767 mode of the result comes out right. */
3768 addr = const0_rtx;
3769 }
3770 else
3771 {
3772 /* Make it easier for the backends by protecting the valist argument
3773 from multiple evaluations. */
3774 valist = stabilize_va_list (valist, 0);
3775
3776 #ifdef EXPAND_BUILTIN_VA_ARG
3777 addr = EXPAND_BUILTIN_VA_ARG (valist, type);
3778 #else
3779 addr = std_expand_builtin_va_arg (valist, type);
3780 #endif
3781 }
3782
3783 #ifdef POINTERS_EXTEND_UNSIGNED
3784 if (GET_MODE (addr) != Pmode)
3785 addr = convert_memory_address (Pmode, addr);
3786 #endif
3787
3788 result = gen_rtx_MEM (TYPE_MODE (type), addr);
3789 set_mem_alias_set (result, get_varargs_alias_set ());
3790
3791 return result;
3792 }
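/* Editorial illustration (not part of the original source): the
   promotion diagnostic above corresponds to user code such as

       char c = va_arg (ap, char);

   which is undefined because a char argument is promoted to int when
   passed through "...".  The conforming spelling, as the second
   warning suggests, is

       char c = (char) va_arg (ap, int);
*/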
3793
3794 /* Expand ARGLIST, from a call to __builtin_va_end. */
3795
3796 static rtx
3797 expand_builtin_va_end (arglist)
3798 tree arglist;
3799 {
3800 tree valist = TREE_VALUE (arglist);
3801
3802 #ifdef EXPAND_BUILTIN_VA_END
3803 valist = stabilize_va_list (valist, 0);
3804 EXPAND_BUILTIN_VA_END (arglist);
3805 #else
3806 /* Evaluate for side effects, if needed. I hate macros that don't
3807 do that. */
3808 if (TREE_SIDE_EFFECTS (valist))
3809 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
3810 #endif
3811
3812 return const0_rtx;
3813 }
3814
3815 /* Expand ARGLIST, from a call to __builtin_va_copy. We do this as a
3816 builtin rather than just as an assignment in stdarg.h because of the
3817 nastiness of array-type va_list types. */
3818
3819 static rtx
3820 expand_builtin_va_copy (arglist)
3821 tree arglist;
3822 {
3823 tree dst, src, t;
3824
3825 dst = TREE_VALUE (arglist);
3826 src = TREE_VALUE (TREE_CHAIN (arglist));
3827
3828 dst = stabilize_va_list (dst, 1);
3829 src = stabilize_va_list (src, 0);
3830
3831 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
3832 {
3833 t = build (MODIFY_EXPR, va_list_type_node, dst, src);
3834 TREE_SIDE_EFFECTS (t) = 1;
3835 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3836 }
3837 else
3838 {
3839 rtx dstb, srcb, size;
3840
3841 /* Evaluate to pointers. */
3842 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
3843 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
3844 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
3845 VOIDmode, EXPAND_NORMAL);
3846
3847 #ifdef POINTERS_EXTEND_UNSIGNED
3848 if (GET_MODE (dstb) != Pmode)
3849 dstb = convert_memory_address (Pmode, dstb);
3850
3851 if (GET_MODE (srcb) != Pmode)
3852 srcb = convert_memory_address (Pmode, srcb);
3853 #endif
3854
3855 /* "Dereference" to BLKmode memories. */
3856 dstb = gen_rtx_MEM (BLKmode, dstb);
3857 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
3858 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
3859 srcb = gen_rtx_MEM (BLKmode, srcb);
3860 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
3861 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
3862
3863 /* Copy. */
3864 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
3865 }
3866
3867 return const0_rtx;
3868 }
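/* Editorial illustration (not part of the original source): the two
   expansion strategies above make ordinary uses of va_copy behave the
   same whether va_list is a record type (simple assignment) or an
   array type (block copy):

       #include <stdarg.h>
       #include <stdio.h>

       void
       print_twice (const char *fmt, ...)
       {
         va_list ap, aq;

         va_start (ap, fmt);
         va_copy (aq, ap);
         vprintf (fmt, ap);
         vprintf (fmt, aq);
         va_end (aq);
         va_end (ap);
       }
*/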
3869
3870 /* Expand a call to one of the builtin functions __builtin_frame_address or
3871 __builtin_return_address. */
3872
3873 static rtx
3874 expand_builtin_frame_address (fndecl, arglist)
3875 tree fndecl, arglist;
3876 {
3877 /* The argument must be a nonnegative integer constant. It counts
3878 the number of frames to scan up the stack. The value is either that
3879 frame's address or the return address saved in it, depending on the builtin. */
3880 if (arglist == 0)
3881 /* Warning about missing arg was already issued. */
3882 return const0_rtx;
3883 else if (! host_integerp (TREE_VALUE (arglist), 1))
3884 {
3885 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3886 error ("invalid arg to `__builtin_frame_address'");
3887 else
3888 error ("invalid arg to `__builtin_return_address'");
3889 return const0_rtx;
3890 }
3891 else
3892 {
3893 rtx tem
3894 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
3895 tree_low_cst (TREE_VALUE (arglist), 1),
3896 hard_frame_pointer_rtx);
3897
3898 /* Some ports cannot access arbitrary stack frames. */
3899 if (tem == NULL)
3900 {
3901 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3902 warning ("unsupported arg to `__builtin_frame_address'");
3903 else
3904 warning ("unsupported arg to `__builtin_return_address'");
3905 return const0_rtx;
3906 }
3907
3908 /* For __builtin_frame_address, return what we've got. */
3909 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3910 return tem;
3911
3912 if (GET_CODE (tem) != REG
3913 && ! CONSTANT_P (tem))
3914 tem = copy_to_mode_reg (Pmode, tem);
3915 return tem;
3916 }
3917 }
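/* Editorial illustration (not part of the original source): the
   argument must be an integer constant expression, so

       void *caller = __builtin_return_address (0);

   is accepted, while passing a variable count such as
   __builtin_return_address (n) draws the "invalid arg" error above.
   Nonzero counts walk up the stack and are only as reliable as the
   target allows, hence the "unsupported arg" warning path.  */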
3918
3919 /* Expand a call to the alloca builtin, with arguments ARGLIST. Return 0 if
3920 we failed and the caller should emit a normal call, otherwise try to get
3921 the result in TARGET, if convenient. */
3922
3923 static rtx
3924 expand_builtin_alloca (arglist, target)
3925 tree arglist;
3926 rtx target;
3927 {
3928 rtx op0;
3929 rtx result;
3930
3931 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
3932 return 0;
3933
3934 /* Compute the argument. */
3935 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
3936
3937 /* Allocate the desired space. */
3938 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
3939
3940 #ifdef POINTERS_EXTEND_UNSIGNED
3941 if (GET_MODE (result) != ptr_mode)
3942 result = convert_memory_address (ptr_mode, result);
3943 #endif
3944
3945 return result;
3946 }
3947
3948 /* Expand a call to a unary builtin. The arguments are in ARGLIST.
3949 Return 0 if a normal call should be emitted rather than expanding the
3950 function in-line. If convenient, the result should be placed in TARGET.
3951 SUBTARGET may be used as the target for computing one of EXP's operands. */
3952
3953 static rtx
3954 expand_builtin_unop (target_mode, arglist, target, subtarget, op_optab)
3955 enum machine_mode target_mode;
3956 tree arglist;
3957 rtx target, subtarget;
3958 optab op_optab;
3959 {
3960 rtx op0;
3961 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
3962 return 0;
3963
3964 /* Compute the argument. */
3965 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
3966 /* Compute op, into TARGET if possible.
3967 Set TARGET to wherever the result comes back. */
3968 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
3969 op_optab, op0, target, 1);
3970 if (target == 0)
3971 abort ();
3972
3973 return convert_to_mode (target_mode, target, 0);
3974 }
3975
3976 /* If the string passed to fputs is a constant, we attempt to transform
3977 the call into __builtin_fputc() (one character) or __builtin_fwrite(). */
3978
3979 static rtx
3980 expand_builtin_fputs (arglist, ignore, unlocked)
3981 tree arglist;
3982 int ignore;
3983 int unlocked;
3984 {
3985 tree len, fn;
3986 tree fn_fputc = unlocked ? implicit_built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
3987 : implicit_built_in_decls[BUILT_IN_FPUTC];
3988 tree fn_fwrite = unlocked ? implicit_built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
3989 : implicit_built_in_decls[BUILT_IN_FWRITE];
3990
3991 /* If the return value is used, or the replacement _DECL isn't
3992 initialized, don't do the transformation. */
3993 if (!ignore || !fn_fputc || !fn_fwrite)
3994 return 0;
3995
3996 /* Verify the arguments in the original call. */
3997 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3998 return 0;
3999
4000 /* Get the length of the string passed to fputs. If the length
4001 can't be determined, punt. */
4002 if (!(len = c_strlen (TREE_VALUE (arglist)))
4003 || TREE_CODE (len) != INTEGER_CST)
4004 return 0;
4005
4006 switch (compare_tree_int (len, 1))
4007 {
4008 case -1: /* length is 0, delete the call entirely. */
4009 {
4010 /* Evaluate and ignore the argument in case it has
4011 side-effects. */
4012 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
4013 VOIDmode, EXPAND_NORMAL);
4014 return const0_rtx;
4015 }
4016 case 0: /* length is 1, call fputc. */
4017 {
4018 const char *p = c_getstr (TREE_VALUE (arglist));
4019
4020 if (p != NULL)
4021 {
4022 /* New argument list transforming fputs(string, stream) to
4023 fputc(string[0], stream). */
4024 arglist =
4025 build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
4026 arglist =
4027 tree_cons (NULL_TREE, build_int_2 (p[0], 0), arglist);
4028 fn = fn_fputc;
4029 break;
4030 }
4031 }
4032 /* FALLTHROUGH */
4033 case 1: /* length is greater than 1, call fwrite. */
4034 {
4035 tree string_arg;
4036
4037 /* If optimizing for size, keep fputs. */
4038 if (optimize_size)
4039 return 0;
4040 string_arg = TREE_VALUE (arglist);
4041 /* New argument list transforming fputs(string, stream) to
4042 fwrite(string, 1, len, stream). */
4043 arglist = build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
4044 arglist = tree_cons (NULL_TREE, len, arglist);
4045 arglist = tree_cons (NULL_TREE, size_one_node, arglist);
4046 arglist = tree_cons (NULL_TREE, string_arg, arglist);
4047 fn = fn_fwrite;
4048 break;
4049 }
4050 default:
4051 abort ();
4052 }
4053
4054 return expand_expr (build_function_call_expr (fn, arglist),
4055 (ignore ? const0_rtx : NULL_RTX),
4056 VOIDmode, EXPAND_NORMAL);
4057 }
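/* Editorial illustration (not part of the original source): at the
   source level the transformations above amount to

       fputs ("", f)        call deleted (F still evaluated for its
                            side effects)
       fputs ("\n", f)      becomes  fputc ('\n', f)
       fputs ("hello", f)   becomes  fwrite ("hello", 1, 5, f)

   with the fwrite form suppressed when optimizing for size.  */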
4058
4059 /* Expand a call to __builtin_expect. We return our argument and emit a
4060 NOTE_INSN_EXPECTED_VALUE note. This is the expansion of __builtin_expect in
4061 a non-jump context. */
4062
4063 static rtx
4064 expand_builtin_expect (arglist, target)
4065 tree arglist;
4066 rtx target;
4067 {
4068 tree exp, c;
4069 rtx note, rtx_c;
4070
4071 if (arglist == NULL_TREE
4072 || TREE_CHAIN (arglist) == NULL_TREE)
4073 return const0_rtx;
4074 exp = TREE_VALUE (arglist);
4075 c = TREE_VALUE (TREE_CHAIN (arglist));
4076
4077 if (TREE_CODE (c) != INTEGER_CST)
4078 {
4079 error ("second arg to `__builtin_expect' must be a constant");
4080 c = integer_zero_node;
4081 }
4082
4083 target = expand_expr (exp, target, VOIDmode, EXPAND_NORMAL);
4084
4085 /* Don't bother with expected value notes for integral constants. */
4086 if (flag_guess_branch_prob && GET_CODE (target) != CONST_INT)
4087 {
4088 /* We do need to force this into a register so that we can be
4089 moderately sure to be able to correctly interpret the branch
4090 condition later. */
4091 target = force_reg (GET_MODE (target), target);
4092
4093 rtx_c = expand_expr (c, NULL_RTX, GET_MODE (target), EXPAND_NORMAL);
4094
4095 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
4096 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, target, rtx_c);
4097 }
4098
4099 return target;
4100 }
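/* Editorial illustration (not part of the original source): typical
   user-level use of the builtin handled above:

       #include <stddef.h>

       extern void slow_path (void);

       void
       check (void *p)
       {
         if (__builtin_expect (p == NULL, 0))
           slow_path ();
       }

   In a non-jump context the expansion returns the value of P == NULL
   and records the expected value 0 in a NOTE_INSN_EXPECTED_VALUE note;
   the jump context is handled by expand_builtin_expect_jump below.  */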
4101
4102 /* Like expand_builtin_expect, except do this in a jump context. This is
4103 called from do_jump if the conditional is a __builtin_expect. Return either
4104 a list of insns to emit the jump or NULL if we cannot optimize
4105 __builtin_expect. We need to optimize this at jump time so that machines
4106 like the PowerPC don't turn the test into a SCC operation, and then jump
4107 based on the test being 0/1. */
4108
4109 rtx
4110 expand_builtin_expect_jump (exp, if_false_label, if_true_label)
4111 tree exp;
4112 rtx if_false_label;
4113 rtx if_true_label;
4114 {
4115 tree arglist = TREE_OPERAND (exp, 1);
4116 tree arg0 = TREE_VALUE (arglist);
4117 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4118 rtx ret = NULL_RTX;
4119
4120 /* Only handle __builtin_expect (test, 0) and
4121 __builtin_expect (test, 1). */
4122 if (TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
4123 && (integer_zerop (arg1) || integer_onep (arg1)))
4124 {
4125 int num_jumps = 0;
4126 rtx insn;
4127
4128 /* If we fail to locate an appropriate conditional jump, we'll
4129 fall back to normal evaluation. Ensure that the expression
4130 can be re-evaluated. */
4131 switch (unsafe_for_reeval (arg0))
4132 {
4133 case 0: /* Safe. */
4134 break;
4135
4136 case 1: /* Mildly unsafe. */
4137 arg0 = unsave_expr (arg0);
4138 break;
4139
4140 case 2: /* Wildly unsafe. */
4141 return NULL_RTX;
4142 }
4143
4144 /* Expand the jump insns. */
4145 start_sequence ();
4146 do_jump (arg0, if_false_label, if_true_label);
4147 ret = get_insns ();
4148 end_sequence ();
4149
4150 /* Now that the __builtin_expect has been validated, go through and add
4151 the expected-value predictions to each of the conditional jumps. If we
4152 run into an error, just give up and generate the 'safe' code of doing
4153 a SCC operation and then doing a branch on that. */
4154 insn = ret;
4155 while (insn != NULL_RTX)
4156 {
4157 rtx next = NEXT_INSN (insn);
4158 rtx pattern;
4159
4160 if (GET_CODE (insn) == JUMP_INSN && any_condjump_p (insn)
4161 && (pattern = pc_set (insn)) != NULL_RTX)
4162 {
4163 rtx ifelse = SET_SRC (pattern);
4164 rtx label;
4165 int taken;
4166
4167 if (GET_CODE (ifelse) != IF_THEN_ELSE)
4168 goto do_next_insn;
4169
4170 if (GET_CODE (XEXP (ifelse, 1)) == LABEL_REF)
4171 {
4172 taken = 1;
4173 label = XEXP (XEXP (ifelse, 1), 0);
4174 }
4175 /* An inverted jump reverses the probabilities. */
4176 else if (GET_CODE (XEXP (ifelse, 2)) == LABEL_REF)
4177 {
4178 taken = 0;
4179 label = XEXP (XEXP (ifelse, 2), 0);
4180 }
4181 /* We shouldn't have to worry about conditional returns during
4182 the expansion stage, but handle it gracefully anyway. */
4183 else if (GET_CODE (XEXP (ifelse, 1)) == RETURN)
4184 {
4185 taken = 1;
4186 label = NULL_RTX;
4187 }
4188 /* An inverted return reverses the probabilities. */
4189 else if (GET_CODE (XEXP (ifelse, 2)) == RETURN)
4190 {
4191 taken = 0;
4192 label = NULL_RTX;
4193 }
4194 else
4195 goto do_next_insn;
4196
4197 /* If the test is expected to fail, reverse the
4198 probabilities. */
4199 if (integer_zerop (arg1))
4200 taken = 1 - taken;
4201
4202 /* If we are jumping to the false label, reverse the
4203 probabilities. */
4204 if (label == NULL_RTX)
4205 ; /* conditional return */
4206 else if (label == if_false_label)
4207 taken = 1 - taken;
4208 else if (label != if_true_label)
4209 goto do_next_insn;
4210
4211 num_jumps++;
4212 predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
4213 }
4214
4215 do_next_insn:
4216 insn = next;
4217 }
4218
4219 /* If no jumps were modified, fail and do __builtin_expect the normal
4220 way. */
4221 if (num_jumps == 0)
4222 ret = NULL_RTX;
4223 }
4224
4225 return ret;
4226 }
4227
4228 void
4229 expand_builtin_trap ()
4230 {
4231 #ifdef HAVE_trap
4232 if (HAVE_trap)
4233 emit_insn (gen_trap ());
4234 else
4235 #endif
4236 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4237 emit_barrier ();
4238 }
4239 \f
4240 /* Expand an expression EXP that calls a built-in function,
4241 with result going to TARGET if that's convenient
4242 (and in mode MODE if that's convenient).
4243 SUBTARGET may be used as the target for computing one of EXP's operands.
4244 IGNORE is nonzero if the value is to be ignored. */
4245
4246 rtx
4247 expand_builtin (exp, target, subtarget, mode, ignore)
4248 tree exp;
4249 rtx target;
4250 rtx subtarget;
4251 enum machine_mode mode;
4252 int ignore;
4253 {
4254 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4255 tree arglist = TREE_OPERAND (exp, 1);
4256 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4257 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
4258
4259 /* Perform postincrements before expanding builtin functions. */
4260 emit_queue ();
4261
4262 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
4263 return (*targetm.expand_builtin) (exp, target, subtarget, mode, ignore);
4264
4265 /* When not optimizing, generate calls to library functions for a certain
4266 set of builtins. */
4267 if (!optimize && !CALLED_AS_BUILT_IN (fndecl))
4268 switch (fcode)
4269 {
4270 case BUILT_IN_SQRT:
4271 case BUILT_IN_SQRTF:
4272 case BUILT_IN_SQRTL:
4273 case BUILT_IN_SIN:
4274 case BUILT_IN_SINF:
4275 case BUILT_IN_SINL:
4276 case BUILT_IN_COS:
4277 case BUILT_IN_COSF:
4278 case BUILT_IN_COSL:
4279 case BUILT_IN_EXP:
4280 case BUILT_IN_EXPF:
4281 case BUILT_IN_EXPL:
4282 case BUILT_IN_LOG:
4283 case BUILT_IN_LOGF:
4284 case BUILT_IN_LOGL:
4285 case BUILT_IN_POW:
4286 case BUILT_IN_POWF:
4287 case BUILT_IN_POWL:
4288 case BUILT_IN_ATAN2:
4289 case BUILT_IN_ATAN2F:
4290 case BUILT_IN_ATAN2L:
4291 case BUILT_IN_MEMSET:
4292 case BUILT_IN_MEMCPY:
4293 case BUILT_IN_MEMCMP:
4294 case BUILT_IN_MEMPCPY:
4295 case BUILT_IN_MEMMOVE:
4296 case BUILT_IN_BCMP:
4297 case BUILT_IN_BZERO:
4298 case BUILT_IN_BCOPY:
4299 case BUILT_IN_INDEX:
4300 case BUILT_IN_RINDEX:
4301 case BUILT_IN_STPCPY:
4302 case BUILT_IN_STRCHR:
4303 case BUILT_IN_STRRCHR:
4304 case BUILT_IN_STRLEN:
4305 case BUILT_IN_STRCPY:
4306 case BUILT_IN_STRNCPY:
4307 case BUILT_IN_STRNCMP:
4308 case BUILT_IN_STRSTR:
4309 case BUILT_IN_STRPBRK:
4310 case BUILT_IN_STRCAT:
4311 case BUILT_IN_STRNCAT:
4312 case BUILT_IN_STRSPN:
4313 case BUILT_IN_STRCSPN:
4314 case BUILT_IN_STRCMP:
4315 case BUILT_IN_FFS:
4316 case BUILT_IN_PUTCHAR:
4317 case BUILT_IN_PUTS:
4318 case BUILT_IN_PRINTF:
4319 case BUILT_IN_FPUTC:
4320 case BUILT_IN_FPUTS:
4321 case BUILT_IN_FWRITE:
4322 case BUILT_IN_PUTCHAR_UNLOCKED:
4323 case BUILT_IN_PUTS_UNLOCKED:
4324 case BUILT_IN_PRINTF_UNLOCKED:
4325 case BUILT_IN_FPUTC_UNLOCKED:
4326 case BUILT_IN_FPUTS_UNLOCKED:
4327 case BUILT_IN_FWRITE_UNLOCKED:
4328 case BUILT_IN_FLOOR:
4329 case BUILT_IN_FLOORF:
4330 case BUILT_IN_FLOORL:
4331 case BUILT_IN_CEIL:
4332 case BUILT_IN_CEILF:
4333 case BUILT_IN_CEILL:
4334 case BUILT_IN_TRUNC:
4335 case BUILT_IN_TRUNCF:
4336 case BUILT_IN_TRUNCL:
4337 case BUILT_IN_ROUND:
4338 case BUILT_IN_ROUNDF:
4339 case BUILT_IN_ROUNDL:
4340 case BUILT_IN_NEARBYINT:
4341 case BUILT_IN_NEARBYINTF:
4342 case BUILT_IN_NEARBYINTL:
4343 return expand_call (exp, target, ignore);
4344
4345 default:
4346 break;
4347 }
4348
4349 /* The built-in function expanders test for target == const0_rtx
4350 to determine whether the function's result will be ignored. */
4351 if (ignore)
4352 target = const0_rtx;
4353
4354 /* If the result of a pure or const built-in function is ignored, and
4355 none of its arguments are volatile, we can avoid expanding the
4356 built-in call and just evaluate the arguments for side-effects. */
4357 if (target == const0_rtx
4358 && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
4359 {
4360 bool volatilep = false;
4361 tree arg;
4362
4363 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
4364 if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
4365 {
4366 volatilep = true;
4367 break;
4368 }
4369
4370 if (! volatilep)
4371 {
4372 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
4373 expand_expr (TREE_VALUE (arg), const0_rtx,
4374 VOIDmode, EXPAND_NORMAL);
4375 return const0_rtx;
4376 }
4377 }
4378
4379 switch (fcode)
4380 {
4381 case BUILT_IN_ABS:
4382 case BUILT_IN_LABS:
4383 case BUILT_IN_LLABS:
4384 case BUILT_IN_IMAXABS:
4385 case BUILT_IN_FABS:
4386 case BUILT_IN_FABSF:
4387 case BUILT_IN_FABSL:
4388 /* build_function_call changes these into ABS_EXPR. */
4389 abort ();
4390
4391 case BUILT_IN_CONJ:
4392 case BUILT_IN_CONJF:
4393 case BUILT_IN_CONJL:
4394 case BUILT_IN_CREAL:
4395 case BUILT_IN_CREALF:
4396 case BUILT_IN_CREALL:
4397 case BUILT_IN_CIMAG:
4398 case BUILT_IN_CIMAGF:
4399 case BUILT_IN_CIMAGL:
4400 /* expand_tree_builtin changes these into CONJ_EXPR, REALPART_EXPR
4401 and IMAGPART_EXPR. */
4402 abort ();
4403
4404 case BUILT_IN_SIN:
4405 case BUILT_IN_SINF:
4406 case BUILT_IN_SINL:
4407 case BUILT_IN_COS:
4408 case BUILT_IN_COSF:
4409 case BUILT_IN_COSL:
4410 case BUILT_IN_EXP:
4411 case BUILT_IN_EXPF:
4412 case BUILT_IN_EXPL:
4413 case BUILT_IN_LOG:
4414 case BUILT_IN_LOGF:
4415 case BUILT_IN_LOGL:
4416 /* Treat these like sqrt only if unsafe math optimizations are allowed,
4417 because of possible accuracy problems. */
4418 if (! flag_unsafe_math_optimizations)
4419 break;
4420 case BUILT_IN_SQRT:
4421 case BUILT_IN_SQRTF:
4422 case BUILT_IN_SQRTL:
4423 case BUILT_IN_FLOOR:
4424 case BUILT_IN_FLOORF:
4425 case BUILT_IN_FLOORL:
4426 case BUILT_IN_CEIL:
4427 case BUILT_IN_CEILF:
4428 case BUILT_IN_CEILL:
4429 case BUILT_IN_TRUNC:
4430 case BUILT_IN_TRUNCF:
4431 case BUILT_IN_TRUNCL:
4432 case BUILT_IN_ROUND:
4433 case BUILT_IN_ROUNDF:
4434 case BUILT_IN_ROUNDL:
4435 case BUILT_IN_NEARBYINT:
4436 case BUILT_IN_NEARBYINTF:
4437 case BUILT_IN_NEARBYINTL:
4438 target = expand_builtin_mathfn (exp, target, subtarget);
4439 if (target)
4440 return target;
4441 break;
4442
4443 case BUILT_IN_POW:
4444 case BUILT_IN_POWF:
4445 case BUILT_IN_POWL:
4446 case BUILT_IN_ATAN2:
4447 case BUILT_IN_ATAN2F:
4448 case BUILT_IN_ATAN2L:
4449 if (! flag_unsafe_math_optimizations)
4450 break;
4451 target = expand_builtin_mathfn_2 (exp, target, subtarget);
4452 if (target)
4453 return target;
4454 break;
4455
4456 case BUILT_IN_APPLY_ARGS:
4457 return expand_builtin_apply_args ();
4458
4459 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
4460 FUNCTION with a copy of the parameters described by
4461 ARGUMENTS, and ARGSIZE. It returns a block of memory
4462 allocated on the stack into which are stored all the registers
4463 that might possibly be used for returning the result of a
4464 function. ARGUMENTS is the value returned by
4465 __builtin_apply_args. ARGSIZE is the number of bytes of
4466 arguments that must be copied. ??? How should this value be
4467 computed? We'll also need a safe worst case value for varargs
4468 functions. */
4469 case BUILT_IN_APPLY:
4470 if (!validate_arglist (arglist, POINTER_TYPE,
4471 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4472 && !validate_arglist (arglist, REFERENCE_TYPE,
4473 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4474 return const0_rtx;
4475 else
4476 {
4477 int i;
4478 tree t;
4479 rtx ops[3];
4480
4481 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
4482 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
4483
4484 return expand_builtin_apply (ops[0], ops[1], ops[2]);
4485 }
4486
4487 /* __builtin_return (RESULT) causes the function to return the
4488 value described by RESULT. RESULT is the address of the block of
4489 memory returned by __builtin_apply. */
4490 case BUILT_IN_RETURN:
4491 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4492 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
4493 NULL_RTX, VOIDmode, 0));
4494 return const0_rtx;
4495
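/* Editorial illustration (a hedged sketch, not part of the original
   source): __builtin_apply_args, __builtin_apply and __builtin_return
   are typically combined to forward an unknown argument list, as in a
   hand-written wrapper:

       extern double target_fn ();

       double
       wrapper ()
       {
         void *args = __builtin_apply_args ();
         void *result = __builtin_apply ((void (*) ()) target_fn,
                                         args, 64);
         __builtin_return (result);
       }

   The 64 is an assumed worst-case argument size in bytes; as the
   comment above notes, there is no portable way to compute it.  */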
4496 case BUILT_IN_SAVEREGS:
4497 return expand_builtin_saveregs ();
4498
4499 case BUILT_IN_ARGS_INFO:
4500 return expand_builtin_args_info (arglist);
4501
4502 /* Return the address of the first anonymous stack arg. */
4503 case BUILT_IN_NEXT_ARG:
4504 return expand_builtin_next_arg (arglist);
4505
4506 case BUILT_IN_CLASSIFY_TYPE:
4507 return expand_builtin_classify_type (arglist);
4508
4509 case BUILT_IN_CONSTANT_P:
4510 return expand_builtin_constant_p (arglist, target_mode);
4511
4512 case BUILT_IN_FRAME_ADDRESS:
4513 case BUILT_IN_RETURN_ADDRESS:
4514 return expand_builtin_frame_address (fndecl, arglist);
4515
4516 /* Returns the address of the area where the structure value is
4517 returned, or 0 if there is none. */
4518 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
4519 if (arglist != 0
4520 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
4521 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
4522 return const0_rtx;
4523 else
4524 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
4525
4526 case BUILT_IN_ALLOCA:
4527 target = expand_builtin_alloca (arglist, target);
4528 if (target)
4529 return target;
4530 break;
4531
4532 case BUILT_IN_FFS:
4533 case BUILT_IN_FFSL:
4534 case BUILT_IN_FFSLL:
4535 target = expand_builtin_unop (target_mode, arglist, target,
4536 subtarget, ffs_optab);
4537 if (target)
4538 return target;
4539 break;
4540
4541 case BUILT_IN_CLZ:
4542 case BUILT_IN_CLZL:
4543 case BUILT_IN_CLZLL:
4544 target = expand_builtin_unop (target_mode, arglist, target,
4545 subtarget, clz_optab);
4546 if (target)
4547 return target;
4548 break;
4549
4550 case BUILT_IN_CTZ:
4551 case BUILT_IN_CTZL:
4552 case BUILT_IN_CTZLL:
4553 target = expand_builtin_unop (target_mode, arglist, target,
4554 subtarget, ctz_optab);
4555 if (target)
4556 return target;
4557 break;
4558
4559 case BUILT_IN_POPCOUNT:
4560 case BUILT_IN_POPCOUNTL:
4561 case BUILT_IN_POPCOUNTLL:
4562 target = expand_builtin_unop (target_mode, arglist, target,
4563 subtarget, popcount_optab);
4564 if (target)
4565 return target;
4566 break;
4567
4568 case BUILT_IN_PARITY:
4569 case BUILT_IN_PARITYL:
4570 case BUILT_IN_PARITYLL:
4571 target = expand_builtin_unop (target_mode, arglist, target,
4572 subtarget, parity_optab);
4573 if (target)
4574 return target;
4575 break;
4576
4577 case BUILT_IN_STRLEN:
4578 target = expand_builtin_strlen (arglist, target, target_mode);
4579 if (target)
4580 return target;
4581 break;
4582
4583 case BUILT_IN_STRCPY:
4584 target = expand_builtin_strcpy (arglist, target, mode);
4585 if (target)
4586 return target;
4587 break;
4588
4589 case BUILT_IN_STRNCPY:
4590 target = expand_builtin_strncpy (arglist, target, mode);
4591 if (target)
4592 return target;
4593 break;
4594
4595 case BUILT_IN_STPCPY:
4596 target = expand_builtin_stpcpy (arglist, target, mode);
4597 if (target)
4598 return target;
4599 break;
4600
4601 case BUILT_IN_STRCAT:
4602 target = expand_builtin_strcat (arglist, target, mode);
4603 if (target)
4604 return target;
4605 break;
4606
4607 case BUILT_IN_STRNCAT:
4608 target = expand_builtin_strncat (arglist, target, mode);
4609 if (target)
4610 return target;
4611 break;
4612
4613 case BUILT_IN_STRSPN:
4614 target = expand_builtin_strspn (arglist, target, mode);
4615 if (target)
4616 return target;
4617 break;
4618
4619 case BUILT_IN_STRCSPN:
4620 target = expand_builtin_strcspn (arglist, target, mode);
4621 if (target)
4622 return target;
4623 break;
4624
4625 case BUILT_IN_STRSTR:
4626 target = expand_builtin_strstr (arglist, target, mode);
4627 if (target)
4628 return target;
4629 break;
4630
4631 case BUILT_IN_STRPBRK:
4632 target = expand_builtin_strpbrk (arglist, target, mode);
4633 if (target)
4634 return target;
4635 break;
4636
4637 case BUILT_IN_INDEX:
4638 case BUILT_IN_STRCHR:
4639 target = expand_builtin_strchr (arglist, target, mode);
4640 if (target)
4641 return target;
4642 break;
4643
4644 case BUILT_IN_RINDEX:
4645 case BUILT_IN_STRRCHR:
4646 target = expand_builtin_strrchr (arglist, target, mode);
4647 if (target)
4648 return target;
4649 break;
4650
4651 case BUILT_IN_MEMCPY:
4652 target = expand_builtin_memcpy (arglist, target, mode, /*endp=*/0);
4653 if (target)
4654 return target;
4655 break;
4656
4657 case BUILT_IN_MEMPCPY:
4658 target = expand_builtin_mempcpy (arglist, target, mode);
4659 if (target)
4660 return target;
4661 break;
4662
4663 case BUILT_IN_MEMMOVE:
4664 target = expand_builtin_memmove (arglist, target, mode);
4665 if (target)
4666 return target;
4667 break;
4668
4669 case BUILT_IN_BCOPY:
4670 target = expand_builtin_bcopy (arglist);
4671 if (target)
4672 return target;
4673 break;
4674
4675 case BUILT_IN_MEMSET:
4676 target = expand_builtin_memset (arglist, target, mode);
4677 if (target)
4678 return target;
4679 break;
4680
4681 case BUILT_IN_BZERO:
4682 target = expand_builtin_bzero (arglist);
4683 if (target)
4684 return target;
4685 break;
4686
4687 case BUILT_IN_STRCMP:
4688 target = expand_builtin_strcmp (exp, target, mode);
4689 if (target)
4690 return target;
4691 break;
4692
4693 case BUILT_IN_STRNCMP:
4694 target = expand_builtin_strncmp (exp, target, mode);
4695 if (target)
4696 return target;
4697 break;
4698
4699 case BUILT_IN_BCMP:
4700 case BUILT_IN_MEMCMP:
4701 target = expand_builtin_memcmp (exp, arglist, target, mode);
4702 if (target)
4703 return target;
4704 break;
4705
4706 case BUILT_IN_SETJMP:
4707 target = expand_builtin_setjmp (arglist, target);
4708 if (target)
4709 return target;
4710 break;
4711
4712 /* __builtin_longjmp is passed a pointer to an array of five words.
4713 It's similar to the C library longjmp function but works with
4714 __builtin_setjmp above. */
4715 case BUILT_IN_LONGJMP:
4716 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4717 break;
4718 else
4719 {
4720 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
4721 VOIDmode, 0);
4722 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
4723 NULL_RTX, VOIDmode, 0);
4724
4725 if (value != const1_rtx)
4726 {
4727 error ("__builtin_longjmp second argument must be 1");
4728 return const0_rtx;
4729 }
4730
4731 expand_builtin_longjmp (buf_addr, value);
4732 return const0_rtx;
4733 }
4734
4735 case BUILT_IN_TRAP:
4736 expand_builtin_trap ();
4737 return const0_rtx;
4738
4739 case BUILT_IN_FPUTS:
4740 target = expand_builtin_fputs (arglist, ignore,/*unlocked=*/ 0);
4741 if (target)
4742 return target;
4743 break;
4744 case BUILT_IN_FPUTS_UNLOCKED:
4745 target = expand_builtin_fputs (arglist, ignore,/*unlocked=*/ 1);
4746 if (target)
4747 return target;
4748 break;
4749
4750 /* Various hooks for the DWARF 2 __throw routine. */
4751 case BUILT_IN_UNWIND_INIT:
4752 expand_builtin_unwind_init ();
4753 return const0_rtx;
4754 case BUILT_IN_DWARF_CFA:
4755 return virtual_cfa_rtx;
4756 #ifdef DWARF2_UNWIND_INFO
4757 case BUILT_IN_DWARF_SP_COLUMN:
4758 return expand_builtin_dwarf_sp_column ();
4759 case BUILT_IN_INIT_DWARF_REG_SIZES:
4760 expand_builtin_init_dwarf_reg_sizes (TREE_VALUE (arglist));
4761 return const0_rtx;
4762 #endif
4763 case BUILT_IN_FROB_RETURN_ADDR:
4764 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
4765 case BUILT_IN_EXTRACT_RETURN_ADDR:
4766 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
4767 case BUILT_IN_EH_RETURN:
4768 expand_builtin_eh_return (TREE_VALUE (arglist),
4769 TREE_VALUE (TREE_CHAIN (arglist)));
4770 return const0_rtx;
4771 #ifdef EH_RETURN_DATA_REGNO
4772 case BUILT_IN_EH_RETURN_DATA_REGNO:
4773 return expand_builtin_eh_return_data_regno (arglist);
4774 #endif
4775 case BUILT_IN_VA_START:
4776 case BUILT_IN_STDARG_START:
4777 return expand_builtin_va_start (arglist);
4778 case BUILT_IN_VA_END:
4779 return expand_builtin_va_end (arglist);
4780 case BUILT_IN_VA_COPY:
4781 return expand_builtin_va_copy (arglist);
4782 case BUILT_IN_EXPECT:
4783 return expand_builtin_expect (arglist, target);
4784 case BUILT_IN_PREFETCH:
4785 expand_builtin_prefetch (arglist);
4786 return const0_rtx;
4787
4788
4789 default: /* just do a library call for an unknown builtin */
4790 if (!DECL_ASSEMBLER_NAME_SET_P (fndecl))
4791 error ("built-in function `%s' not currently supported",
4792 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
4793 }
4794
4795 /* The switch statement above can drop through to cause the function
4796 to be called normally. */
4797 return expand_call (exp, target, ignore);
4798 }
4799
4800 /* Determine whether a tree node represents a call to a built-in
4801 math function. If the tree T is a call to a built-in function
4802 taking a single real argument, then the return value is the
4803 DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT. Otherwise
4804 the return value is END_BUILTINS. */
4805
4806 enum built_in_function
4807 builtin_mathfn_code (t)
4808 tree t;
4809 {
4810 tree fndecl, arglist;
4811
4812 if (TREE_CODE (t) != CALL_EXPR
4813 || TREE_CODE (TREE_OPERAND (t, 0)) != ADDR_EXPR)
4814 return END_BUILTINS;
4815
4816 fndecl = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
4817 if (TREE_CODE (fndecl) != FUNCTION_DECL
4818 || ! DECL_BUILT_IN (fndecl)
4819 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
4820 return END_BUILTINS;
4821
4822 arglist = TREE_OPERAND (t, 1);
4823 if (! arglist
4824 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
4825 return END_BUILTINS;
4826
4827 arglist = TREE_CHAIN (arglist);
4828 switch (DECL_FUNCTION_CODE (fndecl))
4829 {
4830 case BUILT_IN_POW:
4831 case BUILT_IN_POWF:
4832 case BUILT_IN_POWL:
4833 case BUILT_IN_ATAN2:
4834 case BUILT_IN_ATAN2F:
4835 case BUILT_IN_ATAN2L:
4836 if (! arglist
4837 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE
4838 || TREE_CHAIN (arglist))
4839 return END_BUILTINS;
4840 break;
4841
4842 default:
4843 if (arglist)
4844 return END_BUILTINS;
4845 break;
4846 }
4847
4848 return DECL_FUNCTION_CODE (fndecl);
4849 }
4850
4851 /* Fold a call to __builtin_constant_p, if we know it will evaluate to a
4852 constant. ARGLIST is the argument list of the call. */
4853
4854 static tree
4855 fold_builtin_constant_p (arglist)
4856 tree arglist;
4857 {
4858 if (arglist == 0)
4859 return 0;
4860
4861 arglist = TREE_VALUE (arglist);
4862
4863 /* We return 1 for a numeric type that's known to be a constant
4864 value at compile-time or for an aggregate type that's a
4865 literal constant. */
4866 STRIP_NOPS (arglist);
4867
4868 /* If we know this is a constant, return the constant one. */
4869 if (TREE_CODE_CLASS (TREE_CODE (arglist)) == 'c'
4870 || (TREE_CODE (arglist) == CONSTRUCTOR
4871 && TREE_CONSTANT (arglist))
4872 || (TREE_CODE (arglist) == ADDR_EXPR
4873 && TREE_CODE (TREE_OPERAND (arglist, 0)) == STRING_CST))
4874 return integer_one_node;
4875
4876 /* If we aren't going to be running CSE or this expression
4877 has side effects, show we don't know it to be a constant.
4878 Likewise if it's a pointer or aggregate type, since in those
4879 cases we only want literals, which are only optimized
4880 when generating RTL, not later.
4881 And finally, if we are compiling an initializer, not code, we
4882 need to return a definite result now; there's not going to be any
4883 more optimization done. */
4884 if (TREE_SIDE_EFFECTS (arglist) || cse_not_expected
4885 || AGGREGATE_TYPE_P (TREE_TYPE (arglist))
4886 || POINTER_TYPE_P (TREE_TYPE (arglist))
4887 || cfun == 0)
4888 return integer_zero_node;
4889
4890 return 0;
4891 }
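/* Editorial illustration (not part of the original source): with the
   rules above,

       __builtin_constant_p (42)           folds to 1 immediately,
       __builtin_constant_p ("abc")        folds to 1 (address of a
                                           STRING_CST),
       __builtin_constant_p (some_global)  folds to 0 once no further
                                           CSE is expected,

   and anything still undecided is deferred; leftover CONSTANT_P_RTX
   nodes are resolved by purge_builtin_constant_p near the end of this
   file.  */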
4892
4893 /* Fold a call to __builtin_classify_type. */
4894
4895 static tree
4896 fold_builtin_classify_type (arglist)
4897 tree arglist;
4898 {
4899 if (arglist == 0)
4900 return build_int_2 (no_type_class, 0);
4901
4902 return build_int_2 (type_to_class (TREE_TYPE (TREE_VALUE (arglist))), 0);
4903 }
4904
4905 /* Fold a call to __builtin_inf or __builtin_huge_val. */
4906
4907 static tree
4908 fold_builtin_inf (type, warn)
4909 tree type;
4910 int warn;
4911 {
4912 REAL_VALUE_TYPE real;
4913
4914 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
4915 warning ("target format does not support infinity");
4916
4917 real_inf (&real);
4918 return build_real (type, real);
4919 }
4920
4921 /* Fold a call to __builtin_nan or __builtin_nans. */
4922
4923 static tree
4924 fold_builtin_nan (arglist, type, quiet)
4925 tree arglist, type;
4926 int quiet;
4927 {
4928 REAL_VALUE_TYPE real;
4929 const char *str;
4930
4931 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4932 return 0;
4933 str = c_getstr (TREE_VALUE (arglist));
4934 if (!str)
4935 return 0;
4936
4937 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
4938 return 0;
4939
4940 return build_real (type, real);
4941 }
4942
4943 /* EXP is assumed to be a builtin call across which truncation can be
4944 propagated (for instance floor((double)f) == (double)floorf (f)).
4945 Do the transformation. */
4946 static tree
4947 fold_trunc_transparent_mathfn (exp)
4948 tree exp;
4949 {
4950 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4951 tree arglist = TREE_OPERAND (exp, 1);
4952 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4953
4954 if (optimize && validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
4955 {
4956 tree arg0 = strip_float_extensions (TREE_VALUE (arglist));
4957 tree ftype = TREE_TYPE (exp);
4958 tree newtype = TREE_TYPE (arg0);
4959 tree decl;
4960
4961 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
4962 && (decl = mathfn_built_in (newtype, fcode)))
4963 {
4964 arglist =
4965 build_tree_list (NULL_TREE, fold (convert (newtype, arg0)));
4966 return convert (ftype,
4967 build_function_call_expr (decl, arglist));
4968 }
4969 }
4970 return 0;
4971 }
4972
4973 /* Used by constant folding to eliminate some builtin calls early. EXP is
4974 the CALL_EXPR of a call to a builtin function. */
4975
4976 tree
4977 fold_builtin (exp)
4978 tree exp;
4979 {
4980 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4981 tree arglist = TREE_OPERAND (exp, 1);
4982 tree type = TREE_TYPE (TREE_TYPE (fndecl));
4983
4984 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
4985 return 0;
4986
4987 switch (DECL_FUNCTION_CODE (fndecl))
4988 {
4989 case BUILT_IN_CONSTANT_P:
4990 return fold_builtin_constant_p (arglist);
4991
4992 case BUILT_IN_CLASSIFY_TYPE:
4993 return fold_builtin_classify_type (arglist);
4994
4995 case BUILT_IN_STRLEN:
4996 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4997 {
4998 tree len = c_strlen (TREE_VALUE (arglist));
4999 if (len)
5000 {
5001 /* Convert from the internal "sizetype" type to "size_t". */
5002 if (size_type_node)
5003 len = convert (size_type_node, len);
5004 return len;
5005 }
5006 }
5007 break;
5008
5009 case BUILT_IN_SQRT:
5010 case BUILT_IN_SQRTF:
5011 case BUILT_IN_SQRTL:
5012 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5013 {
5014 enum built_in_function fcode;
5015 tree arg = TREE_VALUE (arglist);
5016
5017 /* Optimize sqrt of constant value. */
5018 if (TREE_CODE (arg) == REAL_CST
5019 && ! TREE_CONSTANT_OVERFLOW (arg))
5020 {
5021 REAL_VALUE_TYPE r, x;
5022
5023 x = TREE_REAL_CST (arg);
5024 if (real_sqrt (&r, TYPE_MODE (type), &x)
5025 || (!flag_trapping_math && !flag_errno_math))
5026 return build_real (type, r);
5027 }
5028
5029 /* Optimize sqrt(exp(x)) = exp(x*0.5). */
5030 fcode = builtin_mathfn_code (arg);
5031 if (flag_unsafe_math_optimizations
5032 && (fcode == BUILT_IN_EXP
5033 || fcode == BUILT_IN_EXPF
5034 || fcode == BUILT_IN_EXPL))
5035 {
5036 tree expfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
5037 arg = fold (build (MULT_EXPR, type,
5038 TREE_VALUE (TREE_OPERAND (arg, 1)),
5039 build_real (type, dconsthalf)));
5040 arglist = build_tree_list (NULL_TREE, arg);
5041 return build_function_call_expr (expfn, arglist);
5042 }
5043
5044 /* Optimize sqrt(pow(x,y)) = pow(x,y*0.5). */
5045 if (flag_unsafe_math_optimizations
5046 && (fcode == BUILT_IN_POW
5047 || fcode == BUILT_IN_POWF
5048 || fcode == BUILT_IN_POWL))
5049 {
5050 tree powfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
5051 tree arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
5052 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
5053 tree narg1 = fold (build (MULT_EXPR, type, arg1,
5054 build_real (type, dconsthalf)));
5055 arglist = tree_cons (NULL_TREE, arg0,
5056 build_tree_list (NULL_TREE, narg1));
5057 return build_function_call_expr (powfn, arglist);
5058 }
5059 }
5060 break;
5061
5062 case BUILT_IN_SIN:
5063 case BUILT_IN_SINF:
5064 case BUILT_IN_SINL:
5065 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5066 {
5067 tree arg = TREE_VALUE (arglist);
5068
5069 /* Optimize sin(0.0) = 0.0. */
5070 if (real_zerop (arg))
5071 return build_real (type, dconst0);
5072 }
5073 break;
5074
5075 case BUILT_IN_COS:
5076 case BUILT_IN_COSF:
5077 case BUILT_IN_COSL:
5078 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5079 {
5080 tree arg = TREE_VALUE (arglist);
5081
5082 /* Optimize cos(0.0) = 1.0. */
5083 if (real_zerop (arg))
5084 return build_real (type, dconst1);
5085 }
5086 break;
5087
5088 case BUILT_IN_EXP:
5089 case BUILT_IN_EXPF:
5090 case BUILT_IN_EXPL:
5091 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5092 {
5093 enum built_in_function fcode;
5094 tree arg = TREE_VALUE (arglist);
5095
5096 /* Optimize exp(0.0) = 1.0. */
5097 if (real_zerop (arg))
5098 return build_real (type, dconst1);
5099
5100 /* Optimize exp(log(x)) = x. */
5101 fcode = builtin_mathfn_code (arg);
5102 if (flag_unsafe_math_optimizations
5103 && (fcode == BUILT_IN_LOG
5104 || fcode == BUILT_IN_LOGF
5105 || fcode == BUILT_IN_LOGL))
5106 return TREE_VALUE (TREE_OPERAND (arg, 1));
5107 }
5108 break;
5109
5110 case BUILT_IN_LOG:
5111 case BUILT_IN_LOGF:
5112 case BUILT_IN_LOGL:
5113 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5114 {
5115 enum built_in_function fcode;
5116 tree arg = TREE_VALUE (arglist);
5117
5118 /* Optimize log(1.0) = 0.0. */
5119 if (real_onep (arg))
5120 return build_real (type, dconst0);
5121
5122 /* Optimize log(exp(x)) = x. */
5123 fcode = builtin_mathfn_code (arg);
5124 if (flag_unsafe_math_optimizations
5125 && (fcode == BUILT_IN_EXP
5126 || fcode == BUILT_IN_EXPF
5127 || fcode == BUILT_IN_EXPL))
5128 return TREE_VALUE (TREE_OPERAND (arg, 1));
5129
5130 /* Optimize log(sqrt(x)) = log(x)*0.5. */
5131 if (flag_unsafe_math_optimizations
5132 && (fcode == BUILT_IN_SQRT
5133 || fcode == BUILT_IN_SQRTF
5134 || fcode == BUILT_IN_SQRTL))
5135 {
5136 tree logfn = build_function_call_expr (fndecl,
5137 TREE_OPERAND (arg, 1));
5138 return fold (build (MULT_EXPR, type, logfn,
5139 build_real (type, dconsthalf)));
5140 }
5141
5142 /* Optimize log(pow(x,y)) = y*log(x). */
5143 if (flag_unsafe_math_optimizations
5144 && (fcode == BUILT_IN_POW
5145 || fcode == BUILT_IN_POWF
5146 || fcode == BUILT_IN_POWL))
5147 {
5148 tree arg0, arg1, logfn;
5149
5150 arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
5151 arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
5152 arglist = build_tree_list (NULL_TREE, arg0);
5153 logfn = build_function_call_expr (fndecl, arglist);
5154 return fold (build (MULT_EXPR, type, arg1, logfn));
5155 }
5156 }
5157 break;
5158
5159 case BUILT_IN_POW:
5160 case BUILT_IN_POWF:
5161 case BUILT_IN_POWL:
5162 if (validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5163 {
5164 enum built_in_function fcode;
5165 tree arg0 = TREE_VALUE (arglist);
5166 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5167
5168 /* Optimize pow(1.0,y) = 1.0. */
5169 if (real_onep (arg0))
5170 return omit_one_operand (type, build_real (type, dconst1), arg1);
5171
5172 if (TREE_CODE (arg1) == REAL_CST
5173 && ! TREE_CONSTANT_OVERFLOW (arg1))
5174 {
5175 REAL_VALUE_TYPE c;
5176 c = TREE_REAL_CST (arg1);
5177
5178 /* Optimize pow(x,0.0) = 1.0. */
5179 if (REAL_VALUES_EQUAL (c, dconst0))
5180 return omit_one_operand (type, build_real (type, dconst1),
5181 arg0);
5182
5183 /* Optimize pow(x,1.0) = x. */
5184 if (REAL_VALUES_EQUAL (c, dconst1))
5185 return arg0;
5186
5187 /* Optimize pow(x,-1.0) = 1.0/x. */
5188 if (REAL_VALUES_EQUAL (c, dconstm1))
5189 return fold (build (RDIV_EXPR, type,
5190 build_real (type, dconst1),
5191 arg0));
5192
5193 /* Optimize pow(x,2.0) = x*x. */
5194 if (REAL_VALUES_EQUAL (c, dconst2)
5195 && (*lang_hooks.decls.global_bindings_p) () == 0
5196 && ! contains_placeholder_p (arg0))
5197 {
5198 arg0 = save_expr (arg0);
5199 return fold (build (MULT_EXPR, type, arg0, arg0));
5200 }
5201
5202 /* Optimize pow(x,-2.0) = 1.0/(x*x). */
5203 if (flag_unsafe_math_optimizations
5204 && REAL_VALUES_EQUAL (c, dconstm2)
5205 && (*lang_hooks.decls.global_bindings_p) () == 0
5206 && ! contains_placeholder_p (arg0))
5207 {
5208 arg0 = save_expr (arg0);
5209 return fold (build (RDIV_EXPR, type,
5210 build_real (type, dconst1),
5211 fold (build (MULT_EXPR, type,
5212 arg0, arg0))));
5213 }
5214
5215 /* Optimize pow(x,0.5) = sqrt(x). */
5216 if (flag_unsafe_math_optimizations
5217 && REAL_VALUES_EQUAL (c, dconsthalf))
5218 {
5219 tree sqrtfn;
5220
5221 fcode = DECL_FUNCTION_CODE (fndecl);
5222 if (fcode == BUILT_IN_POW)
5223 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRT];
5224 else if (fcode == BUILT_IN_POWF)
5225 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTF];
5226 else if (fcode == BUILT_IN_POWL)
5227 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTL];
5228 else
5229 sqrtfn = NULL_TREE;
5230
5231 if (sqrtfn != NULL_TREE)
5232 {
5233 tree arglist = build_tree_list (NULL_TREE, arg0);
5234 return build_function_call_expr (sqrtfn, arglist);
5235 }
5236 }
5237
5238 /* Attempt to evaluate pow at compile-time. */
5239 if (TREE_CODE (arg0) == REAL_CST
5240 && ! TREE_CONSTANT_OVERFLOW (arg0))
5241 {
5242 REAL_VALUE_TYPE cint;
5243 HOST_WIDE_INT n;
5244
5245 n = real_to_integer (&c);
5246 real_from_integer (&cint, VOIDmode, n,
5247 n < 0 ? -1 : 0, 0);
5248 if (real_identical (&c, &cint))
5249 {
5250 REAL_VALUE_TYPE x;
5251 bool inexact;
5252
5253 x = TREE_REAL_CST (arg0);
5254 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
5255 if (flag_unsafe_math_optimizations || !inexact)
5256 return build_real (type, x);
5257 }
5258 }
5259 }
5260
5261 /* Optimize pow(exp(x),y) = exp(x*y). */
5262 fcode = builtin_mathfn_code (arg0);
5263 if (flag_unsafe_math_optimizations
5264 && (fcode == BUILT_IN_EXP
5265 || fcode == BUILT_IN_EXPF
5266 || fcode == BUILT_IN_EXPL))
5267 {
5268 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
5269 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5270 arg = fold (build (MULT_EXPR, type, arg, arg1));
5271 arglist = build_tree_list (NULL_TREE, arg);
5272 return build_function_call_expr (expfn, arglist);
5273 }
5274
5275 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
5276 if (flag_unsafe_math_optimizations
5277 && (fcode == BUILT_IN_SQRT
5278 || fcode == BUILT_IN_SQRTF
5279 || fcode == BUILT_IN_SQRTL))
5280 {
5281 tree narg0 = TREE_VALUE (TREE_OPERAND (arg0, 1));
5282 tree narg1 = fold (build (MULT_EXPR, type, arg1,
5283 build_real (type, dconsthalf)));
5284
5285 arglist = tree_cons (NULL_TREE, narg0,
5286 build_tree_list (NULL_TREE, narg1));
5287 return build_function_call_expr (fndecl, arglist);
5288 }
5289
5290 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
5291 if (flag_unsafe_math_optimizations
5292 && (fcode == BUILT_IN_POW
5293 || fcode == BUILT_IN_POWF
5294 || fcode == BUILT_IN_POWL))
5295 {
5296 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
5297 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
5298 tree narg1 = fold (build (MULT_EXPR, type, arg01, arg1));
5299 arglist = tree_cons (NULL_TREE, arg00,
5300 build_tree_list (NULL_TREE, narg1));
5301 return build_function_call_expr (fndecl, arglist);
5302 }
5303 }
5304 break;
5305
5306 case BUILT_IN_INF:
5307 case BUILT_IN_INFF:
5308 case BUILT_IN_INFL:
5309 return fold_builtin_inf (type, true);
5310
5311 case BUILT_IN_HUGE_VAL:
5312 case BUILT_IN_HUGE_VALF:
5313 case BUILT_IN_HUGE_VALL:
5314 return fold_builtin_inf (type, false);
5315
5316 case BUILT_IN_NAN:
5317 case BUILT_IN_NANF:
5318 case BUILT_IN_NANL:
5319 return fold_builtin_nan (arglist, type, true);
5320
5321 case BUILT_IN_NANS:
5322 case BUILT_IN_NANSF:
5323 case BUILT_IN_NANSL:
5324 return fold_builtin_nan (arglist, type, false);
5325
5326 case BUILT_IN_FLOOR:
5327 case BUILT_IN_FLOORF:
5328 case BUILT_IN_FLOORL:
5329 case BUILT_IN_CEIL:
5330 case BUILT_IN_CEILF:
5331 case BUILT_IN_CEILL:
5332 case BUILT_IN_TRUNC:
5333 case BUILT_IN_TRUNCF:
5334 case BUILT_IN_TRUNCL:
5335 case BUILT_IN_ROUND:
5336 case BUILT_IN_ROUNDF:
5337 case BUILT_IN_ROUNDL:
5338 case BUILT_IN_NEARBYINT:
5339 case BUILT_IN_NEARBYINTF:
5340 case BUILT_IN_NEARBYINTL:
5341 return fold_trunc_transparent_mathfn (exp);
5342
5343 default:
5344 break;
5345 }
5346
5347 return 0;
5348 }
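/* Editorial summary (illustrative, not part of the original source):
   a few of the folds performed above, expressed at the source level:

       strlen ("abc")     =>  3
       sqrt (4.0)         =>  2.0
       pow (x, 2.0)       =>  x * x
       pow (x, 0.5)       =>  sqrt (x)   with -funsafe-math-optimizations
       log (exp (x))      =>  x          with -funsafe-math-optimizations
       __builtin_inf ()   =>  the target's infinity (with a warning if
                              the floating-point format has none).  */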
5349
5350 /* Conveniently construct a function call expression. */
5351
5352 tree
5353 build_function_call_expr (fn, arglist)
5354 tree fn, arglist;
5355 {
5356 tree call_expr;
5357
5358 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
5359 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
5360 call_expr, arglist);
5361 TREE_SIDE_EFFECTS (call_expr) = 1;
5362 return fold (call_expr);
5363 }
5364
5365 /* This function validates the types of a function call argument list
5366 represented as a tree chain of parameters against a specified list
5367 of tree_codes. If the last specifier is a 0, that represents an
5368 ellipsis; otherwise the last specifier must be a VOID_TYPE. */
5369
5370 static int
5371 validate_arglist VPARAMS ((tree arglist, ...))
5372 {
5373 enum tree_code code;
5374 int res = 0;
5375
5376 VA_OPEN (ap, arglist);
5377 VA_FIXEDARG (ap, tree, arglist);
5378
5379 do
5380 {
5381 code = va_arg (ap, enum tree_code);
5382 switch (code)
5383 {
5384 case 0:
5385 /* This signifies an ellipsis; any further arguments are all OK. */
5386 res = 1;
5387 goto end;
5388 case VOID_TYPE:
5389 /* This signifies an endlink; if no arguments remain, return
5390 true, otherwise return false. */
5391 res = arglist == 0;
5392 goto end;
5393 default:
5394 /* If no parameters remain or the parameter's code does not
5395 match the specified code, return false. Otherwise continue
5396 checking any remaining arguments. */
5397 if (arglist == 0
5398 || code != TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))))
5399 goto end;
5400 break;
5401 }
5402 arglist = TREE_CHAIN (arglist);
5403 }
5404 while (1);
5405
5406 /* We need gotos here since we can only have one VA_CLOSE in a
5407 function. */
5408 end: ;
5409 VA_CLOSE (ap);
5410
5411 return res;
5412 }
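/* Editorial illustration (not part of the original source): a typical
   call from the expanders above,

       validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)

   accepts exactly two arguments of pointer and integer type; ending
   the specifier list with 0 instead of VOID_TYPE would allow any
   further arguments after the ones named.  */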
5413
5414 /* Default version of target-specific builtin setup that does nothing. */
5415
5416 void
5417 default_init_builtins ()
5418 {
5419 }
5420
5421 /* Default target-specific builtin expander that does nothing. */
5422
5423 rtx
5424 default_expand_builtin (exp, target, subtarget, mode, ignore)
5425 tree exp ATTRIBUTE_UNUSED;
5426 rtx target ATTRIBUTE_UNUSED;
5427 rtx subtarget ATTRIBUTE_UNUSED;
5428 enum machine_mode mode ATTRIBUTE_UNUSED;
5429 int ignore ATTRIBUTE_UNUSED;
5430 {
5431 return NULL_RTX;
5432 }
5433
5434 /* Instantiate all remaining CONSTANT_P_RTX nodes. */
5435
5436 void
5437 purge_builtin_constant_p ()
5438 {
5439 rtx insn, set, arg, new, note;
5440
5441 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5442 if (INSN_P (insn)
5443 && (set = single_set (insn)) != NULL_RTX
5444 && (GET_CODE (arg = SET_SRC (set)) == CONSTANT_P_RTX
5445 || (GET_CODE (arg) == SUBREG
5446 && (GET_CODE (arg = SUBREG_REG (arg))
5447 == CONSTANT_P_RTX))))
5448 {
5449 arg = XEXP (arg, 0);
5450 new = CONSTANT_P (arg) ? const1_rtx : const0_rtx;
5451 validate_change (insn, &SET_SRC (set), new, 0);
5452
5453 /* Remove the REG_EQUAL note from the insn. */
5454 if ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0)
5455 remove_note (insn, note);
5456 }
5457 }
5458
5459 /* Returns true if EXP represents data that would potentially reside
5460 in a readonly section. */
5461
5462 static bool
5463 readonly_data_expr (tree exp)
5464 {
5465 STRIP_NOPS (exp);
5466
5467 if (TREE_CODE (exp) == ADDR_EXPR)
5468 return decl_readonly_section (TREE_OPERAND (exp, 0), 0);
5469 else
5470 return false;
5471 }