1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "expr.h"
37 #include "optabs.h"
38 #include "libfuncs.h"
39 #include "recog.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "toplev.h"
43 #include "predict.h"
44 #include "tm_p.h"
45 #include "target.h"
46 #include "langhooks.h"
47
48 #define CALLED_AS_BUILT_IN(NODE) \
49 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
50
51 /* Register mappings for target machines without register windows. */
52 #ifndef INCOMING_REGNO
53 #define INCOMING_REGNO(OUT) (OUT)
54 #endif
55 #ifndef OUTGOING_REGNO
56 #define OUTGOING_REGNO(IN) (IN)
57 #endif
58
59 #ifndef PAD_VARARGS_DOWN
60 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
61 #endif
62
63 /* Define the names of the builtin function types and codes. */
64 const char *const built_in_class_names[4]
65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
66
67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT) STRINGX(X),
68 const char *const built_in_names[(int) END_BUILTINS] =
69 {
70 #include "builtins.def"
71 };
72 #undef DEF_BUILTIN
73
 74 /* Set up an array of _DECL trees, making sure each element is
75 initialized to NULL_TREE. */
76 tree built_in_decls[(int) END_BUILTINS];
77
78 static int get_pointer_alignment PARAMS ((tree, unsigned int));
79 static tree c_strlen PARAMS ((tree));
80 static const char *c_getstr PARAMS ((tree));
81 static rtx c_readstr PARAMS ((const char *,
82 enum machine_mode));
83 static int target_char_cast PARAMS ((tree, char *));
84 static rtx get_memory_rtx PARAMS ((tree));
85 static int apply_args_size PARAMS ((void));
86 static int apply_result_size PARAMS ((void));
87 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
88 static rtx result_vector PARAMS ((int, rtx));
89 #endif
90 static rtx expand_builtin_setjmp PARAMS ((tree, rtx));
91 static void expand_builtin_prefetch PARAMS ((tree));
92 static rtx expand_builtin_apply_args PARAMS ((void));
93 static rtx expand_builtin_apply_args_1 PARAMS ((void));
94 static rtx expand_builtin_apply PARAMS ((rtx, rtx, rtx));
95 static void expand_builtin_return PARAMS ((rtx));
96 static enum type_class type_to_class PARAMS ((tree));
97 static rtx expand_builtin_classify_type PARAMS ((tree));
98 static rtx expand_builtin_mathfn PARAMS ((tree, rtx, rtx));
99 static rtx expand_builtin_constant_p PARAMS ((tree));
100 static rtx expand_builtin_args_info PARAMS ((tree));
101 static rtx expand_builtin_next_arg PARAMS ((tree));
102 static rtx expand_builtin_va_start PARAMS ((tree));
103 static rtx expand_builtin_va_end PARAMS ((tree));
104 static rtx expand_builtin_va_copy PARAMS ((tree));
105 static rtx expand_builtin_memcmp PARAMS ((tree, tree, rtx,
106 enum machine_mode));
107 static rtx expand_builtin_strcmp PARAMS ((tree, rtx,
108 enum machine_mode));
109 static rtx expand_builtin_strncmp PARAMS ((tree, rtx,
110 enum machine_mode));
111 static rtx builtin_memcpy_read_str PARAMS ((PTR, HOST_WIDE_INT,
112 enum machine_mode));
113 static rtx expand_builtin_strcat PARAMS ((tree, rtx,
114 enum machine_mode));
115 static rtx expand_builtin_strncat PARAMS ((tree, rtx,
116 enum machine_mode));
117 static rtx expand_builtin_strspn PARAMS ((tree, rtx,
118 enum machine_mode));
119 static rtx expand_builtin_strcspn PARAMS ((tree, rtx,
120 enum machine_mode));
121 static rtx expand_builtin_memcpy PARAMS ((tree, rtx,
122 enum machine_mode));
123 static rtx expand_builtin_strcpy PARAMS ((tree, rtx,
124 enum machine_mode));
125 static rtx builtin_strncpy_read_str PARAMS ((PTR, HOST_WIDE_INT,
126 enum machine_mode));
127 static rtx expand_builtin_strncpy PARAMS ((tree, rtx,
128 enum machine_mode));
129 static rtx builtin_memset_read_str PARAMS ((PTR, HOST_WIDE_INT,
130 enum machine_mode));
131 static rtx builtin_memset_gen_str PARAMS ((PTR, HOST_WIDE_INT,
132 enum machine_mode));
133 static rtx expand_builtin_memset PARAMS ((tree, rtx,
134 enum machine_mode));
135 static rtx expand_builtin_bzero PARAMS ((tree));
136 static rtx expand_builtin_strlen PARAMS ((tree, rtx));
137 static rtx expand_builtin_strstr PARAMS ((tree, rtx,
138 enum machine_mode));
139 static rtx expand_builtin_strpbrk PARAMS ((tree, rtx,
140 enum machine_mode));
141 static rtx expand_builtin_strchr PARAMS ((tree, rtx,
142 enum machine_mode));
143 static rtx expand_builtin_strrchr PARAMS ((tree, rtx,
144 enum machine_mode));
145 static rtx expand_builtin_alloca PARAMS ((tree, rtx));
146 static rtx expand_builtin_ffs PARAMS ((tree, rtx, rtx));
147 static rtx expand_builtin_frame_address PARAMS ((tree));
148 static rtx expand_builtin_fputs PARAMS ((tree, int, int));
149 static tree stabilize_va_list PARAMS ((tree, int));
150 static rtx expand_builtin_expect PARAMS ((tree, rtx));
151 static tree fold_builtin_constant_p PARAMS ((tree));
152 static tree fold_builtin_classify_type PARAMS ((tree));
153 static tree fold_builtin_inf PARAMS ((tree, int));
154 static tree fold_builtin_nan PARAMS ((tree, tree, int));
155 static int validate_arglist PARAMS ((tree, ...));
156
157 /* Return the alignment in bits of EXP, a pointer valued expression.
158 But don't return more than MAX_ALIGN no matter what.
159 The alignment returned is, by default, the alignment of the thing that
160 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
161
162 Otherwise, look at the expression to see if we can do better, i.e., if the
163 expression is actually pointing at an object whose alignment is tighter. */
164
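/* Worked example (a sketch, not part of the original source): for a
   pointer expression such as (char *) &d + 4, where d is a double whose
   DECL_ALIGN is 64 bits, the constant byte offset of 4 limits what can
   be deduced; with MAX_ALIGN == 64 this routine returns 32, since an
   address that is 64-bit aligned plus 4 bytes is only guaranteed to be
   32-bit aligned. */
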
165 static int
166 get_pointer_alignment (exp, max_align)
167 tree exp;
168 unsigned int max_align;
169 {
170 unsigned int align, inner;
171
172 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
173 return 0;
174
175 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
176 align = MIN (align, max_align);
177
178 while (1)
179 {
180 switch (TREE_CODE (exp))
181 {
182 case NOP_EXPR:
183 case CONVERT_EXPR:
184 case NON_LVALUE_EXPR:
185 exp = TREE_OPERAND (exp, 0);
186 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
187 return align;
188
189 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
190 align = MIN (inner, max_align);
191 break;
192
193 case PLUS_EXPR:
194 /* If sum of pointer + int, restrict our maximum alignment to that
195 imposed by the integer. If not, we can't do any better than
196 ALIGN. */
197 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
198 return align;
199
200 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
201 & (max_align / BITS_PER_UNIT - 1))
202 != 0)
203 max_align >>= 1;
204
205 exp = TREE_OPERAND (exp, 0);
206 break;
207
208 case ADDR_EXPR:
209 /* See what we are pointing at and look at its alignment. */
210 exp = TREE_OPERAND (exp, 0);
211 if (TREE_CODE (exp) == FUNCTION_DECL)
212 align = FUNCTION_BOUNDARY;
213 else if (DECL_P (exp))
214 align = DECL_ALIGN (exp);
215 #ifdef CONSTANT_ALIGNMENT
216 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
217 align = CONSTANT_ALIGNMENT (exp, align);
218 #endif
219 return MIN (align, max_align);
220
221 default:
222 return align;
223 }
224 }
225 }
226
227 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
 228 way, because the string itself could contain a zero byte in the middle.
229 TREE_STRING_LENGTH is the size of the character array, not the string.
230
231 The value returned is of type `ssizetype'.
232
233 Unfortunately, string_constant can't access the values of const char
 234 arrays with initializers, so neither can we here. */
235
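/* Behavior sketch (not part of the original source), roughly:

     c_strlen (&"hello world"[6])   returns ssize_int (5)
     c_strlen (&"hello"[i])         returns 5 - i as a tree, since the
                                    string has no embedded zero bytes
     c_strlen (&"foo\0bar"[i])      returns 0 when i is not a
                                    compile-time constant

   A constant offset outside the string draws a warning and also returns
   0, so that strlen is called at runtime. */
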
236 static tree
237 c_strlen (src)
238 tree src;
239 {
240 tree offset_node;
241 HOST_WIDE_INT offset;
242 int max;
243 const char *ptr;
244
245 src = string_constant (src, &offset_node);
246 if (src == 0)
247 return 0;
248
249 max = TREE_STRING_LENGTH (src) - 1;
250 ptr = TREE_STRING_POINTER (src);
251
252 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
253 {
254 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
255 compute the offset to the following null if we don't know where to
256 start searching for it. */
257 int i;
258
259 for (i = 0; i < max; i++)
260 if (ptr[i] == 0)
261 return 0;
262
263 /* We don't know the starting offset, but we do know that the string
264 has no internal zero bytes. We can assume that the offset falls
265 within the bounds of the string; otherwise, the programmer deserves
266 what he gets. Subtract the offset from the length of the string,
267 and return that. This would perhaps not be valid if we were dealing
268 with named arrays in addition to literal string constants. */
269
270 return size_diffop (size_int (max), offset_node);
271 }
272
273 /* We have a known offset into the string. Start searching there for
274 a null character if we can represent it as a single HOST_WIDE_INT. */
275 if (offset_node == 0)
276 offset = 0;
277 else if (! host_integerp (offset_node, 0))
278 offset = -1;
279 else
280 offset = tree_low_cst (offset_node, 0);
281
282 /* If the offset is known to be out of bounds, warn, and call strlen at
283 runtime. */
284 if (offset < 0 || offset > max)
285 {
286 warning ("offset outside bounds of constant string");
287 return 0;
288 }
289
290 /* Use strlen to search for the first zero byte. Since any strings
291 constructed with build_string will have nulls appended, we win even
292 if we get handed something like (char[4])"abcd".
293
294 Since OFFSET is our starting index into the string, no further
295 calculation is needed. */
296 return ssize_int (strlen (ptr + offset));
297 }
298
299 /* Return a char pointer for a C string if it is a string constant
300 or sum of string constant and integer constant. */
301
302 static const char *
303 c_getstr (src)
304 tree src;
305 {
306 tree offset_node;
307
308 src = string_constant (src, &offset_node);
309 if (src == 0)
310 return 0;
311
312 if (offset_node == 0)
313 return TREE_STRING_POINTER (src);
314 else if (!host_integerp (offset_node, 1)
315 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
316 return 0;
317
318 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
319 }
320
 321 /* Return a CONST_INT or CONST_DOUBLE corresponding to the target reading
322 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
323
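/* Example (a sketch, not part of the original source): on a 32-bit
   target with 8-bit units and little-endian byte order, reading the
   string constant "abcd" in SImode yields (const_int 0x64636261); with
   big-endian byte order it yields (const_int 0x61626364). Bytes past a
   terminating zero in STR contribute zero bits. */
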
324 static rtx
325 c_readstr (str, mode)
326 const char *str;
327 enum machine_mode mode;
328 {
329 HOST_WIDE_INT c[2];
330 HOST_WIDE_INT ch;
331 unsigned int i, j;
332
333 if (GET_MODE_CLASS (mode) != MODE_INT)
334 abort ();
335 c[0] = 0;
336 c[1] = 0;
337 ch = 1;
338 for (i = 0; i < GET_MODE_SIZE (mode); i++)
339 {
340 j = i;
341 if (WORDS_BIG_ENDIAN)
342 j = GET_MODE_SIZE (mode) - i - 1;
343 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
344 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
345 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
346 j *= BITS_PER_UNIT;
347 if (j > 2 * HOST_BITS_PER_WIDE_INT)
348 abort ();
349 if (ch)
350 ch = (unsigned char) str[i];
351 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
352 }
353 return immed_double_const (c[0], c[1], mode);
354 }
355
 356 /* Cast a target constant CST to target CHAR. If that value fits into the
 357 host char type, return zero and put that value into the variable pointed
 358 to by P; otherwise return nonzero. */
359
360 static int
361 target_char_cast (cst, p)
362 tree cst;
363 char *p;
364 {
365 unsigned HOST_WIDE_INT val, hostval;
366
367 if (!host_integerp (cst, 1)
368 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
369 return 1;
370
371 val = tree_low_cst (cst, 1);
372 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
373 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
374
375 hostval = val;
376 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
377 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
378
379 if (val != hostval)
380 return 1;
381
382 *p = hostval;
383 return 0;
384 }
385
386 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
387 times to get the address of either a higher stack frame, or a return
388 address located within it (depending on FNDECL_CODE). */
389
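/* Usage sketch (not part of the original source); this helper backs
   both builtins:

     void *ra = __builtin_return_address (0);   return address of the
                                                current function
     void *fp = __builtin_frame_address (1);    frame address of the
                                                caller, reached by
                                                following the dynamic
                                                chain once

   Nonzero counts only work if the target can walk the frame chain; see
   SETUP_FRAME_ADDRESSES and DYNAMIC_CHAIN_ADDRESS below. */
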
390 rtx
391 expand_builtin_return_addr (fndecl_code, count, tem)
392 enum built_in_function fndecl_code;
393 int count;
394 rtx tem;
395 {
396 int i;
397
398 /* Some machines need special handling before we can access
399 arbitrary frames. For example, on the sparc, we must first flush
400 all register windows to the stack. */
401 #ifdef SETUP_FRAME_ADDRESSES
402 if (count > 0)
403 SETUP_FRAME_ADDRESSES ();
404 #endif
405
406 /* On the sparc, the return address is not in the frame, it is in a
407 register. There is no way to access it off of the current frame
408 pointer, but it can be accessed off the previous frame pointer by
409 reading the value from the register window save area. */
410 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
411 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
412 count--;
413 #endif
414
415 /* Scan back COUNT frames to the specified frame. */
416 for (i = 0; i < count; i++)
417 {
418 /* Assume the dynamic chain pointer is in the word that the
419 frame address points to, unless otherwise specified. */
420 #ifdef DYNAMIC_CHAIN_ADDRESS
421 tem = DYNAMIC_CHAIN_ADDRESS (tem);
422 #endif
423 tem = memory_address (Pmode, tem);
424 tem = gen_rtx_MEM (Pmode, tem);
425 set_mem_alias_set (tem, get_frame_alias_set ());
426 tem = copy_to_reg (tem);
427 }
428
429 /* For __builtin_frame_address, return what we've got. */
430 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
431 return tem;
432
 433 /* For __builtin_return_address, get the return address from that
434 frame. */
435 #ifdef RETURN_ADDR_RTX
436 tem = RETURN_ADDR_RTX (count, tem);
437 #else
438 tem = memory_address (Pmode,
439 plus_constant (tem, GET_MODE_SIZE (Pmode)));
440 tem = gen_rtx_MEM (Pmode, tem);
441 set_mem_alias_set (tem, get_frame_alias_set ());
442 #endif
443 return tem;
444 }
445
446 /* Alias set used for setjmp buffer. */
447 static HOST_WIDE_INT setjmp_alias_set = -1;
448
449 /* Construct the leading half of a __builtin_setjmp call. Control will
450 return to RECEIVER_LABEL. This is used directly by sjlj exception
451 handling code. */
452
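/* Buffer layout sketch (not part of the original source). With
   BUF_ADDR pointing at the five-word buffer passed to __builtin_setjmp,
   this routine stores, in outline:

     word 0             BUILTIN_SETJMP_FRAME_VALUE (normally the frame
                        pointer, virtual_stack_vars_rtx)
     word 1             the address of RECEIVER_LABEL
     word 2 onward      the nonlocal stack save area, in sa_mode */
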
453 void
454 expand_builtin_setjmp_setup (buf_addr, receiver_label)
455 rtx buf_addr;
456 rtx receiver_label;
457 {
458 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
459 rtx stack_save;
460 rtx mem;
461
462 if (setjmp_alias_set == -1)
463 setjmp_alias_set = new_alias_set ();
464
465 #ifdef POINTERS_EXTEND_UNSIGNED
466 if (GET_MODE (buf_addr) != Pmode)
467 buf_addr = convert_memory_address (Pmode, buf_addr);
468 #endif
469
470 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
471
472 emit_queue ();
473
474 /* We store the frame pointer and the address of receiver_label in
475 the buffer and use the rest of it for the stack save area, which
476 is machine-dependent. */
477
478 #ifndef BUILTIN_SETJMP_FRAME_VALUE
479 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
480 #endif
481
482 mem = gen_rtx_MEM (Pmode, buf_addr);
483 set_mem_alias_set (mem, setjmp_alias_set);
484 emit_move_insn (mem, BUILTIN_SETJMP_FRAME_VALUE);
485
 486 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
487 set_mem_alias_set (mem, setjmp_alias_set);
488
489 emit_move_insn (validize_mem (mem),
490 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
491
492 stack_save = gen_rtx_MEM (sa_mode,
493 plus_constant (buf_addr,
494 2 * GET_MODE_SIZE (Pmode)));
495 set_mem_alias_set (stack_save, setjmp_alias_set);
496 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
497
498 /* If there is further processing to do, do it. */
499 #ifdef HAVE_builtin_setjmp_setup
500 if (HAVE_builtin_setjmp_setup)
501 emit_insn (gen_builtin_setjmp_setup (buf_addr));
502 #endif
503
504 /* Tell optimize_save_area_alloca that extra work is going to
505 need to go on during alloca. */
506 current_function_calls_setjmp = 1;
507
508 /* Set this so all the registers get saved in our frame; we need to be
509 able to copy the saved values for any registers from frames we unwind. */
510 current_function_has_nonlocal_label = 1;
511 }
512
513 /* Construct the trailing part of a __builtin_setjmp call.
514 This is used directly by sjlj exception handling code. */
515
516 void
517 expand_builtin_setjmp_receiver (receiver_label)
518 rtx receiver_label ATTRIBUTE_UNUSED;
519 {
520 /* Clobber the FP when we get here, so we have to make sure it's
521 marked as used by this function. */
522 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
523
524 /* Mark the static chain as clobbered here so life information
525 doesn't get messed up for it. */
526 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
527
528 /* Now put in the code to restore the frame pointer, and argument
529 pointer, if needed. The code below is from expand_end_bindings
530 in stmt.c; see detailed documentation there. */
531 #ifdef HAVE_nonlocal_goto
532 if (! HAVE_nonlocal_goto)
533 #endif
534 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
535
536 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
537 if (fixed_regs[ARG_POINTER_REGNUM])
538 {
539 #ifdef ELIMINABLE_REGS
540 size_t i;
541 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
542
543 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
544 if (elim_regs[i].from == ARG_POINTER_REGNUM
545 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
546 break;
547
548 if (i == ARRAY_SIZE (elim_regs))
549 #endif
550 {
551 /* Now restore our arg pointer from the address at which it
552 was saved in our stack frame. */
553 emit_move_insn (virtual_incoming_args_rtx,
554 copy_to_reg (get_arg_pointer_save_area (cfun)));
555 }
556 }
557 #endif
558
559 #ifdef HAVE_builtin_setjmp_receiver
560 if (HAVE_builtin_setjmp_receiver)
561 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
562 else
563 #endif
564 #ifdef HAVE_nonlocal_goto_receiver
565 if (HAVE_nonlocal_goto_receiver)
566 emit_insn (gen_nonlocal_goto_receiver ());
567 else
568 #endif
569 { /* Nothing */ }
570
571 /* @@@ This is a kludge. Not all machine descriptions define a blockage
572 insn, but we must not allow the code we just generated to be reordered
573 by scheduling. Specifically, the update of the frame pointer must
574 happen immediately, not later. So emit an ASM_INPUT to act as blockage
575 insn. */
576 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
577 }
578
579 /* __builtin_setjmp is passed a pointer to an array of five words (not
580 all will be used on all machines). It operates similarly to the C
581 library function of the same name, but is more efficient. Much of
582 the code below (and for longjmp) is copied from the handling of
583 non-local gotos.
584
585 NOTE: This is intended for use by GNAT and the exception handling
586 scheme in the compiler and will only work in the method used by
587 them. */
588
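/* Usage sketch (not part of the original source; buf is hypothetical):

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       {
         ... normal path, which may eventually call
             __builtin_longjmp (buf, 1); ...
       }
     else
       {
         ... landing pad: __builtin_setjmp returned 1 ...
       }

   The second argument to __builtin_longjmp must be the constant 1; see
   expand_builtin_longjmp below. */
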
589 static rtx
590 expand_builtin_setjmp (arglist, target)
591 tree arglist;
592 rtx target;
593 {
594 rtx buf_addr, next_lab, cont_lab;
595
596 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
597 return NULL_RTX;
598
599 if (target == 0 || GET_CODE (target) != REG
600 || REGNO (target) < FIRST_PSEUDO_REGISTER)
601 target = gen_reg_rtx (TYPE_MODE (integer_type_node));
602
603 buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
604
605 next_lab = gen_label_rtx ();
606 cont_lab = gen_label_rtx ();
607
608 expand_builtin_setjmp_setup (buf_addr, next_lab);
609
610 /* Set TARGET to zero and branch to the continue label. */
611 emit_move_insn (target, const0_rtx);
612 emit_jump_insn (gen_jump (cont_lab));
613 emit_barrier ();
614 emit_label (next_lab);
615
616 expand_builtin_setjmp_receiver (next_lab);
617
618 /* Set TARGET to one. */
619 emit_move_insn (target, const1_rtx);
620 emit_label (cont_lab);
621
 622 /* Tell flow about the strange goings-on. Putting `next_lab' on
 623 `nonlocal_goto_handler_labels' indicates that function
624 calls may traverse the arc back to this label. */
625
626 current_function_has_nonlocal_label = 1;
627 nonlocal_goto_handler_labels
628 = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);
629
630 return target;
631 }
632
633 /* __builtin_longjmp is passed a pointer to an array of five words (not
634 all will be used on all machines). It operates similarly to the C
635 library function of the same name, but is more efficient. Much of
636 the code below is copied from the handling of non-local gotos.
637
638 NOTE: This is intended for use by GNAT and the exception handling
639 scheme in the compiler and will only work in the method used by
640 them. */
641
642 void
643 expand_builtin_longjmp (buf_addr, value)
644 rtx buf_addr, value;
645 {
646 rtx fp, lab, stack, insn, last;
647 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
648
649 if (setjmp_alias_set == -1)
650 setjmp_alias_set = new_alias_set ();
651
652 #ifdef POINTERS_EXTEND_UNSIGNED
653 if (GET_MODE (buf_addr) != Pmode)
654 buf_addr = convert_memory_address (Pmode, buf_addr);
655 #endif
656
657 buf_addr = force_reg (Pmode, buf_addr);
658
659 /* We used to store value in static_chain_rtx, but that fails if pointers
660 are smaller than integers. We instead require that the user must pass
661 a second argument of 1, because that is what builtin_setjmp will
662 return. This also makes EH slightly more efficient, since we are no
663 longer copying around a value that we don't care about. */
664 if (value != const1_rtx)
665 abort ();
666
667 current_function_calls_longjmp = 1;
668
669 last = get_last_insn ();
670 #ifdef HAVE_builtin_longjmp
671 if (HAVE_builtin_longjmp)
672 emit_insn (gen_builtin_longjmp (buf_addr));
673 else
674 #endif
675 {
676 fp = gen_rtx_MEM (Pmode, buf_addr);
677 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
678 GET_MODE_SIZE (Pmode)));
679
680 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
681 2 * GET_MODE_SIZE (Pmode)));
682 set_mem_alias_set (fp, setjmp_alias_set);
683 set_mem_alias_set (lab, setjmp_alias_set);
684 set_mem_alias_set (stack, setjmp_alias_set);
685
686 /* Pick up FP, label, and SP from the block and jump. This code is
687 from expand_goto in stmt.c; see there for detailed comments. */
688 #if HAVE_nonlocal_goto
689 if (HAVE_nonlocal_goto)
690 /* We have to pass a value to the nonlocal_goto pattern that will
691 get copied into the static_chain pointer, but it does not matter
692 what that value is, because builtin_setjmp does not use it. */
693 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
694 else
695 #endif
696 {
697 lab = copy_to_reg (lab);
698
699 emit_move_insn (hard_frame_pointer_rtx, fp);
700 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
701
702 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
703 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
704 emit_indirect_jump (lab);
705 }
706 }
707
708 /* Search backwards and mark the jump insn as a non-local goto.
709 Note that this precludes the use of __builtin_longjmp to a
710 __builtin_setjmp target in the same function. However, we've
711 already cautioned the user that these functions are for
712 internal exception handling use only. */
713 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
714 {
715 if (insn == last)
716 abort ();
717 if (GET_CODE (insn) == JUMP_INSN)
718 {
719 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
720 REG_NOTES (insn));
721 break;
722 }
723 else if (GET_CODE (insn) == CALL_INSN)
724 break;
725 }
726 }
727
728 /* Expand a call to __builtin_prefetch. For a target that does not support
729 data prefetch, evaluate the memory address argument in case it has side
730 effects. */
731
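/* Usage sketch (not part of the original source; the array a is
   hypothetical):

     __builtin_prefetch (&a[i + 8]);         read, locality 3 (defaults)
     __builtin_prefetch (&a[i + 8], 1, 0);   prefetch for write, no
                                             expected temporal locality

   Both optional arguments must be compile-time constants, as checked
   below. */
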
732 static void
733 expand_builtin_prefetch (arglist)
734 tree arglist;
735 {
736 tree arg0, arg1, arg2;
737 rtx op0, op1, op2;
738
739 if (!validate_arglist (arglist, POINTER_TYPE, 0))
740 return;
741
742 arg0 = TREE_VALUE (arglist);
743 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
744 zero (read) and argument 2 (locality) defaults to 3 (high degree of
745 locality). */
746 if (TREE_CHAIN (arglist))
747 {
748 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
749 if (TREE_CHAIN (TREE_CHAIN (arglist)))
750 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
751 else
752 arg2 = build_int_2 (3, 0);
753 }
754 else
755 {
756 arg1 = integer_zero_node;
757 arg2 = build_int_2 (3, 0);
758 }
759
760 /* Argument 0 is an address. */
761 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
762
763 /* Argument 1 (read/write flag) must be a compile-time constant int. */
764 if (TREE_CODE (arg1) != INTEGER_CST)
765 {
766 error ("second arg to `__builtin_prefetch' must be a constant");
767 arg1 = integer_zero_node;
768 }
769 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
770 /* Argument 1 must be either zero or one. */
771 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
772 {
773 warning ("invalid second arg to __builtin_prefetch; using zero");
774 op1 = const0_rtx;
775 }
776
777 /* Argument 2 (locality) must be a compile-time constant int. */
778 if (TREE_CODE (arg2) != INTEGER_CST)
779 {
780 error ("third arg to `__builtin_prefetch' must be a constant");
781 arg2 = integer_zero_node;
782 }
783 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
784 /* Argument 2 must be 0, 1, 2, or 3. */
785 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
786 {
787 warning ("invalid third arg to __builtin_prefetch; using zero");
788 op2 = const0_rtx;
789 }
790
791 #ifdef HAVE_prefetch
792 if (HAVE_prefetch)
793 {
794 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
795 (op0,
796 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
797 || (GET_MODE(op0) != Pmode))
798 {
799 #ifdef POINTERS_EXTEND_UNSIGNED
800 if (GET_MODE(op0) != Pmode)
801 op0 = convert_memory_address (Pmode, op0);
802 #endif
803 op0 = force_reg (Pmode, op0);
804 }
805 emit_insn (gen_prefetch (op0, op1, op2));
806 }
807 else
808 #endif
809 op0 = protect_from_queue (op0, 0);
810 /* Don't do anything with direct references to volatile memory, but
811 generate code to handle other side effects. */
812 if (GET_CODE (op0) != MEM && side_effects_p (op0))
813 emit_insn (op0);
814 }
815
 816 /* Get a MEM rtx for expression EXP, which is the address of an operand
 817 to be used in a string instruction (cmpstrsi, movstrsi, ...). */
818
819 static rtx
820 get_memory_rtx (exp)
821 tree exp;
822 {
823 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_SUM);
824 rtx mem;
825
826 #ifdef POINTERS_EXTEND_UNSIGNED
827 if (GET_MODE (addr) != Pmode)
828 addr = convert_memory_address (Pmode, addr);
829 #endif
830
831 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
832
833 /* Get an expression we can use to find the attributes to assign to MEM.
834 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
835 we can. First remove any nops. */
836 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
837 || TREE_CODE (exp) == NON_LVALUE_EXPR)
838 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
839 exp = TREE_OPERAND (exp, 0);
840
841 if (TREE_CODE (exp) == ADDR_EXPR)
842 {
843 exp = TREE_OPERAND (exp, 0);
844 set_mem_attributes (mem, exp, 0);
845 }
846 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
847 {
848 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
849 /* memcpy, memset and other builtin stringops can alias with anything. */
850 set_mem_alias_set (mem, 0);
851 }
852
853 return mem;
854 }
855 \f
856 /* Built-in functions to perform an untyped call and return. */
857
858 /* For each register that may be used for calling a function, this
859 gives a mode used to copy the register's value. VOIDmode indicates
860 the register is not used for calling a function. If the machine
861 has register windows, this gives only the outbound registers.
862 INCOMING_REGNO gives the corresponding inbound register. */
863 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
864
865 /* For each register that may be used for returning values, this gives
866 a mode used to copy the register's value. VOIDmode indicates the
867 register is not used for returning values. If the machine has
868 register windows, this gives only the outbound registers.
869 INCOMING_REGNO gives the corresponding inbound register. */
870 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
871
872 /* For each register that may be used for calling a function, this
873 gives the offset of that register into the block returned by
874 __builtin_apply_args. 0 indicates that the register is not
875 used for calling a function. */
876 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
877
878 /* Return the offset of register REGNO into the block returned by
879 __builtin_apply_args. This is not declared static, since it is
880 needed in objc-act.c. */
881
882 int
883 apply_args_register_offset (regno)
884 int regno;
885 {
886 apply_args_size ();
887
888 /* Arguments are always put in outgoing registers (in the argument
 889 block) when that makes sense. */
890 #ifdef OUTGOING_REGNO
891 regno = OUTGOING_REGNO (regno);
892 #endif
893 return apply_args_reg_offset[regno];
894 }
895
896 /* Return the size required for the block returned by __builtin_apply_args,
897 and initialize apply_args_mode. */
898
899 static int
900 apply_args_size ()
901 {
902 static int size = -1;
903 int align;
904 unsigned int regno;
905 enum machine_mode mode;
906
907 /* The values computed by this function never change. */
908 if (size < 0)
909 {
910 /* The first value is the incoming arg-pointer. */
911 size = GET_MODE_SIZE (Pmode);
912
913 /* The second value is the structure value address unless this is
914 passed as an "invisible" first argument. */
915 if (struct_value_rtx)
916 size += GET_MODE_SIZE (Pmode);
917
918 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
919 if (FUNCTION_ARG_REGNO_P (regno))
920 {
921 /* Search for the proper mode for copying this register's
922 value. I'm not sure this is right, but it works so far. */
923 enum machine_mode best_mode = VOIDmode;
924
925 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
926 mode != VOIDmode;
927 mode = GET_MODE_WIDER_MODE (mode))
928 if (HARD_REGNO_MODE_OK (regno, mode)
929 && HARD_REGNO_NREGS (regno, mode) == 1)
930 best_mode = mode;
931
932 if (best_mode == VOIDmode)
933 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
934 mode != VOIDmode;
935 mode = GET_MODE_WIDER_MODE (mode))
936 if (HARD_REGNO_MODE_OK (regno, mode)
937 && have_insn_for (SET, mode))
938 best_mode = mode;
939
940 if (best_mode == VOIDmode)
941 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
942 mode != VOIDmode;
943 mode = GET_MODE_WIDER_MODE (mode))
944 if (HARD_REGNO_MODE_OK (regno, mode)
945 && have_insn_for (SET, mode))
946 best_mode = mode;
947
948 if (best_mode == VOIDmode)
949 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
950 mode != VOIDmode;
951 mode = GET_MODE_WIDER_MODE (mode))
952 if (HARD_REGNO_MODE_OK (regno, mode)
953 && have_insn_for (SET, mode))
954 best_mode = mode;
955
956 mode = best_mode;
957 if (mode == VOIDmode)
958 abort ();
959
960 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
961 if (size % align != 0)
962 size = CEIL (size, align) * align;
963 apply_args_reg_offset[regno] = size;
964 size += GET_MODE_SIZE (mode);
965 apply_args_mode[regno] = mode;
966 }
967 else
968 {
969 apply_args_mode[regno] = VOIDmode;
970 apply_args_reg_offset[regno] = 0;
971 }
972 }
973 return size;
974 }
975
976 /* Return the size required for the block returned by __builtin_apply,
977 and initialize apply_result_mode. */
978
979 static int
980 apply_result_size ()
981 {
982 static int size = -1;
983 int align, regno;
984 enum machine_mode mode;
985
986 /* The values computed by this function never change. */
987 if (size < 0)
988 {
989 size = 0;
990
991 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
992 if (FUNCTION_VALUE_REGNO_P (regno))
993 {
994 /* Search for the proper mode for copying this register's
995 value. I'm not sure this is right, but it works so far. */
996 enum machine_mode best_mode = VOIDmode;
997
998 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
999 mode != TImode;
1000 mode = GET_MODE_WIDER_MODE (mode))
1001 if (HARD_REGNO_MODE_OK (regno, mode))
1002 best_mode = mode;
1003
1004 if (best_mode == VOIDmode)
1005 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
1006 mode != VOIDmode;
1007 mode = GET_MODE_WIDER_MODE (mode))
1008 if (HARD_REGNO_MODE_OK (regno, mode)
1009 && have_insn_for (SET, mode))
1010 best_mode = mode;
1011
1012 if (best_mode == VOIDmode)
1013 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
1014 mode != VOIDmode;
1015 mode = GET_MODE_WIDER_MODE (mode))
1016 if (HARD_REGNO_MODE_OK (regno, mode)
1017 && have_insn_for (SET, mode))
1018 best_mode = mode;
1019
1020 if (best_mode == VOIDmode)
1021 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
1022 mode != VOIDmode;
1023 mode = GET_MODE_WIDER_MODE (mode))
1024 if (HARD_REGNO_MODE_OK (regno, mode)
1025 && have_insn_for (SET, mode))
1026 best_mode = mode;
1027
1028 mode = best_mode;
1029 if (mode == VOIDmode)
1030 abort ();
1031
1032 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1033 if (size % align != 0)
1034 size = CEIL (size, align) * align;
1035 size += GET_MODE_SIZE (mode);
1036 apply_result_mode[regno] = mode;
1037 }
1038 else
1039 apply_result_mode[regno] = VOIDmode;
1040
1041 /* Allow targets that use untyped_call and untyped_return to override
1042 the size so that machine-specific information can be stored here. */
1043 #ifdef APPLY_RESULT_SIZE
1044 size = APPLY_RESULT_SIZE;
1045 #endif
1046 }
1047 return size;
1048 }
1049
1050 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1051 /* Create a vector describing the result block RESULT. If SAVEP is true,
1052 the result block is used to save the values; otherwise it is used to
1053 restore the values. */
1054
1055 static rtx
1056 result_vector (savep, result)
1057 int savep;
1058 rtx result;
1059 {
1060 int regno, size, align, nelts;
1061 enum machine_mode mode;
1062 rtx reg, mem;
1063 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1064
1065 size = nelts = 0;
1066 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1067 if ((mode = apply_result_mode[regno]) != VOIDmode)
1068 {
1069 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1070 if (size % align != 0)
1071 size = CEIL (size, align) * align;
1072 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1073 mem = adjust_address (result, mode, size);
1074 savevec[nelts++] = (savep
1075 ? gen_rtx_SET (VOIDmode, mem, reg)
1076 : gen_rtx_SET (VOIDmode, reg, mem));
1077 size += GET_MODE_SIZE (mode);
1078 }
1079 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1080 }
1081 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1082
1083 /* Save the state required to perform an untyped call with the same
1084 arguments as were passed to the current function. */
1085
1086 static rtx
1087 expand_builtin_apply_args_1 ()
1088 {
1089 rtx registers;
1090 int size, align, regno;
1091 enum machine_mode mode;
1092
1093 /* Create a block where the arg-pointer, structure value address,
1094 and argument registers can be saved. */
1095 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1096
1097 /* Walk past the arg-pointer and structure value address. */
1098 size = GET_MODE_SIZE (Pmode);
1099 if (struct_value_rtx)
1100 size += GET_MODE_SIZE (Pmode);
1101
1102 /* Save each register used in calling a function to the block. */
1103 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1104 if ((mode = apply_args_mode[regno]) != VOIDmode)
1105 {
1106 rtx tem;
1107
1108 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1109 if (size % align != 0)
1110 size = CEIL (size, align) * align;
1111
1112 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1113
1114 emit_move_insn (adjust_address (registers, mode, size), tem);
1115 size += GET_MODE_SIZE (mode);
1116 }
1117
1118 /* Save the arg pointer to the block. */
1119 emit_move_insn (adjust_address (registers, Pmode, 0),
1120 copy_to_reg (virtual_incoming_args_rtx));
1121 size = GET_MODE_SIZE (Pmode);
1122
1123 /* Save the structure value address unless this is passed as an
1124 "invisible" first argument. */
1125 if (struct_value_incoming_rtx)
1126 {
1127 emit_move_insn (adjust_address (registers, Pmode, size),
1128 copy_to_reg (struct_value_incoming_rtx));
1129 size += GET_MODE_SIZE (Pmode);
1130 }
1131
1132 /* Return the address of the block. */
1133 return copy_addr_to_reg (XEXP (registers, 0));
1134 }
1135
 1136 /* __builtin_apply_args returns a block of memory allocated on
 1137 the stack into which are stored the arg pointer, structure
1138 value address, static chain, and all the registers that might
1139 possibly be used in performing a function call. The code is
1140 moved to the start of the function so the incoming values are
1141 saved. */
1142
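/* Usage sketch (not part of the original source; real_fn and the 128
   byte argument-block size are hypothetical). __builtin_apply_args,
   __builtin_apply and __builtin_return are typically used together to
   forward a call whose arguments are not known statically:

     void *args = __builtin_apply_args ();
     void *res  = __builtin_apply ((void (*) ()) real_fn, args, 128);
     __builtin_return (res);
*/
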
1143 static rtx
1144 expand_builtin_apply_args ()
1145 {
1146 /* Don't do __builtin_apply_args more than once in a function.
1147 Save the result of the first call and reuse it. */
1148 if (apply_args_value != 0)
1149 return apply_args_value;
1150 {
1151 /* When this function is called, it means that registers must be
1152 saved on entry to this function. So we migrate the
1153 call to the first insn of this function. */
1154 rtx temp;
1155 rtx seq;
1156
1157 start_sequence ();
1158 temp = expand_builtin_apply_args_1 ();
1159 seq = get_insns ();
1160 end_sequence ();
1161
1162 apply_args_value = temp;
1163
1164 /* Put the insns after the NOTE that starts the function.
1165 If this is inside a start_sequence, make the outer-level insn
1166 chain current, so the code is placed at the start of the
1167 function. */
1168 push_topmost_sequence ();
1169 emit_insn_before (seq, NEXT_INSN (get_insns ()));
1170 pop_topmost_sequence ();
1171 return temp;
1172 }
1173 }
1174
1175 /* Perform an untyped call and save the state required to perform an
1176 untyped return of whatever value was returned by the given function. */
1177
1178 static rtx
1179 expand_builtin_apply (function, arguments, argsize)
1180 rtx function, arguments, argsize;
1181 {
1182 int size, align, regno;
1183 enum machine_mode mode;
1184 rtx incoming_args, result, reg, dest, src, call_insn;
1185 rtx old_stack_level = 0;
1186 rtx call_fusage = 0;
1187
1188 #ifdef POINTERS_EXTEND_UNSIGNED
1189 if (GET_MODE (arguments) != Pmode)
1190 arguments = convert_memory_address (Pmode, arguments);
1191 #endif
1192
1193 /* Create a block where the return registers can be saved. */
1194 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1195
1196 /* Fetch the arg pointer from the ARGUMENTS block. */
1197 incoming_args = gen_reg_rtx (Pmode);
1198 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1199 #ifndef STACK_GROWS_DOWNWARD
1200 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1201 incoming_args, 0, OPTAB_LIB_WIDEN);
1202 #endif
1203
1204 /* Perform postincrements before actually calling the function. */
1205 emit_queue ();
1206
1207 /* Push a new argument block and copy the arguments. Do not allow
1208 the (potential) memcpy call below to interfere with our stack
1209 manipulations. */
1210 do_pending_stack_adjust ();
1211 NO_DEFER_POP;
1212
1213 /* Save the stack with nonlocal if available */
1214 #ifdef HAVE_save_stack_nonlocal
1215 if (HAVE_save_stack_nonlocal)
1216 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1217 else
1218 #endif
1219 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1220
1221 /* Push a block of memory onto the stack to store the memory arguments.
1222 Save the address in a register, and copy the memory arguments. ??? I
 1223 haven't figured out how the calling convention macros affect this,
1224 but it's likely that the source and/or destination addresses in
1225 the block copy will need updating in machine specific ways. */
1226 dest = allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1227 dest = gen_rtx_MEM (BLKmode, dest);
1228 set_mem_align (dest, PARM_BOUNDARY);
1229 src = gen_rtx_MEM (BLKmode, incoming_args);
1230 set_mem_align (src, PARM_BOUNDARY);
1231 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1232
1233 /* Refer to the argument block. */
1234 apply_args_size ();
1235 arguments = gen_rtx_MEM (BLKmode, arguments);
1236 set_mem_align (arguments, PARM_BOUNDARY);
1237
1238 /* Walk past the arg-pointer and structure value address. */
1239 size = GET_MODE_SIZE (Pmode);
1240 if (struct_value_rtx)
1241 size += GET_MODE_SIZE (Pmode);
1242
1243 /* Restore each of the registers previously saved. Make USE insns
1244 for each of these registers for use in making the call. */
1245 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1246 if ((mode = apply_args_mode[regno]) != VOIDmode)
1247 {
1248 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1249 if (size % align != 0)
1250 size = CEIL (size, align) * align;
1251 reg = gen_rtx_REG (mode, regno);
1252 emit_move_insn (reg, adjust_address (arguments, mode, size));
1253 use_reg (&call_fusage, reg);
1254 size += GET_MODE_SIZE (mode);
1255 }
1256
1257 /* Restore the structure value address unless this is passed as an
1258 "invisible" first argument. */
1259 size = GET_MODE_SIZE (Pmode);
1260 if (struct_value_rtx)
1261 {
1262 rtx value = gen_reg_rtx (Pmode);
1263 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1264 emit_move_insn (struct_value_rtx, value);
1265 if (GET_CODE (struct_value_rtx) == REG)
1266 use_reg (&call_fusage, struct_value_rtx);
1267 size += GET_MODE_SIZE (Pmode);
1268 }
1269
1270 /* All arguments and registers used for the call are set up by now! */
1271 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0, 0);
1272
1273 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1274 and we don't want to load it into a register as an optimization,
1275 because prepare_call_address already did it if it should be done. */
1276 if (GET_CODE (function) != SYMBOL_REF)
1277 function = memory_address (FUNCTION_MODE, function);
1278
1279 /* Generate the actual call instruction and save the return value. */
1280 #ifdef HAVE_untyped_call
1281 if (HAVE_untyped_call)
1282 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1283 result, result_vector (1, result)));
1284 else
1285 #endif
1286 #ifdef HAVE_call_value
1287 if (HAVE_call_value)
1288 {
1289 rtx valreg = 0;
1290
1291 /* Locate the unique return register. It is not possible to
1292 express a call that sets more than one return register using
1293 call_value; use untyped_call for that. In fact, untyped_call
1294 only needs to save the return registers in the given block. */
1295 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1296 if ((mode = apply_result_mode[regno]) != VOIDmode)
1297 {
1298 if (valreg)
1299 abort (); /* HAVE_untyped_call required. */
1300 valreg = gen_rtx_REG (mode, regno);
1301 }
1302
1303 emit_call_insn (GEN_CALL_VALUE (valreg,
1304 gen_rtx_MEM (FUNCTION_MODE, function),
1305 const0_rtx, NULL_RTX, const0_rtx));
1306
1307 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1308 }
1309 else
1310 #endif
1311 abort ();
1312
1313 /* Find the CALL insn we just emitted. */
1314 for (call_insn = get_last_insn ();
1315 call_insn && GET_CODE (call_insn) != CALL_INSN;
1316 call_insn = PREV_INSN (call_insn))
1317 ;
1318
1319 if (! call_insn)
1320 abort ();
1321
1322 /* Put the register usage information on the CALL. If there is already
1323 some usage information, put ours at the end. */
1324 if (CALL_INSN_FUNCTION_USAGE (call_insn))
1325 {
1326 rtx link;
1327
1328 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
1329 link = XEXP (link, 1))
1330 ;
1331
1332 XEXP (link, 1) = call_fusage;
1333 }
1334 else
1335 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
1336
1337 /* Restore the stack. */
1338 #ifdef HAVE_save_stack_nonlocal
1339 if (HAVE_save_stack_nonlocal)
1340 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1341 else
1342 #endif
1343 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1344
1345 OK_DEFER_POP;
1346
1347 /* Return the address of the result block. */
1348 return copy_addr_to_reg (XEXP (result, 0));
1349 }
1350
1351 /* Perform an untyped return. */
1352
1353 static void
1354 expand_builtin_return (result)
1355 rtx result;
1356 {
1357 int size, align, regno;
1358 enum machine_mode mode;
1359 rtx reg;
1360 rtx call_fusage = 0;
1361
1362 #ifdef POINTERS_EXTEND_UNSIGNED
1363 if (GET_MODE (result) != Pmode)
1364 result = convert_memory_address (Pmode, result);
1365 #endif
1366
1367 apply_result_size ();
1368 result = gen_rtx_MEM (BLKmode, result);
1369
1370 #ifdef HAVE_untyped_return
1371 if (HAVE_untyped_return)
1372 {
1373 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1374 emit_barrier ();
1375 return;
1376 }
1377 #endif
1378
1379 /* Restore the return value and note that each value is used. */
1380 size = 0;
1381 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1382 if ((mode = apply_result_mode[regno]) != VOIDmode)
1383 {
1384 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1385 if (size % align != 0)
1386 size = CEIL (size, align) * align;
1387 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1388 emit_move_insn (reg, adjust_address (result, mode, size));
1389
1390 push_to_sequence (call_fusage);
1391 emit_insn (gen_rtx_USE (VOIDmode, reg));
1392 call_fusage = get_insns ();
1393 end_sequence ();
1394 size += GET_MODE_SIZE (mode);
1395 }
1396
1397 /* Put the USE insns before the return. */
1398 emit_insn (call_fusage);
1399
 1400 /* Return whatever value was restored by jumping directly to the end
1401 of the function. */
1402 expand_null_return ();
1403 }
1404
1405 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1406
1407 static enum type_class
1408 type_to_class (type)
1409 tree type;
1410 {
1411 switch (TREE_CODE (type))
1412 {
1413 case VOID_TYPE: return void_type_class;
1414 case INTEGER_TYPE: return integer_type_class;
1415 case CHAR_TYPE: return char_type_class;
1416 case ENUMERAL_TYPE: return enumeral_type_class;
1417 case BOOLEAN_TYPE: return boolean_type_class;
1418 case POINTER_TYPE: return pointer_type_class;
1419 case REFERENCE_TYPE: return reference_type_class;
1420 case OFFSET_TYPE: return offset_type_class;
1421 case REAL_TYPE: return real_type_class;
1422 case COMPLEX_TYPE: return complex_type_class;
1423 case FUNCTION_TYPE: return function_type_class;
1424 case METHOD_TYPE: return method_type_class;
1425 case RECORD_TYPE: return record_type_class;
1426 case UNION_TYPE:
1427 case QUAL_UNION_TYPE: return union_type_class;
1428 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1429 ? string_type_class : array_type_class);
1430 case SET_TYPE: return set_type_class;
1431 case FILE_TYPE: return file_type_class;
1432 case LANG_TYPE: return lang_type_class;
1433 default: return no_type_class;
1434 }
1435 }
1436
1437 /* Expand a call to __builtin_classify_type with arguments found in
1438 ARGLIST. */
1439
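/* Usage sketch (not part of the original source). The value returned is
   one of the enum type_class codes from typeclass.h, chosen purely from
   the static type of the argument, e.g.:

     __builtin_classify_type (0)            integer_type_class
     __builtin_classify_type (0.0)          real_type_class
     __builtin_classify_type ((char *) 0)   pointer_type_class */
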
1440 static rtx
1441 expand_builtin_classify_type (arglist)
1442 tree arglist;
1443 {
1444 if (arglist != 0)
1445 return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
1446 return GEN_INT (no_type_class);
1447 }
1448
1449 /* Expand expression EXP, which is a call to __builtin_constant_p. */
1450
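/* Expansion sketch (not part of the original source):

     __builtin_constant_p (42)   folds to 1 before reaching this point
     __builtin_constant_p (n)    emits (constant_p_rtx n); if a later
                                 pass such as CSE proves n constant the
                                 result becomes 1, otherwise 0 */
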
1451 static rtx
1452 expand_builtin_constant_p (exp)
1453 tree exp;
1454 {
1455 tree arglist = TREE_OPERAND (exp, 1);
1456 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
1457 rtx tmp;
1458
1459 if (arglist == 0)
1460 return const0_rtx;
1461 arglist = TREE_VALUE (arglist);
1462
1463 /* We have taken care of the easy cases during constant folding. This
1464 case is not obvious, so emit (constant_p_rtx (ARGLIST)) and let CSE get a
1465 chance to see if it can deduce whether ARGLIST is constant. */
1466
1467 tmp = expand_expr (arglist, NULL_RTX, VOIDmode, 0);
1468 tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp);
1469 return tmp;
1470 }
1471
1472 /* Expand a call to one of the builtin math functions (sin, cos, or sqrt).
1473 Return 0 if a normal call should be emitted rather than expanding the
1474 function in-line. EXP is the expression that is a call to the builtin
1475 function; if convenient, the result should be placed in TARGET.
1476 SUBTARGET may be used as the target for computing one of EXP's operands. */
1477
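/* Expansion sketch (not part of the original source). With
   -fmath-errno, a call such as y = sqrt (x) is expanded roughly as:

     y = <result of sqrt_optab applied to x>;
     if (y != y)
       errno = EDOM;   (or re-issue the library call, which sets errno)

   where the y != y self-comparison is the NaN test emitted below via
   emit_cmp_and_jump_insns. The floor, ceil, trunc, round and nearbyint
   variants skip the errno handling (errno_set is false for them). */
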
1478 static rtx
1479 expand_builtin_mathfn (exp, target, subtarget)
1480 tree exp;
1481 rtx target, subtarget;
1482 {
1483 optab builtin_optab;
1484 rtx op0, insns;
1485 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1486 tree arglist = TREE_OPERAND (exp, 1);
1487 enum machine_mode argmode;
1488 bool errno_set = true;
1489
1490 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
1491 return 0;
1492
1493 /* Stabilize and compute the argument. */
1494 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
1495 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
1496 {
1497 exp = copy_node (exp);
1498 TREE_OPERAND (exp, 1) = arglist;
1499 /* Wrap the computation of the argument in a SAVE_EXPR. That
1500 way, if we need to expand the argument again (as in the
1501 flag_errno_math case below where we cannot directly set
1502 errno), we will not perform side-effects more than once.
1503 Note that here we're mutating the original EXP as well as the
1504 copy; that's the right thing to do in case the original EXP
1505 is expanded later. */
1506 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
1507 arglist = copy_node (arglist);
1508 }
1509 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
1510
1511 /* Make a suitable register to place result in. */
1512 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
1513
1514 emit_queue ();
1515 start_sequence ();
1516
1517 switch (DECL_FUNCTION_CODE (fndecl))
1518 {
1519 case BUILT_IN_SIN:
1520 case BUILT_IN_SINF:
1521 case BUILT_IN_SINL:
1522 builtin_optab = sin_optab; break;
1523 case BUILT_IN_COS:
1524 case BUILT_IN_COSF:
1525 case BUILT_IN_COSL:
1526 builtin_optab = cos_optab; break;
1527 case BUILT_IN_SQRT:
1528 case BUILT_IN_SQRTF:
1529 case BUILT_IN_SQRTL:
1530 builtin_optab = sqrt_optab; break;
1531 case BUILT_IN_EXP:
1532 case BUILT_IN_EXPF:
1533 case BUILT_IN_EXPL:
1534 builtin_optab = exp_optab; break;
1535 case BUILT_IN_LOG:
1536 case BUILT_IN_LOGF:
1537 case BUILT_IN_LOGL:
1538 builtin_optab = log_optab; break;
1539 case BUILT_IN_FLOOR:
1540 case BUILT_IN_FLOORF:
1541 case BUILT_IN_FLOORL:
 1542 errno_set = false; builtin_optab = floor_optab; break;
1543 case BUILT_IN_CEIL:
1544 case BUILT_IN_CEILF:
1545 case BUILT_IN_CEILL:
 1546 errno_set = false; builtin_optab = ceil_optab; break;
1547 case BUILT_IN_TRUNC:
1548 case BUILT_IN_TRUNCF:
1549 case BUILT_IN_TRUNCL:
 1550 errno_set = false; builtin_optab = trunc_optab; break;
1551 case BUILT_IN_ROUND:
1552 case BUILT_IN_ROUNDF:
1553 case BUILT_IN_ROUNDL:
 1554 errno_set = false; builtin_optab = round_optab; break;
1555 case BUILT_IN_NEARBYINT:
1556 case BUILT_IN_NEARBYINTF:
1557 case BUILT_IN_NEARBYINTL:
 1558 errno_set = false; builtin_optab = nearbyint_optab; break;
1559 default:
1560 abort ();
1561 }
1562
1563 /* Compute into TARGET.
1564 Set TARGET to wherever the result comes back. */
1565 argmode = TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist)));
1566 target = expand_unop (argmode, builtin_optab, op0, target, 0);
1567
1568 /* If we were unable to expand via the builtin, stop the
1569 sequence (without outputting the insns) and return 0, causing
1570 a call to the library function. */
1571 if (target == 0)
1572 {
1573 end_sequence ();
1574 return 0;
1575 }
1576
1577 /* If errno must be maintained, we must set it to EDOM for NaN results. */
1578
1579 if (flag_errno_math && errno_set && HONOR_NANS (argmode))
1580 {
1581 rtx lab1;
1582
1583 lab1 = gen_label_rtx ();
1584
1585 /* Test the result; if it is NaN, set errno=EDOM because
1586 the argument was not in the domain. */
1587 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1588 0, lab1);
1589
1590 #ifdef TARGET_EDOM
1591 {
1592 #ifdef GEN_ERRNO_RTX
1593 rtx errno_rtx = GEN_ERRNO_RTX;
1594 #else
1595 rtx errno_rtx
1596 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1597 #endif
1598
1599 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1600 }
1601 #else
1602 /* We can't set errno=EDOM directly; let the library call do it.
1603 Pop the arguments right away in case the call gets deleted. */
1604 NO_DEFER_POP;
1605 expand_call (exp, target, 0);
1606 OK_DEFER_POP;
1607 #endif
1608
1609 emit_label (lab1);
1610 }
1611
1612 /* Output the entire sequence. */
1613 insns = get_insns ();
1614 end_sequence ();
1615 emit_insn (insns);
1616
1617 return target;
1618 }
1619
 1620 /* Expand expression EXP, which is a call to the strlen builtin. Return 0
 1621 if we failed, in which case the caller should emit a normal call; otherwise
 1622 try to get the result in TARGET, if convenient. */
1623
1624 static rtx
1625 expand_builtin_strlen (exp, target)
1626 tree exp;
1627 rtx target;
1628 {
1629 tree arglist = TREE_OPERAND (exp, 1);
1630 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
1631
1632 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
1633 return 0;
1634 else
1635 {
1636 rtx pat;
1637 tree src = TREE_VALUE (arglist);
1638
1639 int align
1640 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
1641
1642 rtx result, src_reg, char_rtx, before_strlen;
1643 enum machine_mode insn_mode = value_mode, char_mode;
1644 enum insn_code icode = CODE_FOR_nothing;
1645
1646 /* If SRC is not a pointer type, don't do this operation inline. */
1647 if (align == 0)
1648 return 0;
1649
1650 /* Bail out if we can't compute strlen in the right mode. */
1651 while (insn_mode != VOIDmode)
1652 {
1653 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
1654 if (icode != CODE_FOR_nothing)
1655 break;
1656
1657 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
1658 }
1659 if (insn_mode == VOIDmode)
1660 return 0;
1661
1662 /* Make a place to write the result of the instruction. */
1663 result = target;
1664 if (! (result != 0
1665 && GET_CODE (result) == REG
1666 && GET_MODE (result) == insn_mode
1667 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
1668 result = gen_reg_rtx (insn_mode);
1669
1670 /* Make a place to hold the source address. We will not expand
1671 the actual source until we are sure that the expansion will
1672 not fail -- there are trees that cannot be expanded twice. */
1673 src_reg = gen_reg_rtx (Pmode);
1674
1675 /* Mark the beginning of the strlen sequence so we can emit the
1676 source operand later. */
1677 before_strlen = get_last_insn ();
1678
1679 char_rtx = const0_rtx;
1680 char_mode = insn_data[(int) icode].operand[2].mode;
1681 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
1682 char_mode))
1683 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
1684
1685 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
1686 char_rtx, GEN_INT (align));
1687 if (! pat)
1688 return 0;
1689 emit_insn (pat);
1690
1691 /* Now that we are assured of success, expand the source. */
1692 start_sequence ();
1693 pat = memory_address (BLKmode,
1694 expand_expr (src, src_reg, ptr_mode, EXPAND_SUM));
1695 if (pat != src_reg)
1696 emit_move_insn (src_reg, pat);
1697 pat = get_insns ();
1698 end_sequence ();
1699
1700 if (before_strlen)
1701 emit_insn_after (pat, before_strlen);
1702 else
1703 emit_insn_before (pat, get_insns ());
1704
1705 /* Return the value in the proper mode for this function. */
1706 if (GET_MODE (result) == value_mode)
1707 target = result;
1708 else if (target != 0)
1709 convert_move (target, result, 0);
1710 else
1711 target = convert_to_mode (value_mode, result, 0);
1712
1713 return target;
1714 }
1715 }
1716
1717 /* Expand a call to the strstr builtin. Return 0 if we failed; the
1718 caller should emit a normal call. Otherwise try to get the result
1719 in TARGET, if convenient (and in mode MODE if that's convenient). */
1720
1721 static rtx
1722 expand_builtin_strstr (arglist, target, mode)
1723 tree arglist;
1724 rtx target;
1725 enum machine_mode mode;
1726 {
1727 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1728 return 0;
1729 else
1730 {
1731 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
1732 tree fn;
1733 const char *p1, *p2;
1734
1735 p2 = c_getstr (s2);
1736 if (p2 == NULL)
1737 return 0;
1738
1739 p1 = c_getstr (s1);
1740 if (p1 != NULL)
1741 {
1742 const char *r = strstr (p1, p2);
1743
1744 if (r == NULL)
1745 return const0_rtx;
1746
1747 /* Return an offset into the constant string argument. */
1748 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
1749 s1, ssize_int (r - p1))),
1750 target, mode, EXPAND_NORMAL);
1751 }
1752
1753 if (p2[0] == '\0')
1754 return expand_expr (s1, target, mode, EXPAND_NORMAL);
1755
1756 if (p2[1] != '\0')
1757 return 0;
1758
1759 fn = built_in_decls[BUILT_IN_STRCHR];
1760 if (!fn)
1761 return 0;
1762
1763 /* New argument list transforming strstr(s1, s2) to
1764 strchr(s1, s2[0]). */
1765 arglist =
1766 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
1767 arglist = tree_cons (NULL_TREE, s1, arglist);
1768 return expand_expr (build_function_call_expr (fn, arglist),
1769 target, mode, EXPAND_NORMAL);
1770 }
1771 }
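
/* For illustration (a sketch of the folds above, with placeholder names):
   constant arguments allow source-level rewrites along these lines:

       strstr ("hello world", "wor")   =>   "hello world" + 6
       strstr (s, "")                  =>   s
       strstr (s, "w")                 =>   strchr (s, 'w')

   Any other form falls back to a normal call to strstr.  */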
1772
1773 /* Expand a call to the strchr builtin. Return 0 if we failed; the
1774 caller should emit a normal call. Otherwise try to get the result
1775 in TARGET, if convenient (and in mode MODE if that's convenient). */
1776
1777 static rtx
1778 expand_builtin_strchr (arglist, target, mode)
1779 tree arglist;
1780 rtx target;
1781 enum machine_mode mode;
1782 {
1783 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
1784 return 0;
1785 else
1786 {
1787 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
1788 const char *p1;
1789
1790 if (TREE_CODE (s2) != INTEGER_CST)
1791 return 0;
1792
1793 p1 = c_getstr (s1);
1794 if (p1 != NULL)
1795 {
1796 char c;
1797 const char *r;
1798
1799 if (target_char_cast (s2, &c))
1800 return 0;
1801
1802 r = strchr (p1, c);
1803
1804 if (r == NULL)
1805 return const0_rtx;
1806
1807 /* Return an offset into the constant string argument. */
1808 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
1809 s1, ssize_int (r - p1))),
1810 target, mode, EXPAND_NORMAL);
1811 }
1812
1813 /* FIXME: Should use the strchrM optab here so that ports can
1814 optimize this. */
1815 return 0;
1816 }
1817 }
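
/* For illustration: with a constant string argument, a call such as

       strchr ("abcdef", 'd')   =>   "abcdef" + 3

   is folded to an offset into the string, or to a null pointer constant
   if the character does not occur; non-constant strings fall back to a
   normal call.  */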
1818
1819 /* Expand a call to the strrchr builtin. Return 0 if we failed; the
1820 caller should emit a normal call. Otherwise try to get the result
1821 in TARGET, if convenient (and in mode MODE if that's convenient). */
1822
1823 static rtx
1824 expand_builtin_strrchr (arglist, target, mode)
1825 tree arglist;
1826 rtx target;
1827 enum machine_mode mode;
1828 {
1829 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
1830 return 0;
1831 else
1832 {
1833 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
1834 tree fn;
1835 const char *p1;
1836
1837 if (TREE_CODE (s2) != INTEGER_CST)
1838 return 0;
1839
1840 p1 = c_getstr (s1);
1841 if (p1 != NULL)
1842 {
1843 char c;
1844 const char *r;
1845
1846 if (target_char_cast (s2, &c))
1847 return 0;
1848
1849 r = strrchr (p1, c);
1850
1851 if (r == NULL)
1852 return const0_rtx;
1853
1854 /* Return an offset into the constant string argument. */
1855 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
1856 s1, ssize_int (r - p1))),
1857 target, mode, EXPAND_NORMAL);
1858 }
1859
1860 if (! integer_zerop (s2))
1861 return 0;
1862
1863 fn = built_in_decls[BUILT_IN_STRCHR];
1864 if (!fn)
1865 return 0;
1866
1867 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
1868 return expand_expr (build_function_call_expr (fn, arglist),
1869 target, mode, EXPAND_NORMAL);
1870 }
1871 }
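
/* For illustration: the code above folds calls such as

       strrchr ("abcabc", 'b')   =>   "abcabc" + 4
       strrchr (s, '\0')         =>   strchr (s, '\0')

   the latter because the terminating nul occurs exactly once; other
   forms fall back to a normal call.  */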
1872
1873 /* Expand a call to the strpbrk builtin. Return 0 if we failed; the
1874 caller should emit a normal call. Otherwise try to get the result
1875 in TARGET, if convenient (and in mode MODE if that's convenient). */
1876
1877 static rtx
1878 expand_builtin_strpbrk (arglist, target, mode)
1879 tree arglist;
1880 rtx target;
1881 enum machine_mode mode;
1882 {
1883 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1884 return 0;
1885 else
1886 {
1887 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
1888 tree fn;
1889 const char *p1, *p2;
1890
1891 p2 = c_getstr (s2);
1892 if (p2 == NULL)
1893 return 0;
1894
1895 p1 = c_getstr (s1);
1896 if (p1 != NULL)
1897 {
1898 const char *r = strpbrk (p1, p2);
1899
1900 if (r == NULL)
1901 return const0_rtx;
1902
1903 /* Return an offset into the constant string argument. */
1904 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
1905 s1, ssize_int (r - p1))),
1906 target, mode, EXPAND_NORMAL);
1907 }
1908
1909 if (p2[0] == '\0')
1910 {
1911 /* strpbrk(x, "") == NULL.
1912 Evaluate and ignore the arguments in case they had
1913 side-effects. */
1914 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
1915 return const0_rtx;
1916 }
1917
1918 if (p2[1] != '\0')
1919 return 0; /* Really call strpbrk. */
1920
1921 fn = built_in_decls[BUILT_IN_STRCHR];
1922 if (!fn)
1923 return 0;
1924
1925 /* New argument list transforming strpbrk(s1, s2) to
1926 strchr(s1, s2[0]). */
1927 arglist =
1928 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
1929 arglist = tree_cons (NULL_TREE, s1, arglist);
1930 return expand_expr (build_function_call_expr (fn, arglist),
1931 target, mode, EXPAND_NORMAL);
1932 }
1933 }
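
/* For illustration: the code above folds calls such as

       strpbrk ("hello", "lo")   =>   "hello" + 2
       strpbrk (s, "")           =>   (char *) 0
       strpbrk (s, "x")          =>   strchr (s, 'x')

   A set string of two or more characters, with a non-constant S1,
   falls back to a normal call.  */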
1934
1935 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
1936 bytes from constant string DATA + OFFSET and return it as target
1937 constant. */
1938
1939 static rtx
1940 builtin_memcpy_read_str (data, offset, mode)
1941 PTR data;
1942 HOST_WIDE_INT offset;
1943 enum machine_mode mode;
1944 {
1945 const char *str = (const char *) data;
1946
1947 if (offset < 0
1948 || ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
1949 > strlen (str) + 1))
1950 abort (); /* Attempt to read past the end of constant string. */
1951
1952 return c_readstr (str + offset, mode);
1953 }
1954
1955 /* Expand a call to the memcpy builtin, with arguments in ARGLIST.
1956 Return 0 if we failed; the caller should emit a normal call. Otherwise
1957 try to get the result in TARGET, if convenient (and in mode MODE if
1958 that's convenient). */
1959
1960 static rtx
1961 expand_builtin_memcpy (arglist, target, mode)
1962 tree arglist;
1963 rtx target;
1964 enum machine_mode mode;
1965 {
1966 if (!validate_arglist (arglist,
1967 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
1968 return 0;
1969 else
1970 {
1971 tree dest = TREE_VALUE (arglist);
1972 tree src = TREE_VALUE (TREE_CHAIN (arglist));
1973 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
1974 const char *src_str;
1975
1976 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
1977 unsigned int dest_align
1978 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
1979 rtx dest_mem, src_mem, dest_addr, len_rtx;
1980
1981 /* If DEST is not a pointer type, call the normal function. */
1982 if (dest_align == 0)
1983 return 0;
1984
1985 /* If the LEN parameter is zero, return DEST. */
1986 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
1987 {
1988 /* Evaluate and ignore SRC in case it has side-effects. */
1989 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
1990 return expand_expr (dest, target, mode, EXPAND_NORMAL);
1991 }
1992
1993 /* If SRC is not a pointer type, don't do this
1994 operation in-line. */
1995 if (src_align == 0)
1996 return 0;
1997
1998 dest_mem = get_memory_rtx (dest);
1999 set_mem_align (dest_mem, dest_align);
2000 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2001 src_str = c_getstr (src);
2002
2003 /* If SRC is a string constant and the block move would be done
2004 by pieces, we can avoid loading the string from memory
2005 and only store the computed constants. */
2006 if (src_str
2007 && GET_CODE (len_rtx) == CONST_INT
2008 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2009 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2010 (PTR) src_str, dest_align))
2011 {
2012 store_by_pieces (dest_mem, INTVAL (len_rtx),
2013 builtin_memcpy_read_str,
2014 (PTR) src_str, dest_align);
2015 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2016 #ifdef POINTERS_EXTEND_UNSIGNED
2017 if (GET_MODE (dest_mem) != ptr_mode)
2018 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2019 #endif
2020 return dest_mem;
2021 }
2022
2023 src_mem = get_memory_rtx (src);
2024 set_mem_align (src_mem, src_align);
2025
2026 /* Copy word part most expediently. */
2027 dest_addr = emit_block_move (dest_mem, src_mem, len_rtx,
2028 BLOCK_OP_NORMAL);
2029
2030 if (dest_addr == 0)
2031 {
2032 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2033 #ifdef POINTERS_EXTEND_UNSIGNED
2034 if (GET_MODE (dest_addr) != ptr_mode)
2035 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2036 #endif
2037 }
2038
2039 return dest_addr;
2040 }
2041 }
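
/* For illustration: with a constant source string and a small constant
   length, a call such as

       memcpy (buf, "abcd", 5)

   can be expanded as direct stores of the string's bytes into BUF
   (store_by_pieces) when the target allows it, returning BUF; otherwise
   emit_block_move emits a block move or a library call.  BUF here is
   just a placeholder destination.  */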
2042
2043 /* Expand expression EXP, which is a call to the strcpy builtin. Return 0
2044 if we failed; the caller should emit a normal call. Otherwise try to get
2045 the result in TARGET, if convenient (and in mode MODE if that's
2046 convenient). */
2047
2048 static rtx
2049 expand_builtin_strcpy (exp, target, mode)
2050 tree exp;
2051 rtx target;
2052 enum machine_mode mode;
2053 {
2054 tree arglist = TREE_OPERAND (exp, 1);
2055 tree fn, len;
2056
2057 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2058 return 0;
2059
2060 fn = built_in_decls[BUILT_IN_MEMCPY];
2061 if (!fn)
2062 return 0;
2063
2064 len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
2065 if (len == 0)
2066 return 0;
2067
2068 len = size_binop (PLUS_EXPR, len, ssize_int (1));
2069 chainon (arglist, build_tree_list (NULL_TREE, len));
2070 return expand_expr (build_function_call_expr (fn, arglist),
2071 target, mode, EXPAND_NORMAL);
2072 }
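
/* For illustration: when the source string's length is known at
   compile time, the code above rewrites

       strcpy (dst, "abc")   =>   memcpy (dst, "abc", 4)

   with the length including the terminating nul; otherwise the
   original strcpy call is emitted.  */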
2073
2074 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2075 bytes from constant string DATA + OFFSET and return it as target
2076 constant. */
2077
2078 static rtx
2079 builtin_strncpy_read_str (data, offset, mode)
2080 PTR data;
2081 HOST_WIDE_INT offset;
2082 enum machine_mode mode;
2083 {
2084 const char *str = (const char *) data;
2085
2086 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
2087 return const0_rtx;
2088
2089 return c_readstr (str + offset, mode);
2090 }
2091
2092 /* Expand a call to the strncpy builtin, with arguments in ARGLIST. Return 0
2093 if we failed; the caller should emit a normal call. */
2094
2095 static rtx
2096 expand_builtin_strncpy (arglist, target, mode)
2097 tree arglist;
2098 rtx target;
2099 enum machine_mode mode;
2100 {
2101 if (!validate_arglist (arglist,
2102 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2103 return 0;
2104 else
2105 {
2106 tree slen = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
2107 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2108 tree fn;
2109
2110 /* We must be passed a constant len parameter. */
2111 if (TREE_CODE (len) != INTEGER_CST)
2112 return 0;
2113
2114 /* If the len parameter is zero, return the dst parameter. */
2115 if (integer_zerop (len))
2116 {
2117 /* Evaluate and ignore the src argument in case it has
2118 side-effects. */
2119 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
2120 VOIDmode, EXPAND_NORMAL);
2121 /* Return the dst parameter. */
2122 return expand_expr (TREE_VALUE (arglist), target, mode,
2123 EXPAND_NORMAL);
2124 }
2125
2126 /* Now, the length of the source string must be a compile-time constant. */
2127 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
2128 return 0;
2129
2130 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
2131
2132 /* We're required to pad with trailing zeros if the requested
2133 len is greater than strlen(s2)+1. In that case try to
2134 use store_by_pieces; if that fails, punt. */
2135 if (tree_int_cst_lt (slen, len))
2136 {
2137 tree dest = TREE_VALUE (arglist);
2138 unsigned int dest_align
2139 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2140 const char *p = c_getstr (TREE_VALUE (TREE_CHAIN (arglist)));
2141 rtx dest_mem;
2142
2143 if (!p || dest_align == 0 || !host_integerp (len, 1)
2144 || !can_store_by_pieces (tree_low_cst (len, 1),
2145 builtin_strncpy_read_str,
2146 (PTR) p, dest_align))
2147 return 0;
2148
2149 dest_mem = get_memory_rtx (dest);
2150 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2151 builtin_strncpy_read_str,
2152 (PTR) p, dest_align);
2153 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2154 #ifdef POINTERS_EXTEND_UNSIGNED
2155 if (GET_MODE (dest_mem) != ptr_mode)
2156 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2157 #endif
2158 return dest_mem;
2159 }
2160
2161 /* OK, transform into builtin memcpy. */
2162 fn = built_in_decls[BUILT_IN_MEMCPY];
2163 if (!fn)
2164 return 0;
2165 return expand_expr (build_function_call_expr (fn, arglist),
2166 target, mode, EXPAND_NORMAL);
2167 }
2168 }
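
/* For illustration: the code above performs rewrites such as

       strncpy (dst, src, 0)    =>   dst  (src still evaluated)
       strncpy (dst, "ab", 2)   =>   memcpy (dst, "ab", 2)
       strncpy (dst, "ab", 8)   =>   store "ab" plus six nul bytes
                                     directly into DST by pieces

   where the last form is used only if the target can store the padded
   constant by pieces; otherwise a normal call is emitted.  */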
2169
2170 /* Callback routine for store_by_pieces. Return a target constant made
2171 of GET_MODE_SIZE (MODE) copies of the single character pointed to by
2172 DATA; OFFSET is ignored. */
2173
2174 static rtx
2175 builtin_memset_read_str (data, offset, mode)
2176 PTR data;
2177 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2178 enum machine_mode mode;
2179 {
2180 const char *c = (const char *) data;
2181 char *p = alloca (GET_MODE_SIZE (mode));
2182
2183 memset (p, *c, GET_MODE_SIZE (mode));
2184
2185 return c_readstr (p, mode);
2186 }
2187
2188 /* Callback routine for store_by_pieces. Return the RTL of a register
2189 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
2190 char value given in the RTL register data. For example, if mode is
2191 4 bytes wide, return the RTL for 0x01010101*data. */
2192
2193 static rtx
2194 builtin_memset_gen_str (data, offset, mode)
2195 PTR data;
2196 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2197 enum machine_mode mode;
2198 {
2199 rtx target, coeff;
2200 size_t size;
2201 char *p;
2202
2203 size = GET_MODE_SIZE (mode);
2204 if (size == 1)
2205 return (rtx) data;
2206
2207 p = alloca (size);
2208 memset (p, 1, size);
2209 coeff = c_readstr (p, mode);
2210
2211 target = convert_to_mode (mode, (rtx) data, 1);
2212 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
2213 return force_reg (mode, target);
2214 }
2215
2216 /* Expand expression EXP, which is a call to the memset builtin. Return 0
2217 if we failed; the caller should emit a normal call. Otherwise try to get
2218 the result in TARGET, if convenient (and in mode MODE if that's
2219 convenient). */
2220
2221 static rtx
2222 expand_builtin_memset (exp, target, mode)
2223 tree exp;
2224 rtx target;
2225 enum machine_mode mode;
2226 {
2227 tree arglist = TREE_OPERAND (exp, 1);
2228
2229 if (!validate_arglist (arglist,
2230 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
2231 return 0;
2232 else
2233 {
2234 tree dest = TREE_VALUE (arglist);
2235 tree val = TREE_VALUE (TREE_CHAIN (arglist));
2236 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2237 char c;
2238
2239 unsigned int dest_align
2240 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2241 rtx dest_mem, dest_addr, len_rtx;
2242
2243 /* If DEST is not a pointer type, don't do this
2244 operation in-line. */
2245 if (dest_align == 0)
2246 return 0;
2247
2248 /* If the LEN parameter is zero, return DEST. */
2249 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2250 {
2251 /* Evaluate and ignore VAL in case it has side-effects. */
2252 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
2253 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2254 }
2255
2256 if (TREE_CODE (val) != INTEGER_CST)
2257 {
2258 rtx val_rtx;
2259
2260 if (!host_integerp (len, 1))
2261 return 0;
2262
2263 if (optimize_size && tree_low_cst (len, 1) > 1)
2264 return 0;
2265
2266 /* Assume that we can memset by pieces if we can store
2267 the coefficients by pieces (in the required modes).
2268 We can't pass builtin_memset_gen_str as that emits RTL. */
2269 c = 1;
2270 if (!can_store_by_pieces (tree_low_cst (len, 1),
2271 builtin_memset_read_str,
2272 (PTR) &c, dest_align))
2273 return 0;
2274
2275 val = fold (build1 (CONVERT_EXPR, unsigned_char_type_node, val));
2276 val_rtx = expand_expr (val, NULL_RTX, VOIDmode, 0);
2277 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
2278 val_rtx);
2279 dest_mem = get_memory_rtx (dest);
2280 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2281 builtin_memset_gen_str,
2282 (PTR) val_rtx, dest_align);
2283 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2284 #ifdef POINTERS_EXTEND_UNSIGNED
2285 if (GET_MODE (dest_mem) != ptr_mode)
2286 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2287 #endif
2288 return dest_mem;
2289 }
2290
2291 if (target_char_cast (val, &c))
2292 return 0;
2293
2294 if (c)
2295 {
2296 if (!host_integerp (len, 1))
2297 return 0;
2298 if (!can_store_by_pieces (tree_low_cst (len, 1),
2299 builtin_memset_read_str, (PTR) &c,
2300 dest_align))
2301 return 0;
2302
2303 dest_mem = get_memory_rtx (dest);
2304 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2305 builtin_memset_read_str,
2306 (PTR) &c, dest_align);
2307 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2308 #ifdef POINTERS_EXTEND_UNSIGNED
2309 if (GET_MODE (dest_mem) != ptr_mode)
2310 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2311 #endif
2312 return dest_mem;
2313 }
2314
2315 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2316
2317 dest_mem = get_memory_rtx (dest);
2318 set_mem_align (dest_mem, dest_align);
2319 dest_addr = clear_storage (dest_mem, len_rtx);
2320
2321 if (dest_addr == 0)
2322 {
2323 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2324 #ifdef POINTERS_EXTEND_UNSIGNED
2325 if (GET_MODE (dest_addr) != ptr_mode)
2326 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2327 #endif
2328 }
2329
2330 return dest_addr;
2331 }
2332 }
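
/* For illustration: the code above handles calls such as

       memset (p, 0, 0)      =>   p  (the value is still evaluated)
       memset (p, 'x', 16)   =>   sixteen constant byte stores, emitted
                                  by pieces if the target allows it
       memset (p, 0, n)      =>   a block clear via clear_storage

   assuming the destination alignment is known; otherwise a normal
   call is emitted.  */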
2333
2334 /* Expand expression EXP, which is a call to the bzero builtin. Return 0
2335 if we failed; the caller should emit a normal call. */
2336
2337 static rtx
2338 expand_builtin_bzero (exp)
2339 tree exp;
2340 {
2341 tree arglist = TREE_OPERAND (exp, 1);
2342 tree dest, size, newarglist;
2343 rtx result;
2344
2345 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2346 return NULL_RTX;
2347
2348 dest = TREE_VALUE (arglist);
2349 size = TREE_VALUE (TREE_CHAIN (arglist));
2350
2351 /* New argument list transforming bzero(ptr x, int y) to
2352 memset(ptr x, int 0, size_t y). This is done this way
2353 so that if it isn't expanded inline, we fall back to
2354 calling bzero instead of memset. */
2355
2356 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
2357 newarglist = tree_cons (NULL_TREE, integer_zero_node, newarglist);
2358 newarglist = tree_cons (NULL_TREE, dest, newarglist);
2359
2360 TREE_OPERAND (exp, 1) = newarglist;
2361 result = expand_builtin_memset (exp, const0_rtx, VOIDmode);
2362
2363 /* Always restore the original arguments. */
2364 TREE_OPERAND (exp, 1) = arglist;
2365
2366 return result;
2367 }
2368
2369 /* Expand expression EXP, which is a call to the memcmp or the strcmp builtin.
2370 ARGLIST is the argument list for this call. Return 0 if we failed and the
2371 caller should emit a normal call, otherwise try to get the result in
2372 TARGET, if convenient (and in mode MODE, if that's convenient). */
2373
2374 static rtx
2375 expand_builtin_memcmp (exp, arglist, target, mode)
2376 tree exp ATTRIBUTE_UNUSED;
2377 tree arglist;
2378 rtx target;
2379 enum machine_mode mode;
2380 {
2381 tree arg1, arg2, len;
2382 const char *p1, *p2;
2383
2384 if (!validate_arglist (arglist,
2385 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2386 return 0;
2387
2388 arg1 = TREE_VALUE (arglist);
2389 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2390 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2391
2392 /* If the len parameter is zero, return zero. */
2393 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2394 {
2395 /* Evaluate and ignore arg1 and arg2 in case they have
2396 side-effects. */
2397 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2398 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2399 return const0_rtx;
2400 }
2401
2402 p1 = c_getstr (arg1);
2403 p2 = c_getstr (arg2);
2404
2405 /* If all arguments are constant, and the value of len is not greater
2406 than the lengths of arg1 and arg2, evaluate at compile-time. */
2407 if (host_integerp (len, 1) && p1 && p2
2408 && compare_tree_int (len, strlen (p1) + 1) <= 0
2409 && compare_tree_int (len, strlen (p2) + 1) <= 0)
2410 {
2411 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
2412
2413 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
2414 }
2415
2416 /* If the len parameter is one, return an expression corresponding to
2417 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2418 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
2419 {
2420 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2421 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2422 tree ind1 =
2423 fold (build1 (CONVERT_EXPR, integer_type_node,
2424 build1 (INDIRECT_REF, cst_uchar_node,
2425 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2426 tree ind2 =
2427 fold (build1 (CONVERT_EXPR, integer_type_node,
2428 build1 (INDIRECT_REF, cst_uchar_node,
2429 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2430 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2431 return expand_expr (result, target, mode, EXPAND_NORMAL);
2432 }
2433
2434 #ifdef HAVE_cmpstrsi
2435 {
2436 rtx arg1_rtx, arg2_rtx, arg3_rtx;
2437 rtx result;
2438 rtx insn;
2439
2440 int arg1_align
2441 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2442 int arg2_align
2443 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2444 enum machine_mode insn_mode
2445 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
2446
2447 /* If either argument is not a pointer, just call the function. */
2448 if (arg1_align == 0 || arg2_align == 0)
2449 return 0;
2450
2451 /* Make a place to write the result of the instruction. */
2452 result = target;
2453 if (! (result != 0
2454 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
2455 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
2456 result = gen_reg_rtx (insn_mode);
2457
2458 arg1_rtx = get_memory_rtx (arg1);
2459 arg2_rtx = get_memory_rtx (arg2);
2460 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2461 if (!HAVE_cmpstrsi)
2462 insn = NULL_RTX;
2463 else
2464 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
2465 GEN_INT (MIN (arg1_align, arg2_align)));
2466
2467 if (insn)
2468 emit_insn (insn);
2469 else
2470 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
2471 TYPE_MODE (integer_type_node), 3,
2472 XEXP (arg1_rtx, 0), Pmode,
2473 XEXP (arg2_rtx, 0), Pmode,
2474 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
2475 TREE_UNSIGNED (sizetype)),
2476 TYPE_MODE (sizetype));
2477
2478 /* Return the value in the proper mode for this function. */
2479 mode = TYPE_MODE (TREE_TYPE (exp));
2480 if (GET_MODE (result) == mode)
2481 return result;
2482 else if (target != 0)
2483 {
2484 convert_move (target, result, 0);
2485 return target;
2486 }
2487 else
2488 return convert_to_mode (mode, result, 0);
2489 }
2490 #endif
2491
2492 return 0;
2493 }
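
/* For illustration: the code above handles calls such as

       memcmp ("abc", "abd", 3)   =>   -1  (folded at compile time)
       memcmp (p, q, 0)           =>   0   (operands still evaluated)
       memcmp (p, q, 1)           =>   *(const unsigned char *) p
                                        - *(const unsigned char *) q

   and otherwise tries the target's cmpstrsi pattern, if any, before
   falling back to the memcmp library call.  */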
2494
2495 /* Expand expression EXP, which is a call to the strcmp builtin. Return 0
2496 if we failed; the caller should emit a normal call. Otherwise try to get
2497 the result in TARGET, if convenient. */
2498
2499 static rtx
2500 expand_builtin_strcmp (exp, target, mode)
2501 tree exp;
2502 rtx target;
2503 enum machine_mode mode;
2504 {
2505 tree arglist = TREE_OPERAND (exp, 1);
2506 tree arg1, arg2, len, len2, fn;
2507 const char *p1, *p2;
2508
2509 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2510 return 0;
2511
2512 arg1 = TREE_VALUE (arglist);
2513 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2514
2515 p1 = c_getstr (arg1);
2516 p2 = c_getstr (arg2);
2517
2518 if (p1 && p2)
2519 {
2520 const int i = strcmp (p1, p2);
2521 return (i < 0 ? constm1_rtx : (i > 0 ? const1_rtx : const0_rtx));
2522 }
2523
2524 /* If either arg is "", return an expression corresponding to
2525 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2526 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
2527 {
2528 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2529 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2530 tree ind1 =
2531 fold (build1 (CONVERT_EXPR, integer_type_node,
2532 build1 (INDIRECT_REF, cst_uchar_node,
2533 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2534 tree ind2 =
2535 fold (build1 (CONVERT_EXPR, integer_type_node,
2536 build1 (INDIRECT_REF, cst_uchar_node,
2537 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2538 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2539 return expand_expr (result, target, mode, EXPAND_NORMAL);
2540 }
2541
2542 len = c_strlen (arg1);
2543 len2 = c_strlen (arg2);
2544
2545 if (len)
2546 len = size_binop (PLUS_EXPR, ssize_int (1), len);
2547
2548 if (len2)
2549 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
2550
2551 /* If we don't have a constant length for the first, use the length
2552 of the second, if we know it. We don't require a constant for
2553 this case; some cost analysis could be done if both are available
2554 but neither is constant. For now, assume they're equally cheap
2555 unless one has side effects.
2556
2557 If both strings have constant lengths, use the smaller. This
2558 could arise if optimization results in strcpy being called with
2559 two fixed strings, or if the code was machine-generated. We should
2560 add some code to the `memcmp' handler below to deal with such
2561 situations, someday. */
2562
2563 if (!len || TREE_CODE (len) != INTEGER_CST)
2564 {
2565 if (len2 && !TREE_SIDE_EFFECTS (len2))
2566 len = len2;
2567 else if (len == 0)
2568 return 0;
2569 }
2570 else if (len2 && TREE_CODE (len2) == INTEGER_CST
2571 && tree_int_cst_lt (len2, len))
2572 len = len2;
2573
2574 /* If both arguments have side effects, we cannot optimize. */
2575 if (TREE_SIDE_EFFECTS (len))
2576 return 0;
2577
2578 fn = built_in_decls[BUILT_IN_MEMCMP];
2579 if (!fn)
2580 return 0;
2581
2582 chainon (arglist, build_tree_list (NULL_TREE, len));
2583 return expand_expr (build_function_call_expr (fn, arglist),
2584 target, mode, EXPAND_NORMAL);
2585 }
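
/* For illustration: the code above handles calls such as

       strcmp ("abc", "abd")   =>   -1  (folded at compile time)
       strcmp (s, "")          =>   *(const unsigned char *) s
                                     - *(const unsigned char *) ""

   and, when at least one argument has a known length, rewrites
   strcmp (s1, s2) as memcmp (s1, s2, len), where LEN is the smaller
   known string length plus one.  */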
2586
2587 /* Expand expression EXP, which is a call to the strncmp builtin. Return 0
2588 if we failed; the caller should emit a normal call. Otherwise try to get
2589 the result in TARGET, if convenient. */
2590
2591 static rtx
2592 expand_builtin_strncmp (exp, target, mode)
2593 tree exp;
2594 rtx target;
2595 enum machine_mode mode;
2596 {
2597 tree arglist = TREE_OPERAND (exp, 1);
2598 tree fn, newarglist, len = 0;
2599 tree arg1, arg2, arg3;
2600 const char *p1, *p2;
2601
2602 if (!validate_arglist (arglist,
2603 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2604 return 0;
2605
2606 arg1 = TREE_VALUE (arglist);
2607 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2608 arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2609
2610 /* If the len parameter is zero, return zero. */
2611 if (host_integerp (arg3, 1) && tree_low_cst (arg3, 1) == 0)
2612 {
2613 /* Evaluate and ignore arg1 and arg2 in case they have
2614 side-effects. */
2615 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2616 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2617 return const0_rtx;
2618 }
2619
2620 p1 = c_getstr (arg1);
2621 p2 = c_getstr (arg2);
2622
2623 /* If all arguments are constant, evaluate at compile-time. */
2624 if (host_integerp (arg3, 1) && p1 && p2)
2625 {
2626 const int r = strncmp (p1, p2, tree_low_cst (arg3, 1));
2627 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
2628 }
2629
2630 /* If len == 1 or (either string parameter is "" and (len >= 1)),
2631 return (*(const u_char*)arg1 - *(const u_char*)arg2). */
2632 if (host_integerp (arg3, 1)
2633 && (tree_low_cst (arg3, 1) == 1
2634 || (tree_low_cst (arg3, 1) > 1
2635 && ((p1 && *p1 == '\0') || (p2 && *p2 == '\0')))))
2636 {
2637 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2638 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2639 tree ind1 =
2640 fold (build1 (CONVERT_EXPR, integer_type_node,
2641 build1 (INDIRECT_REF, cst_uchar_node,
2642 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2643 tree ind2 =
2644 fold (build1 (CONVERT_EXPR, integer_type_node,
2645 build1 (INDIRECT_REF, cst_uchar_node,
2646 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2647 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2648 return expand_expr (result, target, mode, EXPAND_NORMAL);
2649 }
2650
2651 /* If c_strlen can determine an expression for one of the string
2652 lengths, and it doesn't have side effects, then call
2653 expand_builtin_memcmp() using length MIN(strlen(string)+1, arg3). */
2654
2655 /* Perhaps one of the strings is really constant; if so, prefer
2656 that constant length over the other string's length. */
2657 if (p1)
2658 len = c_strlen (arg1);
2659 else if (p2)
2660 len = c_strlen (arg2);
2661
2662 /* If we still don't have a len, try either string arg as long
2663 as they don't have side effects. */
2664 if (!len && !TREE_SIDE_EFFECTS (arg1))
2665 len = c_strlen (arg1);
2666 if (!len && !TREE_SIDE_EFFECTS (arg2))
2667 len = c_strlen (arg2);
2668 /* If we still don't have a length, punt. */
2669 if (!len)
2670 return 0;
2671
2672 fn = built_in_decls[BUILT_IN_MEMCMP];
2673 if (!fn)
2674 return 0;
2675
2676 /* Add one to the string length. */
2677 len = fold (size_binop (PLUS_EXPR, len, ssize_int (1)));
2678
2679 /* The actual new length parameter is MIN(len,arg3). */
2680 len = fold (build (MIN_EXPR, TREE_TYPE (len), len, arg3));
2681
2682 newarglist = build_tree_list (NULL_TREE, len);
2683 newarglist = tree_cons (NULL_TREE, arg2, newarglist);
2684 newarglist = tree_cons (NULL_TREE, arg1, newarglist);
2685 return expand_expr (build_function_call_expr (fn, newarglist),
2686 target, mode, EXPAND_NORMAL);
2687 }
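
/* For illustration: the code above handles calls such as

       strncmp ("ab", "ac", 1)   =>   0  (folded at compile time)
       strncmp (s1, s2, 0)       =>   0  (operands still evaluated)
       strncmp (s1, "abc", n)    =>   memcmp (s1, "abc", MIN (4, n))

   where 4 is strlen ("abc") + 1; if no string length can be
   determined, a normal call is emitted.  */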
2688
2689 /* Expand a call to the strcat builtin, with arguments in ARGLIST.
2690 Return 0 if we failed; the caller should emit a normal call.
2691 Otherwise try to get the result in TARGET, if convenient. */
2692
2693 static rtx
2694 expand_builtin_strcat (arglist, target, mode)
2695 tree arglist;
2696 rtx target;
2697 enum machine_mode mode;
2698 {
2699 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2700 return 0;
2701 else
2702 {
2703 tree dst = TREE_VALUE (arglist),
2704 src = TREE_VALUE (TREE_CHAIN (arglist));
2705 const char *p = c_getstr (src);
2706
2707 /* If the string length is zero, return the dst parameter. */
2708 if (p && *p == '\0')
2709 return expand_expr (dst, target, mode, EXPAND_NORMAL);
2710
2711 return 0;
2712 }
2713 }
2714
2715 /* Expand a call to the strncat builtin, with arguments in ARGLIST.
2716 Return 0 if we failed; the caller should emit a normal call.
2717 Otherwise try to get the result in TARGET, if convenient. */
2718
2719 static rtx
2720 expand_builtin_strncat (arglist, target, mode)
2721 tree arglist;
2722 rtx target;
2723 enum machine_mode mode;
2724 {
2725 if (!validate_arglist (arglist,
2726 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2727 return 0;
2728 else
2729 {
2730 tree dst = TREE_VALUE (arglist),
2731 src = TREE_VALUE (TREE_CHAIN (arglist)),
2732 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2733 const char *p = c_getstr (src);
2734
2735 /* If the requested length is zero, or the src parameter string
2736 length is zero, return the dst parameter. */
2737 if (integer_zerop (len) || (p && *p == '\0'))
2738 {
2739 /* Evaluate and ignore the src and len parameters in case
2740 they have side-effects. */
2741 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2742 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
2743 return expand_expr (dst, target, mode, EXPAND_NORMAL);
2744 }
2745
2746 /* If the requested len is greater than or equal to the string
2747 length, call strcat. */
2748 if (TREE_CODE (len) == INTEGER_CST && p
2749 && compare_tree_int (len, strlen (p)) >= 0)
2750 {
2751 tree newarglist
2752 = tree_cons (NULL_TREE, dst, build_tree_list (NULL_TREE, src));
2753 tree fn = built_in_decls[BUILT_IN_STRCAT];
2754
2755 /* If the replacement _DECL isn't initialized, don't do the
2756 transformation. */
2757 if (!fn)
2758 return 0;
2759
2760 return expand_expr (build_function_call_expr (fn, newarglist),
2761 target, mode, EXPAND_NORMAL);
2762 }
2763 return 0;
2764 }
2765 }
2766
2767 /* Expand a call to the strspn builtin, with arguments in ARGLIST.
2768 Return 0 if we failed; the caller should emit a normal call.
2769 Otherwise try to get the result in TARGET, if convenient. */
2770
2771 static rtx
2772 expand_builtin_strspn (arglist, target, mode)
2773 tree arglist;
2774 rtx target;
2775 enum machine_mode mode;
2776 {
2777 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2778 return 0;
2779 else
2780 {
2781 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2782 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
2783
2784 /* If both arguments are constants, evaluate at compile-time. */
2785 if (p1 && p2)
2786 {
2787 const size_t r = strspn (p1, p2);
2788 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
2789 }
2790
2791 /* If either argument is "", return 0. */
2792 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
2793 {
2794 /* Evaluate and ignore both arguments in case either one has
2795 side-effects. */
2796 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2797 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2798 return const0_rtx;
2799 }
2800 return 0;
2801 }
2802 }
2803
2804 /* Expand a call to the strcspn builtin, with arguments in ARGLIST.
2805 Return 0 if we failed; the caller should emit a normal call.
2806 Otherwise try to get the result in TARGET, if convenient. */
2807
2808 static rtx
2809 expand_builtin_strcspn (arglist, target, mode)
2810 tree arglist;
2811 rtx target;
2812 enum machine_mode mode;
2813 {
2814 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2815 return 0;
2816 else
2817 {
2818 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2819 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
2820
2821 /* If both arguments are constants, evaluate at compile-time. */
2822 if (p1 && p2)
2823 {
2824 const size_t r = strcspn (p1, p2);
2825 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
2826 }
2827
2828 /* If the first argument is "", return 0. */
2829 if (p1 && *p1 == '\0')
2830 {
2831 /* Evaluate and ignore argument s2 in case it has
2832 side-effects. */
2833 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2834 return const0_rtx;
2835 }
2836
2837 /* If the second argument is "", return __builtin_strlen(s1). */
2838 if (p2 && *p2 == '\0')
2839 {
2840 tree newarglist = build_tree_list (NULL_TREE, s1),
2841 fn = built_in_decls[BUILT_IN_STRLEN];
2842
2843 /* If the replacement _DECL isn't initialized, don't do the
2844 transformation. */
2845 if (!fn)
2846 return 0;
2847
2848 return expand_expr (build_function_call_expr (fn, newarglist),
2849 target, mode, EXPAND_NORMAL);
2850 }
2851 return 0;
2852 }
2853 }
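
/* For illustration: the code above handles calls such as

       strcspn ("hello", "lo")   =>   2  (folded at compile time)
       strcspn ("", s2)          =>   0  (s2 still evaluated)
       strcspn (s1, "")          =>   strlen (s1)

   while any other form becomes a normal call to strcspn.  */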
2854
2855 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
2856 if that's convenient. */
2857
2858 rtx
2859 expand_builtin_saveregs ()
2860 {
2861 rtx val, seq;
2862
2863 /* Don't do __builtin_saveregs more than once in a function.
2864 Save the result of the first call and reuse it. */
2865 if (saveregs_value != 0)
2866 return saveregs_value;
2867
2868 /* When this function is called, it means that registers must be
2869 saved on entry to this function. So we migrate the call to the
2870 first insn of this function. */
2871
2872 start_sequence ();
2873
2874 #ifdef EXPAND_BUILTIN_SAVEREGS
2875 /* Do whatever the machine needs done in this case. */
2876 val = EXPAND_BUILTIN_SAVEREGS ();
2877 #else
2878 /* ??? We used to try and build up a call to the out of line function,
2879 guessing about what registers needed saving etc. This became much
2880 harder with __builtin_va_start, since we don't have a tree for a
2881 call to __builtin_saveregs to fall back on. There was exactly one
2882 port (i860) that used this code, and I'm unconvinced it could actually
2883 handle the general case. So we no longer try to handle anything
2884 weird and make the backend absorb the evil. */
2885
2886 error ("__builtin_saveregs not supported by this target");
2887 val = const0_rtx;
2888 #endif
2889
2890 seq = get_insns ();
2891 end_sequence ();
2892
2893 saveregs_value = val;
2894
2895 /* Put the insns after the NOTE that starts the function. If this
2896 is inside a start_sequence, make the outer-level insn chain current, so
2897 the code is placed at the start of the function. */
2898 push_topmost_sequence ();
2899 emit_insn_after (seq, get_insns ());
2900 pop_topmost_sequence ();
2901
2902 return val;
2903 }
2904
2905 /* __builtin_args_info (N) returns word N of the arg space info
2906 for the current function. The number and meanings of the words
2907 are controlled by the definition of CUMULATIVE_ARGS. */
2908
2909 static rtx
2910 expand_builtin_args_info (exp)
2911 tree exp;
2912 {
2913 tree arglist = TREE_OPERAND (exp, 1);
2914 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
2915 int *word_ptr = (int *) &current_function_args_info;
2916 #if 0
2917 /* These are used by the #if 0'ed-out code below. */
2918 int i;
2919 tree type, elts, result;
2920 #endif
2921
2922 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
2923 abort ();
2924
2925 if (arglist != 0)
2926 {
2927 if (!host_integerp (TREE_VALUE (arglist), 0))
2928 error ("argument of `__builtin_args_info' must be constant");
2929 else
2930 {
2931 HOST_WIDE_INT wordnum = tree_low_cst (TREE_VALUE (arglist), 0);
2932
2933 if (wordnum < 0 || wordnum >= nwords)
2934 error ("argument of `__builtin_args_info' out of range");
2935 else
2936 return GEN_INT (word_ptr[wordnum]);
2937 }
2938 }
2939 else
2940 error ("missing argument in `__builtin_args_info'");
2941
2942 return const0_rtx;
2943
2944 #if 0
2945 for (i = 0; i < nwords; i++)
2946 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
2947
2948 type = build_array_type (integer_type_node,
2949 build_index_type (build_int_2 (nwords, 0)));
2950 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
2951 TREE_CONSTANT (result) = 1;
2952 TREE_STATIC (result) = 1;
2953 result = build1 (INDIRECT_REF, build_pointer_type (type), result);
2954 TREE_CONSTANT (result) = 1;
2955 return expand_expr (result, NULL_RTX, VOIDmode, 0);
2956 #endif
2957 }
2958
2959 /* Expand ARGLIST, from a call to __builtin_next_arg. */
2960
2961 static rtx
2962 expand_builtin_next_arg (arglist)
2963 tree arglist;
2964 {
2965 tree fntype = TREE_TYPE (current_function_decl);
2966
2967 if (TYPE_ARG_TYPES (fntype) == 0
2968 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2969 == void_type_node))
2970 {
2971 error ("`va_start' used in function with fixed args");
2972 return const0_rtx;
2973 }
2974
2975 if (arglist)
2976 {
2977 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
2978 tree arg = TREE_VALUE (arglist);
2979
2980 /* Strip off all nops for the sake of the comparison. This
2981 is not quite the same as STRIP_NOPS. It does more.
2982 We must also strip off INDIRECT_REFs for C++ reference
2983 parameters. */
2984 while (TREE_CODE (arg) == NOP_EXPR
2985 || TREE_CODE (arg) == CONVERT_EXPR
2986 || TREE_CODE (arg) == NON_LVALUE_EXPR
2987 || TREE_CODE (arg) == INDIRECT_REF)
2988 arg = TREE_OPERAND (arg, 0);
2989 if (arg != last_parm)
2990 warning ("second parameter of `va_start' not last named argument");
2991 }
2992 else
2993 /* Evidently an out of date version of <stdarg.h>; can't validate
2994 va_start's second argument, but can still work as intended. */
2995 warning ("`__builtin_next_arg' called without an argument");
2996
2997 return expand_binop (Pmode, add_optab,
2998 current_function_internal_arg_pointer,
2999 current_function_arg_offset_rtx,
3000 NULL_RTX, 0, OPTAB_LIB_WIDEN);
3001 }
3002
3003 /* Make it easier for the backends by protecting the valist argument
3004 from multiple evaluations. */
3005
3006 static tree
3007 stabilize_va_list (valist, needs_lvalue)
3008 tree valist;
3009 int needs_lvalue;
3010 {
3011 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
3012 {
3013 if (TREE_SIDE_EFFECTS (valist))
3014 valist = save_expr (valist);
3015
3016 /* For this case, the backends will be expecting a pointer to
3017 TREE_TYPE (va_list_type_node), but it's possible we've
3018 actually been given an array (an actual va_list_type_node).
3019 So fix it. */
3020 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
3021 {
3022 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
3023 tree p2 = build_pointer_type (va_list_type_node);
3024
3025 valist = build1 (ADDR_EXPR, p2, valist);
3026 valist = fold (build1 (NOP_EXPR, p1, valist));
3027 }
3028 }
3029 else
3030 {
3031 tree pt;
3032
3033 if (! needs_lvalue)
3034 {
3035 if (! TREE_SIDE_EFFECTS (valist))
3036 return valist;
3037
3038 pt = build_pointer_type (va_list_type_node);
3039 valist = fold (build1 (ADDR_EXPR, pt, valist));
3040 TREE_SIDE_EFFECTS (valist) = 1;
3041 }
3042
3043 if (TREE_SIDE_EFFECTS (valist))
3044 valist = save_expr (valist);
3045 valist = fold (build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)),
3046 valist));
3047 }
3048
3049 return valist;
3050 }
3051
3052 /* The "standard" implementation of va_start: just assign `nextarg' to
3053 the variable. */
3054
3055 void
3056 std_expand_builtin_va_start (valist, nextarg)
3057 tree valist;
3058 rtx nextarg;
3059 {
3060 tree t;
3061
3062 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3063 make_tree (ptr_type_node, nextarg));
3064 TREE_SIDE_EFFECTS (t) = 1;
3065
3066 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3067 }
3068
3069 /* Expand ARGLIST, from a call to __builtin_va_start. */
3070
3071 static rtx
3072 expand_builtin_va_start (arglist)
3073 tree arglist;
3074 {
3075 rtx nextarg;
3076 tree chain, valist;
3077
3078 chain = TREE_CHAIN (arglist);
3079
3080 if (TREE_CHAIN (chain))
3081 error ("too many arguments to function `va_start'");
3082
3083 nextarg = expand_builtin_next_arg (chain);
3084 valist = stabilize_va_list (TREE_VALUE (arglist), 1);
3085
3086 #ifdef EXPAND_BUILTIN_VA_START
3087 EXPAND_BUILTIN_VA_START (valist, nextarg);
3088 #else
3089 std_expand_builtin_va_start (valist, nextarg);
3090 #endif
3091
3092 return const0_rtx;
3093 }
3094
3095 /* The "standard" implementation of va_arg: read the value from the
3096 current (padded) address and increment by the (padded) size. */
3097
3098 rtx
3099 std_expand_builtin_va_arg (valist, type)
3100 tree valist, type;
3101 {
3102 tree addr_tree, t, type_size = NULL;
3103 tree align, alignm1;
3104 tree rounded_size;
3105 rtx addr;
3106
3107 /* Compute the rounded size of the type. */
3108 align = size_int (PARM_BOUNDARY / BITS_PER_UNIT);
3109 alignm1 = size_int (PARM_BOUNDARY / BITS_PER_UNIT - 1);
3110 if (type == error_mark_node
3111 || (type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type))) == NULL
3112 || TREE_OVERFLOW (type_size))
3113 rounded_size = size_zero_node;
3114 else
3115 rounded_size = fold (build (MULT_EXPR, sizetype,
3116 fold (build (TRUNC_DIV_EXPR, sizetype,
3117 fold (build (PLUS_EXPR, sizetype,
3118 type_size, alignm1)),
3119 align)),
3120 align));
3121
3122 /* Get AP. */
3123 addr_tree = valist;
3124 if (PAD_VARARGS_DOWN && ! integer_zerop (rounded_size))
3125 {
3126 /* Small args are padded downward. */
3127 addr_tree = fold (build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
3128 fold (build (COND_EXPR, sizetype,
3129 fold (build (GT_EXPR, sizetype,
3130 rounded_size,
3131 align)),
3132 size_zero_node,
3133 fold (build (MINUS_EXPR, sizetype,
3134 rounded_size,
3135 type_size))))));
3136 }
3137
3138 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3139 addr = copy_to_reg (addr);
3140
3141 /* Compute new value for AP. */
3142 if (! integer_zerop (rounded_size))
3143 {
3144 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3145 build (PLUS_EXPR, TREE_TYPE (valist), valist,
3146 rounded_size));
3147 TREE_SIDE_EFFECTS (t) = 1;
3148 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3149 }
3150
3151 return addr;
3152 }
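
/* Worked example (assuming a hypothetical target with a 32-bit
   PARM_BOUNDARY, i.e. 4-byte argument slots): for a 2-byte short,
   rounded_size = ((2 + 3) / 4) * 4 = 4.  If PAD_VARARGS_DOWN, the
   address is biased by 4 - 2 = 2 bytes so the value is read from the
   high end of its slot, and AP is then advanced by the full 4 bytes.  */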
3153
3154 /* Expand __builtin_va_arg, which is not really a builtin function, but
3155 a very special sort of operator. */
3156
3157 rtx
3158 expand_builtin_va_arg (valist, type)
3159 tree valist, type;
3160 {
3161 rtx addr, result;
3162 tree promoted_type, want_va_type, have_va_type;
3163
3164 /* Verify that valist is of the proper type. */
3165
3166 want_va_type = va_list_type_node;
3167 have_va_type = TREE_TYPE (valist);
3168 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
3169 {
3170 /* If va_list is an array type, the argument may have decayed
3171 to a pointer type, e.g. by being passed to another function.
3172 In that case, unwrap both types so that we can compare the
3173 underlying records. */
3174 if (TREE_CODE (have_va_type) == ARRAY_TYPE
3175 || TREE_CODE (have_va_type) == POINTER_TYPE)
3176 {
3177 want_va_type = TREE_TYPE (want_va_type);
3178 have_va_type = TREE_TYPE (have_va_type);
3179 }
3180 }
3181 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
3182 {
3183 error ("first argument to `va_arg' not of type `va_list'");
3184 addr = const0_rtx;
3185 }
3186
3187 /* Generate a diagnostic for requesting data of a type that cannot
3188 be passed through `...' due to type promotion at the call site. */
3189 else if ((promoted_type = (*lang_hooks.types.type_promotes_to) (type))
3190 != type)
3191 {
3192 const char *name = "<anonymous type>", *pname = 0;
3193 static bool gave_help;
3194
3195 if (TYPE_NAME (type))
3196 {
3197 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE)
3198 name = IDENTIFIER_POINTER (TYPE_NAME (type));
3199 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
3200 && DECL_NAME (TYPE_NAME (type)))
3201 name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
3202 }
3203 if (TYPE_NAME (promoted_type))
3204 {
3205 if (TREE_CODE (TYPE_NAME (promoted_type)) == IDENTIFIER_NODE)
3206 pname = IDENTIFIER_POINTER (TYPE_NAME (promoted_type));
3207 else if (TREE_CODE (TYPE_NAME (promoted_type)) == TYPE_DECL
3208 && DECL_NAME (TYPE_NAME (promoted_type)))
3209 pname = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (promoted_type)));
3210 }
3211
3212 /* Unfortunately, this is merely undefined, rather than a constraint
3213 violation, so we cannot make this an error. If this call is never
3214 executed, the program is still strictly conforming. */
3215 warning ("`%s' is promoted to `%s' when passed through `...'",
3216 name, pname);
3217 if (! gave_help)
3218 {
3219 gave_help = true;
3220 warning ("(so you should pass `%s' not `%s' to `va_arg')",
3221 pname, name);
3222 }
3223
3224 /* We can, however, treat "undefined" any way we please.
3225 Call abort to encourage the user to fix the program. */
3226 expand_builtin_trap ();
3227
3228 /* This is dead code, but go ahead and finish so that the
3229 mode of the result comes out right. */
3230 addr = const0_rtx;
3231 }
3232 else
3233 {
3234 /* Make it easier for the backends by protecting the valist argument
3235 from multiple evaluations. */
3236 valist = stabilize_va_list (valist, 0);
3237
3238 #ifdef EXPAND_BUILTIN_VA_ARG
3239 addr = EXPAND_BUILTIN_VA_ARG (valist, type);
3240 #else
3241 addr = std_expand_builtin_va_arg (valist, type);
3242 #endif
3243 }
3244
3245 #ifdef POINTERS_EXTEND_UNSIGNED
3246 if (GET_MODE (addr) != Pmode)
3247 addr = convert_memory_address (Pmode, addr);
3248 #endif
3249
3250 result = gen_rtx_MEM (TYPE_MODE (type), addr);
3251 set_mem_alias_set (result, get_varargs_alias_set ());
3252
3253 return result;
3254 }
3255
3256 /* Expand ARGLIST, from a call to __builtin_va_end. */
3257
3258 static rtx
3259 expand_builtin_va_end (arglist)
3260 tree arglist;
3261 {
3262 tree valist = TREE_VALUE (arglist);
3263
3264 #ifdef EXPAND_BUILTIN_VA_END
3265 valist = stabilize_va_list (valist, 0);
3266 EXPAND_BUILTIN_VA_END (arglist);
3267 #else
3268 /* Evaluate for side effects, if needed. I hate macros that don't
3269 do that. */
3270 if (TREE_SIDE_EFFECTS (valist))
3271 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
3272 #endif
3273
3274 return const0_rtx;
3275 }
3276
3277 /* Expand ARGLIST, from a call to __builtin_va_copy. We do this as a
3278 builtin rather than just as an assignment in stdarg.h because of the
3279 nastiness of array-type va_list types. */
3280
3281 static rtx
3282 expand_builtin_va_copy (arglist)
3283 tree arglist;
3284 {
3285 tree dst, src, t;
3286
3287 dst = TREE_VALUE (arglist);
3288 src = TREE_VALUE (TREE_CHAIN (arglist));
3289
3290 dst = stabilize_va_list (dst, 1);
3291 src = stabilize_va_list (src, 0);
3292
3293 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
3294 {
3295 t = build (MODIFY_EXPR, va_list_type_node, dst, src);
3296 TREE_SIDE_EFFECTS (t) = 1;
3297 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3298 }
3299 else
3300 {
3301 rtx dstb, srcb, size;
3302
3303 /* Evaluate to pointers. */
3304 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
3305 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
3306 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
3307 VOIDmode, EXPAND_NORMAL);
3308
3309 #ifdef POINTERS_EXTEND_UNSIGNED
3310 if (GET_MODE (dstb) != Pmode)
3311 dstb = convert_memory_address (Pmode, dstb);
3312
3313 if (GET_MODE (srcb) != Pmode)
3314 srcb = convert_memory_address (Pmode, srcb);
3315 #endif
3316
3317 /* "Dereference" to BLKmode memories. */
3318 dstb = gen_rtx_MEM (BLKmode, dstb);
3319 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
3320 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
3321 srcb = gen_rtx_MEM (BLKmode, srcb);
3322 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
3323 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
3324
3325 /* Copy. */
3326 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
3327 }
3328
3329 return const0_rtx;
3330 }
3331
3332 /* Expand a call to one of the builtin functions __builtin_frame_address or
3333 __builtin_return_address. */
3334
3335 static rtx
3336 expand_builtin_frame_address (exp)
3337 tree exp;
3338 {
3339 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3340 tree arglist = TREE_OPERAND (exp, 1);
3341
3342 /* The argument must be a nonnegative integer constant.
3343 It counts the number of frames to scan up the stack.
3344 The value is the return address saved in that frame. */
3345 if (arglist == 0)
3346 /* Warning about missing arg was already issued. */
3347 return const0_rtx;
3348 else if (! host_integerp (TREE_VALUE (arglist), 1))
3349 {
3350 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3351 error ("invalid arg to `__builtin_frame_address'");
3352 else
3353 error ("invalid arg to `__builtin_return_address'");
3354 return const0_rtx;
3355 }
3356 else
3357 {
3358 rtx tem
3359 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
3360 tree_low_cst (TREE_VALUE (arglist), 1),
3361 hard_frame_pointer_rtx);
3362
3363 /* Some ports cannot access arbitrary stack frames. */
3364 if (tem == NULL)
3365 {
3366 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3367 warning ("unsupported arg to `__builtin_frame_address'");
3368 else
3369 warning ("unsupported arg to `__builtin_return_address'");
3370 return const0_rtx;
3371 }
3372
3373 /* For __builtin_frame_address, return what we've got. */
3374 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3375 return tem;
3376
3377 if (GET_CODE (tem) != REG
3378 && ! CONSTANT_P (tem))
3379 tem = copy_to_mode_reg (Pmode, tem);
3380 return tem;
3381 }
3382 }
3383
3384 /* Expand a call to the alloca builtin, with arguments ARGLIST. Return 0 if
3385 we failed and the caller should emit a normal call, otherwise try to get
3386 the result in TARGET, if convenient. */
3387
3388 static rtx
3389 expand_builtin_alloca (arglist, target)
3390 tree arglist;
3391 rtx target;
3392 {
3393 rtx op0;
3394 rtx result;
3395
3396 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
3397 return 0;
3398
3399 /* Compute the argument. */
3400 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
3401
3402 /* Allocate the desired space. */
3403 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
3404
3405 #ifdef POINTERS_EXTEND_UNSIGNED
3406 if (GET_MODE (result) != ptr_mode)
3407 result = convert_memory_address (ptr_mode, result);
3408 #endif
3409
3410 return result;
3411 }
3412
3413 /* Expand a call to the ffs builtin. The arguments are in ARGLIST.
3414 Return 0 if a normal call should be emitted rather than expanding the
3415 function in-line. If convenient, the result should be placed in TARGET.
3416 SUBTARGET may be used as the target for computing one of EXP's operands. */
3417
3418 static rtx
3419 expand_builtin_ffs (arglist, target, subtarget)
3420 tree arglist;
3421 rtx target, subtarget;
3422 {
3423 rtx op0;
3424 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
3425 return 0;
3426
3427 /* Compute the argument. */
3428 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
3429 /* Compute ffs, into TARGET if possible.
3430 Set TARGET to wherever the result comes back. */
3431 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
3432 ffs_optab, op0, target, 1);
3433 if (target == 0)
3434 abort ();
3435 return target;
3436 }
3437
3438 /* If the string passed to fputs is a constant, attempt to transform the
3439 call into __builtin_fputc(), fwrite(), or nothing, depending on its length. */
3440
3441 static rtx
3442 expand_builtin_fputs (arglist, ignore, unlocked)
3443 tree arglist;
3444 int ignore;
3445 int unlocked;
3446 {
3447 tree len, fn;
3448 tree fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
3449 : built_in_decls[BUILT_IN_FPUTC];
3450 tree fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
3451 : built_in_decls[BUILT_IN_FWRITE];
3452
3453 /* If the return value is used, or the replacement _DECL isn't
3454 initialized, don't do the transformation. */
3455 if (!ignore || !fn_fputc || !fn_fwrite)
3456 return 0;
3457
3458 /* Verify the arguments in the original call. */
3459 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3460 return 0;
3461
3462 /* Get the length of the string passed to fputs. If the length
3463 can't be determined, punt. */
3464 if (!(len = c_strlen (TREE_VALUE (arglist)))
3465 || TREE_CODE (len) != INTEGER_CST)
3466 return 0;
3467
3468 switch (compare_tree_int (len, 1))
3469 {
3470 case -1: /* length is 0, delete the call entirely. */
3471 {
3472 /* Evaluate and ignore the argument in case it has
3473 side-effects. */
3474 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
3475 VOIDmode, EXPAND_NORMAL);
3476 return const0_rtx;
3477 }
3478 case 0: /* length is 1, call fputc. */
3479 {
3480 const char *p = c_getstr (TREE_VALUE (arglist));
3481
3482 if (p != NULL)
3483 {
3484 /* New argument list transforming fputs(string, stream) to
3485 fputc(string[0], stream). */
3486 arglist =
3487 build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
3488 arglist =
3489 tree_cons (NULL_TREE, build_int_2 (p[0], 0), arglist);
3490 fn = fn_fputc;
3491 break;
3492 }
3493 }
3494 /* FALLTHROUGH */
3495 case 1: /* length is greater than 1, call fwrite. */
3496 {
3497 tree string_arg = TREE_VALUE (arglist);
3498
3499 /* New argument list transforming fputs(string, stream) to
3500 fwrite(string, 1, len, stream). */
3501 arglist = build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
3502 arglist = tree_cons (NULL_TREE, len, arglist);
3503 arglist = tree_cons (NULL_TREE, size_one_node, arglist);
3504 arglist = tree_cons (NULL_TREE, string_arg, arglist);
3505 fn = fn_fwrite;
3506 break;
3507 }
3508 default:
3509 abort ();
3510 }
3511
3512 return expand_expr (build_function_call_expr (fn, arglist),
3513 (ignore ? const0_rtx : NULL_RTX),
3514 VOIDmode, EXPAND_NORMAL);
3515 }
3516
3517 /* Expand a call to __builtin_expect. We return our argument and emit a
3518 NOTE_INSN_EXPECTED_VALUE note. This is the expansion of __builtin_expect in
3519 a non-jump context. */
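/* For instance, in straight-line code such as

   x = __builtin_expect (y, 0);

   the value of Y is returned unchanged and a NOTE_INSN_EXPECTED_VALUE
   note records that it is expected to equal 0.  */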
3520
3521 static rtx
3522 expand_builtin_expect (arglist, target)
3523 tree arglist;
3524 rtx target;
3525 {
3526 tree exp, c;
3527 rtx note, rtx_c;
3528
3529 if (arglist == NULL_TREE
3530 || TREE_CHAIN (arglist) == NULL_TREE)
3531 return const0_rtx;
3532 exp = TREE_VALUE (arglist);
3533 c = TREE_VALUE (TREE_CHAIN (arglist));
3534
3535 if (TREE_CODE (c) != INTEGER_CST)
3536 {
3537 error ("second arg to `__builtin_expect' must be a constant");
3538 c = integer_zero_node;
3539 }
3540
3541 target = expand_expr (exp, target, VOIDmode, EXPAND_NORMAL);
3542
3543 /* Don't bother with expected value notes for integral constants. */
3544 if (GET_CODE (target) != CONST_INT)
3545 {
3546 /* We do need to force this into a register so that we can be
3547 reasonably sure of interpreting the branch condition
3548 correctly later. */
3549 target = force_reg (GET_MODE (target), target);
3550
3551 rtx_c = expand_expr (c, NULL_RTX, GET_MODE (target), EXPAND_NORMAL);
3552
3553 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
3554 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, target, rtx_c);
3555 }
3556
3557 return target;
3558 }
3559
3560 /* Like expand_builtin_expect, except do this in a jump context. This is
3561 called from do_jump if the conditional is a __builtin_expect. Return either
3562 a list of insns to emit the jump or NULL if we cannot optimize
3563 __builtin_expect. We need to optimize this at jump time so that machines
3564 like the PowerPC don't turn the test into an SCC operation, and then jump
3565 based on the test being 0/1. */
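/* A typical pattern handled here is

   if (__builtin_expect (ptr != 0, 1))
     ...

   where the conditional jump emitted for the test is annotated with
   PRED_BUILTIN_EXPECT rather than first computing a 0/1 value and
   branching on it.  */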
3566
3567 rtx
3568 expand_builtin_expect_jump (exp, if_false_label, if_true_label)
3569 tree exp;
3570 rtx if_false_label;
3571 rtx if_true_label;
3572 {
3573 tree arglist = TREE_OPERAND (exp, 1);
3574 tree arg0 = TREE_VALUE (arglist);
3575 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3576 rtx ret = NULL_RTX;
3577
3578 /* Only handle __builtin_expect (test, 0) and
3579 __builtin_expect (test, 1). */
3580 if (TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
3581 && (integer_zerop (arg1) || integer_onep (arg1)))
3582 {
3583 int num_jumps = 0;
3584 rtx insn;
3585
3586 /* If we fail to locate an appropriate conditional jump, we'll
3587 fall back to normal evaluation. Ensure that the expression
3588 can be re-evaluated. */
3589 switch (unsafe_for_reeval (arg0))
3590 {
3591 case 0: /* Safe. */
3592 break;
3593
3594 case 1: /* Mildly unsafe. */
3595 arg0 = unsave_expr (arg0);
3596 break;
3597
3598 case 2: /* Wildly unsafe. */
3599 return NULL_RTX;
3600 }
3601
3602 /* Expand the jump insns. */
3603 start_sequence ();
3604 do_jump (arg0, if_false_label, if_true_label);
3605 ret = get_insns ();
3606 end_sequence ();
3607
3608 /* Now that the __builtin_expect has been validated, go through and add
3609 the expected-value notes to each of the conditional jumps. If we run
3610 into an error, just give up and generate the 'safe' code of doing an
3611 SCC operation and then branching on that. */
3612 insn = ret;
3613 while (insn != NULL_RTX)
3614 {
3615 rtx next = NEXT_INSN (insn);
3616 rtx pattern;
3617
3618 if (GET_CODE (insn) == JUMP_INSN && any_condjump_p (insn)
3619 && (pattern = pc_set (insn)) != NULL_RTX)
3620 {
3621 rtx ifelse = SET_SRC (pattern);
3622 rtx label;
3623 int taken;
3624
3625 if (GET_CODE (ifelse) != IF_THEN_ELSE)
3626 goto do_next_insn;
3627
3628 if (GET_CODE (XEXP (ifelse, 1)) == LABEL_REF)
3629 {
3630 taken = 1;
3631 label = XEXP (XEXP (ifelse, 1), 0);
3632 }
3633 /* An inverted jump reverses the probabilities. */
3634 else if (GET_CODE (XEXP (ifelse, 2)) == LABEL_REF)
3635 {
3636 taken = 0;
3637 label = XEXP (XEXP (ifelse, 2), 0);
3638 }
3639 /* We shouldn't have to worry about conditional returns during
3640 the expansion stage, but handle them gracefully anyway. */
3641 else if (GET_CODE (XEXP (ifelse, 1)) == RETURN)
3642 {
3643 taken = 1;
3644 label = NULL_RTX;
3645 }
3646 /* An inverted return reverses the probabilities. */
3647 else if (GET_CODE (XEXP (ifelse, 2)) == RETURN)
3648 {
3649 taken = 0;
3650 label = NULL_RTX;
3651 }
3652 else
3653 goto do_next_insn;
3654
3655 /* If the test is expected to fail, reverse the
3656 probabilities. */
3657 if (integer_zerop (arg1))
3658 taken = 1 - taken;
3659
3660 /* If we are jumping to the false label, reverse the
3661 probabilities. */
3662 if (label == NULL_RTX)
3663 ; /* conditional return */
3664 else if (label == if_false_label)
3665 taken = 1 - taken;
3666 else if (label != if_true_label)
3667 goto do_next_insn;
3668
3669 num_jumps++;
3670 predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
3671 }
3672
3673 do_next_insn:
3674 insn = next;
3675 }
3676
3677 /* If no jumps were modified, fail and do __builtin_expect the normal
3678 way. */
3679 if (num_jumps == 0)
3680 ret = NULL_RTX;
3681 }
3682
3683 return ret;
3684 }
3685
3686 void
3687 expand_builtin_trap ()
3688 {
3689 #ifdef HAVE_trap
3690 if (HAVE_trap)
3691 emit_insn (gen_trap ());
3692 else
3693 #endif
3694 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
3695 emit_barrier ();
3696 }
3697 \f
3698 /* Expand an expression EXP that calls a built-in function,
3699 with result going to TARGET if that's convenient
3700 (and in mode MODE if that's convenient).
3701 SUBTARGET may be used as the target for computing one of EXP's operands.
3702 IGNORE is nonzero if the value is to be ignored. */
3703
3704 rtx
3705 expand_builtin (exp, target, subtarget, mode, ignore)
3706 tree exp;
3707 rtx target;
3708 rtx subtarget;
3709 enum machine_mode mode;
3710 int ignore;
3711 {
3712 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3713 tree arglist = TREE_OPERAND (exp, 1);
3714 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
3715
3716 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
3717 return (*targetm.expand_builtin) (exp, target, subtarget, mode, ignore);
3718
3719 /* When not optimizing, generate calls to library functions for a certain
3720 set of builtins. */
3721 if (!optimize && !CALLED_AS_BUILT_IN (fndecl))
3722 switch (fcode)
3723 {
3724 case BUILT_IN_SQRT:
3725 case BUILT_IN_SQRTF:
3726 case BUILT_IN_SQRTL:
3727 case BUILT_IN_SIN:
3728 case BUILT_IN_SINF:
3729 case BUILT_IN_SINL:
3730 case BUILT_IN_COS:
3731 case BUILT_IN_COSF:
3732 case BUILT_IN_COSL:
3733 case BUILT_IN_EXP:
3734 case BUILT_IN_EXPF:
3735 case BUILT_IN_EXPL:
3736 case BUILT_IN_MEMSET:
3737 case BUILT_IN_MEMCPY:
3738 case BUILT_IN_MEMCMP:
3739 case BUILT_IN_BCMP:
3740 case BUILT_IN_BZERO:
3741 case BUILT_IN_INDEX:
3742 case BUILT_IN_RINDEX:
3743 case BUILT_IN_STRCHR:
3744 case BUILT_IN_STRRCHR:
3745 case BUILT_IN_STRLEN:
3746 case BUILT_IN_STRCPY:
3747 case BUILT_IN_STRNCPY:
3748 case BUILT_IN_STRNCMP:
3749 case BUILT_IN_STRSTR:
3750 case BUILT_IN_STRPBRK:
3751 case BUILT_IN_STRCAT:
3752 case BUILT_IN_STRNCAT:
3753 case BUILT_IN_STRSPN:
3754 case BUILT_IN_STRCSPN:
3755 case BUILT_IN_STRCMP:
3756 case BUILT_IN_FFS:
3757 case BUILT_IN_PUTCHAR:
3758 case BUILT_IN_PUTS:
3759 case BUILT_IN_PRINTF:
3760 case BUILT_IN_FPUTC:
3761 case BUILT_IN_FPUTS:
3762 case BUILT_IN_FWRITE:
3763 case BUILT_IN_PUTCHAR_UNLOCKED:
3764 case BUILT_IN_PUTS_UNLOCKED:
3765 case BUILT_IN_PRINTF_UNLOCKED:
3766 case BUILT_IN_FPUTC_UNLOCKED:
3767 case BUILT_IN_FPUTS_UNLOCKED:
3768 case BUILT_IN_FWRITE_UNLOCKED:
3769 case BUILT_IN_FLOOR:
3770 case BUILT_IN_FLOORF:
3771 case BUILT_IN_FLOORL:
3772 case BUILT_IN_CEIL:
3773 case BUILT_IN_CEILF:
3774 case BUILT_IN_CEILL:
3775 case BUILT_IN_TRUNC:
3776 case BUILT_IN_TRUNCF:
3777 case BUILT_IN_TRUNCL:
3778 case BUILT_IN_ROUND:
3779 case BUILT_IN_ROUNDF:
3780 case BUILT_IN_ROUNDL:
3781 case BUILT_IN_NEARBYINT:
3782 case BUILT_IN_NEARBYINTF:
3783 case BUILT_IN_NEARBYINTL:
3784 return expand_call (exp, target, ignore);
3785
3786 default:
3787 break;
3788 }
3789
3790 switch (fcode)
3791 {
3792 case BUILT_IN_ABS:
3793 case BUILT_IN_LABS:
3794 case BUILT_IN_LLABS:
3795 case BUILT_IN_IMAXABS:
3796 case BUILT_IN_FABS:
3797 case BUILT_IN_FABSF:
3798 case BUILT_IN_FABSL:
3799 /* build_function_call changes these into ABS_EXPR. */
3800 abort ();
3801
3802 case BUILT_IN_CONJ:
3803 case BUILT_IN_CONJF:
3804 case BUILT_IN_CONJL:
3805 case BUILT_IN_CREAL:
3806 case BUILT_IN_CREALF:
3807 case BUILT_IN_CREALL:
3808 case BUILT_IN_CIMAG:
3809 case BUILT_IN_CIMAGF:
3810 case BUILT_IN_CIMAGL:
3811 /* expand_tree_builtin changes these into CONJ_EXPR, REALPART_EXPR
3812 and IMAGPART_EXPR. */
3813 abort ();
3814
3815 case BUILT_IN_SIN:
3816 case BUILT_IN_SINF:
3817 case BUILT_IN_SINL:
3818 case BUILT_IN_COS:
3819 case BUILT_IN_COSF:
3820 case BUILT_IN_COSL:
3821 case BUILT_IN_EXP:
3822 case BUILT_IN_EXPF:
3823 case BUILT_IN_EXPL:
3824 case BUILT_IN_LOG:
3825 case BUILT_IN_LOGF:
3826 case BUILT_IN_LOGL:
3827 /* Treat these like sqrt only if unsafe math optimizations are allowed,
3828 because of possible accuracy problems. */
3829 if (! flag_unsafe_math_optimizations)
3830 break;
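/* Fall through.  */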
3831 case BUILT_IN_SQRT:
3832 case BUILT_IN_SQRTF:
3833 case BUILT_IN_SQRTL:
3834 case BUILT_IN_FLOOR:
3835 case BUILT_IN_FLOORF:
3836 case BUILT_IN_FLOORL:
3837 case BUILT_IN_CEIL:
3838 case BUILT_IN_CEILF:
3839 case BUILT_IN_CEILL:
3840 case BUILT_IN_TRUNC:
3841 case BUILT_IN_TRUNCF:
3842 case BUILT_IN_TRUNCL:
3843 case BUILT_IN_ROUND:
3844 case BUILT_IN_ROUNDF:
3845 case BUILT_IN_ROUNDL:
3846 case BUILT_IN_NEARBYINT:
3847 case BUILT_IN_NEARBYINTF:
3848 case BUILT_IN_NEARBYINTL:
3849 target = expand_builtin_mathfn (exp, target, subtarget);
3850 if (target)
3851 return target;
3852 break;
3853
3854 case BUILT_IN_APPLY_ARGS:
3855 return expand_builtin_apply_args ();
3856
3857 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
3858 FUNCTION with a copy of the parameters described by
3859 ARGUMENTS and ARGSIZE. It returns a block of memory
3860 allocated on the stack into which are stored all the registers
3861 that might possibly be used for returning the result of a
3862 function. ARGUMENTS is the value returned by
3863 __builtin_apply_args. ARGSIZE is the number of bytes of
3864 arguments that must be copied. ??? How should this value be
3865 computed? We'll also need a safe worst case value for varargs
3866 functions. */
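/* As a rough sketch of the intended use (FN and the 16-byte argument
   size below are only placeholders):

   void *args = __builtin_apply_args ();
   void *result = __builtin_apply ((void (*) ()) fn, args, 16);
   __builtin_return (result);  */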
3867 case BUILT_IN_APPLY:
3868 if (!validate_arglist (arglist, POINTER_TYPE,
3869 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3870 && !validate_arglist (arglist, REFERENCE_TYPE,
3871 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3872 return const0_rtx;
3873 else
3874 {
3875 int i;
3876 tree t;
3877 rtx ops[3];
3878
3879 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
3880 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
3881
3882 return expand_builtin_apply (ops[0], ops[1], ops[2]);
3883 }
3884
3885 /* __builtin_return (RESULT) causes the function to return the
3886 value described by RESULT. RESULT is address of the block of
3887 memory returned by __builtin_apply. */
3888 case BUILT_IN_RETURN:
3889 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
3890 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
3891 NULL_RTX, VOIDmode, 0));
3892 return const0_rtx;
3893
3894 case BUILT_IN_SAVEREGS:
3895 return expand_builtin_saveregs ();
3896
3897 case BUILT_IN_ARGS_INFO:
3898 return expand_builtin_args_info (exp);
3899
3900 /* Return the address of the first anonymous stack arg. */
3901 case BUILT_IN_NEXT_ARG:
3902 return expand_builtin_next_arg (arglist);
3903
3904 case BUILT_IN_CLASSIFY_TYPE:
3905 return expand_builtin_classify_type (arglist);
3906
3907 case BUILT_IN_CONSTANT_P:
3908 return expand_builtin_constant_p (exp);
3909
3910 case BUILT_IN_FRAME_ADDRESS:
3911 case BUILT_IN_RETURN_ADDRESS:
3912 return expand_builtin_frame_address (exp);
3913
3914 /* Returns the address of the area where the structure is returned,
3915 or 0 otherwise. */
3916 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
3917 if (arglist != 0
3918 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
3919 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
3920 return const0_rtx;
3921 else
3922 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
3923
3924 case BUILT_IN_ALLOCA:
3925 target = expand_builtin_alloca (arglist, target);
3926 if (target)
3927 return target;
3928 break;
3929
3930 case BUILT_IN_FFS:
3931 target = expand_builtin_ffs (arglist, target, subtarget);
3932 if (target)
3933 return target;
3934 break;
3935
3936 case BUILT_IN_STRLEN:
3937 target = expand_builtin_strlen (exp, target);
3938 if (target)
3939 return target;
3940 break;
3941
3942 case BUILT_IN_STRCPY:
3943 target = expand_builtin_strcpy (exp, target, mode);
3944 if (target)
3945 return target;
3946 break;
3947
3948 case BUILT_IN_STRNCPY:
3949 target = expand_builtin_strncpy (arglist, target, mode);
3950 if (target)
3951 return target;
3952 break;
3953
3954 case BUILT_IN_STRCAT:
3955 target = expand_builtin_strcat (arglist, target, mode);
3956 if (target)
3957 return target;
3958 break;
3959
3960 case BUILT_IN_STRNCAT:
3961 target = expand_builtin_strncat (arglist, target, mode);
3962 if (target)
3963 return target;
3964 break;
3965
3966 case BUILT_IN_STRSPN:
3967 target = expand_builtin_strspn (arglist, target, mode);
3968 if (target)
3969 return target;
3970 break;
3971
3972 case BUILT_IN_STRCSPN:
3973 target = expand_builtin_strcspn (arglist, target, mode);
3974 if (target)
3975 return target;
3976 break;
3977
3978 case BUILT_IN_STRSTR:
3979 target = expand_builtin_strstr (arglist, target, mode);
3980 if (target)
3981 return target;
3982 break;
3983
3984 case BUILT_IN_STRPBRK:
3985 target = expand_builtin_strpbrk (arglist, target, mode);
3986 if (target)
3987 return target;
3988 break;
3989
3990 case BUILT_IN_INDEX:
3991 case BUILT_IN_STRCHR:
3992 target = expand_builtin_strchr (arglist, target, mode);
3993 if (target)
3994 return target;
3995 break;
3996
3997 case BUILT_IN_RINDEX:
3998 case BUILT_IN_STRRCHR:
3999 target = expand_builtin_strrchr (arglist, target, mode);
4000 if (target)
4001 return target;
4002 break;
4003
4004 case BUILT_IN_MEMCPY:
4005 target = expand_builtin_memcpy (arglist, target, mode);
4006 if (target)
4007 return target;
4008 break;
4009
4010 case BUILT_IN_MEMSET:
4011 target = expand_builtin_memset (exp, target, mode);
4012 if (target)
4013 return target;
4014 break;
4015
4016 case BUILT_IN_BZERO:
4017 target = expand_builtin_bzero (exp);
4018 if (target)
4019 return target;
4020 break;
4021
4022 case BUILT_IN_STRCMP:
4023 target = expand_builtin_strcmp (exp, target, mode);
4024 if (target)
4025 return target;
4026 break;
4027
4028 case BUILT_IN_STRNCMP:
4029 target = expand_builtin_strncmp (exp, target, mode);
4030 if (target)
4031 return target;
4032 break;
4033
4034 case BUILT_IN_BCMP:
4035 case BUILT_IN_MEMCMP:
4036 target = expand_builtin_memcmp (exp, arglist, target, mode);
4037 if (target)
4038 return target;
4039 break;
4040
4041 case BUILT_IN_SETJMP:
4042 target = expand_builtin_setjmp (arglist, target);
4043 if (target)
4044 return target;
4045 break;
4046
4047 /* __builtin_longjmp is passed a pointer to an array of five words.
4048 It's similar to the C library longjmp function but works with
4049 __builtin_setjmp above. */
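/* For example, with BUF declared as an array of five words:

   void *buf[5];
   ...
   if (__builtin_setjmp (buf) == 0)
     __builtin_longjmp (buf, 1);

   The second argument must always be the constant 1, as checked below.  */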
4050 case BUILT_IN_LONGJMP:
4051 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4052 break;
4053 else
4054 {
4055 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
4056 VOIDmode, 0);
4057 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
4058 NULL_RTX, VOIDmode, 0);
4059
4060 if (value != const1_rtx)
4061 {
4062 error ("__builtin_longjmp second argument must be 1");
4063 return const0_rtx;
4064 }
4065
4066 expand_builtin_longjmp (buf_addr, value);
4067 return const0_rtx;
4068 }
4069
4070 case BUILT_IN_TRAP:
4071 expand_builtin_trap ();
4072 return const0_rtx;
4073
4074 case BUILT_IN_FPUTS:
4075 target = expand_builtin_fputs (arglist, ignore, /*unlocked=*/ 0);
4076 if (target)
4077 return target;
4078 break;
4079 case BUILT_IN_FPUTS_UNLOCKED:
4080 target = expand_builtin_fputs (arglist, ignore, /*unlocked=*/ 1);
4081 if (target)
4082 return target;
4083 break;
4084
4085 /* Various hooks for the DWARF 2 __throw routine. */
4086 case BUILT_IN_UNWIND_INIT:
4087 expand_builtin_unwind_init ();
4088 return const0_rtx;
4089 case BUILT_IN_DWARF_CFA:
4090 return virtual_cfa_rtx;
4091 #ifdef DWARF2_UNWIND_INFO
4092 case BUILT_IN_DWARF_FP_REGNUM:
4093 return expand_builtin_dwarf_fp_regnum ();
4094 case BUILT_IN_INIT_DWARF_REG_SIZES:
4095 expand_builtin_init_dwarf_reg_sizes (TREE_VALUE (arglist));
4096 return const0_rtx;
4097 #endif
4098 case BUILT_IN_FROB_RETURN_ADDR:
4099 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
4100 case BUILT_IN_EXTRACT_RETURN_ADDR:
4101 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
4102 case BUILT_IN_EH_RETURN:
4103 expand_builtin_eh_return (TREE_VALUE (arglist),
4104 TREE_VALUE (TREE_CHAIN (arglist)));
4105 return const0_rtx;
4106 #ifdef EH_RETURN_DATA_REGNO
4107 case BUILT_IN_EH_RETURN_DATA_REGNO:
4108 return expand_builtin_eh_return_data_regno (arglist);
4109 #endif
4110 case BUILT_IN_VA_START:
4111 case BUILT_IN_STDARG_START:
4112 return expand_builtin_va_start (arglist);
4113 case BUILT_IN_VA_END:
4114 return expand_builtin_va_end (arglist);
4115 case BUILT_IN_VA_COPY:
4116 return expand_builtin_va_copy (arglist);
4117 case BUILT_IN_EXPECT:
4118 return expand_builtin_expect (arglist, target);
4119 case BUILT_IN_PREFETCH:
4120 expand_builtin_prefetch (arglist);
4121 return const0_rtx;
4122
4124 default: /* Just do a library call if the builtin is unknown. */
4125 if (!DECL_ASSEMBLER_NAME_SET_P (fndecl))
4126 error ("built-in function `%s' not currently supported",
4127 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
4128 }
4129
4130 /* The switch statement above can drop through to cause the function
4131 to be called normally. */
4132 return expand_call (exp, target, ignore);
4133 }
4134
4135 /* Determine whether a tree node represents a call to a built-in
4136 math function. If the tree T is a call to a built-in function
4137 taking a single real argument, then the return value is the
4138 DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT. Otherwise
4139 the return value is END_BUILTINS. */
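/* E.g. given the CALL_EXPR for sqrtf (x), this returns BUILT_IN_SQRTF;
   a call with no argument, more than one argument, or a non-REAL_TYPE
   argument yields END_BUILTINS.  */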
4140
4141 enum built_in_function
4142 builtin_mathfn_code (t)
4143 tree t;
4144 {
4145 tree fndecl, arglist;
4146
4147 if (TREE_CODE (t) != CALL_EXPR
4148 || TREE_CODE (TREE_OPERAND (t, 0)) != ADDR_EXPR)
4149 return END_BUILTINS;
4150
4151 fndecl = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
4152 if (TREE_CODE (fndecl) != FUNCTION_DECL
4153 || ! DECL_BUILT_IN (fndecl)
4154 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
4155 return END_BUILTINS;
4156
4157 arglist = TREE_OPERAND (t, 1);
4158 if (! arglist
4159 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE
4160 || TREE_CHAIN (arglist))
4161 return END_BUILTINS;
4162
4163 return DECL_FUNCTION_CODE (fndecl);
4164 }
4165
4166 /* Fold a call to __builtin_constant_p, if we know it will evaluate to a
4167 constant. ARGLIST is the argument list of the call. */
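/* Roughly: __builtin_constant_p (42) folds to 1 immediately, a call whose
   argument has side effects or pointer/aggregate type folds to 0, and any
   other call is left alone here so that later optimization may still prove
   the argument constant.  */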
4168
4169 static tree
4170 fold_builtin_constant_p (arglist)
4171 tree arglist;
4172 {
4173 if (arglist == 0)
4174 return 0;
4175
4176 arglist = TREE_VALUE (arglist);
4177
4178 /* We return 1 for a numeric type that's known to be a constant
4179 value at compile-time or for an aggregate type that's a
4180 literal constant. */
4181 STRIP_NOPS (arglist);
4182
4183 /* If we know this is a constant, return the constant one. */
4184 if (TREE_CODE_CLASS (TREE_CODE (arglist)) == 'c'
4185 || (TREE_CODE (arglist) == CONSTRUCTOR
4186 && TREE_CONSTANT (arglist))
4187 || (TREE_CODE (arglist) == ADDR_EXPR
4188 && TREE_CODE (TREE_OPERAND (arglist, 0)) == STRING_CST))
4189 return integer_one_node;
4190
4191 /* If we aren't going to be running CSE or this expression
4192 has side effects, show we don't know it to be a constant.
4193 Likewise if it's a pointer or aggregate type since in those
4194 cases we only want literals, since those are only optimized
4195 when generating RTL, not later.
4196 And finally, if we are compiling an initializer, not code, we
4197 need to return a definite result now; there's not going to be any
4198 more optimization done. */
4199 if (TREE_SIDE_EFFECTS (arglist) || cse_not_expected
4200 || AGGREGATE_TYPE_P (TREE_TYPE (arglist))
4201 || POINTER_TYPE_P (TREE_TYPE (arglist))
4202 || cfun == 0)
4203 return integer_zero_node;
4204
4205 return 0;
4206 }
4207
4208 /* Fold a call to __builtin_classify_type. */
4209
4210 static tree
4211 fold_builtin_classify_type (arglist)
4212 tree arglist;
4213 {
4214 if (arglist == 0)
4215 return build_int_2 (no_type_class, 0);
4216
4217 return build_int_2 (type_to_class (TREE_TYPE (TREE_VALUE (arglist))), 0);
4218 }
4219
4220 /* Fold a call to __builtin_inf or __builtin_huge_val. */
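/* Both fold to a REAL_CST infinity; __builtin_inf additionally warns
   (WARN is nonzero) when the target format has no infinities, while
   __builtin_huge_val stays quiet.  */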
4221
4222 static tree
4223 fold_builtin_inf (type, warn)
4224 tree type;
4225 int warn;
4226 {
4227 REAL_VALUE_TYPE real;
4228
4229 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
4230 warning ("target format does not support infinity");
4231
4232 real_inf (&real);
4233 return build_real (type, real);
4234 }
4235
4236 /* Fold a call to __builtin_nan or __builtin_nans. */
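/* E.g. __builtin_nan ("") folds to a quiet NaN constant and
   __builtin_nans ("") to a signalling one, provided the argument is a
   string constant that real_nan accepts.  */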
4237
4238 static tree
4239 fold_builtin_nan (arglist, type, quiet)
4240 tree arglist, type;
4241 int quiet;
4242 {
4243 REAL_VALUE_TYPE real;
4244 const char *str;
4245
4246 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4247 return 0;
4248 str = c_getstr (TREE_VALUE (arglist));
4249 if (!str)
4250 return 0;
4251
4252 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
4253 return 0;
4254
4255 return build_real (type, real);
4256 }
4257
4258 /* Used by constant folding to eliminate some builtin calls early. EXP is
4259 the CALL_EXPR of a call to a builtin function. */
4260
4261 tree
4262 fold_builtin (exp)
4263 tree exp;
4264 {
4265 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4266 tree arglist = TREE_OPERAND (exp, 1);
4267 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4268
4269 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
4270 return 0;
4271
4272 switch (fcode)
4273 {
4274 case BUILT_IN_CONSTANT_P:
4275 return fold_builtin_constant_p (arglist);
4276
4277 case BUILT_IN_CLASSIFY_TYPE:
4278 return fold_builtin_classify_type (arglist);
4279
4280 case BUILT_IN_STRLEN:
4281 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4282 {
4283 tree len = c_strlen (TREE_VALUE (arglist));
4284 if (len)
4285 {
4286 /* Convert from the internal "sizetype" type to "size_t". */
4287 if (size_type_node)
4288 len = convert (size_type_node, len);
4289 return len;
4290 }
4291 }
4292 break;
4293
4294 case BUILT_IN_SQRT:
4295 case BUILT_IN_SQRTF:
4296 case BUILT_IN_SQRTL:
4297 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
4298 {
4299 enum built_in_function fcode;
4300 tree arg = TREE_VALUE (arglist);
4301
4302 /* Optimize sqrt of constant value. */
4303 if (TREE_CODE (arg) == REAL_CST
4304 && ! TREE_CONSTANT_OVERFLOW (arg))
4305 {
4306 enum machine_mode mode;
4307 REAL_VALUE_TYPE r, x;
4308
4309 x = TREE_REAL_CST (arg);
4310 mode = TYPE_MODE (TREE_TYPE (arg));
4311 if (real_sqrt (&r, mode, &x)
4312 || (!flag_trapping_math && !flag_errno_math))
4313 return build_real (TREE_TYPE (arg), r);
4314 }
4315
4316 /* Optimize sqrt(exp(x)) = exp(x/2.0). */
4317 fcode = builtin_mathfn_code (arg);
4318 if (flag_unsafe_math_optimizations
4319 && (fcode == BUILT_IN_EXP
4320 || fcode == BUILT_IN_EXPF
4321 || fcode == BUILT_IN_EXPL))
4322 {
4323 tree expfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
4324 arg = build (RDIV_EXPR, TREE_TYPE (arg),
4325 TREE_VALUE (TREE_OPERAND (arg, 1)),
4326 build_real (TREE_TYPE (arg), dconst2));
4327 arglist = build_tree_list (NULL_TREE, arg);
4328 return build_function_call_expr (expfn, arglist);
4329 }
4330 }
4331 break;
4332
4333 case BUILT_IN_EXP:
4334 case BUILT_IN_EXPF:
4335 case BUILT_IN_EXPL:
4336 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
4337 {
4338 enum built_in_function fcode;
4339 tree arg = TREE_VALUE (arglist);
4340
4341 /* Optimize exp(0.0) = 1.0. */
4342 if (real_zerop (arg))
4343 return build_real (TREE_TYPE (arg), dconst1);
4344
4345 /* Optimize exp(log(x)) = x. */
4346 fcode = builtin_mathfn_code (arg);
4347 if (flag_unsafe_math_optimizations
4348 && (fcode == BUILT_IN_LOG
4349 || fcode == BUILT_IN_LOGF
4350 || fcode == BUILT_IN_LOGL))
4351 return TREE_VALUE (TREE_OPERAND (arg, 1));
4352 }
4353 break;
4354
4355 case BUILT_IN_LOG:
4356 case BUILT_IN_LOGF:
4357 case BUILT_IN_LOGL:
4358 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
4359 {
4360 enum built_in_function fcode;
4361 tree arg = TREE_VALUE (arglist);
4362
4363 /* Optimize log(1.0) = 0.0. */
4364 if (real_onep (arg))
4365 return build_real (TREE_TYPE (arg), dconst0);
4366
4367 /* Optimize log(exp(x)) = x. */
4368 fcode = builtin_mathfn_code (arg);
4369 if (flag_unsafe_math_optimizations
4370 && (fcode == BUILT_IN_EXP
4371 || fcode == BUILT_IN_EXPF
4372 || fcode == BUILT_IN_EXPL))
4373 return TREE_VALUE (TREE_OPERAND (arg, 1));
4374
4375 /* Optimize log(sqrt(x)) = log(x)/2.0. */
4376 if (flag_unsafe_math_optimizations
4377 && (fcode == BUILT_IN_SQRT
4378 || fcode == BUILT_IN_SQRTF
4379 || fcode == BUILT_IN_SQRTL))
4380 {
4381 tree logfn = build_function_call_expr (fndecl,
4382 TREE_OPERAND (arg, 1));
4383 return fold (build (RDIV_EXPR, TREE_TYPE (arg), logfn,
4384 build_real (TREE_TYPE (arg), dconst2)));
4385 }
4386 }
4387 break;
4388
4389 case BUILT_IN_INF:
4390 case BUILT_IN_INFF:
4391 case BUILT_IN_INFL:
4392 return fold_builtin_inf (TREE_TYPE (TREE_TYPE (fndecl)), true);
4393
4394 case BUILT_IN_HUGE_VAL:
4395 case BUILT_IN_HUGE_VALF:
4396 case BUILT_IN_HUGE_VALL:
4397 return fold_builtin_inf (TREE_TYPE (TREE_TYPE (fndecl)), false);
4398
4399 case BUILT_IN_NAN:
4400 case BUILT_IN_NANF:
4401 case BUILT_IN_NANL:
4402 return fold_builtin_nan (arglist, TREE_TYPE (TREE_TYPE (fndecl)), true);
4403
4404 case BUILT_IN_NANS:
4405 case BUILT_IN_NANSF:
4406 case BUILT_IN_NANSL:
4407 return fold_builtin_nan (arglist, TREE_TYPE (TREE_TYPE (fndecl)), false);
4408
4409 default:
4410 break;
4411 }
4412
4413 return 0;
4414 }
4415
4416 /* Conveniently construct a function call expression. */
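/* E.g. the fputs expansion above uses this with FN set to the fputc
   _DECL and a two-element ARGLIST to build the tree for
   fputc (string[0], stream).  */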
4417
4418 tree
4419 build_function_call_expr (fn, arglist)
4420 tree fn, arglist;
4421 {
4422 tree call_expr;
4423
4424 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
4425 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
4426 call_expr, arglist);
4427 TREE_SIDE_EFFECTS (call_expr) = 1;
4428 return fold (call_expr);
4429 }
4430
4431 /* This function validates the types of a function call argument list
4432 represented as a tree chain of parameters against a specified list
4433 of tree_codes. If the last specifier is a 0, that represents an
4434 ellipsis; otherwise the last specifier must be a VOID_TYPE. */
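/* For example,

   validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)

   accepts exactly two arguments of pointer and integer type, while

   validate_arglist (arglist, POINTER_TYPE, 0)

   accepts a pointer followed by any number of further arguments.  */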
4435
4436 static int
4437 validate_arglist VPARAMS ((tree arglist, ...))
4438 {
4439 enum tree_code code;
4440 int res = 0;
4441
4442 VA_OPEN (ap, arglist);
4443 VA_FIXEDARG (ap, tree, arglist);
4444
4445 do
4446 {
4447 code = va_arg (ap, enum tree_code);
4448 switch (code)
4449 {
4450 case 0:
4451 /* This signifies an ellipsis; any further arguments are all OK. */
4452 res = 1;
4453 goto end;
4454 case VOID_TYPE:
4455 /* This signifies an endlink: if no arguments remain, return
4456 true; otherwise return false. */
4457 res = arglist == 0;
4458 goto end;
4459 default:
4460 /* If no parameters remain or the parameter's code does not
4461 match the specified code, return false. Otherwise continue
4462 checking any remaining arguments. */
4463 if (arglist == 0
4464 || code != TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))))
4465 goto end;
4466 break;
4467 }
4468 arglist = TREE_CHAIN (arglist);
4469 }
4470 while (1);
4471
4472 /* We need gotos here since we can only have one VA_CLOSE in a
4473 function. */
4474 end: ;
4475 VA_CLOSE (ap);
4476
4477 return res;
4478 }
4479
4480 /* Default version of target-specific builtin setup that does nothing. */
4481
4482 void
4483 default_init_builtins ()
4484 {
4485 }
4486
4487 /* Default target-specific builtin expander that does nothing. */
4488
4489 rtx
4490 default_expand_builtin (exp, target, subtarget, mode, ignore)
4491 tree exp ATTRIBUTE_UNUSED;
4492 rtx target ATTRIBUTE_UNUSED;
4493 rtx subtarget ATTRIBUTE_UNUSED;
4494 enum machine_mode mode ATTRIBUTE_UNUSED;
4495 int ignore ATTRIBUTE_UNUSED;
4496 {
4497 return NULL_RTX;
4498 }