1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "real.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-config.h"
34 #include "expr.h"
35 #include "optabs.h"
36 #include "libfuncs.h"
37 #include "recog.h"
38 #include "output.h"
39 #include "typeclass.h"
40 #include "toplev.h"
41 #include "predict.h"
42 #include "tm_p.h"
43 #include "target.h"
44 #include "langhooks.h"
45
46 #define CALLED_AS_BUILT_IN(NODE) \
47 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
48
49 /* Register mappings for target machines without register windows. */
50 #ifndef INCOMING_REGNO
51 #define INCOMING_REGNO(OUT) (OUT)
52 #endif
53 #ifndef OUTGOING_REGNO
54 #define OUTGOING_REGNO(IN) (IN)
55 #endif
56
57 #ifndef PAD_VARARGS_DOWN
58 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
59 #endif
60
61 /* Define the names of the builtin function types and codes. */
62 const char *const built_in_class_names[4]
63 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
64
65 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT) STRINGX(X),
66 const char *const built_in_names[(int) END_BUILTINS] =
67 {
68 #include "builtins.def"
69 };
70 #undef DEF_BUILTIN
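/* Illustrative sketch (not an entry from builtins.def): the table above is
   built with the "X macro" technique.  Each DEF_BUILTIN entry in
   builtins.def, for example a hypothetical

       DEF_BUILTIN (BUILT_IN_EXAMPLE, "example", ...)

   is expanded by the definition above into the string "BUILT_IN_EXAMPLE",
   so built_in_names[] ends up indexed by enum built_in_function.  */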
71
72 /* Set up an array of _DECL trees and make sure each element is
73 initialized to NULL_TREE. */
74 tree built_in_decls[(int) END_BUILTINS];
75
76 static int get_pointer_alignment PARAMS ((tree, unsigned int));
77 static tree c_strlen PARAMS ((tree));
78 static const char *c_getstr PARAMS ((tree));
79 static rtx c_readstr PARAMS ((const char *,
80 enum machine_mode));
81 static int target_char_cast PARAMS ((tree, char *));
82 static rtx get_memory_rtx PARAMS ((tree));
83 static int apply_args_size PARAMS ((void));
84 static int apply_result_size PARAMS ((void));
85 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
86 static rtx result_vector PARAMS ((int, rtx));
87 #endif
88 static rtx expand_builtin_setjmp PARAMS ((tree, rtx));
89 static void expand_builtin_prefetch PARAMS ((tree));
90 static rtx expand_builtin_apply_args PARAMS ((void));
91 static rtx expand_builtin_apply_args_1 PARAMS ((void));
92 static rtx expand_builtin_apply PARAMS ((rtx, rtx, rtx));
93 static void expand_builtin_return PARAMS ((rtx));
94 static enum type_class type_to_class PARAMS ((tree));
95 static rtx expand_builtin_classify_type PARAMS ((tree));
96 static rtx expand_builtin_mathfn PARAMS ((tree, rtx, rtx));
97 static rtx expand_builtin_constant_p PARAMS ((tree));
98 static rtx expand_builtin_args_info PARAMS ((tree));
99 static rtx expand_builtin_next_arg PARAMS ((tree));
100 static rtx expand_builtin_va_start PARAMS ((tree));
101 static rtx expand_builtin_va_end PARAMS ((tree));
102 static rtx expand_builtin_va_copy PARAMS ((tree));
103 static rtx expand_builtin_memcmp PARAMS ((tree, tree, rtx,
104 enum machine_mode));
105 static rtx expand_builtin_strcmp PARAMS ((tree, rtx,
106 enum machine_mode));
107 static rtx expand_builtin_strncmp PARAMS ((tree, rtx,
108 enum machine_mode));
109 static rtx builtin_memcpy_read_str PARAMS ((PTR, HOST_WIDE_INT,
110 enum machine_mode));
111 static rtx expand_builtin_strcat PARAMS ((tree, rtx,
112 enum machine_mode));
113 static rtx expand_builtin_strncat PARAMS ((tree, rtx,
114 enum machine_mode));
115 static rtx expand_builtin_strspn PARAMS ((tree, rtx,
116 enum machine_mode));
117 static rtx expand_builtin_strcspn PARAMS ((tree, rtx,
118 enum machine_mode));
119 static rtx expand_builtin_memcpy PARAMS ((tree, rtx,
120 enum machine_mode));
121 static rtx expand_builtin_strcpy PARAMS ((tree, rtx,
122 enum machine_mode));
123 static rtx builtin_strncpy_read_str PARAMS ((PTR, HOST_WIDE_INT,
124 enum machine_mode));
125 static rtx expand_builtin_strncpy PARAMS ((tree, rtx,
126 enum machine_mode));
127 static rtx builtin_memset_read_str PARAMS ((PTR, HOST_WIDE_INT,
128 enum machine_mode));
129 static rtx builtin_memset_gen_str PARAMS ((PTR, HOST_WIDE_INT,
130 enum machine_mode));
131 static rtx expand_builtin_memset PARAMS ((tree, rtx,
132 enum machine_mode));
133 static rtx expand_builtin_bzero PARAMS ((tree));
134 static rtx expand_builtin_strlen PARAMS ((tree, rtx));
135 static rtx expand_builtin_strstr PARAMS ((tree, rtx,
136 enum machine_mode));
137 static rtx expand_builtin_strpbrk PARAMS ((tree, rtx,
138 enum machine_mode));
139 static rtx expand_builtin_strchr PARAMS ((tree, rtx,
140 enum machine_mode));
141 static rtx expand_builtin_strrchr PARAMS ((tree, rtx,
142 enum machine_mode));
143 static rtx expand_builtin_alloca PARAMS ((tree, rtx));
144 static rtx expand_builtin_ffs PARAMS ((tree, rtx, rtx));
145 static rtx expand_builtin_frame_address PARAMS ((tree));
146 static rtx expand_builtin_fputs PARAMS ((tree, int, int));
147 static tree stabilize_va_list PARAMS ((tree, int));
148 static rtx expand_builtin_expect PARAMS ((tree, rtx));
149 static tree fold_builtin_constant_p PARAMS ((tree));
150 static tree fold_builtin_classify_type PARAMS ((tree));
151 static tree fold_builtin_inf PARAMS ((tree, int));
152 static tree fold_builtin_nan PARAMS ((tree, tree, int));
153 static tree build_function_call_expr PARAMS ((tree, tree));
154 static int validate_arglist PARAMS ((tree, ...));
155
156 /* Return the alignment in bits of EXP, a pointer-valued expression,
157 but never return more than MAX_ALIGN.
158 By default the alignment returned is that of the type EXP points to;
159 if EXP is not of POINTER_TYPE, 0 is returned.
160
161 Otherwise, look at the expression to see if we can do better, i.e., if it
162 actually points at an object whose alignment is tighter. */
163
164 static int
165 get_pointer_alignment (exp, max_align)
166 tree exp;
167 unsigned int max_align;
168 {
169 unsigned int align, inner;
170
171 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
172 return 0;
173
174 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
175 align = MIN (align, max_align);
176
177 while (1)
178 {
179 switch (TREE_CODE (exp))
180 {
181 case NOP_EXPR:
182 case CONVERT_EXPR:
183 case NON_LVALUE_EXPR:
184 exp = TREE_OPERAND (exp, 0);
185 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
186 return align;
187
188 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
189 align = MIN (inner, max_align);
190 break;
191
192 case PLUS_EXPR:
193 /* If sum of pointer + int, restrict our maximum alignment to that
194 imposed by the integer. If not, we can't do any better than
195 ALIGN. */
196 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
197 return align;
198
199 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
200 & (max_align / BITS_PER_UNIT - 1))
201 != 0)
202 max_align >>= 1;
203
204 exp = TREE_OPERAND (exp, 0);
205 break;
206
207 case ADDR_EXPR:
208 /* See what we are pointing at and look at its alignment. */
209 exp = TREE_OPERAND (exp, 0);
210 if (TREE_CODE (exp) == FUNCTION_DECL)
211 align = FUNCTION_BOUNDARY;
212 else if (DECL_P (exp))
213 align = DECL_ALIGN (exp);
214 #ifdef CONSTANT_ALIGNMENT
215 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
216 align = CONSTANT_ALIGNMENT (exp, align);
217 #endif
218 return MIN (align, max_align);
219
220 default:
221 return align;
222 }
223 }
224 }
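/* Illustrative sketch, not used by the compiler: the PLUS_EXPR case above
   clamps the maximum alignment to the largest power of two (in bits) whose
   byte size divides the constant offset.  The hypothetical helper below
   shows the same arithmetic on plain integers.  */
#if 0
static unsigned int
example_clamp_align (unsigned HOST_WIDE_INT offset, unsigned int max_align)
{
  /* Keep halving MAX_ALIGN (in bits) until OFFSET is a multiple of
     MAX_ALIGN / BITS_PER_UNIT bytes.  */
  while ((offset & (max_align / BITS_PER_UNIT - 1)) != 0)
    max_align >>= 1;
  return max_align;  /* e.g. offset 2, max_align 64 bits -> 16 bits.  */
}
#endif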
225
226 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
227 answer, because the string could contain a zero byte in the middle;
228 TREE_STRING_LENGTH is the size of the character array, not the string.
229
230 The value returned is of type `ssizetype'.
231
232 Unfortunately, string_constant can't access the values of const char
233 arrays with initializers, so neither can we here. */
234
235 static tree
236 c_strlen (src)
237 tree src;
238 {
239 tree offset_node;
240 HOST_WIDE_INT offset;
241 int max;
242 const char *ptr;
243
244 src = string_constant (src, &offset_node);
245 if (src == 0)
246 return 0;
247
248 max = TREE_STRING_LENGTH (src) - 1;
249 ptr = TREE_STRING_POINTER (src);
250
251 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
252 {
253 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
254 compute the offset to the following null if we don't know where to
255 start searching for it. */
256 int i;
257
258 for (i = 0; i < max; i++)
259 if (ptr[i] == 0)
260 return 0;
261
262 /* We don't know the starting offset, but we do know that the string
263 has no internal zero bytes. We can assume that the offset falls
264 within the bounds of the string; otherwise, the programmer deserves
265 what he gets. Subtract the offset from the length of the string,
266 and return that. This would perhaps not be valid if we were dealing
267 with named arrays in addition to literal string constants. */
268
269 return size_diffop (size_int (max), offset_node);
270 }
271
272 /* We have a known offset into the string. Start searching there for
273 a null character if we can represent it as a single HOST_WIDE_INT. */
274 if (offset_node == 0)
275 offset = 0;
276 else if (! host_integerp (offset_node, 0))
277 offset = -1;
278 else
279 offset = tree_low_cst (offset_node, 0);
280
281 /* If the offset is known to be out of bounds, warn, and call strlen at
282 runtime. */
283 if (offset < 0 || offset > max)
284 {
285 warning ("offset outside bounds of constant string");
286 return 0;
287 }
288
289 /* Use strlen to search for the first zero byte. Since any strings
290 constructed with build_string will have nulls appended, we win even
291 if we get handed something like (char[4])"abcd".
292
293 Since OFFSET is our starting index into the string, no further
294 calculation is needed. */
295 return ssize_int (strlen (ptr + offset));
296 }
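/* Hypothetical source-level examples of what c_strlen can measure at
   compile time (the variable names are illustrative only, and I is some
   value not known until run time).  */
#if 0
size_t a = strlen ("hello");         /* constant string: length 5 */
size_t b = strlen ("hello" + 1);     /* known constant offset: length 4 */
size_t c = strlen ("foo\0bar" + i);  /* unknown offset into a string with an
                                        embedded NUL: c_strlen gives up and
                                        a real strlen call is emitted */
#endif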
297
298 /* Return a char pointer for a C string if it is a string constant
299 or the sum of a string constant and an integer constant. */
300
301 static const char *
302 c_getstr (src)
303 tree src;
304 {
305 tree offset_node;
306
307 src = string_constant (src, &offset_node);
308 if (src == 0)
309 return 0;
310
311 if (offset_node == 0)
312 return TREE_STRING_POINTER (src);
313 else if (!host_integerp (offset_node, 1)
314 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
315 return 0;
316
317 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
318 }
319
320 /* Return a CONST_INT or CONST_DOUBLE corresponding to the target reading
321 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
322
323 static rtx
324 c_readstr (str, mode)
325 const char *str;
326 enum machine_mode mode;
327 {
328 HOST_WIDE_INT c[2];
329 HOST_WIDE_INT ch;
330 unsigned int i, j;
331
332 if (GET_MODE_CLASS (mode) != MODE_INT)
333 abort ();
334 c[0] = 0;
335 c[1] = 0;
336 ch = 1;
337 for (i = 0; i < GET_MODE_SIZE (mode); i++)
338 {
339 j = i;
340 if (WORDS_BIG_ENDIAN)
341 j = GET_MODE_SIZE (mode) - i - 1;
342 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
343 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
344 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
345 j *= BITS_PER_UNIT;
346 if (j > 2 * HOST_BITS_PER_WIDE_INT)
347 abort ();
348 if (ch)
349 ch = (unsigned char) str[i];
350 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
351 }
352 return immed_double_const (c[0], c[1], mode);
353 }
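/* Worked example (assuming 8-bit units): reading "abcd" in a 4-byte integer
   mode yields 0x64636261 on a little-endian target and 0x61626364 on a
   big-endian one; once a NUL is seen, the remaining bytes of the constant
   are filled with zeros.  */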
354
355 /* Cast a target constant CST to a target char; if the value fits into the
356 host char type, return zero and store the value in the variable pointed
357 to by P. Otherwise return nonzero. */
358
359 static int
360 target_char_cast (cst, p)
361 tree cst;
362 char *p;
363 {
364 unsigned HOST_WIDE_INT val, hostval;
365
366 if (!host_integerp (cst, 1)
367 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
368 return 1;
369
370 val = tree_low_cst (cst, 1);
371 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
372 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
373
374 hostval = val;
375 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
376 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
377
378 if (val != hostval)
379 return 1;
380
381 *p = hostval;
382 return 0;
383 }
384
385 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
386 times to get the address of either a higher stack frame, or a return
387 address located within it (depending on FNDECL_CODE). */
388
389 rtx
390 expand_builtin_return_addr (fndecl_code, count, tem)
391 enum built_in_function fndecl_code;
392 int count;
393 rtx tem;
394 {
395 int i;
396
397 /* Some machines need special handling before we can access
398 arbitrary frames. For example, on the sparc, we must first flush
399 all register windows to the stack. */
400 #ifdef SETUP_FRAME_ADDRESSES
401 if (count > 0)
402 SETUP_FRAME_ADDRESSES ();
403 #endif
404
405 /* On the sparc, the return address is not in the frame, it is in a
406 register. There is no way to access it off of the current frame
407 pointer, but it can be accessed off the previous frame pointer by
408 reading the value from the register window save area. */
409 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
410 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
411 count--;
412 #endif
413
414 /* Scan back COUNT frames to the specified frame. */
415 for (i = 0; i < count; i++)
416 {
417 /* Assume the dynamic chain pointer is in the word that the
418 frame address points to, unless otherwise specified. */
419 #ifdef DYNAMIC_CHAIN_ADDRESS
420 tem = DYNAMIC_CHAIN_ADDRESS (tem);
421 #endif
422 tem = memory_address (Pmode, tem);
423 tem = gen_rtx_MEM (Pmode, tem);
424 set_mem_alias_set (tem, get_frame_alias_set ());
425 tem = copy_to_reg (tem);
426 }
427
428 /* For __builtin_frame_address, return what we've got. */
429 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
430 return tem;
431
432 /* For __builtin_return_address, get the return address from that
433 frame. */
434 #ifdef RETURN_ADDR_RTX
435 tem = RETURN_ADDR_RTX (count, tem);
436 #else
437 tem = memory_address (Pmode,
438 plus_constant (tem, GET_MODE_SIZE (Pmode)));
439 tem = gen_rtx_MEM (Pmode, tem);
440 set_mem_alias_set (tem, get_frame_alias_set ());
441 #endif
442 return tem;
443 }
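/* Illustrative user-level sketch of the two builtins expanded above; the
   function and its consumer record_backtrace_entry are hypothetical and
   only show the interface.  */
#if 0
static void
example_return_addr_usage (void)
{
  void *ra = __builtin_return_address (0);  /* caller's return address */
  void *fp = __builtin_frame_address (0);   /* this function's frame */
  record_backtrace_entry (ra, fp);          /* hypothetical consumer */
}
#endif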
444
445 /* Alias set used for setjmp buffer. */
446 static HOST_WIDE_INT setjmp_alias_set = -1;
447
448 /* Construct the leading half of a __builtin_setjmp call. Control will
449 return to RECEIVER_LABEL. This is used directly by sjlj exception
450 handling code. */
451
452 void
453 expand_builtin_setjmp_setup (buf_addr, receiver_label)
454 rtx buf_addr;
455 rtx receiver_label;
456 {
457 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
458 rtx stack_save;
459 rtx mem;
460
461 if (setjmp_alias_set == -1)
462 setjmp_alias_set = new_alias_set ();
463
464 #ifdef POINTERS_EXTEND_UNSIGNED
465 if (GET_MODE (buf_addr) != Pmode)
466 buf_addr = convert_memory_address (Pmode, buf_addr);
467 #endif
468
469 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
470
471 emit_queue ();
472
473 /* We store the frame pointer and the address of receiver_label in
474 the buffer and use the rest of it for the stack save area, which
475 is machine-dependent. */
476
477 #ifndef BUILTIN_SETJMP_FRAME_VALUE
478 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
479 #endif
480
481 mem = gen_rtx_MEM (Pmode, buf_addr);
482 set_mem_alias_set (mem, setjmp_alias_set);
483 emit_move_insn (mem, BUILTIN_SETJMP_FRAME_VALUE);
484
485 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
486 set_mem_alias_set (mem, setjmp_alias_set);
487
488 emit_move_insn (validize_mem (mem),
489 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
490
491 stack_save = gen_rtx_MEM (sa_mode,
492 plus_constant (buf_addr,
493 2 * GET_MODE_SIZE (Pmode)));
494 set_mem_alias_set (stack_save, setjmp_alias_set);
495 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
496
497 /* If there is further processing to do, do it. */
498 #ifdef HAVE_builtin_setjmp_setup
499 if (HAVE_builtin_setjmp_setup)
500 emit_insn (gen_builtin_setjmp_setup (buf_addr));
501 #endif
502
503 /* Tell optimize_save_area_alloca that extra work will be
504 needed during alloca. */
505 current_function_calls_setjmp = 1;
506
507 /* Set this so all the registers get saved in our frame; we need to be
508 able to copy the saved values for any registers from frames we unwind. */
509 current_function_has_nonlocal_label = 1;
510 }
511
512 /* Construct the trailing part of a __builtin_setjmp call.
513 This is used directly by sjlj exception handling code. */
514
515 void
516 expand_builtin_setjmp_receiver (receiver_label)
517 rtx receiver_label ATTRIBUTE_UNUSED;
518 {
519 /* Clobber the FP when we get here, so we have to make sure it's
520 marked as used by this function. */
521 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
522
523 /* Mark the static chain as clobbered here so life information
524 doesn't get messed up for it. */
525 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
526
527 /* Now put in the code to restore the frame pointer, and argument
528 pointer, if needed. The code below is from expand_end_bindings
529 in stmt.c; see detailed documentation there. */
530 #ifdef HAVE_nonlocal_goto
531 if (! HAVE_nonlocal_goto)
532 #endif
533 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
534
535 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
536 if (fixed_regs[ARG_POINTER_REGNUM])
537 {
538 #ifdef ELIMINABLE_REGS
539 size_t i;
540 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
541
542 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
543 if (elim_regs[i].from == ARG_POINTER_REGNUM
544 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
545 break;
546
547 if (i == ARRAY_SIZE (elim_regs))
548 #endif
549 {
550 /* Now restore our arg pointer from the address at which it
551 was saved in our stack frame. */
552 emit_move_insn (virtual_incoming_args_rtx,
553 copy_to_reg (get_arg_pointer_save_area (cfun)));
554 }
555 }
556 #endif
557
558 #ifdef HAVE_builtin_setjmp_receiver
559 if (HAVE_builtin_setjmp_receiver)
560 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
561 else
562 #endif
563 #ifdef HAVE_nonlocal_goto_receiver
564 if (HAVE_nonlocal_goto_receiver)
565 emit_insn (gen_nonlocal_goto_receiver ());
566 else
567 #endif
568 { /* Nothing */ }
569
570 /* @@@ This is a kludge. Not all machine descriptions define a blockage
571 insn, but we must not allow the code we just generated to be reordered
572 by scheduling. Specifically, the update of the frame pointer must
573 happen immediately, not later. So emit an ASM_INPUT to act as blockage
574 insn. */
575 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
576 }
577
578 /* __builtin_setjmp is passed a pointer to an array of five words (not
579 all will be used on all machines). It operates similarly to the C
580 library function of the same name, but is more efficient. Much of
581 the code below (and for longjmp) is copied from the handling of
582 non-local gotos.
583
584 NOTE: This is intended for use by GNAT and the exception handling
585 scheme in the compiler and will only work in the method used by
586 them. */
587
588 static rtx
589 expand_builtin_setjmp (arglist, target)
590 tree arglist;
591 rtx target;
592 {
593 rtx buf_addr, next_lab, cont_lab;
594
595 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
596 return NULL_RTX;
597
598 if (target == 0 || GET_CODE (target) != REG
599 || REGNO (target) < FIRST_PSEUDO_REGISTER)
600 target = gen_reg_rtx (TYPE_MODE (integer_type_node));
601
602 buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
603
604 next_lab = gen_label_rtx ();
605 cont_lab = gen_label_rtx ();
606
607 expand_builtin_setjmp_setup (buf_addr, next_lab);
608
609 /* Set TARGET to zero and branch to the continue label. */
610 emit_move_insn (target, const0_rtx);
611 emit_jump_insn (gen_jump (cont_lab));
612 emit_barrier ();
613 emit_label (next_lab);
614
615 expand_builtin_setjmp_receiver (next_lab);
616
617 /* Set TARGET to one. */
618 emit_move_insn (target, const1_rtx);
619 emit_label (cont_lab);
620
621 /* Tell flow about the strange goings-on. Putting `next_lab' on
622 `nonlocal_goto_handler_labels' indicates that function
623 calls may traverse the arc back to this label. */
624
625 current_function_has_nonlocal_label = 1;
626 nonlocal_goto_handler_labels
627 = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);
628
629 return target;
630 }
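/* Hypothetical sketch of the interface expanded above and below (the helper
   names are made up); as the comments say, real uses belong to the
   exception-handling machinery, not to user code.  */
#if 0
void *jmp_buf_words[5];   /* five pointer-sized words, per the comment above */

if (__builtin_setjmp (jmp_buf_words) == 0)
  do_protected_work ();   /* direct path: setjmp returned 0 */
else
  handle_unwind ();       /* reached via __builtin_longjmp (jmp_buf_words, 1),
                             which must pass 1 as its second argument */
#endif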
631
632 /* __builtin_longjmp is passed a pointer to an array of five words (not
633 all will be used on all machines). It operates similarly to the C
634 library function of the same name, but is more efficient. Much of
635 the code below is copied from the handling of non-local gotos.
636
637 NOTE: This is intended for use by GNAT and the exception handling
638 scheme in the compiler and will only work in the method used by
639 them. */
640
641 void
642 expand_builtin_longjmp (buf_addr, value)
643 rtx buf_addr, value;
644 {
645 rtx fp, lab, stack, insn, last;
646 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
647
648 if (setjmp_alias_set == -1)
649 setjmp_alias_set = new_alias_set ();
650
651 #ifdef POINTERS_EXTEND_UNSIGNED
652 if (GET_MODE (buf_addr) != Pmode)
653 buf_addr = convert_memory_address (Pmode, buf_addr);
654 #endif
655
656 buf_addr = force_reg (Pmode, buf_addr);
657
658 /* We used to store value in static_chain_rtx, but that fails if pointers
659 are smaller than integers. We instead require that the user must pass
660 a second argument of 1, because that is what builtin_setjmp will
661 return. This also makes EH slightly more efficient, since we are no
662 longer copying around a value that we don't care about. */
663 if (value != const1_rtx)
664 abort ();
665
666 current_function_calls_longjmp = 1;
667
668 last = get_last_insn ();
669 #ifdef HAVE_builtin_longjmp
670 if (HAVE_builtin_longjmp)
671 emit_insn (gen_builtin_longjmp (buf_addr));
672 else
673 #endif
674 {
675 fp = gen_rtx_MEM (Pmode, buf_addr);
676 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
677 GET_MODE_SIZE (Pmode)));
678
679 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
680 2 * GET_MODE_SIZE (Pmode)));
681 set_mem_alias_set (fp, setjmp_alias_set);
682 set_mem_alias_set (lab, setjmp_alias_set);
683 set_mem_alias_set (stack, setjmp_alias_set);
684
685 /* Pick up FP, label, and SP from the block and jump. This code is
686 from expand_goto in stmt.c; see there for detailed comments. */
687 #if HAVE_nonlocal_goto
688 if (HAVE_nonlocal_goto)
689 /* We have to pass a value to the nonlocal_goto pattern that will
690 get copied into the static_chain pointer, but it does not matter
691 what that value is, because builtin_setjmp does not use it. */
692 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
693 else
694 #endif
695 {
696 lab = copy_to_reg (lab);
697
698 emit_move_insn (hard_frame_pointer_rtx, fp);
699 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
700
701 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
702 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
703 emit_indirect_jump (lab);
704 }
705 }
706
707 /* Search backwards and mark the jump insn as a non-local goto.
708 Note that this precludes the use of __builtin_longjmp to a
709 __builtin_setjmp target in the same function. However, we've
710 already cautioned the user that these functions are for
711 internal exception handling use only. */
712 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
713 {
714 if (insn == last)
715 abort ();
716 if (GET_CODE (insn) == JUMP_INSN)
717 {
718 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
719 REG_NOTES (insn));
720 break;
721 }
722 else if (GET_CODE (insn) == CALL_INSN)
723 break;
724 }
725 }
726
727 /* Expand a call to __builtin_prefetch. For a target that does not support
728 data prefetch, evaluate the memory address argument in case it has side
729 effects. */
730
731 static void
732 expand_builtin_prefetch (arglist)
733 tree arglist;
734 {
735 tree arg0, arg1, arg2;
736 rtx op0, op1, op2;
737
738 if (!validate_arglist (arglist, POINTER_TYPE, 0))
739 return;
740
741 arg0 = TREE_VALUE (arglist);
742 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
743 zero (read) and argument 2 (locality) defaults to 3 (high degree of
744 locality). */
745 if (TREE_CHAIN (arglist))
746 {
747 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
748 if (TREE_CHAIN (TREE_CHAIN (arglist)))
749 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
750 else
751 arg2 = build_int_2 (3, 0);
752 }
753 else
754 {
755 arg1 = integer_zero_node;
756 arg2 = build_int_2 (3, 0);
757 }
758
759 /* Argument 0 is an address. */
760 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
761
762 /* Argument 1 (read/write flag) must be a compile-time constant int. */
763 if (TREE_CODE (arg1) != INTEGER_CST)
764 {
765 error ("second arg to `__builtin_prefetch' must be a constant");
766 arg1 = integer_zero_node;
767 }
768 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
769 /* Argument 1 must be either zero or one. */
770 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
771 {
772 warning ("invalid second arg to __builtin_prefetch; using zero");
773 op1 = const0_rtx;
774 }
775
776 /* Argument 2 (locality) must be a compile-time constant int. */
777 if (TREE_CODE (arg2) != INTEGER_CST)
778 {
779 error ("third arg to `__builtin_prefetch' must be a constant");
780 arg2 = integer_zero_node;
781 }
782 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
783 /* Argument 2 must be 0, 1, 2, or 3. */
784 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
785 {
786 warning ("invalid third arg to __builtin_prefetch; using zero");
787 op2 = const0_rtx;
788 }
789
790 #ifdef HAVE_prefetch
791 if (HAVE_prefetch)
792 {
793 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
794 (op0,
795 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
796 || (GET_MODE(op0) != Pmode))
797 {
798 #ifdef POINTERS_EXTEND_UNSIGNED
799 if (GET_MODE(op0) != Pmode)
800 op0 = convert_memory_address (Pmode, op0);
801 #endif
802 op0 = force_reg (Pmode, op0);
803 }
804 emit_insn (gen_prefetch (op0, op1, op2));
805 }
806 else
807 #endif
808 op0 = protect_from_queue (op0, 0);
809 /* Don't do anything with direct references to volatile memory, but
810 generate code to handle other side effects. */
811 if (GET_CODE (op0) != MEM && side_effects_p (op0))
812 emit_insn (op0);
813 }
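/* Illustrative usage sketch (hypothetical function): the argument defaults
   described above mean the two calls below are equivalent.  */
#if 0
static void
example_prefetch_usage (const char *p)
{
  __builtin_prefetch (p);        /* rw omitted -> 0 (read),
                                    locality omitted -> 3 (high) */
  __builtin_prefetch (p, 0, 3);  /* explicit form of the same request */
}
#endif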
814
815 /* Get a MEM rtx for expression EXP which is the address of an operand
816 to be used in a string instruction (cmpstrsi, movstrsi, ...). */
817
818 static rtx
819 get_memory_rtx (exp)
820 tree exp;
821 {
822 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_SUM);
823 rtx mem;
824
825 #ifdef POINTERS_EXTEND_UNSIGNED
826 if (GET_MODE (addr) != Pmode)
827 addr = convert_memory_address (Pmode, addr);
828 #endif
829
830 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
831
832 /* Get an expression we can use to find the attributes to assign to MEM.
833 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
834 we can. First remove any nops. */
835 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
836 || TREE_CODE (exp) == NON_LVALUE_EXPR)
837 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
838 exp = TREE_OPERAND (exp, 0);
839
840 if (TREE_CODE (exp) == ADDR_EXPR)
841 {
842 exp = TREE_OPERAND (exp, 0);
843 set_mem_attributes (mem, exp, 0);
844 }
845 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
846 {
847 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
848 /* memcpy, memset and other builtin stringops can alias with anything. */
849 set_mem_alias_set (mem, 0);
850 }
851
852 return mem;
853 }
854 \f
855 /* Built-in functions to perform an untyped call and return. */
856
857 /* For each register that may be used for calling a function, this
858 gives a mode used to copy the register's value. VOIDmode indicates
859 the register is not used for calling a function. If the machine
860 has register windows, this gives only the outbound registers.
861 INCOMING_REGNO gives the corresponding inbound register. */
862 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
863
864 /* For each register that may be used for returning values, this gives
865 a mode used to copy the register's value. VOIDmode indicates the
866 register is not used for returning values. If the machine has
867 register windows, this gives only the outbound registers.
868 INCOMING_REGNO gives the corresponding inbound register. */
869 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
870
871 /* For each register that may be used for calling a function, this
872 gives the offset of that register into the block returned by
873 __builtin_apply_args. 0 indicates that the register is not
874 used for calling a function. */
875 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
876
877 /* Return the offset of register REGNO into the block returned by
878 __builtin_apply_args. This is not declared static, since it is
879 needed in objc-act.c. */
880
881 int
882 apply_args_register_offset (regno)
883 int regno;
884 {
885 apply_args_size ();
886
887 /* Arguments are always put in outgoing registers (in the argument
888 block) when that makes sense. */
889 #ifdef OUTGOING_REGNO
890 regno = OUTGOING_REGNO (regno);
891 #endif
892 return apply_args_reg_offset[regno];
893 }
894
895 /* Return the size required for the block returned by __builtin_apply_args,
896 and initialize apply_args_mode. */
897
898 static int
899 apply_args_size ()
900 {
901 static int size = -1;
902 int align;
903 unsigned int regno;
904 enum machine_mode mode;
905
906 /* The values computed by this function never change. */
907 if (size < 0)
908 {
909 /* The first value is the incoming arg-pointer. */
910 size = GET_MODE_SIZE (Pmode);
911
912 /* The second value is the structure value address unless this is
913 passed as an "invisible" first argument. */
914 if (struct_value_rtx)
915 size += GET_MODE_SIZE (Pmode);
916
917 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
918 if (FUNCTION_ARG_REGNO_P (regno))
919 {
920 /* Search for the proper mode for copying this register's
921 value. I'm not sure this is right, but it works so far. */
922 enum machine_mode best_mode = VOIDmode;
923
924 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
925 mode != VOIDmode;
926 mode = GET_MODE_WIDER_MODE (mode))
927 if (HARD_REGNO_MODE_OK (regno, mode)
928 && HARD_REGNO_NREGS (regno, mode) == 1)
929 best_mode = mode;
930
931 if (best_mode == VOIDmode)
932 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
933 mode != VOIDmode;
934 mode = GET_MODE_WIDER_MODE (mode))
935 if (HARD_REGNO_MODE_OK (regno, mode)
936 && have_insn_for (SET, mode))
937 best_mode = mode;
938
939 if (best_mode == VOIDmode)
940 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
941 mode != VOIDmode;
942 mode = GET_MODE_WIDER_MODE (mode))
943 if (HARD_REGNO_MODE_OK (regno, mode)
944 && have_insn_for (SET, mode))
945 best_mode = mode;
946
947 if (best_mode == VOIDmode)
948 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
949 mode != VOIDmode;
950 mode = GET_MODE_WIDER_MODE (mode))
951 if (HARD_REGNO_MODE_OK (regno, mode)
952 && have_insn_for (SET, mode))
953 best_mode = mode;
954
955 mode = best_mode;
956 if (mode == VOIDmode)
957 abort ();
958
959 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
960 if (size % align != 0)
961 size = CEIL (size, align) * align;
962 apply_args_reg_offset[regno] = size;
963 size += GET_MODE_SIZE (mode);
964 apply_args_mode[regno] = mode;
965 }
966 else
967 {
968 apply_args_mode[regno] = VOIDmode;
969 apply_args_reg_offset[regno] = 0;
970 }
971 }
972 return size;
973 }
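/* Hypothetical picture of the block whose size is computed above: offset 0
   holds the incoming arg pointer, the next Pmode slot holds the structure
   value address (if any), and each remaining argument register gets a slot
   aligned to its chosen mode; apply_args_reg_offset[] records where each
   register landed.  */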
974
975 /* Return the size required for the block returned by __builtin_apply,
976 and initialize apply_result_mode. */
977
978 static int
979 apply_result_size ()
980 {
981 static int size = -1;
982 int align, regno;
983 enum machine_mode mode;
984
985 /* The values computed by this function never change. */
986 if (size < 0)
987 {
988 size = 0;
989
990 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
991 if (FUNCTION_VALUE_REGNO_P (regno))
992 {
993 /* Search for the proper mode for copying this register's
994 value. I'm not sure this is right, but it works so far. */
995 enum machine_mode best_mode = VOIDmode;
996
997 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
998 mode != TImode;
999 mode = GET_MODE_WIDER_MODE (mode))
1000 if (HARD_REGNO_MODE_OK (regno, mode))
1001 best_mode = mode;
1002
1003 if (best_mode == VOIDmode)
1004 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
1005 mode != VOIDmode;
1006 mode = GET_MODE_WIDER_MODE (mode))
1007 if (HARD_REGNO_MODE_OK (regno, mode)
1008 && have_insn_for (SET, mode))
1009 best_mode = mode;
1010
1011 if (best_mode == VOIDmode)
1012 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
1013 mode != VOIDmode;
1014 mode = GET_MODE_WIDER_MODE (mode))
1015 if (HARD_REGNO_MODE_OK (regno, mode)
1016 && have_insn_for (SET, mode))
1017 best_mode = mode;
1018
1019 if (best_mode == VOIDmode)
1020 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
1021 mode != VOIDmode;
1022 mode = GET_MODE_WIDER_MODE (mode))
1023 if (HARD_REGNO_MODE_OK (regno, mode)
1024 && have_insn_for (SET, mode))
1025 best_mode = mode;
1026
1027 mode = best_mode;
1028 if (mode == VOIDmode)
1029 abort ();
1030
1031 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1032 if (size % align != 0)
1033 size = CEIL (size, align) * align;
1034 size += GET_MODE_SIZE (mode);
1035 apply_result_mode[regno] = mode;
1036 }
1037 else
1038 apply_result_mode[regno] = VOIDmode;
1039
1040 /* Allow targets that use untyped_call and untyped_return to override
1041 the size so that machine-specific information can be stored here. */
1042 #ifdef APPLY_RESULT_SIZE
1043 size = APPLY_RESULT_SIZE;
1044 #endif
1045 }
1046 return size;
1047 }
1048
1049 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1050 /* Create a vector describing the result block RESULT. If SAVEP is true,
1051 the result block is used to save the values; otherwise it is used to
1052 restore the values. */
1053
1054 static rtx
1055 result_vector (savep, result)
1056 int savep;
1057 rtx result;
1058 {
1059 int regno, size, align, nelts;
1060 enum machine_mode mode;
1061 rtx reg, mem;
1062 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1063
1064 size = nelts = 0;
1065 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1066 if ((mode = apply_result_mode[regno]) != VOIDmode)
1067 {
1068 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1069 if (size % align != 0)
1070 size = CEIL (size, align) * align;
1071 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1072 mem = adjust_address (result, mode, size);
1073 savevec[nelts++] = (savep
1074 ? gen_rtx_SET (VOIDmode, mem, reg)
1075 : gen_rtx_SET (VOIDmode, reg, mem));
1076 size += GET_MODE_SIZE (mode);
1077 }
1078 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1079 }
1080 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1081
1082 /* Save the state required to perform an untyped call with the same
1083 arguments as were passed to the current function. */
1084
1085 static rtx
1086 expand_builtin_apply_args_1 ()
1087 {
1088 rtx registers;
1089 int size, align, regno;
1090 enum machine_mode mode;
1091
1092 /* Create a block where the arg-pointer, structure value address,
1093 and argument registers can be saved. */
1094 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1095
1096 /* Walk past the arg-pointer and structure value address. */
1097 size = GET_MODE_SIZE (Pmode);
1098 if (struct_value_rtx)
1099 size += GET_MODE_SIZE (Pmode);
1100
1101 /* Save each register used in calling a function to the block. */
1102 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1103 if ((mode = apply_args_mode[regno]) != VOIDmode)
1104 {
1105 rtx tem;
1106
1107 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1108 if (size % align != 0)
1109 size = CEIL (size, align) * align;
1110
1111 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1112
1113 emit_move_insn (adjust_address (registers, mode, size), tem);
1114 size += GET_MODE_SIZE (mode);
1115 }
1116
1117 /* Save the arg pointer to the block. */
1118 emit_move_insn (adjust_address (registers, Pmode, 0),
1119 copy_to_reg (virtual_incoming_args_rtx));
1120 size = GET_MODE_SIZE (Pmode);
1121
1122 /* Save the structure value address unless this is passed as an
1123 "invisible" first argument. */
1124 if (struct_value_incoming_rtx)
1125 {
1126 emit_move_insn (adjust_address (registers, Pmode, size),
1127 copy_to_reg (struct_value_incoming_rtx));
1128 size += GET_MODE_SIZE (Pmode);
1129 }
1130
1131 /* Return the address of the block. */
1132 return copy_addr_to_reg (XEXP (registers, 0));
1133 }
1134
1135 /* __builtin_apply_args returns a block of memory allocated on
1136 the stack into which are stored the arg pointer, structure
1137 value address, static chain, and all the registers that might
1138 possibly be used in performing a function call. The code is
1139 moved to the start of the function so the incoming values are
1140 saved. */
1141
1142 static rtx
1143 expand_builtin_apply_args ()
1144 {
1145 /* Don't do __builtin_apply_args more than once in a function.
1146 Save the result of the first call and reuse it. */
1147 if (apply_args_value != 0)
1148 return apply_args_value;
1149 {
1150 /* When this function is called, it means that registers must be
1151 saved on entry to this function. So we migrate the
1152 call to the first insn of this function. */
1153 rtx temp;
1154 rtx seq;
1155
1156 start_sequence ();
1157 temp = expand_builtin_apply_args_1 ();
1158 seq = get_insns ();
1159 end_sequence ();
1160
1161 apply_args_value = temp;
1162
1163 /* Put the insns after the NOTE that starts the function.
1164 If this is inside a start_sequence, make the outer-level insn
1165 chain current, so the code is placed at the start of the
1166 function. */
1167 push_topmost_sequence ();
1168 emit_insn_before (seq, NEXT_INSN (get_insns ()));
1169 pop_topmost_sequence ();
1170 return temp;
1171 }
1172 }
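/* Hypothetical sketch of the user-level idiom served by
   __builtin_apply_args, __builtin_apply and __builtin_return: forward the
   current arguments to another function and hand back whatever it returned,
   without knowing the signature.  TARGET_FN and the argument-block size of
   64 bytes are made-up values.  */
#if 0
void *args = __builtin_apply_args ();
void *result = __builtin_apply ((void (*) ()) target_fn, args, 64);
__builtin_return (result);
#endif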
1173
1174 /* Perform an untyped call and save the state required to perform an
1175 untyped return of whatever value was returned by the given function. */
1176
1177 static rtx
1178 expand_builtin_apply (function, arguments, argsize)
1179 rtx function, arguments, argsize;
1180 {
1181 int size, align, regno;
1182 enum machine_mode mode;
1183 rtx incoming_args, result, reg, dest, src, call_insn;
1184 rtx old_stack_level = 0;
1185 rtx call_fusage = 0;
1186
1187 #ifdef POINTERS_EXTEND_UNSIGNED
1188 if (GET_MODE (arguments) != Pmode)
1189 arguments = convert_memory_address (Pmode, arguments);
1190 #endif
1191
1192 /* Create a block where the return registers can be saved. */
1193 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1194
1195 /* Fetch the arg pointer from the ARGUMENTS block. */
1196 incoming_args = gen_reg_rtx (Pmode);
1197 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1198 #ifndef STACK_GROWS_DOWNWARD
1199 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1200 incoming_args, 0, OPTAB_LIB_WIDEN);
1201 #endif
1202
1203 /* Perform postincrements before actually calling the function. */
1204 emit_queue ();
1205
1206 /* Push a new argument block and copy the arguments. Do not allow
1207 the (potential) memcpy call below to interfere with our stack
1208 manipulations. */
1209 do_pending_stack_adjust ();
1210 NO_DEFER_POP;
1211
1212 /* Save the stack with the nonlocal save mechanism, if available. */
1213 #ifdef HAVE_save_stack_nonlocal
1214 if (HAVE_save_stack_nonlocal)
1215 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1216 else
1217 #endif
1218 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1219
1220 /* Push a block of memory onto the stack to store the memory arguments.
1221 Save the address in a register, and copy the memory arguments. ??? I
1222 haven't figured out how the calling convention macros effect this,
1223 but it's likely that the source and/or destination addresses in
1224 the block copy will need updating in machine specific ways. */
1225 dest = allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1226 dest = gen_rtx_MEM (BLKmode, dest);
1227 set_mem_align (dest, PARM_BOUNDARY);
1228 src = gen_rtx_MEM (BLKmode, incoming_args);
1229 set_mem_align (src, PARM_BOUNDARY);
1230 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1231
1232 /* Refer to the argument block. */
1233 apply_args_size ();
1234 arguments = gen_rtx_MEM (BLKmode, arguments);
1235 set_mem_align (arguments, PARM_BOUNDARY);
1236
1237 /* Walk past the arg-pointer and structure value address. */
1238 size = GET_MODE_SIZE (Pmode);
1239 if (struct_value_rtx)
1240 size += GET_MODE_SIZE (Pmode);
1241
1242 /* Restore each of the registers previously saved. Make USE insns
1243 for each of these registers for use in making the call. */
1244 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1245 if ((mode = apply_args_mode[regno]) != VOIDmode)
1246 {
1247 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1248 if (size % align != 0)
1249 size = CEIL (size, align) * align;
1250 reg = gen_rtx_REG (mode, regno);
1251 emit_move_insn (reg, adjust_address (arguments, mode, size));
1252 use_reg (&call_fusage, reg);
1253 size += GET_MODE_SIZE (mode);
1254 }
1255
1256 /* Restore the structure value address unless this is passed as an
1257 "invisible" first argument. */
1258 size = GET_MODE_SIZE (Pmode);
1259 if (struct_value_rtx)
1260 {
1261 rtx value = gen_reg_rtx (Pmode);
1262 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1263 emit_move_insn (struct_value_rtx, value);
1264 if (GET_CODE (struct_value_rtx) == REG)
1265 use_reg (&call_fusage, struct_value_rtx);
1266 size += GET_MODE_SIZE (Pmode);
1267 }
1268
1269 /* All arguments and registers used for the call are set up by now! */
1270 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0, 0);
1271
1272 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1273 and we don't want to load it into a register as an optimization,
1274 because prepare_call_address already did it if it should be done. */
1275 if (GET_CODE (function) != SYMBOL_REF)
1276 function = memory_address (FUNCTION_MODE, function);
1277
1278 /* Generate the actual call instruction and save the return value. */
1279 #ifdef HAVE_untyped_call
1280 if (HAVE_untyped_call)
1281 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1282 result, result_vector (1, result)));
1283 else
1284 #endif
1285 #ifdef HAVE_call_value
1286 if (HAVE_call_value)
1287 {
1288 rtx valreg = 0;
1289
1290 /* Locate the unique return register. It is not possible to
1291 express a call that sets more than one return register using
1292 call_value; use untyped_call for that. In fact, untyped_call
1293 only needs to save the return registers in the given block. */
1294 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1295 if ((mode = apply_result_mode[regno]) != VOIDmode)
1296 {
1297 if (valreg)
1298 abort (); /* HAVE_untyped_call required. */
1299 valreg = gen_rtx_REG (mode, regno);
1300 }
1301
1302 emit_call_insn (GEN_CALL_VALUE (valreg,
1303 gen_rtx_MEM (FUNCTION_MODE, function),
1304 const0_rtx, NULL_RTX, const0_rtx));
1305
1306 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1307 }
1308 else
1309 #endif
1310 abort ();
1311
1312 /* Find the CALL insn we just emitted. */
1313 for (call_insn = get_last_insn ();
1314 call_insn && GET_CODE (call_insn) != CALL_INSN;
1315 call_insn = PREV_INSN (call_insn))
1316 ;
1317
1318 if (! call_insn)
1319 abort ();
1320
1321 /* Put the register usage information on the CALL. If there is already
1322 some usage information, put ours at the end. */
1323 if (CALL_INSN_FUNCTION_USAGE (call_insn))
1324 {
1325 rtx link;
1326
1327 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
1328 link = XEXP (link, 1))
1329 ;
1330
1331 XEXP (link, 1) = call_fusage;
1332 }
1333 else
1334 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
1335
1336 /* Restore the stack. */
1337 #ifdef HAVE_save_stack_nonlocal
1338 if (HAVE_save_stack_nonlocal)
1339 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1340 else
1341 #endif
1342 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1343
1344 OK_DEFER_POP;
1345
1346 /* Return the address of the result block. */
1347 return copy_addr_to_reg (XEXP (result, 0));
1348 }
1349
1350 /* Perform an untyped return. */
1351
1352 static void
1353 expand_builtin_return (result)
1354 rtx result;
1355 {
1356 int size, align, regno;
1357 enum machine_mode mode;
1358 rtx reg;
1359 rtx call_fusage = 0;
1360
1361 #ifdef POINTERS_EXTEND_UNSIGNED
1362 if (GET_MODE (result) != Pmode)
1363 result = convert_memory_address (Pmode, result);
1364 #endif
1365
1366 apply_result_size ();
1367 result = gen_rtx_MEM (BLKmode, result);
1368
1369 #ifdef HAVE_untyped_return
1370 if (HAVE_untyped_return)
1371 {
1372 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1373 emit_barrier ();
1374 return;
1375 }
1376 #endif
1377
1378 /* Restore the return value and note that each value is used. */
1379 size = 0;
1380 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1381 if ((mode = apply_result_mode[regno]) != VOIDmode)
1382 {
1383 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1384 if (size % align != 0)
1385 size = CEIL (size, align) * align;
1386 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1387 emit_move_insn (reg, adjust_address (result, mode, size));
1388
1389 push_to_sequence (call_fusage);
1390 emit_insn (gen_rtx_USE (VOIDmode, reg));
1391 call_fusage = get_insns ();
1392 end_sequence ();
1393 size += GET_MODE_SIZE (mode);
1394 }
1395
1396 /* Put the USE insns before the return. */
1397 emit_insn (call_fusage);
1398
1399 /* Return whatever values were restored by jumping directly to the end
1400 of the function. */
1401 expand_null_return ();
1402 }
1403
1404 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1405
1406 static enum type_class
1407 type_to_class (type)
1408 tree type;
1409 {
1410 switch (TREE_CODE (type))
1411 {
1412 case VOID_TYPE: return void_type_class;
1413 case INTEGER_TYPE: return integer_type_class;
1414 case CHAR_TYPE: return char_type_class;
1415 case ENUMERAL_TYPE: return enumeral_type_class;
1416 case BOOLEAN_TYPE: return boolean_type_class;
1417 case POINTER_TYPE: return pointer_type_class;
1418 case REFERENCE_TYPE: return reference_type_class;
1419 case OFFSET_TYPE: return offset_type_class;
1420 case REAL_TYPE: return real_type_class;
1421 case COMPLEX_TYPE: return complex_type_class;
1422 case FUNCTION_TYPE: return function_type_class;
1423 case METHOD_TYPE: return method_type_class;
1424 case RECORD_TYPE: return record_type_class;
1425 case UNION_TYPE:
1426 case QUAL_UNION_TYPE: return union_type_class;
1427 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1428 ? string_type_class : array_type_class);
1429 case SET_TYPE: return set_type_class;
1430 case FILE_TYPE: return file_type_class;
1431 case LANG_TYPE: return lang_type_class;
1432 default: return no_type_class;
1433 }
1434 }
1435
1436 /* Expand a call to __builtin_classify_type with arguments found in
1437 ARGLIST. */
1438
1439 static rtx
1440 expand_builtin_classify_type (arglist)
1441 tree arglist;
1442 {
1443 if (arglist != 0)
1444 return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
1445 return GEN_INT (no_type_class);
1446 }
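/* Hypothetical examples of the classification performed above; the returned
   values are the enum type_class codes from typeclass.h.  */
#if 0
__builtin_classify_type (42);          /* integer_type_class */
__builtin_classify_type (3.14);        /* real_type_class */
__builtin_classify_type ((char *) 0);  /* pointer_type_class */
#endif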
1447
1448 /* Expand expression EXP, which is a call to __builtin_constant_p. */
1449
1450 static rtx
1451 expand_builtin_constant_p (exp)
1452 tree exp;
1453 {
1454 tree arglist = TREE_OPERAND (exp, 1);
1455 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
1456 rtx tmp;
1457
1458 if (arglist == 0)
1459 return const0_rtx;
1460 arglist = TREE_VALUE (arglist);
1461
1462 /* We have taken care of the easy cases during constant folding. This
1463 case is not obvious, so emit (constant_p_rtx (ARGLIST)) and let CSE get a
1464 chance to see if it can deduce whether ARGLIST is constant. */
1465
1466 tmp = expand_expr (arglist, NULL_RTX, VOIDmode, 0);
1467 tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp);
1468 return tmp;
1469 }
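/* Illustrative use of the builtin expanded above (the helper names are
   hypothetical): pick a cheap path when the compiler can prove the argument
   is a compile-time constant, and a general path otherwise.  */
#if 0
if (__builtin_constant_p (n) && n == 0)
  fast_path ();        /* compiler proved N is the constant 0 */
else
  general_path (n);    /* not provably constant: runtime code */
#endif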
1470
1471 /* Expand a call to one of the builtin math functions (sin, cos, or sqrt).
1472 Return 0 if a normal call should be emitted rather than expanding the
1473 function in-line. EXP is the expression that is a call to the builtin
1474 function; if convenient, the result should be placed in TARGET.
1475 SUBTARGET may be used as the target for computing one of EXP's operands. */
1476
1477 static rtx
1478 expand_builtin_mathfn (exp, target, subtarget)
1479 tree exp;
1480 rtx target, subtarget;
1481 {
1482 optab builtin_optab;
1483 rtx op0, insns;
1484 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1485 tree arglist = TREE_OPERAND (exp, 1);
1486 enum machine_mode argmode;
1487
1488 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
1489 return 0;
1490
1491 /* Stabilize and compute the argument. */
1492 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
1493 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
1494 {
1495 exp = copy_node (exp);
1496 TREE_OPERAND (exp, 1) = arglist;
1497 /* Wrap the computation of the argument in a SAVE_EXPR. That
1498 way, if we need to expand the argument again (as in the
1499 flag_errno_math case below where we cannot directly set
1500 errno), we will not perform side-effects more than once.
1501 Note that here we're mutating the original EXP as well as the
1502 copy; that's the right thing to do in case the original EXP
1503 is expanded later. */
1504 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
1505 arglist = copy_node (arglist);
1506 }
1507 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
1508
1509 /* Make a suitable register to place result in. */
1510 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
1511
1512 emit_queue ();
1513 start_sequence ();
1514
1515 switch (DECL_FUNCTION_CODE (fndecl))
1516 {
1517 case BUILT_IN_SIN:
1518 case BUILT_IN_SINF:
1519 case BUILT_IN_SINL:
1520 builtin_optab = sin_optab; break;
1521 case BUILT_IN_COS:
1522 case BUILT_IN_COSF:
1523 case BUILT_IN_COSL:
1524 builtin_optab = cos_optab; break;
1525 case BUILT_IN_SQRT:
1526 case BUILT_IN_SQRTF:
1527 case BUILT_IN_SQRTL:
1528 builtin_optab = sqrt_optab; break;
1529 case BUILT_IN_EXP:
1530 case BUILT_IN_EXPF:
1531 case BUILT_IN_EXPL:
1532 builtin_optab = exp_optab; break;
1533 case BUILT_IN_LOG:
1534 case BUILT_IN_LOGF:
1535 case BUILT_IN_LOGL:
1536 builtin_optab = log_optab; break;
1537 default:
1538 abort ();
1539 }
1540
1541 /* Compute into TARGET.
1542 Set TARGET to wherever the result comes back. */
1543 argmode = TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist)));
1544 target = expand_unop (argmode, builtin_optab, op0, target, 0);
1545
1546 /* If we were unable to expand via the builtin, stop the
1547 sequence (without outputting the insns) and return 0, causing
1548 a call to the library function. */
1549 if (target == 0)
1550 {
1551 end_sequence ();
1552 return 0;
1553 }
1554
1555 /* If errno must be maintained, we must set it to EDOM for NaN results. */
1556
1557 if (flag_errno_math && HONOR_NANS (argmode))
1558 {
1559 rtx lab1;
1560
1561 lab1 = gen_label_rtx ();
1562
1563 /* Test the result; if it is NaN, set errno=EDOM because
1564 the argument was not in the domain. */
1565 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1566 0, lab1);
1567
1568 #ifdef TARGET_EDOM
1569 {
1570 #ifdef GEN_ERRNO_RTX
1571 rtx errno_rtx = GEN_ERRNO_RTX;
1572 #else
1573 rtx errno_rtx
1574 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1575 #endif
1576
1577 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1578 }
1579 #else
1580 /* We can't set errno=EDOM directly; let the library call do it.
1581 Pop the arguments right away in case the call gets deleted. */
1582 NO_DEFER_POP;
1583 expand_call (exp, target, 0);
1584 OK_DEFER_POP;
1585 #endif
1586
1587 emit_label (lab1);
1588 }
1589
1590 /* Output the entire sequence. */
1591 insns = get_insns ();
1592 end_sequence ();
1593 emit_insn (insns);
1594
1595 return target;
1596 }
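/* Sketch, in C terms, of the errno handling above (hypothetical snippet,
   assuming <errno.h>): a NaN result is detected with a self-comparison,
   which is exactly what the EQ test around the errno store implements.  */
#if 0
double r = __builtin_sqrt (x);
if (r != r)          /* only a NaN compares unequal to itself */
  errno = EDOM;      /* argument was outside the domain */
#endif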
1597
1598 /* Expand expression EXP which is a call to the strlen builtin. Return 0
1599 if we failed and the caller should emit a normal call, otherwise
1600 try to get the result in TARGET, if convenient. */
1601
1602 static rtx
1603 expand_builtin_strlen (exp, target)
1604 tree exp;
1605 rtx target;
1606 {
1607 tree arglist = TREE_OPERAND (exp, 1);
1608 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
1609
1610 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
1611 return 0;
1612 else
1613 {
1614 rtx pat;
1615 tree src = TREE_VALUE (arglist);
1616
1617 int align
1618 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
1619
1620 rtx result, src_reg, char_rtx, before_strlen;
1621 enum machine_mode insn_mode = value_mode, char_mode;
1622 enum insn_code icode = CODE_FOR_nothing;
1623
1624 /* If SRC is not a pointer type, don't do this operation inline. */
1625 if (align == 0)
1626 return 0;
1627
1628 /* Bail out if we can't compute strlen in the right mode. */
1629 while (insn_mode != VOIDmode)
1630 {
1631 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
1632 if (icode != CODE_FOR_nothing)
1633 break;
1634
1635 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
1636 }
1637 if (insn_mode == VOIDmode)
1638 return 0;
1639
1640 /* Make a place to write the result of the instruction. */
1641 result = target;
1642 if (! (result != 0
1643 && GET_CODE (result) == REG
1644 && GET_MODE (result) == insn_mode
1645 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
1646 result = gen_reg_rtx (insn_mode);
1647
1648 /* Make a place to hold the source address. We will not expand
1649 the actual source until we are sure that the expansion will
1650 not fail -- there are trees that cannot be expanded twice. */
1651 src_reg = gen_reg_rtx (Pmode);
1652
1653 /* Mark the beginning of the strlen sequence so we can emit the
1654 source operand later. */
1655 before_strlen = get_last_insn ();
1656
1657 char_rtx = const0_rtx;
1658 char_mode = insn_data[(int) icode].operand[2].mode;
1659 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
1660 char_mode))
1661 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
1662
1663 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
1664 char_rtx, GEN_INT (align));
1665 if (! pat)
1666 return 0;
1667 emit_insn (pat);
1668
1669 /* Now that we are assured of success, expand the source. */
1670 start_sequence ();
1671 pat = memory_address (BLKmode,
1672 expand_expr (src, src_reg, ptr_mode, EXPAND_SUM));
1673 if (pat != src_reg)
1674 emit_move_insn (src_reg, pat);
1675 pat = get_insns ();
1676 end_sequence ();
1677
1678 if (before_strlen)
1679 emit_insn_after (pat, before_strlen);
1680 else
1681 emit_insn_before (pat, get_insns ());
1682
1683 /* Return the value in the proper mode for this function. */
1684 if (GET_MODE (result) == value_mode)
1685 target = result;
1686 else if (target != 0)
1687 convert_move (target, result, 0);
1688 else
1689 target = convert_to_mode (value_mode, result, 0);
1690
1691 return target;
1692 }
1693 }
1694
1695 /* Expand a call to the strstr builtin. Return 0 if we failed and the
1696 caller should emit a normal call, otherwise try to get the result
1697 in TARGET, if convenient (and in mode MODE if that's convenient). */
1698
1699 static rtx
1700 expand_builtin_strstr (arglist, target, mode)
1701 tree arglist;
1702 rtx target;
1703 enum machine_mode mode;
1704 {
1705 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1706 return 0;
1707 else
1708 {
1709 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
1710 tree fn;
1711 const char *p1, *p2;
1712
1713 p2 = c_getstr (s2);
1714 if (p2 == NULL)
1715 return 0;
1716
1717 p1 = c_getstr (s1);
1718 if (p1 != NULL)
1719 {
1720 const char *r = strstr (p1, p2);
1721
1722 if (r == NULL)
1723 return const0_rtx;
1724
1725 /* Return an offset into the constant string argument. */
1726 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
1727 s1, ssize_int (r - p1))),
1728 target, mode, EXPAND_NORMAL);
1729 }
1730
1731 if (p2[0] == '\0')
1732 return expand_expr (s1, target, mode, EXPAND_NORMAL);
1733
1734 if (p2[1] != '\0')
1735 return 0;
1736
1737 fn = built_in_decls[BUILT_IN_STRCHR];
1738 if (!fn)
1739 return 0;
1740
1741 /* New argument list transforming strstr(s1, s2) to
1742 strchr(s1, s2[0]). */
1743 arglist =
1744 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
1745 arglist = tree_cons (NULL_TREE, s1, arglist);
1746 return expand_expr (build_function_call_expr (fn, arglist),
1747 target, mode, EXPAND_NORMAL);
1748 }
1749 }
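
/* Editorial sketch, not part of the original source: the rewrites the
   expander above performs when the second argument is a string constant
   (s is a hypothetical char * argument):

     strstr (s, "")         =>  s
     strstr (s, "/")        =>  strchr (s, '/')
     strstr ("abcd", "cd")  =>  "abcd" + 2    (folded at compile time)  */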
1750
1751 /* Expand a call to the strchr builtin. Return 0 if we failed; the
1752 caller should emit a normal call. Otherwise try to get the result
1753 in TARGET, if convenient (and in mode MODE if that's convenient). */
1754
1755 static rtx
1756 expand_builtin_strchr (arglist, target, mode)
1757 tree arglist;
1758 rtx target;
1759 enum machine_mode mode;
1760 {
1761 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
1762 return 0;
1763 else
1764 {
1765 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
1766 const char *p1;
1767
1768 if (TREE_CODE (s2) != INTEGER_CST)
1769 return 0;
1770
1771 p1 = c_getstr (s1);
1772 if (p1 != NULL)
1773 {
1774 char c;
1775 const char *r;
1776
1777 if (target_char_cast (s2, &c))
1778 return 0;
1779
1780 r = strchr (p1, c);
1781
1782 if (r == NULL)
1783 return const0_rtx;
1784
1785 /* Return an offset into the constant string argument. */
1786 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
1787 s1, ssize_int (r - p1))),
1788 target, mode, EXPAND_NORMAL);
1789 }
1790
1791 /* FIXME: Should use the strchrM optab here so that ports can
1792 optimize this. */
1793 return 0;
1794 }
1795 }
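
/* Editorial sketch, not part of the original source: with a constant
   string and a constant character the call above is folded outright:

     strchr ("abcd", 'c')  =>  "abcd" + 2
     strchr ("abcd", 'x')  =>  (char *) 0

   For a non-constant string nothing is done inline yet; see the FIXME
   above about a strchrM optab.  */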
1796
1797 /* Expand a call to the strrchr builtin. Return 0 if we failed; the
1798 caller should emit a normal call. Otherwise try to get the result
1799 in TARGET, if convenient (and in mode MODE if that's convenient). */
1800
1801 static rtx
1802 expand_builtin_strrchr (arglist, target, mode)
1803 tree arglist;
1804 rtx target;
1805 enum machine_mode mode;
1806 {
1807 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
1808 return 0;
1809 else
1810 {
1811 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
1812 tree fn;
1813 const char *p1;
1814
1815 if (TREE_CODE (s2) != INTEGER_CST)
1816 return 0;
1817
1818 p1 = c_getstr (s1);
1819 if (p1 != NULL)
1820 {
1821 char c;
1822 const char *r;
1823
1824 if (target_char_cast (s2, &c))
1825 return 0;
1826
1827 r = strrchr (p1, c);
1828
1829 if (r == NULL)
1830 return const0_rtx;
1831
1832 /* Return an offset into the constant string argument. */
1833 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
1834 s1, ssize_int (r - p1))),
1835 target, mode, EXPAND_NORMAL);
1836 }
1837
1838 if (! integer_zerop (s2))
1839 return 0;
1840
1841 fn = built_in_decls[BUILT_IN_STRCHR];
1842 if (!fn)
1843 return 0;
1844
1845 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
1846 return expand_expr (build_function_call_expr (fn, arglist),
1847 target, mode, EXPAND_NORMAL);
1848 }
1849 }
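
/* Editorial sketch, not part of the original source (s is a hypothetical
   char * argument):

     strrchr ("abab", 'a')  =>  "abab" + 2    (folded at compile time)
     strrchr (s, '\0')      =>  strchr (s, '\0')

   The second rewrite is valid because both calls locate the terminating
   NUL of the string.  */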
1850
1851 /* Expand a call to the strpbrk builtin. Return 0 if we failed; the
1852 caller should emit a normal call. Otherwise try to get the result
1853 in TARGET, if convenient (and in mode MODE if that's convenient). */
1854
1855 static rtx
1856 expand_builtin_strpbrk (arglist, target, mode)
1857 tree arglist;
1858 rtx target;
1859 enum machine_mode mode;
1860 {
1861 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1862 return 0;
1863 else
1864 {
1865 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
1866 tree fn;
1867 const char *p1, *p2;
1868
1869 p2 = c_getstr (s2);
1870 if (p2 == NULL)
1871 return 0;
1872
1873 p1 = c_getstr (s1);
1874 if (p1 != NULL)
1875 {
1876 const char *r = strpbrk (p1, p2);
1877
1878 if (r == NULL)
1879 return const0_rtx;
1880
1881 /* Return an offset into the constant string argument. */
1882 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
1883 s1, ssize_int (r - p1))),
1884 target, mode, EXPAND_NORMAL);
1885 }
1886
1887 if (p2[0] == '\0')
1888 {
1889 /* strpbrk(x, "") == NULL.
1890 Evaluate and ignore argument s1 in case it has
1891 side-effects. */
1892 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
1893 return const0_rtx;
1894 }
1895
1896 if (p2[1] != '\0')
1897 return 0; /* Really call strpbrk. */
1898
1899 fn = built_in_decls[BUILT_IN_STRCHR];
1900 if (!fn)
1901 return 0;
1902
1903 /* New argument list transforming strpbrk(s1, s2) to
1904 strchr(s1, s2[0]). */
1905 arglist =
1906 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
1907 arglist = tree_cons (NULL_TREE, s1, arglist);
1908 return expand_expr (build_function_call_expr (fn, arglist),
1909 target, mode, EXPAND_NORMAL);
1910 }
1911 }
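
/* Editorial sketch, not part of the original source (s is a hypothetical
   char * argument):

     strpbrk ("abcd", "dx")  =>  "abcd" + 3   (folded at compile time)
     strpbrk (s, "")         =>  (char *) 0
     strpbrk (s, "/")        =>  strchr (s, '/')  */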
1912
1913 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
1914 bytes from constant string DATA + OFFSET and return it as target
1915 constant. */
1916
1917 static rtx
1918 builtin_memcpy_read_str (data, offset, mode)
1919 PTR data;
1920 HOST_WIDE_INT offset;
1921 enum machine_mode mode;
1922 {
1923 const char *str = (const char *) data;
1924
1925 if (offset < 0
1926 || ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
1927 > strlen (str) + 1))
1928 abort (); /* Attempt to read past the end of constant string. */
1929
1930 return c_readstr (str + offset, mode);
1931 }
1932
1933 /* Expand a call to the memcpy builtin, with arguments in ARGLIST.
1934 Return 0 if we failed; the caller should emit a normal call. Otherwise
1935 try to get the result in TARGET, if convenient (and in mode MODE if
1936 that's convenient). */
1937
1938 static rtx
1939 expand_builtin_memcpy (arglist, target, mode)
1940 tree arglist;
1941 rtx target;
1942 enum machine_mode mode;
1943 {
1944 if (!validate_arglist (arglist,
1945 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
1946 return 0;
1947 else
1948 {
1949 tree dest = TREE_VALUE (arglist);
1950 tree src = TREE_VALUE (TREE_CHAIN (arglist));
1951 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
1952 const char *src_str;
1953
1954 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
1955 unsigned int dest_align
1956 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
1957 rtx dest_mem, src_mem, dest_addr, len_rtx;
1958
1959 /* If DEST is not a pointer type, call the normal function. */
1960 if (dest_align == 0)
1961 return 0;
1962
1963 /* If the LEN parameter is zero, return DEST. */
1964 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
1965 {
1966 /* Evaluate and ignore SRC in case it has side-effects. */
1967 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
1968 return expand_expr (dest, target, mode, EXPAND_NORMAL);
1969 }
1970
1971 /* If SRC is not a pointer type, don't do this
1972 operation in-line. */
1973 if (src_align == 0)
1974 return 0;
1975
1976 dest_mem = get_memory_rtx (dest);
1977 set_mem_align (dest_mem, dest_align);
1978 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
1979 src_str = c_getstr (src);
1980
1981 /* If SRC is a string constant and block move would be done
1982 by pieces, we can avoid loading the string from memory
1983 and only store the computed constants. */
1984 if (src_str
1985 && GET_CODE (len_rtx) == CONST_INT
1986 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
1987 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
1988 (PTR) src_str, dest_align))
1989 {
1990 store_by_pieces (dest_mem, INTVAL (len_rtx),
1991 builtin_memcpy_read_str,
1992 (PTR) src_str, dest_align);
1993 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
1994 #ifdef POINTERS_EXTEND_UNSIGNED
1995 if (GET_MODE (dest_mem) != ptr_mode)
1996 dest_mem = convert_memory_address (ptr_mode, dest_mem);
1997 #endif
1998 return dest_mem;
1999 }
2000
2001 src_mem = get_memory_rtx (src);
2002 set_mem_align (src_mem, src_align);
2003
2004 /* Copy the block as efficiently as the target allows. */
2005 dest_addr = emit_block_move (dest_mem, src_mem, len_rtx,
2006 BLOCK_OP_NORMAL);
2007
2008 if (dest_addr == 0)
2009 {
2010 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2011 #ifdef POINTERS_EXTEND_UNSIGNED
2012 if (GET_MODE (dest_addr) != ptr_mode)
2013 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2014 #endif
2015 }
2016
2017 return dest_addr;
2018 }
2019 }
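
/* Editorial sketch, not part of the original source: assuming buf is a
   hypothetical, suitably aligned char array, a call such as

     __builtin_memcpy (buf, "hi", 3);

   never loads the string "hi" from memory; the three bytes (including the
   terminating NUL) are emitted directly through store_by_pieces.  A
   zero-length copy reduces to evaluating the arguments and returning the
   destination, and everything else goes through emit_block_move.  */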
2020
2021 /* Expand expression EXP, which is a call to the strcpy builtin. Return 0
2022 if we failed; the caller should emit a normal call. Otherwise try to get
2023 the result in TARGET, if convenient (and in mode MODE if that's
2024 convenient). */
2025
2026 static rtx
2027 expand_builtin_strcpy (exp, target, mode)
2028 tree exp;
2029 rtx target;
2030 enum machine_mode mode;
2031 {
2032 tree arglist = TREE_OPERAND (exp, 1);
2033 tree fn, len;
2034
2035 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2036 return 0;
2037
2038 fn = built_in_decls[BUILT_IN_MEMCPY];
2039 if (!fn)
2040 return 0;
2041
2042 len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
2043 if (len == 0)
2044 return 0;
2045
2046 len = size_binop (PLUS_EXPR, len, ssize_int (1));
2047 chainon (arglist, build_tree_list (NULL_TREE, len));
2048 return expand_expr (build_function_call_expr (fn, arglist),
2049 target, mode, EXPAND_NORMAL);
2050 }
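
/* Editorial sketch, not part of the original source: when the length of
   the source string is known at compile time, the call is rewritten in
   terms of memcpy (d is a hypothetical char * destination):

     strcpy (d, "hello")  =>  memcpy (d, "hello", 6)  */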
2051
2052 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2053 bytes from constant string DATA + OFFSET and return it as target
2054 constant. */
2055
2056 static rtx
2057 builtin_strncpy_read_str (data, offset, mode)
2058 PTR data;
2059 HOST_WIDE_INT offset;
2060 enum machine_mode mode;
2061 {
2062 const char *str = (const char *) data;
2063
2064 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
2065 return const0_rtx;
2066
2067 return c_readstr (str + offset, mode);
2068 }
2069
2070 /* Expand a call to the strncpy builtin, with arguments in ARGLIST.
2071 Return 0 if we failed; the caller should emit a normal call. */
2072
2073 static rtx
2074 expand_builtin_strncpy (arglist, target, mode)
2075 tree arglist;
2076 rtx target;
2077 enum machine_mode mode;
2078 {
2079 if (!validate_arglist (arglist,
2080 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2081 return 0;
2082 else
2083 {
2084 tree slen = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
2085 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2086 tree fn;
2087
2088 /* We must be passed a constant len parameter. */
2089 if (TREE_CODE (len) != INTEGER_CST)
2090 return 0;
2091
2092 /* If the len parameter is zero, return the dst parameter. */
2093 if (integer_zerop (len))
2094 {
2095 /* Evaluate and ignore the src argument in case it has
2096 side-effects. */
2097 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
2098 VOIDmode, EXPAND_NORMAL);
2099 /* Return the dst parameter. */
2100 return expand_expr (TREE_VALUE (arglist), target, mode,
2101 EXPAND_NORMAL);
2102 }
2103
2104 /* Now, we must be passed a constant src ptr parameter. */
2105 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
2106 return 0;
2107
2108 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
2109
2110 /* We're required to pad with trailing zeros if the requested
2111 len is greater than strlen(s2)+1. In that case try to
2112 use store_by_pieces; if it fails, punt. */
2113 if (tree_int_cst_lt (slen, len))
2114 {
2115 tree dest = TREE_VALUE (arglist);
2116 unsigned int dest_align
2117 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2118 const char *p = c_getstr (TREE_VALUE (TREE_CHAIN (arglist)));
2119 rtx dest_mem;
2120
2121 if (!p || dest_align == 0 || !host_integerp (len, 1)
2122 || !can_store_by_pieces (tree_low_cst (len, 1),
2123 builtin_strncpy_read_str,
2124 (PTR) p, dest_align))
2125 return 0;
2126
2127 dest_mem = get_memory_rtx (dest);
2128 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2129 builtin_strncpy_read_str,
2130 (PTR) p, dest_align);
2131 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2132 #ifdef POINTERS_EXTEND_UNSIGNED
2133 if (GET_MODE (dest_mem) != ptr_mode)
2134 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2135 #endif
2136 return dest_mem;
2137 }
2138
2139 /* OK, transform into builtin memcpy. */
2140 fn = built_in_decls[BUILT_IN_MEMCPY];
2141 if (!fn)
2142 return 0;
2143 return expand_expr (build_function_call_expr (fn, arglist),
2144 target, mode, EXPAND_NORMAL);
2145 }
2146 }
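
/* Editorial sketch, not part of the original source: with a constant
   length and a constant source (d being a hypothetical, suitably aligned
   char * destination):

     strncpy (d, "ab", 8)  =>  "ab", its NUL and five more zero bytes
                               are stored by pieces
     strncpy (d, "ab", 2)  =>  memcpy (d, "ab", 2)  */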
2147
2148 /* Callback routine for store_by_pieces. Return a target constant
2149 made up of GET_MODE_SIZE (MODE) copies of the single byte that
2150 DATA points to. */
2151
2152 static rtx
2153 builtin_memset_read_str (data, offset, mode)
2154 PTR data;
2155 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2156 enum machine_mode mode;
2157 {
2158 const char *c = (const char *) data;
2159 char *p = alloca (GET_MODE_SIZE (mode));
2160
2161 memset (p, *c, GET_MODE_SIZE (mode));
2162
2163 return c_readstr (p, mode);
2164 }
2165
2166 /* Callback routine for store_by_pieces. Return the RTL of a register
2167 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
2168 char value given in the RTL register DATA. For example, if MODE is
2169 4 bytes wide, return the RTL for 0x01010101*DATA. */
2170
2171 static rtx
2172 builtin_memset_gen_str (data, offset, mode)
2173 PTR data;
2174 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2175 enum machine_mode mode;
2176 {
2177 rtx target, coeff;
2178 size_t size;
2179 char *p;
2180
2181 size = GET_MODE_SIZE (mode);
2182 if (size == 1)
2183 return (rtx) data;
2184
2185 p = alloca (size);
2186 memset (p, 1, size);
2187 coeff = c_readstr (p, mode);
2188
2189 target = convert_to_mode (mode, (rtx) data, 1);
2190 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
2191 return force_reg (mode, target);
2192 }
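
/* Editorial worked example, not part of the original source: for a 4-byte
   MODE and a register holding the zero-extended byte value C, the code
   above computes C * 0x01010101, so C == 0xab yields 0xabababab -- four
   copies of the byte, ready to be stored a word at a time.  */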
2193
2194 /* Expand expression EXP, which is a call to the memset builtin. Return 0
2195 if we failed; the caller should emit a normal call. Otherwise try to get
2196 the result in TARGET, if convenient (and in mode MODE if that's
2197 convenient). */
2198
2199 static rtx
2200 expand_builtin_memset (exp, target, mode)
2201 tree exp;
2202 rtx target;
2203 enum machine_mode mode;
2204 {
2205 tree arglist = TREE_OPERAND (exp, 1);
2206
2207 if (!validate_arglist (arglist,
2208 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
2209 return 0;
2210 else
2211 {
2212 tree dest = TREE_VALUE (arglist);
2213 tree val = TREE_VALUE (TREE_CHAIN (arglist));
2214 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2215 char c;
2216
2217 unsigned int dest_align
2218 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2219 rtx dest_mem, dest_addr, len_rtx;
2220
2221 /* If DEST is not a pointer type, don't do this
2222 operation in-line. */
2223 if (dest_align == 0)
2224 return 0;
2225
2226 /* If the LEN parameter is zero, return DEST. */
2227 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2228 {
2229 /* Evaluate and ignore VAL in case it has side-effects. */
2230 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
2231 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2232 }
2233
2234 if (TREE_CODE (val) != INTEGER_CST)
2235 {
2236 rtx val_rtx;
2237
2238 if (!host_integerp (len, 1))
2239 return 0;
2240
2241 if (optimize_size && tree_low_cst (len, 1) > 1)
2242 return 0;
2243
2244 /* Assume that we can memset by pieces if we can store
2245 the coefficients by pieces (in the required modes).
2246 We can't pass builtin_memset_gen_str as that emits RTL. */
2247 c = 1;
2248 if (!can_store_by_pieces (tree_low_cst (len, 1),
2249 builtin_memset_read_str,
2250 (PTR) &c, dest_align))
2251 return 0;
2252
2253 val = fold (build1 (CONVERT_EXPR, unsigned_char_type_node, val));
2254 val_rtx = expand_expr (val, NULL_RTX, VOIDmode, 0);
2255 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
2256 val_rtx);
2257 dest_mem = get_memory_rtx (dest);
2258 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2259 builtin_memset_gen_str,
2260 (PTR) val_rtx, dest_align);
2261 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2262 #ifdef POINTERS_EXTEND_UNSIGNED
2263 if (GET_MODE (dest_mem) != ptr_mode)
2264 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2265 #endif
2266 return dest_mem;
2267 }
2268
2269 if (target_char_cast (val, &c))
2270 return 0;
2271
2272 if (c)
2273 {
2274 if (!host_integerp (len, 1))
2275 return 0;
2276 if (!can_store_by_pieces (tree_low_cst (len, 1),
2277 builtin_memset_read_str, (PTR) &c,
2278 dest_align))
2279 return 0;
2280
2281 dest_mem = get_memory_rtx (dest);
2282 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2283 builtin_memset_read_str,
2284 (PTR) &c, dest_align);
2285 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2286 #ifdef POINTERS_EXTEND_UNSIGNED
2287 if (GET_MODE (dest_mem) != ptr_mode)
2288 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2289 #endif
2290 return dest_mem;
2291 }
2292
2293 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2294
2295 dest_mem = get_memory_rtx (dest);
2296 set_mem_align (dest_mem, dest_align);
2297 dest_addr = clear_storage (dest_mem, len_rtx);
2298
2299 if (dest_addr == 0)
2300 {
2301 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2302 #ifdef POINTERS_EXTEND_UNSIGNED
2303 if (GET_MODE (dest_addr) != ptr_mode)
2304 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2305 #endif
2306 }
2307
2308 return dest_addr;
2309 }
2310 }
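
/* Editorial sketch, not part of the original source (d is a hypothetical,
   suitably aligned destination and c a non-constant byte value):

     memset (d, 0, n)     =>  clear_storage on the destination block
     memset (d, 'x', 16)  =>  the bytes are stored by pieces
     memset (d, c, 16)    =>  c is forced into a register, replicated with
                              the 0x01010101 trick above, stored by pieces  */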
2311
2312 /* Expand expression EXP, which is a call to the bzero builtin. Return 0
2313 if we failed; the caller should emit a normal call. */
2314
2315 static rtx
2316 expand_builtin_bzero (exp)
2317 tree exp;
2318 {
2319 tree arglist = TREE_OPERAND (exp, 1);
2320 tree dest, size, newarglist;
2321 rtx result;
2322
2323 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2324 return NULL_RTX;
2325
2326 dest = TREE_VALUE (arglist);
2327 size = TREE_VALUE (TREE_CHAIN (arglist));
2328
2329 /* New argument list transforming bzero(ptr x, int y) to
2330 memset(ptr x, int 0, size_t y). This is done this way
2331 so that if it isn't expanded inline, we fall back to
2332 calling bzero instead of memset. */
2333
2334 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
2335 newarglist = tree_cons (NULL_TREE, integer_zero_node, newarglist);
2336 newarglist = tree_cons (NULL_TREE, dest, newarglist);
2337
2338 TREE_OPERAND (exp, 1) = newarglist;
2339 result = expand_builtin_memset (exp, const0_rtx, VOIDmode);
2340
2341 /* Always restore the original arguments. */
2342 TREE_OPERAND (exp, 1) = arglist;
2343
2344 return result;
2345 }
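
/* Editorial sketch, not part of the original source: the rewrite above is

     bzero (p, n)  =>  memset (p, 0, (size_t) n)

   but only for the duration of the expansion; the original argument list
   is put back so that, if the memset expander punts, the caller still
   emits a plain bzero call.  P and N are hypothetical arguments.  */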
2346
2347 /* Expand expression EXP, which is a call to the memcmp or the strcmp builtin.
2348 ARGLIST is the argument list for this call. Return 0 if we failed and the
2349 caller should emit a normal call, otherwise try to get the result in
2350 TARGET, if convenient (and in mode MODE, if that's convenient). */
2351
2352 static rtx
2353 expand_builtin_memcmp (exp, arglist, target, mode)
2354 tree exp ATTRIBUTE_UNUSED;
2355 tree arglist;
2356 rtx target;
2357 enum machine_mode mode;
2358 {
2359 tree arg1, arg2, len;
2360 const char *p1, *p2;
2361
2362 if (!validate_arglist (arglist,
2363 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2364 return 0;
2365
2366 arg1 = TREE_VALUE (arglist);
2367 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2368 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2369
2370 /* If the len parameter is zero, return zero. */
2371 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2372 {
2373 /* Evaluate and ignore arg1 and arg2 in case they have
2374 side-effects. */
2375 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2376 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2377 return const0_rtx;
2378 }
2379
2380 p1 = c_getstr (arg1);
2381 p2 = c_getstr (arg2);
2382
2383 /* If all arguments are constant, and the value of len is not greater
2384 than the lengths of arg1 and arg2, evaluate at compile-time. */
2385 if (host_integerp (len, 1) && p1 && p2
2386 && compare_tree_int (len, strlen (p1) + 1) <= 0
2387 && compare_tree_int (len, strlen (p2) + 1) <= 0)
2388 {
2389 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
2390
2391 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
2392 }
2393
2394 /* If the len parameter is one, return an expression corresponding to
2395 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2396 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
2397 {
2398 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2399 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2400 tree ind1 =
2401 fold (build1 (CONVERT_EXPR, integer_type_node,
2402 build1 (INDIRECT_REF, cst_uchar_node,
2403 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2404 tree ind2 =
2405 fold (build1 (CONVERT_EXPR, integer_type_node,
2406 build1 (INDIRECT_REF, cst_uchar_node,
2407 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2408 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2409 return expand_expr (result, target, mode, EXPAND_NORMAL);
2410 }
2411
2412 #ifdef HAVE_cmpstrsi
2413 {
2414 rtx arg1_rtx, arg2_rtx, arg3_rtx;
2415 rtx result;
2416 rtx insn;
2417
2418 int arg1_align
2419 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2420 int arg2_align
2421 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2422 enum machine_mode insn_mode
2423 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
2424
2425 /* If we don't have POINTER_TYPE, call the function. */
2426 if (arg1_align == 0 || arg2_align == 0)
2427 return 0;
2428
2429 /* Make a place to write the result of the instruction. */
2430 result = target;
2431 if (! (result != 0
2432 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
2433 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
2434 result = gen_reg_rtx (insn_mode);
2435
2436 arg1_rtx = get_memory_rtx (arg1);
2437 arg2_rtx = get_memory_rtx (arg2);
2438 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2439 if (!HAVE_cmpstrsi)
2440 insn = NULL_RTX;
2441 else
2442 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
2443 GEN_INT (MIN (arg1_align, arg2_align)));
2444
2445 if (insn)
2446 emit_insn (insn);
2447 else
2448 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
2449 TYPE_MODE (integer_type_node), 3,
2450 XEXP (arg1_rtx, 0), Pmode,
2451 XEXP (arg2_rtx, 0), Pmode,
2452 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
2453 TREE_UNSIGNED (sizetype)),
2454 TYPE_MODE (sizetype));
2455
2456 /* Return the value in the proper mode for this function. */
2457 mode = TYPE_MODE (TREE_TYPE (exp));
2458 if (GET_MODE (result) == mode)
2459 return result;
2460 else if (target != 0)
2461 {
2462 convert_move (target, result, 0);
2463 return target;
2464 }
2465 else
2466 return convert_to_mode (mode, result, 0);
2467 }
2468 #endif
2469
2470 return 0;
2471 }
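
/* Editorial sketch, not part of the original source (p and q are
   hypothetical pointers):

     memcmp (p, q, 0)        =>  0
     memcmp ("ab", "ac", 2)  =>  -1            (folded at compile time)
     memcmp (p, q, 1)        =>  *(const unsigned char *) p
                                 - *(const unsigned char *) q

   Longer non-constant comparisons use the cmpstrsi pattern when the
   target provides one, and a memcmp library call otherwise.  */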
2472
2473 /* Expand expression EXP, which is a call to the strcmp builtin. Return 0
2474 if we failed; the caller should emit a normal call. Otherwise try to get
2475 the result in TARGET, if convenient. */
2476
2477 static rtx
2478 expand_builtin_strcmp (exp, target, mode)
2479 tree exp;
2480 rtx target;
2481 enum machine_mode mode;
2482 {
2483 tree arglist = TREE_OPERAND (exp, 1);
2484 tree arg1, arg2, len, len2, fn;
2485 const char *p1, *p2;
2486
2487 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2488 return 0;
2489
2490 arg1 = TREE_VALUE (arglist);
2491 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2492
2493 p1 = c_getstr (arg1);
2494 p2 = c_getstr (arg2);
2495
2496 if (p1 && p2)
2497 {
2498 const int i = strcmp (p1, p2);
2499 return (i < 0 ? constm1_rtx : (i > 0 ? const1_rtx : const0_rtx));
2500 }
2501
2502 /* If either arg is "", return an expression corresponding to
2503 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2504 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
2505 {
2506 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2507 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2508 tree ind1 =
2509 fold (build1 (CONVERT_EXPR, integer_type_node,
2510 build1 (INDIRECT_REF, cst_uchar_node,
2511 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2512 tree ind2 =
2513 fold (build1 (CONVERT_EXPR, integer_type_node,
2514 build1 (INDIRECT_REF, cst_uchar_node,
2515 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2516 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2517 return expand_expr (result, target, mode, EXPAND_NORMAL);
2518 }
2519
2520 len = c_strlen (arg1);
2521 len2 = c_strlen (arg2);
2522
2523 if (len)
2524 len = size_binop (PLUS_EXPR, ssize_int (1), len);
2525
2526 if (len2)
2527 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
2528
2529 /* If we don't have a constant length for the first, use the length
2530 of the second, if we know it. We don't require a constant for
2531 this case; some cost analysis could be done if both are available
2532 but neither is constant. For now, assume they're equally cheap
2533 unless one has side effects.
2534
2535 If both strings have constant lengths, use the smaller. This
2536 could arise if optimization results in strcpy being called with
2537 two fixed strings, or if the code was machine-generated. We should
2538 add some code to the `memcmp' handler below to deal with such
2539 situations, someday. */
2540
2541 if (!len || TREE_CODE (len) != INTEGER_CST)
2542 {
2543 if (len2 && !TREE_SIDE_EFFECTS (len2))
2544 len = len2;
2545 else if (len == 0)
2546 return 0;
2547 }
2548 else if (len2 && TREE_CODE (len2) == INTEGER_CST
2549 && tree_int_cst_lt (len2, len))
2550 len = len2;
2551
2552 /* If both arguments have side effects, we cannot optimize. */
2553 if (TREE_SIDE_EFFECTS (len))
2554 return 0;
2555
2556 fn = built_in_decls[BUILT_IN_MEMCMP];
2557 if (!fn)
2558 return 0;
2559
2560 chainon (arglist, build_tree_list (NULL_TREE, len));
2561 return expand_expr (build_function_call_expr (fn, arglist),
2562 target, mode, EXPAND_NORMAL);
2563 }
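
/* Editorial sketch, not part of the original source (s is a hypothetical
   char * with no side effects):

     strcmp ("ab", "ab")  =>  0                (folded at compile time)
     strcmp (s, "")       =>  *(const unsigned char *) s
                              - *(const unsigned char *) ""
     strcmp (s, "abc")    =>  memcmp (s, "abc", 4)  */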
2564
2565 /* Expand expression EXP, which is a call to the strncmp builtin. Return 0
2566 if we failed; the caller should emit a normal call. Otherwise try to get
2567 the result in TARGET, if convenient. */
2568
2569 static rtx
2570 expand_builtin_strncmp (exp, target, mode)
2571 tree exp;
2572 rtx target;
2573 enum machine_mode mode;
2574 {
2575 tree arglist = TREE_OPERAND (exp, 1);
2576 tree fn, newarglist, len = 0;
2577 tree arg1, arg2, arg3;
2578 const char *p1, *p2;
2579
2580 if (!validate_arglist (arglist,
2581 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2582 return 0;
2583
2584 arg1 = TREE_VALUE (arglist);
2585 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2586 arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2587
2588 /* If the len parameter is zero, return zero. */
2589 if (host_integerp (arg3, 1) && tree_low_cst (arg3, 1) == 0)
2590 {
2591 /* Evaluate and ignore arg1 and arg2 in case they have
2592 side-effects. */
2593 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2594 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2595 return const0_rtx;
2596 }
2597
2598 p1 = c_getstr (arg1);
2599 p2 = c_getstr (arg2);
2600
2601 /* If all arguments are constant, evaluate at compile-time. */
2602 if (host_integerp (arg3, 1) && p1 && p2)
2603 {
2604 const int r = strncmp (p1, p2, tree_low_cst (arg3, 1));
2605 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
2606 }
2607
2608 /* If len == 1 or (either string parameter is "" and (len >= 1)),
2609 return (*(const u_char*)arg1 - *(const u_char*)arg2). */
2610 if (host_integerp (arg3, 1)
2611 && (tree_low_cst (arg3, 1) == 1
2612 || (tree_low_cst (arg3, 1) > 1
2613 && ((p1 && *p1 == '\0') || (p2 && *p2 == '\0')))))
2614 {
2615 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2616 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2617 tree ind1 =
2618 fold (build1 (CONVERT_EXPR, integer_type_node,
2619 build1 (INDIRECT_REF, cst_uchar_node,
2620 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2621 tree ind2 =
2622 fold (build1 (CONVERT_EXPR, integer_type_node,
2623 build1 (INDIRECT_REF, cst_uchar_node,
2624 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2625 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2626 return expand_expr (result, target, mode, EXPAND_NORMAL);
2627 }
2628
2629 /* If c_strlen can determine an expression for one of the string
2630 lengths, and it doesn't have side effects, then call
2631 expand_builtin_memcmp() using length MIN(strlen(string)+1, arg3). */
2632
2633 /* Perhaps one of the strings is really constant; if so, prefer
2634 that constant length over the other string's length. */
2635 if (p1)
2636 len = c_strlen (arg1);
2637 else if (p2)
2638 len = c_strlen (arg2);
2639
2640 /* If we still don't have a len, try either string arg as long
2641 as they don't have side effects. */
2642 if (!len && !TREE_SIDE_EFFECTS (arg1))
2643 len = c_strlen (arg1);
2644 if (!len && !TREE_SIDE_EFFECTS (arg2))
2645 len = c_strlen (arg2);
2646 /* If we still don't have a length, punt. */
2647 if (!len)
2648 return 0;
2649
2650 fn = built_in_decls[BUILT_IN_MEMCMP];
2651 if (!fn)
2652 return 0;
2653
2654 /* Add one to the string length. */
2655 len = fold (size_binop (PLUS_EXPR, len, ssize_int (1)));
2656
2657 /* The actual new length parameter is MIN(len,arg3). */
2658 len = fold (build (MIN_EXPR, TREE_TYPE (len), len, arg3));
2659
2660 newarglist = build_tree_list (NULL_TREE, len);
2661 newarglist = tree_cons (NULL_TREE, arg2, newarglist);
2662 newarglist = tree_cons (NULL_TREE, arg1, newarglist);
2663 return expand_expr (build_function_call_expr (fn, newarglist),
2664 target, mode, EXPAND_NORMAL);
2665 }
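
/* Editorial sketch, not part of the original source (s and t are
   hypothetical char * arguments without side effects):

     strncmp (s, t, 0)        =>  0
     strncmp ("ab", "ac", 1)  =>  0            (folded at compile time)
     strncmp (s, "abc", 10)   =>  memcmp (s, "abc", MIN (3 + 1, 10))  */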
2666
2667 /* Expand a call to the strcat builtin, with arguments in ARGLIST.
2668 Return 0 if we failed; the caller should emit a normal call.
2669 Otherwise try to get the result in TARGET, if convenient. */
2670
2671 static rtx
2672 expand_builtin_strcat (arglist, target, mode)
2673 tree arglist;
2674 rtx target;
2675 enum machine_mode mode;
2676 {
2677 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2678 return 0;
2679 else
2680 {
2681 tree dst = TREE_VALUE (arglist),
2682 src = TREE_VALUE (TREE_CHAIN (arglist));
2683 const char *p = c_getstr (src);
2684
2685 /* If the string length is zero, return the dst parameter. */
2686 if (p && *p == '\0')
2687 return expand_expr (dst, target, mode, EXPAND_NORMAL);
2688
2689 return 0;
2690 }
2691 }
2692
2693 /* Expand a call to the strncat builtin, with arguments in ARGLIST.
2694 Return 0 if we failed; the caller should emit a normal call.
2695 Otherwise try to get the result in TARGET, if convenient. */
2696
2697 static rtx
2698 expand_builtin_strncat (arglist, target, mode)
2699 tree arglist;
2700 rtx target;
2701 enum machine_mode mode;
2702 {
2703 if (!validate_arglist (arglist,
2704 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2705 return 0;
2706 else
2707 {
2708 tree dst = TREE_VALUE (arglist),
2709 src = TREE_VALUE (TREE_CHAIN (arglist)),
2710 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2711 const char *p = c_getstr (src);
2712
2713 /* If the requested length is zero, or the src parameter string
2714 length is zero, return the dst parameter. */
2715 if (integer_zerop (len) || (p && *p == '\0'))
2716 {
2717 /* Evaluate and ignore the src and len parameters in case
2718 they have side-effects. */
2719 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2720 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
2721 return expand_expr (dst, target, mode, EXPAND_NORMAL);
2722 }
2723
2724 /* If the requested len is greater than or equal to the string
2725 length, call strcat. */
2726 if (TREE_CODE (len) == INTEGER_CST && p
2727 && compare_tree_int (len, strlen (p)) >= 0)
2728 {
2729 tree newarglist
2730 = tree_cons (NULL_TREE, dst, build_tree_list (NULL_TREE, src));
2731 tree fn = built_in_decls[BUILT_IN_STRCAT];
2732
2733 /* If the replacement _DECL isn't initialized, don't do the
2734 transformation. */
2735 if (!fn)
2736 return 0;
2737
2738 return expand_expr (build_function_call_expr (fn, newarglist),
2739 target, mode, EXPAND_NORMAL);
2740 }
2741 return 0;
2742 }
2743 }
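
/* Editorial sketch, not part of the original source (d is a hypothetical
   char * destination):

     strncat (d, s, 0)      =>  d    (s still evaluated for side effects)
     strncat (d, "", n)     =>  d
     strncat (d, "abc", 8)  =>  strcat (d, "abc")

   The last rewrite is valid because the bound 8 is no smaller than
   strlen ("abc").  */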
2744
2745 /* Expand a call to the strspn builtin, with arguments in ARGLIST.
2746 Return 0 if we failed; the caller should emit a normal call.
2747 Otherwise try to get the result in TARGET, if convenient. */
2748
2749 static rtx
2750 expand_builtin_strspn (arglist, target, mode)
2751 tree arglist;
2752 rtx target;
2753 enum machine_mode mode;
2754 {
2755 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2756 return 0;
2757 else
2758 {
2759 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2760 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
2761
2762 /* If both arguments are constants, evaluate at compile-time. */
2763 if (p1 && p2)
2764 {
2765 const size_t r = strspn (p1, p2);
2766 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
2767 }
2768
2769 /* If either argument is "", return 0. */
2770 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
2771 {
2772 /* Evaluate and ignore both arguments in case either one has
2773 side-effects. */
2774 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2775 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2776 return const0_rtx;
2777 }
2778 return 0;
2779 }
2780 }
2781
2782 /* Expand a call to the strcspn builtin, with arguments in ARGLIST.
2783 Return 0 if we failed; the caller should emit a normal call.
2784 Otherwise try to get the result in TARGET, if convenient. */
2785
2786 static rtx
2787 expand_builtin_strcspn (arglist, target, mode)
2788 tree arglist;
2789 rtx target;
2790 enum machine_mode mode;
2791 {
2792 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2793 return 0;
2794 else
2795 {
2796 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2797 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
2798
2799 /* If both arguments are constants, evaluate at compile-time. */
2800 if (p1 && p2)
2801 {
2802 const size_t r = strcspn (p1, p2);
2803 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
2804 }
2805
2806 /* If the first argument is "", return 0. */
2807 if (p1 && *p1 == '\0')
2808 {
2809 /* Evaluate and ignore argument s2 in case it has
2810 side-effects. */
2811 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2812 return const0_rtx;
2813 }
2814
2815 /* If the second argument is "", return __builtin_strlen(s1). */
2816 if (p2 && *p2 == '\0')
2817 {
2818 tree newarglist = build_tree_list (NULL_TREE, s1),
2819 fn = built_in_decls[BUILT_IN_STRLEN];
2820
2821 /* If the replacement _DECL isn't initialized, don't do the
2822 transformation. */
2823 if (!fn)
2824 return 0;
2825
2826 return expand_expr (build_function_call_expr (fn, newarglist),
2827 target, mode, EXPAND_NORMAL);
2828 }
2829 return 0;
2830 }
2831 }
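
/* Editorial sketch, not part of the original source (s is a hypothetical
   char * argument):

     strcspn ("abcd", "cx")  =>  2             (folded at compile time)
     strcspn ("", s)         =>  0
     strcspn (s, "")         =>  strlen (s)  */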
2832
2833 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
2834 if that's convenient. */
2835
2836 rtx
2837 expand_builtin_saveregs ()
2838 {
2839 rtx val, seq;
2840
2841 /* Don't do __builtin_saveregs more than once in a function.
2842 Save the result of the first call and reuse it. */
2843 if (saveregs_value != 0)
2844 return saveregs_value;
2845
2846 /* When this function is called, it means that registers must be
2847 saved on entry to this function. So we migrate the call to the
2848 first insn of this function. */
2849
2850 start_sequence ();
2851
2852 #ifdef EXPAND_BUILTIN_SAVEREGS
2853 /* Do whatever the machine needs done in this case. */
2854 val = EXPAND_BUILTIN_SAVEREGS ();
2855 #else
2856 /* ??? We used to try to build up a call to the out-of-line function,
2857 guessing about what registers needed saving etc. This became much
2858 harder with __builtin_va_start, since we don't have a tree for a
2859 call to __builtin_saveregs to fall back on. There was exactly one
2860 port (i860) that used this code, and I'm unconvinced it could actually
2861 handle the general case. So we no longer try to handle anything
2862 weird and make the backend absorb the evil. */
2863
2864 error ("__builtin_saveregs not supported by this target");
2865 val = const0_rtx;
2866 #endif
2867
2868 seq = get_insns ();
2869 end_sequence ();
2870
2871 saveregs_value = val;
2872
2873 /* Put the insns after the NOTE that starts the function. If this
2874 is inside a start_sequence, make the outer-level insn chain current, so
2875 the code is placed at the start of the function. */
2876 push_topmost_sequence ();
2877 emit_insn_after (seq, get_insns ());
2878 pop_topmost_sequence ();
2879
2880 return val;
2881 }
2882
2883 /* __builtin_args_info (N) returns word N of the arg space info
2884 for the current function. The number and meanings of words
2885 are controlled by the definition of CUMULATIVE_ARGS. */
2886
2887 static rtx
2888 expand_builtin_args_info (exp)
2889 tree exp;
2890 {
2891 tree arglist = TREE_OPERAND (exp, 1);
2892 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
2893 int *word_ptr = (int *) &current_function_args_info;
2894 #if 0
2895 /* These are used by the code below that is #if 0'ed away. */
2896 int i;
2897 tree type, elts, result;
2898 #endif
2899
2900 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
2901 abort ();
2902
2903 if (arglist != 0)
2904 {
2905 if (!host_integerp (TREE_VALUE (arglist), 0))
2906 error ("argument of `__builtin_args_info' must be constant");
2907 else
2908 {
2909 HOST_WIDE_INT wordnum = tree_low_cst (TREE_VALUE (arglist), 0);
2910
2911 if (wordnum < 0 || wordnum >= nwords)
2912 error ("argument of `__builtin_args_info' out of range");
2913 else
2914 return GEN_INT (word_ptr[wordnum]);
2915 }
2916 }
2917 else
2918 error ("missing argument in `__builtin_args_info'");
2919
2920 return const0_rtx;
2921
2922 #if 0
2923 for (i = 0; i < nwords; i++)
2924 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
2925
2926 type = build_array_type (integer_type_node,
2927 build_index_type (build_int_2 (nwords, 0)));
2928 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
2929 TREE_CONSTANT (result) = 1;
2930 TREE_STATIC (result) = 1;
2931 result = build1 (INDIRECT_REF, build_pointer_type (type), result);
2932 TREE_CONSTANT (result) = 1;
2933 return expand_expr (result, NULL_RTX, VOIDmode, 0);
2934 #endif
2935 }
2936
2937 /* Expand ARGLIST, from a call to __builtin_next_arg. */
2938
2939 static rtx
2940 expand_builtin_next_arg (arglist)
2941 tree arglist;
2942 {
2943 tree fntype = TREE_TYPE (current_function_decl);
2944
2945 if (TYPE_ARG_TYPES (fntype) == 0
2946 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2947 == void_type_node))
2948 {
2949 error ("`va_start' used in function with fixed args");
2950 return const0_rtx;
2951 }
2952
2953 if (arglist)
2954 {
2955 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
2956 tree arg = TREE_VALUE (arglist);
2957
2958 /* Strip off all nops for the sake of the comparison. This
2959 is not quite the same as STRIP_NOPS. It does more.
2960 We must also strip off INDIRECT_REF for C++ reference
2961 parameters. */
2962 while (TREE_CODE (arg) == NOP_EXPR
2963 || TREE_CODE (arg) == CONVERT_EXPR
2964 || TREE_CODE (arg) == NON_LVALUE_EXPR
2965 || TREE_CODE (arg) == INDIRECT_REF)
2966 arg = TREE_OPERAND (arg, 0);
2967 if (arg != last_parm)
2968 warning ("second parameter of `va_start' not last named argument");
2969 }
2970 else
2971 /* Evidently an out of date version of <stdarg.h>; can't validate
2972 va_start's second argument, but can still work as intended. */
2973 warning ("`__builtin_next_arg' called without an argument");
2974
2975 return expand_binop (Pmode, add_optab,
2976 current_function_internal_arg_pointer,
2977 current_function_arg_offset_rtx,
2978 NULL_RTX, 0, OPTAB_LIB_WIDEN);
2979 }
2980
2981 /* Make it easier for the backends by protecting the valist argument
2982 from multiple evaluations. */
2983
2984 static tree
2985 stabilize_va_list (valist, needs_lvalue)
2986 tree valist;
2987 int needs_lvalue;
2988 {
2989 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
2990 {
2991 if (TREE_SIDE_EFFECTS (valist))
2992 valist = save_expr (valist);
2993
2994 /* For this case, the backends will be expecting a pointer to
2995 TREE_TYPE (va_list_type_node), but it's possible we've
2996 actually been given an array (an actual va_list_type_node).
2997 So fix it. */
2998 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
2999 {
3000 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
3001 tree p2 = build_pointer_type (va_list_type_node);
3002
3003 valist = build1 (ADDR_EXPR, p2, valist);
3004 valist = fold (build1 (NOP_EXPR, p1, valist));
3005 }
3006 }
3007 else
3008 {
3009 tree pt;
3010
3011 if (! needs_lvalue)
3012 {
3013 if (! TREE_SIDE_EFFECTS (valist))
3014 return valist;
3015
3016 pt = build_pointer_type (va_list_type_node);
3017 valist = fold (build1 (ADDR_EXPR, pt, valist));
3018 TREE_SIDE_EFFECTS (valist) = 1;
3019 }
3020
3021 if (TREE_SIDE_EFFECTS (valist))
3022 valist = save_expr (valist);
3023 valist = fold (build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)),
3024 valist));
3025 }
3026
3027 return valist;
3028 }
3029
3030 /* The "standard" implementation of va_start: just assign `nextarg' to
3031 the variable. */
3032
3033 void
3034 std_expand_builtin_va_start (valist, nextarg)
3035 tree valist;
3036 rtx nextarg;
3037 {
3038 tree t;
3039
3040 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3041 make_tree (ptr_type_node, nextarg));
3042 TREE_SIDE_EFFECTS (t) = 1;
3043
3044 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3045 }
3046
3047 /* Expand ARGLIST, from a call to __builtin_va_start. */
3048
3049 static rtx
3050 expand_builtin_va_start (arglist)
3051 tree arglist;
3052 {
3053 rtx nextarg;
3054 tree chain, valist;
3055
3056 chain = TREE_CHAIN (arglist);
3057
3058 if (TREE_CHAIN (chain))
3059 error ("too many arguments to function `va_start'");
3060
3061 nextarg = expand_builtin_next_arg (chain);
3062 valist = stabilize_va_list (TREE_VALUE (arglist), 1);
3063
3064 #ifdef EXPAND_BUILTIN_VA_START
3065 EXPAND_BUILTIN_VA_START (valist, nextarg);
3066 #else
3067 std_expand_builtin_va_start (valist, nextarg);
3068 #endif
3069
3070 return const0_rtx;
3071 }
3072
3073 /* The "standard" implementation of va_arg: read the value from the
3074 current (padded) address and increment by the (padded) size. */
3075
3076 rtx
3077 std_expand_builtin_va_arg (valist, type)
3078 tree valist, type;
3079 {
3080 tree addr_tree, t, type_size = NULL;
3081 tree align, alignm1;
3082 tree rounded_size;
3083 rtx addr;
3084
3085 /* Compute the rounded size of the type. */
3086 align = size_int (PARM_BOUNDARY / BITS_PER_UNIT);
3087 alignm1 = size_int (PARM_BOUNDARY / BITS_PER_UNIT - 1);
3088 if (type == error_mark_node
3089 || (type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type))) == NULL
3090 || TREE_OVERFLOW (type_size))
3091 rounded_size = size_zero_node;
3092 else
3093 rounded_size = fold (build (MULT_EXPR, sizetype,
3094 fold (build (TRUNC_DIV_EXPR, sizetype,
3095 fold (build (PLUS_EXPR, sizetype,
3096 type_size, alignm1)),
3097 align)),
3098 align));
3099
3100 /* Get AP. */
3101 addr_tree = valist;
3102 if (PAD_VARARGS_DOWN && ! integer_zerop (rounded_size))
3103 {
3104 /* Small args are padded downward. */
3105 addr_tree = fold (build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
3106 fold (build (COND_EXPR, sizetype,
3107 fold (build (GT_EXPR, sizetype,
3108 rounded_size,
3109 align)),
3110 size_zero_node,
3111 fold (build (MINUS_EXPR, sizetype,
3112 rounded_size,
3113 type_size))))));
3114 }
3115
3116 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3117 addr = copy_to_reg (addr);
3118
3119 /* Compute new value for AP. */
3120 if (! integer_zerop (rounded_size))
3121 {
3122 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3123 build (PLUS_EXPR, TREE_TYPE (valist), valist,
3124 rounded_size));
3125 TREE_SIDE_EFFECTS (t) = 1;
3126 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3127 }
3128
3129 return addr;
3130 }
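
/* Editorial worked example, not part of the original source: with a
   PARM_BOUNDARY of 32 bits the alignment unit is 4 bytes, so a 6-byte
   type is rounded as

     rounded_size = ((6 + 3) / 4) * 4 = 8

   and the argument pointer is advanced by 8 bytes; if PAD_VARARGS_DOWN,
   the value itself is read from ap + (8 - 6).  */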
3131
3132 /* Expand __builtin_va_arg, which is not really a builtin function, but
3133 a very special sort of operator. */
3134
3135 rtx
3136 expand_builtin_va_arg (valist, type)
3137 tree valist, type;
3138 {
3139 rtx addr, result;
3140 tree promoted_type, want_va_type, have_va_type;
3141
3142 /* Verify that valist is of the proper type. */
3143
3144 want_va_type = va_list_type_node;
3145 have_va_type = TREE_TYPE (valist);
3146 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
3147 {
3148 /* If va_list is an array type, the argument may have decayed
3149 to a pointer type, e.g. by being passed to another function.
3150 In that case, unwrap both types so that we can compare the
3151 underlying records. */
3152 if (TREE_CODE (have_va_type) == ARRAY_TYPE
3153 || TREE_CODE (have_va_type) == POINTER_TYPE)
3154 {
3155 want_va_type = TREE_TYPE (want_va_type);
3156 have_va_type = TREE_TYPE (have_va_type);
3157 }
3158 }
3159 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
3160 {
3161 error ("first argument to `va_arg' not of type `va_list'");
3162 addr = const0_rtx;
3163 }
3164
3165 /* Generate a diagnostic for requesting data of a type that cannot
3166 be passed through `...' due to type promotion at the call site. */
3167 else if ((promoted_type = (*lang_hooks.types.type_promotes_to) (type))
3168 != type)
3169 {
3170 const char *name = "<anonymous type>", *pname = 0;
3171 static bool gave_help;
3172
3173 if (TYPE_NAME (type))
3174 {
3175 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE)
3176 name = IDENTIFIER_POINTER (TYPE_NAME (type));
3177 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
3178 && DECL_NAME (TYPE_NAME (type)))
3179 name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
3180 }
3181 if (TYPE_NAME (promoted_type))
3182 {
3183 if (TREE_CODE (TYPE_NAME (promoted_type)) == IDENTIFIER_NODE)
3184 pname = IDENTIFIER_POINTER (TYPE_NAME (promoted_type));
3185 else if (TREE_CODE (TYPE_NAME (promoted_type)) == TYPE_DECL
3186 && DECL_NAME (TYPE_NAME (promoted_type)))
3187 pname = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (promoted_type)));
3188 }
3189
3190 /* Unfortunately, this is merely undefined, rather than a constraint
3191 violation, so we cannot make this an error. If this call is never
3192 executed, the program is still strictly conforming. */
3193 warning ("`%s' is promoted to `%s' when passed through `...'",
3194 name, pname);
3195 if (! gave_help)
3196 {
3197 gave_help = true;
3198 warning ("(so you should pass `%s' not `%s' to `va_arg')",
3199 pname, name);
3200 }
3201
3202 /* We can, however, treat "undefined" any way we please.
3203 Call abort to encourage the user to fix the program. */
3204 expand_builtin_trap ();
3205
3206 /* This is dead code, but go ahead and finish so that the
3207 mode of the result comes out right. */
3208 addr = const0_rtx;
3209 }
3210 else
3211 {
3212 /* Make it easier for the backends by protecting the valist argument
3213 from multiple evaluations. */
3214 valist = stabilize_va_list (valist, 0);
3215
3216 #ifdef EXPAND_BUILTIN_VA_ARG
3217 addr = EXPAND_BUILTIN_VA_ARG (valist, type);
3218 #else
3219 addr = std_expand_builtin_va_arg (valist, type);
3220 #endif
3221 }
3222
3223 #ifdef POINTERS_EXTEND_UNSIGNED
3224 if (GET_MODE (addr) != Pmode)
3225 addr = convert_memory_address (Pmode, addr);
3226 #endif
3227
3228 result = gen_rtx_MEM (TYPE_MODE (type), addr);
3229 set_mem_alias_set (result, get_varargs_alias_set ());
3230
3231 return result;
3232 }
3233
3234 /* Expand ARGLIST, from a call to __builtin_va_end. */
3235
3236 static rtx
3237 expand_builtin_va_end (arglist)
3238 tree arglist;
3239 {
3240 tree valist = TREE_VALUE (arglist);
3241
3242 #ifdef EXPAND_BUILTIN_VA_END
3243 valist = stabilize_va_list (valist, 0);
3244 EXPAND_BUILTIN_VA_END (arglist);
3245 #else
3246 /* Evaluate for side effects, if needed. I hate macros that don't
3247 do that. */
3248 if (TREE_SIDE_EFFECTS (valist))
3249 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
3250 #endif
3251
3252 return const0_rtx;
3253 }
3254
3255 /* Expand ARGLIST, from a call to __builtin_va_copy. We do this as a
3256 builtin rather than just as an assignment in stdarg.h because of the
3257 nastiness of array-type va_list types. */
3258
3259 static rtx
3260 expand_builtin_va_copy (arglist)
3261 tree arglist;
3262 {
3263 tree dst, src, t;
3264
3265 dst = TREE_VALUE (arglist);
3266 src = TREE_VALUE (TREE_CHAIN (arglist));
3267
3268 dst = stabilize_va_list (dst, 1);
3269 src = stabilize_va_list (src, 0);
3270
3271 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
3272 {
3273 t = build (MODIFY_EXPR, va_list_type_node, dst, src);
3274 TREE_SIDE_EFFECTS (t) = 1;
3275 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3276 }
3277 else
3278 {
3279 rtx dstb, srcb, size;
3280
3281 /* Evaluate to pointers. */
3282 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
3283 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
3284 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
3285 VOIDmode, EXPAND_NORMAL);
3286
3287 #ifdef POINTERS_EXTEND_UNSIGNED
3288 if (GET_MODE (dstb) != Pmode)
3289 dstb = convert_memory_address (Pmode, dstb);
3290
3291 if (GET_MODE (srcb) != Pmode)
3292 srcb = convert_memory_address (Pmode, srcb);
3293 #endif
3294
3295 /* "Dereference" to BLKmode memories. */
3296 dstb = gen_rtx_MEM (BLKmode, dstb);
3297 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
3298 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
3299 srcb = gen_rtx_MEM (BLKmode, srcb);
3300 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
3301 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
3302
3303 /* Copy. */
3304 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
3305 }
3306
3307 return const0_rtx;
3308 }
3309
3310 /* Expand a call to one of the builtin functions __builtin_frame_address or
3311 __builtin_return_address. */
3312
3313 static rtx
3314 expand_builtin_frame_address (exp)
3315 tree exp;
3316 {
3317 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3318 tree arglist = TREE_OPERAND (exp, 1);
3319
3320 /* The argument must be a nonnegative integer constant.
3321 It counts the number of frames to scan up the stack.
3322 The value is the frame address or return address of that frame. */
3323 if (arglist == 0)
3324 /* Warning about missing arg was already issued. */
3325 return const0_rtx;
3326 else if (! host_integerp (TREE_VALUE (arglist), 1))
3327 {
3328 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3329 error ("invalid arg to `__builtin_frame_address'");
3330 else
3331 error ("invalid arg to `__builtin_return_address'");
3332 return const0_rtx;
3333 }
3334 else
3335 {
3336 rtx tem
3337 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
3338 tree_low_cst (TREE_VALUE (arglist), 1),
3339 hard_frame_pointer_rtx);
3340
3341 /* Some ports cannot access arbitrary stack frames. */
3342 if (tem == NULL)
3343 {
3344 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3345 warning ("unsupported arg to `__builtin_frame_address'");
3346 else
3347 warning ("unsupported arg to `__builtin_return_address'");
3348 return const0_rtx;
3349 }
3350
3351 /* For __builtin_frame_address, return what we've got. */
3352 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3353 return tem;
3354
3355 if (GET_CODE (tem) != REG
3356 && ! CONSTANT_P (tem))
3357 tem = copy_to_mode_reg (Pmode, tem);
3358 return tem;
3359 }
3360 }
3361
3362 /* Expand a call to the alloca builtin, with arguments ARGLIST. Return 0 if
3363 we failed and the caller should emit a normal call, otherwise try to get
3364 the result in TARGET, if convenient. */
3365
3366 static rtx
3367 expand_builtin_alloca (arglist, target)
3368 tree arglist;
3369 rtx target;
3370 {
3371 rtx op0;
3372 rtx result;
3373
3374 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
3375 return 0;
3376
3377 /* Compute the argument. */
3378 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
3379
3380 /* Allocate the desired space. */
3381 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
3382
3383 #ifdef POINTERS_EXTEND_UNSIGNED
3384 if (GET_MODE (result) != ptr_mode)
3385 result = convert_memory_address (ptr_mode, result);
3386 #endif
3387
3388 return result;
3389 }
3390
3391 /* Expand a call to the ffs builtin. The arguments are in ARGLIST.
3392 Return 0 if a normal call should be emitted rather than expanding the
3393 function in-line. If convenient, the result should be placed in TARGET.
3394 SUBTARGET may be used as the target for computing one of EXP's operands. */
3395
3396 static rtx
3397 expand_builtin_ffs (arglist, target, subtarget)
3398 tree arglist;
3399 rtx target, subtarget;
3400 {
3401 rtx op0;
3402 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
3403 return 0;
3404
3405 /* Compute the argument. */
3406 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
3407 /* Compute ffs, into TARGET if possible.
3408 Set TARGET to wherever the result comes back. */
3409 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
3410 ffs_optab, op0, target, 1);
3411 if (target == 0)
3412 abort ();
3413 return target;
3414 }
3415
3416 /* If the string passed to fputs is a constant, attempt to transform
3417 the call into __builtin_fputc() or __builtin_fwrite(). */
3418
3419 static rtx
3420 expand_builtin_fputs (arglist, ignore, unlocked)
3421 tree arglist;
3422 int ignore;
3423 int unlocked;
3424 {
3425 tree len, fn;
3426 tree fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
3427 : built_in_decls[BUILT_IN_FPUTC];
3428 tree fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
3429 : built_in_decls[BUILT_IN_FWRITE];
3430
3431 /* If the return value is used, or the replacement _DECL isn't
3432 initialized, don't do the transformation. */
3433 if (!ignore || !fn_fputc || !fn_fwrite)
3434 return 0;
3435
3436 /* Verify the arguments in the original call. */
3437 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3438 return 0;
3439
3440 /* Get the length of the string passed to fputs. If the length
3441 can't be determined, punt. */
3442 if (!(len = c_strlen (TREE_VALUE (arglist)))
3443 || TREE_CODE (len) != INTEGER_CST)
3444 return 0;
3445
3446 switch (compare_tree_int (len, 1))
3447 {
3448 case -1: /* length is 0, delete the call entirely. */
3449 {
3450 /* Evaluate and ignore the argument in case it has
3451 side-effects. */
3452 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
3453 VOIDmode, EXPAND_NORMAL);
3454 return const0_rtx;
3455 }
3456 case 0: /* length is 1, call fputc. */
3457 {
3458 const char *p = c_getstr (TREE_VALUE (arglist));
3459
3460 if (p != NULL)
3461 {
3462 /* New argument list transforming fputs(string, stream) to
3463 fputc(string[0], stream). */
3464 arglist =
3465 build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
3466 arglist =
3467 tree_cons (NULL_TREE, build_int_2 (p[0], 0), arglist);
3468 fn = fn_fputc;
3469 break;
3470 }
3471 }
3472 /* FALLTHROUGH */
3473 case 1: /* length is greater than 1, call fwrite. */
3474 {
3475 tree string_arg = TREE_VALUE (arglist);
3476
3477 /* New argument list transforming fputs(string, stream) to
3478 fwrite(string, 1, len, stream). */
3479 arglist = build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
3480 arglist = tree_cons (NULL_TREE, len, arglist);
3481 arglist = tree_cons (NULL_TREE, size_one_node, arglist);
3482 arglist = tree_cons (NULL_TREE, string_arg, arglist);
3483 fn = fn_fwrite;
3484 break;
3485 }
3486 default:
3487 abort ();
3488 }
3489
3490 return expand_expr (build_function_call_expr (fn, arglist),
3491 (ignore ? const0_rtx : NULL_RTX),
3492 VOIDmode, EXPAND_NORMAL);
3493 }
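
/* Editorial sketch, not part of the original source (f is a hypothetical
   FILE * stream; the rewrites only happen when the call's value is
   unused):

     fputs ("", f)       =>  call removed, f still evaluated
     fputs ("\n", f)     =>  fputc ('\n', f)
     fputs ("hello", f)  =>  fwrite ("hello", 1, 5, f)  */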
3494
3495 /* Expand a call to __builtin_expect. We return our argument and emit a
3496 NOTE_INSN_EXPECTED_VALUE note. This is the expansion of __builtin_expect in
3497 a non-jump context. */
3498
3499 static rtx
3500 expand_builtin_expect (arglist, target)
3501 tree arglist;
3502 rtx target;
3503 {
3504 tree exp, c;
3505 rtx note, rtx_c;
3506
3507 if (arglist == NULL_TREE
3508 || TREE_CHAIN (arglist) == NULL_TREE)
3509 return const0_rtx;
3510 exp = TREE_VALUE (arglist);
3511 c = TREE_VALUE (TREE_CHAIN (arglist));
3512
3513 if (TREE_CODE (c) != INTEGER_CST)
3514 {
3515 error ("second arg to `__builtin_expect' must be a constant");
3516 c = integer_zero_node;
3517 }
3518
3519 target = expand_expr (exp, target, VOIDmode, EXPAND_NORMAL);
3520
3521 /* Don't bother with expected value notes for integral constants. */
3522 if (GET_CODE (target) != CONST_INT)
3523 {
3524 /* We do need to force this into a register so that we can be
3525 moderately sure to be able to correctly interpret the branch
3526 condition later. */
3527 target = force_reg (GET_MODE (target), target);
3528
3529 rtx_c = expand_expr (c, NULL_RTX, GET_MODE (target), EXPAND_NORMAL);
3530
3531 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
3532 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, target, rtx_c);
3533 }
3534
3535 return target;
3536 }
3537
3538 /* Like expand_builtin_expect, except do this in a jump context. This is
3539 called from do_jump if the conditional is a __builtin_expect. Return either
3540 a list of insns to emit the jump or NULL if we cannot optimize
3541 __builtin_expect. We need to optimize this at jump time so that machines
3542 like the PowerPC don't turn the test into a SCC operation, and then jump
3543 based on the test being 0/1. */
3544
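/* Illustrative example (not part of the original source), with
   handle_rare_case standing in for arbitrary user code:

     if (__builtin_expect (p == NULL, 0))
       handle_rare_case ();

   do_jump expands the comparison normally; this routine then tags each
   resulting conditional jump with a PRED_BUILTIN_EXPECT prediction so
   that the path into handle_rare_case is treated as unlikely.  */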
3545 rtx
3546 expand_builtin_expect_jump (exp, if_false_label, if_true_label)
3547 tree exp;
3548 rtx if_false_label;
3549 rtx if_true_label;
3550 {
3551 tree arglist = TREE_OPERAND (exp, 1);
3552 tree arg0 = TREE_VALUE (arglist);
3553 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3554 rtx ret = NULL_RTX;
3555
3556 /* Only handle __builtin_expect (test, 0) and
3557 __builtin_expect (test, 1). */
3558 if (TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
3559 && (integer_zerop (arg1) || integer_onep (arg1)))
3560 {
3561 int num_jumps = 0;
3562 rtx insn;
3563
3564 /* If we fail to locate an appropriate conditional jump, we'll
3565 fall back to normal evaluation. Ensure that the expression
3566 can be re-evaluated. */
3567 switch (unsafe_for_reeval (arg0))
3568 {
3569 case 0: /* Safe. */
3570 break;
3571
3572 case 1: /* Mildly unsafe. */
3573 arg0 = unsave_expr (arg0);
3574 break;
3575
3576 case 2: /* Wildly unsafe. */
3577 return NULL_RTX;
3578 }
3579
3580 /* Expand the jump insns. */
3581 start_sequence ();
3582 do_jump (arg0, if_false_label, if_true_label);
3583 ret = get_insns ();
3584 end_sequence ();
3585
3586 /* Now that the __builtin_expect has been validated, go through and attach
3587 the expected-value predictions to each of the conditional jumps. If we
3588 run into an error, just give up and generate the 'safe' code of doing a
3589 SCC operation and then branching on that. */
3590 insn = ret;
3591 while (insn != NULL_RTX)
3592 {
3593 rtx next = NEXT_INSN (insn);
3594 rtx pattern;
3595
3596 if (GET_CODE (insn) == JUMP_INSN && any_condjump_p (insn)
3597 && (pattern = pc_set (insn)) != NULL_RTX)
3598 {
3599 rtx ifelse = SET_SRC (pattern);
3600 rtx label;
3601 int taken;
3602
3603 if (GET_CODE (ifelse) != IF_THEN_ELSE)
3604 goto do_next_insn;
3605
3606 if (GET_CODE (XEXP (ifelse, 1)) == LABEL_REF)
3607 {
3608 taken = 1;
3609 label = XEXP (XEXP (ifelse, 1), 0);
3610 }
3611 /* An inverted jump reverses the probabilities. */
3612 else if (GET_CODE (XEXP (ifelse, 2)) == LABEL_REF)
3613 {
3614 taken = 0;
3615 label = XEXP (XEXP (ifelse, 2), 0);
3616 }
3617 /* We shouldn't have to worry about conditional returns during
3618 the expansion stage, but handle it gracefully anyway. */
3619 else if (GET_CODE (XEXP (ifelse, 1)) == RETURN)
3620 {
3621 taken = 1;
3622 label = NULL_RTX;
3623 }
3624 /* An inverted return reverses the probabilities. */
3625 else if (GET_CODE (XEXP (ifelse, 2)) == RETURN)
3626 {
3627 taken = 0;
3628 label = NULL_RTX;
3629 }
3630 else
3631 goto do_next_insn;
3632
3633 /* If the test is expected to fail, reverse the
3634 probabilities. */
3635 if (integer_zerop (arg1))
3636 taken = 1 - taken;
3637
3638 /* If we are jumping to the false label, reverse the
3639 probabilities. */
3640 if (label == NULL_RTX)
3641 ; /* conditional return */
3642 else if (label == if_false_label)
3643 taken = 1 - taken;
3644 else if (label != if_true_label)
3645 goto do_next_insn;
3646
3647 num_jumps++;
3648 predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
3649 }
3650
3651 do_next_insn:
3652 insn = next;
3653 }
3654
3655 /* If no jumps were modified, fail and do __builtin_expect the normal
3656 way. */
3657 if (num_jumps == 0)
3658 ret = NULL_RTX;
3659 }
3660
3661 return ret;
3662 }
3663
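/* Generate RTL for a call to __builtin_trap: use the target's trap
   instruction if one is available, otherwise call abort, and emit a
   barrier in either case.  */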
3664 void
3665 expand_builtin_trap ()
3666 {
3667 #ifdef HAVE_trap
3668 if (HAVE_trap)
3669 emit_insn (gen_trap ());
3670 else
3671 #endif
3672 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
3673 emit_barrier ();
3674 }
3675 \f
3676 /* Expand an expression EXP that calls a built-in function,
3677 with result going to TARGET if that's convenient
3678 (and in mode MODE if that's convenient).
3679 SUBTARGET may be used as the target for computing one of EXP's operands.
3680 IGNORE is nonzero if the value is to be ignored. */
3681
3682 rtx
3683 expand_builtin (exp, target, subtarget, mode, ignore)
3684 tree exp;
3685 rtx target;
3686 rtx subtarget;
3687 enum machine_mode mode;
3688 int ignore;
3689 {
3690 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3691 tree arglist = TREE_OPERAND (exp, 1);
3692 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
3693
3694 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
3695 return (*targetm.expand_builtin) (exp, target, subtarget, mode, ignore);
3696
3697 /* When not optimizing, generate calls to library functions for a certain
3698 set of builtins. */
3699 if (!optimize && !CALLED_AS_BUILT_IN (fndecl))
3700 switch (fcode)
3701 {
3702 case BUILT_IN_SQRT:
3703 case BUILT_IN_SQRTF:
3704 case BUILT_IN_SQRTL:
3705 case BUILT_IN_SIN:
3706 case BUILT_IN_SINF:
3707 case BUILT_IN_SINL:
3708 case BUILT_IN_COS:
3709 case BUILT_IN_COSF:
3710 case BUILT_IN_COSL:
3711 case BUILT_IN_EXP:
3712 case BUILT_IN_EXPF:
3713 case BUILT_IN_EXPL:
3714 case BUILT_IN_MEMSET:
3715 case BUILT_IN_MEMCPY:
3716 case BUILT_IN_MEMCMP:
3717 case BUILT_IN_BCMP:
3718 case BUILT_IN_BZERO:
3719 case BUILT_IN_INDEX:
3720 case BUILT_IN_RINDEX:
3721 case BUILT_IN_STRCHR:
3722 case BUILT_IN_STRRCHR:
3723 case BUILT_IN_STRLEN:
3724 case BUILT_IN_STRCPY:
3725 case BUILT_IN_STRNCPY:
3726 case BUILT_IN_STRNCMP:
3727 case BUILT_IN_STRSTR:
3728 case BUILT_IN_STRPBRK:
3729 case BUILT_IN_STRCAT:
3730 case BUILT_IN_STRNCAT:
3731 case BUILT_IN_STRSPN:
3732 case BUILT_IN_STRCSPN:
3733 case BUILT_IN_STRCMP:
3734 case BUILT_IN_FFS:
3735 case BUILT_IN_PUTCHAR:
3736 case BUILT_IN_PUTS:
3737 case BUILT_IN_PRINTF:
3738 case BUILT_IN_FPUTC:
3739 case BUILT_IN_FPUTS:
3740 case BUILT_IN_FWRITE:
3741 case BUILT_IN_PUTCHAR_UNLOCKED:
3742 case BUILT_IN_PUTS_UNLOCKED:
3743 case BUILT_IN_PRINTF_UNLOCKED:
3744 case BUILT_IN_FPUTC_UNLOCKED:
3745 case BUILT_IN_FPUTS_UNLOCKED:
3746 case BUILT_IN_FWRITE_UNLOCKED:
3747 return expand_call (exp, target, ignore);
3748
3749 default:
3750 break;
3751 }
3752
3753 switch (fcode)
3754 {
3755 case BUILT_IN_ABS:
3756 case BUILT_IN_LABS:
3757 case BUILT_IN_LLABS:
3758 case BUILT_IN_IMAXABS:
3759 case BUILT_IN_FABS:
3760 case BUILT_IN_FABSF:
3761 case BUILT_IN_FABSL:
3762 /* build_function_call changes these into ABS_EXPR. */
3763 abort ();
3764
3765 case BUILT_IN_CONJ:
3766 case BUILT_IN_CONJF:
3767 case BUILT_IN_CONJL:
3768 case BUILT_IN_CREAL:
3769 case BUILT_IN_CREALF:
3770 case BUILT_IN_CREALL:
3771 case BUILT_IN_CIMAG:
3772 case BUILT_IN_CIMAGF:
3773 case BUILT_IN_CIMAGL:
3774 /* expand_tree_builtin changes these into CONJ_EXPR, REALPART_EXPR
3775 and IMAGPART_EXPR. */
3776 abort ();
3777
3778 case BUILT_IN_SIN:
3779 case BUILT_IN_SINF:
3780 case BUILT_IN_SINL:
3781 case BUILT_IN_COS:
3782 case BUILT_IN_COSF:
3783 case BUILT_IN_COSL:
3784 case BUILT_IN_EXP:
3785 case BUILT_IN_EXPF:
3786 case BUILT_IN_EXPL:
3787 case BUILT_IN_LOG:
3788 case BUILT_IN_LOGF:
3789 case BUILT_IN_LOGL:
3790 /* Treat these like sqrt only if unsafe math optimizations are allowed,
3791 because of possible accuracy problems. */
3792 if (! flag_unsafe_math_optimizations)
3793 break;
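/* FALLTHROUGH */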
3794 case BUILT_IN_SQRT:
3795 case BUILT_IN_SQRTF:
3796 case BUILT_IN_SQRTL:
3797 target = expand_builtin_mathfn (exp, target, subtarget);
3798 if (target)
3799 return target;
3800 break;
3801
3802 case BUILT_IN_APPLY_ARGS:
3803 return expand_builtin_apply_args ();
3804
3805 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
3806 FUNCTION with a copy of the parameters described by
3807 ARGUMENTS, and ARGSIZE. It returns a block of memory
3808 allocated on the stack into which are stored all the registers
3809 that might possibly be used for returning the result of a
3810 function. ARGUMENTS is the value returned by
3811 __builtin_apply_args. ARGSIZE is the number of bytes of
3812 arguments that must be copied. ??? How should this value be
3813 computed? We'll also need a safe worst case value for varargs
3814 functions. */
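/* Illustrative sketch (not part of the original source) of how the three
   builtins cooperate in a hypothetical forwarding wrapper; target_fn and
   the 64-byte argument-size guess are assumptions made only for this
   example:

     void *wrapper (void)
     {
       void *args = __builtin_apply_args ();
       void *result = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (result);
     }  */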
3815 case BUILT_IN_APPLY:
3816 if (!validate_arglist (arglist, POINTER_TYPE,
3817 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3818 && !validate_arglist (arglist, REFERENCE_TYPE,
3819 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3820 return const0_rtx;
3821 else
3822 {
3823 int i;
3824 tree t;
3825 rtx ops[3];
3826
3827 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
3828 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
3829
3830 return expand_builtin_apply (ops[0], ops[1], ops[2]);
3831 }
3832
3833 /* __builtin_return (RESULT) causes the function to return the
3834 value described by RESULT. RESULT is the address of the block of
3835 memory returned by __builtin_apply. */
3836 case BUILT_IN_RETURN:
3837 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
3838 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
3839 NULL_RTX, VOIDmode, 0));
3840 return const0_rtx;
3841
3842 case BUILT_IN_SAVEREGS:
3843 return expand_builtin_saveregs ();
3844
3845 case BUILT_IN_ARGS_INFO:
3846 return expand_builtin_args_info (exp);
3847
3848 /* Return the address of the first anonymous stack arg. */
3849 case BUILT_IN_NEXT_ARG:
3850 return expand_builtin_next_arg (arglist);
3851
3852 case BUILT_IN_CLASSIFY_TYPE:
3853 return expand_builtin_classify_type (arglist);
3854
3855 case BUILT_IN_CONSTANT_P:
3856 return expand_builtin_constant_p (exp);
3857
3858 case BUILT_IN_FRAME_ADDRESS:
3859 case BUILT_IN_RETURN_ADDRESS:
3860 return expand_builtin_frame_address (exp);
3861
3862 /* Returns the address of the area where the structure is returned,
3863 or 0 otherwise. */
3864 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
3865 if (arglist != 0
3866 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
3867 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
3868 return const0_rtx;
3869 else
3870 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
3871
3872 case BUILT_IN_ALLOCA:
3873 target = expand_builtin_alloca (arglist, target);
3874 if (target)
3875 return target;
3876 break;
3877
3878 case BUILT_IN_FFS:
3879 target = expand_builtin_ffs (arglist, target, subtarget);
3880 if (target)
3881 return target;
3882 break;
3883
3884 case BUILT_IN_STRLEN:
3885 target = expand_builtin_strlen (exp, target);
3886 if (target)
3887 return target;
3888 break;
3889
3890 case BUILT_IN_STRCPY:
3891 target = expand_builtin_strcpy (exp, target, mode);
3892 if (target)
3893 return target;
3894 break;
3895
3896 case BUILT_IN_STRNCPY:
3897 target = expand_builtin_strncpy (arglist, target, mode);
3898 if (target)
3899 return target;
3900 break;
3901
3902 case BUILT_IN_STRCAT:
3903 target = expand_builtin_strcat (arglist, target, mode);
3904 if (target)
3905 return target;
3906 break;
3907
3908 case BUILT_IN_STRNCAT:
3909 target = expand_builtin_strncat (arglist, target, mode);
3910 if (target)
3911 return target;
3912 break;
3913
3914 case BUILT_IN_STRSPN:
3915 target = expand_builtin_strspn (arglist, target, mode);
3916 if (target)
3917 return target;
3918 break;
3919
3920 case BUILT_IN_STRCSPN:
3921 target = expand_builtin_strcspn (arglist, target, mode);
3922 if (target)
3923 return target;
3924 break;
3925
3926 case BUILT_IN_STRSTR:
3927 target = expand_builtin_strstr (arglist, target, mode);
3928 if (target)
3929 return target;
3930 break;
3931
3932 case BUILT_IN_STRPBRK:
3933 target = expand_builtin_strpbrk (arglist, target, mode);
3934 if (target)
3935 return target;
3936 break;
3937
3938 case BUILT_IN_INDEX:
3939 case BUILT_IN_STRCHR:
3940 target = expand_builtin_strchr (arglist, target, mode);
3941 if (target)
3942 return target;
3943 break;
3944
3945 case BUILT_IN_RINDEX:
3946 case BUILT_IN_STRRCHR:
3947 target = expand_builtin_strrchr (arglist, target, mode);
3948 if (target)
3949 return target;
3950 break;
3951
3952 case BUILT_IN_MEMCPY:
3953 target = expand_builtin_memcpy (arglist, target, mode);
3954 if (target)
3955 return target;
3956 break;
3957
3958 case BUILT_IN_MEMSET:
3959 target = expand_builtin_memset (exp, target, mode);
3960 if (target)
3961 return target;
3962 break;
3963
3964 case BUILT_IN_BZERO:
3965 target = expand_builtin_bzero (exp);
3966 if (target)
3967 return target;
3968 break;
3969
3970 case BUILT_IN_STRCMP:
3971 target = expand_builtin_strcmp (exp, target, mode);
3972 if (target)
3973 return target;
3974 break;
3975
3976 case BUILT_IN_STRNCMP:
3977 target = expand_builtin_strncmp (exp, target, mode);
3978 if (target)
3979 return target;
3980 break;
3981
3982 case BUILT_IN_BCMP:
3983 case BUILT_IN_MEMCMP:
3984 target = expand_builtin_memcmp (exp, arglist, target, mode);
3985 if (target)
3986 return target;
3987 break;
3988
3989 case BUILT_IN_SETJMP:
3990 target = expand_builtin_setjmp (arglist, target);
3991 if (target)
3992 return target;
3993 break;
3994
3995 /* __builtin_longjmp is passed a pointer to an array of five words.
3996 It's similar to the C library longjmp function but works with
3997 __builtin_setjmp above. */
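/* Illustrative example (not part of the original source), using the
   five-word buffer described above; do_work and recover are hypothetical:

     void *buf[5];

     if (__builtin_setjmp (buf) == 0)
       do_work ();
     else
       recover ();

   The else branch is reached via __builtin_longjmp (buf, 1); only the
   constant 1 is accepted as the second argument, as enforced below.  */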
3998 case BUILT_IN_LONGJMP:
3999 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4000 break;
4001 else
4002 {
4003 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
4004 VOIDmode, 0);
4005 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
4006 NULL_RTX, VOIDmode, 0);
4007
4008 if (value != const1_rtx)
4009 {
4010 error ("__builtin_longjmp second argument must be 1");
4011 return const0_rtx;
4012 }
4013
4014 expand_builtin_longjmp (buf_addr, value);
4015 return const0_rtx;
4016 }
4017
4018 case BUILT_IN_TRAP:
4019 expand_builtin_trap ();
4020 return const0_rtx;
4021
4022 case BUILT_IN_FPUTS:
4023 target = expand_builtin_fputs (arglist, ignore, /*unlocked=*/ 0);
4024 if (target)
4025 return target;
4026 break;
4027 case BUILT_IN_FPUTS_UNLOCKED:
4028 target = expand_builtin_fputs (arglist, ignore, /*unlocked=*/ 1);
4029 if (target)
4030 return target;
4031 break;
4032
4033 /* Various hooks for the DWARF 2 __throw routine. */
4034 case BUILT_IN_UNWIND_INIT:
4035 expand_builtin_unwind_init ();
4036 return const0_rtx;
4037 case BUILT_IN_DWARF_CFA:
4038 return virtual_cfa_rtx;
4039 #ifdef DWARF2_UNWIND_INFO
4040 case BUILT_IN_DWARF_FP_REGNUM:
4041 return expand_builtin_dwarf_fp_regnum ();
4042 case BUILT_IN_INIT_DWARF_REG_SIZES:
4043 expand_builtin_init_dwarf_reg_sizes (TREE_VALUE (arglist));
4044 return const0_rtx;
4045 #endif
4046 case BUILT_IN_FROB_RETURN_ADDR:
4047 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
4048 case BUILT_IN_EXTRACT_RETURN_ADDR:
4049 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
4050 case BUILT_IN_EH_RETURN:
4051 expand_builtin_eh_return (TREE_VALUE (arglist),
4052 TREE_VALUE (TREE_CHAIN (arglist)));
4053 return const0_rtx;
4054 #ifdef EH_RETURN_DATA_REGNO
4055 case BUILT_IN_EH_RETURN_DATA_REGNO:
4056 return expand_builtin_eh_return_data_regno (arglist);
4057 #endif
4058 case BUILT_IN_VA_START:
4059 case BUILT_IN_STDARG_START:
4060 return expand_builtin_va_start (arglist);
4061 case BUILT_IN_VA_END:
4062 return expand_builtin_va_end (arglist);
4063 case BUILT_IN_VA_COPY:
4064 return expand_builtin_va_copy (arglist);
4065 case BUILT_IN_EXPECT:
4066 return expand_builtin_expect (arglist, target);
4067 case BUILT_IN_PREFETCH:
4068 expand_builtin_prefetch (arglist);
4069 return const0_rtx;
4070
4071
4072 default: /* Just do a library call if this is an unknown builtin. */
4073 if (!DECL_ASSEMBLER_NAME_SET_P (fndecl))
4074 error ("built-in function `%s' not currently supported",
4075 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
4076 }
4077
4078 /* The switch statement above can drop through to cause the function
4079 to be called normally. */
4080 return expand_call (exp, target, ignore);
4081 }
4082
4083 /* Fold a call to __builtin_constant_p, if we know it will evaluate to a
4084 constant. ARGLIST is the argument list of the call. */
4085
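/* Illustrative folds (not part of the original source):

     __builtin_constant_p (42)     folds to 1  (numeric literal)
     __builtin_constant_p ("abc")  folds to 1  (address of a string constant)
     __builtin_constant_p (i++)    folds to 0  (side effects)
     __builtin_constant_p (ptr)    folds to 0  (pointer type, literals only)
     __builtin_constant_p (i)      is left alone while CSE may still run.  */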
4086 static tree
4087 fold_builtin_constant_p (arglist)
4088 tree arglist;
4089 {
4090 if (arglist == 0)
4091 return 0;
4092
4093 arglist = TREE_VALUE (arglist);
4094
4095 /* We return 1 for a numeric type that's known to be a constant
4096 value at compile-time or for an aggregate type that's a
4097 literal constant. */
4098 STRIP_NOPS (arglist);
4099
4100 /* If we know this is a constant, return the constant 1. */
4101 if (TREE_CODE_CLASS (TREE_CODE (arglist)) == 'c'
4102 || (TREE_CODE (arglist) == CONSTRUCTOR
4103 && TREE_CONSTANT (arglist))
4104 || (TREE_CODE (arglist) == ADDR_EXPR
4105 && TREE_CODE (TREE_OPERAND (arglist, 0)) == STRING_CST))
4106 return integer_one_node;
4107
4108 /* If we aren't going to be running CSE or this expression
4109 has side effects, show we don't know it to be a constant.
4110 Likewise if it's a pointer or aggregate type, since in those
4111 cases we only want literals, which are only optimized when
4112 generating RTL, not later.
4113 And finally, if we are compiling an initializer, not code, we
4114 need to return a definite result now; there's not going to be any
4115 more optimization done. */
4116 if (TREE_SIDE_EFFECTS (arglist) || cse_not_expected
4117 || AGGREGATE_TYPE_P (TREE_TYPE (arglist))
4118 || POINTER_TYPE_P (TREE_TYPE (arglist))
4119 || cfun == 0)
4120 return integer_zero_node;
4121
4122 return 0;
4123 }
4124
4125 /* Fold a call to __builtin_classify_type. */
4126
4127 static tree
4128 fold_builtin_classify_type (arglist)
4129 tree arglist;
4130 {
4131 if (arglist == 0)
4132 return build_int_2 (no_type_class, 0);
4133
4134 return build_int_2 (type_to_class (TREE_TYPE (TREE_VALUE (arglist))), 0);
4135 }
4136
4137 /* Fold a call to __builtin_inf or __builtin_huge_val. */
4138
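/* Illustrative folds (not part of the original source): __builtin_inf ()
   and __builtin_huge_val () both become a REAL_CST holding the target
   format's infinity; only __builtin_inf warns when the target format
   has no infinities.  */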
4139 static tree
4140 fold_builtin_inf (type, warn)
4141 tree type;
4142 int warn;
4143 {
4144 REAL_VALUE_TYPE real;
4145
4146 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
4147 warning ("target format does not support infinity");
4148
4149 real_inf (&real);
4150 return build_real (type, real);
4151 }
4152
4153 /* Fold a call to __builtin_nan or __builtin_nans. */
4154
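/* Illustrative folds (not part of the original source):

     __builtin_nan ("")    becomes a quiet NaN REAL_CST
     __builtin_nans ("")   becomes a signalling NaN REAL_CST

   A call whose argument is not a string literal, or whose string is
   rejected by real_nan, is left unfolded.  */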
4155 static tree
4156 fold_builtin_nan (arglist, type, quiet)
4157 tree arglist, type;
4158 int quiet;
4159 {
4160 REAL_VALUE_TYPE real;
4161 const char *str;
4162
4163 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4164 return 0;
4165 str = c_getstr (TREE_VALUE (arglist));
4166 if (!str)
4167 return 0;
4168
4169 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
4170 return 0;
4171
4172 return build_real (type, real);
4173 }
4174
4175 /* Used by constant folding to eliminate some builtin calls early. EXP is
4176 the CALL_EXPR of a call to a builtin function. */
4177
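/* Illustrative folds (not part of the original source):

     strlen ("hello")             folds to 5
     __builtin_classify_type (x)  folds to an integer naming X's type class
     __builtin_inf ()             folds to a REAL_CST infinity

   Calls that cannot be folded are returned unchanged (0 here) and are
   expanded or turned into library calls later.  */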
4178 tree
4179 fold_builtin (exp)
4180 tree exp;
4181 {
4182 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4183 tree arglist = TREE_OPERAND (exp, 1);
4184 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4185
4186 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
4187 return 0;
4188
4189 switch (fcode)
4190 {
4191 case BUILT_IN_CONSTANT_P:
4192 return fold_builtin_constant_p (arglist);
4193
4194 case BUILT_IN_CLASSIFY_TYPE:
4195 return fold_builtin_classify_type (arglist);
4196
4197 case BUILT_IN_STRLEN:
4198 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4199 {
4200 tree len = c_strlen (TREE_VALUE (arglist));
4201 if (len != 0)
4202 return len;
4203 }
4204 break;
4205
4206 case BUILT_IN_INF:
4207 case BUILT_IN_INFF:
4208 case BUILT_IN_INFL:
4209 return fold_builtin_inf (TREE_TYPE (TREE_TYPE (fndecl)), true);
4210
4211 case BUILT_IN_HUGE_VAL:
4212 case BUILT_IN_HUGE_VALF:
4213 case BUILT_IN_HUGE_VALL:
4214 return fold_builtin_inf (TREE_TYPE (TREE_TYPE (fndecl)), false);
4215
4216 case BUILT_IN_NAN:
4217 case BUILT_IN_NANF:
4218 case BUILT_IN_NANL:
4219 return fold_builtin_nan (arglist, TREE_TYPE (TREE_TYPE (fndecl)), true);
4220
4221 case BUILT_IN_NANS:
4222 case BUILT_IN_NANSF:
4223 case BUILT_IN_NANSL:
4224 return fold_builtin_nan (arglist, TREE_TYPE (TREE_TYPE (fndecl)), false);
4225
4226 default:
4227 break;
4228 }
4229
4230 return 0;
4231 }
4232
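/* Build a CALL_EXPR invoking FN with the argument list ARGLIST, mark it
   as having side effects, and fold the result.  */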
4233 static tree
4234 build_function_call_expr (fn, arglist)
4235 tree fn, arglist;
4236 {
4237 tree call_expr;
4238
4239 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
4240 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
4241 call_expr, arglist);
4242 TREE_SIDE_EFFECTS (call_expr) = 1;
4243 return fold (call_expr);
4244 }
4245
4246 /* This function validates the types of a function call argument list
4247 represented as a tree chain of parameters against a specified list
4248 of tree_codes. If the last specifier is a 0, that represents an
4249 ellipsis; otherwise the last specifier must be a VOID_TYPE. */
4250
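/* Illustrative uses (not part of the original source):

     validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
       accepts exactly a pointer followed by an integer;

     validate_arglist (arglist, POINTER_TYPE, 0)
       accepts a pointer followed by any further arguments.  */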
4251 static int
4252 validate_arglist VPARAMS ((tree arglist, ...))
4253 {
4254 enum tree_code code;
4255 int res = 0;
4256
4257 VA_OPEN (ap, arglist);
4258 VA_FIXEDARG (ap, tree, arglist);
4259
4260 do
4261 {
4262 code = va_arg (ap, enum tree_code);
4263 switch (code)
4264 {
4265 case 0:
4266 /* This signifies an ellipsis; any further arguments are all OK. */
4267 res = 1;
4268 goto end;
4269 case VOID_TYPE:
4270 /* This signifies an endlink; if no arguments remain, return
4271 true, otherwise return false. */
4272 res = arglist == 0;
4273 goto end;
4274 default:
4275 /* If no parameters remain or the parameter's code does not
4276 match the specified code, return false. Otherwise continue
4277 checking any remaining arguments. */
4278 if (arglist == 0
4279 || code != TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))))
4280 goto end;
4281 break;
4282 }
4283 arglist = TREE_CHAIN (arglist);
4284 }
4285 while (1);
4286
4287 /* We need gotos here since we can only have one VA_CLOSE in a
4288 function. */
4289 end: ;
4290 VA_CLOSE (ap);
4291
4292 return res;
4293 }
4294
4295 /* Default version of target-specific builtin setup that does nothing. */
4296
4297 void
4298 default_init_builtins ()
4299 {
4300 }
4301
4302 /* Default target-specific builtin expander that does nothing. */
4303
4304 rtx
4305 default_expand_builtin (exp, target, subtarget, mode, ignore)
4306 tree exp ATTRIBUTE_UNUSED;
4307 rtx target ATTRIBUTE_UNUSED;
4308 rtx subtarget ATTRIBUTE_UNUSED;
4309 enum machine_mode mode ATTRIBUTE_UNUSED;
4310 int ignore ATTRIBUTE_UNUSED;
4311 {
4312 return NULL_RTX;
4313 }