1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "real.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "obstack.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-config.h"
35 #include "expr.h"
36 #include "optabs.h"
37 #include "libfuncs.h"
38 #include "recog.h"
39 #include "output.h"
40 #include "typeclass.h"
41 #include "toplev.h"
42 #include "predict.h"
43 #include "tm_p.h"
44 #include "target.h"
45 #include "langhooks.h"
46
47 #define CALLED_AS_BUILT_IN(NODE) \
48 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
49
50 /* Register mappings for target machines without register windows. */
51 #ifndef INCOMING_REGNO
52 #define INCOMING_REGNO(OUT) (OUT)
53 #endif
54 #ifndef OUTGOING_REGNO
55 #define OUTGOING_REGNO(IN) (IN)
56 #endif
57
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
60 #endif
61
62 /* Define the names of the builtin function types and codes. */
63 const char *const built_in_class_names[4]
64 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
65
66 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT) STRINGX(X),
67 const char *const built_in_names[(int) END_BUILTINS] =
68 {
69 #include "builtins.def"
70 };
71 #undef DEF_BUILTIN
72
73 /* Set up an array of _DECL trees; make sure each element is
74 initialized to NULL_TREE. */
75 tree built_in_decls[(int) END_BUILTINS];
76
77 static int get_pointer_alignment PARAMS ((tree, unsigned int));
78 static tree c_strlen PARAMS ((tree));
79 static const char *c_getstr PARAMS ((tree));
80 static rtx c_readstr PARAMS ((const char *,
81 enum machine_mode));
82 static int target_char_cast PARAMS ((tree, char *));
83 static rtx get_memory_rtx PARAMS ((tree));
84 static int apply_args_size PARAMS ((void));
85 static int apply_result_size PARAMS ((void));
86 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
87 static rtx result_vector PARAMS ((int, rtx));
88 #endif
89 static rtx expand_builtin_setjmp PARAMS ((tree, rtx));
90 static void expand_builtin_prefetch PARAMS ((tree));
91 static rtx expand_builtin_apply_args PARAMS ((void));
92 static rtx expand_builtin_apply_args_1 PARAMS ((void));
93 static rtx expand_builtin_apply PARAMS ((rtx, rtx, rtx));
94 static void expand_builtin_return PARAMS ((rtx));
95 static enum type_class type_to_class PARAMS ((tree));
96 static rtx expand_builtin_classify_type PARAMS ((tree));
97 static rtx expand_builtin_mathfn PARAMS ((tree, rtx, rtx));
98 static rtx expand_builtin_constant_p PARAMS ((tree));
99 static rtx expand_builtin_args_info PARAMS ((tree));
100 static rtx expand_builtin_next_arg PARAMS ((tree));
101 static rtx expand_builtin_va_start PARAMS ((tree));
102 static rtx expand_builtin_va_end PARAMS ((tree));
103 static rtx expand_builtin_va_copy PARAMS ((tree));
104 static rtx expand_builtin_memcmp PARAMS ((tree, tree, rtx,
105 enum machine_mode));
106 static rtx expand_builtin_strcmp PARAMS ((tree, rtx,
107 enum machine_mode));
108 static rtx expand_builtin_strncmp PARAMS ((tree, rtx,
109 enum machine_mode));
110 static rtx builtin_memcpy_read_str PARAMS ((PTR, HOST_WIDE_INT,
111 enum machine_mode));
112 static rtx expand_builtin_strcat PARAMS ((tree, rtx,
113 enum machine_mode));
114 static rtx expand_builtin_strncat PARAMS ((tree, rtx,
115 enum machine_mode));
116 static rtx expand_builtin_strspn PARAMS ((tree, rtx,
117 enum machine_mode));
118 static rtx expand_builtin_strcspn PARAMS ((tree, rtx,
119 enum machine_mode));
120 static rtx expand_builtin_memcpy PARAMS ((tree, rtx,
121 enum machine_mode));
122 static rtx expand_builtin_strcpy PARAMS ((tree, rtx,
123 enum machine_mode));
124 static rtx builtin_strncpy_read_str PARAMS ((PTR, HOST_WIDE_INT,
125 enum machine_mode));
126 static rtx expand_builtin_strncpy PARAMS ((tree, rtx,
127 enum machine_mode));
128 static rtx builtin_memset_read_str PARAMS ((PTR, HOST_WIDE_INT,
129 enum machine_mode));
130 static rtx builtin_memset_gen_str PARAMS ((PTR, HOST_WIDE_INT,
131 enum machine_mode));
132 static rtx expand_builtin_memset PARAMS ((tree, rtx,
133 enum machine_mode));
134 static rtx expand_builtin_bzero PARAMS ((tree));
135 static rtx expand_builtin_strlen PARAMS ((tree, rtx));
136 static rtx expand_builtin_strstr PARAMS ((tree, rtx,
137 enum machine_mode));
138 static rtx expand_builtin_strpbrk PARAMS ((tree, rtx,
139 enum machine_mode));
140 static rtx expand_builtin_strchr PARAMS ((tree, rtx,
141 enum machine_mode));
142 static rtx expand_builtin_strrchr PARAMS ((tree, rtx,
143 enum machine_mode));
144 static rtx expand_builtin_alloca PARAMS ((tree, rtx));
145 static rtx expand_builtin_ffs PARAMS ((tree, rtx, rtx));
146 static rtx expand_builtin_frame_address PARAMS ((tree));
147 static rtx expand_builtin_fputs PARAMS ((tree, int, int));
148 static tree stabilize_va_list PARAMS ((tree, int));
149 static rtx expand_builtin_expect PARAMS ((tree, rtx));
150 static tree fold_builtin_constant_p PARAMS ((tree));
151 static tree fold_builtin_classify_type PARAMS ((tree));
152 static tree build_function_call_expr PARAMS ((tree, tree));
153 static int validate_arglist PARAMS ((tree, ...));
154
155 /* Return the alignment in bits of EXP, a pointer valued expression.
156 But don't return more than MAX_ALIGN no matter what.
157 The alignment returned is, by default, the alignment of the thing that
158 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
159
160 Otherwise, look at the expression to see if we can do better, i.e., if the
161 expression is actually pointing at an object whose alignment is tighter. */
162
163 static int
164 get_pointer_alignment (exp, max_align)
165 tree exp;
166 unsigned int max_align;
167 {
168 unsigned int align, inner;
169
170 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
171 return 0;
172
173 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
174 align = MIN (align, max_align);
175
176 while (1)
177 {
178 switch (TREE_CODE (exp))
179 {
180 case NOP_EXPR:
181 case CONVERT_EXPR:
182 case NON_LVALUE_EXPR:
183 exp = TREE_OPERAND (exp, 0);
184 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
185 return align;
186
187 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
188 align = MIN (inner, max_align);
189 break;
190
191 case PLUS_EXPR:
192 /* If sum of pointer + int, restrict our maximum alignment to that
193 imposed by the integer. If not, we can't do any better than
194 ALIGN. */
195 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
196 return align;
197
198 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
199 & (max_align / BITS_PER_UNIT - 1))
200 != 0)
201 max_align >>= 1;
202
203 exp = TREE_OPERAND (exp, 0);
204 break;
205
206 case ADDR_EXPR:
207 /* See what we are pointing at and look at its alignment. */
208 exp = TREE_OPERAND (exp, 0);
209 if (TREE_CODE (exp) == FUNCTION_DECL)
210 align = FUNCTION_BOUNDARY;
211 else if (DECL_P (exp))
212 align = DECL_ALIGN (exp);
213 #ifdef CONSTANT_ALIGNMENT
214 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
215 align = CONSTANT_ALIGNMENT (exp, align);
216 #endif
217 return MIN (align, max_align);
218
219 default:
220 return align;
221 }
222 }
223 }
224
225 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
226 way, because the string could contain a zero byte in the middle;
227 TREE_STRING_LENGTH is the size of the character array, not the string.
228
229 The value returned is of type `ssizetype'.
230
231 Unfortunately, string_constant can't access the values of const char
232 arrays with initializers, so neither can we do so here. */
233
234 static tree
235 c_strlen (src)
236 tree src;
237 {
238 tree offset_node;
239 HOST_WIDE_INT offset;
240 int max;
241 const char *ptr;
242
243 src = string_constant (src, &offset_node);
244 if (src == 0)
245 return 0;
246
247 max = TREE_STRING_LENGTH (src) - 1;
248 ptr = TREE_STRING_POINTER (src);
249
250 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
251 {
252 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
253 compute the offset to the following null if we don't know where to
254 start searching for it. */
255 int i;
256
257 for (i = 0; i < max; i++)
258 if (ptr[i] == 0)
259 return 0;
260
261 /* We don't know the starting offset, but we do know that the string
262 has no internal zero bytes. We can assume that the offset falls
263 within the bounds of the string; otherwise, the programmer deserves
264 what he gets. Subtract the offset from the length of the string,
265 and return that. This would perhaps not be valid if we were dealing
266 with named arrays in addition to literal string constants. */
267
268 return size_diffop (size_int (max), offset_node);
269 }
270
271 /* We have a known offset into the string. Start searching there for
272 a null character if we can represent it as a single HOST_WIDE_INT. */
273 if (offset_node == 0)
274 offset = 0;
275 else if (! host_integerp (offset_node, 0))
276 offset = -1;
277 else
278 offset = tree_low_cst (offset_node, 0);
279
280 /* If the offset is known to be out of bounds, warn, and call strlen at
281 runtime. */
282 if (offset < 0 || offset > max)
283 {
284 warning ("offset outside bounds of constant string");
285 return 0;
286 }
287
288 /* Use strlen to search for the first zero byte. Since any strings
289 constructed with build_string will have nulls appended, we win even
290 if we get handed something like (char[4])"abcd".
291
292 Since OFFSET is our starting index into the string, no further
293 calculation is needed. */
294 return ssize_int (strlen (ptr + offset));
295 }
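/* For illustration (assumed source-level views, not taken from the
   comments above): c_strlen of "hello" yields ssize_int (5); c_strlen
   of "foo\0bar" + 4 yields ssize_int (3); an unknown offset into
   "foo\0bar" yields 0 because of the internal zero byte, while an
   unknown offset OFF into "foobar" yields size_diffop (size_int (6),
   OFF) since that string has no internal zero bytes.  */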
296
297 /* Return a char pointer for a C string if it is a string constant
298 or sum of string constant and integer constant. */
299
300 static const char *
301 c_getstr (src)
302 tree src;
303 {
304 tree offset_node;
305
306 src = string_constant (src, &offset_node);
307 if (src == 0)
308 return 0;
309
310 if (offset_node == 0)
311 return TREE_STRING_POINTER (src);
312 else if (!host_integerp (offset_node, 1)
313 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
314 return 0;
315
316 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
317 }
318
319 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
320 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
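/* For example (an illustration, assuming 8-bit units and a 32-bit
   SImode): c_readstr ("abcd", SImode) yields the CONST_INT 0x64636261
   on a little-endian target and 0x61626364 on a big-endian one, i.e.
   the same value a four-byte load of the string from memory would
   produce.  */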
321
322 static rtx
323 c_readstr (str, mode)
324 const char *str;
325 enum machine_mode mode;
326 {
327 HOST_WIDE_INT c[2];
328 HOST_WIDE_INT ch;
329 unsigned int i, j;
330
331 if (GET_MODE_CLASS (mode) != MODE_INT)
332 abort ();
333 c[0] = 0;
334 c[1] = 0;
335 ch = 1;
336 for (i = 0; i < GET_MODE_SIZE (mode); i++)
337 {
338 j = i;
339 if (WORDS_BIG_ENDIAN)
340 j = GET_MODE_SIZE (mode) - i - 1;
341 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
342 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
343 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
344 j *= BITS_PER_UNIT;
345 if (j > 2 * HOST_BITS_PER_WIDE_INT)
346 abort ();
347 if (ch)
348 ch = (unsigned char) str[i];
349 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
350 }
351 return immed_double_const (c[0], c[1], mode);
352 }
353
354 /* Cast a target constant CST to target CHAR. If that value fits into the
355 host char type, return zero and put that value into the variable pointed
356 to by P. */
357
358 static int
359 target_char_cast (cst, p)
360 tree cst;
361 char *p;
362 {
363 unsigned HOST_WIDE_INT val, hostval;
364
365 if (!host_integerp (cst, 1)
366 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
367 return 1;
368
369 val = tree_low_cst (cst, 1);
370 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
371 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
372
373 hostval = val;
374 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
375 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
376
377 if (val != hostval)
378 return 1;
379
380 *p = hostval;
381 return 0;
382 }
383
384 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
385 times to get the address of either a higher stack frame, or a return
386 address located within it (depending on FNDECL_CODE). */
387
388 rtx
389 expand_builtin_return_addr (fndecl_code, count, tem)
390 enum built_in_function fndecl_code;
391 int count;
392 rtx tem;
393 {
394 int i;
395
396 /* Some machines need special handling before we can access
397 arbitrary frames. For example, on the sparc, we must first flush
398 all register windows to the stack. */
399 #ifdef SETUP_FRAME_ADDRESSES
400 if (count > 0)
401 SETUP_FRAME_ADDRESSES ();
402 #endif
403
404 /* On the sparc, the return address is not in the frame, it is in a
405 register. There is no way to access it off of the current frame
406 pointer, but it can be accessed off the previous frame pointer by
407 reading the value from the register window save area. */
408 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
409 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
410 count--;
411 #endif
412
413 /* Scan back COUNT frames to the specified frame. */
414 for (i = 0; i < count; i++)
415 {
416 /* Assume the dynamic chain pointer is in the word that the
417 frame address points to, unless otherwise specified. */
418 #ifdef DYNAMIC_CHAIN_ADDRESS
419 tem = DYNAMIC_CHAIN_ADDRESS (tem);
420 #endif
421 tem = memory_address (Pmode, tem);
422 tem = gen_rtx_MEM (Pmode, tem);
423 set_mem_alias_set (tem, get_frame_alias_set ());
424 tem = copy_to_reg (tem);
425 }
426
427 /* For __builtin_frame_address, return what we've got. */
428 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
429 return tem;
430
431 /* For __builtin_return_address, get the return address from that
432 frame. */
433 #ifdef RETURN_ADDR_RTX
434 tem = RETURN_ADDR_RTX (count, tem);
435 #else
436 tem = memory_address (Pmode,
437 plus_constant (tem, GET_MODE_SIZE (Pmode)));
438 tem = gen_rtx_MEM (Pmode, tem);
439 set_mem_alias_set (tem, get_frame_alias_set ());
440 #endif
441 return tem;
442 }
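/* A minimal sketch of the source-level builtins expanded above (the
   function names are invented for the example and are not part of
   GCC).  __builtin_return_address (0) needs no frame walking, while a
   nonzero count walks the dynamic chain as coded above.  */
#if 0
void *
example_caller_pc (void)
{
  return __builtin_return_address (0);
}

void *
example_parent_frame (void)
{
  return __builtin_frame_address (1);
}
#endif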
443
444 /* Alias set used for setjmp buffer. */
445 static HOST_WIDE_INT setjmp_alias_set = -1;
446
447 /* Construct the leading half of a __builtin_setjmp call. Control will
448 return to RECEIVER_LABEL. This is used directly by sjlj exception
449 handling code. */
450
451 void
452 expand_builtin_setjmp_setup (buf_addr, receiver_label)
453 rtx buf_addr;
454 rtx receiver_label;
455 {
456 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
457 rtx stack_save;
458 rtx mem;
459
460 if (setjmp_alias_set == -1)
461 setjmp_alias_set = new_alias_set ();
462
463 #ifdef POINTERS_EXTEND_UNSIGNED
464 if (GET_MODE (buf_addr) != Pmode)
465 buf_addr = convert_memory_address (Pmode, buf_addr);
466 #endif
467
468 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
469
470 emit_queue ();
471
472 /* We store the frame pointer and the address of receiver_label in
473 the buffer and use the rest of it for the stack save area, which
474 is machine-dependent. */
475
476 #ifndef BUILTIN_SETJMP_FRAME_VALUE
477 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
478 #endif
479
480 mem = gen_rtx_MEM (Pmode, buf_addr);
481 set_mem_alias_set (mem, setjmp_alias_set);
482 emit_move_insn (mem, BUILTIN_SETJMP_FRAME_VALUE);
483
484 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
485 set_mem_alias_set (mem, setjmp_alias_set);
486
487 emit_move_insn (validize_mem (mem),
488 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
489
490 stack_save = gen_rtx_MEM (sa_mode,
491 plus_constant (buf_addr,
492 2 * GET_MODE_SIZE (Pmode)));
493 set_mem_alias_set (stack_save, setjmp_alias_set);
494 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
495
496 /* If there is further processing to do, do it. */
497 #ifdef HAVE_builtin_setjmp_setup
498 if (HAVE_builtin_setjmp_setup)
499 emit_insn (gen_builtin_setjmp_setup (buf_addr));
500 #endif
501
502 /* Tell optimize_save_area_alloca that extra work will need
503 to be done during alloca. */
504 current_function_calls_setjmp = 1;
505
506 /* Set this so all the registers get saved in our frame; we need to be
507 able to copy the saved values for any registers from frames we unwind. */
508 current_function_has_nonlocal_label = 1;
509 }
510
511 /* Construct the trailing part of a __builtin_setjmp call.
512 This is used directly by sjlj exception handling code. */
513
514 void
515 expand_builtin_setjmp_receiver (receiver_label)
516 rtx receiver_label ATTRIBUTE_UNUSED;
517 {
518 /* Clobber the FP when we get here, so we have to make sure it's
519 marked as used by this function. */
520 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
521
522 /* Mark the static chain as clobbered here so life information
523 doesn't get messed up for it. */
524 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
525
526 /* Now put in the code to restore the frame pointer, and argument
527 pointer, if needed. The code below is from expand_end_bindings
528 in stmt.c; see detailed documentation there. */
529 #ifdef HAVE_nonlocal_goto
530 if (! HAVE_nonlocal_goto)
531 #endif
532 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
533
534 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
535 if (fixed_regs[ARG_POINTER_REGNUM])
536 {
537 #ifdef ELIMINABLE_REGS
538 size_t i;
539 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
540
541 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
542 if (elim_regs[i].from == ARG_POINTER_REGNUM
543 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
544 break;
545
546 if (i == ARRAY_SIZE (elim_regs))
547 #endif
548 {
549 /* Now restore our arg pointer from the address at which it
550 was saved in our stack frame. */
551 emit_move_insn (virtual_incoming_args_rtx,
552 copy_to_reg (get_arg_pointer_save_area (cfun)));
553 }
554 }
555 #endif
556
557 #ifdef HAVE_builtin_setjmp_receiver
558 if (HAVE_builtin_setjmp_receiver)
559 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
560 else
561 #endif
562 #ifdef HAVE_nonlocal_goto_receiver
563 if (HAVE_nonlocal_goto_receiver)
564 emit_insn (gen_nonlocal_goto_receiver ());
565 else
566 #endif
567 { /* Nothing */ }
568
569 /* @@@ This is a kludge. Not all machine descriptions define a blockage
570 insn, but we must not allow the code we just generated to be reordered
571 by scheduling. Specifically, the update of the frame pointer must
572 happen immediately, not later. So emit an ASM_INPUT to act as blockage
573 insn. */
574 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
575 }
576
577 /* __builtin_setjmp is passed a pointer to an array of five words (not
578 all will be used on all machines). It operates similarly to the C
579 library function of the same name, but is more efficient. Much of
580 the code below (and for longjmp) is copied from the handling of
581 non-local gotos.
582
583 NOTE: This is intended for use by GNAT and the exception handling
584 scheme in the compiler and will only work in the way they use
585 it. */
586
587 static rtx
588 expand_builtin_setjmp (arglist, target)
589 tree arglist;
590 rtx target;
591 {
592 rtx buf_addr, next_lab, cont_lab;
593
594 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
595 return NULL_RTX;
596
597 if (target == 0 || GET_CODE (target) != REG
598 || REGNO (target) < FIRST_PSEUDO_REGISTER)
599 target = gen_reg_rtx (TYPE_MODE (integer_type_node));
600
601 buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
602
603 next_lab = gen_label_rtx ();
604 cont_lab = gen_label_rtx ();
605
606 expand_builtin_setjmp_setup (buf_addr, next_lab);
607
608 /* Set TARGET to zero and branch to the continue label. */
609 emit_move_insn (target, const0_rtx);
610 emit_jump_insn (gen_jump (cont_lab));
611 emit_barrier ();
612 emit_label (next_lab);
613
614 expand_builtin_setjmp_receiver (next_lab);
615
616 /* Set TARGET to one. */
617 emit_move_insn (target, const1_rtx);
618 emit_label (cont_lab);
619
620 /* Tell flow about the strange goings-on. Putting `next_lab' on
621 `nonlocal_goto_handler_labels' indicates that function
622 calls may traverse the arc back to this label. */
623
624 current_function_has_nonlocal_label = 1;
625 nonlocal_goto_handler_labels
626 = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);
627
628 return target;
629 }
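/* A minimal source-level sketch of the setjmp/longjmp pair expanded
   here and below (illustrative only; the identifiers are invented).
   The buffer is an array of five words and the second argument to
   __builtin_longjmp must be the constant 1, as the expanders check.  */
#if 0
static void *example_buf[5];

void
example_throw (void)
{
  __builtin_longjmp (example_buf, 1);
}

int
example_catch (void (*fn) (void))
{
  if (__builtin_setjmp (example_buf))
    return 1;                   /* Reached via __builtin_longjmp.  */
  fn ();                        /* May call example_throw.  */
  return 0;
}
#endif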
630
631 /* __builtin_longjmp is passed a pointer to an array of five words (not
632 all will be used on all machines). It operates similarly to the C
633 library function of the same name, but is more efficient. Much of
634 the code below is copied from the handling of non-local gotos.
635
636 NOTE: This is intended for use by GNAT and the exception handling
637 scheme in the compiler and will only work in the way they use
638 it. */
639
640 void
641 expand_builtin_longjmp (buf_addr, value)
642 rtx buf_addr, value;
643 {
644 rtx fp, lab, stack, insn, last;
645 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
646
647 if (setjmp_alias_set == -1)
648 setjmp_alias_set = new_alias_set ();
649
650 #ifdef POINTERS_EXTEND_UNSIGNED
651 if (GET_MODE (buf_addr) != Pmode)
652 buf_addr = convert_memory_address (Pmode, buf_addr);
653 #endif
654
655 buf_addr = force_reg (Pmode, buf_addr);
656
657 /* We used to store value in static_chain_rtx, but that fails if pointers
658 are smaller than integers. We instead require that the user must pass
659 a second argument of 1, because that is what builtin_setjmp will
660 return. This also makes EH slightly more efficient, since we are no
661 longer copying around a value that we don't care about. */
662 if (value != const1_rtx)
663 abort ();
664
665 current_function_calls_longjmp = 1;
666
667 last = get_last_insn ();
668 #ifdef HAVE_builtin_longjmp
669 if (HAVE_builtin_longjmp)
670 emit_insn (gen_builtin_longjmp (buf_addr));
671 else
672 #endif
673 {
674 fp = gen_rtx_MEM (Pmode, buf_addr);
675 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
676 GET_MODE_SIZE (Pmode)));
677
678 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
679 2 * GET_MODE_SIZE (Pmode)));
680 set_mem_alias_set (fp, setjmp_alias_set);
681 set_mem_alias_set (lab, setjmp_alias_set);
682 set_mem_alias_set (stack, setjmp_alias_set);
683
684 /* Pick up FP, label, and SP from the block and jump. This code is
685 from expand_goto in stmt.c; see there for detailed comments. */
686 #if HAVE_nonlocal_goto
687 if (HAVE_nonlocal_goto)
688 /* We have to pass a value to the nonlocal_goto pattern that will
689 get copied into the static_chain pointer, but it does not matter
690 what that value is, because builtin_setjmp does not use it. */
691 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
692 else
693 #endif
694 {
695 lab = copy_to_reg (lab);
696
697 emit_move_insn (hard_frame_pointer_rtx, fp);
698 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
699
700 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
701 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
702 emit_indirect_jump (lab);
703 }
704 }
705
706 /* Search backwards and mark the jump insn as a non-local goto.
707 Note that this precludes the use of __builtin_longjmp to a
708 __builtin_setjmp target in the same function. However, we've
709 already cautioned the user that these functions are for
710 internal exception handling use only. */
711 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
712 {
713 if (insn == last)
714 abort ();
715 if (GET_CODE (insn) == JUMP_INSN)
716 {
717 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
718 REG_NOTES (insn));
719 break;
720 }
721 else if (GET_CODE (insn) == CALL_INSN)
722 break;
723 }
724 }
725
726 /* Expand a call to __builtin_prefetch. For a target that does not support
727 data prefetch, evaluate the memory address argument in case it has side
728 effects. */
729
730 static void
731 expand_builtin_prefetch (arglist)
732 tree arglist;
733 {
734 tree arg0, arg1, arg2;
735 rtx op0, op1, op2;
736
737 if (!validate_arglist (arglist, POINTER_TYPE, 0))
738 return;
739
740 arg0 = TREE_VALUE (arglist);
741 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
742 zero (read) and argument 2 (locality) defaults to 3 (high degree of
743 locality). */
744 if (TREE_CHAIN (arglist))
745 {
746 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
747 if (TREE_CHAIN (TREE_CHAIN (arglist)))
748 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
749 else
750 arg2 = build_int_2 (3, 0);
751 }
752 else
753 {
754 arg1 = integer_zero_node;
755 arg2 = build_int_2 (3, 0);
756 }
757
758 /* Argument 0 is an address. */
759 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
760
761 /* Argument 1 (read/write flag) must be a compile-time constant int. */
762 if (TREE_CODE (arg1) != INTEGER_CST)
763 {
764 error ("second arg to `__builtin_prefetch' must be a constant");
765 arg1 = integer_zero_node;
766 }
767 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
768 /* Argument 1 must be either zero or one. */
769 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
770 {
771 warning ("invalid second arg to __builtin_prefetch; using zero");
772 op1 = const0_rtx;
773 }
774
775 /* Argument 2 (locality) must be a compile-time constant int. */
776 if (TREE_CODE (arg2) != INTEGER_CST)
777 {
778 error ("third arg to `__builtin_prefetch' must be a constant");
779 arg2 = integer_zero_node;
780 }
781 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
782 /* Argument 2 must be 0, 1, 2, or 3. */
783 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
784 {
785 warning ("invalid third arg to __builtin_prefetch; using zero");
786 op2 = const0_rtx;
787 }
788
789 #ifdef HAVE_prefetch
790 if (HAVE_prefetch)
791 {
792 if ((! (*insn_data[(int)CODE_FOR_prefetch].operand[0].predicate)
793 (op0,
794 insn_data[(int)CODE_FOR_prefetch].operand[0].mode)) ||
795 (GET_MODE(op0) != Pmode))
796 {
797 #ifdef POINTERS_EXTEND_UNSIGNED
798 if (GET_MODE(op0) != Pmode)
799 op0 = convert_memory_address (Pmode, op0);
800 #endif
801 op0 = force_reg (Pmode, op0);
802 }
803 emit_insn (gen_prefetch (op0, op1, op2));
804 }
805 else
806 #endif
807 op0 = protect_from_queue (op0, 0);
808 /* Don't do anything with direct references to volatile memory, but
809 generate code to handle other side effects. */
810 if (GET_CODE (op0) != MEM && side_effects_p (op0))
811 emit_insn (op0);
812 }
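/* A minimal sketch of a call site for the checks above (identifiers
   are invented for the example).  The second argument (read/write)
   must be a constant 0 or 1 and the third (locality) a constant 0..3;
   otherwise the code above warns and substitutes zero.  */
#if 0
extern void example_consume (double);

void
example_prefetch_walk (const double *p, int n)
{
  int i;

  for (i = 0; i < n; i++)
    {
      __builtin_prefetch (p + i + 16, 0, 3);  /* Read, high locality.  */
      example_consume (p[i]);                 /* Invented helper.  */
    }
}
#endif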
813
814 /* Get a MEM rtx for expression EXP which is the address of an operand
815 to be used in a string instruction (cmpstrsi, movstrsi, ...). */
816
817 static rtx
818 get_memory_rtx (exp)
819 tree exp;
820 {
821 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_SUM);
822 rtx mem;
823
824 #ifdef POINTERS_EXTEND_UNSIGNED
825 if (GET_MODE (addr) != Pmode)
826 addr = convert_memory_address (Pmode, addr);
827 #endif
828
829 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
830
831 /* Get an expression we can use to find the attributes to assign to MEM.
832 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
833 we can. First remove any nops. */
834 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
835 || TREE_CODE (exp) == NON_LVALUE_EXPR)
836 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
837 exp = TREE_OPERAND (exp, 0);
838
839 if (TREE_CODE (exp) == ADDR_EXPR)
840 {
841 exp = TREE_OPERAND (exp, 0);
842 set_mem_attributes (mem, exp, 0);
843 }
844 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
845 {
846 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
847 /* memcpy, memset and other builtin stringops can alias with anything. */
848 set_mem_alias_set (mem, 0);
849 }
850
851 return mem;
852 }
853 \f
854 /* Built-in functions to perform an untyped call and return. */
855
856 /* For each register that may be used for calling a function, this
857 gives a mode used to copy the register's value. VOIDmode indicates
858 the register is not used for calling a function. If the machine
859 has register windows, this gives only the outbound registers.
860 INCOMING_REGNO gives the corresponding inbound register. */
861 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
862
863 /* For each register that may be used for returning values, this gives
864 a mode used to copy the register's value. VOIDmode indicates the
865 register is not used for returning values. If the machine has
866 register windows, this gives only the outbound registers.
867 INCOMING_REGNO gives the corresponding inbound register. */
868 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
869
870 /* For each register that may be used for calling a function, this
871 gives the offset of that register into the block returned by
872 __builtin_apply_args. 0 indicates that the register is not
873 used for calling a function. */
874 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
875
876 /* Return the offset of register REGNO into the block returned by
877 __builtin_apply_args. This is not declared static, since it is
878 needed in objc-act.c. */
879
880 int
881 apply_args_register_offset (regno)
882 int regno;
883 {
884 apply_args_size ();
885
886 /* Arguments are always put in outgoing registers (in the argument
887 block) when that makes sense. */
888 #ifdef OUTGOING_REGNO
889 regno = OUTGOING_REGNO(regno);
890 #endif
891 return apply_args_reg_offset[regno];
892 }
893
894 /* Return the size required for the block returned by __builtin_apply_args,
895 and initialize apply_args_mode. */
896
897 static int
898 apply_args_size ()
899 {
900 static int size = -1;
901 int align;
902 unsigned int regno;
903 enum machine_mode mode;
904
905 /* The values computed by this function never change. */
906 if (size < 0)
907 {
908 /* The first value is the incoming arg-pointer. */
909 size = GET_MODE_SIZE (Pmode);
910
911 /* The second value is the structure value address unless this is
912 passed as an "invisible" first argument. */
913 if (struct_value_rtx)
914 size += GET_MODE_SIZE (Pmode);
915
916 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
917 if (FUNCTION_ARG_REGNO_P (regno))
918 {
919 /* Search for the proper mode for copying this register's
920 value. I'm not sure this is right, but it works so far. */
921 enum machine_mode best_mode = VOIDmode;
922
923 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
924 mode != VOIDmode;
925 mode = GET_MODE_WIDER_MODE (mode))
926 if (HARD_REGNO_MODE_OK (regno, mode)
927 && HARD_REGNO_NREGS (regno, mode) == 1)
928 best_mode = mode;
929
930 if (best_mode == VOIDmode)
931 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
932 mode != VOIDmode;
933 mode = GET_MODE_WIDER_MODE (mode))
934 if (HARD_REGNO_MODE_OK (regno, mode)
935 && have_insn_for (SET, mode))
936 best_mode = mode;
937
938 if (best_mode == VOIDmode)
939 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
940 mode != VOIDmode;
941 mode = GET_MODE_WIDER_MODE (mode))
942 if (HARD_REGNO_MODE_OK (regno, mode)
943 && have_insn_for (SET, mode))
944 best_mode = mode;
945
946 if (best_mode == VOIDmode)
947 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
948 mode != VOIDmode;
949 mode = GET_MODE_WIDER_MODE (mode))
950 if (HARD_REGNO_MODE_OK (regno, mode)
951 && have_insn_for (SET, mode))
952 best_mode = mode;
953
954 mode = best_mode;
955 if (mode == VOIDmode)
956 abort ();
957
958 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
959 if (size % align != 0)
960 size = CEIL (size, align) * align;
961 apply_args_reg_offset[regno] = size;
962 size += GET_MODE_SIZE (mode);
963 apply_args_mode[regno] = mode;
964 }
965 else
966 {
967 apply_args_mode[regno] = VOIDmode;
968 apply_args_reg_offset[regno] = 0;
969 }
970 }
971 return size;
972 }
973
974 /* Return the size required for the block returned by __builtin_apply,
975 and initialize apply_result_mode. */
976
977 static int
978 apply_result_size ()
979 {
980 static int size = -1;
981 int align, regno;
982 enum machine_mode mode;
983
984 /* The values computed by this function never change. */
985 if (size < 0)
986 {
987 size = 0;
988
989 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
990 if (FUNCTION_VALUE_REGNO_P (regno))
991 {
992 /* Search for the proper mode for copying this register's
993 value. I'm not sure this is right, but it works so far. */
994 enum machine_mode best_mode = VOIDmode;
995
996 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
997 mode != TImode;
998 mode = GET_MODE_WIDER_MODE (mode))
999 if (HARD_REGNO_MODE_OK (regno, mode))
1000 best_mode = mode;
1001
1002 if (best_mode == VOIDmode)
1003 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
1004 mode != VOIDmode;
1005 mode = GET_MODE_WIDER_MODE (mode))
1006 if (HARD_REGNO_MODE_OK (regno, mode)
1007 && have_insn_for (SET, mode))
1008 best_mode = mode;
1009
1010 if (best_mode == VOIDmode)
1011 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
1012 mode != VOIDmode;
1013 mode = GET_MODE_WIDER_MODE (mode))
1014 if (HARD_REGNO_MODE_OK (regno, mode)
1015 && have_insn_for (SET, mode))
1016 best_mode = mode;
1017
1018 if (best_mode == VOIDmode)
1019 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
1020 mode != VOIDmode;
1021 mode = GET_MODE_WIDER_MODE (mode))
1022 if (HARD_REGNO_MODE_OK (regno, mode)
1023 && have_insn_for (SET, mode))
1024 best_mode = mode;
1025
1026 mode = best_mode;
1027 if (mode == VOIDmode)
1028 abort ();
1029
1030 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1031 if (size % align != 0)
1032 size = CEIL (size, align) * align;
1033 size += GET_MODE_SIZE (mode);
1034 apply_result_mode[regno] = mode;
1035 }
1036 else
1037 apply_result_mode[regno] = VOIDmode;
1038
1039 /* Allow targets that use untyped_call and untyped_return to override
1040 the size so that machine-specific information can be stored here. */
1041 #ifdef APPLY_RESULT_SIZE
1042 size = APPLY_RESULT_SIZE;
1043 #endif
1044 }
1045 return size;
1046 }
1047
1048 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1049 /* Create a vector describing the result block RESULT. If SAVEP is true,
1050 the result block is used to save the values; otherwise it is used to
1051 restore the values. */
1052
1053 static rtx
1054 result_vector (savep, result)
1055 int savep;
1056 rtx result;
1057 {
1058 int regno, size, align, nelts;
1059 enum machine_mode mode;
1060 rtx reg, mem;
1061 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1062
1063 size = nelts = 0;
1064 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1065 if ((mode = apply_result_mode[regno]) != VOIDmode)
1066 {
1067 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1068 if (size % align != 0)
1069 size = CEIL (size, align) * align;
1070 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1071 mem = adjust_address (result, mode, size);
1072 savevec[nelts++] = (savep
1073 ? gen_rtx_SET (VOIDmode, mem, reg)
1074 : gen_rtx_SET (VOIDmode, reg, mem));
1075 size += GET_MODE_SIZE (mode);
1076 }
1077 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1078 }
1079 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1080
1081 /* Save the state required to perform an untyped call with the same
1082 arguments as were passed to the current function. */
1083
1084 static rtx
1085 expand_builtin_apply_args_1 ()
1086 {
1087 rtx registers;
1088 int size, align, regno;
1089 enum machine_mode mode;
1090
1091 /* Create a block where the arg-pointer, structure value address,
1092 and argument registers can be saved. */
1093 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1094
1095 /* Walk past the arg-pointer and structure value address. */
1096 size = GET_MODE_SIZE (Pmode);
1097 if (struct_value_rtx)
1098 size += GET_MODE_SIZE (Pmode);
1099
1100 /* Save each register used in calling a function to the block. */
1101 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1102 if ((mode = apply_args_mode[regno]) != VOIDmode)
1103 {
1104 rtx tem;
1105
1106 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1107 if (size % align != 0)
1108 size = CEIL (size, align) * align;
1109
1110 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1111
1112 emit_move_insn (adjust_address (registers, mode, size), tem);
1113 size += GET_MODE_SIZE (mode);
1114 }
1115
1116 /* Save the arg pointer to the block. */
1117 emit_move_insn (adjust_address (registers, Pmode, 0),
1118 copy_to_reg (virtual_incoming_args_rtx));
1119 size = GET_MODE_SIZE (Pmode);
1120
1121 /* Save the structure value address unless this is passed as an
1122 "invisible" first argument. */
1123 if (struct_value_incoming_rtx)
1124 {
1125 emit_move_insn (adjust_address (registers, Pmode, size),
1126 copy_to_reg (struct_value_incoming_rtx));
1127 size += GET_MODE_SIZE (Pmode);
1128 }
1129
1130 /* Return the address of the block. */
1131 return copy_addr_to_reg (XEXP (registers, 0));
1132 }
1133
1134 /* __builtin_apply_args returns a block of memory allocated on
1135 the stack into which are stored the arg pointer, structure
1136 value address, static chain, and all the registers that might
1137 possibly be used in performing a function call. The code is
1138 moved to the start of the function so the incoming values are
1139 saved. */
1140
1141 static rtx
1142 expand_builtin_apply_args ()
1143 {
1144 /* Don't do __builtin_apply_args more than once in a function.
1145 Save the result of the first call and reuse it. */
1146 if (apply_args_value != 0)
1147 return apply_args_value;
1148 {
1149 /* When this function is called, it means that registers must be
1150 saved on entry to this function. So we migrate the
1151 call to the first insn of this function. */
1152 rtx temp;
1153 rtx seq;
1154
1155 start_sequence ();
1156 temp = expand_builtin_apply_args_1 ();
1157 seq = get_insns ();
1158 end_sequence ();
1159
1160 apply_args_value = temp;
1161
1162 /* Put the insns after the NOTE that starts the function.
1163 If this is inside a start_sequence, make the outer-level insn
1164 chain current, so the code is placed at the start of the
1165 function. */
1166 push_topmost_sequence ();
1167 emit_insn_before (seq, NEXT_INSN (get_insns ()));
1168 pop_topmost_sequence ();
1169 return temp;
1170 }
1171 }
1172
1173 /* Perform an untyped call and save the state required to perform an
1174 untyped return of whatever value was returned by the given function. */
1175
1176 static rtx
1177 expand_builtin_apply (function, arguments, argsize)
1178 rtx function, arguments, argsize;
1179 {
1180 int size, align, regno;
1181 enum machine_mode mode;
1182 rtx incoming_args, result, reg, dest, src, call_insn;
1183 rtx old_stack_level = 0;
1184 rtx call_fusage = 0;
1185
1186 #ifdef POINTERS_EXTEND_UNSIGNED
1187 if (GET_MODE (arguments) != Pmode)
1188 arguments = convert_memory_address (Pmode, arguments);
1189 #endif
1190
1191 /* Create a block where the return registers can be saved. */
1192 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1193
1194 /* Fetch the arg pointer from the ARGUMENTS block. */
1195 incoming_args = gen_reg_rtx (Pmode);
1196 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1197 #ifndef STACK_GROWS_DOWNWARD
1198 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1199 incoming_args, 0, OPTAB_LIB_WIDEN);
1200 #endif
1201
1202 /* Perform postincrements before actually calling the function. */
1203 emit_queue ();
1204
1205 /* Push a new argument block and copy the arguments. Do not allow
1206 the (potential) memcpy call below to interfere with our stack
1207 manipulations. */
1208 do_pending_stack_adjust ();
1209 NO_DEFER_POP;
1210
1211 /* Save the stack with nonlocal if available. */
1212 #ifdef HAVE_save_stack_nonlocal
1213 if (HAVE_save_stack_nonlocal)
1214 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1215 else
1216 #endif
1217 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1218
1219 /* Push a block of memory onto the stack to store the memory arguments.
1220 Save the address in a register, and copy the memory arguments. ??? I
1221 haven't figured out how the calling convention macros affect this,
1222 but it's likely that the source and/or destination addresses in
1223 the block copy will need updating in machine specific ways. */
1224 dest = allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1225 dest = gen_rtx_MEM (BLKmode, dest);
1226 set_mem_align (dest, PARM_BOUNDARY);
1227 src = gen_rtx_MEM (BLKmode, incoming_args);
1228 set_mem_align (src, PARM_BOUNDARY);
1229 emit_block_move (dest, src, argsize);
1230
1231 /* Refer to the argument block. */
1232 apply_args_size ();
1233 arguments = gen_rtx_MEM (BLKmode, arguments);
1234 set_mem_align (arguments, PARM_BOUNDARY);
1235
1236 /* Walk past the arg-pointer and structure value address. */
1237 size = GET_MODE_SIZE (Pmode);
1238 if (struct_value_rtx)
1239 size += GET_MODE_SIZE (Pmode);
1240
1241 /* Restore each of the registers previously saved. Make USE insns
1242 for each of these registers for use in making the call. */
1243 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1244 if ((mode = apply_args_mode[regno]) != VOIDmode)
1245 {
1246 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1247 if (size % align != 0)
1248 size = CEIL (size, align) * align;
1249 reg = gen_rtx_REG (mode, regno);
1250 emit_move_insn (reg, adjust_address (arguments, mode, size));
1251 use_reg (&call_fusage, reg);
1252 size += GET_MODE_SIZE (mode);
1253 }
1254
1255 /* Restore the structure value address unless this is passed as an
1256 "invisible" first argument. */
1257 size = GET_MODE_SIZE (Pmode);
1258 if (struct_value_rtx)
1259 {
1260 rtx value = gen_reg_rtx (Pmode);
1261 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1262 emit_move_insn (struct_value_rtx, value);
1263 if (GET_CODE (struct_value_rtx) == REG)
1264 use_reg (&call_fusage, struct_value_rtx);
1265 size += GET_MODE_SIZE (Pmode);
1266 }
1267
1268 /* All arguments and registers used for the call are set up by now! */
1269 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0, 0);
1270
1271 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1272 and we don't want to load it into a register as an optimization,
1273 because prepare_call_address already did it if it should be done. */
1274 if (GET_CODE (function) != SYMBOL_REF)
1275 function = memory_address (FUNCTION_MODE, function);
1276
1277 /* Generate the actual call instruction and save the return value. */
1278 #ifdef HAVE_untyped_call
1279 if (HAVE_untyped_call)
1280 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1281 result, result_vector (1, result)));
1282 else
1283 #endif
1284 #ifdef HAVE_call_value
1285 if (HAVE_call_value)
1286 {
1287 rtx valreg = 0;
1288
1289 /* Locate the unique return register. It is not possible to
1290 express a call that sets more than one return register using
1291 call_value; use untyped_call for that. In fact, untyped_call
1292 only needs to save the return registers in the given block. */
1293 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1294 if ((mode = apply_result_mode[regno]) != VOIDmode)
1295 {
1296 if (valreg)
1297 abort (); /* HAVE_untyped_call required. */
1298 valreg = gen_rtx_REG (mode, regno);
1299 }
1300
1301 emit_call_insn (GEN_CALL_VALUE (valreg,
1302 gen_rtx_MEM (FUNCTION_MODE, function),
1303 const0_rtx, NULL_RTX, const0_rtx));
1304
1305 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1306 }
1307 else
1308 #endif
1309 abort ();
1310
1311 /* Find the CALL insn we just emitted. */
1312 for (call_insn = get_last_insn ();
1313 call_insn && GET_CODE (call_insn) != CALL_INSN;
1314 call_insn = PREV_INSN (call_insn))
1315 ;
1316
1317 if (! call_insn)
1318 abort ();
1319
1320 /* Put the register usage information on the CALL. If there is already
1321 some usage information, put ours at the end. */
1322 if (CALL_INSN_FUNCTION_USAGE (call_insn))
1323 {
1324 rtx link;
1325
1326 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
1327 link = XEXP (link, 1))
1328 ;
1329
1330 XEXP (link, 1) = call_fusage;
1331 }
1332 else
1333 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
1334
1335 /* Restore the stack. */
1336 #ifdef HAVE_save_stack_nonlocal
1337 if (HAVE_save_stack_nonlocal)
1338 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1339 else
1340 #endif
1341 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1342
1343 OK_DEFER_POP;
1344
1345 /* Return the address of the result block. */
1346 return copy_addr_to_reg (XEXP (result, 0));
1347 }
1348
1349 /* Perform an untyped return. */
1350
1351 static void
1352 expand_builtin_return (result)
1353 rtx result;
1354 {
1355 int size, align, regno;
1356 enum machine_mode mode;
1357 rtx reg;
1358 rtx call_fusage = 0;
1359
1360 #ifdef POINTERS_EXTEND_UNSIGNED
1361 if (GET_MODE (result) != Pmode)
1362 result = convert_memory_address (Pmode, result);
1363 #endif
1364
1365 apply_result_size ();
1366 result = gen_rtx_MEM (BLKmode, result);
1367
1368 #ifdef HAVE_untyped_return
1369 if (HAVE_untyped_return)
1370 {
1371 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1372 emit_barrier ();
1373 return;
1374 }
1375 #endif
1376
1377 /* Restore the return value and note that each value is used. */
1378 size = 0;
1379 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1380 if ((mode = apply_result_mode[regno]) != VOIDmode)
1381 {
1382 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1383 if (size % align != 0)
1384 size = CEIL (size, align) * align;
1385 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1386 emit_move_insn (reg, adjust_address (result, mode, size));
1387
1388 push_to_sequence (call_fusage);
1389 emit_insn (gen_rtx_USE (VOIDmode, reg));
1390 call_fusage = get_insns ();
1391 end_sequence ();
1392 size += GET_MODE_SIZE (mode);
1393 }
1394
1395 /* Put the USE insns before the return. */
1396 emit_insn (call_fusage);
1397
1398 /* Return whatever values were restored by jumping directly to the end
1399 of the function. */
1400 expand_null_return ();
1401 }
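/* A minimal sketch of the untyped call/return triple implemented by
   expand_builtin_apply_args, expand_builtin_apply and
   expand_builtin_return (the identifier and the 64-byte argument-block
   bound are invented for the example).  The wrapper forwards whatever
   arguments it received to FN and returns whatever FN returned,
   without knowing either signature.  */
#if 0
void
example_forward (void (*fn) ())
{
  void *args = __builtin_apply_args ();
  void *result = __builtin_apply (fn, args, 64);
  __builtin_return (result);
}
#endif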
1402
1403 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1404
1405 static enum type_class
1406 type_to_class (type)
1407 tree type;
1408 {
1409 switch (TREE_CODE (type))
1410 {
1411 case VOID_TYPE: return void_type_class;
1412 case INTEGER_TYPE: return integer_type_class;
1413 case CHAR_TYPE: return char_type_class;
1414 case ENUMERAL_TYPE: return enumeral_type_class;
1415 case BOOLEAN_TYPE: return boolean_type_class;
1416 case POINTER_TYPE: return pointer_type_class;
1417 case REFERENCE_TYPE: return reference_type_class;
1418 case OFFSET_TYPE: return offset_type_class;
1419 case REAL_TYPE: return real_type_class;
1420 case COMPLEX_TYPE: return complex_type_class;
1421 case FUNCTION_TYPE: return function_type_class;
1422 case METHOD_TYPE: return method_type_class;
1423 case RECORD_TYPE: return record_type_class;
1424 case UNION_TYPE:
1425 case QUAL_UNION_TYPE: return union_type_class;
1426 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1427 ? string_type_class : array_type_class);
1428 case SET_TYPE: return set_type_class;
1429 case FILE_TYPE: return file_type_class;
1430 case LANG_TYPE: return lang_type_class;
1431 default: return no_type_class;
1432 }
1433 }
1434
1435 /* Expand a call to __builtin_classify_type with arguments found in
1436 ARGLIST. */
1437
1438 static rtx
1439 expand_builtin_classify_type (arglist)
1440 tree arglist;
1441 {
1442 if (arglist != 0)
1443 return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
1444 return GEN_INT (no_type_class);
1445 }
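/* A minimal sketch of a use of the builtin expanded above (the macro
   name is invented).  The result is the enum type_class value that
   type_to_class maps the argument's type to; macros like this are how
   <tgmath.h>-style dispatch is usually written.  */
#if 0
#define example_is_pointer(e) \
  (__builtin_classify_type (e) == 5 /* pointer_type_class */)
#endif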
1446
1447 /* Expand expression EXP, which is a call to __builtin_constant_p. */
1448
1449 static rtx
1450 expand_builtin_constant_p (exp)
1451 tree exp;
1452 {
1453 tree arglist = TREE_OPERAND (exp, 1);
1454 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
1455 rtx tmp;
1456
1457 if (arglist == 0)
1458 return const0_rtx;
1459 arglist = TREE_VALUE (arglist);
1460
1461 /* We have taken care of the easy cases during constant folding. This
1462 case is not obvious, so emit (constant_p_rtx (ARGLIST)) and let CSE get a
1463 chance to see if it can deduce whether ARGLIST is constant. */
1464
1465 tmp = expand_expr (arglist, NULL_RTX, VOIDmode, 0);
1466 tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp);
1467 return tmp;
1468 }
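/* A minimal sketch of a typical use of __builtin_constant_p (the
   identifiers are invented).  The easy cases are handled at fold time
   by fold_builtin_constant_p; the CONSTANT_P_RTX emitted above lets
   CSE resolve the remaining ones after optimization.  */
#if 0
extern void example_memset_slow (void *, int, unsigned long);

#define example_clear(dst, n)					\
  (__builtin_constant_p (n) && (n) == 0				\
   ? (void) 0							\
   : example_memset_slow ((dst), 0, (n)))
#endif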
1469
1470 /* Expand a call to one of the builtin math functions (sin, cos, or sqrt).
1471 Return 0 if a normal call should be emitted rather than expanding the
1472 function in-line. EXP is the expression that is a call to the builtin
1473 function; if convenient, the result should be placed in TARGET.
1474 SUBTARGET may be used as the target for computing one of EXP's operands. */
1475
1476 static rtx
1477 expand_builtin_mathfn (exp, target, subtarget)
1478 tree exp;
1479 rtx target, subtarget;
1480 {
1481 optab builtin_optab;
1482 rtx op0, insns;
1483 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1484 tree arglist = TREE_OPERAND (exp, 1);
1485 enum machine_mode argmode;
1486
1487 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
1488 return 0;
1489
1490 /* Stabilize and compute the argument. */
1491 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
1492 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
1493 {
1494 exp = copy_node (exp);
1495 TREE_OPERAND (exp, 1) = arglist;
1496 /* Wrap the computation of the argument in a SAVE_EXPR. That
1497 way, if we need to expand the argument again (as in the
1498 flag_errno_math case below where we cannot directly set
1499 errno), we will not perform side-effects more than once.
1500 Note that here we're mutating the original EXP as well as the
1501 copy; that's the right thing to do in case the original EXP
1502 is expanded later. */
1503 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
1504 arglist = copy_node (arglist);
1505 }
1506 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
1507
1508 /* Make a suitable register to place result in. */
1509 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
1510
1511 emit_queue ();
1512 start_sequence ();
1513
1514 switch (DECL_FUNCTION_CODE (fndecl))
1515 {
1516 case BUILT_IN_SIN:
1517 case BUILT_IN_SINF:
1518 case BUILT_IN_SINL:
1519 builtin_optab = sin_optab; break;
1520 case BUILT_IN_COS:
1521 case BUILT_IN_COSF:
1522 case BUILT_IN_COSL:
1523 builtin_optab = cos_optab; break;
1524 case BUILT_IN_SQRT:
1525 case BUILT_IN_SQRTF:
1526 case BUILT_IN_SQRTL:
1527 builtin_optab = sqrt_optab; break;
1528 default:
1529 abort ();
1530 }
1531
1532 /* Compute into TARGET.
1533 Set TARGET to wherever the result comes back. */
1534 argmode = TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist)));
1535 target = expand_unop (argmode, builtin_optab, op0, target, 0);
1536
1537 /* If we were unable to expand via the builtin, stop the
1538 sequence (without outputting the insns) and return 0, causing
1539 a call to the library function. */
1540 if (target == 0)
1541 {
1542 end_sequence ();
1543 return 0;
1544 }
1545
1546 /* If errno must be maintained, we must set it to EDOM for NaN results. */
1547
1548 if (flag_errno_math && HONOR_NANS (argmode))
1549 {
1550 rtx lab1;
1551
1552 lab1 = gen_label_rtx ();
1553
1554 /* Test the result; if it is NaN, set errno=EDOM because
1555 the argument was not in the domain. */
1556 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1557 0, lab1);
1558
1559 #ifdef TARGET_EDOM
1560 {
1561 #ifdef GEN_ERRNO_RTX
1562 rtx errno_rtx = GEN_ERRNO_RTX;
1563 #else
1564 rtx errno_rtx
1565 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1566 #endif
1567
1568 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1569 }
1570 #else
1571 /* We can't set errno=EDOM directly; let the library call do it.
1572 Pop the arguments right away in case the call gets deleted. */
1573 NO_DEFER_POP;
1574 expand_call (exp, target, 0);
1575 OK_DEFER_POP;
1576 #endif
1577
1578 emit_label (lab1);
1579 }
1580
1581 /* Output the entire sequence. */
1582 insns = get_insns ();
1583 end_sequence ();
1584 emit_insn (insns);
1585
1586 return target;
1587 }
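/* A minimal sketch of a call site (the function name is invented).
   When errno handling is not required (e.g. -fno-math-errno or
   -ffast-math), the call below can expand directly to the target's
   sqrt pattern; with -fmath-errno the code above adds a NaN test and
   either stores EDOM into errno or re-issues the library call, so
   errno is set for a negative argument.  */
#if 0
double
example_hypotenuse (double a, double b)
{
  return __builtin_sqrt (a * a + b * b);
}
#endif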
1588
1589 /* Expand expression EXP which is a call to the strlen builtin. Return 0
1590 if we failed, in which case the caller should emit a normal call; otherwise
1591 try to get the result in TARGET, if convenient. */
1592
1593 static rtx
1594 expand_builtin_strlen (exp, target)
1595 tree exp;
1596 rtx target;
1597 {
1598 tree arglist = TREE_OPERAND (exp, 1);
1599 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
1600
1601 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
1602 return 0;
1603 else
1604 {
1605 rtx pat;
1606 tree src = TREE_VALUE (arglist);
1607
1608 int align
1609 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
1610
1611 rtx result, src_reg, char_rtx, before_strlen;
1612 enum machine_mode insn_mode = value_mode, char_mode;
1613 enum insn_code icode = CODE_FOR_nothing;
1614
1615 /* If SRC is not a pointer type, don't do this operation inline. */
1616 if (align == 0)
1617 return 0;
1618
1619 /* Bail out if we can't compute strlen in the right mode. */
1620 while (insn_mode != VOIDmode)
1621 {
1622 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
1623 if (icode != CODE_FOR_nothing)
1624 break;
1625
1626 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
1627 }
1628 if (insn_mode == VOIDmode)
1629 return 0;
1630
1631 /* Make a place to write the result of the instruction. */
1632 result = target;
1633 if (! (result != 0
1634 && GET_CODE (result) == REG
1635 && GET_MODE (result) == insn_mode
1636 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
1637 result = gen_reg_rtx (insn_mode);
1638
1639 /* Make a place to hold the source address. We will not expand
1640 the actual source until we are sure that the expansion will
1641 not fail -- there are trees that cannot be expanded twice. */
1642 src_reg = gen_reg_rtx (Pmode);
1643
1644 /* Mark the beginning of the strlen sequence so we can emit the
1645 source operand later. */
1646 before_strlen = get_last_insn();
1647
1648 char_rtx = const0_rtx;
1649 char_mode = insn_data[(int) icode].operand[2].mode;
1650 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
1651 char_mode))
1652 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
1653
1654 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
1655 char_rtx, GEN_INT (align));
1656 if (! pat)
1657 return 0;
1658 emit_insn (pat);
1659
1660 /* Now that we are assured of success, expand the source. */
1661 start_sequence ();
1662 pat = memory_address (BLKmode,
1663 expand_expr (src, src_reg, ptr_mode, EXPAND_SUM));
1664 if (pat != src_reg)
1665 emit_move_insn (src_reg, pat);
1666 pat = get_insns ();
1667 end_sequence ();
1668
1669 if (before_strlen)
1670 emit_insn_after (pat, before_strlen);
1671 else
1672 emit_insn_before (pat, get_insns ());
1673
1674 /* Return the value in the proper mode for this function. */
1675 if (GET_MODE (result) == value_mode)
1676 target = result;
1677 else if (target != 0)
1678 convert_move (target, result, 0);
1679 else
1680 target = convert_to_mode (value_mode, result, 0);
1681
1682 return target;
1683 }
1684 }
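/* A minimal sketch of a call site (the function name is invented).
   The call expands inline only when the target provides a strlenM
   pattern; otherwise the expander returns 0 and a normal call to
   strlen is emitted.  */
#if 0
unsigned long
example_length (const char *s)
{
  return __builtin_strlen (s);
}
#endif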
1685
1686 /* Expand a call to the strstr builtin. Return 0 if we failed, in which
1687 case the caller should emit a normal call; otherwise try to get the result
1688 in TARGET, if convenient (and in mode MODE if that's convenient). */
1689
1690 static rtx
1691 expand_builtin_strstr (arglist, target, mode)
1692 tree arglist;
1693 rtx target;
1694 enum machine_mode mode;
1695 {
1696 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1697 return 0;
1698 else
1699 {
1700 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
1701 tree fn;
1702 const char *p1, *p2;
1703
1704 p2 = c_getstr (s2);
1705 if (p2 == NULL)
1706 return 0;
1707
1708 p1 = c_getstr (s1);
1709 if (p1 != NULL)
1710 {
1711 const char *r = strstr (p1, p2);
1712
1713 if (r == NULL)
1714 return const0_rtx;
1715
1716 /* Return an offset into the constant string argument. */
1717 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
1718 s1, ssize_int (r - p1))),
1719 target, mode, EXPAND_NORMAL);
1720 }
1721
1722 if (p2[0] == '\0')
1723 return expand_expr (s1, target, mode, EXPAND_NORMAL);
1724
1725 if (p2[1] != '\0')
1726 return 0;
1727
1728 fn = built_in_decls[BUILT_IN_STRCHR];
1729 if (!fn)
1730 return 0;
1731
1732 /* New argument list transforming strstr(s1, s2) to
1733 strchr(s1, s2[0]). */
1734 arglist =
1735 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
1736 arglist = tree_cons (NULL_TREE, s1, arglist);
1737 return expand_expr (build_function_call_expr (fn, arglist),
1738 target, mode, EXPAND_NORMAL);
1739 }
1740 }
1741
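/* Illustrative sketch, not part of GCC itself: the source-level effect of
   expand_builtin_strstr above.  The function name and strings below are
   hypothetical.  */
#if 0   /* Example only; never compiled.  */
#include <string.h>

static const char *
example_strstr_expansion (const char *s)
{
  /* Both arguments constant: folded at compile time to an offset into
     the literal, here "haystack" + 3.  */
  const char *a = strstr ("haystack", "st");

  /* One-character needle: rewritten as strchr (s, 'q').  */
  const char *b = strstr (s, "q");

  return a != NULL ? a : b;
}
#endif
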
1742 /* Expand a call to the strchr builtin. Return 0 if we failed; the
1743 caller should emit a normal call, otherwise try to get the result
1744 in TARGET, if convenient (and in mode MODE if that's convenient). */
1745
1746 static rtx
1747 expand_builtin_strchr (arglist, target, mode)
1748 tree arglist;
1749 rtx target;
1750 enum machine_mode mode;
1751 {
1752 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
1753 return 0;
1754 else
1755 {
1756 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
1757 const char *p1;
1758
1759 if (TREE_CODE (s2) != INTEGER_CST)
1760 return 0;
1761
1762 p1 = c_getstr (s1);
1763 if (p1 != NULL)
1764 {
1765 char c;
1766 const char *r;
1767
1768 if (target_char_cast (s2, &c))
1769 return 0;
1770
1771 r = strchr (p1, c);
1772
1773 if (r == NULL)
1774 return const0_rtx;
1775
1776 /* Return an offset into the constant string argument. */
1777 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
1778 s1, ssize_int (r - p1))),
1779 target, mode, EXPAND_NORMAL);
1780 }
1781
1782 /* FIXME: Should use the strchrM optab here so that ports can optimize
1783 this. */
1784 return 0;
1785 }
1786 }
1787
1788 /* Expand a call to the strrchr builtin. Return 0 if we failed; the
1789 caller should emit a normal call, otherwise try to get the result
1790 in TARGET, if convenient (and in mode MODE if that's convenient). */
1791
1792 static rtx
1793 expand_builtin_strrchr (arglist, target, mode)
1794 tree arglist;
1795 rtx target;
1796 enum machine_mode mode;
1797 {
1798 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
1799 return 0;
1800 else
1801 {
1802 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
1803 tree fn;
1804 const char *p1;
1805
1806 if (TREE_CODE (s2) != INTEGER_CST)
1807 return 0;
1808
1809 p1 = c_getstr (s1);
1810 if (p1 != NULL)
1811 {
1812 char c;
1813 const char *r;
1814
1815 if (target_char_cast (s2, &c))
1816 return 0;
1817
1818 r = strrchr (p1, c);
1819
1820 if (r == NULL)
1821 return const0_rtx;
1822
1823 /* Return an offset into the constant string argument. */
1824 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
1825 s1, ssize_int (r - p1))),
1826 target, mode, EXPAND_NORMAL);
1827 }
1828
1829 if (! integer_zerop (s2))
1830 return 0;
1831
1832 fn = built_in_decls[BUILT_IN_STRCHR];
1833 if (!fn)
1834 return 0;
1835
1836 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
1837 return expand_expr (build_function_call_expr (fn, arglist),
1838 target, mode, EXPAND_NORMAL);
1839 }
1840 }
1841
1842 /* Expand a call to the strpbrk builtin. Return 0 if we failed; the
1843 caller should emit a normal call, otherwise try to get the result
1844 in TARGET, if convenient (and in mode MODE if that's convenient). */
1845
1846 static rtx
1847 expand_builtin_strpbrk (arglist, target, mode)
1848 tree arglist;
1849 rtx target;
1850 enum machine_mode mode;
1851 {
1852 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1853 return 0;
1854 else
1855 {
1856 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
1857 tree fn;
1858 const char *p1, *p2;
1859
1860 p2 = c_getstr (s2);
1861 if (p2 == NULL)
1862 return 0;
1863
1864 p1 = c_getstr (s1);
1865 if (p1 != NULL)
1866 {
1867 const char *r = strpbrk (p1, p2);
1868
1869 if (r == NULL)
1870 return const0_rtx;
1871
1872 /* Return an offset into the constant string argument. */
1873 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
1874 s1, ssize_int (r - p1))),
1875 target, mode, EXPAND_NORMAL);
1876 }
1877
1878 if (p2[0] == '\0')
1879 {
1880 /* strpbrk(x, "") == NULL.
1881 Evaluate and ignore the arguments in case they had
1882 side-effects. */
1883 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
1884 return const0_rtx;
1885 }
1886
1887 if (p2[1] != '\0')
1888 return 0; /* Really call strpbrk. */
1889
1890 fn = built_in_decls[BUILT_IN_STRCHR];
1891 if (!fn)
1892 return 0;
1893
1894 /* New argument list transforming strpbrk(s1, s2) to
1895 strchr(s1, s2[0]). */
1896 arglist =
1897 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
1898 arglist = tree_cons (NULL_TREE, s1, arglist);
1899 return expand_expr (build_function_call_expr (fn, arglist),
1900 target, mode, EXPAND_NORMAL);
1901 }
1902 }
1903
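/* Illustrative sketch, not part of GCC itself: the source-level effect of
   expand_builtin_strpbrk above.  Names are hypothetical.  */
#if 0   /* Example only; never compiled.  */
#include <string.h>

static const char *
example_strpbrk_expansion (const char *s)
{
  /* Empty accept set: folds to a null pointer; s is still evaluated
     for side effects.  */
  const char *a = strpbrk (s, "");

  /* One-character accept set: rewritten as strchr (s, ',').  */
  const char *b = strpbrk (s, ",");

  return a != NULL ? a : b;
}
#endif
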
1904 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
1905 bytes from constant string DATA + OFFSET and return it as target
1906 constant. */
1907
1908 static rtx
1909 builtin_memcpy_read_str (data, offset, mode)
1910 PTR data;
1911 HOST_WIDE_INT offset;
1912 enum machine_mode mode;
1913 {
1914 const char *str = (const char *) data;
1915
1916 if (offset < 0
1917 || ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
1918 > strlen (str) + 1))
1919 abort (); /* Attempt to read past the end of constant string. */
1920
1921 return c_readstr (str + offset, mode);
1922 }
1923
1924 /* Expand a call to the memcpy builtin, with arguments in ARGLIST.
1925 Return 0 if we failed; the caller should emit a normal call, otherwise
1926 try to get the result in TARGET, if convenient (and in mode MODE if
1927 that's convenient). */
1928
1929 static rtx
1930 expand_builtin_memcpy (arglist, target, mode)
1931 tree arglist;
1932 rtx target;
1933 enum machine_mode mode;
1934 {
1935 if (!validate_arglist (arglist,
1936 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
1937 return 0;
1938 else
1939 {
1940 tree dest = TREE_VALUE (arglist);
1941 tree src = TREE_VALUE (TREE_CHAIN (arglist));
1942 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
1943 const char *src_str;
1944
1945 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
1946 unsigned int dest_align
1947 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
1948 rtx dest_mem, src_mem, dest_addr, len_rtx;
1949
1950 /* If DEST is not a pointer type, call the normal function. */
1951 if (dest_align == 0)
1952 return 0;
1953
1954 /* If the LEN parameter is zero, return DEST. */
1955 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
1956 {
1957 /* Evaluate and ignore SRC in case it has side-effects. */
1958 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
1959 return expand_expr (dest, target, mode, EXPAND_NORMAL);
1960 }
1961
1962 /* If SRC is not a pointer type, don't do this
1963 operation in-line. */
1964 if (src_align == 0)
1965 return 0;
1966
1967 dest_mem = get_memory_rtx (dest);
1968 set_mem_align (dest_mem, dest_align);
1969 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
1970 src_str = c_getstr (src);
1971
1972 /* If SRC is a string constant and block move would be done
1973 by pieces, we can avoid loading the string from memory
1974 and only store the computed constants. */
1975 if (src_str
1976 && GET_CODE (len_rtx) == CONST_INT
1977 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
1978 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
1979 (PTR) src_str, dest_align))
1980 {
1981 store_by_pieces (dest_mem, INTVAL (len_rtx),
1982 builtin_memcpy_read_str,
1983 (PTR) src_str, dest_align);
1984 return force_operand (XEXP (dest_mem, 0), NULL_RTX);
1985 }
1986
1987 src_mem = get_memory_rtx (src);
1988 set_mem_align (src_mem, src_align);
1989
1990 /* Copy word part most expediently. */
1991 dest_addr = emit_block_move (dest_mem, src_mem, len_rtx);
1992
1993 if (dest_addr == 0)
1994 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
1995
1996 return dest_addr;
1997 }
1998 }
1999
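/* Illustrative sketch, not part of GCC itself: when the source is a string
   constant and the length is a small constant, expand_builtin_memcpy above
   emits the bytes directly via store_by_pieces instead of loading them from
   memory.  The function name is hypothetical.  */
#if 0   /* Example only; never compiled.  */
#include <string.h>

static void
example_memcpy_by_pieces (char *buf)
{
  /* Emitted as immediate stores of 'h', 'i' and '\0'; no block move.  */
  memcpy (buf, "hi", 3);
}
#endif
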
2000 /* Expand expression EXP, which is a call to the strcpy builtin. Return 0
2001 if we failed; the caller should emit a normal call, otherwise try to get
2002 the result in TARGET, if convenient (and in mode MODE if that's
2003 convenient). */
2004
2005 static rtx
2006 expand_builtin_strcpy (exp, target, mode)
2007 tree exp;
2008 rtx target;
2009 enum machine_mode mode;
2010 {
2011 tree arglist = TREE_OPERAND (exp, 1);
2012 tree fn, len;
2013
2014 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2015 return 0;
2016
2017 fn = built_in_decls[BUILT_IN_MEMCPY];
2018 if (!fn)
2019 return 0;
2020
2021 len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
2022 if (len == 0)
2023 return 0;
2024
2025 len = size_binop (PLUS_EXPR, len, ssize_int (1));
2026 chainon (arglist, build_tree_list (NULL_TREE, len));
2027 return expand_expr (build_function_call_expr (fn, arglist),
2028 target, mode, EXPAND_NORMAL);
2029 }
2030
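/* Illustrative sketch, not part of GCC itself: expand_builtin_strcpy above
   rewrites a strcpy whose source length is known into a memcpy that also
   copies the terminating '\0'.  The function name is hypothetical.  */
#if 0   /* Example only; never compiled.  */
#include <string.h>

static void
example_strcpy_to_memcpy (char *d)
{
  strcpy (d, "abc");      /* is expanded as ...             */
  memcpy (d, "abc", 4);   /* ... this: strlen ("abc") + 1.  */
}
#endif
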
2031 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2032 bytes from constant string DATA + OFFSET and return it as target
2033 constant. */
2034
2035 static rtx
2036 builtin_strncpy_read_str (data, offset, mode)
2037 PTR data;
2038 HOST_WIDE_INT offset;
2039 enum machine_mode mode;
2040 {
2041 const char *str = (const char *) data;
2042
2043 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
2044 return const0_rtx;
2045
2046 return c_readstr (str + offset, mode);
2047 }
2048
2049 /* Expand expression EXP, which is a call to the strncpy builtin. Return 0
2050 if we failed; the caller should emit a normal call. */
2051
2052 static rtx
2053 expand_builtin_strncpy (arglist, target, mode)
2054 tree arglist;
2055 rtx target;
2056 enum machine_mode mode;
2057 {
2058 if (!validate_arglist (arglist,
2059 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2060 return 0;
2061 else
2062 {
2063 tree slen = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
2064 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2065 tree fn;
2066
2067 /* We must be passed a constant len parameter. */
2068 if (TREE_CODE (len) != INTEGER_CST)
2069 return 0;
2070
2071 /* If the len parameter is zero, return the dst parameter. */
2072 if (integer_zerop (len))
2073 {
2074 /* Evaluate and ignore the src argument in case it has
2075 side-effects. */
2076 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
2077 VOIDmode, EXPAND_NORMAL);
2078 /* Return the dst parameter. */
2079 return expand_expr (TREE_VALUE (arglist), target, mode,
2080 EXPAND_NORMAL);
2081 }
2082
2083 /* Now, we must be passed a constant src ptr parameter. */
2084 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
2085 return 0;
2086
2087 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
2088
2089 /* We're required to pad with trailing zeros if the requested
2090 len is greater than strlen(s2)+1. In that case try to
2091 use store_by_pieces; if it fails, punt. */
2092 if (tree_int_cst_lt (slen, len))
2093 {
2094 tree dest = TREE_VALUE (arglist);
2095 unsigned int dest_align
2096 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2097 const char *p = c_getstr (TREE_VALUE (TREE_CHAIN (arglist)));
2098 rtx dest_mem;
2099
2100 if (!p || dest_align == 0 || !host_integerp (len, 1)
2101 || !can_store_by_pieces (tree_low_cst (len, 1),
2102 builtin_strncpy_read_str,
2103 (PTR) p, dest_align))
2104 return 0;
2105
2106 dest_mem = get_memory_rtx (dest);
2107 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2108 builtin_strncpy_read_str,
2109 (PTR) p, dest_align);
2110 return force_operand (XEXP (dest_mem, 0), NULL_RTX);
2111 }
2112
2113 /* OK, transform into builtin memcpy. */
2114 fn = built_in_decls[BUILT_IN_MEMCPY];
2115 if (!fn)
2116 return 0;
2117 return expand_expr (build_function_call_expr (fn, arglist),
2118 target, mode, EXPAND_NORMAL);
2119 }
2120 }
2121
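/* Illustrative sketch, not part of GCC itself: the two cases handled by
   expand_builtin_strncpy above.  The function name is hypothetical.  */
#if 0   /* Example only; never compiled.  */
#include <string.h>

static void
example_strncpy_expansion (char d[8])
{
  /* len > strlen (src) + 1: stored by pieces, including the trailing
     '\0' padding bytes strncpy requires.  */
  strncpy (d, "ab", 8);

  /* len <= strlen (src) + 1: no padding needed, so the call becomes
     memcpy (d, "abcdefg", 8).  */
  strncpy (d, "abcdefg", 8);
}
#endif
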
2122 /* Callback routine for store_by_pieces. Return a target constant
2123 made up of GET_MODE_SIZE (MODE) copies of the single character
2124 pointed to by DATA; OFFSET is ignored. */
2125
2126 static rtx
2127 builtin_memset_read_str (data, offset, mode)
2128 PTR data;
2129 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2130 enum machine_mode mode;
2131 {
2132 const char *c = (const char *) data;
2133 char *p = alloca (GET_MODE_SIZE (mode));
2134
2135 memset (p, *c, GET_MODE_SIZE (mode));
2136
2137 return c_readstr (p, mode);
2138 }
2139
2140 /* Callback routine for store_by_pieces. Return the RTL of a register
2141 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
2142 char value given in the RTL register data. For example, if mode is
2143 4 bytes wide, return the RTL for 0x01010101*data. */
2144
2145 static rtx
2146 builtin_memset_gen_str (data, offset, mode)
2147 PTR data;
2148 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2149 enum machine_mode mode;
2150 {
2151 rtx target, coeff;
2152 size_t size;
2153 char *p;
2154
2155 size = GET_MODE_SIZE (mode);
2156 if (size == 1)
2157 return (rtx) data;
2158
2159 p = alloca (size);
2160 memset (p, 1, size);
2161 coeff = c_readstr (p, mode);
2162
2163 target = convert_to_mode (mode, (rtx) data, 1);
2164 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
2165 return force_reg (mode, target);
2166 }
2167
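/* Illustrative sketch, not part of GCC itself: the coefficient trick used
   by builtin_memset_gen_str above, written at C level for a 32-bit word.
   The function name is hypothetical.  */
#if 0   /* Example only; never compiled.  */
static unsigned int
example_replicate_byte (unsigned char c)
{
  /* Multiplying by 0x01010101 copies C into every byte of the word,
     e.g. 0x5A becomes 0x5A5A5A5A.  */
  return (unsigned int) c * 0x01010101u;
}
#endif
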
2168 /* Expand expression EXP, which is a call to the memset builtin. Return 0
2169 if we failed; the caller should emit a normal call, otherwise try to get
2170 the result in TARGET, if convenient (and in mode MODE if that's
2171 convenient). */
2172
2173 static rtx
2174 expand_builtin_memset (exp, target, mode)
2175 tree exp;
2176 rtx target;
2177 enum machine_mode mode;
2178 {
2179 tree arglist = TREE_OPERAND (exp, 1);
2180
2181 if (!validate_arglist (arglist,
2182 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
2183 return 0;
2184 else
2185 {
2186 tree dest = TREE_VALUE (arglist);
2187 tree val = TREE_VALUE (TREE_CHAIN (arglist));
2188 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2189 char c;
2190
2191 unsigned int dest_align
2192 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2193 rtx dest_mem, dest_addr, len_rtx;
2194
2195 /* If DEST is not a pointer type, don't do this
2196 operation in-line. */
2197 if (dest_align == 0)
2198 return 0;
2199
2200 /* If the LEN parameter is zero, return DEST. */
2201 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2202 {
2203 /* Evaluate and ignore VAL in case it has side-effects. */
2204 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
2205 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2206 }
2207
2208 if (TREE_CODE (val) != INTEGER_CST)
2209 {
2210 rtx val_rtx;
2211
2212 if (!host_integerp (len, 1))
2213 return 0;
2214
2215 if (optimize_size && tree_low_cst (len, 1) > 1)
2216 return 0;
2217
2218 /* Assume that we can memset by pieces if we can store
2219 the coefficients by pieces (in the required modes).
2220 We can't pass builtin_memset_gen_str as that emits RTL. */
2221 c = 1;
2222 if (!can_store_by_pieces (tree_low_cst (len, 1),
2223 builtin_memset_read_str,
2224 (PTR) &c, dest_align))
2225 return 0;
2226
2227 val = fold (build1 (CONVERT_EXPR, unsigned_char_type_node, val));
2228 val_rtx = expand_expr (val, NULL_RTX, VOIDmode, 0);
2229 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
2230 val_rtx);
2231 dest_mem = get_memory_rtx (dest);
2232 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2233 builtin_memset_gen_str,
2234 (PTR) val_rtx, dest_align);
2235 return force_operand (XEXP (dest_mem, 0), NULL_RTX);
2236 }
2237
2238 if (target_char_cast (val, &c))
2239 return 0;
2240
2241 if (c)
2242 {
2243 if (!host_integerp (len, 1))
2244 return 0;
2245 if (!can_store_by_pieces (tree_low_cst (len, 1),
2246 builtin_memset_read_str, (PTR) &c,
2247 dest_align))
2248 return 0;
2249
2250 dest_mem = get_memory_rtx (dest);
2251 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2252 builtin_memset_read_str,
2253 (PTR) &c, dest_align);
2254 return force_operand (XEXP (dest_mem, 0), NULL_RTX);
2255 }
2256
2257 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2258
2259 dest_mem = get_memory_rtx (dest);
2260 set_mem_align (dest_mem, dest_align);
2261 dest_addr = clear_storage (dest_mem, len_rtx);
2262
2263 if (dest_addr == 0)
2264 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2265
2266 return dest_addr;
2267 }
2268 }
2269
2270 /* Expand expression EXP, which is a call to the bzero builtin. Return 0
2271 if we failed; the caller should emit a normal call. */
2272
2273 static rtx
2274 expand_builtin_bzero (exp)
2275 tree exp;
2276 {
2277 tree arglist = TREE_OPERAND (exp, 1);
2278 tree dest, size, newarglist;
2279 rtx result;
2280
2281 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2282 return NULL_RTX;
2283
2284 dest = TREE_VALUE (arglist);
2285 size = TREE_VALUE (TREE_CHAIN (arglist));
2286
2287 /* New argument list transforming bzero(ptr x, int y) to
2288 memset(ptr x, int 0, size_t y). This is done this way
2289 so that if it isn't expanded inline, we fall back to
2290 calling bzero instead of memset. */
2291
2292 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
2293 newarglist = tree_cons (NULL_TREE, integer_zero_node, newarglist);
2294 newarglist = tree_cons (NULL_TREE, dest, newarglist);
2295
2296 TREE_OPERAND (exp, 1) = newarglist;
2297 result = expand_builtin_memset (exp, const0_rtx, VOIDmode);
2298
2299 /* Always restore the original arguments. */
2300 TREE_OPERAND (exp, 1) = arglist;
2301
2302 return result;
2303 }
2304
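/* Illustrative sketch, not part of GCC itself: the argument rewrite done by
   expand_builtin_bzero above.  The function name is hypothetical; bzero is
   declared in <strings.h> on most Unix systems.  */
#if 0   /* Example only; never compiled.  */
#include <string.h>
#include <strings.h>

static void
example_bzero_to_memset (char *p, size_t n)
{
  bzero (p, n);       /* is expanded as ...                            */
  memset (p, 0, n);   /* ... this; if the memset expansion punts, the
                         original arguments are restored and bzero is
                         called after all.  */
}
#endif
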
2305 /* Expand expression EXP, which is a call to the memcmp or the strcmp builtin.
2306 ARGLIST is the argument list for this call. Return 0 if we failed and the
2307 caller should emit a normal call, otherwise try to get the result in
2308 TARGET, if convenient (and in mode MODE, if that's convenient). */
2309
2310 static rtx
2311 expand_builtin_memcmp (exp, arglist, target, mode)
2312 tree exp ATTRIBUTE_UNUSED;
2313 tree arglist;
2314 rtx target;
2315 enum machine_mode mode;
2316 {
2317 tree arg1, arg2, len;
2318 const char *p1, *p2;
2319
2320 if (!validate_arglist (arglist,
2321 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2322 return 0;
2323
2324 arg1 = TREE_VALUE (arglist);
2325 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2326 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2327
2328 /* If the len parameter is zero, return zero. */
2329 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2330 {
2331 /* Evaluate and ignore arg1 and arg2 in case they have
2332 side-effects. */
2333 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2334 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2335 return const0_rtx;
2336 }
2337
2338 p1 = c_getstr (arg1);
2339 p2 = c_getstr (arg2);
2340
2341 /* If all arguments are constant, and the value of len is not greater
2342 than the lengths of arg1 and arg2, evaluate at compile-time. */
2343 if (host_integerp (len, 1) && p1 && p2
2344 && compare_tree_int (len, strlen (p1) + 1) <= 0
2345 && compare_tree_int (len, strlen (p2) + 1) <= 0)
2346 {
2347 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
2348
2349 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
2350 }
2351
2352 /* If the len parameter is one, return an expression corresponding to
2353 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2354 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
2355 {
2356 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2357 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2358 tree ind1 =
2359 fold (build1 (CONVERT_EXPR, integer_type_node,
2360 build1 (INDIRECT_REF, cst_uchar_node,
2361 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2362 tree ind2 =
2363 fold (build1 (CONVERT_EXPR, integer_type_node,
2364 build1 (INDIRECT_REF, cst_uchar_node,
2365 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2366 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2367 return expand_expr (result, target, mode, EXPAND_NORMAL);
2368 }
2369
2370 #ifdef HAVE_cmpstrsi
2371 {
2372 rtx arg1_rtx, arg2_rtx, arg3_rtx;
2373 rtx result;
2374 rtx insn;
2375
2376 int arg1_align
2377 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2378 int arg2_align
2379 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2380 enum machine_mode insn_mode
2381 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
2382
2383 /* If either argument is not a pointer type, call the normal function. */
2384 if (arg1_align == 0 || arg2_align == 0)
2385 return 0;
2386
2387 /* Make a place to write the result of the instruction. */
2388 result = target;
2389 if (! (result != 0
2390 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
2391 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
2392 result = gen_reg_rtx (insn_mode);
2393
2394 arg1_rtx = get_memory_rtx (arg1);
2395 arg2_rtx = get_memory_rtx (arg2);
2396 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2397 if (!HAVE_cmpstrsi)
2398 insn = NULL_RTX;
2399 else
2400 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
2401 GEN_INT (MIN (arg1_align, arg2_align)));
2402
2403 if (insn)
2404 emit_insn (insn);
2405 else
2406 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
2407 TYPE_MODE (integer_type_node), 3,
2408 XEXP (arg1_rtx, 0), Pmode,
2409 XEXP (arg2_rtx, 0), Pmode,
2410 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
2411 TREE_UNSIGNED (sizetype)),
2412 TYPE_MODE (sizetype));
2413
2414 /* Return the value in the proper mode for this function. */
2415 mode = TYPE_MODE (TREE_TYPE (exp));
2416 if (GET_MODE (result) == mode)
2417 return result;
2418 else if (target != 0)
2419 {
2420 convert_move (target, result, 0);
2421 return target;
2422 }
2423 else
2424 return convert_to_mode (mode, result, 0);
2425 }
2426 #endif
2427
2428 return 0;
2429 }
2430
2431 /* Expand expression EXP, which is a call to the strcmp builtin. Return 0
2432 if we failed; the caller should emit a normal call, otherwise try to get
2433 the result in TARGET, if convenient. */
2434
2435 static rtx
2436 expand_builtin_strcmp (exp, target, mode)
2437 tree exp;
2438 rtx target;
2439 enum machine_mode mode;
2440 {
2441 tree arglist = TREE_OPERAND (exp, 1);
2442 tree arg1, arg2, len, len2, fn;
2443 const char *p1, *p2;
2444
2445 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2446 return 0;
2447
2448 arg1 = TREE_VALUE (arglist);
2449 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2450
2451 p1 = c_getstr (arg1);
2452 p2 = c_getstr (arg2);
2453
2454 if (p1 && p2)
2455 {
2456 const int i = strcmp (p1, p2);
2457 return (i < 0 ? constm1_rtx : (i > 0 ? const1_rtx : const0_rtx));
2458 }
2459
2460 /* If either arg is "", return an expression corresponding to
2461 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2462 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
2463 {
2464 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2465 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2466 tree ind1 =
2467 fold (build1 (CONVERT_EXPR, integer_type_node,
2468 build1 (INDIRECT_REF, cst_uchar_node,
2469 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2470 tree ind2 =
2471 fold (build1 (CONVERT_EXPR, integer_type_node,
2472 build1 (INDIRECT_REF, cst_uchar_node,
2473 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2474 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2475 return expand_expr (result, target, mode, EXPAND_NORMAL);
2476 }
2477
2478 len = c_strlen (arg1);
2479 len2 = c_strlen (arg2);
2480
2481 if (len)
2482 len = size_binop (PLUS_EXPR, ssize_int (1), len);
2483
2484 if (len2)
2485 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
2486
2487 /* If we don't have a constant length for the first, use the length
2488 of the second, if we know it. We don't require a constant for
2489 this case; some cost analysis could be done if both are available
2490 but neither is constant. For now, assume they're equally cheap
2491 unless one has side effects.
2492
2493 If both strings have constant lengths, use the smaller. This
2494 could arise if optimization results in strcpy being called with
2495 two fixed strings, or if the code was machine-generated. We should
2496 add some code to the `memcmp' handler below to deal with such
2497 situations, someday. */
2498
2499 if (!len || TREE_CODE (len) != INTEGER_CST)
2500 {
2501 if (len2 && !TREE_SIDE_EFFECTS (len2))
2502 len = len2;
2503 else if (len == 0)
2504 return 0;
2505 }
2506 else if (len2 && TREE_CODE (len2) == INTEGER_CST
2507 && tree_int_cst_lt (len2, len))
2508 len = len2;
2509
2510 /* If both arguments have side effects, we cannot optimize. */
2511 if (TREE_SIDE_EFFECTS (len))
2512 return 0;
2513
2514 fn = built_in_decls[BUILT_IN_MEMCMP];
2515 if (!fn)
2516 return 0;
2517
2518 chainon (arglist, build_tree_list (NULL_TREE, len));
2519 return expand_expr (build_function_call_expr (fn, arglist),
2520 target, mode, EXPAND_NORMAL);
2521 }
2522
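/* Illustrative sketch, not part of GCC itself: how expand_builtin_strcmp
   above falls back to memcmp when one argument has a known constant
   length.  The function name is hypothetical.  */
#if 0   /* Example only; never compiled.  */
#include <string.h>

static int
example_strcmp_to_memcmp (const char *s)
{
  /* "abc" has constant length 3, so the call is rewritten with a third
     argument of strlen ("abc") + 1.  */
  return strcmp (s, "abc");   /* becomes memcmp (s, "abc", 4) */
}
#endif
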
2523 /* Expand expression EXP, which is a call to the strncmp builtin. Return 0
2524 if we failed; the caller should emit a normal call, otherwise try to get
2525 the result in TARGET, if convenient. */
2526
2527 static rtx
2528 expand_builtin_strncmp (exp, target, mode)
2529 tree exp;
2530 rtx target;
2531 enum machine_mode mode;
2532 {
2533 tree arglist = TREE_OPERAND (exp, 1);
2534 tree fn, newarglist, len = 0;
2535 tree arg1, arg2, arg3;
2536 const char *p1, *p2;
2537
2538 if (!validate_arglist (arglist,
2539 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2540 return 0;
2541
2542 arg1 = TREE_VALUE (arglist);
2543 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2544 arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2545
2546 /* If the len parameter is zero, return zero. */
2547 if (host_integerp (arg3, 1) && tree_low_cst (arg3, 1) == 0)
2548 {
2549 /* Evaluate and ignore arg1 and arg2 in case they have
2550 side-effects. */
2551 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2552 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2553 return const0_rtx;
2554 }
2555
2556 p1 = c_getstr (arg1);
2557 p2 = c_getstr (arg2);
2558
2559 /* If all arguments are constant, evaluate at compile-time. */
2560 if (host_integerp (arg3, 1) && p1 && p2)
2561 {
2562 const int r = strncmp (p1, p2, tree_low_cst (arg3, 1));
2563 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
2564 }
2565
2566 /* If len == 1 or (either string parameter is "" and (len >= 1)),
2567 return (*(const u_char*)arg1 - *(const u_char*)arg2). */
2568 if (host_integerp (arg3, 1)
2569 && (tree_low_cst (arg3, 1) == 1
2570 || (tree_low_cst (arg3, 1) > 1
2571 && ((p1 && *p1 == '\0') || (p2 && *p2 == '\0')))))
2572 {
2573 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2574 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2575 tree ind1 =
2576 fold (build1 (CONVERT_EXPR, integer_type_node,
2577 build1 (INDIRECT_REF, cst_uchar_node,
2578 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2579 tree ind2 =
2580 fold (build1 (CONVERT_EXPR, integer_type_node,
2581 build1 (INDIRECT_REF, cst_uchar_node,
2582 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2583 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2584 return expand_expr (result, target, mode, EXPAND_NORMAL);
2585 }
2586
2587 /* If c_strlen can determine an expression for one of the string
2588 lengths, and it doesn't have side effects, then call
2589 expand_builtin_memcmp() using length MIN(strlen(string)+1, arg3). */
2590
2591 /* Perhaps one of the strings is really constant, if so prefer
2592 that constant length over the other string's length. */
2593 if (p1)
2594 len = c_strlen (arg1);
2595 else if (p2)
2596 len = c_strlen (arg2);
2597
2598 /* If we still don't have a len, try either string arg as long
2599 as they don't have side effects. */
2600 if (!len && !TREE_SIDE_EFFECTS (arg1))
2601 len = c_strlen (arg1);
2602 if (!len && !TREE_SIDE_EFFECTS (arg2))
2603 len = c_strlen (arg2);
2604 /* If we still don't have a length, punt. */
2605 if (!len)
2606 return 0;
2607
2608 fn = built_in_decls[BUILT_IN_MEMCMP];
2609 if (!fn)
2610 return 0;
2611
2612 /* Add one to the string length. */
2613 len = fold (size_binop (PLUS_EXPR, len, ssize_int (1)));
2614
2615 /* The actual new length parameter is MIN(len,arg3). */
2616 len = fold (build (MIN_EXPR, TREE_TYPE (len), len, arg3));
2617
2618 newarglist = build_tree_list (NULL_TREE, len);
2619 newarglist = tree_cons (NULL_TREE, arg2, newarglist);
2620 newarglist = tree_cons (NULL_TREE, arg1, newarglist);
2621 return expand_expr (build_function_call_expr (fn, newarglist),
2622 target, mode, EXPAND_NORMAL);
2623 }
2624
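/* Illustrative sketch, not part of GCC itself: the length handed to memcmp
   by expand_builtin_strncmp above.  The function name is hypothetical.  */
#if 0   /* Example only; never compiled.  */
#include <string.h>

static int
example_strncmp_to_memcmp (const char *s)
{
  /* The constant string gives strlen ("abc") + 1 == 4, so the memcmp
     length is MIN (4, 10) == 4.  */
  return strncmp (s, "abc", 10);   /* becomes memcmp (s, "abc", 4) */
}
#endif
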
2625 /* Expand expression EXP, which is a call to the strcat builtin.
2626 Return 0 if we failed; the caller should emit a normal call,
2627 otherwise try to get the result in TARGET, if convenient. */
2628
2629 static rtx
2630 expand_builtin_strcat (arglist, target, mode)
2631 tree arglist;
2632 rtx target;
2633 enum machine_mode mode;
2634 {
2635 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2636 return 0;
2637 else
2638 {
2639 tree dst = TREE_VALUE (arglist),
2640 src = TREE_VALUE (TREE_CHAIN (arglist));
2641 const char *p = c_getstr (src);
2642
2643 /* If the string length is zero, return the dst parameter. */
2644 if (p && *p == '\0')
2645 return expand_expr (dst, target, mode, EXPAND_NORMAL);
2646
2647 return 0;
2648 }
2649 }
2650
2651 /* Expand expression EXP, which is a call to the strncat builtin.
2652 Return 0 if we failed; the caller should emit a normal call,
2653 otherwise try to get the result in TARGET, if convenient. */
2654
2655 static rtx
2656 expand_builtin_strncat (arglist, target, mode)
2657 tree arglist;
2658 rtx target;
2659 enum machine_mode mode;
2660 {
2661 if (!validate_arglist (arglist,
2662 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2663 return 0;
2664 else
2665 {
2666 tree dst = TREE_VALUE (arglist),
2667 src = TREE_VALUE (TREE_CHAIN (arglist)),
2668 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2669 const char *p = c_getstr (src);
2670
2671 /* If the requested length is zero, or the src parameter string
2672 length is zero, return the dst parameter. */
2673 if (integer_zerop (len) || (p && *p == '\0'))
2674 {
2675 /* Evaluate and ignore the src and len parameters in case
2676 they have side-effects. */
2677 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2678 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
2679 return expand_expr (dst, target, mode, EXPAND_NORMAL);
2680 }
2681
2682 /* If the requested len is greater than or equal to the string
2683 length, call strcat. */
2684 if (TREE_CODE (len) == INTEGER_CST && p
2685 && compare_tree_int (len, strlen (p)) >= 0)
2686 {
2687 tree newarglist
2688 = tree_cons (NULL_TREE, dst, build_tree_list (NULL_TREE, src));
2689 tree fn = built_in_decls[BUILT_IN_STRCAT];
2690
2691 /* If the replacement _DECL isn't initialized, don't do the
2692 transformation. */
2693 if (!fn)
2694 return 0;
2695
2696 return expand_expr (build_function_call_expr (fn, newarglist),
2697 target, mode, EXPAND_NORMAL);
2698 }
2699 return 0;
2700 }
2701 }
2702
2703 /* Expand expression EXP, which is a call to the strspn builtin.
2704 Return 0 if we failed; the caller should emit a normal call,
2705 otherwise try to get the result in TARGET, if convenient. */
2706
2707 static rtx
2708 expand_builtin_strspn (arglist, target, mode)
2709 tree arglist;
2710 rtx target;
2711 enum machine_mode mode;
2712 {
2713 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2714 return 0;
2715 else
2716 {
2717 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2718 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
2719
2720 /* If both arguments are constants, evaluate at compile-time. */
2721 if (p1 && p2)
2722 {
2723 const size_t r = strspn (p1, p2);
2724 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
2725 }
2726
2727 /* If either argument is "", return 0. */
2728 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
2729 {
2730 /* Evaluate and ignore both arguments in case either one has
2731 side-effects. */
2732 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2733 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2734 return const0_rtx;
2735 }
2736 return 0;
2737 }
2738 }
2739
2740 /* Expand expression EXP, which is a call to the strcspn builtin.
2741 Return 0 if we failed; the caller should emit a normal call,
2742 otherwise try to get the result in TARGET, if convenient. */
2743
2744 static rtx
2745 expand_builtin_strcspn (arglist, target, mode)
2746 tree arglist;
2747 rtx target;
2748 enum machine_mode mode;
2749 {
2750 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2751 return 0;
2752 else
2753 {
2754 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2755 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
2756
2757 /* If both arguments are constants, evaluate at compile-time. */
2758 if (p1 && p2)
2759 {
2760 const size_t r = strcspn (p1, p2);
2761 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
2762 }
2763
2764 /* If the first argument is "", return 0. */
2765 if (p1 && *p1 == '\0')
2766 {
2767 /* Evaluate and ignore argument s2 in case it has
2768 side-effects. */
2769 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2770 return const0_rtx;
2771 }
2772
2773 /* If the second argument is "", return __builtin_strlen(s1). */
2774 if (p2 && *p2 == '\0')
2775 {
2776 tree newarglist = build_tree_list (NULL_TREE, s1),
2777 fn = built_in_decls[BUILT_IN_STRLEN];
2778
2779 /* If the replacement _DECL isn't initialized, don't do the
2780 transformation. */
2781 if (!fn)
2782 return 0;
2783
2784 return expand_expr (build_function_call_expr (fn, newarglist),
2785 target, mode, EXPAND_NORMAL);
2786 }
2787 return 0;
2788 }
2789 }
2790
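/* Illustrative sketch, not part of GCC itself: the special cases handled by
   expand_builtin_strcspn above.  The function name is hypothetical.  */
#if 0   /* Example only; never compiled.  */
#include <string.h>

static size_t
example_strcspn_expansion (const char *s)
{
  size_t a = strcspn ("", s);   /* folds to 0; s still evaluated */
  size_t b = strcspn (s, "");   /* becomes __builtin_strlen (s)  */
  return a + b;
}
#endif
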
2791 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
2792 if that's convenient. */
2793
2794 rtx
2795 expand_builtin_saveregs ()
2796 {
2797 rtx val, seq;
2798
2799 /* Don't do __builtin_saveregs more than once in a function.
2800 Save the result of the first call and reuse it. */
2801 if (saveregs_value != 0)
2802 return saveregs_value;
2803
2804 /* When this function is called, it means that registers must be
2805 saved on entry to this function. So we migrate the call to the
2806 first insn of this function. */
2807
2808 start_sequence ();
2809
2810 #ifdef EXPAND_BUILTIN_SAVEREGS
2811 /* Do whatever the machine needs done in this case. */
2812 val = EXPAND_BUILTIN_SAVEREGS ();
2813 #else
2814 /* ??? We used to try to build up a call to the out-of-line function,
2815 guessing about what registers needed saving etc. This became much
2816 harder with __builtin_va_start, since we don't have a tree for a
2817 call to __builtin_saveregs to fall back on. There was exactly one
2818 port (i860) that used this code, and I'm unconvinced it could actually
2819 handle the general case. So we no longer try to handle anything
2820 weird and make the backend absorb the evil. */
2821
2822 error ("__builtin_saveregs not supported by this target");
2823 val = const0_rtx;
2824 #endif
2825
2826 seq = get_insns ();
2827 end_sequence ();
2828
2829 saveregs_value = val;
2830
2831 /* Put the insns after the NOTE that starts the function. If this
2832 is inside a start_sequence, make the outer-level insn chain current, so
2833 the code is placed at the start of the function. */
2834 push_topmost_sequence ();
2835 emit_insn_after (seq, get_insns ());
2836 pop_topmost_sequence ();
2837
2838 return val;
2839 }
2840
2841 /* __builtin_args_info (N) returns word N of the arg space info
2842 for the current function. The number and meanings of words
2843 are controlled by the definition of CUMULATIVE_ARGS. */
2844
2845 static rtx
2846 expand_builtin_args_info (exp)
2847 tree exp;
2848 {
2849 tree arglist = TREE_OPERAND (exp, 1);
2850 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
2851 int *word_ptr = (int *) &current_function_args_info;
2852 #if 0
2853 /* These are used by the code below that is #if 0'ed away. */
2854 int i;
2855 tree type, elts, result;
2856 #endif
2857
2858 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
2859 abort ();
2860
2861 if (arglist != 0)
2862 {
2863 if (!host_integerp (TREE_VALUE (arglist), 0))
2864 error ("argument of `__builtin_args_info' must be constant");
2865 else
2866 {
2867 HOST_WIDE_INT wordnum = tree_low_cst (TREE_VALUE (arglist), 0);
2868
2869 if (wordnum < 0 || wordnum >= nwords)
2870 error ("argument of `__builtin_args_info' out of range");
2871 else
2872 return GEN_INT (word_ptr[wordnum]);
2873 }
2874 }
2875 else
2876 error ("missing argument in `__builtin_args_info'");
2877
2878 return const0_rtx;
2879
2880 #if 0
2881 for (i = 0; i < nwords; i++)
2882 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
2883
2884 type = build_array_type (integer_type_node,
2885 build_index_type (build_int_2 (nwords, 0)));
2886 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
2887 TREE_CONSTANT (result) = 1;
2888 TREE_STATIC (result) = 1;
2889 result = build1 (INDIRECT_REF, build_pointer_type (type), result);
2890 TREE_CONSTANT (result) = 1;
2891 return expand_expr (result, NULL_RTX, VOIDmode, 0);
2892 #endif
2893 }
2894
2895 /* Expand ARGLIST, from a call to __builtin_next_arg. */
2896
2897 static rtx
2898 expand_builtin_next_arg (arglist)
2899 tree arglist;
2900 {
2901 tree fntype = TREE_TYPE (current_function_decl);
2902
2903 if (TYPE_ARG_TYPES (fntype) == 0
2904 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2905 == void_type_node))
2906 {
2907 error ("`va_start' used in function with fixed args");
2908 return const0_rtx;
2909 }
2910
2911 if (arglist)
2912 {
2913 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
2914 tree arg = TREE_VALUE (arglist);
2915
2916 /* Strip off all nops for the sake of the comparison. This
2917 is not quite the same as STRIP_NOPS. It does more.
2918 We must also strip off INDIRECT_REF for C++ reference
2919 parameters. */
2920 while (TREE_CODE (arg) == NOP_EXPR
2921 || TREE_CODE (arg) == CONVERT_EXPR
2922 || TREE_CODE (arg) == NON_LVALUE_EXPR
2923 || TREE_CODE (arg) == INDIRECT_REF)
2924 arg = TREE_OPERAND (arg, 0);
2925 if (arg != last_parm)
2926 warning ("second parameter of `va_start' not last named argument");
2927 }
2928 else
2929 /* Evidently an out of date version of <stdarg.h>; can't validate
2930 va_start's second argument, but can still work as intended. */
2931 warning ("`__builtin_next_arg' called without an argument");
2932
2933 return expand_binop (Pmode, add_optab,
2934 current_function_internal_arg_pointer,
2935 current_function_arg_offset_rtx,
2936 NULL_RTX, 0, OPTAB_LIB_WIDEN);
2937 }
2938
2939 /* Make it easier for the backends by protecting the valist argument
2940 from multiple evaluations. */
2941
2942 static tree
2943 stabilize_va_list (valist, needs_lvalue)
2944 tree valist;
2945 int needs_lvalue;
2946 {
2947 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
2948 {
2949 if (TREE_SIDE_EFFECTS (valist))
2950 valist = save_expr (valist);
2951
2952 /* For this case, the backends will be expecting a pointer to
2953 TREE_TYPE (va_list_type_node), but it's possible we've
2954 actually been given an array (an actual va_list_type_node).
2955 So fix it. */
2956 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
2957 {
2958 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
2959 tree p2 = build_pointer_type (va_list_type_node);
2960
2961 valist = build1 (ADDR_EXPR, p2, valist);
2962 valist = fold (build1 (NOP_EXPR, p1, valist));
2963 }
2964 }
2965 else
2966 {
2967 tree pt;
2968
2969 if (! needs_lvalue)
2970 {
2971 if (! TREE_SIDE_EFFECTS (valist))
2972 return valist;
2973
2974 pt = build_pointer_type (va_list_type_node);
2975 valist = fold (build1 (ADDR_EXPR, pt, valist));
2976 TREE_SIDE_EFFECTS (valist) = 1;
2977 }
2978
2979 if (TREE_SIDE_EFFECTS (valist))
2980 valist = save_expr (valist);
2981 valist = fold (build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)),
2982 valist));
2983 }
2984
2985 return valist;
2986 }
2987
2988 /* The "standard" implementation of va_start: just assign `nextarg' to
2989 the variable. */
2990
2991 void
2992 std_expand_builtin_va_start (valist, nextarg)
2993 tree valist;
2994 rtx nextarg;
2995 {
2996 tree t;
2997
2998 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
2999 make_tree (ptr_type_node, nextarg));
3000 TREE_SIDE_EFFECTS (t) = 1;
3001
3002 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3003 }
3004
3005 /* Expand ARGLIST, from a call to __builtin_va_start. */
3006
3007 static rtx
3008 expand_builtin_va_start (arglist)
3009 tree arglist;
3010 {
3011 rtx nextarg;
3012 tree chain, valist;
3013
3014 chain = TREE_CHAIN (arglist);
3015
3016 if (TREE_CHAIN (chain))
3017 error ("too many arguments to function `va_start'");
3018
3019 nextarg = expand_builtin_next_arg (chain);
3020 valist = stabilize_va_list (TREE_VALUE (arglist), 1);
3021
3022 #ifdef EXPAND_BUILTIN_VA_START
3023 EXPAND_BUILTIN_VA_START (valist, nextarg);
3024 #else
3025 std_expand_builtin_va_start (valist, nextarg);
3026 #endif
3027
3028 return const0_rtx;
3029 }
3030
3031 /* The "standard" implementation of va_arg: read the value from the
3032 current (padded) address and increment by the (padded) size. */
3033
3034 rtx
3035 std_expand_builtin_va_arg (valist, type)
3036 tree valist, type;
3037 {
3038 tree addr_tree, t, type_size = NULL;
3039 tree align, alignm1;
3040 tree rounded_size;
3041 rtx addr;
3042
3043 /* Compute the rounded size of the type. */
3044 align = size_int (PARM_BOUNDARY / BITS_PER_UNIT);
3045 alignm1 = size_int (PARM_BOUNDARY / BITS_PER_UNIT - 1);
3046 if (type == error_mark_node
3047 || (type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type))) == NULL
3048 || TREE_OVERFLOW (type_size))
3049 rounded_size = size_zero_node;
3050 else
3051 rounded_size = fold (build (MULT_EXPR, sizetype,
3052 fold (build (TRUNC_DIV_EXPR, sizetype,
3053 fold (build (PLUS_EXPR, sizetype,
3054 type_size, alignm1)),
3055 align)),
3056 align));
3057
3058 /* Get AP. */
3059 addr_tree = valist;
3060 if (PAD_VARARGS_DOWN && ! integer_zerop (rounded_size))
3061 {
3062 /* Small args are padded downward. */
3063 addr_tree = fold (build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
3064 fold (build (COND_EXPR, sizetype,
3065 fold (build (GT_EXPR, sizetype,
3066 rounded_size,
3067 align)),
3068 size_zero_node,
3069 fold (build (MINUS_EXPR, sizetype,
3070 rounded_size,
3071 type_size))))));
3072 }
3073
3074 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3075 addr = copy_to_reg (addr);
3076
3077 /* Compute new value for AP. */
3078 if (! integer_zerop (rounded_size))
3079 {
3080 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3081 build (PLUS_EXPR, TREE_TYPE (valist), valist,
3082 rounded_size));
3083 TREE_SIDE_EFFECTS (t) = 1;
3084 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3085 }
3086
3087 return addr;
3088 }
3089
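/* Illustrative sketch, not part of GCC itself: the rounding computed by
   std_expand_builtin_va_arg above, written as plain arithmetic.  ALIGN
   stands for PARM_BOUNDARY / BITS_PER_UNIT; the function name is
   hypothetical.  */
#if 0   /* Example only; never compiled.  */
static unsigned long
example_va_arg_rounded_size (unsigned long type_size, unsigned long align)
{
  /* TYPE_SIZE_UNIT rounded up to the next multiple of ALIGN.  */
  return ((type_size + align - 1) / align) * align;
}
#endif
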
3090 /* Expand __builtin_va_arg, which is not really a builtin function, but
3091 a very special sort of operator. */
3092
3093 rtx
3094 expand_builtin_va_arg (valist, type)
3095 tree valist, type;
3096 {
3097 rtx addr, result;
3098 tree promoted_type, want_va_type, have_va_type;
3099
3100 /* Verify that valist is of the proper type. */
3101
3102 want_va_type = va_list_type_node;
3103 have_va_type = TREE_TYPE (valist);
3104 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
3105 {
3106 /* If va_list is an array type, the argument may have decayed
3107 to a pointer type, e.g. by being passed to another function.
3108 In that case, unwrap both types so that we can compare the
3109 underlying records. */
3110 if (TREE_CODE (have_va_type) == ARRAY_TYPE
3111 || TREE_CODE (have_va_type) == POINTER_TYPE)
3112 {
3113 want_va_type = TREE_TYPE (want_va_type);
3114 have_va_type = TREE_TYPE (have_va_type);
3115 }
3116 }
3117 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
3118 {
3119 error ("first argument to `va_arg' not of type `va_list'");
3120 addr = const0_rtx;
3121 }
3122
3123 /* Generate a diagnostic for requesting data of a type that cannot
3124 be passed through `...' due to type promotion at the call site. */
3125 else if ((promoted_type = (*lang_hooks.types.type_promotes_to) (type))
3126 != type)
3127 {
3128 const char *name = "<anonymous type>", *pname = 0;
3129 static bool gave_help;
3130
3131 if (TYPE_NAME (type))
3132 {
3133 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE)
3134 name = IDENTIFIER_POINTER (TYPE_NAME (type));
3135 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
3136 && DECL_NAME (TYPE_NAME (type)))
3137 name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
3138 }
3139 if (TYPE_NAME (promoted_type))
3140 {
3141 if (TREE_CODE (TYPE_NAME (promoted_type)) == IDENTIFIER_NODE)
3142 pname = IDENTIFIER_POINTER (TYPE_NAME (promoted_type));
3143 else if (TREE_CODE (TYPE_NAME (promoted_type)) == TYPE_DECL
3144 && DECL_NAME (TYPE_NAME (promoted_type)))
3145 pname = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (promoted_type)));
3146 }
3147
3148 /* Unfortunately, this is merely undefined, rather than a constraint
3149 violation, so we cannot make this an error. If this call is never
3150 executed, the program is still strictly conforming. */
3151 warning ("`%s' is promoted to `%s' when passed through `...'",
3152 name, pname);
3153 if (! gave_help)
3154 {
3155 gave_help = true;
3156 warning ("(so you should pass `%s' not `%s' to `va_arg')",
3157 pname, name);
3158 }
3159
3160 /* We can, however, treat "undefined" any way we please.
3161 Call abort to encourage the user to fix the program. */
3162 expand_builtin_trap ();
3163
3164 /* This is dead code, but go ahead and finish so that the
3165 mode of the result comes out right. */
3166 addr = const0_rtx;
3167 }
3168 else
3169 {
3170 /* Make it easier for the backends by protecting the valist argument
3171 from multiple evaluations. */
3172 valist = stabilize_va_list (valist, 0);
3173
3174 #ifdef EXPAND_BUILTIN_VA_ARG
3175 addr = EXPAND_BUILTIN_VA_ARG (valist, type);
3176 #else
3177 addr = std_expand_builtin_va_arg (valist, type);
3178 #endif
3179 }
3180
3181 #ifdef POINTERS_EXTEND_UNSIGNED
3182 if (GET_MODE (addr) != Pmode)
3183 addr = convert_memory_address (Pmode, addr);
3184 #endif
3185
3186 result = gen_rtx_MEM (TYPE_MODE (type), addr);
3187 set_mem_alias_set (result, get_varargs_alias_set ());
3188
3189 return result;
3190 }
3191
3192 /* Expand ARGLIST, from a call to __builtin_va_end. */
3193
3194 static rtx
3195 expand_builtin_va_end (arglist)
3196 tree arglist;
3197 {
3198 tree valist = TREE_VALUE (arglist);
3199
3200 #ifdef EXPAND_BUILTIN_VA_END
3201 valist = stabilize_va_list (valist, 0);
3202 EXPAND_BUILTIN_VA_END (arglist);
3203 #else
3204 /* Evaluate for side effects, if needed. I hate macros that don't
3205 do that. */
3206 if (TREE_SIDE_EFFECTS (valist))
3207 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
3208 #endif
3209
3210 return const0_rtx;
3211 }
3212
3213 /* Expand ARGLIST, from a call to __builtin_va_copy. We do this as a
3214 builtin rather than just as an assignment in stdarg.h because of the
3215 nastiness of array-type va_list types. */
3216
3217 static rtx
3218 expand_builtin_va_copy (arglist)
3219 tree arglist;
3220 {
3221 tree dst, src, t;
3222
3223 dst = TREE_VALUE (arglist);
3224 src = TREE_VALUE (TREE_CHAIN (arglist));
3225
3226 dst = stabilize_va_list (dst, 1);
3227 src = stabilize_va_list (src, 0);
3228
3229 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
3230 {
3231 t = build (MODIFY_EXPR, va_list_type_node, dst, src);
3232 TREE_SIDE_EFFECTS (t) = 1;
3233 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3234 }
3235 else
3236 {
3237 rtx dstb, srcb, size;
3238
3239 /* Evaluate to pointers. */
3240 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
3241 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
3242 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
3243 VOIDmode, EXPAND_NORMAL);
3244
3245 #ifdef POINTERS_EXTEND_UNSIGNED
3246 if (GET_MODE (dstb) != Pmode)
3247 dstb = convert_memory_address (Pmode, dstb);
3248
3249 if (GET_MODE (srcb) != Pmode)
3250 srcb = convert_memory_address (Pmode, srcb);
3251 #endif
3252
3253 /* "Dereference" to BLKmode memories. */
3254 dstb = gen_rtx_MEM (BLKmode, dstb);
3255 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
3256 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
3257 srcb = gen_rtx_MEM (BLKmode, srcb);
3258 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
3259 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
3260
3261 /* Copy. */
3262 emit_block_move (dstb, srcb, size);
3263 }
3264
3265 return const0_rtx;
3266 }
3267
3268 /* Expand a call to one of the builtin functions __builtin_frame_address or
3269 __builtin_return_address. */
3270
3271 static rtx
3272 expand_builtin_frame_address (exp)
3273 tree exp;
3274 {
3275 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3276 tree arglist = TREE_OPERAND (exp, 1);
3277
3278 /* The argument must be a nonnegative integer constant.
3279 It counts the number of frames to scan up the stack.
3280 The value is the return address saved in that frame. */
3281 if (arglist == 0)
3282 /* Warning about missing arg was already issued. */
3283 return const0_rtx;
3284 else if (! host_integerp (TREE_VALUE (arglist), 1))
3285 {
3286 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3287 error ("invalid arg to `__builtin_frame_address'");
3288 else
3289 error ("invalid arg to `__builtin_return_address'");
3290 return const0_rtx;
3291 }
3292 else
3293 {
3294 rtx tem
3295 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
3296 tree_low_cst (TREE_VALUE (arglist), 1),
3297 hard_frame_pointer_rtx);
3298
3299 /* Some ports cannot access arbitrary stack frames. */
3300 if (tem == NULL)
3301 {
3302 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3303 warning ("unsupported arg to `__builtin_frame_address'");
3304 else
3305 warning ("unsupported arg to `__builtin_return_address'");
3306 return const0_rtx;
3307 }
3308
3309 /* For __builtin_frame_address, return what we've got. */
3310 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3311 return tem;
3312
3313 if (GET_CODE (tem) != REG
3314 && ! CONSTANT_P (tem))
3315 tem = copy_to_mode_reg (Pmode, tem);
3316 return tem;
3317 }
3318 }
3319
3320 /* Expand a call to the alloca builtin, with arguments ARGLIST. Return 0 if
3321 we failed and the caller should emit a normal call, otherwise try to get
3322 the result in TARGET, if convenient. */
3323
3324 static rtx
3325 expand_builtin_alloca (arglist, target)
3326 tree arglist;
3327 rtx target;
3328 {
3329 rtx op0;
3330 rtx result;
3331
3332 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
3333 return 0;
3334
3335 /* Compute the argument. */
3336 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
3337
3338 /* Allocate the desired space. */
3339 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
3340
3341 #ifdef POINTERS_EXTEND_UNSIGNED
3342 if (GET_MODE (result) != ptr_mode)
3343 result = convert_memory_address (ptr_mode, result);
3344 #endif
3345
3346 return result;
3347 }
3348
3349 /* Expand a call to the ffs builtin. The arguments are in ARGLIST.
3350 Return 0 if a normal call should be emitted rather than expanding the
3351 function in-line. If convenient, the result should be placed in TARGET.
3352 SUBTARGET may be used as the target for computing one of EXP's operands. */
3353
3354 static rtx
3355 expand_builtin_ffs (arglist, target, subtarget)
3356 tree arglist;
3357 rtx target, subtarget;
3358 {
3359 rtx op0;
3360 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
3361 return 0;
3362
3363 /* Compute the argument. */
3364 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
3365 /* Compute ffs, into TARGET if possible.
3366 Set TARGET to wherever the result comes back. */
3367 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
3368 ffs_optab, op0, target, 1);
3369 if (target == 0)
3370 abort ();
3371 return target;
3372 }
3373
3374 /* If the string passed to fputs is a constant, transform the call into
3375 fputc, fwrite, or nothing, according to whether its length is 1, >1, or 0. */
3376
3377 static rtx
3378 expand_builtin_fputs (arglist, ignore, unlocked)
3379 tree arglist;
3380 int ignore;
3381 int unlocked;
3382 {
3383 tree len, fn;
3384 tree fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
3385 : built_in_decls[BUILT_IN_FPUTC];
3386 tree fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
3387 : built_in_decls[BUILT_IN_FWRITE];
3388
3389 /* If the return value is used, or the replacement _DECL isn't
3390 initialized, don't do the transformation. */
3391 if (!ignore || !fn_fputc || !fn_fwrite)
3392 return 0;
3393
3394 /* Verify the arguments in the original call. */
3395 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3396 return 0;
3397
3398 /* Get the length of the string passed to fputs. If the length
3399 can't be determined, punt. */
3400 if (!(len = c_strlen (TREE_VALUE (arglist)))
3401 || TREE_CODE (len) != INTEGER_CST)
3402 return 0;
3403
3404 switch (compare_tree_int (len, 1))
3405 {
3406 case -1: /* length is 0, delete the call entirely. */
3407 {
3408 /* Evaluate and ignore the argument in case it has
3409 side-effects. */
3410 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
3411 VOIDmode, EXPAND_NORMAL);
3412 return const0_rtx;
3413 }
3414 case 0: /* length is 1, call fputc. */
3415 {
3416 const char *p = c_getstr (TREE_VALUE (arglist));
3417
3418 if (p != NULL)
3419 {
3420 /* New argument list transforming fputs(string, stream) to
3421 fputc(string[0], stream). */
3422 arglist =
3423 build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
3424 arglist =
3425 tree_cons (NULL_TREE, build_int_2 (p[0], 0), arglist);
3426 fn = fn_fputc;
3427 break;
3428 }
3429 }
3430 /* FALLTHROUGH */
3431 case 1: /* length is greater than 1, call fwrite. */
3432 {
3433 tree string_arg = TREE_VALUE (arglist);
3434
3435 /* New argument list transforming fputs(string, stream) to
3436 fwrite(string, 1, len, stream). */
3437 arglist = build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
3438 arglist = tree_cons (NULL_TREE, len, arglist);
3439 arglist = tree_cons (NULL_TREE, size_one_node, arglist);
3440 arglist = tree_cons (NULL_TREE, string_arg, arglist);
3441 fn = fn_fwrite;
3442 break;
3443 }
3444 default:
3445 abort ();
3446 }
3447
3448 return expand_expr (build_function_call_expr (fn, arglist),
3449 (ignore ? const0_rtx : NULL_RTX),
3450 VOIDmode, EXPAND_NORMAL);
3451 }
3452
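/* Illustrative sketch, not part of GCC itself: the three cases handled by
   expand_builtin_fputs above when the string is constant and the return
   value is ignored.  The function name is hypothetical.  */
#if 0   /* Example only; never compiled.  */
#include <stdio.h>

static void
example_fputs_expansion (FILE *f)
{
  fputs ("", f);      /* deleted entirely; f still evaluated  */
  fputs ("x", f);     /* becomes fputc ('x', f)               */
  fputs ("xyz", f);   /* becomes fwrite ("xyz", 1, 3, f)      */
}
#endif
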
3453 /* Expand a call to __builtin_expect. We return our argument and emit a
3454 NOTE_INSN_EXPECTED_VALUE note. This is the expansion of __builtin_expect in
3455 a non-jump context. */
3456
3457 static rtx
3458 expand_builtin_expect (arglist, target)
3459 tree arglist;
3460 rtx target;
3461 {
3462 tree exp, c;
3463 rtx note, rtx_c;
3464
3465 if (arglist == NULL_TREE
3466 || TREE_CHAIN (arglist) == NULL_TREE)
3467 return const0_rtx;
3468 exp = TREE_VALUE (arglist);
3469 c = TREE_VALUE (TREE_CHAIN (arglist));
3470
3471 if (TREE_CODE (c) != INTEGER_CST)
3472 {
3473 error ("second arg to `__builtin_expect' must be a constant");
3474 c = integer_zero_node;
3475 }
3476
3477 target = expand_expr (exp, target, VOIDmode, EXPAND_NORMAL);
3478
3479 /* Don't bother with expected value notes for integral constants. */
3480 if (GET_CODE (target) != CONST_INT)
3481 {
3482 /* We do need to force this into a register so that we can be
3483 moderately sure to be able to correctly interpret the branch
3484 condition later. */
3485 target = force_reg (GET_MODE (target), target);
3486
3487 rtx_c = expand_expr (c, NULL_RTX, GET_MODE (target), EXPAND_NORMAL);
3488
3489 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
3490 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, target, rtx_c);
3491 }
3492
3493 return target;
3494 }
3495
3496 /* Like expand_builtin_expect, except do this in a jump context. This is
3497 called from do_jump if the conditional is a __builtin_expect. Return either
3498 a list of insns to emit the jump or NULL if we cannot optimize
3499 __builtin_expect. We need to optimize this at jump time so that machines
3500 like the PowerPC don't turn the test into a SCC operation, and then jump
3501 based on the test being 0/1. */
3502
3503 rtx
3504 expand_builtin_expect_jump (exp, if_false_label, if_true_label)
3505 tree exp;
3506 rtx if_false_label;
3507 rtx if_true_label;
3508 {
3509 tree arglist = TREE_OPERAND (exp, 1);
3510 tree arg0 = TREE_VALUE (arglist);
3511 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3512 rtx ret = NULL_RTX;
3513
3514 /* Only handle __builtin_expect (test, 0) and
3515 __builtin_expect (test, 1). */
3516 if (TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
3517 && (integer_zerop (arg1) || integer_onep (arg1)))
3518 {
3519 int num_jumps = 0;
3520 rtx insn;
3521
3522 /* If we fail to locate an appropriate conditional jump, we'll
3523 fall back to normal evaluation. Ensure that the expression
3524 can be re-evaluated. */
3525 switch (unsafe_for_reeval (arg0))
3526 {
3527 case 0: /* Safe. */
3528 break;
3529
3530 case 1: /* Mildly unsafe. */
3531 arg0 = unsave_expr (arg0);
3532 break;
3533
3534 case 2: /* Wildly unsafe. */
3535 return NULL_RTX;
3536 }
3537
3538 /* Expand the jump insns. */
3539 start_sequence ();
3540 do_jump (arg0, if_false_label, if_true_label);
3541 ret = get_insns ();
3542 end_sequence ();
3543
3544 /* Now that the __builtin_expect has been validated, go through and add
3545 the expected-value predictions to each of the conditional jumps. If we
3546 run into an error, just give up and generate the 'safe' code of doing
3547 an SCC operation and then doing a branch on that. */
3548 insn = ret;
3549 while (insn != NULL_RTX)
3550 {
3551 rtx next = NEXT_INSN (insn);
3552 rtx pattern;
3553
3554 if (GET_CODE (insn) == JUMP_INSN && any_condjump_p (insn)
3555 && (pattern = pc_set (insn)) != NULL_RTX)
3556 {
3557 rtx ifelse = SET_SRC (pattern);
3558 rtx label;
3559 int taken;
3560
3561 if (GET_CODE (ifelse) != IF_THEN_ELSE)
3562 goto do_next_insn;
3563
3564 if (GET_CODE (XEXP (ifelse, 1)) == LABEL_REF)
3565 {
3566 taken = 1;
3567 label = XEXP (XEXP (ifelse, 1), 0);
3568 }
3569 /* An inverted jump reverses the probabilities. */
3570 else if (GET_CODE (XEXP (ifelse, 2)) == LABEL_REF)
3571 {
3572 taken = 0;
3573 label = XEXP (XEXP (ifelse, 2), 0);
3574 }
3575 /* We shouldn't have to worry about conditional returns during
3576 the expansion stage, but handle it gracefully anyway. */
3577 else if (GET_CODE (XEXP (ifelse, 1)) == RETURN)
3578 {
3579 taken = 1;
3580 label = NULL_RTX;
3581 }
3582 /* An inverted return reverses the probabilities. */
3583 else if (GET_CODE (XEXP (ifelse, 2)) == RETURN)
3584 {
3585 taken = 0;
3586 label = NULL_RTX;
3587 }
3588 else
3589 goto do_next_insn;
3590
3591 /* If the test is expected to fail, reverse the
3592 probabilities. */
3593 if (integer_zerop (arg1))
3594 taken = 1 - taken;
3595
3596 /* If we are jumping to the false label, reverse the
3597 probabilities. */
3598 if (label == NULL_RTX)
3599 ; /* conditional return */
3600 else if (label == if_false_label)
3601 taken = 1 - taken;
3602 else if (label != if_true_label)
3603 goto do_next_insn;
3604
3605 num_jumps++;
3606 predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
3607 }
3608
3609 do_next_insn:
3610 insn = next;
3611 }
3612
3613 /* If no jumps were modified, fail and do __builtin_expect the normal
3614 way. */
3615 if (num_jumps == 0)
3616 ret = NULL_RTX;
3617 }
3618
3619 return ret;
3620 }
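
/* A worked example of the probability bookkeeping above (illustrative
   only): for

     if (__builtin_expect (x > 0, 1))
       ...

   do_jump typically emits a conditional jump to IF_FALSE_LABEL that is
   taken when the test fails.  The LABEL_REF sits in the "taken" arm of the
   IF_THEN_ELSE, so TAKEN starts at 1; ARG1 is 1, so the integer_zerop check
   does not flip it, but the label being IF_FALSE_LABEL does, and
   PRED_BUILTIN_EXPECT marks that jump as not taken -- i.e. the expected
   path falls through into the 'then' block.  */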
3621
3622 void
3623 expand_builtin_trap ()
3624 {
3625 #ifdef HAVE_trap
3626 if (HAVE_trap)
3627 emit_insn (gen_trap ());
3628 else
3629 #endif
3630 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
3631 emit_barrier ();
3632 }
3633 \f
3634 /* Expand an expression EXP that calls a built-in function,
3635 with result going to TARGET if that's convenient
3636 (and in mode MODE if that's convenient).
3637 SUBTARGET may be used as the target for computing one of EXP's operands.
3638 IGNORE is nonzero if the value is to be ignored. */
3639
3640 rtx
3641 expand_builtin (exp, target, subtarget, mode, ignore)
3642 tree exp;
3643 rtx target;
3644 rtx subtarget;
3645 enum machine_mode mode;
3646 int ignore;
3647 {
3648 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3649 tree arglist = TREE_OPERAND (exp, 1);
3650 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
3651
3652 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
3653 return (*targetm.expand_builtin) (exp, target, subtarget, mode, ignore);
3654
3655 /* When not optimizing, generate calls to library functions for a certain
3656 set of builtins. */
3657 if (!optimize && !CALLED_AS_BUILT_IN (fndecl))
3658 switch (fcode)
3659 {
3660 case BUILT_IN_SIN:
3661 case BUILT_IN_COS:
3662 case BUILT_IN_SQRT:
3663 case BUILT_IN_SQRTF:
3664 case BUILT_IN_SQRTL:
3665 case BUILT_IN_MEMSET:
3666 case BUILT_IN_MEMCPY:
3667 case BUILT_IN_MEMCMP:
3668 case BUILT_IN_BCMP:
3669 case BUILT_IN_BZERO:
3670 case BUILT_IN_INDEX:
3671 case BUILT_IN_RINDEX:
3672 case BUILT_IN_STRCHR:
3673 case BUILT_IN_STRRCHR:
3674 case BUILT_IN_STRLEN:
3675 case BUILT_IN_STRCPY:
3676 case BUILT_IN_STRNCPY:
3677 case BUILT_IN_STRNCMP:
3678 case BUILT_IN_STRSTR:
3679 case BUILT_IN_STRPBRK:
3680 case BUILT_IN_STRCAT:
3681 case BUILT_IN_STRNCAT:
3682 case BUILT_IN_STRSPN:
3683 case BUILT_IN_STRCSPN:
3684 case BUILT_IN_STRCMP:
3685 case BUILT_IN_FFS:
3686 case BUILT_IN_PUTCHAR:
3687 case BUILT_IN_PUTS:
3688 case BUILT_IN_PRINTF:
3689 case BUILT_IN_FPUTC:
3690 case BUILT_IN_FPUTS:
3691 case BUILT_IN_FWRITE:
3692 case BUILT_IN_PUTCHAR_UNLOCKED:
3693 case BUILT_IN_PUTS_UNLOCKED:
3694 case BUILT_IN_PRINTF_UNLOCKED:
3695 case BUILT_IN_FPUTC_UNLOCKED:
3696 case BUILT_IN_FPUTS_UNLOCKED:
3697 case BUILT_IN_FWRITE_UNLOCKED:
3698 return expand_call (exp, target, ignore);
3699
3700 default:
3701 break;
3702 }
3703
3704 switch (fcode)
3705 {
3706 case BUILT_IN_ABS:
3707 case BUILT_IN_LABS:
3708 case BUILT_IN_LLABS:
3709 case BUILT_IN_IMAXABS:
3710 case BUILT_IN_FABS:
3711 case BUILT_IN_FABSF:
3712 case BUILT_IN_FABSL:
3713 /* build_function_call changes these into ABS_EXPR. */
3714 abort ();
3715
3716 case BUILT_IN_CONJ:
3717 case BUILT_IN_CONJF:
3718 case BUILT_IN_CONJL:
3719 case BUILT_IN_CREAL:
3720 case BUILT_IN_CREALF:
3721 case BUILT_IN_CREALL:
3722 case BUILT_IN_CIMAG:
3723 case BUILT_IN_CIMAGF:
3724 case BUILT_IN_CIMAGL:
3725 /* expand_tree_builtin changes these into CONJ_EXPR, REALPART_EXPR
3726 and IMAGPART_EXPR. */
3727 abort ();
3728
3729 case BUILT_IN_SIN:
3730 case BUILT_IN_SINF:
3731 case BUILT_IN_SINL:
3732 case BUILT_IN_COS:
3733 case BUILT_IN_COSF:
3734 case BUILT_IN_COSL:
3735 /* Treat these like sqrt only if unsafe math optimizations are allowed,
3736 because of possible accuracy problems. */
3737 if (! flag_unsafe_math_optimizations)
3738 break;
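      /* FALLTHROUGH */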
3739 case BUILT_IN_SQRT:
3740 case BUILT_IN_SQRTF:
3741 case BUILT_IN_SQRTL:
3742 target = expand_builtin_mathfn (exp, target, subtarget);
3743 if (target)
3744 return target;
3745 break;
3746
3747 case BUILT_IN_FMOD:
3748 break;
3749
3750 case BUILT_IN_APPLY_ARGS:
3751 return expand_builtin_apply_args ();
3752
3753 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
3754 FUNCTION with a copy of the parameters described by
3755 ARGUMENTS, and ARGSIZE. It returns a block of memory
3756 allocated on the stack into which are stored all the registers
3757 that might possibly be used for returning the result of a
3758 function. ARGUMENTS is the value returned by
3759 __builtin_apply_args. ARGSIZE is the number of bytes of
3760 arguments that must be copied. ??? How should this value be
3761 computed? We'll also need a safe worst case value for varargs
3762 functions. */
3763 case BUILT_IN_APPLY:
3764 if (!validate_arglist (arglist, POINTER_TYPE,
3765 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3766 && !validate_arglist (arglist, REFERENCE_TYPE,
3767 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3768 return const0_rtx;
3769 else
3770 {
3771 int i;
3772 tree t;
3773 rtx ops[3];
3774
3775 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
3776 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
3777
3778 return expand_builtin_apply (ops[0], ops[1], ops[2]);
3779 }
3780
3781 /* __builtin_return (RESULT) causes the function to return the
3782 value described by RESULT. RESULT is address of the block of
3783 memory returned by __builtin_apply. */
3784 case BUILT_IN_RETURN:
3785 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
3786 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
3787 NULL_RTX, VOIDmode, 0));
3788 return const0_rtx;
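
    /* Illustrative use of the three builtins handled above (example user
       code, not part of GCC's sources): forwarding every argument of the
       current function to another function and returning its result
       untouched:

         void *args = __builtin_apply_args ();
         void *result = __builtin_apply ((void (*) ()) other_fn, args, 64);
         __builtin_return (result);

       OTHER_FN and the argument-block size 64 are made-up values; as the
       comment above notes, there is no portable way to compute a correct
       worst-case ARGSIZE.  */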
3789
3790 case BUILT_IN_SAVEREGS:
3791 return expand_builtin_saveregs ();
3792
3793 case BUILT_IN_ARGS_INFO:
3794 return expand_builtin_args_info (exp);
3795
3796 /* Return the address of the first anonymous stack arg. */
3797 case BUILT_IN_NEXT_ARG:
3798 return expand_builtin_next_arg (arglist);
3799
3800 case BUILT_IN_CLASSIFY_TYPE:
3801 return expand_builtin_classify_type (arglist);
3802
3803 case BUILT_IN_CONSTANT_P:
3804 return expand_builtin_constant_p (exp);
3805
3806 case BUILT_IN_FRAME_ADDRESS:
3807 case BUILT_IN_RETURN_ADDRESS:
3808 return expand_builtin_frame_address (exp);
3809
3810 /* Return the address of the area where the structure is returned,
3811 or 0 otherwise. */
3812 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
3813 if (arglist != 0
3814 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
3815 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
3816 return const0_rtx;
3817 else
3818 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
3819
3820 case BUILT_IN_ALLOCA:
3821 target = expand_builtin_alloca (arglist, target);
3822 if (target)
3823 return target;
3824 break;
3825
3826 case BUILT_IN_FFS:
3827 target = expand_builtin_ffs (arglist, target, subtarget);
3828 if (target)
3829 return target;
3830 break;
3831
3832 case BUILT_IN_STRLEN:
3833 target = expand_builtin_strlen (exp, target);
3834 if (target)
3835 return target;
3836 break;
3837
3838 case BUILT_IN_STRCPY:
3839 target = expand_builtin_strcpy (exp, target, mode);
3840 if (target)
3841 return target;
3842 break;
3843
3844 case BUILT_IN_STRNCPY:
3845 target = expand_builtin_strncpy (arglist, target, mode);
3846 if (target)
3847 return target;
3848 break;
3849
3850 case BUILT_IN_STRCAT:
3851 target = expand_builtin_strcat (arglist, target, mode);
3852 if (target)
3853 return target;
3854 break;
3855
3856 case BUILT_IN_STRNCAT:
3857 target = expand_builtin_strncat (arglist, target, mode);
3858 if (target)
3859 return target;
3860 break;
3861
3862 case BUILT_IN_STRSPN:
3863 target = expand_builtin_strspn (arglist, target, mode);
3864 if (target)
3865 return target;
3866 break;
3867
3868 case BUILT_IN_STRCSPN:
3869 target = expand_builtin_strcspn (arglist, target, mode);
3870 if (target)
3871 return target;
3872 break;
3873
3874 case BUILT_IN_STRSTR:
3875 target = expand_builtin_strstr (arglist, target, mode);
3876 if (target)
3877 return target;
3878 break;
3879
3880 case BUILT_IN_STRPBRK:
3881 target = expand_builtin_strpbrk (arglist, target, mode);
3882 if (target)
3883 return target;
3884 break;
3885
3886 case BUILT_IN_INDEX:
3887 case BUILT_IN_STRCHR:
3888 target = expand_builtin_strchr (arglist, target, mode);
3889 if (target)
3890 return target;
3891 break;
3892
3893 case BUILT_IN_RINDEX:
3894 case BUILT_IN_STRRCHR:
3895 target = expand_builtin_strrchr (arglist, target, mode);
3896 if (target)
3897 return target;
3898 break;
3899
3900 case BUILT_IN_MEMCPY:
3901 target = expand_builtin_memcpy (arglist, target, mode);
3902 if (target)
3903 return target;
3904 break;
3905
3906 case BUILT_IN_MEMSET:
3907 target = expand_builtin_memset (exp, target, mode);
3908 if (target)
3909 return target;
3910 break;
3911
3912 case BUILT_IN_BZERO:
3913 target = expand_builtin_bzero (exp);
3914 if (target)
3915 return target;
3916 break;
3917
3918 case BUILT_IN_STRCMP:
3919 target = expand_builtin_strcmp (exp, target, mode);
3920 if (target)
3921 return target;
3922 break;
3923
3924 case BUILT_IN_STRNCMP:
3925 target = expand_builtin_strncmp (exp, target, mode);
3926 if (target)
3927 return target;
3928 break;
3929
3930 case BUILT_IN_BCMP:
3931 case BUILT_IN_MEMCMP:
3932 target = expand_builtin_memcmp (exp, arglist, target, mode);
3933 if (target)
3934 return target;
3935 break;
3936
3937 case BUILT_IN_SETJMP:
3938 target = expand_builtin_setjmp (arglist, target);
3939 if (target)
3940 return target;
3941 break;
3942
3943 /* __builtin_longjmp is passed a pointer to an array of five words.
3944 It's similar to the C library longjmp function but works with
3945 __builtin_setjmp above. */
3946 case BUILT_IN_LONGJMP:
3947 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3948 break;
3949 else
3950 {
3951 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
3952 VOIDmode, 0);
3953 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
3954 NULL_RTX, VOIDmode, 0);
3955
3956 if (value != const1_rtx)
3957 {
3958 error ("__builtin_longjmp second argument must be 1");
3959 return const0_rtx;
3960 }
3961
3962 expand_builtin_longjmp (buf_addr, value);
3963 return const0_rtx;
3964 }
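
    /* Illustrative pairing of the two builtins (example user code, not part
       of GCC's sources):

         void *jmpbuf[5];                    -- the five-word buffer noted above
         if (__builtin_setjmp (jmpbuf) == 0)
           do_work ();                       -- may call __builtin_longjmp (jmpbuf, 1)

       DO_WORK is a made-up name.  The second argument to __builtin_longjmp
       must be the literal 1, as enforced by the check above.  */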
3965
3966 case BUILT_IN_TRAP:
3967 expand_builtin_trap ();
3968 return const0_rtx;
3969
3970 case BUILT_IN_PUTCHAR:
3971 case BUILT_IN_PUTS:
3972 case BUILT_IN_FPUTC:
3973 case BUILT_IN_FWRITE:
3974 case BUILT_IN_PUTCHAR_UNLOCKED:
3975 case BUILT_IN_PUTS_UNLOCKED:
3976 case BUILT_IN_FPUTC_UNLOCKED:
3977 case BUILT_IN_FWRITE_UNLOCKED:
3978 break;
3979 case BUILT_IN_FPUTS:
3980 target = expand_builtin_fputs (arglist, ignore, /*unlocked=*/ 0);
3981 if (target)
3982 return target;
3983 break;
3984 case BUILT_IN_FPUTS_UNLOCKED:
3985 target = expand_builtin_fputs (arglist, ignore, /*unlocked=*/ 1);
3986 if (target)
3987 return target;
3988 break;
3989
3990 /* Various hooks for the DWARF 2 __throw routine. */
3991 case BUILT_IN_UNWIND_INIT:
3992 expand_builtin_unwind_init ();
3993 return const0_rtx;
3994 case BUILT_IN_DWARF_CFA:
3995 return virtual_cfa_rtx;
3996 #ifdef DWARF2_UNWIND_INFO
3997 case BUILT_IN_DWARF_FP_REGNUM:
3998 return expand_builtin_dwarf_fp_regnum ();
3999 case BUILT_IN_INIT_DWARF_REG_SIZES:
4000 expand_builtin_init_dwarf_reg_sizes (TREE_VALUE (arglist));
4001 return const0_rtx;
4002 #endif
4003 case BUILT_IN_FROB_RETURN_ADDR:
4004 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
4005 case BUILT_IN_EXTRACT_RETURN_ADDR:
4006 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
4007 case BUILT_IN_EH_RETURN:
4008 expand_builtin_eh_return (TREE_VALUE (arglist),
4009 TREE_VALUE (TREE_CHAIN (arglist)));
4010 return const0_rtx;
4011 #ifdef EH_RETURN_DATA_REGNO
4012 case BUILT_IN_EH_RETURN_DATA_REGNO:
4013 return expand_builtin_eh_return_data_regno (arglist);
4014 #endif
4015 case BUILT_IN_VA_START:
4016 case BUILT_IN_STDARG_START:
4017 return expand_builtin_va_start (arglist);
4018 case BUILT_IN_VA_END:
4019 return expand_builtin_va_end (arglist);
4020 case BUILT_IN_VA_COPY:
4021 return expand_builtin_va_copy (arglist);
4022 case BUILT_IN_EXPECT:
4023 return expand_builtin_expect (arglist, target);
4024 case BUILT_IN_PREFETCH:
4025 expand_builtin_prefetch (arglist);
4026 return const0_rtx;
4027
4028
4029 default: /* Report the builtin as unsupported, then fall through to a library call. */
4030 error ("built-in function `%s' not currently supported",
4031 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
4032 }
4033
4034 /* The switch statement above can drop through to cause the function
4035 to be called normally. */
4036 return expand_call (exp, target, ignore);
4037 }
4038
4039 /* Fold a call to __builtin_constant_p, if we know it will evaluate to a
4040 constant. ARGLIST is the argument list of the call. */
4041
4042 static tree
4043 fold_builtin_constant_p (arglist)
4044 tree arglist;
4045 {
4046 if (arglist == 0)
4047 return 0;
4048
4049 arglist = TREE_VALUE (arglist);
4050
4051 /* We return 1 for a numeric type that's known to be a constant
4052 value at compile-time or for an aggregate type that's a
4053 literal constant. */
4054 STRIP_NOPS (arglist);
4055
4056 /* If we know this is a constant, return the constant one. */
4057 if (TREE_CODE_CLASS (TREE_CODE (arglist)) == 'c'
4058 || (TREE_CODE (arglist) == CONSTRUCTOR
4059 && TREE_CONSTANT (arglist))
4060 || (TREE_CODE (arglist) == ADDR_EXPR
4061 && TREE_CODE (TREE_OPERAND (arglist, 0)) == STRING_CST))
4062 return integer_one_node;
4063
4064 /* If we aren't going to be running CSE or this expression
4065 has side effects, show we don't know it to be a constant.
4066 Likewise if it's a pointer or aggregate type since in those
4067 cases we only want literals, since those are only optimized
4068 when generating RTL, not later.
4069 And finally, if we are compiling an initializer, not code, we
4070 need to return a definite result now; there's not going to be any
4071 more optimization done. */
4072 if (TREE_SIDE_EFFECTS (arglist) || cse_not_expected
4073 || AGGREGATE_TYPE_P (TREE_TYPE (arglist))
4074 || POINTER_TYPE_P (TREE_TYPE (arglist))
4075 || cfun == 0)
4076 return integer_zero_node;
4077
4078 return 0;
4079 }
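
/* Illustrative results of the folding above (example expressions, not part
   of GCC's sources):

     __builtin_constant_p (42)     -- folds to 1 (constant class 'c')
     __builtin_constant_p ("abc")  -- folds to 1 (ADDR_EXPR of a STRING_CST)
     __builtin_constant_p (i++)    -- folds to 0 (the argument has side effects)
     __builtin_constant_p (i)      -- left alone until CSE is no longer
                                      expected, then folds to 0  */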
4080
4081 /* Fold a call to __builtin_classify_type. */
4082
4083 static tree
4084 fold_builtin_classify_type (arglist)
4085 tree arglist;
4086 {
4087 if (arglist == 0)
4088 return build_int_2 (no_type_class, 0);
4089
4090 return build_int_2 (type_to_class (TREE_TYPE (TREE_VALUE (arglist))), 0);
4091 }
4092
4093 /* Used by constant folding to eliminate some builtin calls early. EXP is
4094 the CALL_EXPR of a call to a builtin function. */
4095
4096 tree
4097 fold_builtin (exp)
4098 tree exp;
4099 {
4100 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4101 tree arglist = TREE_OPERAND (exp, 1);
4102 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4103
4104 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
4105 return 0;
4106
4107 switch (fcode)
4108 {
4109 case BUILT_IN_CONSTANT_P:
4110 return fold_builtin_constant_p (arglist);
4111
4112 case BUILT_IN_CLASSIFY_TYPE:
4113 return fold_builtin_classify_type (arglist);
4114
4115 case BUILT_IN_STRLEN:
4116 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4117 {
4118 tree len = c_strlen (TREE_VALUE (arglist));
4119 if (len != 0)
4120 return len;
4121 }
4122 break;
4123
4124 default:
4125 break;
4126 }
4127
4128 return 0;
4129 }
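
/* For example, with the folding above a call such as

     strlen ("hello")

   is replaced at fold time by the INTEGER_CST 5 computed by c_strlen, while
   strlen (s) with a non-constant S is left for the expanders.  (Illustrative
   example, not part of GCC's sources.)  */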
4130
4131 static tree
4132 build_function_call_expr (fn, arglist)
4133 tree fn, arglist;
4134 {
4135 tree call_expr;
4136
4137 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
4138 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
4139 call_expr, arglist);
4140 TREE_SIDE_EFFECTS (call_expr) = 1;
4141 return fold (call_expr);
4142 }
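
/* Descriptive note: build_function_call_expr is how expanders such as
   expand_builtin_fputs above materialize their replacement calls -- the
   fputc or fwrite FNDECL and the rebuilt ARGLIST are wrapped into a
   CALL_EXPR and handed back to expand_expr.  */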
4143
4144 /* This function validates the types of a function call argument list
4145 represented as a tree chain of parameters against a specified list
4146 of tree_codes. If the last specifier is a 0, that represents an
4147 ellipsis; otherwise the last specifier must be a VOID_TYPE. */
4148
4149 static int
4150 validate_arglist VPARAMS ((tree arglist, ...))
4151 {
4152 enum tree_code code;
4153 int res = 0;
4154
4155 VA_OPEN (ap, arglist);
4156 VA_FIXEDARG (ap, tree, arglist);
4157
4158 do {
4159 code = va_arg (ap, enum tree_code);
4160 switch (code)
4161 {
4162 case 0:
4163 /* This signifies an ellipsis; any further arguments are all OK. */
4164 res = 1;
4165 goto end;
4166 case VOID_TYPE:
4167 /* This signifies an endlink: if no arguments remain, return
4168 true; otherwise return false. */
4169 res = arglist == 0;
4170 goto end;
4171 default:
4172 /* If no parameters remain or the parameter's code does not
4173 match the specified code, return false. Otherwise continue
4174 checking any remaining arguments. */
4175 if (arglist == 0 || code != TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))))
4176 goto end;
4177 break;
4178 }
4179 arglist = TREE_CHAIN (arglist);
4180 } while (1);
4181
4182 /* We need gotos here since we can only have one VA_CLOSE in a
4183 function. */
4184 end: ;
4185 VA_CLOSE (ap);
4186
4187 return res;
4188 }
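
/* Illustrative calls, following the patterns used earlier in this file:

     validate_arglist (arglist, POINTER_TYPE, VOID_TYPE)
       -- exactly one pointer argument, as for __builtin_return.
     validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
       -- a pointer then an integer, as for __builtin_longjmp.
     validate_arglist (arglist, POINTER_TYPE, 0)
       -- a pointer followed by anything; the trailing 0 is the ellipsis
          marker (this last form is a sketch of the convention, not a call
          copied from above).  */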
4189
4190 /* Default version of target-specific builtin setup that does nothing. */
4191
4192 void
4193 default_init_builtins ()
4194 {
4195 }
4196
4197 /* Default target-specific builtin expander that does nothing. */
4198
4199 rtx
4200 default_expand_builtin (exp, target, subtarget, mode, ignore)
4201 tree exp ATTRIBUTE_UNUSED;
4202 rtx target ATTRIBUTE_UNUSED;
4203 rtx subtarget ATTRIBUTE_UNUSED;
4204 enum machine_mode mode ATTRIBUTE_UNUSED;
4205 int ignore ATTRIBUTE_UNUSED;
4206 {
4207 return NULL_RTX;
4208 }