1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "real.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-config.h"
34 #include "expr.h"
35 #include "optabs.h"
36 #include "libfuncs.h"
37 #include "recog.h"
38 #include "output.h"
39 #include "typeclass.h"
40 #include "toplev.h"
41 #include "predict.h"
42 #include "tm_p.h"
43 #include "target.h"
44 #include "langhooks.h"
45
46 #define CALLED_AS_BUILT_IN(NODE) \
47 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
48
49 /* Register mappings for target machines without register windows. */
50 #ifndef INCOMING_REGNO
51 #define INCOMING_REGNO(OUT) (OUT)
52 #endif
53 #ifndef OUTGOING_REGNO
54 #define OUTGOING_REGNO(IN) (IN)
55 #endif
56
57 #ifndef PAD_VARARGS_DOWN
58 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
59 #endif
60
61 /* Define the names of the builtin function types and codes. */
62 const char *const built_in_class_names[4]
63 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
64
65 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT) STRINGX(X),
66 const char *const built_in_names[(int) END_BUILTINS] =
67 {
68 #include "builtins.def"
69 };
70 #undef DEF_BUILTIN
71
 72 /* Set up an array of _DECL trees, making sure each element is
 73 initialized to NULL_TREE. */
74 tree built_in_decls[(int) END_BUILTINS];
75
76 static int get_pointer_alignment PARAMS ((tree, unsigned int));
77 static tree c_strlen PARAMS ((tree));
78 static const char *c_getstr PARAMS ((tree));
79 static rtx c_readstr PARAMS ((const char *,
80 enum machine_mode));
81 static int target_char_cast PARAMS ((tree, char *));
82 static rtx get_memory_rtx PARAMS ((tree));
83 static int apply_args_size PARAMS ((void));
84 static int apply_result_size PARAMS ((void));
85 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
86 static rtx result_vector PARAMS ((int, rtx));
87 #endif
88 static rtx expand_builtin_setjmp PARAMS ((tree, rtx));
89 static void expand_builtin_prefetch PARAMS ((tree));
90 static rtx expand_builtin_apply_args PARAMS ((void));
91 static rtx expand_builtin_apply_args_1 PARAMS ((void));
92 static rtx expand_builtin_apply PARAMS ((rtx, rtx, rtx));
93 static void expand_builtin_return PARAMS ((rtx));
94 static enum type_class type_to_class PARAMS ((tree));
95 static rtx expand_builtin_classify_type PARAMS ((tree));
96 static rtx expand_builtin_mathfn PARAMS ((tree, rtx, rtx));
97 static rtx expand_builtin_constant_p PARAMS ((tree));
98 static rtx expand_builtin_args_info PARAMS ((tree));
99 static rtx expand_builtin_next_arg PARAMS ((tree));
100 static rtx expand_builtin_va_start PARAMS ((tree));
101 static rtx expand_builtin_va_end PARAMS ((tree));
102 static rtx expand_builtin_va_copy PARAMS ((tree));
103 static rtx expand_builtin_memcmp PARAMS ((tree, tree, rtx,
104 enum machine_mode));
105 static rtx expand_builtin_strcmp PARAMS ((tree, rtx,
106 enum machine_mode));
107 static rtx expand_builtin_strncmp PARAMS ((tree, rtx,
108 enum machine_mode));
109 static rtx builtin_memcpy_read_str PARAMS ((PTR, HOST_WIDE_INT,
110 enum machine_mode));
111 static rtx expand_builtin_strcat PARAMS ((tree, rtx,
112 enum machine_mode));
113 static rtx expand_builtin_strncat PARAMS ((tree, rtx,
114 enum machine_mode));
115 static rtx expand_builtin_strspn PARAMS ((tree, rtx,
116 enum machine_mode));
117 static rtx expand_builtin_strcspn PARAMS ((tree, rtx,
118 enum machine_mode));
119 static rtx expand_builtin_memcpy PARAMS ((tree, rtx,
120 enum machine_mode));
121 static rtx expand_builtin_strcpy PARAMS ((tree, rtx,
122 enum machine_mode));
123 static rtx builtin_strncpy_read_str PARAMS ((PTR, HOST_WIDE_INT,
124 enum machine_mode));
125 static rtx expand_builtin_strncpy PARAMS ((tree, rtx,
126 enum machine_mode));
127 static rtx builtin_memset_read_str PARAMS ((PTR, HOST_WIDE_INT,
128 enum machine_mode));
129 static rtx builtin_memset_gen_str PARAMS ((PTR, HOST_WIDE_INT,
130 enum machine_mode));
131 static rtx expand_builtin_memset PARAMS ((tree, rtx,
132 enum machine_mode));
133 static rtx expand_builtin_bzero PARAMS ((tree));
134 static rtx expand_builtin_strlen PARAMS ((tree, rtx));
135 static rtx expand_builtin_strstr PARAMS ((tree, rtx,
136 enum machine_mode));
137 static rtx expand_builtin_strpbrk PARAMS ((tree, rtx,
138 enum machine_mode));
139 static rtx expand_builtin_strchr PARAMS ((tree, rtx,
140 enum machine_mode));
141 static rtx expand_builtin_strrchr PARAMS ((tree, rtx,
142 enum machine_mode));
143 static rtx expand_builtin_alloca PARAMS ((tree, rtx));
144 static rtx expand_builtin_ffs PARAMS ((tree, rtx, rtx));
145 static rtx expand_builtin_frame_address PARAMS ((tree));
146 static rtx expand_builtin_fputs PARAMS ((tree, int, int));
147 static tree stabilize_va_list PARAMS ((tree, int));
148 static rtx expand_builtin_expect PARAMS ((tree, rtx));
149 static tree fold_builtin_constant_p PARAMS ((tree));
150 static tree fold_builtin_classify_type PARAMS ((tree));
151 static tree build_function_call_expr PARAMS ((tree, tree));
152 static int validate_arglist PARAMS ((tree, ...));
153
154 /* Return the alignment in bits of EXP, a pointer valued expression.
155 But don't return more than MAX_ALIGN no matter what.
156 The alignment returned is, by default, the alignment of the thing that
157 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
158
159 Otherwise, look at the expression to see if we can do better, i.e., if the
160 expression is actually pointing at an object whose alignment is tighter. */
161
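/* Editor's illustration (not part of the original source): for a
   declaration with a known over-alignment the ADDR_EXPR case below
   recovers DECL_ALIGN, while a variable offset cannot improve on the
   alignment implied by the pointer's type:

     static int x __attribute__ ((aligned (32)));

     &x           =>  256 bits (DECL_ALIGN of x, capped at MAX_ALIGN)
     (char *) &x  =>  the same; the cast is stripped by the NOP_EXPR case
     p + i        =>  only TYPE_ALIGN of *p, when i is not a constant  */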
162 static int
163 get_pointer_alignment (exp, max_align)
164 tree exp;
165 unsigned int max_align;
166 {
167 unsigned int align, inner;
168
169 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
170 return 0;
171
172 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
173 align = MIN (align, max_align);
174
175 while (1)
176 {
177 switch (TREE_CODE (exp))
178 {
179 case NOP_EXPR:
180 case CONVERT_EXPR:
181 case NON_LVALUE_EXPR:
182 exp = TREE_OPERAND (exp, 0);
183 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
184 return align;
185
186 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
187 align = MIN (inner, max_align);
188 break;
189
190 case PLUS_EXPR:
191 /* If sum of pointer + int, restrict our maximum alignment to that
192 imposed by the integer. If not, we can't do any better than
193 ALIGN. */
194 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
195 return align;
196
197 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
198 & (max_align / BITS_PER_UNIT - 1))
199 != 0)
200 max_align >>= 1;
201
202 exp = TREE_OPERAND (exp, 0);
203 break;
204
205 case ADDR_EXPR:
206 /* See what we are pointing at and look at its alignment. */
207 exp = TREE_OPERAND (exp, 0);
208 if (TREE_CODE (exp) == FUNCTION_DECL)
209 align = FUNCTION_BOUNDARY;
210 else if (DECL_P (exp))
211 align = DECL_ALIGN (exp);
212 #ifdef CONSTANT_ALIGNMENT
213 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
214 align = CONSTANT_ALIGNMENT (exp, align);
215 #endif
216 return MIN (align, max_align);
217
218 default:
219 return align;
220 }
221 }
222 }
223
224 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
225 way, because it could contain a zero byte in the middle.
226 TREE_STRING_LENGTH is the size of the character array, not the string.
227
228 The value returned is of type `ssizetype'.
229
230 Unfortunately, string_constant can't access the values of const char
 231 arrays with initializers, so neither can we here. */
232
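/* Editor's illustration (not part of the original source), assuming SRC
   folds to the string constants shown:

     c_strlen ("hello")         =>  ssize_int (5)
     c_strlen ("hello" + 2)     =>  ssize_int (3), known constant offset
     c_strlen ("foo\0bar" + i)  =>  0; unknown offset and an embedded NUL
     c_strlen ("hello" + 42)    =>  0, after warning that the offset is
                                    outside the bounds of the string  */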
233 static tree
234 c_strlen (src)
235 tree src;
236 {
237 tree offset_node;
238 HOST_WIDE_INT offset;
239 int max;
240 const char *ptr;
241
242 src = string_constant (src, &offset_node);
243 if (src == 0)
244 return 0;
245
246 max = TREE_STRING_LENGTH (src) - 1;
247 ptr = TREE_STRING_POINTER (src);
248
249 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
250 {
251 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
252 compute the offset to the following null if we don't know where to
253 start searching for it. */
254 int i;
255
256 for (i = 0; i < max; i++)
257 if (ptr[i] == 0)
258 return 0;
259
260 /* We don't know the starting offset, but we do know that the string
261 has no internal zero bytes. We can assume that the offset falls
262 within the bounds of the string; otherwise, the programmer deserves
263 what he gets. Subtract the offset from the length of the string,
264 and return that. This would perhaps not be valid if we were dealing
265 with named arrays in addition to literal string constants. */
266
267 return size_diffop (size_int (max), offset_node);
268 }
269
270 /* We have a known offset into the string. Start searching there for
271 a null character if we can represent it as a single HOST_WIDE_INT. */
272 if (offset_node == 0)
273 offset = 0;
274 else if (! host_integerp (offset_node, 0))
275 offset = -1;
276 else
277 offset = tree_low_cst (offset_node, 0);
278
279 /* If the offset is known to be out of bounds, warn, and call strlen at
280 runtime. */
281 if (offset < 0 || offset > max)
282 {
283 warning ("offset outside bounds of constant string");
284 return 0;
285 }
286
287 /* Use strlen to search for the first zero byte. Since any strings
288 constructed with build_string will have nulls appended, we win even
289 if we get handed something like (char[4])"abcd".
290
291 Since OFFSET is our starting index into the string, no further
292 calculation is needed. */
293 return ssize_int (strlen (ptr + offset));
294 }
295
 296 /* Return a char pointer for a C string if it is a string constant
 297 or the sum of a string constant and an integer constant. */
298
299 static const char *
300 c_getstr (src)
301 tree src;
302 {
303 tree offset_node;
304
305 src = string_constant (src, &offset_node);
306 if (src == 0)
307 return 0;
308
309 if (offset_node == 0)
310 return TREE_STRING_POINTER (src);
311 else if (!host_integerp (offset_node, 1)
312 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
313 return 0;
314
315 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
316 }
317
318 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
319 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
320
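/* Editor's worked example (not part of the original source): reading the
   constant "abc" in SImode, with 8-bit units, gives byte by byte

     little endian:  0x61 | 0x62<<8 | 0x63<<16 | 0<<24  =  0x00636261
     big endian:     0x61<<24 | 0x62<<16 | 0x63<<8 | 0  =  0x61626300

   Once a NUL byte is seen, CH stays zero and the remaining target bytes
   are filled with zeros.  */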
321 static rtx
322 c_readstr (str, mode)
323 const char *str;
324 enum machine_mode mode;
325 {
326 HOST_WIDE_INT c[2];
327 HOST_WIDE_INT ch;
328 unsigned int i, j;
329
330 if (GET_MODE_CLASS (mode) != MODE_INT)
331 abort ();
332 c[0] = 0;
333 c[1] = 0;
334 ch = 1;
335 for (i = 0; i < GET_MODE_SIZE (mode); i++)
336 {
337 j = i;
338 if (WORDS_BIG_ENDIAN)
339 j = GET_MODE_SIZE (mode) - i - 1;
340 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
341 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
342 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
343 j *= BITS_PER_UNIT;
344 if (j > 2 * HOST_BITS_PER_WIDE_INT)
345 abort ();
346 if (ch)
347 ch = (unsigned char) str[i];
348 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
349 }
350 return immed_double_const (c[0], c[1], mode);
351 }
352
 353 /* Cast a target constant CST to target CHAR.  If that value fits into the
 354 host char type, return zero and put the value into the variable pointed
 355 to by P. */
356
357 static int
358 target_char_cast (cst, p)
359 tree cst;
360 char *p;
361 {
362 unsigned HOST_WIDE_INT val, hostval;
363
364 if (!host_integerp (cst, 1)
365 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
366 return 1;
367
368 val = tree_low_cst (cst, 1);
369 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
370 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
371
372 hostval = val;
373 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
374 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
375
376 if (val != hostval)
377 return 1;
378
379 *p = hostval;
380 return 0;
381 }
382
383 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
384 times to get the address of either a higher stack frame, or a return
385 address located within it (depending on FNDECL_CODE). */
386
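/* Editor's usage sketch (not part of the original source): this helper
   backs both __builtin_return_address and __builtin_frame_address, e.g.

     void *pc = __builtin_return_address (0);   the address we return to
     void *fp = __builtin_frame_address (0);    this function's frame

   A COUNT greater than zero walks the dynamic chain and is only as
   reliable as the target's frame layout allows.  */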
387 rtx
388 expand_builtin_return_addr (fndecl_code, count, tem)
389 enum built_in_function fndecl_code;
390 int count;
391 rtx tem;
392 {
393 int i;
394
395 /* Some machines need special handling before we can access
396 arbitrary frames. For example, on the sparc, we must first flush
397 all register windows to the stack. */
398 #ifdef SETUP_FRAME_ADDRESSES
399 if (count > 0)
400 SETUP_FRAME_ADDRESSES ();
401 #endif
402
403 /* On the sparc, the return address is not in the frame, it is in a
404 register. There is no way to access it off of the current frame
405 pointer, but it can be accessed off the previous frame pointer by
406 reading the value from the register window save area. */
407 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
408 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
409 count--;
410 #endif
411
412 /* Scan back COUNT frames to the specified frame. */
413 for (i = 0; i < count; i++)
414 {
415 /* Assume the dynamic chain pointer is in the word that the
416 frame address points to, unless otherwise specified. */
417 #ifdef DYNAMIC_CHAIN_ADDRESS
418 tem = DYNAMIC_CHAIN_ADDRESS (tem);
419 #endif
420 tem = memory_address (Pmode, tem);
421 tem = gen_rtx_MEM (Pmode, tem);
422 set_mem_alias_set (tem, get_frame_alias_set ());
423 tem = copy_to_reg (tem);
424 }
425
426 /* For __builtin_frame_address, return what we've got. */
427 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
428 return tem;
429
 430 /* For __builtin_return_address, get the return address from that
431 frame. */
432 #ifdef RETURN_ADDR_RTX
433 tem = RETURN_ADDR_RTX (count, tem);
434 #else
435 tem = memory_address (Pmode,
436 plus_constant (tem, GET_MODE_SIZE (Pmode)));
437 tem = gen_rtx_MEM (Pmode, tem);
438 set_mem_alias_set (tem, get_frame_alias_set ());
439 #endif
440 return tem;
441 }
442
443 /* Alias set used for setjmp buffer. */
444 static HOST_WIDE_INT setjmp_alias_set = -1;
445
446 /* Construct the leading half of a __builtin_setjmp call. Control will
447 return to RECEIVER_LABEL. This is used directly by sjlj exception
448 handling code. */
449
450 void
451 expand_builtin_setjmp_setup (buf_addr, receiver_label)
452 rtx buf_addr;
453 rtx receiver_label;
454 {
455 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
456 rtx stack_save;
457 rtx mem;
458
459 if (setjmp_alias_set == -1)
460 setjmp_alias_set = new_alias_set ();
461
462 #ifdef POINTERS_EXTEND_UNSIGNED
463 if (GET_MODE (buf_addr) != Pmode)
464 buf_addr = convert_memory_address (Pmode, buf_addr);
465 #endif
466
467 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
468
469 emit_queue ();
470
471 /* We store the frame pointer and the address of receiver_label in
472 the buffer and use the rest of it for the stack save area, which
473 is machine-dependent. */
474
475 #ifndef BUILTIN_SETJMP_FRAME_VALUE
476 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
477 #endif
478
479 mem = gen_rtx_MEM (Pmode, buf_addr);
480 set_mem_alias_set (mem, setjmp_alias_set);
481 emit_move_insn (mem, BUILTIN_SETJMP_FRAME_VALUE);
482
 483 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
484 set_mem_alias_set (mem, setjmp_alias_set);
485
486 emit_move_insn (validize_mem (mem),
487 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
488
489 stack_save = gen_rtx_MEM (sa_mode,
490 plus_constant (buf_addr,
491 2 * GET_MODE_SIZE (Pmode)));
492 set_mem_alias_set (stack_save, setjmp_alias_set);
493 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
494
495 /* If there is further processing to do, do it. */
496 #ifdef HAVE_builtin_setjmp_setup
497 if (HAVE_builtin_setjmp_setup)
498 emit_insn (gen_builtin_setjmp_setup (buf_addr));
499 #endif
500
 501 /* Tell optimize_save_area_alloca that extra work will need to
 502 be done during alloca. */
503 current_function_calls_setjmp = 1;
504
505 /* Set this so all the registers get saved in our frame; we need to be
506 able to copy the saved values for any registers from frames we unwind. */
507 current_function_has_nonlocal_label = 1;
508 }
509
510 /* Construct the trailing part of a __builtin_setjmp call.
511 This is used directly by sjlj exception handling code. */
512
513 void
514 expand_builtin_setjmp_receiver (receiver_label)
515 rtx receiver_label ATTRIBUTE_UNUSED;
516 {
517 /* Clobber the FP when we get here, so we have to make sure it's
518 marked as used by this function. */
519 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
520
521 /* Mark the static chain as clobbered here so life information
522 doesn't get messed up for it. */
523 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
524
525 /* Now put in the code to restore the frame pointer, and argument
526 pointer, if needed. The code below is from expand_end_bindings
527 in stmt.c; see detailed documentation there. */
528 #ifdef HAVE_nonlocal_goto
529 if (! HAVE_nonlocal_goto)
530 #endif
531 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
532
533 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
534 if (fixed_regs[ARG_POINTER_REGNUM])
535 {
536 #ifdef ELIMINABLE_REGS
537 size_t i;
538 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
539
540 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
541 if (elim_regs[i].from == ARG_POINTER_REGNUM
542 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
543 break;
544
545 if (i == ARRAY_SIZE (elim_regs))
546 #endif
547 {
548 /* Now restore our arg pointer from the address at which it
549 was saved in our stack frame. */
550 emit_move_insn (virtual_incoming_args_rtx,
551 copy_to_reg (get_arg_pointer_save_area (cfun)));
552 }
553 }
554 #endif
555
556 #ifdef HAVE_builtin_setjmp_receiver
557 if (HAVE_builtin_setjmp_receiver)
558 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
559 else
560 #endif
561 #ifdef HAVE_nonlocal_goto_receiver
562 if (HAVE_nonlocal_goto_receiver)
563 emit_insn (gen_nonlocal_goto_receiver ());
564 else
565 #endif
566 { /* Nothing */ }
567
568 /* @@@ This is a kludge. Not all machine descriptions define a blockage
569 insn, but we must not allow the code we just generated to be reordered
570 by scheduling. Specifically, the update of the frame pointer must
571 happen immediately, not later. So emit an ASM_INPUT to act as blockage
572 insn. */
573 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
574 }
575
576 /* __builtin_setjmp is passed a pointer to an array of five words (not
577 all will be used on all machines). It operates similarly to the C
578 library function of the same name, but is more efficient. Much of
579 the code below (and for longjmp) is copied from the handling of
580 non-local gotos.
581
582 NOTE: This is intended for use by GNAT and the exception handling
583 scheme in the compiler and will only work in the method used by
584 them. */
585
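/* Editor's usage sketch (not part of the original source); as noted
   above, these builtins are meant for the compiler's own sjlj machinery
   rather than user code:

     void *buf[5];

     if (__builtin_setjmp (buf) == 0)
       {
         ... normal path; may eventually call __builtin_longjmp (buf, 1) ...
       }
     else
       {
         ... control resumes here, with setjmp appearing to return 1 ...
       }

   The second argument of __builtin_longjmp must be the constant 1, as
   enforced in expand_builtin_longjmp below.  */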
586 static rtx
587 expand_builtin_setjmp (arglist, target)
588 tree arglist;
589 rtx target;
590 {
591 rtx buf_addr, next_lab, cont_lab;
592
593 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
594 return NULL_RTX;
595
596 if (target == 0 || GET_CODE (target) != REG
597 || REGNO (target) < FIRST_PSEUDO_REGISTER)
598 target = gen_reg_rtx (TYPE_MODE (integer_type_node));
599
600 buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
601
602 next_lab = gen_label_rtx ();
603 cont_lab = gen_label_rtx ();
604
605 expand_builtin_setjmp_setup (buf_addr, next_lab);
606
607 /* Set TARGET to zero and branch to the continue label. */
608 emit_move_insn (target, const0_rtx);
609 emit_jump_insn (gen_jump (cont_lab));
610 emit_barrier ();
611 emit_label (next_lab);
612
613 expand_builtin_setjmp_receiver (next_lab);
614
615 /* Set TARGET to one. */
616 emit_move_insn (target, const1_rtx);
617 emit_label (cont_lab);
618
619 /* Tell flow about the strange goings on. Putting `next_lab' on
 620 `nonlocal_goto_handler_labels' indicates that function
 621 calls may traverse the arc back to this label. */
622
623 current_function_has_nonlocal_label = 1;
624 nonlocal_goto_handler_labels
625 = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);
626
627 return target;
628 }
629
630 /* __builtin_longjmp is passed a pointer to an array of five words (not
631 all will be used on all machines). It operates similarly to the C
632 library function of the same name, but is more efficient. Much of
633 the code below is copied from the handling of non-local gotos.
634
635 NOTE: This is intended for use by GNAT and the exception handling
636 scheme in the compiler and will only work in the method used by
637 them. */
638
639 void
640 expand_builtin_longjmp (buf_addr, value)
641 rtx buf_addr, value;
642 {
643 rtx fp, lab, stack, insn, last;
644 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
645
646 if (setjmp_alias_set == -1)
647 setjmp_alias_set = new_alias_set ();
648
649 #ifdef POINTERS_EXTEND_UNSIGNED
650 if (GET_MODE (buf_addr) != Pmode)
651 buf_addr = convert_memory_address (Pmode, buf_addr);
652 #endif
653
654 buf_addr = force_reg (Pmode, buf_addr);
655
656 /* We used to store value in static_chain_rtx, but that fails if pointers
657 are smaller than integers. We instead require that the user must pass
658 a second argument of 1, because that is what builtin_setjmp will
659 return. This also makes EH slightly more efficient, since we are no
660 longer copying around a value that we don't care about. */
661 if (value != const1_rtx)
662 abort ();
663
664 current_function_calls_longjmp = 1;
665
666 last = get_last_insn ();
667 #ifdef HAVE_builtin_longjmp
668 if (HAVE_builtin_longjmp)
669 emit_insn (gen_builtin_longjmp (buf_addr));
670 else
671 #endif
672 {
673 fp = gen_rtx_MEM (Pmode, buf_addr);
674 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
675 GET_MODE_SIZE (Pmode)));
676
677 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
678 2 * GET_MODE_SIZE (Pmode)));
679 set_mem_alias_set (fp, setjmp_alias_set);
680 set_mem_alias_set (lab, setjmp_alias_set);
681 set_mem_alias_set (stack, setjmp_alias_set);
682
683 /* Pick up FP, label, and SP from the block and jump. This code is
684 from expand_goto in stmt.c; see there for detailed comments. */
685 #if HAVE_nonlocal_goto
686 if (HAVE_nonlocal_goto)
687 /* We have to pass a value to the nonlocal_goto pattern that will
688 get copied into the static_chain pointer, but it does not matter
689 what that value is, because builtin_setjmp does not use it. */
690 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
691 else
692 #endif
693 {
694 lab = copy_to_reg (lab);
695
696 emit_move_insn (hard_frame_pointer_rtx, fp);
697 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
698
699 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
700 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
701 emit_indirect_jump (lab);
702 }
703 }
704
705 /* Search backwards and mark the jump insn as a non-local goto.
706 Note that this precludes the use of __builtin_longjmp to a
707 __builtin_setjmp target in the same function. However, we've
708 already cautioned the user that these functions are for
709 internal exception handling use only. */
710 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
711 {
712 if (insn == last)
713 abort ();
714 if (GET_CODE (insn) == JUMP_INSN)
715 {
716 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
717 REG_NOTES (insn));
718 break;
719 }
720 else if (GET_CODE (insn) == CALL_INSN)
721 break;
722 }
723 }
724
725 /* Expand a call to __builtin_prefetch. For a target that does not support
726 data prefetch, evaluate the memory address argument in case it has side
727 effects. */
728
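/* Editor's usage sketch (not part of the original source):

     __builtin_prefetch (&a[i + 8]);        read, locality 3 (the defaults)
     __builtin_prefetch (&a[i + 8], 1, 0);  write, no temporal locality

   The second argument (read/write) must be a constant 0 or 1 and the
   third (locality) a constant 0 through 3, as checked below.  */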
729 static void
730 expand_builtin_prefetch (arglist)
731 tree arglist;
732 {
733 tree arg0, arg1, arg2;
734 rtx op0, op1, op2;
735
736 if (!validate_arglist (arglist, POINTER_TYPE, 0))
737 return;
738
739 arg0 = TREE_VALUE (arglist);
740 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
741 zero (read) and argument 2 (locality) defaults to 3 (high degree of
742 locality). */
743 if (TREE_CHAIN (arglist))
744 {
745 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
746 if (TREE_CHAIN (TREE_CHAIN (arglist)))
747 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
748 else
749 arg2 = build_int_2 (3, 0);
750 }
751 else
752 {
753 arg1 = integer_zero_node;
754 arg2 = build_int_2 (3, 0);
755 }
756
757 /* Argument 0 is an address. */
758 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
759
760 /* Argument 1 (read/write flag) must be a compile-time constant int. */
761 if (TREE_CODE (arg1) != INTEGER_CST)
762 {
763 error ("second arg to `__builtin_prefetch' must be a constant");
764 arg1 = integer_zero_node;
765 }
766 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
767 /* Argument 1 must be either zero or one. */
768 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
769 {
770 warning ("invalid second arg to __builtin_prefetch; using zero");
771 op1 = const0_rtx;
772 }
773
774 /* Argument 2 (locality) must be a compile-time constant int. */
775 if (TREE_CODE (arg2) != INTEGER_CST)
776 {
777 error ("third arg to `__builtin_prefetch' must be a constant");
778 arg2 = integer_zero_node;
779 }
780 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
781 /* Argument 2 must be 0, 1, 2, or 3. */
782 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
783 {
784 warning ("invalid third arg to __builtin_prefetch; using zero");
785 op2 = const0_rtx;
786 }
787
788 #ifdef HAVE_prefetch
789 if (HAVE_prefetch)
790 {
791 if ((! (*insn_data[(int)CODE_FOR_prefetch].operand[0].predicate)
792 (op0,
793 insn_data[(int)CODE_FOR_prefetch].operand[0].mode)) ||
794 (GET_MODE(op0) != Pmode))
795 {
796 #ifdef POINTERS_EXTEND_UNSIGNED
797 if (GET_MODE(op0) != Pmode)
798 op0 = convert_memory_address (Pmode, op0);
799 #endif
800 op0 = force_reg (Pmode, op0);
801 }
802 emit_insn (gen_prefetch (op0, op1, op2));
803 }
804 else
805 #endif
806 op0 = protect_from_queue (op0, 0);
807 /* Don't do anything with direct references to volatile memory, but
808 generate code to handle other side effects. */
809 if (GET_CODE (op0) != MEM && side_effects_p (op0))
810 emit_insn (op0);
811 }
812
813 /* Get a MEM rtx for expression EXP which is the address of an operand
 814 to be used in a string instruction (cmpstrsi, movstrsi, ...). */
815
816 static rtx
817 get_memory_rtx (exp)
818 tree exp;
819 {
820 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_SUM);
821 rtx mem;
822
823 #ifdef POINTERS_EXTEND_UNSIGNED
824 if (GET_MODE (addr) != Pmode)
825 addr = convert_memory_address (Pmode, addr);
826 #endif
827
828 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
829
830 /* Get an expression we can use to find the attributes to assign to MEM.
831 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
832 we can. First remove any nops. */
833 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
834 || TREE_CODE (exp) == NON_LVALUE_EXPR)
835 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
836 exp = TREE_OPERAND (exp, 0);
837
838 if (TREE_CODE (exp) == ADDR_EXPR)
839 {
840 exp = TREE_OPERAND (exp, 0);
841 set_mem_attributes (mem, exp, 0);
842 }
843 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
844 {
845 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
846 /* memcpy, memset and other builtin stringops can alias with anything. */
847 set_mem_alias_set (mem, 0);
848 }
849
850 return mem;
851 }
852 \f
853 /* Built-in functions to perform an untyped call and return. */
854
855 /* For each register that may be used for calling a function, this
856 gives a mode used to copy the register's value. VOIDmode indicates
857 the register is not used for calling a function. If the machine
858 has register windows, this gives only the outbound registers.
859 INCOMING_REGNO gives the corresponding inbound register. */
860 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
861
862 /* For each register that may be used for returning values, this gives
863 a mode used to copy the register's value. VOIDmode indicates the
864 register is not used for returning values. If the machine has
865 register windows, this gives only the outbound registers.
866 INCOMING_REGNO gives the corresponding inbound register. */
867 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
868
869 /* For each register that may be used for calling a function, this
870 gives the offset of that register into the block returned by
871 __builtin_apply_args. 0 indicates that the register is not
872 used for calling a function. */
873 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
874
875 /* Return the offset of register REGNO into the block returned by
876 __builtin_apply_args. This is not declared static, since it is
877 needed in objc-act.c. */
878
879 int
880 apply_args_register_offset (regno)
881 int regno;
882 {
883 apply_args_size ();
884
885 /* Arguments are always put in outgoing registers (in the argument
 886 block) when that makes sense. */
887 #ifdef OUTGOING_REGNO
888 regno = OUTGOING_REGNO(regno);
889 #endif
890 return apply_args_reg_offset[regno];
891 }
892
893 /* Return the size required for the block returned by __builtin_apply_args,
894 and initialize apply_args_mode. */
895
896 static int
897 apply_args_size ()
898 {
899 static int size = -1;
900 int align;
901 unsigned int regno;
902 enum machine_mode mode;
903
904 /* The values computed by this function never change. */
905 if (size < 0)
906 {
907 /* The first value is the incoming arg-pointer. */
908 size = GET_MODE_SIZE (Pmode);
909
910 /* The second value is the structure value address unless this is
911 passed as an "invisible" first argument. */
912 if (struct_value_rtx)
913 size += GET_MODE_SIZE (Pmode);
914
915 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
916 if (FUNCTION_ARG_REGNO_P (regno))
917 {
918 /* Search for the proper mode for copying this register's
919 value. I'm not sure this is right, but it works so far. */
920 enum machine_mode best_mode = VOIDmode;
921
922 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
923 mode != VOIDmode;
924 mode = GET_MODE_WIDER_MODE (mode))
925 if (HARD_REGNO_MODE_OK (regno, mode)
926 && HARD_REGNO_NREGS (regno, mode) == 1)
927 best_mode = mode;
928
929 if (best_mode == VOIDmode)
930 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
931 mode != VOIDmode;
932 mode = GET_MODE_WIDER_MODE (mode))
933 if (HARD_REGNO_MODE_OK (regno, mode)
934 && have_insn_for (SET, mode))
935 best_mode = mode;
936
937 if (best_mode == VOIDmode)
938 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
939 mode != VOIDmode;
940 mode = GET_MODE_WIDER_MODE (mode))
941 if (HARD_REGNO_MODE_OK (regno, mode)
942 && have_insn_for (SET, mode))
943 best_mode = mode;
944
945 if (best_mode == VOIDmode)
946 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
947 mode != VOIDmode;
948 mode = GET_MODE_WIDER_MODE (mode))
949 if (HARD_REGNO_MODE_OK (regno, mode)
950 && have_insn_for (SET, mode))
951 best_mode = mode;
952
953 mode = best_mode;
954 if (mode == VOIDmode)
955 abort ();
956
957 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
958 if (size % align != 0)
959 size = CEIL (size, align) * align;
960 apply_args_reg_offset[regno] = size;
961 size += GET_MODE_SIZE (mode);
962 apply_args_mode[regno] = mode;
963 }
964 else
965 {
966 apply_args_mode[regno] = VOIDmode;
967 apply_args_reg_offset[regno] = 0;
968 }
969 }
970 return size;
971 }
972
973 /* Return the size required for the block returned by __builtin_apply,
974 and initialize apply_result_mode. */
975
976 static int
977 apply_result_size ()
978 {
979 static int size = -1;
980 int align, regno;
981 enum machine_mode mode;
982
983 /* The values computed by this function never change. */
984 if (size < 0)
985 {
986 size = 0;
987
988 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
989 if (FUNCTION_VALUE_REGNO_P (regno))
990 {
991 /* Search for the proper mode for copying this register's
992 value. I'm not sure this is right, but it works so far. */
993 enum machine_mode best_mode = VOIDmode;
994
995 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
996 mode != TImode;
997 mode = GET_MODE_WIDER_MODE (mode))
998 if (HARD_REGNO_MODE_OK (regno, mode))
999 best_mode = mode;
1000
1001 if (best_mode == VOIDmode)
1002 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
1003 mode != VOIDmode;
1004 mode = GET_MODE_WIDER_MODE (mode))
1005 if (HARD_REGNO_MODE_OK (regno, mode)
1006 && have_insn_for (SET, mode))
1007 best_mode = mode;
1008
1009 if (best_mode == VOIDmode)
1010 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
1011 mode != VOIDmode;
1012 mode = GET_MODE_WIDER_MODE (mode))
1013 if (HARD_REGNO_MODE_OK (regno, mode)
1014 && have_insn_for (SET, mode))
1015 best_mode = mode;
1016
1017 if (best_mode == VOIDmode)
1018 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
1019 mode != VOIDmode;
1020 mode = GET_MODE_WIDER_MODE (mode))
1021 if (HARD_REGNO_MODE_OK (regno, mode)
1022 && have_insn_for (SET, mode))
1023 best_mode = mode;
1024
1025 mode = best_mode;
1026 if (mode == VOIDmode)
1027 abort ();
1028
1029 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1030 if (size % align != 0)
1031 size = CEIL (size, align) * align;
1032 size += GET_MODE_SIZE (mode);
1033 apply_result_mode[regno] = mode;
1034 }
1035 else
1036 apply_result_mode[regno] = VOIDmode;
1037
1038 /* Allow targets that use untyped_call and untyped_return to override
1039 the size so that machine-specific information can be stored here. */
1040 #ifdef APPLY_RESULT_SIZE
1041 size = APPLY_RESULT_SIZE;
1042 #endif
1043 }
1044 return size;
1045 }
1046
1047 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1048 /* Create a vector describing the result block RESULT. If SAVEP is true,
1049 the result block is used to save the values; otherwise it is used to
1050 restore the values. */
1051
1052 static rtx
1053 result_vector (savep, result)
1054 int savep;
1055 rtx result;
1056 {
1057 int regno, size, align, nelts;
1058 enum machine_mode mode;
1059 rtx reg, mem;
1060 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1061
1062 size = nelts = 0;
1063 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1064 if ((mode = apply_result_mode[regno]) != VOIDmode)
1065 {
1066 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1067 if (size % align != 0)
1068 size = CEIL (size, align) * align;
1069 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1070 mem = adjust_address (result, mode, size);
1071 savevec[nelts++] = (savep
1072 ? gen_rtx_SET (VOIDmode, mem, reg)
1073 : gen_rtx_SET (VOIDmode, reg, mem));
1074 size += GET_MODE_SIZE (mode);
1075 }
1076 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1077 }
1078 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1079
1080 /* Save the state required to perform an untyped call with the same
1081 arguments as were passed to the current function. */
1082
1083 static rtx
1084 expand_builtin_apply_args_1 ()
1085 {
1086 rtx registers;
1087 int size, align, regno;
1088 enum machine_mode mode;
1089
1090 /* Create a block where the arg-pointer, structure value address,
1091 and argument registers can be saved. */
1092 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1093
1094 /* Walk past the arg-pointer and structure value address. */
1095 size = GET_MODE_SIZE (Pmode);
1096 if (struct_value_rtx)
1097 size += GET_MODE_SIZE (Pmode);
1098
1099 /* Save each register used in calling a function to the block. */
1100 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1101 if ((mode = apply_args_mode[regno]) != VOIDmode)
1102 {
1103 rtx tem;
1104
1105 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1106 if (size % align != 0)
1107 size = CEIL (size, align) * align;
1108
1109 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1110
1111 emit_move_insn (adjust_address (registers, mode, size), tem);
1112 size += GET_MODE_SIZE (mode);
1113 }
1114
1115 /* Save the arg pointer to the block. */
1116 emit_move_insn (adjust_address (registers, Pmode, 0),
1117 copy_to_reg (virtual_incoming_args_rtx));
1118 size = GET_MODE_SIZE (Pmode);
1119
1120 /* Save the structure value address unless this is passed as an
1121 "invisible" first argument. */
1122 if (struct_value_incoming_rtx)
1123 {
1124 emit_move_insn (adjust_address (registers, Pmode, size),
1125 copy_to_reg (struct_value_incoming_rtx));
1126 size += GET_MODE_SIZE (Pmode);
1127 }
1128
1129 /* Return the address of the block. */
1130 return copy_addr_to_reg (XEXP (registers, 0));
1131 }
1132
 1133 /* __builtin_apply_args returns a block of memory allocated on
 1134 the stack into which are stored the arg pointer, structure
1135 value address, static chain, and all the registers that might
1136 possibly be used in performing a function call. The code is
1137 moved to the start of the function so the incoming values are
1138 saved. */
1139
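/* Editor's usage sketch (not part of the original source): together with
   __builtin_apply and __builtin_return this lets a wrapper forward its
   own arguments to another function and return whatever it returned:

     void *args = __builtin_apply_args ();
     void *res  = __builtin_apply ((void (*) ()) target_fn, args, 64);
     __builtin_return (res);

   Here target_fn and the argument-block size 64 are placeholders; the
   size says how many bytes of stacked arguments to copy.  */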
1140 static rtx
1141 expand_builtin_apply_args ()
1142 {
1143 /* Don't do __builtin_apply_args more than once in a function.
1144 Save the result of the first call and reuse it. */
1145 if (apply_args_value != 0)
1146 return apply_args_value;
1147 {
1148 /* When this function is called, it means that registers must be
1149 saved on entry to this function. So we migrate the
1150 call to the first insn of this function. */
1151 rtx temp;
1152 rtx seq;
1153
1154 start_sequence ();
1155 temp = expand_builtin_apply_args_1 ();
1156 seq = get_insns ();
1157 end_sequence ();
1158
1159 apply_args_value = temp;
1160
1161 /* Put the insns after the NOTE that starts the function.
1162 If this is inside a start_sequence, make the outer-level insn
1163 chain current, so the code is placed at the start of the
1164 function. */
1165 push_topmost_sequence ();
1166 emit_insn_before (seq, NEXT_INSN (get_insns ()));
1167 pop_topmost_sequence ();
1168 return temp;
1169 }
1170 }
1171
1172 /* Perform an untyped call and save the state required to perform an
1173 untyped return of whatever value was returned by the given function. */
1174
1175 static rtx
1176 expand_builtin_apply (function, arguments, argsize)
1177 rtx function, arguments, argsize;
1178 {
1179 int size, align, regno;
1180 enum machine_mode mode;
1181 rtx incoming_args, result, reg, dest, src, call_insn;
1182 rtx old_stack_level = 0;
1183 rtx call_fusage = 0;
1184
1185 #ifdef POINTERS_EXTEND_UNSIGNED
1186 if (GET_MODE (arguments) != Pmode)
1187 arguments = convert_memory_address (Pmode, arguments);
1188 #endif
1189
1190 /* Create a block where the return registers can be saved. */
1191 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1192
1193 /* Fetch the arg pointer from the ARGUMENTS block. */
1194 incoming_args = gen_reg_rtx (Pmode);
1195 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1196 #ifndef STACK_GROWS_DOWNWARD
1197 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1198 incoming_args, 0, OPTAB_LIB_WIDEN);
1199 #endif
1200
1201 /* Perform postincrements before actually calling the function. */
1202 emit_queue ();
1203
1204 /* Push a new argument block and copy the arguments. Do not allow
1205 the (potential) memcpy call below to interfere with our stack
1206 manipulations. */
1207 do_pending_stack_adjust ();
1208 NO_DEFER_POP;
1209
 1210 /* Save the stack using the nonlocal mechanism if available. */
1211 #ifdef HAVE_save_stack_nonlocal
1212 if (HAVE_save_stack_nonlocal)
1213 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1214 else
1215 #endif
1216 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1217
1218 /* Push a block of memory onto the stack to store the memory arguments.
1219 Save the address in a register, and copy the memory arguments. ??? I
 1220 haven't figured out how the calling convention macros affect this,
 1221 but it's likely that the source and/or destination addresses in
 1222 the block copy will need updating in machine-specific ways. */
1223 dest = allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1224 dest = gen_rtx_MEM (BLKmode, dest);
1225 set_mem_align (dest, PARM_BOUNDARY);
1226 src = gen_rtx_MEM (BLKmode, incoming_args);
1227 set_mem_align (src, PARM_BOUNDARY);
1228 emit_block_move (dest, src, argsize);
1229
1230 /* Refer to the argument block. */
1231 apply_args_size ();
1232 arguments = gen_rtx_MEM (BLKmode, arguments);
1233 set_mem_align (arguments, PARM_BOUNDARY);
1234
1235 /* Walk past the arg-pointer and structure value address. */
1236 size = GET_MODE_SIZE (Pmode);
1237 if (struct_value_rtx)
1238 size += GET_MODE_SIZE (Pmode);
1239
1240 /* Restore each of the registers previously saved. Make USE insns
1241 for each of these registers for use in making the call. */
1242 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1243 if ((mode = apply_args_mode[regno]) != VOIDmode)
1244 {
1245 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1246 if (size % align != 0)
1247 size = CEIL (size, align) * align;
1248 reg = gen_rtx_REG (mode, regno);
1249 emit_move_insn (reg, adjust_address (arguments, mode, size));
1250 use_reg (&call_fusage, reg);
1251 size += GET_MODE_SIZE (mode);
1252 }
1253
1254 /* Restore the structure value address unless this is passed as an
1255 "invisible" first argument. */
1256 size = GET_MODE_SIZE (Pmode);
1257 if (struct_value_rtx)
1258 {
1259 rtx value = gen_reg_rtx (Pmode);
1260 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1261 emit_move_insn (struct_value_rtx, value);
1262 if (GET_CODE (struct_value_rtx) == REG)
1263 use_reg (&call_fusage, struct_value_rtx);
1264 size += GET_MODE_SIZE (Pmode);
1265 }
1266
1267 /* All arguments and registers used for the call are set up by now! */
1268 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0, 0);
1269
1270 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1271 and we don't want to load it into a register as an optimization,
1272 because prepare_call_address already did it if it should be done. */
1273 if (GET_CODE (function) != SYMBOL_REF)
1274 function = memory_address (FUNCTION_MODE, function);
1275
1276 /* Generate the actual call instruction and save the return value. */
1277 #ifdef HAVE_untyped_call
1278 if (HAVE_untyped_call)
1279 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1280 result, result_vector (1, result)));
1281 else
1282 #endif
1283 #ifdef HAVE_call_value
1284 if (HAVE_call_value)
1285 {
1286 rtx valreg = 0;
1287
1288 /* Locate the unique return register. It is not possible to
1289 express a call that sets more than one return register using
1290 call_value; use untyped_call for that. In fact, untyped_call
1291 only needs to save the return registers in the given block. */
1292 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1293 if ((mode = apply_result_mode[regno]) != VOIDmode)
1294 {
1295 if (valreg)
1296 abort (); /* HAVE_untyped_call required. */
1297 valreg = gen_rtx_REG (mode, regno);
1298 }
1299
1300 emit_call_insn (GEN_CALL_VALUE (valreg,
1301 gen_rtx_MEM (FUNCTION_MODE, function),
1302 const0_rtx, NULL_RTX, const0_rtx));
1303
1304 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1305 }
1306 else
1307 #endif
1308 abort ();
1309
1310 /* Find the CALL insn we just emitted. */
1311 for (call_insn = get_last_insn ();
1312 call_insn && GET_CODE (call_insn) != CALL_INSN;
1313 call_insn = PREV_INSN (call_insn))
1314 ;
1315
1316 if (! call_insn)
1317 abort ();
1318
1319 /* Put the register usage information on the CALL. If there is already
1320 some usage information, put ours at the end. */
1321 if (CALL_INSN_FUNCTION_USAGE (call_insn))
1322 {
1323 rtx link;
1324
1325 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
1326 link = XEXP (link, 1))
1327 ;
1328
1329 XEXP (link, 1) = call_fusage;
1330 }
1331 else
1332 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
1333
1334 /* Restore the stack. */
1335 #ifdef HAVE_save_stack_nonlocal
1336 if (HAVE_save_stack_nonlocal)
1337 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1338 else
1339 #endif
1340 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1341
1342 OK_DEFER_POP;
1343
1344 /* Return the address of the result block. */
1345 return copy_addr_to_reg (XEXP (result, 0));
1346 }
1347
1348 /* Perform an untyped return. */
1349
1350 static void
1351 expand_builtin_return (result)
1352 rtx result;
1353 {
1354 int size, align, regno;
1355 enum machine_mode mode;
1356 rtx reg;
1357 rtx call_fusage = 0;
1358
1359 #ifdef POINTERS_EXTEND_UNSIGNED
1360 if (GET_MODE (result) != Pmode)
1361 result = convert_memory_address (Pmode, result);
1362 #endif
1363
1364 apply_result_size ();
1365 result = gen_rtx_MEM (BLKmode, result);
1366
1367 #ifdef HAVE_untyped_return
1368 if (HAVE_untyped_return)
1369 {
1370 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1371 emit_barrier ();
1372 return;
1373 }
1374 #endif
1375
1376 /* Restore the return value and note that each value is used. */
1377 size = 0;
1378 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1379 if ((mode = apply_result_mode[regno]) != VOIDmode)
1380 {
1381 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1382 if (size % align != 0)
1383 size = CEIL (size, align) * align;
1384 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1385 emit_move_insn (reg, adjust_address (result, mode, size));
1386
1387 push_to_sequence (call_fusage);
1388 emit_insn (gen_rtx_USE (VOIDmode, reg));
1389 call_fusage = get_insns ();
1390 end_sequence ();
1391 size += GET_MODE_SIZE (mode);
1392 }
1393
1394 /* Put the USE insns before the return. */
1395 emit_insn (call_fusage);
1396
 1397 /* Return whatever value was restored by jumping directly to the end
1398 of the function. */
1399 expand_null_return ();
1400 }
1401
1402 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1403
1404 static enum type_class
1405 type_to_class (type)
1406 tree type;
1407 {
1408 switch (TREE_CODE (type))
1409 {
1410 case VOID_TYPE: return void_type_class;
1411 case INTEGER_TYPE: return integer_type_class;
1412 case CHAR_TYPE: return char_type_class;
1413 case ENUMERAL_TYPE: return enumeral_type_class;
1414 case BOOLEAN_TYPE: return boolean_type_class;
1415 case POINTER_TYPE: return pointer_type_class;
1416 case REFERENCE_TYPE: return reference_type_class;
1417 case OFFSET_TYPE: return offset_type_class;
1418 case REAL_TYPE: return real_type_class;
1419 case COMPLEX_TYPE: return complex_type_class;
1420 case FUNCTION_TYPE: return function_type_class;
1421 case METHOD_TYPE: return method_type_class;
1422 case RECORD_TYPE: return record_type_class;
1423 case UNION_TYPE:
1424 case QUAL_UNION_TYPE: return union_type_class;
1425 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1426 ? string_type_class : array_type_class);
1427 case SET_TYPE: return set_type_class;
1428 case FILE_TYPE: return file_type_class;
1429 case LANG_TYPE: return lang_type_class;
1430 default: return no_type_class;
1431 }
1432 }
1433
1434 /* Expand a call to __builtin_classify_type with arguments found in
1435 ARGLIST. */
1436
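/* Editor's usage sketch (not part of the original source); the result is
   one of the type_class values from typeclass.h:

     __builtin_classify_type (0)           =>  integer_type_class
     __builtin_classify_type (0.0)         =>  real_type_class
     __builtin_classify_type ((char *) 0)  =>  pointer_type_class  */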
1437 static rtx
1438 expand_builtin_classify_type (arglist)
1439 tree arglist;
1440 {
1441 if (arglist != 0)
1442 return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
1443 return GEN_INT (no_type_class);
1444 }
1445
1446 /* Expand expression EXP, which is a call to __builtin_constant_p. */
1447
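/* Editor's usage sketch (not part of the original source):

     #define LOG2(n) \
       (__builtin_constant_p (n) ? CONSTANT_LOG2 (n) : runtime_log2 (n))

   CONSTANT_LOG2 and runtime_log2 are hypothetical names.  The easy cases
   are folded earlier; what reaches this expander is wrapped in a
   CONSTANT_P_RTX so that CSE still gets a chance to resolve it.  */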
1448 static rtx
1449 expand_builtin_constant_p (exp)
1450 tree exp;
1451 {
1452 tree arglist = TREE_OPERAND (exp, 1);
1453 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
1454 rtx tmp;
1455
1456 if (arglist == 0)
1457 return const0_rtx;
1458 arglist = TREE_VALUE (arglist);
1459
1460 /* We have taken care of the easy cases during constant folding. This
1461 case is not obvious, so emit (constant_p_rtx (ARGLIST)) and let CSE get a
1462 chance to see if it can deduce whether ARGLIST is constant. */
1463
1464 tmp = expand_expr (arglist, NULL_RTX, VOIDmode, 0);
1465 tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp);
1466 return tmp;
1467 }
1468
 1469 /* Expand a call to one of the builtin math functions (sin, cos, sqrt, exp or log).
1470 Return 0 if a normal call should be emitted rather than expanding the
1471 function in-line. EXP is the expression that is a call to the builtin
1472 function; if convenient, the result should be placed in TARGET.
1473 SUBTARGET may be used as the target for computing one of EXP's operands. */
1474
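/* Editor's expansion sketch (not part of the original source): for

     double d = sqrt (x);

   a target providing a sqrt pattern for the argument's mode (via
   sqrt_optab) gets the instruction inline; when errno handling is
   enabled and the mode honors NaNs, a NaN result additionally branches
   to code that sets errno to EDOM, or re-issues the library call when
   TARGET_EDOM is not defined.  If the optab expansion fails, 0 is
   returned and the caller emits the ordinary library call.  */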
1475 static rtx
1476 expand_builtin_mathfn (exp, target, subtarget)
1477 tree exp;
1478 rtx target, subtarget;
1479 {
1480 optab builtin_optab;
1481 rtx op0, insns;
1482 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1483 tree arglist = TREE_OPERAND (exp, 1);
1484 enum machine_mode argmode;
1485
1486 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
1487 return 0;
1488
1489 /* Stabilize and compute the argument. */
1490 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
1491 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
1492 {
1493 exp = copy_node (exp);
1494 TREE_OPERAND (exp, 1) = arglist;
1495 /* Wrap the computation of the argument in a SAVE_EXPR. That
1496 way, if we need to expand the argument again (as in the
1497 flag_errno_math case below where we cannot directly set
1498 errno), we will not perform side-effects more than once.
1499 Note that here we're mutating the original EXP as well as the
1500 copy; that's the right thing to do in case the original EXP
1501 is expanded later. */
1502 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
1503 arglist = copy_node (arglist);
1504 }
1505 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
1506
1507 /* Make a suitable register to place result in. */
1508 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
1509
1510 emit_queue ();
1511 start_sequence ();
1512
1513 switch (DECL_FUNCTION_CODE (fndecl))
1514 {
1515 case BUILT_IN_SIN:
1516 case BUILT_IN_SINF:
1517 case BUILT_IN_SINL:
1518 builtin_optab = sin_optab; break;
1519 case BUILT_IN_COS:
1520 case BUILT_IN_COSF:
1521 case BUILT_IN_COSL:
1522 builtin_optab = cos_optab; break;
1523 case BUILT_IN_SQRT:
1524 case BUILT_IN_SQRTF:
1525 case BUILT_IN_SQRTL:
1526 builtin_optab = sqrt_optab; break;
1527 case BUILT_IN_EXP:
1528 case BUILT_IN_EXPF:
1529 case BUILT_IN_EXPL:
1530 builtin_optab = exp_optab; break;
1531 case BUILT_IN_LOG:
1532 case BUILT_IN_LOGF:
1533 case BUILT_IN_LOGL:
1534 builtin_optab = log_optab; break;
1535 default:
1536 abort ();
1537 }
1538
1539 /* Compute into TARGET.
1540 Set TARGET to wherever the result comes back. */
1541 argmode = TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist)));
1542 target = expand_unop (argmode, builtin_optab, op0, target, 0);
1543
1544 /* If we were unable to expand via the builtin, stop the
1545 sequence (without outputting the insns) and return 0, causing
1546 a call to the library function. */
1547 if (target == 0)
1548 {
1549 end_sequence ();
1550 return 0;
1551 }
1552
1553 /* If errno must be maintained, we must set it to EDOM for NaN results. */
1554
1555 if (flag_errno_math && HONOR_NANS (argmode))
1556 {
1557 rtx lab1;
1558
1559 lab1 = gen_label_rtx ();
1560
1561 /* Test the result; if it is NaN, set errno=EDOM because
1562 the argument was not in the domain. */
1563 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1564 0, lab1);
1565
1566 #ifdef TARGET_EDOM
1567 {
1568 #ifdef GEN_ERRNO_RTX
1569 rtx errno_rtx = GEN_ERRNO_RTX;
1570 #else
1571 rtx errno_rtx
1572 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1573 #endif
1574
1575 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1576 }
1577 #else
1578 /* We can't set errno=EDOM directly; let the library call do it.
1579 Pop the arguments right away in case the call gets deleted. */
1580 NO_DEFER_POP;
1581 expand_call (exp, target, 0);
1582 OK_DEFER_POP;
1583 #endif
1584
1585 emit_label (lab1);
1586 }
1587
1588 /* Output the entire sequence. */
1589 insns = get_insns ();
1590 end_sequence ();
1591 emit_insn (insns);
1592
1593 return target;
1594 }
1595
 1596 /* Expand expression EXP, which is a call to the strlen builtin.  Return 0
 1597 if we failed (so the caller should emit a normal call); otherwise
1598 try to get the result in TARGET, if convenient. */
1599
1600 static rtx
1601 expand_builtin_strlen (exp, target)
1602 tree exp;
1603 rtx target;
1604 {
1605 tree arglist = TREE_OPERAND (exp, 1);
1606 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
1607
1608 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
1609 return 0;
1610 else
1611 {
1612 rtx pat;
1613 tree src = TREE_VALUE (arglist);
1614
1615 int align
1616 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
1617
1618 rtx result, src_reg, char_rtx, before_strlen;
1619 enum machine_mode insn_mode = value_mode, char_mode;
1620 enum insn_code icode = CODE_FOR_nothing;
1621
1622 /* If SRC is not a pointer type, don't do this operation inline. */
1623 if (align == 0)
1624 return 0;
1625
1626 /* Bail out if we can't compute strlen in the right mode. */
1627 while (insn_mode != VOIDmode)
1628 {
1629 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
1630 if (icode != CODE_FOR_nothing)
1631 break;
1632
1633 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
1634 }
1635 if (insn_mode == VOIDmode)
1636 return 0;
1637
1638 /* Make a place to write the result of the instruction. */
1639 result = target;
1640 if (! (result != 0
1641 && GET_CODE (result) == REG
1642 && GET_MODE (result) == insn_mode
1643 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
1644 result = gen_reg_rtx (insn_mode);
1645
1646 /* Make a place to hold the source address. We will not expand
1647 the actual source until we are sure that the expansion will
1648 not fail -- there are trees that cannot be expanded twice. */
1649 src_reg = gen_reg_rtx (Pmode);
1650
1651 /* Mark the beginning of the strlen sequence so we can emit the
1652 source operand later. */
1653 before_strlen = get_last_insn();
1654
1655 char_rtx = const0_rtx;
1656 char_mode = insn_data[(int) icode].operand[2].mode;
1657 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
1658 char_mode))
1659 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
1660
1661 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
1662 char_rtx, GEN_INT (align));
1663 if (! pat)
1664 return 0;
1665 emit_insn (pat);
1666
1667 /* Now that we are assured of success, expand the source. */
1668 start_sequence ();
1669 pat = memory_address (BLKmode,
1670 expand_expr (src, src_reg, ptr_mode, EXPAND_SUM));
1671 if (pat != src_reg)
1672 emit_move_insn (src_reg, pat);
1673 pat = get_insns ();
1674 end_sequence ();
1675
1676 if (before_strlen)
1677 emit_insn_after (pat, before_strlen);
1678 else
1679 emit_insn_before (pat, get_insns ());
1680
1681 /* Return the value in the proper mode for this function. */
1682 if (GET_MODE (result) == value_mode)
1683 target = result;
1684 else if (target != 0)
1685 convert_move (target, result, 0);
1686 else
1687 target = convert_to_mode (value_mode, result, 0);
1688
1689 return target;
1690 }
1691 }
1692
 1693 /* Expand a call to the strstr builtin.  Return 0 if we failed (so the
 1694 caller should emit a normal call); otherwise try to get the result
1695 in TARGET, if convenient (and in mode MODE if that's convenient). */
1696
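/* Editor's summary of the folds performed below (not part of the
   original source), when the needle is a known constant:

     strstr (s, "")          =>  s
     strstr (s, "c")         =>  strchr (s, 'c')
     strstr ("abcde", "cd")  =>  "abcde" + 2, a constant offset into S1

   Anything else falls back to a normal library call (return 0).  */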
1697 static rtx
1698 expand_builtin_strstr (arglist, target, mode)
1699 tree arglist;
1700 rtx target;
1701 enum machine_mode mode;
1702 {
1703 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1704 return 0;
1705 else
1706 {
1707 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
1708 tree fn;
1709 const char *p1, *p2;
1710
1711 p2 = c_getstr (s2);
1712 if (p2 == NULL)
1713 return 0;
1714
1715 p1 = c_getstr (s1);
1716 if (p1 != NULL)
1717 {
1718 const char *r = strstr (p1, p2);
1719
1720 if (r == NULL)
1721 return const0_rtx;
1722
1723 /* Return an offset into the constant string argument. */
1724 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
1725 s1, ssize_int (r - p1))),
1726 target, mode, EXPAND_NORMAL);
1727 }
1728
1729 if (p2[0] == '\0')
1730 return expand_expr (s1, target, mode, EXPAND_NORMAL);
1731
1732 if (p2[1] != '\0')
1733 return 0;
1734
1735 fn = built_in_decls[BUILT_IN_STRCHR];
1736 if (!fn)
1737 return 0;
1738
1739 /* New argument list transforming strstr(s1, s2) to
1740 strchr(s1, s2[0]). */
1741 arglist =
1742 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
1743 arglist = tree_cons (NULL_TREE, s1, arglist);
1744 return expand_expr (build_function_call_expr (fn, arglist),
1745 target, mode, EXPAND_NORMAL);
1746 }
1747 }
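
/* Illustrative sketch, not part of builtins.c: a user-level view of the
   equivalences the strstr expander above relies on.  The function name
   example_strstr_equivalences is hypothetical; only the hosted C library
   functions strstr and strchr are assumed.  */
#include <assert.h>
#include <string.h>

static void
example_strstr_equivalences (void)
{
  const char *s = "needle in a haystack";

  /* strstr with an empty needle matches at the start of S.  */
  assert (strstr (s, "") == s);

  /* strstr with a one-character needle is equivalent to strchr,
     which is exactly the strchr transformation performed above.  */
  assert (strstr (s, "h") == strchr (s, 'h'));
}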
1748
1749 /* Expand a call to the strchr builtin. Return 0 if we failed; the
1750 caller should emit a normal call. Otherwise try to get the result
1751 in TARGET, if convenient (and in mode MODE if that's convenient). */
1752
1753 static rtx
1754 expand_builtin_strchr (arglist, target, mode)
1755 tree arglist;
1756 rtx target;
1757 enum machine_mode mode;
1758 {
1759 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
1760 return 0;
1761 else
1762 {
1763 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
1764 const char *p1;
1765
1766 if (TREE_CODE (s2) != INTEGER_CST)
1767 return 0;
1768
1769 p1 = c_getstr (s1);
1770 if (p1 != NULL)
1771 {
1772 char c;
1773 const char *r;
1774
1775 if (target_char_cast (s2, &c))
1776 return 0;
1777
1778 r = strchr (p1, c);
1779
1780 if (r == NULL)
1781 return const0_rtx;
1782
1783 /* Return an offset into the constant string argument. */
1784 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
1785 s1, ssize_int (r - p1))),
1786 target, mode, EXPAND_NORMAL);
1787 }
1788
1789 /* FIXME: Should use the strchrM optab here so that ports can optimize
1790 this. */
1791 return 0;
1792 }
1793 }
1794
1795 /* Expand a call to the strrchr builtin. Return 0 if we failed; the
1796 caller should emit a normal call. Otherwise try to get the result
1797 in TARGET, if convenient (and in mode MODE if that's convenient). */
1798
1799 static rtx
1800 expand_builtin_strrchr (arglist, target, mode)
1801 tree arglist;
1802 rtx target;
1803 enum machine_mode mode;
1804 {
1805 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
1806 return 0;
1807 else
1808 {
1809 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
1810 tree fn;
1811 const char *p1;
1812
1813 if (TREE_CODE (s2) != INTEGER_CST)
1814 return 0;
1815
1816 p1 = c_getstr (s1);
1817 if (p1 != NULL)
1818 {
1819 char c;
1820 const char *r;
1821
1822 if (target_char_cast (s2, &c))
1823 return 0;
1824
1825 r = strrchr (p1, c);
1826
1827 if (r == NULL)
1828 return const0_rtx;
1829
1830 /* Return an offset into the constant string argument. */
1831 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
1832 s1, ssize_int (r - p1))),
1833 target, mode, EXPAND_NORMAL);
1834 }
1835
1836 if (! integer_zerop (s2))
1837 return 0;
1838
1839 fn = built_in_decls[BUILT_IN_STRCHR];
1840 if (!fn)
1841 return 0;
1842
1843 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
1844 return expand_expr (build_function_call_expr (fn, arglist),
1845 target, mode, EXPAND_NORMAL);
1846 }
1847 }
1848
1849 /* Expand a call to the strpbrk builtin. Return 0 if we failed; the
1850 caller should emit a normal call. Otherwise try to get the result
1851 in TARGET, if convenient (and in mode MODE if that's convenient). */
1852
1853 static rtx
1854 expand_builtin_strpbrk (arglist, target, mode)
1855 tree arglist;
1856 rtx target;
1857 enum machine_mode mode;
1858 {
1859 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1860 return 0;
1861 else
1862 {
1863 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
1864 tree fn;
1865 const char *p1, *p2;
1866
1867 p2 = c_getstr (s2);
1868 if (p2 == NULL)
1869 return 0;
1870
1871 p1 = c_getstr (s1);
1872 if (p1 != NULL)
1873 {
1874 const char *r = strpbrk (p1, p2);
1875
1876 if (r == NULL)
1877 return const0_rtx;
1878
1879 /* Return an offset into the constant string argument. */
1880 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
1881 s1, ssize_int (r - p1))),
1882 target, mode, EXPAND_NORMAL);
1883 }
1884
1885 if (p2[0] == '\0')
1886 {
1887 /* strpbrk(x, "") == NULL.
1888 Evaluate and ignore the arguments in case they had
1889 side-effects. */
1890 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
1891 return const0_rtx;
1892 }
1893
1894 if (p2[1] != '\0')
1895 return 0; /* Really call strpbrk. */
1896
1897 fn = built_in_decls[BUILT_IN_STRCHR];
1898 if (!fn)
1899 return 0;
1900
1901 /* New argument list transforming strpbrk(s1, s2) to
1902 strchr(s1, s2[0]). */
1903 arglist =
1904 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
1905 arglist = tree_cons (NULL_TREE, s1, arglist);
1906 return expand_expr (build_function_call_expr (fn, arglist),
1907 target, mode, EXPAND_NORMAL);
1908 }
1909 }
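
/* Illustrative sketch, not part of builtins.c: the two strpbrk special
   cases handled above, seen from user code.  The name example_strpbrk_cases
   is hypothetical; only strpbrk and strchr from the hosted C library are
   assumed.  */
#include <assert.h>
#include <string.h>

static void
example_strpbrk_cases (void)
{
  const char *s = "abcabc";

  /* An empty accept set never matches anything.  */
  assert (strpbrk (s, "") == NULL);

  /* A one-character accept set degenerates to strchr.  */
  assert (strpbrk (s, "b") == strchr (s, 'b'));
}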
1910
1911 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
1912 bytes from constant string DATA + OFFSET and return it as target
1913 constant. */
1914
1915 static rtx
1916 builtin_memcpy_read_str (data, offset, mode)
1917 PTR data;
1918 HOST_WIDE_INT offset;
1919 enum machine_mode mode;
1920 {
1921 const char *str = (const char *) data;
1922
1923 if (offset < 0
1924 || ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
1925 > strlen (str) + 1))
1926 abort (); /* Attempt to read past the end of constant string. */
1927
1928 return c_readstr (str + offset, mode);
1929 }
1930
1931 /* Expand a call to the memcpy builtin, with arguments in ARGLIST.
1932 Return 0 if we failed, the caller should emit a normal call, otherwise
1933 try to get the result in TARGET, if convenient (and in mode MODE if
1934 that's convenient). */
1935
1936 static rtx
1937 expand_builtin_memcpy (arglist, target, mode)
1938 tree arglist;
1939 rtx target;
1940 enum machine_mode mode;
1941 {
1942 if (!validate_arglist (arglist,
1943 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
1944 return 0;
1945 else
1946 {
1947 tree dest = TREE_VALUE (arglist);
1948 tree src = TREE_VALUE (TREE_CHAIN (arglist));
1949 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
1950 const char *src_str;
1951
1952 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
1953 unsigned int dest_align
1954 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
1955 rtx dest_mem, src_mem, dest_addr, len_rtx;
1956
1957 /* If DEST is not a pointer type, call the normal function. */
1958 if (dest_align == 0)
1959 return 0;
1960
1961 /* If the LEN parameter is zero, return DEST. */
1962 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
1963 {
1964 /* Evaluate and ignore SRC in case it has side-effects. */
1965 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
1966 return expand_expr (dest, target, mode, EXPAND_NORMAL);
1967 }
1968
1969 /* If SRC is not a pointer type, don't do this
1970 operation in-line. */
1971 if (src_align == 0)
1972 return 0;
1973
1974 dest_mem = get_memory_rtx (dest);
1975 set_mem_align (dest_mem, dest_align);
1976 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
1977 src_str = c_getstr (src);
1978
1979 /* If SRC is a string constant and block move would be done
1980 by pieces, we can avoid loading the string from memory
1981 and only store the computed constants. */
1982 if (src_str
1983 && GET_CODE (len_rtx) == CONST_INT
1984 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
1985 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
1986 (PTR) src_str, dest_align))
1987 {
1988 store_by_pieces (dest_mem, INTVAL (len_rtx),
1989 builtin_memcpy_read_str,
1990 (PTR) src_str, dest_align);
1991 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
1992 #ifdef POINTERS_EXTEND_UNSIGNED
1993 if (GET_MODE (dest_mem) != ptr_mode)
1994 dest_mem = convert_memory_address (ptr_mode, dest_mem);
1995 #endif
1996 return dest_mem;
1997 }
1998
1999 src_mem = get_memory_rtx (src);
2000 set_mem_align (src_mem, src_align);
2001
2002 /* Copy word part most expediently. */
2003 dest_addr = emit_block_move (dest_mem, src_mem, len_rtx);
2004
2005 if (dest_addr == 0)
2006 {
2007 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2008 #ifdef POINTERS_EXTEND_UNSIGNED
2009 if (GET_MODE (dest_addr) != ptr_mode)
2010 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2011 #endif
2012 }
2013
2014 return dest_addr;
2015 }
2016 }
2017
2018 /* Expand expression EXP, which is a call to the strcpy builtin. Return 0
2019 if we failed; the caller should emit a normal call. Otherwise try to get
2020 the result in TARGET, if convenient (and in mode MODE if that's
2021 convenient). */
2022
2023 static rtx
2024 expand_builtin_strcpy (exp, target, mode)
2025 tree exp;
2026 rtx target;
2027 enum machine_mode mode;
2028 {
2029 tree arglist = TREE_OPERAND (exp, 1);
2030 tree fn, len;
2031
2032 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2033 return 0;
2034
2035 fn = built_in_decls[BUILT_IN_MEMCPY];
2036 if (!fn)
2037 return 0;
2038
2039 len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
2040 if (len == 0)
2041 return 0;
2042
2043 len = size_binop (PLUS_EXPR, len, ssize_int (1));
2044 chainon (arglist, build_tree_list (NULL_TREE, len));
2045 return expand_expr (build_function_call_expr (fn, arglist),
2046 target, mode, EXPAND_NORMAL);
2047 }
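
/* Illustrative sketch, not part of builtins.c: for non-overlapping buffers,
   strcpy of a string whose length is known behaves like memcpy of
   strlen + 1 bytes, which is the rewrite performed above.  The name
   example_strcpy_as_memcpy is hypothetical.  */
#include <assert.h>
#include <string.h>

static void
example_strcpy_as_memcpy (void)
{
  const char src[] = "hello";
  char a[sizeof src], b[sizeof src];

  strcpy (a, src);
  memcpy (b, src, strlen (src) + 1);   /* copy the terminator too */
  assert (memcmp (a, b, sizeof src) == 0);
}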
2048
2049 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2050 bytes from constant string DATA + OFFSET and return it as target
2051 constant. */
2052
2053 static rtx
2054 builtin_strncpy_read_str (data, offset, mode)
2055 PTR data;
2056 HOST_WIDE_INT offset;
2057 enum machine_mode mode;
2058 {
2059 const char *str = (const char *) data;
2060
2061 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
2062 return const0_rtx;
2063
2064 return c_readstr (str + offset, mode);
2065 }
2066
2067 /* Expand expression EXP, which is a call to the strncpy builtin. Return 0
2068 if we failed; the caller should emit a normal call. */
2069
2070 static rtx
2071 expand_builtin_strncpy (arglist, target, mode)
2072 tree arglist;
2073 rtx target;
2074 enum machine_mode mode;
2075 {
2076 if (!validate_arglist (arglist,
2077 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2078 return 0;
2079 else
2080 {
2081 tree slen = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
2082 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2083 tree fn;
2084
2085 /* We must be passed a constant len parameter. */
2086 if (TREE_CODE (len) != INTEGER_CST)
2087 return 0;
2088
2089 /* If the len parameter is zero, return the dst parameter. */
2090 if (integer_zerop (len))
2091 {
2092 /* Evaluate and ignore the src argument in case it has
2093 side-effects. */
2094 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
2095 VOIDmode, EXPAND_NORMAL);
2096 /* Return the dst parameter. */
2097 return expand_expr (TREE_VALUE (arglist), target, mode,
2098 EXPAND_NORMAL);
2099 }
2100
2101 /* Now, we must be passed a constant src ptr parameter. */
2102 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
2103 return 0;
2104
2105 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
2106
2107 /* We're required to pad with trailing zeros if the requested
2108 len is greater than strlen(s2)+1. In that case try to
2109 use store_by_pieces; if that fails, punt. */
2110 if (tree_int_cst_lt (slen, len))
2111 {
2112 tree dest = TREE_VALUE (arglist);
2113 unsigned int dest_align
2114 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2115 const char *p = c_getstr (TREE_VALUE (TREE_CHAIN (arglist)));
2116 rtx dest_mem;
2117
2118 if (!p || dest_align == 0 || !host_integerp (len, 1)
2119 || !can_store_by_pieces (tree_low_cst (len, 1),
2120 builtin_strncpy_read_str,
2121 (PTR) p, dest_align))
2122 return 0;
2123
2124 dest_mem = get_memory_rtx (dest);
2125 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2126 builtin_strncpy_read_str,
2127 (PTR) p, dest_align);
2128 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2129 #ifdef POINTERS_EXTEND_UNSIGNED
2130 if (GET_MODE (dest_mem) != ptr_mode)
2131 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2132 #endif
2133 return dest_mem;
2134 }
2135
2136 /* OK, transform into builtin memcpy. */
2137 fn = built_in_decls[BUILT_IN_MEMCPY];
2138 if (!fn)
2139 return 0;
2140 return expand_expr (build_function_call_expr (fn, arglist),
2141 target, mode, EXPAND_NORMAL);
2142 }
2143 }
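
/* Illustrative sketch, not part of builtins.c: the zero-padding rule that
   forces the store_by_pieces path above when the requested length exceeds
   strlen (src) + 1.  The name example_strncpy_padding is hypothetical.  */
#include <assert.h>
#include <string.h>

static void
example_strncpy_padding (void)
{
  char buf[8];

  memset (buf, 'x', sizeof buf);
  strncpy (buf, "ab", sizeof buf);

  /* Bytes past the copied string are all zero, not left untouched.  */
  assert (buf[2] == '\0' && buf[7] == '\0');
}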
2144
2145 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2146 bytes from constant string DATA + OFFSET and return it as target
2147 constant. */
2148
2149 static rtx
2150 builtin_memset_read_str (data, offset, mode)
2151 PTR data;
2152 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2153 enum machine_mode mode;
2154 {
2155 const char *c = (const char *) data;
2156 char *p = alloca (GET_MODE_SIZE (mode));
2157
2158 memset (p, *c, GET_MODE_SIZE (mode));
2159
2160 return c_readstr (p, mode);
2161 }
2162
2163 /* Callback routine for store_by_pieces. Return the RTL of a register
2164 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
2165 char value given in the RTL register data. For example, if mode is
2166 4 bytes wide, return the RTL for 0x01010101*data. */
2167
2168 static rtx
2169 builtin_memset_gen_str (data, offset, mode)
2170 PTR data;
2171 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2172 enum machine_mode mode;
2173 {
2174 rtx target, coeff;
2175 size_t size;
2176 char *p;
2177
2178 size = GET_MODE_SIZE (mode);
2179 if (size == 1)
2180 return (rtx) data;
2181
2182 p = alloca (size);
2183 memset (p, 1, size);
2184 coeff = c_readstr (p, mode);
2185
2186 target = convert_to_mode (mode, (rtx) data, 1);
2187 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
2188 return force_reg (mode, target);
2189 }
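
/* Illustrative sketch, not part of builtins.c: the coefficient trick used
   above, namely that multiplying an 8-bit value by a word of 0x01 bytes
   replicates it into every byte of the word.  The name
   example_byte_replication and the fixed 32-bit width are assumptions for
   the example only.  */
#include <assert.h>
#include <stdint.h>

static void
example_byte_replication (void)
{
  uint8_t c = 0xAB;
  uint32_t word = (uint32_t) c * UINT32_C (0x01010101);

  assert (word == UINT32_C (0xABABABAB));
}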
2190
2191 /* Expand expression EXP, which is a call to the memset builtin. Return 0
2192 if we failed; the caller should emit a normal call. Otherwise try to get
2193 the result in TARGET, if convenient (and in mode MODE if that's
2194 convenient). */
2195
2196 static rtx
2197 expand_builtin_memset (exp, target, mode)
2198 tree exp;
2199 rtx target;
2200 enum machine_mode mode;
2201 {
2202 tree arglist = TREE_OPERAND (exp, 1);
2203
2204 if (!validate_arglist (arglist,
2205 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
2206 return 0;
2207 else
2208 {
2209 tree dest = TREE_VALUE (arglist);
2210 tree val = TREE_VALUE (TREE_CHAIN (arglist));
2211 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2212 char c;
2213
2214 unsigned int dest_align
2215 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2216 rtx dest_mem, dest_addr, len_rtx;
2217
2218 /* If DEST is not a pointer type, don't do this
2219 operation in-line. */
2220 if (dest_align == 0)
2221 return 0;
2222
2223 /* If the LEN parameter is zero, return DEST. */
2224 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2225 {
2226 /* Evaluate and ignore VAL in case it has side-effects. */
2227 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
2228 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2229 }
2230
2231 if (TREE_CODE (val) != INTEGER_CST)
2232 {
2233 rtx val_rtx;
2234
2235 if (!host_integerp (len, 1))
2236 return 0;
2237
2238 if (optimize_size && tree_low_cst (len, 1) > 1)
2239 return 0;
2240
2241 /* Assume that we can memset by pieces if we can store
2242 the coefficients by pieces (in the required modes).
2243 We can't pass builtin_memset_gen_str as that emits RTL. */
2244 c = 1;
2245 if (!can_store_by_pieces (tree_low_cst (len, 1),
2246 builtin_memset_read_str,
2247 (PTR) &c, dest_align))
2248 return 0;
2249
2250 val = fold (build1 (CONVERT_EXPR, unsigned_char_type_node, val));
2251 val_rtx = expand_expr (val, NULL_RTX, VOIDmode, 0);
2252 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
2253 val_rtx);
2254 dest_mem = get_memory_rtx (dest);
2255 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2256 builtin_memset_gen_str,
2257 (PTR) val_rtx, dest_align);
2258 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2259 #ifdef POINTERS_EXTEND_UNSIGNED
2260 if (GET_MODE (dest_mem) != ptr_mode)
2261 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2262 #endif
2263 return dest_mem;
2264 }
2265
2266 if (target_char_cast (val, &c))
2267 return 0;
2268
2269 if (c)
2270 {
2271 if (!host_integerp (len, 1))
2272 return 0;
2273 if (!can_store_by_pieces (tree_low_cst (len, 1),
2274 builtin_memset_read_str, (PTR) &c,
2275 dest_align))
2276 return 0;
2277
2278 dest_mem = get_memory_rtx (dest);
2279 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2280 builtin_memset_read_str,
2281 (PTR) &c, dest_align);
2282 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2283 #ifdef POINTERS_EXTEND_UNSIGNED
2284 if (GET_MODE (dest_mem) != ptr_mode)
2285 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2286 #endif
2287 return dest_mem;
2288 }
2289
2290 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2291
2292 dest_mem = get_memory_rtx (dest);
2293 set_mem_align (dest_mem, dest_align);
2294 dest_addr = clear_storage (dest_mem, len_rtx);
2295
2296 if (dest_addr == 0)
2297 {
2298 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2299 #ifdef POINTERS_EXTEND_UNSIGNED
2300 if (GET_MODE (dest_addr) != ptr_mode)
2301 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2302 #endif
2303 }
2304
2305 return dest_addr;
2306 }
2307 }
2308
2309 /* Expand expression EXP, which is a call to the bzero builtin. Return 0
2310 if we failed; the caller should emit a normal call. */
2311
2312 static rtx
2313 expand_builtin_bzero (exp)
2314 tree exp;
2315 {
2316 tree arglist = TREE_OPERAND (exp, 1);
2317 tree dest, size, newarglist;
2318 rtx result;
2319
2320 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2321 return NULL_RTX;
2322
2323 dest = TREE_VALUE (arglist);
2324 size = TREE_VALUE (TREE_CHAIN (arglist));
2325
2326 /* New argument list transforming bzero(ptr x, int y) to
2327 memset(ptr x, int 0, size_t y). This is done this way
2328 so that if it isn't expanded inline, we fall back to
2329 calling bzero instead of memset. */
2330
2331 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
2332 newarglist = tree_cons (NULL_TREE, integer_zero_node, newarglist);
2333 newarglist = tree_cons (NULL_TREE, dest, newarglist);
2334
2335 TREE_OPERAND (exp, 1) = newarglist;
2336 result = expand_builtin_memset (exp, const0_rtx, VOIDmode);
2337
2338 /* Always restore the original arguments. */
2339 TREE_OPERAND (exp, 1) = arglist;
2340
2341 return result;
2342 }
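
/* Illustrative sketch, not part of builtins.c: the bzero-to-memset rewrite
   done above, from the caller's point of view.  The name
   example_bzero_as_memset is hypothetical, and bzero itself comes from the
   POSIX <strings.h> header, which is assumed to be available here.  */
#include <assert.h>
#include <string.h>
#include <strings.h>

static void
example_bzero_as_memset (void)
{
  char a[16], b[16];

  memset (a, 'x', sizeof a);
  memset (b, 'x', sizeof b);

  /* bzero (p, n) has the same effect as memset (p, 0, n).  */
  bzero (a, sizeof a);
  memset (b, 0, sizeof b);
  assert (memcmp (a, b, sizeof a) == 0);
}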
2343
2344 /* Expand expression EXP, which is a call to the memcmp or the strcmp builtin.
2345 ARGLIST is the argument list for this call. Return 0 if we failed and the
2346 caller should emit a normal call, otherwise try to get the result in
2347 TARGET, if convenient (and in mode MODE, if that's convenient). */
2348
2349 static rtx
2350 expand_builtin_memcmp (exp, arglist, target, mode)
2351 tree exp ATTRIBUTE_UNUSED;
2352 tree arglist;
2353 rtx target;
2354 enum machine_mode mode;
2355 {
2356 tree arg1, arg2, len;
2357 const char *p1, *p2;
2358
2359 if (!validate_arglist (arglist,
2360 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2361 return 0;
2362
2363 arg1 = TREE_VALUE (arglist);
2364 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2365 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2366
2367 /* If the len parameter is zero, return zero. */
2368 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2369 {
2370 /* Evaluate and ignore arg1 and arg2 in case they have
2371 side-effects. */
2372 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2373 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2374 return const0_rtx;
2375 }
2376
2377 p1 = c_getstr (arg1);
2378 p2 = c_getstr (arg2);
2379
2380 /* If all arguments are constant, and the value of len is not greater
2381 than the lengths of arg1 and arg2, evaluate at compile-time. */
2382 if (host_integerp (len, 1) && p1 && p2
2383 && compare_tree_int (len, strlen (p1) + 1) <= 0
2384 && compare_tree_int (len, strlen (p2) + 1) <= 0)
2385 {
2386 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
2387
2388 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
2389 }
2390
2391 /* If the len parameter is one, return an expression corresponding to
2392 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2393 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
2394 {
2395 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2396 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2397 tree ind1 =
2398 fold (build1 (CONVERT_EXPR, integer_type_node,
2399 build1 (INDIRECT_REF, cst_uchar_node,
2400 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2401 tree ind2 =
2402 fold (build1 (CONVERT_EXPR, integer_type_node,
2403 build1 (INDIRECT_REF, cst_uchar_node,
2404 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2405 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2406 return expand_expr (result, target, mode, EXPAND_NORMAL);
2407 }
2408
2409 #ifdef HAVE_cmpstrsi
2410 {
2411 rtx arg1_rtx, arg2_rtx, arg3_rtx;
2412 rtx result;
2413 rtx insn;
2414
2415 int arg1_align
2416 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2417 int arg2_align
2418 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2419 enum machine_mode insn_mode
2420 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
2421
2422 /* If either argument is not a pointer, call the function. */
2423 if (arg1_align == 0 || arg2_align == 0)
2424 return 0;
2425
2426 /* Make a place to write the result of the instruction. */
2427 result = target;
2428 if (! (result != 0
2429 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
2430 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
2431 result = gen_reg_rtx (insn_mode);
2432
2433 arg1_rtx = get_memory_rtx (arg1);
2434 arg2_rtx = get_memory_rtx (arg2);
2435 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2436 if (!HAVE_cmpstrsi)
2437 insn = NULL_RTX;
2438 else
2439 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
2440 GEN_INT (MIN (arg1_align, arg2_align)));
2441
2442 if (insn)
2443 emit_insn (insn);
2444 else
2445 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
2446 TYPE_MODE (integer_type_node), 3,
2447 XEXP (arg1_rtx, 0), Pmode,
2448 XEXP (arg2_rtx, 0), Pmode,
2449 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
2450 TREE_UNSIGNED (sizetype)),
2451 TYPE_MODE (sizetype));
2452
2453 /* Return the value in the proper mode for this function. */
2454 mode = TYPE_MODE (TREE_TYPE (exp));
2455 if (GET_MODE (result) == mode)
2456 return result;
2457 else if (target != 0)
2458 {
2459 convert_move (target, result, 0);
2460 return target;
2461 }
2462 else
2463 return convert_to_mode (mode, result, 0);
2464 }
2465 #endif
2466
2467 return 0;
2468 }
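
/* Illustrative sketch, not part of builtins.c: the length-one case folded
   above, where memcmp reduces to the difference of the first bytes read as
   unsigned char.  The name example_memcmp_len1 is hypothetical.  */
#include <assert.h>
#include <string.h>

static void
example_memcmp_len1 (void)
{
  const char a[] = "a", b[] = "c";
  int diff = *(const unsigned char *) a - *(const unsigned char *) b;
  int r = memcmp (a, b, 1);

  /* The signs of the two results agree; the magnitudes may differ.  */
  assert ((r < 0) == (diff < 0) && (r > 0) == (diff > 0));
}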
2469
2470 /* Expand expression EXP, which is a call to the strcmp builtin. Return 0
2471 if we failed; the caller should emit a normal call. Otherwise try to get
2472 the result in TARGET, if convenient. */
2473
2474 static rtx
2475 expand_builtin_strcmp (exp, target, mode)
2476 tree exp;
2477 rtx target;
2478 enum machine_mode mode;
2479 {
2480 tree arglist = TREE_OPERAND (exp, 1);
2481 tree arg1, arg2, len, len2, fn;
2482 const char *p1, *p2;
2483
2484 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2485 return 0;
2486
2487 arg1 = TREE_VALUE (arglist);
2488 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2489
2490 p1 = c_getstr (arg1);
2491 p2 = c_getstr (arg2);
2492
2493 if (p1 && p2)
2494 {
2495 const int i = strcmp (p1, p2);
2496 return (i < 0 ? constm1_rtx : (i > 0 ? const1_rtx : const0_rtx));
2497 }
2498
2499 /* If either arg is "", return an expression corresponding to
2500 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2501 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
2502 {
2503 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2504 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2505 tree ind1 =
2506 fold (build1 (CONVERT_EXPR, integer_type_node,
2507 build1 (INDIRECT_REF, cst_uchar_node,
2508 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2509 tree ind2 =
2510 fold (build1 (CONVERT_EXPR, integer_type_node,
2511 build1 (INDIRECT_REF, cst_uchar_node,
2512 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2513 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2514 return expand_expr (result, target, mode, EXPAND_NORMAL);
2515 }
2516
2517 len = c_strlen (arg1);
2518 len2 = c_strlen (arg2);
2519
2520 if (len)
2521 len = size_binop (PLUS_EXPR, ssize_int (1), len);
2522
2523 if (len2)
2524 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
2525
2526 /* If we don't have a constant length for the first, use the length
2527 of the second, if we know it. We don't require a constant for
2528 this case; some cost analysis could be done if both are available
2529 but neither is constant. For now, assume they're equally cheap
2530 unless one has side effects.
2531
2532 If both strings have constant lengths, use the smaller. This
2533 could arise if optimization results in strcmp being called with
2534 two fixed strings, or if the code was machine-generated. We should
2535 add some code to the `memcmp' handler below to deal with such
2536 situations, someday. */
2537
2538 if (!len || TREE_CODE (len) != INTEGER_CST)
2539 {
2540 if (len2 && !TREE_SIDE_EFFECTS (len2))
2541 len = len2;
2542 else if (len == 0)
2543 return 0;
2544 }
2545 else if (len2 && TREE_CODE (len2) == INTEGER_CST
2546 && tree_int_cst_lt (len2, len))
2547 len = len2;
2548
2549 /* If both arguments have side effects, we cannot optimize. */
2550 if (TREE_SIDE_EFFECTS (len))
2551 return 0;
2552
2553 fn = built_in_decls[BUILT_IN_MEMCMP];
2554 if (!fn)
2555 return 0;
2556
2557 chainon (arglist, build_tree_list (NULL_TREE, len));
2558 return expand_expr (build_function_call_expr (fn, arglist),
2559 target, mode, EXPAND_NORMAL);
2560 }
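
/* Illustrative sketch, not part of builtins.c: why the strcmp-to-memcmp
   rewrite above may use the smaller known length plus one.  Two strings
   that agree for min (strlen (a), strlen (b)) characters must differ at the
   next byte (one of them is the terminator) unless they are equal, so
   memcmp over that many bytes reaches the first difference.  The name
   example_strcmp_via_memcmp is hypothetical.  */
#include <assert.h>
#include <string.h>

static void
example_strcmp_via_memcmp (void)
{
  const char *a = "ab", *b = "abc";
  size_t n = (strlen (a) < strlen (b) ? strlen (a) : strlen (b)) + 1;

  assert ((strcmp (a, b) < 0) == (memcmp (a, b, n) < 0));
}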
2561
2562 /* Expand expression EXP, which is a call to the strncmp builtin. Return 0
2563 if we failed; the caller should emit a normal call. Otherwise try to get
2564 the result in TARGET, if convenient. */
2565
2566 static rtx
2567 expand_builtin_strncmp (exp, target, mode)
2568 tree exp;
2569 rtx target;
2570 enum machine_mode mode;
2571 {
2572 tree arglist = TREE_OPERAND (exp, 1);
2573 tree fn, newarglist, len = 0;
2574 tree arg1, arg2, arg3;
2575 const char *p1, *p2;
2576
2577 if (!validate_arglist (arglist,
2578 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2579 return 0;
2580
2581 arg1 = TREE_VALUE (arglist);
2582 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2583 arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2584
2585 /* If the len parameter is zero, return zero. */
2586 if (host_integerp (arg3, 1) && tree_low_cst (arg3, 1) == 0)
2587 {
2588 /* Evaluate and ignore arg1 and arg2 in case they have
2589 side-effects. */
2590 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2591 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2592 return const0_rtx;
2593 }
2594
2595 p1 = c_getstr (arg1);
2596 p2 = c_getstr (arg2);
2597
2598 /* If all arguments are constant, evaluate at compile-time. */
2599 if (host_integerp (arg3, 1) && p1 && p2)
2600 {
2601 const int r = strncmp (p1, p2, tree_low_cst (arg3, 1));
2602 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
2603 }
2604
2605 /* If len == 1 or (either string parameter is "" and (len >= 1)),
2606 return (*(const u_char*)arg1 - *(const u_char*)arg2). */
2607 if (host_integerp (arg3, 1)
2608 && (tree_low_cst (arg3, 1) == 1
2609 || (tree_low_cst (arg3, 1) > 1
2610 && ((p1 && *p1 == '\0') || (p2 && *p2 == '\0')))))
2611 {
2612 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2613 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2614 tree ind1 =
2615 fold (build1 (CONVERT_EXPR, integer_type_node,
2616 build1 (INDIRECT_REF, cst_uchar_node,
2617 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2618 tree ind2 =
2619 fold (build1 (CONVERT_EXPR, integer_type_node,
2620 build1 (INDIRECT_REF, cst_uchar_node,
2621 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2622 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2623 return expand_expr (result, target, mode, EXPAND_NORMAL);
2624 }
2625
2626 /* If c_strlen can determine an expression for one of the string
2627 lengths, and it doesn't have side effects, then call
2628 expand_builtin_memcmp() using length MIN(strlen(string)+1, arg3). */
2629
2630 /* Perhaps one of the strings is really constant, if so prefer
2631 that constant length over the other string's length. */
2632 if (p1)
2633 len = c_strlen (arg1);
2634 else if (p2)
2635 len = c_strlen (arg2);
2636
2637 /* If we still don't have a len, try either string arg as long
2638 as they don't have side effects. */
2639 if (!len && !TREE_SIDE_EFFECTS (arg1))
2640 len = c_strlen (arg1);
2641 if (!len && !TREE_SIDE_EFFECTS (arg2))
2642 len = c_strlen (arg2);
2643 /* If we still don't have a length, punt. */
2644 if (!len)
2645 return 0;
2646
2647 fn = built_in_decls[BUILT_IN_MEMCMP];
2648 if (!fn)
2649 return 0;
2650
2651 /* Add one to the string length. */
2652 len = fold (size_binop (PLUS_EXPR, len, ssize_int (1)));
2653
2654 /* The actual new length parameter is MIN(len,arg3). */
2655 len = fold (build (MIN_EXPR, TREE_TYPE (len), len, arg3));
2656
2657 newarglist = build_tree_list (NULL_TREE, len);
2658 newarglist = tree_cons (NULL_TREE, arg2, newarglist);
2659 newarglist = tree_cons (NULL_TREE, arg1, newarglist);
2660 return expand_expr (build_function_call_expr (fn, newarglist),
2661 target, mode, EXPAND_NORMAL);
2662 }
2663
2664 /* Expand expression EXP, which is a call to the strcat builtin.
2665 Return 0 if we failed; the caller should emit a normal call.
2666 Otherwise try to get the result in TARGET, if convenient. */
2667
2668 static rtx
2669 expand_builtin_strcat (arglist, target, mode)
2670 tree arglist;
2671 rtx target;
2672 enum machine_mode mode;
2673 {
2674 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2675 return 0;
2676 else
2677 {
2678 tree dst = TREE_VALUE (arglist),
2679 src = TREE_VALUE (TREE_CHAIN (arglist));
2680 const char *p = c_getstr (src);
2681
2682 /* If the string length is zero, return the dst parameter. */
2683 if (p && *p == '\0')
2684 return expand_expr (dst, target, mode, EXPAND_NORMAL);
2685
2686 return 0;
2687 }
2688 }
2689
2690 /* Expand expression EXP, which is a call to the strncat builtin.
2691 Return 0 if we failed; the caller should emit a normal call.
2692 Otherwise try to get the result in TARGET, if convenient. */
2693
2694 static rtx
2695 expand_builtin_strncat (arglist, target, mode)
2696 tree arglist;
2697 rtx target;
2698 enum machine_mode mode;
2699 {
2700 if (!validate_arglist (arglist,
2701 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2702 return 0;
2703 else
2704 {
2705 tree dst = TREE_VALUE (arglist),
2706 src = TREE_VALUE (TREE_CHAIN (arglist)),
2707 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2708 const char *p = c_getstr (src);
2709
2710 /* If the requested length is zero, or the src parameter string
2711 length is zero, return the dst parameter. */
2712 if (integer_zerop (len) || (p && *p == '\0'))
2713 {
2714 /* Evaluate and ignore the src and len parameters in case
2715 they have side-effects. */
2716 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2717 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
2718 return expand_expr (dst, target, mode, EXPAND_NORMAL);
2719 }
2720
2721 /* If the requested len is greater than or equal to the string
2722 length, call strcat. */
2723 if (TREE_CODE (len) == INTEGER_CST && p
2724 && compare_tree_int (len, strlen (p)) >= 0)
2725 {
2726 tree newarglist
2727 = tree_cons (NULL_TREE, dst, build_tree_list (NULL_TREE, src));
2728 tree fn = built_in_decls[BUILT_IN_STRCAT];
2729
2730 /* If the replacement _DECL isn't initialized, don't do the
2731 transformation. */
2732 if (!fn)
2733 return 0;
2734
2735 return expand_expr (build_function_call_expr (fn, newarglist),
2736 target, mode, EXPAND_NORMAL);
2737 }
2738 return 0;
2739 }
2740 }
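
/* Illustrative sketch, not part of builtins.c: the strncat-to-strcat
   rewrite above is valid because strncat always writes a terminator and
   copies at most strlen (src) characters once the bound is at least that
   long.  The name example_strncat_as_strcat is hypothetical.  */
#include <assert.h>
#include <string.h>

static void
example_strncat_as_strcat (void)
{
  char a[16] = "foo", b[16] = "foo";

  strncat (a, "bar", 8);   /* bound exceeds strlen ("bar") */
  strcat (b, "bar");
  assert (strcmp (a, b) == 0);
}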
2741
2742 /* Expand expression EXP, which is a call to the strspn builtin.
2743 Return 0 if we failed; the caller should emit a normal call.
2744 Otherwise try to get the result in TARGET, if convenient. */
2745
2746 static rtx
2747 expand_builtin_strspn (arglist, target, mode)
2748 tree arglist;
2749 rtx target;
2750 enum machine_mode mode;
2751 {
2752 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2753 return 0;
2754 else
2755 {
2756 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2757 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
2758
2759 /* If both arguments are constants, evaluate at compile-time. */
2760 if (p1 && p2)
2761 {
2762 const size_t r = strspn (p1, p2);
2763 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
2764 }
2765
2766 /* If either argument is "", return 0. */
2767 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
2768 {
2769 /* Evaluate and ignore both arguments in case either one has
2770 side-effects. */
2771 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2772 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2773 return const0_rtx;
2774 }
2775 return 0;
2776 }
2777 }
2778
2779 /* Expand expression EXP, which is a call to the strcspn builtin.
2780 Return 0 if we failed; the caller should emit a normal call.
2781 Otherwise try to get the result in TARGET, if convenient. */
2782
2783 static rtx
2784 expand_builtin_strcspn (arglist, target, mode)
2785 tree arglist;
2786 rtx target;
2787 enum machine_mode mode;
2788 {
2789 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2790 return 0;
2791 else
2792 {
2793 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2794 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
2795
2796 /* If both arguments are constants, evaluate at compile-time. */
2797 if (p1 && p2)
2798 {
2799 const size_t r = strcspn (p1, p2);
2800 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
2801 }
2802
2803 /* If the first argument is "", return 0. */
2804 if (p1 && *p1 == '\0')
2805 {
2806 /* Evaluate and ignore argument s2 in case it has
2807 side-effects. */
2808 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2809 return const0_rtx;
2810 }
2811
2812 /* If the second argument is "", return __builtin_strlen(s1). */
2813 if (p2 && *p2 == '\0')
2814 {
2815 tree newarglist = build_tree_list (NULL_TREE, s1),
2816 fn = built_in_decls[BUILT_IN_STRLEN];
2817
2818 /* If the replacement _DECL isn't initialized, don't do the
2819 transformation. */
2820 if (!fn)
2821 return 0;
2822
2823 return expand_expr (build_function_call_expr (fn, newarglist),
2824 target, mode, EXPAND_NORMAL);
2825 }
2826 return 0;
2827 }
2828 }
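
/* Illustrative sketch, not part of builtins.c: the constant-string special
   cases handled above for strcspn, together with the analogous strspn
   case.  The name example_spn_cases is hypothetical.  */
#include <assert.h>
#include <string.h>

static void
example_spn_cases (void)
{
  const char *s = "hello";

  /* No character of S is in the empty reject set, so the whole string is
     the initial segment: strcspn (s, "") == strlen (s).  */
  assert (strcspn (s, "") == strlen (s));

  /* Dually, no character of S is in the empty accept set.  */
  assert (strspn (s, "") == 0);
}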
2829
2830 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
2831 if that's convenient. */
2832
2833 rtx
2834 expand_builtin_saveregs ()
2835 {
2836 rtx val, seq;
2837
2838 /* Don't do __builtin_saveregs more than once in a function.
2839 Save the result of the first call and reuse it. */
2840 if (saveregs_value != 0)
2841 return saveregs_value;
2842
2843 /* When this function is called, it means that registers must be
2844 saved on entry to this function. So we migrate the call to the
2845 first insn of this function. */
2846
2847 start_sequence ();
2848
2849 #ifdef EXPAND_BUILTIN_SAVEREGS
2850 /* Do whatever the machine needs done in this case. */
2851 val = EXPAND_BUILTIN_SAVEREGS ();
2852 #else
2853 /* ??? We used to try to build up a call to the out-of-line function,
2854 guessing about what registers needed saving etc. This became much
2855 harder with __builtin_va_start, since we don't have a tree for a
2856 call to __builtin_saveregs to fall back on. There was exactly one
2857 port (i860) that used this code, and I'm unconvinced it could actually
2858 handle the general case. So we no longer try to handle anything
2859 weird and make the backend absorb the evil. */
2860
2861 error ("__builtin_saveregs not supported by this target");
2862 val = const0_rtx;
2863 #endif
2864
2865 seq = get_insns ();
2866 end_sequence ();
2867
2868 saveregs_value = val;
2869
2870 /* Put the insns after the NOTE that starts the function. If this
2871 is inside a start_sequence, make the outer-level insn chain current, so
2872 the code is placed at the start of the function. */
2873 push_topmost_sequence ();
2874 emit_insn_after (seq, get_insns ());
2875 pop_topmost_sequence ();
2876
2877 return val;
2878 }
2879
2880 /* __builtin_args_info (N) returns word N of the arg space info
2881 for the current function. The number and meanings of words
2882 are controlled by the definition of CUMULATIVE_ARGS. */
2883
2884 static rtx
2885 expand_builtin_args_info (exp)
2886 tree exp;
2887 {
2888 tree arglist = TREE_OPERAND (exp, 1);
2889 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
2890 int *word_ptr = (int *) &current_function_args_info;
2891 #if 0
2892 /* These are used by the code below that is #if 0'ed away. */
2893 int i;
2894 tree type, elts, result;
2895 #endif
2896
2897 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
2898 abort ();
2899
2900 if (arglist != 0)
2901 {
2902 if (!host_integerp (TREE_VALUE (arglist), 0))
2903 error ("argument of `__builtin_args_info' must be constant");
2904 else
2905 {
2906 HOST_WIDE_INT wordnum = tree_low_cst (TREE_VALUE (arglist), 0);
2907
2908 if (wordnum < 0 || wordnum >= nwords)
2909 error ("argument of `__builtin_args_info' out of range");
2910 else
2911 return GEN_INT (word_ptr[wordnum]);
2912 }
2913 }
2914 else
2915 error ("missing argument in `__builtin_args_info'");
2916
2917 return const0_rtx;
2918
2919 #if 0
2920 for (i = 0; i < nwords; i++)
2921 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
2922
2923 type = build_array_type (integer_type_node,
2924 build_index_type (build_int_2 (nwords, 0)));
2925 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
2926 TREE_CONSTANT (result) = 1;
2927 TREE_STATIC (result) = 1;
2928 result = build1 (INDIRECT_REF, build_pointer_type (type), result);
2929 TREE_CONSTANT (result) = 1;
2930 return expand_expr (result, NULL_RTX, VOIDmode, 0);
2931 #endif
2932 }
2933
2934 /* Expand ARGLIST, from a call to __builtin_next_arg. */
2935
2936 static rtx
2937 expand_builtin_next_arg (arglist)
2938 tree arglist;
2939 {
2940 tree fntype = TREE_TYPE (current_function_decl);
2941
2942 if (TYPE_ARG_TYPES (fntype) == 0
2943 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2944 == void_type_node))
2945 {
2946 error ("`va_start' used in function with fixed args");
2947 return const0_rtx;
2948 }
2949
2950 if (arglist)
2951 {
2952 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
2953 tree arg = TREE_VALUE (arglist);
2954
2955 /* Strip off all nops for the sake of the comparison. This
2956 is not quite the same as STRIP_NOPS. It does more.
2957 We must also strip off INDIRECT_REF for C++ reference
2958 parameters. */
2959 while (TREE_CODE (arg) == NOP_EXPR
2960 || TREE_CODE (arg) == CONVERT_EXPR
2961 || TREE_CODE (arg) == NON_LVALUE_EXPR
2962 || TREE_CODE (arg) == INDIRECT_REF)
2963 arg = TREE_OPERAND (arg, 0);
2964 if (arg != last_parm)
2965 warning ("second parameter of `va_start' not last named argument");
2966 }
2967 else
2968 /* Evidently an out-of-date version of <stdarg.h>; can't validate
2969 va_start's second argument, but can still work as intended. */
2970 warning ("`__builtin_next_arg' called without an argument");
2971
2972 return expand_binop (Pmode, add_optab,
2973 current_function_internal_arg_pointer,
2974 current_function_arg_offset_rtx,
2975 NULL_RTX, 0, OPTAB_LIB_WIDEN);
2976 }
2977
2978 /* Make it easier for the backends by protecting the valist argument
2979 from multiple evaluations. */
2980
2981 static tree
2982 stabilize_va_list (valist, needs_lvalue)
2983 tree valist;
2984 int needs_lvalue;
2985 {
2986 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
2987 {
2988 if (TREE_SIDE_EFFECTS (valist))
2989 valist = save_expr (valist);
2990
2991 /* For this case, the backends will be expecting a pointer to
2992 TREE_TYPE (va_list_type_node), but it's possible we've
2993 actually been given an array (an actual va_list_type_node).
2994 So fix it. */
2995 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
2996 {
2997 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
2998 tree p2 = build_pointer_type (va_list_type_node);
2999
3000 valist = build1 (ADDR_EXPR, p2, valist);
3001 valist = fold (build1 (NOP_EXPR, p1, valist));
3002 }
3003 }
3004 else
3005 {
3006 tree pt;
3007
3008 if (! needs_lvalue)
3009 {
3010 if (! TREE_SIDE_EFFECTS (valist))
3011 return valist;
3012
3013 pt = build_pointer_type (va_list_type_node);
3014 valist = fold (build1 (ADDR_EXPR, pt, valist));
3015 TREE_SIDE_EFFECTS (valist) = 1;
3016 }
3017
3018 if (TREE_SIDE_EFFECTS (valist))
3019 valist = save_expr (valist);
3020 valist = fold (build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)),
3021 valist));
3022 }
3023
3024 return valist;
3025 }
3026
3027 /* The "standard" implementation of va_start: just assign `nextarg' to
3028 the variable. */
3029
3030 void
3031 std_expand_builtin_va_start (valist, nextarg)
3032 tree valist;
3033 rtx nextarg;
3034 {
3035 tree t;
3036
3037 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3038 make_tree (ptr_type_node, nextarg));
3039 TREE_SIDE_EFFECTS (t) = 1;
3040
3041 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3042 }
3043
3044 /* Expand ARGLIST, from a call to __builtin_va_start. */
3045
3046 static rtx
3047 expand_builtin_va_start (arglist)
3048 tree arglist;
3049 {
3050 rtx nextarg;
3051 tree chain, valist;
3052
3053 chain = TREE_CHAIN (arglist);
3054
3055 if (TREE_CHAIN (chain))
3056 error ("too many arguments to function `va_start'");
3057
3058 nextarg = expand_builtin_next_arg (chain);
3059 valist = stabilize_va_list (TREE_VALUE (arglist), 1);
3060
3061 #ifdef EXPAND_BUILTIN_VA_START
3062 EXPAND_BUILTIN_VA_START (valist, nextarg);
3063 #else
3064 std_expand_builtin_va_start (valist, nextarg);
3065 #endif
3066
3067 return const0_rtx;
3068 }
3069
3070 /* The "standard" implementation of va_arg: read the value from the
3071 current (padded) address and increment by the (padded) size. */
3072
3073 rtx
3074 std_expand_builtin_va_arg (valist, type)
3075 tree valist, type;
3076 {
3077 tree addr_tree, t, type_size = NULL;
3078 tree align, alignm1;
3079 tree rounded_size;
3080 rtx addr;
3081
3082 /* Compute the rounded size of the type. */
3083 align = size_int (PARM_BOUNDARY / BITS_PER_UNIT);
3084 alignm1 = size_int (PARM_BOUNDARY / BITS_PER_UNIT - 1);
3085 if (type == error_mark_node
3086 || (type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type))) == NULL
3087 || TREE_OVERFLOW (type_size))
3088 rounded_size = size_zero_node;
3089 else
3090 rounded_size = fold (build (MULT_EXPR, sizetype,
3091 fold (build (TRUNC_DIV_EXPR, sizetype,
3092 fold (build (PLUS_EXPR, sizetype,
3093 type_size, alignm1)),
3094 align)),
3095 align));
3096
3097 /* Get AP. */
3098 addr_tree = valist;
3099 if (PAD_VARARGS_DOWN && ! integer_zerop (rounded_size))
3100 {
3101 /* Small args are padded downward. */
3102 addr_tree = fold (build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
3103 fold (build (COND_EXPR, sizetype,
3104 fold (build (GT_EXPR, sizetype,
3105 rounded_size,
3106 align)),
3107 size_zero_node,
3108 fold (build (MINUS_EXPR, sizetype,
3109 rounded_size,
3110 type_size))))));
3111 }
3112
3113 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3114 addr = copy_to_reg (addr);
3115
3116 /* Compute new value for AP. */
3117 if (! integer_zerop (rounded_size))
3118 {
3119 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3120 build (PLUS_EXPR, TREE_TYPE (valist), valist,
3121 rounded_size));
3122 TREE_SIDE_EFFECTS (t) = 1;
3123 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3124 }
3125
3126 return addr;
3127 }
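
/* Illustrative sketch, not part of builtins.c: the rounding computed above,
   namely rounded_size = ((size + align - 1) / align) * align, which rounds
   a type's size up to the argument-passing boundary.  The names
   example_round_up and example_round_up_check, and the sample values, are
   assumptions for the example only.  */
#include <assert.h>
#include <stddef.h>

static size_t
example_round_up (size_t size, size_t align)
{
  return (size + align - 1) / align * align;
}

static void
example_round_up_check (void)
{
  assert (example_round_up (1, 4) == 4);
  assert (example_round_up (4, 4) == 4);
  assert (example_round_up (5, 4) == 8);
}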
3128
3129 /* Expand __builtin_va_arg, which is not really a builtin function, but
3130 a very special sort of operator. */
3131
3132 rtx
3133 expand_builtin_va_arg (valist, type)
3134 tree valist, type;
3135 {
3136 rtx addr, result;
3137 tree promoted_type, want_va_type, have_va_type;
3138
3139 /* Verify that valist is of the proper type. */
3140
3141 want_va_type = va_list_type_node;
3142 have_va_type = TREE_TYPE (valist);
3143 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
3144 {
3145 /* If va_list is an array type, the argument may have decayed
3146 to a pointer type, e.g. by being passed to another function.
3147 In that case, unwrap both types so that we can compare the
3148 underlying records. */
3149 if (TREE_CODE (have_va_type) == ARRAY_TYPE
3150 || TREE_CODE (have_va_type) == POINTER_TYPE)
3151 {
3152 want_va_type = TREE_TYPE (want_va_type);
3153 have_va_type = TREE_TYPE (have_va_type);
3154 }
3155 }
3156 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
3157 {
3158 error ("first argument to `va_arg' not of type `va_list'");
3159 addr = const0_rtx;
3160 }
3161
3162 /* Generate a diagnostic for requesting data of a type that cannot
3163 be passed through `...' due to type promotion at the call site. */
3164 else if ((promoted_type = (*lang_hooks.types.type_promotes_to) (type))
3165 != type)
3166 {
3167 const char *name = "<anonymous type>", *pname = 0;
3168 static bool gave_help;
3169
3170 if (TYPE_NAME (type))
3171 {
3172 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE)
3173 name = IDENTIFIER_POINTER (TYPE_NAME (type));
3174 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
3175 && DECL_NAME (TYPE_NAME (type)))
3176 name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
3177 }
3178 if (TYPE_NAME (promoted_type))
3179 {
3180 if (TREE_CODE (TYPE_NAME (promoted_type)) == IDENTIFIER_NODE)
3181 pname = IDENTIFIER_POINTER (TYPE_NAME (promoted_type));
3182 else if (TREE_CODE (TYPE_NAME (promoted_type)) == TYPE_DECL
3183 && DECL_NAME (TYPE_NAME (promoted_type)))
3184 pname = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (promoted_type)));
3185 }
3186
3187 /* Unfortunately, this is merely undefined, rather than a constraint
3188 violation, so we cannot make this an error. If this call is never
3189 executed, the program is still strictly conforming. */
3190 warning ("`%s' is promoted to `%s' when passed through `...'",
3191 name, pname);
3192 if (! gave_help)
3193 {
3194 gave_help = true;
3195 warning ("(so you should pass `%s' not `%s' to `va_arg')",
3196 pname, name);
3197 }
3198
3199 /* We can, however, treat "undefined" any way we please.
3200 Call abort to encourage the user to fix the program. */
3201 expand_builtin_trap ();
3202
3203 /* This is dead code, but go ahead and finish so that the
3204 mode of the result comes out right. */
3205 addr = const0_rtx;
3206 }
3207 else
3208 {
3209 /* Make it easier for the backends by protecting the valist argument
3210 from multiple evaluations. */
3211 valist = stabilize_va_list (valist, 0);
3212
3213 #ifdef EXPAND_BUILTIN_VA_ARG
3214 addr = EXPAND_BUILTIN_VA_ARG (valist, type);
3215 #else
3216 addr = std_expand_builtin_va_arg (valist, type);
3217 #endif
3218 }
3219
3220 #ifdef POINTERS_EXTEND_UNSIGNED
3221 if (GET_MODE (addr) != Pmode)
3222 addr = convert_memory_address (Pmode, addr);
3223 #endif
3224
3225 result = gen_rtx_MEM (TYPE_MODE (type), addr);
3226 set_mem_alias_set (result, get_varargs_alias_set ());
3227
3228 return result;
3229 }
3230
3231 /* Expand ARGLIST, from a call to __builtin_va_end. */
3232
3233 static rtx
3234 expand_builtin_va_end (arglist)
3235 tree arglist;
3236 {
3237 tree valist = TREE_VALUE (arglist);
3238
3239 #ifdef EXPAND_BUILTIN_VA_END
3240 valist = stabilize_va_list (valist, 0);
3241 EXPAND_BUILTIN_VA_END (arglist);
3242 #else
3243 /* Evaluate for side effects, if needed. I hate macros that don't
3244 do that. */
3245 if (TREE_SIDE_EFFECTS (valist))
3246 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
3247 #endif
3248
3249 return const0_rtx;
3250 }
3251
3252 /* Expand ARGLIST, from a call to __builtin_va_copy. We do this as a
3253 builtin rather than just as an assignment in stdarg.h because of the
3254 nastiness of array-type va_list types. */
3255
3256 static rtx
3257 expand_builtin_va_copy (arglist)
3258 tree arglist;
3259 {
3260 tree dst, src, t;
3261
3262 dst = TREE_VALUE (arglist);
3263 src = TREE_VALUE (TREE_CHAIN (arglist));
3264
3265 dst = stabilize_va_list (dst, 1);
3266 src = stabilize_va_list (src, 0);
3267
3268 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
3269 {
3270 t = build (MODIFY_EXPR, va_list_type_node, dst, src);
3271 TREE_SIDE_EFFECTS (t) = 1;
3272 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3273 }
3274 else
3275 {
3276 rtx dstb, srcb, size;
3277
3278 /* Evaluate to pointers. */
3279 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
3280 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
3281 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
3282 VOIDmode, EXPAND_NORMAL);
3283
3284 #ifdef POINTERS_EXTEND_UNSIGNED
3285 if (GET_MODE (dstb) != Pmode)
3286 dstb = convert_memory_address (Pmode, dstb);
3287
3288 if (GET_MODE (srcb) != Pmode)
3289 srcb = convert_memory_address (Pmode, srcb);
3290 #endif
3291
3292 /* "Dereference" to BLKmode memories. */
3293 dstb = gen_rtx_MEM (BLKmode, dstb);
3294 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
3295 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
3296 srcb = gen_rtx_MEM (BLKmode, srcb);
3297 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
3298 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
3299
3300 /* Copy. */
3301 emit_block_move (dstb, srcb, size);
3302 }
3303
3304 return const0_rtx;
3305 }
3306
3307 /* Expand a call to one of the builtin functions __builtin_frame_address or
3308 __builtin_return_address. */
3309
3310 static rtx
3311 expand_builtin_frame_address (exp)
3312 tree exp;
3313 {
3314 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3315 tree arglist = TREE_OPERAND (exp, 1);
3316
3317 /* The argument must be a nonnegative integer constant.
3318 It counts the number of frames to scan up the stack.
3319 The value is the frame address or return address found in that frame. */
3320 if (arglist == 0)
3321 /* Warning about missing arg was already issued. */
3322 return const0_rtx;
3323 else if (! host_integerp (TREE_VALUE (arglist), 1))
3324 {
3325 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3326 error ("invalid arg to `__builtin_frame_address'");
3327 else
3328 error ("invalid arg to `__builtin_return_address'");
3329 return const0_rtx;
3330 }
3331 else
3332 {
3333 rtx tem
3334 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
3335 tree_low_cst (TREE_VALUE (arglist), 1),
3336 hard_frame_pointer_rtx);
3337
3338 /* Some ports cannot access arbitrary stack frames. */
3339 if (tem == NULL)
3340 {
3341 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3342 warning ("unsupported arg to `__builtin_frame_address'");
3343 else
3344 warning ("unsupported arg to `__builtin_return_address'");
3345 return const0_rtx;
3346 }
3347
3348 /* For __builtin_frame_address, return what we've got. */
3349 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3350 return tem;
3351
3352 if (GET_CODE (tem) != REG
3353 && ! CONSTANT_P (tem))
3354 tem = copy_to_mode_reg (Pmode, tem);
3355 return tem;
3356 }
3357 }
3358
3359 /* Expand a call to the alloca builtin, with arguments ARGLIST. Return 0 if
3360 we failed and the caller should emit a normal call, otherwise try to get
3361 the result in TARGET, if convenient. */
3362
3363 static rtx
3364 expand_builtin_alloca (arglist, target)
3365 tree arglist;
3366 rtx target;
3367 {
3368 rtx op0;
3369 rtx result;
3370
3371 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
3372 return 0;
3373
3374 /* Compute the argument. */
3375 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
3376
3377 /* Allocate the desired space. */
3378 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
3379
3380 #ifdef POINTERS_EXTEND_UNSIGNED
3381 if (GET_MODE (result) != ptr_mode)
3382 result = convert_memory_address (ptr_mode, result);
3383 #endif
3384
3385 return result;
3386 }
3387
3388 /* Expand a call to the ffs builtin. The arguments are in ARGLIST.
3389 Return 0 if a normal call should be emitted rather than expanding the
3390 function in-line. If convenient, the result should be placed in TARGET.
3391 SUBTARGET may be used as the target for computing one of EXP's operands. */
3392
3393 static rtx
3394 expand_builtin_ffs (arglist, target, subtarget)
3395 tree arglist;
3396 rtx target, subtarget;
3397 {
3398 rtx op0;
3399 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
3400 return 0;
3401
3402 /* Compute the argument. */
3403 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
3404 /* Compute ffs, into TARGET if possible.
3405 Set TARGET to wherever the result comes back. */
3406 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
3407 ffs_optab, op0, target, 1);
3408 if (target == 0)
3409 abort ();
3410 return target;
3411 }
3412
3413 /* If the string passed to fputs is a constant of known length, transform
3414 the call into fputc (length 1), fwrite (longer), or delete it (length 0). */
3415
3416 static rtx
3417 expand_builtin_fputs (arglist, ignore, unlocked)
3418 tree arglist;
3419 int ignore;
3420 int unlocked;
3421 {
3422 tree len, fn;
3423 tree fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
3424 : built_in_decls[BUILT_IN_FPUTC];
3425 tree fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
3426 : built_in_decls[BUILT_IN_FWRITE];
3427
3428 /* If the return value is used, or the replacement _DECL isn't
3429 initialized, don't do the transformation. */
3430 if (!ignore || !fn_fputc || !fn_fwrite)
3431 return 0;
3432
3433 /* Verify the arguments in the original call. */
3434 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3435 return 0;
3436
3437 /* Get the length of the string passed to fputs. If the length
3438 can't be determined, punt. */
3439 if (!(len = c_strlen (TREE_VALUE (arglist)))
3440 || TREE_CODE (len) != INTEGER_CST)
3441 return 0;
3442
3443 switch (compare_tree_int (len, 1))
3444 {
3445 case -1: /* length is 0, delete the call entirely. */
3446 {
3447 /* Evaluate and ignore the argument in case it has
3448 side-effects. */
3449 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
3450 VOIDmode, EXPAND_NORMAL);
3451 return const0_rtx;
3452 }
3453 case 0: /* length is 1, call fputc. */
3454 {
3455 const char *p = c_getstr (TREE_VALUE (arglist));
3456
3457 if (p != NULL)
3458 {
3459 /* New argument list transforming fputs(string, stream) to
3460 fputc(string[0], stream). */
3461 arglist =
3462 build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
3463 arglist =
3464 tree_cons (NULL_TREE, build_int_2 (p[0], 0), arglist);
3465 fn = fn_fputc;
3466 break;
3467 }
3468 }
3469 /* FALLTHROUGH */
3470 case 1: /* length is greater than 1, call fwrite. */
3471 {
3472 tree string_arg = TREE_VALUE (arglist);
3473
3474 /* New argument list transforming fputs(string, stream) to
3475 fwrite(string, 1, len, stream). */
3476 arglist = build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
3477 arglist = tree_cons (NULL_TREE, len, arglist);
3478 arglist = tree_cons (NULL_TREE, size_one_node, arglist);
3479 arglist = tree_cons (NULL_TREE, string_arg, arglist);
3480 fn = fn_fwrite;
3481 break;
3482 }
3483 default:
3484 abort ();
3485 }
3486
3487 return expand_expr (build_function_call_expr (fn, arglist),
3488 (ignore ? const0_rtx : NULL_RTX),
3489 VOIDmode, EXPAND_NORMAL);
3490 }
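
/* Illustrative sketch, not part of builtins.c: the user-visible effect of
   the fputs rewrites above for a constant string.  The name
   example_fputs_rewrites is hypothetical; fputc and fwrite are the standard
   C library functions.  */
#include <stdio.h>
#include <string.h>

static void
example_fputs_rewrites (FILE *f)
{
  /* Length 1: fputs ("x", f) writes the same byte as fputc ('x', f).  */
  fputc ('x', f);

  /* Length > 1: fputs ("hello", f) writes the same bytes as
     fwrite ("hello", 1, strlen ("hello"), f).  */
  fwrite ("hello", 1, strlen ("hello"), f);
}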
3491
3492 /* Expand a call to __builtin_expect. We return our argument and emit a
3493 NOTE_INSN_EXPECTED_VALUE note. This is the expansion of __builtin_expect in
3494 a non-jump context. */
3495
3496 static rtx
3497 expand_builtin_expect (arglist, target)
3498 tree arglist;
3499 rtx target;
3500 {
3501 tree exp, c;
3502 rtx note, rtx_c;
3503
3504 if (arglist == NULL_TREE
3505 || TREE_CHAIN (arglist) == NULL_TREE)
3506 return const0_rtx;
3507 exp = TREE_VALUE (arglist);
3508 c = TREE_VALUE (TREE_CHAIN (arglist));
3509
3510 if (TREE_CODE (c) != INTEGER_CST)
3511 {
3512 error ("second arg to `__builtin_expect' must be a constant");
3513 c = integer_zero_node;
3514 }
3515
3516 target = expand_expr (exp, target, VOIDmode, EXPAND_NORMAL);
3517
3518 /* Don't bother with expected value notes for integral constants. */
3519 if (GET_CODE (target) != CONST_INT)
3520 {
3521 /* We do need to force this into a register so that we can be
3522 moderately sure to be able to correctly interpret the branch
3523 condition later. */
3524 target = force_reg (GET_MODE (target), target);
3525
3526 rtx_c = expand_expr (c, NULL_RTX, GET_MODE (target), EXPAND_NORMAL);
3527
3528 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
3529 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, target, rtx_c);
3530 }
3531
3532 return target;
3533 }
3534
3535 /* Like expand_builtin_expect, except do this in a jump context. This is
3536 called from do_jump if the conditional is a __builtin_expect. Return either
3537 a list of insns to emit the jump or NULL if we cannot optimize
3538 __builtin_expect. We need to optimize this at jump time so that machines
3539 like the PowerPC don't turn the test into an SCC operation and then jump
3540 based on the test being 0/1. */
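/* Illustrative sketch (added commentary, not part of the original sources):
   for a conditional such as

       if (__builtin_expect (p == 0, 0))
         handle_rare_case ();

   (handle_rare_case being a made-up placeholder), the test is expanded
   directly to conditional jumps and each jump is tagged below with
   PRED_BUILTIN_EXPECT, predicting the rare branch as not taken, instead of
   materializing p == 0 in a register and branching on that value.  */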
3541
3542 rtx
3543 expand_builtin_expect_jump (exp, if_false_label, if_true_label)
3544 tree exp;
3545 rtx if_false_label;
3546 rtx if_true_label;
3547 {
3548 tree arglist = TREE_OPERAND (exp, 1);
3549 tree arg0 = TREE_VALUE (arglist);
3550 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3551 rtx ret = NULL_RTX;
3552
3553 /* Only handle __builtin_expect (test, 0) and
3554 __builtin_expect (test, 1). */
3555 if (TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
3556 && (integer_zerop (arg1) || integer_onep (arg1)))
3557 {
3558 int num_jumps = 0;
3559 rtx insn;
3560
3561 /* If we fail to locate an appropriate conditional jump, we'll
3562 fall back to normal evaluation. Ensure that the expression
3563 can be re-evaluated. */
3564 switch (unsafe_for_reeval (arg0))
3565 {
3566 case 0: /* Safe. */
3567 break;
3568
3569 case 1: /* Mildly unsafe. */
3570 arg0 = unsave_expr (arg0);
3571 break;
3572
3573 case 2: /* Wildly unsafe. */
3574 return NULL_RTX;
3575 }
3576
3577 /* Expand the jump insns. */
3578 start_sequence ();
3579 do_jump (arg0, if_false_label, if_true_label);
3580 ret = get_insns ();
3581 end_sequence ();
3582
3583 /* Now that the __builtin_expect has been validated, go through and add
3584 the expected-value predictions to each of the conditional jumps. If we
3585 run into an error, just give up and generate the 'safe' code of doing
3586 an SCC operation and then branching on that. */
3587 insn = ret;
3588 while (insn != NULL_RTX)
3589 {
3590 rtx next = NEXT_INSN (insn);
3591 rtx pattern;
3592
3593 if (GET_CODE (insn) == JUMP_INSN && any_condjump_p (insn)
3594 && (pattern = pc_set (insn)) != NULL_RTX)
3595 {
3596 rtx ifelse = SET_SRC (pattern);
3597 rtx label;
3598 int taken;
3599
3600 if (GET_CODE (ifelse) != IF_THEN_ELSE)
3601 goto do_next_insn;
3602
3603 if (GET_CODE (XEXP (ifelse, 1)) == LABEL_REF)
3604 {
3605 taken = 1;
3606 label = XEXP (XEXP (ifelse, 1), 0);
3607 }
3608 /* An inverted jump reverses the probabilities. */
3609 else if (GET_CODE (XEXP (ifelse, 2)) == LABEL_REF)
3610 {
3611 taken = 0;
3612 label = XEXP (XEXP (ifelse, 2), 0);
3613 }
3614 /* We shouldn't have to worry about conditional returns during
3615 the expansion stage, but handle it gracefully anyway. */
3616 else if (GET_CODE (XEXP (ifelse, 1)) == RETURN)
3617 {
3618 taken = 1;
3619 label = NULL_RTX;
3620 }
3621 /* An inverted return reverses the probabilities. */
3622 else if (GET_CODE (XEXP (ifelse, 2)) == RETURN)
3623 {
3624 taken = 0;
3625 label = NULL_RTX;
3626 }
3627 else
3628 goto do_next_insn;
3629
3630 /* If the test is expected to fail, reverse the
3631 probabilities. */
3632 if (integer_zerop (arg1))
3633 taken = 1 - taken;
3634
3635 /* If we are jumping to the false label, reverse the
3636 probabilities. */
3637 if (label == NULL_RTX)
3638 ; /* conditional return */
3639 else if (label == if_false_label)
3640 taken = 1 - taken;
3641 else if (label != if_true_label)
3642 goto do_next_insn;
3643
3644 num_jumps++;
3645 predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
3646 }
3647
3648 do_next_insn:
3649 insn = next;
3650 }
3651
3652 /* If no jumps were modified, fail and do __builtin_expect the normal
3653 way. */
3654 if (num_jumps == 0)
3655 ret = NULL_RTX;
3656 }
3657
3658 return ret;
3659 }
3660
3661 void
3662 expand_builtin_trap ()
3663 {
3664 #ifdef HAVE_trap
3665 if (HAVE_trap)
3666 emit_insn (gen_trap ());
3667 else
3668 #endif
3669 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
3670 emit_barrier ();
3671 }
3672 \f
3673 /* Expand an expression EXP that calls a built-in function,
3674 with result going to TARGET if that's convenient
3675 (and in mode MODE if that's convenient).
3676 SUBTARGET may be used as the target for computing one of EXP's operands.
3677 IGNORE is nonzero if the value is to be ignored. */
3678
3679 rtx
3680 expand_builtin (exp, target, subtarget, mode, ignore)
3681 tree exp;
3682 rtx target;
3683 rtx subtarget;
3684 enum machine_mode mode;
3685 int ignore;
3686 {
3687 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3688 tree arglist = TREE_OPERAND (exp, 1);
3689 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
3690
3691 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
3692 return (*targetm.expand_builtin) (exp, target, subtarget, mode, ignore);
3693
3694 /* When not optimizing, generate calls to library functions for a certain
3695 set of builtins. */
3696 if (!optimize && !CALLED_AS_BUILT_IN (fndecl))
3697 switch (fcode)
3698 {
3699 case BUILT_IN_SQRT:
3700 case BUILT_IN_SQRTF:
3701 case BUILT_IN_SQRTL:
3702 case BUILT_IN_SIN:
3703 case BUILT_IN_SINF:
3704 case BUILT_IN_SINL:
3705 case BUILT_IN_COS:
3706 case BUILT_IN_COSF:
3707 case BUILT_IN_COSL:
3708 case BUILT_IN_EXP:
3709 case BUILT_IN_EXPF:
3710 case BUILT_IN_EXPL:
3711 case BUILT_IN_MEMSET:
3712 case BUILT_IN_MEMCPY:
3713 case BUILT_IN_MEMCMP:
3714 case BUILT_IN_BCMP:
3715 case BUILT_IN_BZERO:
3716 case BUILT_IN_INDEX:
3717 case BUILT_IN_RINDEX:
3718 case BUILT_IN_STRCHR:
3719 case BUILT_IN_STRRCHR:
3720 case BUILT_IN_STRLEN:
3721 case BUILT_IN_STRCPY:
3722 case BUILT_IN_STRNCPY:
3723 case BUILT_IN_STRNCMP:
3724 case BUILT_IN_STRSTR:
3725 case BUILT_IN_STRPBRK:
3726 case BUILT_IN_STRCAT:
3727 case BUILT_IN_STRNCAT:
3728 case BUILT_IN_STRSPN:
3729 case BUILT_IN_STRCSPN:
3730 case BUILT_IN_STRCMP:
3731 case BUILT_IN_FFS:
3732 case BUILT_IN_PUTCHAR:
3733 case BUILT_IN_PUTS:
3734 case BUILT_IN_PRINTF:
3735 case BUILT_IN_FPUTC:
3736 case BUILT_IN_FPUTS:
3737 case BUILT_IN_FWRITE:
3738 case BUILT_IN_PUTCHAR_UNLOCKED:
3739 case BUILT_IN_PUTS_UNLOCKED:
3740 case BUILT_IN_PRINTF_UNLOCKED:
3741 case BUILT_IN_FPUTC_UNLOCKED:
3742 case BUILT_IN_FPUTS_UNLOCKED:
3743 case BUILT_IN_FWRITE_UNLOCKED:
3744 return expand_call (exp, target, ignore);
3745
3746 default:
3747 break;
3748 }
3749
3750 switch (fcode)
3751 {
3752 case BUILT_IN_ABS:
3753 case BUILT_IN_LABS:
3754 case BUILT_IN_LLABS:
3755 case BUILT_IN_IMAXABS:
3756 case BUILT_IN_FABS:
3757 case BUILT_IN_FABSF:
3758 case BUILT_IN_FABSL:
3759 /* build_function_call changes these into ABS_EXPR. */
3760 abort ();
3761
3762 case BUILT_IN_CONJ:
3763 case BUILT_IN_CONJF:
3764 case BUILT_IN_CONJL:
3765 case BUILT_IN_CREAL:
3766 case BUILT_IN_CREALF:
3767 case BUILT_IN_CREALL:
3768 case BUILT_IN_CIMAG:
3769 case BUILT_IN_CIMAGF:
3770 case BUILT_IN_CIMAGL:
3771 /* expand_tree_builtin changes these into CONJ_EXPR, REALPART_EXPR
3772 and IMAGPART_EXPR. */
3773 abort ();
3774
3775 case BUILT_IN_SIN:
3776 case BUILT_IN_SINF:
3777 case BUILT_IN_SINL:
3778 case BUILT_IN_COS:
3779 case BUILT_IN_COSF:
3780 case BUILT_IN_COSL:
3781 case BUILT_IN_EXP:
3782 case BUILT_IN_EXPF:
3783 case BUILT_IN_EXPL:
3784 case BUILT_IN_LOG:
3785 case BUILT_IN_LOGF:
3786 case BUILT_IN_LOGL:
3787 /* Treat these like sqrt only if unsafe math optimizations are allowed,
3788 because of possible accuracy problems. */
3789 if (! flag_unsafe_math_optimizations)
3790 break;
3791 case BUILT_IN_SQRT:
3792 case BUILT_IN_SQRTF:
3793 case BUILT_IN_SQRTL:
3794 target = expand_builtin_mathfn (exp, target, subtarget);
3795 if (target)
3796 return target;
3797 break;
3798
3799 case BUILT_IN_APPLY_ARGS:
3800 return expand_builtin_apply_args ();
3801
3802 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
3803 FUNCTION with a copy of the parameters described by
3804 ARGUMENTS, and ARGSIZE. It returns a block of memory
3805 allocated on the stack into which are stored all the registers
3806 that might possibly be used for returning the result of a
3807 function. ARGUMENTS is the value returned by
3808 __builtin_apply_args. ARGSIZE is the number of bytes of
3809 arguments that must be copied. ??? How should this value be
3810 computed? We'll also need a safe worst case value for varargs
3811 functions. */
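/* Illustrative sketch (assumed usage, not taken from these sources): a
   transparent wrapper built on these builtins might look like

       void *args = __builtin_apply_args ();
       void *result = __builtin_apply ((void (*) ()) wrapped_fn, args, 64);
       __builtin_return (result);

   where wrapped_fn and the argument-block size of 64 bytes are made-up
   placeholders; as noted above, choosing a safe ARGSIZE in general is an
   open question.  */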
3812 case BUILT_IN_APPLY:
3813 if (!validate_arglist (arglist, POINTER_TYPE,
3814 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3815 && !validate_arglist (arglist, REFERENCE_TYPE,
3816 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3817 return const0_rtx;
3818 else
3819 {
3820 int i;
3821 tree t;
3822 rtx ops[3];
3823
3824 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
3825 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
3826
3827 return expand_builtin_apply (ops[0], ops[1], ops[2]);
3828 }
3829
3830 /* __builtin_return (RESULT) causes the function to return the
3831 value described by RESULT. RESULT is address of the block of
3832 memory returned by __builtin_apply. */
3833 case BUILT_IN_RETURN:
3834 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
3835 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
3836 NULL_RTX, VOIDmode, 0));
3837 return const0_rtx;
3838
3839 case BUILT_IN_SAVEREGS:
3840 return expand_builtin_saveregs ();
3841
3842 case BUILT_IN_ARGS_INFO:
3843 return expand_builtin_args_info (exp);
3844
3845 /* Return the address of the first anonymous stack arg. */
3846 case BUILT_IN_NEXT_ARG:
3847 return expand_builtin_next_arg (arglist);
3848
3849 case BUILT_IN_CLASSIFY_TYPE:
3850 return expand_builtin_classify_type (arglist);
3851
3852 case BUILT_IN_CONSTANT_P:
3853 return expand_builtin_constant_p (exp);
3854
3855 case BUILT_IN_FRAME_ADDRESS:
3856 case BUILT_IN_RETURN_ADDRESS:
3857 return expand_builtin_frame_address (exp);
3858
3859 /* Returns the address of the area where the structure is returned.
3860 0 otherwise. */
3861 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
3862 if (arglist != 0
3863 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
3864 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
3865 return const0_rtx;
3866 else
3867 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
3868
3869 case BUILT_IN_ALLOCA:
3870 target = expand_builtin_alloca (arglist, target);
3871 if (target)
3872 return target;
3873 break;
3874
3875 case BUILT_IN_FFS:
3876 target = expand_builtin_ffs (arglist, target, subtarget);
3877 if (target)
3878 return target;
3879 break;
3880
3881 case BUILT_IN_STRLEN:
3882 target = expand_builtin_strlen (exp, target);
3883 if (target)
3884 return target;
3885 break;
3886
3887 case BUILT_IN_STRCPY:
3888 target = expand_builtin_strcpy (exp, target, mode);
3889 if (target)
3890 return target;
3891 break;
3892
3893 case BUILT_IN_STRNCPY:
3894 target = expand_builtin_strncpy (arglist, target, mode);
3895 if (target)
3896 return target;
3897 break;
3898
3899 case BUILT_IN_STRCAT:
3900 target = expand_builtin_strcat (arglist, target, mode);
3901 if (target)
3902 return target;
3903 break;
3904
3905 case BUILT_IN_STRNCAT:
3906 target = expand_builtin_strncat (arglist, target, mode);
3907 if (target)
3908 return target;
3909 break;
3910
3911 case BUILT_IN_STRSPN:
3912 target = expand_builtin_strspn (arglist, target, mode);
3913 if (target)
3914 return target;
3915 break;
3916
3917 case BUILT_IN_STRCSPN:
3918 target = expand_builtin_strcspn (arglist, target, mode);
3919 if (target)
3920 return target;
3921 break;
3922
3923 case BUILT_IN_STRSTR:
3924 target = expand_builtin_strstr (arglist, target, mode);
3925 if (target)
3926 return target;
3927 break;
3928
3929 case BUILT_IN_STRPBRK:
3930 target = expand_builtin_strpbrk (arglist, target, mode);
3931 if (target)
3932 return target;
3933 break;
3934
3935 case BUILT_IN_INDEX:
3936 case BUILT_IN_STRCHR:
3937 target = expand_builtin_strchr (arglist, target, mode);
3938 if (target)
3939 return target;
3940 break;
3941
3942 case BUILT_IN_RINDEX:
3943 case BUILT_IN_STRRCHR:
3944 target = expand_builtin_strrchr (arglist, target, mode);
3945 if (target)
3946 return target;
3947 break;
3948
3949 case BUILT_IN_MEMCPY:
3950 target = expand_builtin_memcpy (arglist, target, mode);
3951 if (target)
3952 return target;
3953 break;
3954
3955 case BUILT_IN_MEMSET:
3956 target = expand_builtin_memset (exp, target, mode);
3957 if (target)
3958 return target;
3959 break;
3960
3961 case BUILT_IN_BZERO:
3962 target = expand_builtin_bzero (exp);
3963 if (target)
3964 return target;
3965 break;
3966
3967 case BUILT_IN_STRCMP:
3968 target = expand_builtin_strcmp (exp, target, mode);
3969 if (target)
3970 return target;
3971 break;
3972
3973 case BUILT_IN_STRNCMP:
3974 target = expand_builtin_strncmp (exp, target, mode);
3975 if (target)
3976 return target;
3977 break;
3978
3979 case BUILT_IN_BCMP:
3980 case BUILT_IN_MEMCMP:
3981 target = expand_builtin_memcmp (exp, arglist, target, mode);
3982 if (target)
3983 return target;
3984 break;
3985
3986 case BUILT_IN_SETJMP:
3987 target = expand_builtin_setjmp (arglist, target);
3988 if (target)
3989 return target;
3990 break;
3991
3992 /* __builtin_longjmp is passed a pointer to an array of five words.
3993 It's similar to the C library longjmp function but works with
3994 __builtin_setjmp above. */
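/* Illustrative sketch (assumed usage, not taken from these sources):

       void *buf[5];
       if (__builtin_setjmp (buf) == 0)
         do_protected_work ();
       else
         recover ();

   where do_protected_work and recover are made-up placeholders and some
   callee later executes __builtin_longjmp (buf, 1); the second argument
   must be the literal constant 1, as checked below.  */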
3995 case BUILT_IN_LONGJMP:
3996 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3997 break;
3998 else
3999 {
4000 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
4001 VOIDmode, 0);
4002 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
4003 NULL_RTX, VOIDmode, 0);
4004
4005 if (value != const1_rtx)
4006 {
4007 error ("__builtin_longjmp second argument must be 1");
4008 return const0_rtx;
4009 }
4010
4011 expand_builtin_longjmp (buf_addr, value);
4012 return const0_rtx;
4013 }
4014
4015 case BUILT_IN_TRAP:
4016 expand_builtin_trap ();
4017 return const0_rtx;
4018
4019 case BUILT_IN_FPUTS:
4020 target = expand_builtin_fputs (arglist, ignore, /*unlocked=*/ 0);
4021 if (target)
4022 return target;
4023 break;
4024 case BUILT_IN_FPUTS_UNLOCKED:
4025 target = expand_builtin_fputs (arglist, ignore, /*unlocked=*/ 1);
4026 if (target)
4027 return target;
4028 break;
4029
4030 /* Various hooks for the DWARF 2 __throw routine. */
4031 case BUILT_IN_UNWIND_INIT:
4032 expand_builtin_unwind_init ();
4033 return const0_rtx;
4034 case BUILT_IN_DWARF_CFA:
4035 return virtual_cfa_rtx;
4036 #ifdef DWARF2_UNWIND_INFO
4037 case BUILT_IN_DWARF_FP_REGNUM:
4038 return expand_builtin_dwarf_fp_regnum ();
4039 case BUILT_IN_INIT_DWARF_REG_SIZES:
4040 expand_builtin_init_dwarf_reg_sizes (TREE_VALUE (arglist));
4041 return const0_rtx;
4042 #endif
4043 case BUILT_IN_FROB_RETURN_ADDR:
4044 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
4045 case BUILT_IN_EXTRACT_RETURN_ADDR:
4046 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
4047 case BUILT_IN_EH_RETURN:
4048 expand_builtin_eh_return (TREE_VALUE (arglist),
4049 TREE_VALUE (TREE_CHAIN (arglist)));
4050 return const0_rtx;
4051 #ifdef EH_RETURN_DATA_REGNO
4052 case BUILT_IN_EH_RETURN_DATA_REGNO:
4053 return expand_builtin_eh_return_data_regno (arglist);
4054 #endif
4055 case BUILT_IN_VA_START:
4056 case BUILT_IN_STDARG_START:
4057 return expand_builtin_va_start (arglist);
4058 case BUILT_IN_VA_END:
4059 return expand_builtin_va_end (arglist);
4060 case BUILT_IN_VA_COPY:
4061 return expand_builtin_va_copy (arglist);
4062 case BUILT_IN_EXPECT:
4063 return expand_builtin_expect (arglist, target);
4064 case BUILT_IN_PREFETCH:
4065 expand_builtin_prefetch (arglist);
4066 return const0_rtx;
4067
4068
4069 default: /* just do a library call if the builtin is unknown */
4070 if (!DECL_ASSEMBLER_NAME_SET_P (fndecl))
4071 error ("built-in function `%s' not currently supported",
4072 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
4073 }
4074
4075 /* The switch statement above can drop through to cause the function
4076 to be called normally. */
4077 return expand_call (exp, target, ignore);
4078 }
4079
4080 /* Fold a call to __builtin_constant_p, if we know it will evaluate to a
4081 constant. ARGLIST is the argument list of the call. */
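/* Illustrative sketch (added commentary, not part of the original sources):
   under the rules below, __builtin_constant_p (3) and
   __builtin_constant_p ("abc") fold to 1 immediately; a call whose argument
   has side effects, has pointer or aggregate type without being a literal,
   or appears where no further optimization will run folds to 0; anything
   still undecided is left unfolded for later RTL optimization to answer.  */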
4082
4083 static tree
4084 fold_builtin_constant_p (arglist)
4085 tree arglist;
4086 {
4087 if (arglist == 0)
4088 return 0;
4089
4090 arglist = TREE_VALUE (arglist);
4091
4092 /* We return 1 for a numeric type that's known to be a constant
4093 value at compile-time or for an aggregate type that's a
4094 literal constant. */
4095 STRIP_NOPS (arglist);
4096
4097 /* If we know this is a constant, return the integer constant one. */
4098 if (TREE_CODE_CLASS (TREE_CODE (arglist)) == 'c'
4099 || (TREE_CODE (arglist) == CONSTRUCTOR
4100 && TREE_CONSTANT (arglist))
4101 || (TREE_CODE (arglist) == ADDR_EXPR
4102 && TREE_CODE (TREE_OPERAND (arglist, 0)) == STRING_CST))
4103 return integer_one_node;
4104
4105 /* If we aren't going to be running CSE or this expression
4106 has side effects, show we don't know it to be a constant.
4107 Likewise if it's a pointer or aggregate type since in those
4108 cases we only want literals, since those are only optimized
4109 when generating RTL, not later.
4110 And finally, if we are compiling an initializer, not code, we
4111 need to return a definite result now; there's not going to be any
4112 more optimization done. */
4113 if (TREE_SIDE_EFFECTS (arglist) || cse_not_expected
4114 || AGGREGATE_TYPE_P (TREE_TYPE (arglist))
4115 || POINTER_TYPE_P (TREE_TYPE (arglist))
4116 || cfun == 0)
4117 return integer_zero_node;
4118
4119 return 0;
4120 }
4121
4122 /* Fold a call to __builtin_classify_type. */
4123
4124 static tree
4125 fold_builtin_classify_type (arglist)
4126 tree arglist;
4127 {
4128 if (arglist == 0)
4129 return build_int_2 (no_type_class, 0);
4130
4131 return build_int_2 (type_to_class (TREE_TYPE (TREE_VALUE (arglist))), 0);
4132 }
4133
4134 /* Used by constant folding to eliminate some builtin calls early. EXP is
4135 the CALL_EXPR of a call to a builtin function. */
4136
4137 tree
4138 fold_builtin (exp)
4139 tree exp;
4140 {
4141 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4142 tree arglist = TREE_OPERAND (exp, 1);
4143 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4144
4145 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
4146 return 0;
4147
4148 switch (fcode)
4149 {
4150 case BUILT_IN_CONSTANT_P:
4151 return fold_builtin_constant_p (arglist);
4152
4153 case BUILT_IN_CLASSIFY_TYPE:
4154 return fold_builtin_classify_type (arglist);
4155
4156 case BUILT_IN_STRLEN:
4157 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4158 {
4159 tree len = c_strlen (TREE_VALUE (arglist));
4160 if (len != 0)
4161 return len;
4162 }
4163 break;
4164
4165 default:
4166 break;
4167 }
4168
4169 return 0;
4170 }
4171
4172 static tree
4173 build_function_call_expr (fn, arglist)
4174 tree fn, arglist;
4175 {
4176 tree call_expr;
4177
4178 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
4179 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
4180 call_expr, arglist);
4181 TREE_SIDE_EFFECTS (call_expr) = 1;
4182 return fold (call_expr);
4183 }
4184
4185 /* This function validates the types of a function call argument list
4186 represented as a tree chain of parameters against a specified list
4187 of tree_codes. If the last specifier is a 0, that represents an
4188 ellipsis; otherwise the last specifier must be a VOID_TYPE. */
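/* Illustrative sketch (added commentary, not part of the original sources):
   the fputs expander above checks its two arguments with

       validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   which succeeds only for exactly two pointer-typed arguments; a trailing 0
   in place of VOID_TYPE would instead accept any number of additional
   arguments.  */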
4189
4190 static int
4191 validate_arglist VPARAMS ((tree arglist, ...))
4192 {
4193 enum tree_code code;
4194 int res = 0;
4195
4196 VA_OPEN (ap, arglist);
4197 VA_FIXEDARG (ap, tree, arglist);
4198
4199 do {
4200 code = va_arg (ap, enum tree_code);
4201 switch (code)
4202 {
4203 case 0:
4204 /* This signifies an ellipsis; any further arguments are all ok. */
4205 res = 1;
4206 goto end;
4207 case VOID_TYPE:
4208 /* This signifies an endlink; if no arguments remain, return
4209 true, otherwise return false. */
4210 res = arglist == 0;
4211 goto end;
4212 default:
4213 /* If no parameters remain or the parameter's code does not
4214 match the specified code, return false. Otherwise continue
4215 checking any remaining arguments. */
4216 if (arglist == 0 || code != TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))))
4217 goto end;
4218 break;
4219 }
4220 arglist = TREE_CHAIN (arglist);
4221 } while (1);
4222
4223 /* We need gotos here since we can only have one VA_CLOSE in a
4224 function. */
4225 end: ;
4226 VA_CLOSE (ap);
4227
4228 return res;
4229 }
4230
4231 /* Default version of target-specific builtin setup that does nothing. */
4232
4233 void
4234 default_init_builtins ()
4235 {
4236 }
4237
4238 /* Default target-specific builtin expander that does nothing. */
4239
4240 rtx
4241 default_expand_builtin (exp, target, subtarget, mode, ignore)
4242 tree exp ATTRIBUTE_UNUSED;
4243 rtx target ATTRIBUTE_UNUSED;
4244 rtx subtarget ATTRIBUTE_UNUSED;
4245 enum machine_mode mode ATTRIBUTE_UNUSED;
4246 int ignore ATTRIBUTE_UNUSED;
4247 {
4248 return NULL_RTX;
4249 }